Dataset schema (one record per source file):

  column            type            range
  repo              stringlengths   2 .. 99
  file              stringlengths   13 .. 225
  code              stringlengths   0 .. 18.3M
  file_length       int64           0 .. 18.3M
  avg_line_length   float64         0 .. 1.36M
  max_line_length   int64           0 .. 4.26M
  extension_type    stringclasses   1 value
repo: FATE
file: FATE-master/python/federatedml/secureprotol/number_theory/group/twisted_edwards_curve_group.py
code:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.secureprotol.number_theory.field.integers_modulo_prime_field import IntegersModuloPrimeArithmetic, \
    IntegersModuloPrimeElement
from federatedml.secureprotol.number_theory.group.cyclc_group import CyclicGroupArithmetic, CyclicGroupElement
from federatedml.util.conversion import int_to_bytes, bytes_to_int, int_to_binary_representation


class TwistedEdwardsCurveElement(CyclicGroupElement):
    def __init__(self, x, y, arithmetic=None):
        super(TwistedEdwardsCurveElement, self).__init__()
        self.x = x  # X-coordinate
        self.y = y  # Y-coordinate
        if arithmetic is not None:
            if not arithmetic.is_in_group(self):
                raise ValueError("This element is not in TEC group")

    def output(self):
        """
        Output (X-coordinate, Y-coordinate)
        :return: str
        """
        return "(" + str(self.x.val) + ", " + str(self.y.val) + ")"


class TwistedEdwardsCurveArithmetic(CyclicGroupArithmetic):
    """
    See Bernstein, Daniel J., et al. "Twisted Edwards curves." 2008,
    Bernstein, Daniel J., et al. "High-speed high-security signatures." 2012,
    and https://tools.ietf.org/id/draft-struik-lwig-curve-representations-00.html#dom-parms
    for more details
    """

    def __init__(self,
                 galois_field_arithmetic=IntegersModuloPrimeArithmetic(2 ** 255 - 19),
                 a=IntegersModuloPrimeElement(2 ** 255 - 20),
                 d=IntegersModuloPrimeElement(
                     37095705934669439343138083508754565189542113879843219016388785533085940283555),
                 identity=TwistedEdwardsCurveElement(IntegersModuloPrimeElement(0), IntegersModuloPrimeElement(1)),
                 generator=None):
        if generator is None:
            super(TwistedEdwardsCurveArithmetic, self).__init__(identity, self.default_generator())
        else:
            super(TwistedEdwardsCurveArithmetic, self).__init__(identity, generator)
        self.FA = galois_field_arithmetic
        self.a = a
        self.d = d

    @staticmethod
    def default_generator():
        x = IntegersModuloPrimeElement(15112221349535400772501151409588531511454012693041857206046113283949847762202)
        y = IntegersModuloPrimeElement(46316835694926478169428394003475163141307993866256225615783033603165251855960)
        return TwistedEdwardsCurveElement(x, y)

    def is_in_group(self, element):
        x = element.x
        y = element.y

        # left = ax^2 + y^2
        ax_square = self.FA.mul(self.a, self.FA.pow(x, 2))
        y_square = self.FA.pow(y, 2)
        left = self.FA.add(ax_square, y_square)

        # right = 1 + dx^2y^2
        one = self.FA.get_mul_identity()
        dx_square_y_square = self.FA.mul(self.d, self.FA.mul(self.FA.pow(x, 2), self.FA.pow(y, 2)))
        right = self.FA.add(one, dx_square_y_square)

        # check if left == right
        if self.FA.sub(left, right) == self.FA.get_add_identity():
            return True
        else:
            return False

    def add(self, a, b):
        """
        (x1, y1) + (x2, y2) = ((x1y2 + y1x2) / (1 + dx1x2y1y2), (y1y2 - ax1x2) / (1 - dx1x2y1y2))
        :param a: TwistedEdwardsCurveElement
        :param b: TwistedEdwardsCurveElement
        :return:
        """
        if not isinstance(a, TwistedEdwardsCurveElement) or not isinstance(b, TwistedEdwardsCurveElement):
            raise TypeError("Addition only supports two objects")
        x1 = a.x
        y1 = a.y
        x2 = b.x
        y2 = b.y

        # calculate essential components
        x1y2 = self.FA.mul(x1, y2)
        x2y1 = self.FA.mul(x2, y1)
        ax1x2 = self.FA.mul(self.a, self.FA.mul(x1, x2))
        y1y2 = self.FA.mul(y1, y2)
        dx1x2y1y2 = self.FA.mul(self.d, self.FA.mul(x1y2, x2y1))

        # calculate the first coordinate
        numerator_x3 = self.FA.add(x1y2, x2y1)
        denominator_x3 = self.FA.add(self.FA.get_mul_identity(), dx1x2y1y2)
        x3 = self.FA.div(numerator_x3, denominator_x3)

        # calculate the second coordinate
        numerator_y3 = self.FA.sub(y1y2, ax1x2)
        denominator_y3 = self.FA.sub(self.FA.get_mul_identity(), dx1x2y1y2)
        y3 = self.FA.div(numerator_y3, denominator_y3)

        return TwistedEdwardsCurveElement(x3, y3)

    def neg(self, a):
        """
        -(x, y) = (-x, y)
        :param a: TwistedEdwardsCurveElement
        :return:
        """
        if not isinstance(a, TwistedEdwardsCurveElement):
            raise TypeError("Negative only supports an object")
        x = a.x
        y = a.y
        return TwistedEdwardsCurveElement(self.FA.neg(x), y)

    def sub(self, a, b):
        """
        :param a: TwistedEdwardsCurveElement
        :param b: TwistedEdwardsCurveElement
        :return:
        """
        return self.add(a, self.neg(b))

    def mul(self, scalar, a):
        """
        :param scalar: int
        :param a: TwistedEdwardsCurveElement
        :return:
        """
        if not isinstance(scalar, int) or not isinstance(a, TwistedEdwardsCurveElement):
            raise TypeError("Multiplication only supports a scalar with an object")
        if scalar == 0:
            return self.get_identity()
        elif scalar < 0:
            raise TypeError("Multiplication only supports non-negative scalars")
        else:
            binary_representation = int_to_binary_representation(scalar)
            res = self.identity
            for exponent in binary_representation:
                res = self.add(res, self._multiple_twice(exponent, a))
            return res

    def _twice(self, a):
        """
        2 * (x, y) = (2xy / (ax^2 + y^2), (y^2 - ax^2) / (2 - ax^2 - y^2))
        :param a: TwistedEdwardsCurveElement
        :return:
        """
        if not isinstance(a, TwistedEdwardsCurveElement):
            raise TypeError("Double only supports an object")
        x = a.x
        y = a.y

        # calculate essential components
        ax_square = self.FA.mul(self.a, self.FA.pow(x, 2))
        y_square = self.FA.pow(y, 2)
        two = self.FA.mul(2, self.FA.get_mul_identity())

        # calculate the first coordinate
        numerator_x3 = self.FA.mul(2, self.FA.mul(x, y))
        denominator_x3 = self.FA.add(ax_square, y_square)
        x3 = self.FA.div(numerator_x3, denominator_x3)

        # calculate the second coordinate
        numerator_y3 = self.FA.sub(y_square, ax_square)
        denominator_y3 = self.FA.sub(two, denominator_x3)
        y3 = self.FA.div(numerator_y3, denominator_y3)

        return TwistedEdwardsCurveElement(x3, y3)

    def _multiple_twice(self, multiple, a):
        """
        2^multiple * a
        :param multiple: int >= 0
        :param a: TwistedEdwardsCurveElement
        :return:
        """
        if multiple == 0:
            return a
        else:
            res = a
            for i in range(multiple):
                res = self._twice(res)
            return res

    def encode(self, a):
        """
        Encode an element to a 33-byte bytes for feeding into a cryptographic hash function
        :param a: TwistedEdwardsCurveElement
        :return:
        """
        pos_sign = "00"
        neg_sign = "FF"
        if self.FA.is_positive(a.x):
            return bytes.fromhex(pos_sign) + int_to_bytes(a.y.val)
        else:
            return bytes.fromhex(neg_sign) + int_to_bytes(a.y.val)

    def decode(self, bytes_arr: bytes):
        """
        Decode a bytes object, expected to be 32 bytes (256 bits) long, into a self-typed object
        Note that this decode is not simply a reverse of the encode above
        :param bytes_arr:
        :return: Output -1 if the result is not in the TEC group, otherwise the correct answer
        """
        if len(bytes_arr) % 2 != 0:
            raise ValueError("Cannot decode an odd-long bytes into a TEC element")
        y = IntegersModuloPrimeElement(bytes_to_int(bytes_arr), arithmetic=self.FA)

        # determine x from the curve equation: x^2 = (1 - y^2) / (a - dy^2)
        denominator = self.FA.sub(self.a, self.FA.mul(self.d, self.FA.pow(y, 2)))  # a - dy^2
        numerator = self.FA.sub(self.FA.get_mul_identity(), self.FA.pow(y, 2))  # 1 - y^2
        x_pos, x_neg = self.FA.sqrt(self.FA.div(numerator, denominator))

        # if the decoded object is invalid, return -1
        if isinstance(x_pos, int) and x_pos == -1:
            return -1

        # if the first byte of bytes_arr is below 128 (top bit clear),
        # use the positive square root as x, otherwise the negative one
        x = x_pos if bytes_arr[0] < 128 else x_neg
        return TwistedEdwardsCurveElement(x, y)

    def get_field_order(self):
        return self.FA.mod
file_length: 9,327 | avg_line_length: 36.612903 | max_line_length: 117 | extension_type: py
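A minimal usage sketch of the group API in the file above (illustration only, not part of the dataset row; it assumes the FATE source tree is on the path so that `federatedml` is importable):

import federatedml  # assumes FATE's python/ directory is on PYTHONPATH
from federatedml.secureprotol.number_theory.group.twisted_edwards_curve_group import \
    TwistedEdwardsCurveArithmetic

tec = TwistedEdwardsCurveArithmetic()                  # Ed25519 parameters by default
g = TwistedEdwardsCurveArithmetic.default_generator()
assert tec.is_in_group(g)                              # base point satisfies a*x^2 + y^2 = 1 + d*x^2*y^2
g2 = tec.add(g, g)                                     # doubling via the unified addition law
assert tec.is_in_group(g2)
assert tec.sub(g2, g).output() == g.output()           # 2G - G = G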
repo: FATE
file: FATE-master/python/federatedml/secureprotol/number_theory/field/integers_modulo_prime_field.py
code:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.secureprotol.gmpy_math import invert, is_prime, powmod, tonelli, legendre
from federatedml.secureprotol.number_theory.field.base_galois_field import GaloisFieldElement, GaloisFieldArithmetic


class IntegersModuloPrimeElement(GaloisFieldElement):
    """
    A realization of GF: integers modulo a prime
    """

    def __init__(self, val, arithmetic=None):
        """
        :param val: int
        :param arithmetic: IntegersModuloPrimeArithmetic
        """
        super(IntegersModuloPrimeElement, self).__init__()
        if arithmetic is not None:
            # might need rectification
            self.val = arithmetic.rectify(val)
        else:
            # need no rectification
            self.val = val


class IntegersModuloPrimeArithmetic(GaloisFieldArithmetic):
    """
    For the finite field - integers modulo a prime
    """

    def __init__(self, mod):
        add_identity = IntegersModuloPrimeElement(0)
        mul_identity = IntegersModuloPrimeElement(1)
        super(IntegersModuloPrimeArithmetic, self).__init__(add_identity, mul_identity)
        self.mod = mod  # mod base
        self._check_mod_prime()

    def rectify(self, a):
        """
        Rectify an out-of-range element back to this field
        :param a: int
        :return: int
        """
        return a % self.mod

    def add(self, a, b):
        """
        :param a: IntegersModuloPrimeElement
        :param b: IntegersModuloPrimeElement
        :return: IntegersModuloPrimeElement
        """
        if not isinstance(a, IntegersModuloPrimeElement) or not isinstance(b, IntegersModuloPrimeElement):
            raise TypeError("Addition only supports IntegersModuloPrimeElement objects")
        return IntegersModuloPrimeElement((a.val + b.val) % self.mod)

    def neg(self, a):
        """
        :param a: IntegersModuloPrimeElement
        :return: IntegersModuloPrimeElement
        """
        if not isinstance(a, IntegersModuloPrimeElement):
            raise TypeError("Negative only supports IntegersModuloPrimeElement objects")
        # reduce modulo self.mod so that neg(0) yields 0 rather than self.mod
        return IntegersModuloPrimeElement((self.mod - a.val) % self.mod)

    def sub(self, a, b):
        """
        :param a: IntegersModuloPrimeElement
        :param b: IntegersModuloPrimeElement
        :return: IntegersModuloPrimeElement
        """
        return self.add(a, self.neg(b))

    def mul(self, a, b):
        """
        :param a: IntegersModuloPrimeElement
        :param b: IntegersModuloPrimeElement
        :return: IntegersModuloPrimeElement
        """
        if isinstance(a, IntegersModuloPrimeElement) and isinstance(b, IntegersModuloPrimeElement):
            return IntegersModuloPrimeElement((a.val * b.val) % self.mod)
        elif isinstance(a, IntegersModuloPrimeElement) and isinstance(b, int):
            if b == 0:
                return self.add_identity
            elif b < 0:
                raise ValueError("Scalars in multiplication must be non-negative")
            else:
                return IntegersModuloPrimeElement((a.val * b) % self.mod)
        elif isinstance(a, int) and isinstance(b, IntegersModuloPrimeElement):
            if a == 0:
                return self.add_identity
            elif a < 0:
                raise ValueError("Scalars in multiplication must be non-negative")
            else:
                return IntegersModuloPrimeElement((a * b.val) % self.mod)
        else:
            raise TypeError("Multiplication only supports two IntegersModuloPrimeElement objects "
                            "or one int plus one object")

    def invert(self, a):
        """
        :param a: IntegersModuloPrimeElement
        :return: IntegersModuloPrimeElement
        """
        if not isinstance(a, IntegersModuloPrimeElement):
            raise TypeError("Invert only supports IntegersModuloPrimeElement objects")
        return IntegersModuloPrimeElement(invert(a.val, self.mod))

    def div(self, a, b):
        """
        :param a: IntegersModuloPrimeElement
        :param b: IntegersModuloPrimeElement
        :return: IntegersModuloPrimeElement
        """
        if not isinstance(a, IntegersModuloPrimeElement) or not isinstance(b, IntegersModuloPrimeElement):
            raise TypeError("Division only supports IntegersModuloPrimeElement objects")
        return self.mul(a, self.invert(b))

    def pow(self, a, e):
        """
        :param a: IntegersModuloPrimeElement
        :param e: int
        :return: IntegersModuloPrimeElement
        """
        if not isinstance(a, IntegersModuloPrimeElement) or not isinstance(e, int):
            raise TypeError("Power only supports IntegersModuloPrimeElement to the int's")
        if e == 0:
            return self.mul_identity
        elif e < 0:
            raise ValueError("Exponents in power must be non-negative")
        else:
            return IntegersModuloPrimeElement(powmod(a.val, e, self.mod))

    def sqrt(self, a):
        """
        sqrt(a) found by the Tonelli-Shanks algorithm
        :param a: IntegersModuloPrimeElement
        :return: Output -1 if a is not a quadratic residue,
                 otherwise the correct square roots (root, -root)
                 Note root < self.mod / 2
        """
        if not isinstance(a, IntegersModuloPrimeElement):
            raise TypeError("Square root only supports an object")
        if self.is_a_quadratic_residue(a):
            root_raw = tonelli(a.val, self.mod)
            root_raw_other = self.mod - root_raw
            if root_raw < root_raw_other:
                return IntegersModuloPrimeElement(root_raw), IntegersModuloPrimeElement(root_raw_other)
            else:
                return IntegersModuloPrimeElement(root_raw_other), IntegersModuloPrimeElement(root_raw)
        else:
            return -1, -1

    def is_a_quadratic_residue(self, a):
        """
        Check if a is a quadratic residue
        :param a: IntegersModuloPrimeElement
        :return:
        """
        if not isinstance(a, IntegersModuloPrimeElement):
            raise ValueError("Only check an object")
        return legendre(a.val, self.mod) == 1

    def is_positive(self, a):
        """
        Check if a is positive in this field, i.e., if a < self.mod / 2
        :param a: IntegersModuloPrimeElement
        :return:
        """
        # compare in exact integer arithmetic: self.mod / 2 as a float loses
        # precision for a 255-bit modulus
        return 2 * a.val < self.mod

    def _check_mod_prime(self):
        if not is_prime(self.mod):
            raise ValueError("Galois fields take only prime orders")

    def get_add_identity(self):
        return self.add_identity

    def get_mul_identity(self):
        return self.mul_identity
file_length: 7,229 | avg_line_length: 34.970149 | max_line_length: 116 | extension_type: py
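A short sketch of the field arithmetic defined above (illustrative only; again assumes `federatedml` is importable):

from federatedml.secureprotol.number_theory.field.integers_modulo_prime_field import (
    IntegersModuloPrimeArithmetic,
    IntegersModuloPrimeElement,
)

fa = IntegersModuloPrimeArithmetic(2 ** 255 - 19)   # the Ed25519 base field
a = IntegersModuloPrimeElement(5)
b = IntegersModuloPrimeElement(7)
assert fa.add(a, b).val == 12
assert fa.mul(a, b).val == 35
assert fa.div(b, b).val == 1                        # division multiplies by the modular inverse
root, other_root = fa.sqrt(fa.pow(a, 2))            # Tonelli-Shanks square roots of 25
assert fa.pow(root, 2).val == 25 and fa.pow(other_root, 2).val == 25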
repo: FATE
file: FATE-master/python/federatedml/secureprotol/number_theory/field/__init__.py
code:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
file_length: 663 | avg_line_length: 33.947368 | max_line_length: 75 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/number_theory/field/base_galois_field.py
code:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


class GaloisFieldElement(object):
    """
    Element of a finite field
    """

    def __init__(self):
        pass

    def __eq__(self, other):
        return self.__dict__ == other.__dict__


class GaloisFieldArithmetic(object):
    """
    A collection of arithmetic operators for finite field elements
    """

    def __init__(self, add_identity, mul_identity):
        self.add_identity = add_identity  # additive identity
        self.mul_identity = mul_identity  # multiplicative identity

    def add(self, a, b):
        """
        a + b
        """
        pass

    def neg(self, a):
        """
        -a
        """
        pass

    def sub(self, a, b):
        """
        a - b
        """
        pass

    def mul(self, a, b):
        """
        a * b
        """
        pass

    def invert(self, a):
        """
        a^(-1)
        """
        pass

    def div(self, a, b):
        """
        a / b
        """
        pass

    def pow(self, a, e):
        """
        a^e
        """
        pass

    def get_add_identity(self):
        return self.add_identity

    def get_mul_identity(self):
        return self.mul_identity
file_length: 1,833 | avg_line_length: 19.606742 | max_line_length: 75 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/__init__.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.secureprotol.spdz.spdz import SPDZ
file_length: 669 | avg_line_length: 36.222222 | max_line_length: 75 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/spdz.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.secureprotol.fate_paillier import PaillierKeypair
from federatedml.secureprotol.spdz.communicator import Communicator
from federatedml.secureprotol.spdz.utils import NamingService
from federatedml.secureprotol.spdz.utils import naming


class SPDZ(object):
    __instance = None

    @classmethod
    def get_instance(cls) -> 'SPDZ':
        return cls.__instance

    @classmethod
    def set_instance(cls, instance):
        prev = cls.__instance
        cls.__instance = instance
        return prev

    @classmethod
    def has_instance(cls):
        return cls.__instance is not None

    def __init__(self, name="ss", q_field=None, local_party=None, all_parties=None,
                 use_mix_rand=False, n_length=1024):
        self.name_service = naming.NamingService(name)
        self._prev_name_service = None
        self._pre_instance = None

        self.communicator = Communicator(local_party, all_parties)

        self.party_idx = self.communicator.party_idx
        self.other_parties = self.communicator.other_parties
        if len(self.other_parties) > 1:
            raise EnvironmentError("support 2-party secret share only")
        self.public_key, self.private_key = PaillierKeypair.generate_keypair(n_length=n_length)

        if q_field is None:
            q_field = self.public_key.n

        self.q_field = self._align_q_field(q_field)

        self.use_mix_rand = use_mix_rand

    def __enter__(self):
        self._prev_name_service = NamingService.set_instance(self.name_service)
        self._pre_instance = self.set_instance(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # restore what __enter__ saved
        self.set_instance(self._pre_instance)
        NamingService.set_instance(self._prev_name_service)
        # self.communicator.clean()

    def __reduce__(self):
        raise PermissionError("it's unsafe to transfer this")

    def partial_rescontruct(self):
        # todo: let a subset of parties get the reconstructed tensor
        pass

    @classmethod
    def dot(cls, left, right, target_name=None):
        return left.dot(right, target_name)

    def set_flowid(self, flowid):
        self.communicator.set_flowid(flowid)

    def _align_q_field(self, q_field):
        self.communicator.remote_q_field(q_field=q_field, party=self.other_parties)
        other_q_field = self.communicator.get_q_field(party=self.other_parties)
        other_q_field.append(q_field)
        max_q_field = max(other_q_field)
        return max_q_field
file_length: 3,047 | avg_line_length: 33.247191 | max_line_length: 119 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/secure_matrix/secure_matrix.py
code:
#
# Copyright 2021 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import numpy as np

from fate_arch.common import Party
from fate_arch.session import is_table
from federatedml.secureprotol.fixedpoint import FixedPointEndec
from federatedml.secureprotol.spdz.tensor import fixedpoint_numpy, fixedpoint_table
from federatedml.transfer_variable.transfer_class.secret_share_transfer_variable import SecretShareTransferVariable
from federatedml.util import consts


class SecureMatrix(object):
    # SecureMatrix in secret sharing with HE

    def __init__(self, party: Party, q_field, other_party):
        self.transfer_variable = SecretShareTransferVariable()
        self.party = party
        self.other_party = other_party
        self.q_field = q_field
        self.encoder = None
        self.get_or_create_endec(self.q_field)

    def set_flowid(self, flowid):
        self.transfer_variable.set_flowid(flowid)

    def get_or_create_endec(self, q_field, **kwargs):
        if self.encoder is None:
            self.encoder = FixedPointEndec(q_field)
        return self.encoder

    def secure_matrix_mul(self, matrix, tensor_name, cipher=None, suffix=tuple(), is_fixedpoint_table=True):
        current_suffix = ("secure_matrix_mul",) + suffix
        dst_role = consts.GUEST if self.party.role == consts.HOST else consts.HOST

        if cipher is not None:
            de_matrix = self.encoder.decode(matrix.value)
            if isinstance(matrix, fixedpoint_table.FixedPointTensor):
                encrypt_mat = cipher.distribute_encrypt(de_matrix)
            else:
                encrypt_mat = cipher.recursive_encrypt(de_matrix)

            # remote encrypted matrix
            self.transfer_variable.encrypted_share_matrix.remote(encrypt_mat,
                                                                 role=dst_role,
                                                                 idx=0,
                                                                 suffix=current_suffix)

            share_tensor = SecureMatrix.from_source(tensor_name,
                                                    self.other_party,
                                                    cipher,
                                                    self.q_field,
                                                    self.encoder,
                                                    is_fixedpoint_table=is_fixedpoint_table)
            return share_tensor
        else:
            share = self.transfer_variable.encrypted_share_matrix.get(role=dst_role,
                                                                      idx=0,
                                                                      suffix=current_suffix)
            if is_table(share):
                share = fixedpoint_table.PaillierFixedPointTensor(share)
            else:
                share = fixedpoint_numpy.PaillierFixedPointTensor(share)
            ret = share.dot(matrix)

            share_tensor = SecureMatrix.from_source(tensor_name,
                                                    ret,
                                                    cipher,
                                                    self.q_field,
                                                    self.encoder)
            return share_tensor

    def share_encrypted_matrix(self, suffix, is_remote, cipher, **kwargs):
        current_suffix = ("share_encrypted_matrix",) + suffix
        if is_remote:
            for var_name, var in kwargs.items():
                dst_role = consts.GUEST if self.party.role == consts.HOST else consts.HOST
                if isinstance(var, fixedpoint_table.FixedPointTensor):
                    encrypt_var = cipher.distribute_encrypt(var.value)
                else:
                    encrypt_var = cipher.recursive_encrypt(var.value)
                self.transfer_variable.encrypted_share_matrix.remote(encrypt_var, role=dst_role,
                                                                     suffix=(var_name,) + current_suffix)
        else:
            res = []
            for var_name in kwargs.keys():
                dst_role = consts.GUEST if self.party.role == consts.HOST else consts.HOST
                z = self.transfer_variable.encrypted_share_matrix.get(role=dst_role, idx=0,
                                                                      suffix=(var_name,) + current_suffix)
                if is_table(z):
                    res.append(fixedpoint_table.PaillierFixedPointTensor(z))
                else:
                    res.append(fixedpoint_numpy.PaillierFixedPointTensor(z))
            return tuple(res)

    @classmethod
    def from_source(cls, tensor_name, source, cipher, q_field, encoder, is_fixedpoint_table=True):
        if is_table(source):
            share_tensor = fixedpoint_table.PaillierFixedPointTensor.from_source(tensor_name=tensor_name,
                                                                                 source=source,
                                                                                 encoder=encoder,
                                                                                 q_field=q_field)
            return share_tensor
        elif isinstance(source, np.ndarray):
            share_tensor = fixedpoint_numpy.PaillierFixedPointTensor.from_source(tensor_name=tensor_name,
                                                                                 source=source,
                                                                                 encoder=encoder,
                                                                                 q_field=q_field)
            return share_tensor
        elif isinstance(source, (fixedpoint_table.PaillierFixedPointTensor,
                                 fixedpoint_numpy.PaillierFixedPointTensor)):
            return cls.from_source(tensor_name, source.value, cipher, q_field, encoder, is_fixedpoint_table)
        elif isinstance(source, Party):
            if is_fixedpoint_table:
                share_tensor = fixedpoint_table.PaillierFixedPointTensor.from_source(tensor_name=tensor_name,
                                                                                     source=source,
                                                                                     encoder=encoder,
                                                                                     q_field=q_field,
                                                                                     cipher=cipher)
            else:
                share_tensor = fixedpoint_numpy.PaillierFixedPointTensor.from_source(tensor_name=tensor_name,
                                                                                     source=source,
                                                                                     encoder=encoder,
                                                                                     q_field=q_field,
                                                                                     cipher=cipher)
            return share_tensor
        else:
            raise ValueError(f"type={type(source)}")
file_length: 7,787 | avg_line_length: 49.571429 | max_line_length: 115 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/secure_matrix/__init__.py
code: (empty file)
file_length: 0 | avg_line_length: 0 | max_line_length: 0 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/communicator/federation.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.transfer_variable.transfer_class.secret_share_transfer_variable import SecretShareTransferVariable


class Communicator(object):

    def __init__(self, local_party=None, all_parties=None):
        self._transfer_variable = SecretShareTransferVariable()
        self._share_variable = self._transfer_variable.share.disable_auto_clean()
        self._rescontruct_variable = self._transfer_variable.rescontruct.set_preserve_num(3)
        self._mul_triplets_encrypted_variable = self._transfer_variable.multiply_triplets_encrypted.set_preserve_num(3)
        self._mul_triplets_cross_variable = self._transfer_variable.multiply_triplets_cross.set_preserve_num(3)
        self._q_field_variable = self._transfer_variable.q_field.disable_auto_clean()

        self._local_party = self._transfer_variable.local_party() if local_party is None else local_party
        self._all_parties = self._transfer_variable.all_parties() if all_parties is None else all_parties
        self._party_idx = self._all_parties.index(self._local_party)
        self._other_parties = self._all_parties[:self._party_idx] + self._all_parties[(self._party_idx + 1):]

    @property
    def party(self):
        return self._local_party

    @property
    def parties(self):
        return self._all_parties

    @property
    def other_parties(self):
        return self._other_parties

    @property
    def party_idx(self):
        return self._party_idx

    def remote_q_field(self, q_field, party):
        return self._q_field_variable.remote_parties(q_field, party, suffix=("q_field",))

    def get_q_field(self, party):
        return self._q_field_variable.get_parties(party, suffix=("q_field",))

    def get_rescontruct_shares(self, tensor_name):
        return self._rescontruct_variable.get_parties(self._other_parties, suffix=(tensor_name,))

    def broadcast_rescontruct_share(self, share, tensor_name):
        return self._rescontruct_variable.remote_parties(share, self._other_parties, suffix=(tensor_name,))

    def remote_share(self, share, tensor_name, party):
        return self._share_variable.remote_parties(share, party, suffix=(tensor_name,))

    def get_share(self, tensor_name, party):
        return self._share_variable.get_parties(party, suffix=(tensor_name,))

    def remote_encrypted_tensor(self, encrypted, tag):
        return self._mul_triplets_encrypted_variable.remote_parties(encrypted, parties=self._other_parties, suffix=tag)

    def remote_encrypted_cross_tensor(self, encrypted, parties, tag):
        return self._mul_triplets_cross_variable.remote_parties(encrypted, parties=parties, suffix=tag)

    def get_encrypted_tensors(self, tag):
        return (self._other_parties,
                self._mul_triplets_encrypted_variable.get_parties(parties=self._other_parties, suffix=tag))

    def get_encrypted_cross_tensors(self, tag):
        return self._mul_triplets_cross_variable.get_parties(parties=self._other_parties, suffix=tag)

    def clean(self):
        self._rescontruct_variable.clean()
        self._share_variable.clean()
        self._mul_triplets_encrypted_variable.clean()
        self._mul_triplets_cross_variable.clean()
        self._q_field_variable.clean()

    def set_flowid(self, flowid):
        self._transfer_variable.set_flowid(flowid)
file_length: 3,966 | avg_line_length: 42.593407 | max_line_length: 119 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/communicator/__init__.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.secureprotol.spdz.communicator.federation import Communicator
file_length: 696 | avg_line_length: 37.722222 | max_line_length: 78 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/beaver_triples/__init__.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from federatedml.secureprotol.spdz.beaver_triples.he import beaver_triplets
file_length: 693 | avg_line_length: 37.555556 | max_line_length: 75 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/beaver_triples/he.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import numpy as np

from fate_arch.session import is_table
from federatedml.secureprotol.spdz.communicator import Communicator
from federatedml.secureprotol.spdz.utils import rand_tensor, urand_tensor
from federatedml.util import LOGGER


def encrypt_tensor(tensor, public_key):
    encrypted_zero = public_key.encrypt(0)
    if isinstance(tensor, np.ndarray):
        return np.vectorize(lambda e: encrypted_zero + e)(tensor)
    elif is_table(tensor):
        return tensor.mapValues(lambda x: np.vectorize(lambda e: encrypted_zero + e)(x))
    else:
        raise NotImplementedError(f"type={type(tensor)}")


def decrypt_tensor(tensor, private_key, otypes):
    if isinstance(tensor, np.ndarray):
        return np.vectorize(private_key.decrypt, otypes)(tensor)
    elif is_table(tensor):
        return tensor.mapValues(lambda x: np.vectorize(private_key.decrypt, otypes)(x))
    else:
        raise NotImplementedError(f"type={type(tensor)}")


def beaver_triplets(a_tensor, b_tensor, dot, q_field, he_key_pair, communicator: Communicator, name):
    public_key, private_key = he_key_pair
    a = rand_tensor(q_field, a_tensor)
    b = rand_tensor(q_field, b_tensor)

    def _cross(self_index, other_index):
        LOGGER.debug(f"_cross: a={a}, b={b}")
        _c = dot(a, b)
        encrypted_a = encrypt_tensor(a, public_key)
        communicator.remote_encrypted_tensor(encrypted=encrypted_a, tag=f"{name}_a_{self_index}")
        r = urand_tensor(q_field, _c)
        _p, (ea,) = communicator.get_encrypted_tensors(tag=f"{name}_a_{other_index}")
        eab = dot(ea, b)
        eab += r
        _c -= r
        communicator.remote_encrypted_cross_tensor(encrypted=eab,
                                                   parties=_p,
                                                   tag=f"{name}_cross_a_{other_index}_b_{self_index}")
        crosses = communicator.get_encrypted_cross_tensors(tag=f"{name}_cross_a_{self_index}_b_{other_index}")
        for eab in crosses:
            _c += decrypt_tensor(eab, private_key, [object])
        return _c

    c = _cross(communicator.party_idx, 1 - communicator.party_idx)

    return a, b, c % q_field
file_length: 2,781 | avg_line_length: 38.742857 | max_line_length: 110 | extension_type: py
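The file above hides the cross products behind Paillier encryption, but the algebra it implements is the standard Beaver-triple identity used later in `fixedpoint_table.FixedPointTensor.dot`. A plain-integer sketch of that identity (toy modulus and secrets chosen purely for illustration; no encryption or communication):

import random

q = 2 ** 61 - 1                        # toy field modulus (assumption, for illustration)
x, y = 123, 456                        # the two secrets to be multiplied
a, b = random.randrange(q), random.randrange(q)
c = a * b % q                          # the triple: c = a * b
x_add_a = (x + a) % q                  # opened (reconstructed) during the protocol
y_add_b = (y + b) % q
# combining the parties' shares of  c - a*(y+b) - (x+a)*b + (x+a)*(y+b):
z = (c - a * y_add_b - x_add_a * b + x_add_a * y_add_b) % q
assert z == x * y % q                  # the random masks cancel, leaving the product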
repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/test/host.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import numpy as np

from federatedml.secureprotol.spdz.tensor.fixedpoint_numpy import FixedPointTensor
from federatedml.secureprotol.spdz import SPDZ
from fate_arch.session import Session

s = Session()

# on host side
guest_party_id = 10000
host_party_id = 10001
host_proxy_ip = "192.168.0.1"  # generally, this is your current machine's IP
federation_id = "spdz_demo"  # choose a common federation id (this should be the same at both sites)
session_id = "_".join([federation_id, "host", str(host_party_id)])
s.init_computing(session_id)
s.init_federation(federation_id,
                  runtime_conf={
                      "local": {"role": "host", "party_id": host_party_id},
                      "role": {"guest": [guest_party_id], "host": [host_party_id]},
                  },
                  service_conf={"host": host_proxy_ip, "port": 9370})
s.as_global()
partys = s.parties.all_parties
# [Party(role=guest, party_id=10000), Party(role=host, party_id=10001)]

# on host side (assuming the local party is partys[1]):
data = np.array([[3, 2, 1], [6, 5, 4]])
with SPDZ() as spdz:
    y = FixedPointTensor.from_source("y", data)
    x = FixedPointTensor.from_source("x", partys[0])
    z = (x + y).get()
    t = (x - y).get()
    print(z)
    print(t)
file_length: 1,863 | avg_line_length: 36.28 | max_line_length: 98 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/test/test_fix_point.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import random
import unittest
import uuid
from concurrent.futures import ProcessPoolExecutor, as_completed

import numpy as np

from federatedml.secureprotol.spdz import SPDZ
from federatedml.secureprotol.spdz.tensor.fixedpoint_numpy import FixedPointTensor
from federatedml.transfer_variable.transfer_class.secret_share_transfer_variable import SecretShareTransferVariable

NUM_HOSTS = 1
EPS = 0.001


def session_init(job_id, idx):
    from fate_arch import session
    role = "guest" if idx < 1 else "host"
    party_id = 9999 + idx if idx < 1 else 10000 + (idx - 1)
    role_parties = {
        "host": [
            10000 + i for i in range(NUM_HOSTS)
        ],
        "guest": [
            9999 + i for i in range(1)
        ]
    }
    sess = session.init(job_id)
    sess.init_federation(job_id, dict(local=dict(role=role, party_id=party_id), role=role_parties))
    return sess.parties.local_party(), sess.parties.all_parties()


def submit(func, *args, **kwargs):
    with ProcessPoolExecutor() as pool:
        num = NUM_HOSTS + 1
        result = [None] * num
        futures = {}
        for _idx in range(num):
            kv = kwargs.copy()
            kv["idx"] = _idx
            futures[pool.submit(func, *args, **kv)] = _idx
        for future in as_completed(futures):
            result[futures[future]] = future.result()
        return result


def create_and_get(job_id, idx, data):
    _, all_parties = session_init(job_id, idx)
    with SPDZ():
        if idx == 0:
            x = FixedPointTensor.from_source("x", data)
        else:
            x = FixedPointTensor.from_source("x", all_parties[0])
        return x.get()


def add_and_sub(job_id, idx, data_list):
    _, all_parties = session_init(job_id, idx)
    with SPDZ():
        if idx == 0:
            x = FixedPointTensor.from_source("x", data_list[0])
            y = FixedPointTensor.from_source("y", all_parties[1])
        else:
            x = FixedPointTensor.from_source("x", all_parties[0])
            y = FixedPointTensor.from_source("y", data_list[1])
        a = (x + y).get()
        b = (x - y).get()
        return a, b


def add_and_sub_plaintext(job_id, idx, data_list):
    _, all_parties = session_init(job_id, idx)
    with SPDZ():
        if idx == 0:
            x = FixedPointTensor.from_source("x", data_list[0])
        else:
            x = FixedPointTensor.from_source("x", all_parties[0])
        y = data_list[1]
        a = (x + y).get()
        a1 = (y + x).get()
        b = (x - y).get()
        b1 = (y - x).get()
        return a, a1, b, b1


def mul_plaintext(job_id, idx, data_list):
    _, all_parties = session_init(job_id, idx)
    with SPDZ():
        if idx == 0:
            x = FixedPointTensor.from_source("x", data_list[0])
        else:
            x = FixedPointTensor.from_source("x", all_parties[0])
        y = data_list[1]
        return (x * y).get(), (y * x).get()


def mat_mul(job_id, idx, data_list):
    _, all_parties = session_init(job_id, idx)
    with SPDZ():
        if idx == 0:
            x = FixedPointTensor.from_source("x", data_list[0])
            y = FixedPointTensor.from_source("y", all_parties[1])
        else:
            x = FixedPointTensor.from_source("x", all_parties[0])
            y = FixedPointTensor.from_source("y", data_list[1])
        return (x @ y).get()


def einsum(job_id, idx, einsum_expr, data_list):
    _, all_parties = session_init(job_id, idx)
    with SPDZ():
        if idx == 0:
            x = FixedPointTensor.from_source("x", data_list[0])
            y = FixedPointTensor.from_source("y", all_parties[1])
        else:
            x = FixedPointTensor.from_source("x", all_parties[0])
            y = FixedPointTensor.from_source("y", data_list[1])
        return x.einsum(y, einsum_expr).get()


class TestSyncBase(unittest.TestCase):

    def setUp(self) -> None:
        self.transfer_variable = SecretShareTransferVariable()
        self.job_id = str(uuid.uuid1())
        self.transfer_variable.set_flowid(self.job_id)

    def test_create_and_get(self):
        data = np.random.rand(10, 15)
        rec = submit(create_and_get, self.job_id, data=data)
        for x in rec:
            self.assertAlmostEqual(np.linalg.norm(x - data), 0, delta=EPS)

    def test_add_and_sub(self):
        x = np.random.rand(10, 15)
        y = np.random.rand(10, 15)
        data_list = [x, y]
        rec = submit(add_and_sub, self.job_id, data_list=data_list)
        for a, b in rec:
            self.assertAlmostEqual(np.linalg.norm((x + y) - a), 0, delta=2 * EPS)
            self.assertAlmostEqual(np.linalg.norm((x - y) - b), 0, delta=2 * EPS)

    def test_add_and_sub_plaintext(self):
        # x = np.random.rand(10, 15)
        # y = np.random.rand(10, 15)
        x = np.array([1, 2, 3, 4])
        y = np.array([5, 6, 7, 8])
        data_list = [x, y]
        rec = submit(add_and_sub_plaintext, self.job_id, data_list=data_list)
        for a, a1, b, b1 in rec:
            self.assertAlmostEqual(np.linalg.norm((x + y) - a), 0, delta=2 * EPS)
            self.assertAlmostEqual(np.linalg.norm((x + y) - a1), 0, delta=2 * EPS)
            self.assertAlmostEqual(np.linalg.norm((x - y) - b), 0, delta=2 * EPS)
            self.assertAlmostEqual(np.linalg.norm((y - x) - b1), 0, delta=2 * EPS)

    def test_mul_plaintext(self):
        x = np.random.rand(10, 15)
        y = random.randint(1, 10000)
        data_list = [x, y]
        rec = submit(mul_plaintext, self.job_id, data_list=data_list)
        for a, b in rec:
            self.assertAlmostEqual(np.linalg.norm((x * y) - a), 0, delta=y * EPS)
            self.assertAlmostEqual(np.linalg.norm((x * y) - b), 0, delta=y * EPS)

    def test_matmul(self):
        j_dim = 15
        x = np.random.rand(10, j_dim)
        y = np.random.rand(j_dim, 20)
        data_list = [x, y]
        rec = submit(mat_mul, self.job_id, data_list=data_list)
        for a in rec:
            self.assertAlmostEqual(np.linalg.norm((x @ y) - a), 0, delta=j_dim * EPS)

    def test_einsum(self):
        j_dim = 5
        k_dim = 4
        x = np.random.rand(10, j_dim, k_dim)
        y = np.random.rand(k_dim, j_dim, 20)
        einsum_expr = "ijk,kjl->il"
        data_list = [x, y]
        rec = submit(einsum, self.job_id, einsum_expr=einsum_expr, data_list=data_list)
        for a in rec:
            self.assertAlmostEqual(np.linalg.norm(np.einsum(einsum_expr, x, y) - a), 0,
                                   delta=j_dim * k_dim * EPS)
file_length: 7,690 | avg_line_length: 34.606481 | max_line_length: 115 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/test/__init__.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
file_length: 616 | avg_line_length: 37.5625 | max_line_length: 75 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/test/guest.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import numpy as np

from federatedml.secureprotol.spdz.tensor.fixedpoint_numpy import FixedPointTensor
from federatedml.secureprotol.spdz import SPDZ
from fate_arch.session import Session

s = Session()

# on guest side
guest_party_id = 10000
host_party_id = 10001
guest_proxy_ip = "192.168.0.2"  # generally, this is your current machine's IP
federation_id = "spdz_demo"  # choose a common federation id (this should be the same at both sites)
session_id = "_".join([federation_id, "guest", str(guest_party_id)])
s.init_computing(session_id)
s.init_federation(federation_id,
                  runtime_conf={
                      "local": {"role": "guest", "party_id": guest_party_id},
                      "role": {"guest": [guest_party_id], "host": [host_party_id]},
                  },
                  service_conf={"host": guest_proxy_ip, "port": 9370})
s.as_global()
partys = s.parties.all_parties
# [Party(role=guest, party_id=10000), Party(role=host, party_id=10001)]

# on guest side (assuming the local party is partys[0]):
data = np.array([[1, 2, 3], [4, 5, 6]])
with SPDZ() as spdz:
    x = FixedPointTensor.from_source("x", data)
    y = FixedPointTensor.from_source("y", partys[1])
    z = (x + y).get()
    t = (x - y).get()
    print(z)
    print(t)
file_length: 1,875 | avg_line_length: 36.52 | max_line_length: 98 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/tensor/base.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import abc

from federatedml.secureprotol.spdz.utils import NamingService


class TensorBase(object):
    __array_ufunc__ = None

    def __init__(self, q_field, tensor_name: str = None):
        self.q_field = q_field
        self.tensor_name = NamingService.get_instance().next() if tensor_name is None else tensor_name

    @classmethod
    def get_spdz(cls):
        from federatedml.secureprotol.spdz import SPDZ
        return SPDZ.get_instance()

    @abc.abstractmethod
    def dot(self, other, target_name=None):
        pass
file_length: 1,151 | avg_line_length: 31 | max_line_length: 102 | extension_type: py

repo: FATE
file: FATE-master/python/federatedml/secureprotol/spdz/tensor/fixedpoint_table.py
code:
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import operator
from collections.abc import Iterable  # `from collections import Iterable` was removed in Python 3.10
import functools

import numpy as np

from fate_arch.common import Party
from fate_arch.session import is_table
from federatedml.secureprotol.spdz.beaver_triples import beaver_triplets
from federatedml.secureprotol.spdz.tensor import fixedpoint_numpy
from federatedml.secureprotol.spdz.tensor.base import TensorBase
from federatedml.secureprotol.spdz.utils import NamingService
from federatedml.secureprotol.spdz.utils import urand_tensor
# from federatedml.secureprotol.spdz.tensor.fixedpoint_endec import FixedPointEndec
from federatedml.secureprotol.fixedpoint import FixedPointEndec


def _table_binary_op(x, y, op):
    return x.join(y, lambda a, b: op(a, b))


def _table_binary_mod_op(x, y, q_field, op):
    return x.join(y, lambda a, b: op(a, b) % q_field)


def _table_scalar_op(x, d, op):
    return x.mapValues(lambda a: op(a, d))


def _table_scalar_mod_op(x, d, q_field, op):
    return x.mapValues(lambda a: op(a, d) % q_field)


def _table_dot_mod_func(it, q_field):
    ret = None
    for _, (x, y) in it:
        if ret is None:
            ret = np.tensordot(x, y, [[], []]) % q_field
        else:
            ret = (ret + np.tensordot(x, y, [[], []])) % q_field
    return ret


def _table_dot_func(it):
    ret = None
    for _, (x, y) in it:
        if ret is None:
            ret = np.tensordot(x, y, [[], []])
        else:
            ret += np.tensordot(x, y, [[], []])
    return ret


def table_dot(a_table, b_table):
    return a_table.join(b_table, lambda x, y: [x, y]) \
        .applyPartitions(lambda it: _table_dot_func(it)) \
        .reduce(lambda x, y: x + y)


def table_dot_mod(a_table, b_table, q_field):
    return a_table.join(b_table, lambda x, y: [x, y]) \
        .applyPartitions(lambda it: _table_dot_mod_func(it, q_field)) \
        .reduce(lambda x, y: x if y is None else y if x is None else x + y)


class FixedPointTensor(TensorBase):
    """
    a table based tensor
    """
    __array_ufunc__ = None

    def __init__(self, value, q_field, endec, tensor_name: str = None):
        super().__init__(q_field, tensor_name)
        self.value = value
        self.endec = endec
        self.tensor_name = NamingService.get_instance().next() if tensor_name is None else tensor_name

    def dot(self, other: 'FixedPointTensor', target_name=None):
        spdz = self.get_spdz()
        if target_name is None:
            target_name = NamingService.get_instance().next()

        a, b, c = beaver_triplets(a_tensor=self.value, b_tensor=other.value, dot=table_dot,
                                  q_field=self.q_field, he_key_pair=(spdz.public_key, spdz.private_key),
                                  communicator=spdz.communicator, name=target_name)

        x_add_a = (self + a).rescontruct(f"{target_name}_confuse_x")
        y_add_b = (other + b).rescontruct(f"{target_name}_confuse_y")
        cross = c - table_dot_mod(a, y_add_b, self.q_field) - table_dot_mod(x_add_a, b, self.q_field)
        if spdz.party_idx == 0:
            cross += table_dot_mod(x_add_a, y_add_b, self.q_field)
        cross = cross % self.q_field
        cross = self.endec.truncate(cross, self.get_spdz().party_idx)
        share = fixedpoint_numpy.FixedPointTensor(cross, self.q_field, self.endec, target_name)
        return share

    def dot_local(self, other, target_name=None):
        def _vec_dot(x, y, party_idx, q_field, endec):
            ret = np.dot(x, y) % q_field
            ret = endec.truncate(ret, party_idx)
            if not isinstance(ret, np.ndarray):
                ret = np.array([ret])
            return ret

        if isinstance(other, FixedPointTensor) or isinstance(other, fixedpoint_numpy.FixedPointTensor):
            other = other.value

        if isinstance(other, np.ndarray):
            party_idx = self.get_spdz().party_idx
            f = functools.partial(_vec_dot, y=other, party_idx=party_idx,
                                  q_field=self.q_field, endec=self.endec)
            ret = self.value.mapValues(f)
            return self._boxed(ret, target_name)
        elif is_table(other):
            ret = table_dot_mod(self.value, other, self.q_field).reshape((1, -1))[0]
            ret = self.endec.truncate(ret, self.get_spdz().party_idx)
            return fixedpoint_numpy.FixedPointTensor(ret, self.q_field, self.endec, target_name)
        else:
            raise ValueError(f"type={type(other)}")

    def reduce(self, func, **kwargs):
        ret = self.value.reduce(func)
        return fixedpoint_numpy.FixedPointTensor(ret, self.q_field, self.endec)

    @property
    def shape(self):
        return self.value.count(), len(self.value.first()[1])

    @classmethod
    def from_source(cls, tensor_name, source, **kwargs):
        spdz = cls.get_spdz()
        q_field = kwargs['q_field'] if 'q_field' in kwargs else spdz.q_field
        if 'encoder' in kwargs:
            encoder = kwargs['encoder']
        else:
            base = kwargs['base'] if 'base' in kwargs else 10
            frac = kwargs['frac'] if 'frac' in kwargs else 4
            encoder = FixedPointEndec(n=q_field, field=q_field, base=base, precision_fractional=frac)
        if is_table(source):
            source = encoder.encode(source)
            _pre = urand_tensor(q_field, source, use_mix=spdz.use_mix_rand)
            spdz.communicator.remote_share(share=_pre, tensor_name=tensor_name, party=spdz.other_parties[0])
            for _party in spdz.other_parties[1:]:
                r = urand_tensor(q_field, source, use_mix=spdz.use_mix_rand)
                spdz.communicator.remote_share(share=_table_binary_mod_op(r, _pre, q_field, operator.sub),
                                               tensor_name=tensor_name, party=_party)
                _pre = r
            share = _table_binary_mod_op(source, _pre, q_field, operator.sub)
        elif isinstance(source, Party):
            share = spdz.communicator.get_share(tensor_name=tensor_name, party=source)[0]
        else:
            raise ValueError(f"type={type(source)}")
        return FixedPointTensor(share, q_field, encoder, tensor_name)

    def get(self, tensor_name=None, broadcast=True):
        return self.endec.decode(self.rescontruct(tensor_name, broadcast))

    def rescontruct(self, tensor_name=None, broadcast=True):
        from federatedml.secureprotol.spdz import SPDZ
        spdz = SPDZ.get_instance()
        share_val = self.value.copy()
        name = tensor_name or self.tensor_name
        if name is None:
            raise ValueError("name not specified")
        # remote share to other parties
        if broadcast:
            spdz.communicator.broadcast_rescontruct_share(share_val, name)
        # get shares from other parties
        for other_share in spdz.communicator.get_rescontruct_shares(name):
            share_val = _table_binary_mod_op(share_val, other_share, self.q_field, operator.add)
        return share_val

    def broadcast_reconstruct_share(self, tensor_name=None):
        from federatedml.secureprotol.spdz import SPDZ
        spdz = SPDZ.get_instance()
        share_val = self.value.copy()
        name = tensor_name or self.tensor_name
        if name is None:
            raise ValueError("name not specified")
        # remote share to other parties
        spdz.communicator.broadcast_rescontruct_share(share_val, name)
        return share_val

    def __str__(self):
        return f"tensor_name={self.tensor_name}, value={self.value}"

    def __repr__(self):
        return self.__str__()

    def as_name(self, tensor_name):
        return self._boxed(value=self.value, tensor_name=tensor_name)

    def __add__(self, other):
        if isinstance(other, PaillierFixedPointTensor):
            z_value = _table_binary_op(self.value, other.value, operator.add)
            return PaillierFixedPointTensor(z_value)
        elif isinstance(other, FixedPointTensor):
            z_value = _table_binary_mod_op(self.value, other.value, self.q_field, operator.add)
        elif is_table(other):
            z_value = _table_binary_mod_op(self.value, other, self.q_field, operator.add)
        else:
            z_value = _table_scalar_mod_op(self.value, other, self.q_field, operator.add)
        return self._boxed(z_value)

    def __radd__(self, other):
        return self.__add__(other)

    def __sub__(self, other):
        if isinstance(other, PaillierFixedPointTensor):
            z_value = _table_binary_op(self.value, other.value, operator.sub)
            return PaillierFixedPointTensor(z_value)
        elif isinstance(other, FixedPointTensor):
            z_value = _table_binary_mod_op(self.value, other.value, self.q_field, operator.sub)
        elif is_table(other):
            z_value = _table_binary_mod_op(self.value, other, self.q_field, operator.sub)
        else:
            z_value = _table_scalar_mod_op(self.value, other, self.q_field, operator.sub)
        return self._boxed(z_value)

    def __rsub__(self, other):
        if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)):
            return other - self
        elif is_table(other):
            z_value = _table_binary_mod_op(other, self.value, self.q_field, operator.sub)
        else:
            # reversed operand order: compute other - self element-wise
            z_value = _table_scalar_mod_op(self.value, other, self.q_field, lambda a, d: d - a)
        return self._boxed(z_value)

    def __mul__(self, other):
        if isinstance(other, FixedPointTensor):
            z_value = _table_binary_mod_op(self.value, other.value, self.q_field, operator.mul)
        elif isinstance(other, PaillierFixedPointTensor):
            z_value = _table_binary_op(self.value, other.value, operator.mul)
        else:
            z_value = _table_scalar_mod_op(self.value, other, self.q_field, operator.mul)
        z_value = self.endec.truncate(z_value, self.get_spdz().party_idx)
        return self._boxed(z_value)

    def __rmul__(self, other):
        return self.__mul__(other)

    def __mod__(self, other):
        if not isinstance(other, (int, np.integer)):
            raise NotImplementedError("__mod__ support integer only")
        return self._boxed(_table_scalar_op(self.value, other, operator.mod))

    def _boxed(self, value, tensor_name=None):
        return FixedPointTensor(value=value, q_field=self.q_field, endec=self.endec, tensor_name=tensor_name)


class PaillierFixedPointTensor(TensorBase):
    __array_ufunc__ = None

    def __init__(self, value, tensor_name: str = None, cipher=None):
        super().__init__(q_field=None, tensor_name=tensor_name)
        self.value = value
        self.cipher = cipher

    def dot(self, other, target_name=None):
        def _vec_dot(x, y):
            ret = np.dot(x, y)
            if not isinstance(ret, np.ndarray):
                ret = np.array([ret])
            return ret

        if isinstance(other, (FixedPointTensor, fixedpoint_numpy.FixedPointTensor)):
            other = other.value
        if isinstance(other, np.ndarray):
            ret = self.value.mapValues(lambda x: _vec_dot(x, other))
            return self._boxed(ret, target_name)
        elif is_table(other):
            ret = table_dot(self.value, other).reshape((1, -1))[0]
            return fixedpoint_numpy.PaillierFixedPointTensor(ret, target_name)
        else:
            raise ValueError(f"type={type(other)}")

    def reduce(self, func, **kwargs):
        ret = self.value.reduce(func)
        return fixedpoint_numpy.PaillierFixedPointTensor(ret)

    def __str__(self):
        return f"tensor_name={self.tensor_name}, value={self.value}"

    def __repr__(self):
        return self.__str__()

    def __add__(self, other):
        if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)):
            return self._boxed(_table_binary_op(self.value, other.value, operator.add))
        elif is_table(other):
            return self._boxed(_table_binary_op(self.value, other, operator.add))
        else:
            return self._boxed(_table_scalar_op(self.value, other, operator.add))

    def __radd__(self, other):
        return self.__add__(other)

    def __sub__(self, other):
        if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)):
            return self._boxed(_table_binary_op(self.value, other.value, operator.sub))
        elif is_table(other):
            return self._boxed(_table_binary_op(self.value, other, operator.sub))
        else:
            return self._boxed(_table_scalar_op(self.value, other, operator.sub))

    def __rsub__(self, other):
        if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)):
            return self._boxed(_table_binary_op(other.value, self.value, operator.sub))
        elif is_table(other):
            return self._boxed(_table_binary_op(other, self.value, operator.sub))
        else:
            # reversed operand order: compute other - self element-wise
            return self._boxed(_table_scalar_op(self.value, other, lambda a, d: d - a))

    def __mul__(self, other):
        if isinstance(other, FixedPointTensor):
            z_value = _table_binary_op(self.value, other.value, operator.mul)
        elif is_table(other):
            z_value = _table_binary_op(self.value, other, operator.mul)
        else:
            z_value = _table_scalar_op(self.value, other, operator.mul)
        return self._boxed(z_value)

    def __rmul__(self, other):
        return self.__mul__(other)

    def _boxed(self, value, tensor_name=None):
        return PaillierFixedPointTensor(value=value, tensor_name=tensor_name)

    @classmethod
    def from_source(cls, tensor_name, source, **kwargs):
        spdz = cls.get_spdz()
        q_field = kwargs['q_field'] if 'q_field' in kwargs else spdz.q_field
        if 'encoder' in kwargs:
            encoder = kwargs['encoder']
        else:
            base = kwargs['base'] if 'base' in kwargs else 10
            frac = kwargs['frac'] if 'frac' in kwargs else 4
            encoder = FixedPointEndec(n=q_field, field=q_field, base=base, precision_fractional=frac)
        if is_table(source):
            _pre = urand_tensor(q_field, source, use_mix=spdz.use_mix_rand)
            share = _pre
            spdz.communicator.remote_share(share=_table_binary_op(source, encoder.decode(_pre), operator.sub),
                                           tensor_name=tensor_name, party=spdz.other_parties[-1])
            return FixedPointTensor(value=share,
                                    q_field=q_field,
                                    endec=encoder,
                                    tensor_name=tensor_name)
        elif isinstance(source, Party):
            share = spdz.communicator.get_share(tensor_name=tensor_name, party=source)[0]
            is_cipher_source = kwargs['is_cipher_source'] if 'is_cipher_source' in kwargs else True
            if is_cipher_source:
                cipher = kwargs.get("cipher")
                if cipher is None:
                    raise ValueError("Cipher is not provided")
                share = cipher.distribute_decrypt(share)
                share = encoder.encode(share)
            return FixedPointTensor(value=share,
                                    q_field=q_field,
                                    endec=encoder,
                                    tensor_name=tensor_name)
        else:
            raise ValueError(f"type={type(source)}")
16340
39.954887
110
py
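The table-backed FixedPointTensor above implements SPDZ-style additive secret sharing: every arithmetic operator is applied share-wise modulo q_field, so reconstruction is just a mod-q sum of the parties' shares. A minimal standalone sketch of that invariant (plain numpy; q_field and the share helper are illustrative stand-ins, not FATE's API):

# Minimal sketch of the additive sharing behind FixedPointTensor.
# q_field and share() are illustrative, not FATE's API.
import random

import numpy as np

q_field = 2 ** 61 - 1  # toy prime field, standing in for spdz.q_field


def share(secret):
    """Split an integer vector into two additive shares mod q_field."""
    r = np.array([random.randrange(q_field) for _ in secret], dtype=object)
    return r, (secret - r) % q_field


x = np.array([3, 14, 15], dtype=object)
y = np.array([9, 2, 6], dtype=object)
x0, x1 = share(x)
y0, y1 = share(y)

# Each party adds locally (cf. __add__ above); reconstruction is a mod-q sum.
z0, z1 = (x0 + y0) % q_field, (x1 + y1) % q_field
assert list((z0 + z1) % q_field) == list((x + y) % q_field)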
FATE
FATE-master/python/federatedml/secureprotol/spdz/tensor/__init__.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
616
37.5625
75
py
FATE
FATE-master/python/federatedml/secureprotol/spdz/tensor/fixedpoint_numpy.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import functools import numpy as np from fate_arch.common import Party from fate_arch.computing import is_table from federatedml.secureprotol.spdz.beaver_triples import beaver_triplets from federatedml.secureprotol.spdz.tensor import fixedpoint_table from federatedml.secureprotol.spdz.tensor.base import TensorBase from federatedml.secureprotol.spdz.utils import urand_tensor # from federatedml.secureprotol.spdz.tensor.fixedpoint_endec import FixedPointEndec from federatedml.secureprotol.fixedpoint import FixedPointEndec from federatedml.util import LOGGER class FixedPointTensor(TensorBase): __array_ufunc__ = None def __init__(self, value, q_field, endec, tensor_name: str = None): super().__init__(q_field, tensor_name) self.endec = endec self.value = value @property def shape(self): return self.value.shape def reshape(self, shape): return self._boxed(self.value.reshape(shape)) def dot(self, other, target_name=None): return self.einsum(other, "ij,ik->jk", target_name) def dot_local(self, other, target_name=None): if isinstance(other, FixedPointTensor): other = other.value ret = np.dot(self.value, other) % self.q_field ret = self.endec.truncate(ret, self.get_spdz().party_idx) if not isinstance(ret, np.ndarray): ret = np.array([ret]) return self._boxed(ret, target_name) def sub_matrix(self, tensor_name: str, row_indices=None, col_indices=None, rm_row_indices=None, rm_col_indices=None): if row_indices is not None: x_indices = list(row_indices) elif row_indices is None and rm_row_indices is not None: x_indices = [i for i in range(self.value.shape[0]) if i not in rm_row_indices] else: raise RuntimeError(f"invalid argument") if col_indices is not None: y_indices = list(col_indices) elif row_indices is None and rm_col_indices is not None: y_indices = [i for i in range(self.value.shape[0]) if i not in rm_col_indices] else: raise RuntimeError(f"invalid argument") value = self.value[x_indices, :][:, y_indices] return FixedPointTensor(value=value, q_field=self.q_field, endec=self.endec, tensor_name=tensor_name) @classmethod def from_source(cls, tensor_name, source, **kwargs): spdz = cls.get_spdz() q_field = kwargs['q_field'] if 'q_field' in kwargs else spdz.q_field if 'encoder' in kwargs: encoder = kwargs['encoder'] else: base = kwargs['base'] if 'base' in kwargs else 10 frac = kwargs['frac'] if 'frac' in kwargs else 4 encoder = FixedPointEndec(n=q_field, field=q_field, base=base, precision_fractional=frac) if isinstance(source, np.ndarray): source = encoder.encode(source) _pre = urand_tensor(q_field, source) spdz.communicator.remote_share(share=_pre, tensor_name=tensor_name, party=spdz.other_parties[0]) for _party in spdz.other_parties[1:]: r = urand_tensor(q_field, source) spdz.communicator.remote_share(share=(r - _pre) % q_field, tensor_name=tensor_name, party=_party) _pre = r share = (source - _pre) % q_field elif isinstance(source, Party): share = spdz.communicator.get_share(tensor_name=tensor_name, 
party=source)[0] else: raise ValueError(f"type={type(source)}") return FixedPointTensor(share, q_field, encoder, tensor_name) def einsum(self, other: 'FixedPointTensor', einsum_expr, target_name=None): spdz = self.get_spdz() target_name = target_name or spdz.name_service.next() def _dot_func(_x, _y): ret = np.dot(_x, _y) if not isinstance(ret, np.ndarray): ret = np.array([ret]) return ret # return np.einsum(einsum_expr, _x, _y, optimize=True) a, b, c = beaver_triplets(a_tensor=self.value, b_tensor=other.value, dot=_dot_func, q_field=self.q_field, he_key_pair=(spdz.public_key, spdz.private_key), communicator=spdz.communicator, name=target_name) x_add_a = self._raw_add(a).reconstruct(f"{target_name}_confuse_x") y_add_b = other._raw_add(b).reconstruct(f"{target_name}_confuse_y") cross = c - _dot_func(a, y_add_b) - _dot_func(x_add_a, b) if spdz.party_idx == 0: cross += _dot_func(x_add_a, y_add_b) cross = cross % self.q_field cross = self.endec.truncate(cross, self.get_spdz().party_idx) share = self._boxed(cross, tensor_name=target_name) return share def get(self, tensor_name=None, broadcast=True): return self.endec.decode(self.reconstruct(tensor_name, broadcast)) def reconstruct(self, tensor_name=None, broadcast=True): from federatedml.secureprotol.spdz import SPDZ spdz = SPDZ.get_instance() share_val = self.value.copy() name = tensor_name or self.tensor_name if name is None: raise ValueError("name not specified") # remote share to other parties if broadcast: spdz.communicator.broadcast_rescontruct_share(share_val, name) # get shares from other parties for other_share in spdz.communicator.get_rescontruct_shares(name): # LOGGER.debug(f"share_val: {share_val}, other_share: {other_share}") share_val += other_share try: share_val %= self.q_field return share_val except BaseException: return share_val def transpose(self): value = self.value.transpose() return self._boxed(value) def broadcast_reconstruct_share(self, tensor_name=None): from federatedml.secureprotol.spdz import SPDZ spdz = SPDZ.get_instance() share_val = self.value.copy() name = tensor_name or self.tensor_name if name is None: raise ValueError("name not specified") # remote share to other parties spdz.communicator.broadcast_rescontruct_share(share_val, name) return share_val def _boxed(self, value, tensor_name=None): return FixedPointTensor(value=value, q_field=self.q_field, endec=self.endec, tensor_name=tensor_name) def __str__(self): return f"tensor_name={self.tensor_name}, value={self.value}" def __repr__(self): return self.__str__() def as_name(self, tensor_name): return self._boxed(value=self.value, tensor_name=tensor_name) def _raw_add(self, other): z_value = (self.value + other) % self.q_field return self._boxed(z_value) def _raw_sub(self, other): z_value = (self.value - other) % self.q_field return self._boxed(z_value) def __add__(self, other): if isinstance(other, PaillierFixedPointTensor): z_value = (self.value + other) return PaillierFixedPointTensor(z_value) elif isinstance(other, FixedPointTensor): return self._raw_add(other.value) z_value = (self.value + other) % self.q_field return self._boxed(z_value) def __radd__(self, other): return self.__add__(other) def __sub__(self, other): if isinstance(other, PaillierFixedPointTensor): z_value = (self.value - other) return PaillierFixedPointTensor(z_value) elif isinstance(other, FixedPointTensor): return self._raw_sub(other.value) z_value = (self.value - other) % self.q_field return self._boxed(z_value) def __rsub__(self, other): if isinstance(other, (PaillierFixedPointTensor, 
FixedPointTensor)): return other - self z_value = (other - self.value) % self.q_field return self._boxed(z_value) def __mul__(self, other): if isinstance(other, PaillierFixedPointTensor): z_value = self.value * other.value return PaillierFixedPointTensor(z_value) if isinstance(other, FixedPointTensor): other = other.value z_value = self.value * other z_value = z_value % self.q_field z_value = self.endec.truncate(z_value, self.get_spdz().party_idx) return self._boxed(z_value) def __rmul__(self, other): return self.__mul__(other) def __matmul__(self, other): return self.einsum(other, "ij,jk->ik") class PaillierFixedPointTensor(TensorBase): __array_ufunc__ = None def __init__(self, value, tensor_name: str = None, cipher=None): super().__init__(q_field=None, tensor_name=tensor_name) self.value = value self.cipher = cipher def dot(self, other, target_name=None): def _vec_dot(x, y): ret = np.dot(x, y) if not isinstance(ret, np.ndarray): ret = np.array([ret]) return ret if isinstance(other, (FixedPointTensor, fixedpoint_table.FixedPointTensor)): other = other.value if isinstance(other, np.ndarray): ret = _vec_dot(self.value, other) return self._boxed(ret, target_name) elif is_table(other): f = functools.partial(_vec_dot, self.value) ret = other.mapValues(f) return fixedpoint_table.PaillierFixedPointTensor(value=ret, tensor_name=target_name, cipher=self.cipher) else: raise ValueError(f"type={type(other)}") def broadcast_reconstruct_share(self, tensor_name=None): from federatedml.secureprotol.spdz import SPDZ spdz = SPDZ.get_instance() share_val = self.value.copy() name = tensor_name or self.tensor_name if name is None: raise ValueError("name not specified") # remote share to other parties spdz.communicator.broadcast_rescontruct_share(share_val, name) return share_val def __str__(self): return f"tensor_name={self.tensor_name}, value={self.value}" def __repr__(self): return self.__str__() def _raw_add(self, other): z_value = (self.value + other) return self._boxed(z_value) def _raw_sub(self, other): z_value = (self.value - other) return self._boxed(z_value) def __add__(self, other): if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)): return self._raw_add(other.value) else: return self._raw_add(other) def __radd__(self, other): return self.__add__(other) def __sub__(self, other): if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)): return self._raw_sub(other.value) else: return self._raw_sub(other) def __rsub__(self, other): if isinstance(other, (PaillierFixedPointTensor, FixedPointTensor)): z_value = other.value - self.value else: z_value = other - self.value return self._boxed(z_value) def __mul__(self, other): if isinstance(other, PaillierFixedPointTensor): raise NotImplementedError("__mul__ not support PaillierFixedPointTensor") elif isinstance(other, FixedPointTensor): return self._boxed(self.value * other.value) else: return self._boxed(self.value * other) def __rmul__(self, other): self.__mul__(other) def _boxed(self, value, tensor_name=None): return PaillierFixedPointTensor(value=value, tensor_name=tensor_name, cipher=self.cipher) @classmethod def from_source(cls, tensor_name, source, **kwargs): spdz = cls.get_spdz() q_field = kwargs['q_field'] if 'q_field' in kwargs else spdz.q_field if 'encoder' in kwargs: encoder = kwargs['encoder'] else: base = kwargs['base'] if 'base' in kwargs else 10 frac = kwargs['frac'] if 'frac' in kwargs else 4 encoder = FixedPointEndec(n=q_field, field=q_field, base=base, precision_fractional=frac) if isinstance(source, np.ndarray): 
_pre = urand_tensor(q_field, source) share = _pre spdz.communicator.remote_share(share=source - encoder.decode(_pre), tensor_name=tensor_name, party=spdz.other_parties[-1]) return FixedPointTensor(value=share, q_field=q_field, endec=encoder, tensor_name=tensor_name) elif isinstance(source, Party): share = spdz.communicator.get_share(tensor_name=tensor_name, party=source)[0] is_cipher_source = kwargs['is_cipher_source'] if 'is_cipher_source' in kwargs else True if is_cipher_source: cipher = kwargs['cipher'] share = cipher.recursive_decrypt(share) share = encoder.encode(share) return FixedPointTensor(value=share, q_field=q_field, endec=encoder, tensor_name=tensor_name) else: raise ValueError(f"type={type(source)}")
14360
37.604839
113
py
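einsum above is a Beaver-triple multiplication: given shares of a triple (a, b, c = a*b), the parties open only the masked values x + a and y + b and still obtain shares of x*y. A toy scalar version of the same algebra (self-contained; in the record above the real triples come from beaver_triplets via Paillier):

# Toy Beaver-triple multiplication of two scalars mod q, mirroring the
# masking in einsum above. All names are illustrative.
import random

q = 2 ** 61 - 1


def share(v):
    r = random.randrange(q)
    return r, (v - r) % q


x, y = 123, 456
a, b = random.randrange(q), random.randrange(q)
c = a * b % q  # the precomputed triple: c = a * b

x0, x1 = share(x)
y0, y1 = share(y)
a0, a1 = share(a)
b0, b1 = share(b)
c0, c1 = share(c)

# Both parties open the masked values e = x + a and f = y + b
# (the "confuse" reconstructions above), which leak nothing about x or y.
e = (x0 + a0 + x1 + a1) % q
f = (y0 + b0 + y1 + b1) % q

# Each party computes its share of x*y; only party 0 adds e*f, exactly
# as `if spdz.party_idx == 0` does above.
z0 = (c0 - a0 * f - e * b0 + e * f) % q
z1 = (c1 - a1 * f - e * b1) % q
assert (z0 + z1) % q == x * y % q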
FATE
FATE-master/python/federatedml/secureprotol/spdz/tensor/fixedpoint_endec.py
#
# Copyright 2021 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools

import numpy as np

from fate_arch.session import is_table


class FixedPointEndec(object):

    def __init__(self, field: int, base: int, precision_fractional: int, *args, **kwargs):
        self.field = field
        self.base = base
        self.precision_fractional = precision_fractional

    def _encode(self, float_tensor: np.ndarray, check_range=True):
        upscaled = (float_tensor * self.base ** self.precision_fractional).astype(np.int64)
        if check_range:
            assert (np.abs(upscaled) < (self.field / 2)).all(), (
                f"{float_tensor} cannot be correctly embedded: choose bigger field or a lower precision"
            )
        field_element = upscaled % self.field
        return field_element

    def _decode(self, integer_tensor: np.ndarray):
        value = integer_tensor % self.field
        gate = value > self.field // 2
        neg_nums = (value - self.field) * gate
        pos_nums = value * (1 - gate)
        result = (neg_nums + pos_nums) / (self.base ** self.precision_fractional)
        return result

    def _truncate(self, integer_tensor, idx=0):
        if idx == 0:
            return self.field - (self.field - integer_tensor) // (self.base ** self.precision_fractional)
        else:
            return integer_tensor // (self.base ** self.precision_fractional)

    def encode(self, float_tensor, check_range=True):
        if isinstance(float_tensor, (float, np.floating)):
            float_tensor = np.array(float_tensor)
        if isinstance(float_tensor, np.ndarray):
            return self._encode(float_tensor, check_range)
        elif is_table(float_tensor):
            f = functools.partial(self._encode, check_range=check_range)
            return float_tensor.mapValues(f)
        else:
            raise ValueError(f"unsupported type: {type(float_tensor)}")

    def decode(self, integer_tensor):
        if isinstance(integer_tensor, (int, np.int16, np.int32, np.int64)):
            integer_tensor = np.array(integer_tensor)
        if isinstance(integer_tensor, np.ndarray):
            return self._decode(integer_tensor)
        elif is_table(integer_tensor):
            # apply _decode to every value of the table
            return integer_tensor.mapValues(self._decode)
        else:
            raise ValueError(f"unsupported type: {type(integer_tensor)}")

    def truncate(self, integer_tensor, idx=0):
        if isinstance(integer_tensor, (int, np.int16, np.int32, np.int64)):
            integer_tensor = np.array(integer_tensor)
        if isinstance(integer_tensor, np.ndarray):
            return self._truncate(integer_tensor, idx)
        elif is_table(integer_tensor):
            f = functools.partial(self._truncate, idx=idx)
            return integer_tensor.mapValues(f)
        else:
            raise ValueError(f"unsupported type: {type(integer_tensor)}")
3490
40.070588
105
py
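FixedPointEndec embeds floats into the field by upscaling with base ** precision_fractional and reducing mod field; decode maps residues above field // 2 back to negatives. A standalone round-trip check of that encoding (constants are illustrative):

# Standalone round trip of the fixed-point embedding used by _encode/_decode.
import numpy as np

field, base, frac = 2 ** 61 - 1, 10, 4


def encode(x):
    return (x * base ** frac).astype(np.int64) % field  # upscale, embed


def decode(v):
    v = v % field
    signed = np.where(v > field // 2, v - field, v)  # high residues are negatives
    return signed / base ** frac


x = np.array([1.65, -2.7183])
assert np.allclose(decode(encode(x)), x, atol=1 / base ** frac)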
FATE
FATE-master/python/federatedml/secureprotol/spdz/utils/random_utils2.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import functools import math import random import sys import numpy as np from fate_arch.session import is_table from federatedml.secureprotol.fixedpoint import FixedPointNumber FLOAT_MANTISSA_BITS = 32 PRECISION = 2 ** FLOAT_MANTISSA_BITS def rand_number_generator(q_field): number = FixedPointNumber(encoding=random.randint(1, PRECISION), exponent=math.floor((FLOAT_MANTISSA_BITS / 2) / FixedPointNumber.LOG2_BASE), n=q_field ) return number def rand_tensor(q_field, tensor): if is_table(tensor): return tensor.mapValues( lambda x: np.array([rand_number_generator(q_field=q_field) for _ in x], dtype=FixedPointNumber) ) if isinstance(tensor, np.ndarray): arr = np.zeros(shape=tensor.shape, dtype=FixedPointNumber) view = arr.view().reshape(-1) for i in range(arr.size): view[i] = rand_number_generator(q_field=q_field) return arr raise NotImplementedError(f"type={type(tensor)}") class _MixRand(object): def __init__(self, q_field, base_size=1000, inc_velocity=0.1, inc_velocity_deceleration=0.01): self._caches = [] self._q_field = q_field # generate base random numbers for _ in range(base_size): rand_num = rand_number_generator(q_field=self._q_field) self._caches.append(rand_num) self._inc_rate = inc_velocity self._inc_velocity_deceleration = inc_velocity_deceleration def _inc(self): rand_num = rand_number_generator(q_field=self._q_field) self._caches.append(rand_num) def __next__(self): if random.random() < self._inc_rate: self._inc() return self._caches[random.randint(0, len(self._caches) - 1)] def __iter__(self): return self def _mix_rand_func(it, q_field): _mix = _MixRand(q_field) result = [] for k, v in it: result.append((k, np.array([next(_mix) for _ in v], dtype=object))) return result def urand_tensor(q_field, tensor, use_mix=False): if is_table(tensor): if use_mix: return tensor.mapPartitions(functools.partial(_mix_rand_func, q_field=q_field), use_previous_behavior=False, preserves_partitioning=True) return tensor.mapValues( lambda x: np.array([rand_number_generator(q_field=q_field) for _ in x], dtype=FixedPointNumber)) if isinstance(tensor, np.ndarray): arr = np.zeros(shape=tensor.shape, dtype=FixedPointNumber) view = arr.view().reshape(-1) for i in range(arr.size): view[i] = rand_number_generator(q_field=q_field) return arr raise NotImplementedError(f"type={type(tensor)}")
3703
32.981651
98
py
FATE
FATE-master/python/federatedml/secureprotol/spdz/utils/random_utils.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import array import functools import random import numpy as np from fate_arch.session import is_table def rand_tensor(q_field, tensor): if is_table(tensor): return tensor.mapValues( lambda x: np.array([random.randint(1, q_field) for _ in x], dtype=object)) if isinstance(tensor, np.ndarray): arr = np.array([random.randint(1, q_field) for _ in tensor], dtype=object) return arr raise NotImplementedError(f"type={type(tensor)}") class _MixRand(object): def __init__(self, lower, upper, base_size=1000, inc_velocity=0.1, inc_velocity_deceleration=0.01): self._lower = lower if self._lower < 0: raise ValueError(f"lower should great than 0, found {self._lower}") self._upper = upper if self._upper < self._lower: raise ValueError(f"requires upper >= lower, yet upper={upper} and lower={lower}") if self._upper <= 0x40000000: self._caches = array.array('i') else: self._caches = array.array('l') # generate base random numbers for _ in range(base_size): self._caches.append(random.SystemRandom().randint(self._lower, self._upper)) self._inc_rate = inc_velocity self._inc_velocity_deceleration = inc_velocity_deceleration def _inc(self): self._caches.append(random.SystemRandom().randint(self._lower, self._upper)) def __next__(self): if random.random() < self._inc_rate: self._inc() return self._caches[random.randint(0, len(self._caches) - 1)] def __iter__(self): return self def _mix_rand_func(it, q_field): _mix = _MixRand(1, q_field) result = [] for k, v in it: result.append((k, np.array([next(_mix) for _ in v], dtype=object))) return result def urand_tensor(q_field, tensor, use_mix=False): if is_table(tensor): if use_mix: return tensor.mapPartitions(functools.partial(_mix_rand_func, q_field=q_field), use_previous_behavior=False, preserves_partitioning=True) return tensor.mapValues( lambda x: np.array([random.SystemRandom().randint(1, q_field) for _ in x], dtype=object)) if isinstance(tensor, np.ndarray): arr = np.zeros(shape=tensor.shape, dtype=object) view = arr.view().reshape(-1) for i in range(arr.size): view[i] = random.SystemRandom().randint(1, q_field) return arr raise NotImplementedError(f"type={type(tensor)}")
3211
35.089888
103
py
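_MixRand above (and its FixedPointNumber twin in random_utils2.py) amortizes the cost of random.SystemRandom by serving draws from a slowly growing cache of pre-generated numbers. A compressed standalone rendering of the idea (toy parameters):

# Cache-backed sampler in the spirit of _MixRand (toy sizes).
import random


class MixRandSketch:
    def __init__(self, upper, base_size=8, inc_rate=0.1):
        self._upper = upper
        self._cache = [random.SystemRandom().randint(1, upper)
                       for _ in range(base_size)]
        self._inc_rate = inc_rate

    def __next__(self):
        if random.random() < self._inc_rate:  # occasionally add fresh entropy
            self._cache.append(random.SystemRandom().randint(1, self._upper))
        return random.choice(self._cache)  # cheap draw from the cache


mix = MixRandSketch(upper=2 ** 31 - 1)
masks = [next(mix) for _ in range(5)]
assert all(1 <= m <= 2 ** 31 - 1 for m in masks)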
FATE
FATE-master/python/federatedml/secureprotol/spdz/utils/__init__.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.secureprotol.spdz.utils.naming import NamingService # from federatedml.secureprotol.spdz.utils.random_utils import rand_tensor from federatedml.secureprotol.spdz.utils.random_utils2 import rand_tensor, urand_tensor
848
43.684211
87
py
FATE
FATE-master/python/federatedml/secureprotol/spdz/utils/naming.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import hashlib


class NamingService(object):
    __instance = None

    @classmethod
    def get_instance(cls):
        if cls.__instance is None:
            raise EnvironmentError("naming service not set")
        return cls.__instance

    @classmethod
    def set_instance(cls, instance):
        prev = cls.__instance
        cls.__instance = instance
        return prev

    def __init__(self, init_name="ss"):
        self._name = hashlib.md5(init_name.encode("utf-8")).hexdigest()

    def next(self):
        self._name = hashlib.md5(self._name.encode("utf-8")).hexdigest()
        return self._name
1227
29.7
75
py
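NamingService.next derives tensor names by repeatedly hashing the previous name, so parties that start from the same seed walk identical name chains without any negotiation. Standalone illustration:

import hashlib


def name_chain(seed, n):
    name = hashlib.md5(seed.encode("utf-8")).hexdigest()
    chain = []
    for _ in range(n):
        name = hashlib.md5(name.encode("utf-8")).hexdigest()
        chain.append(name)
    return chain


# Two parties seeded with the same init_name agree on every tensor name.
assert name_chain("ss", 3) == name_chain("ss", 3)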
FATE
FATE-master/python/federatedml/secureprotol/oblivious_transfer/base_oblivious_transfer.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


class ObliviousTransfer(object):
    """
    Base OT class
    """

    def __init__(self):
        pass


class ObliviousTransferKey(object):
    """
    A key structure used in OT protocols
    """

    def __init__(self, index, key):
        """
        :param index: natural numbers
        :param key:
        """
        self.index = index
        self.key = key
1035
23.093023
75
py
FATE
FATE-master/python/federatedml/secureprotol/oblivious_transfer/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
663
33.947368
75
py
FATE
FATE-master/python/federatedml/secureprotol/oblivious_transfer/hauck_oblivious_transfer/hauck_oblivious_transfer.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import random from federatedml.secureprotol.number_theory.group.twisted_edwards_curve_group import TwistedEdwardsCurveArithmetic from federatedml.secureprotol.oblivious_transfer.base_oblivious_transfer import ObliviousTransfer from federatedml.secureprotol.random_oracle.hash_function.sha256 import Sha256 from federatedml.secureprotol.random_oracle.message_authentication_code.sha256_mac import Sha256MAC from federatedml.transfer_variable.transfer_class.oblivious_transfer_transfer_variable \ import ObliviousTransferTransferVariable class HauckObliviousTransfer(ObliviousTransfer): """ An implementation of the work in Hauck Eduard, and Julian Loss. "Efficient and universally composable protocols for oblivious transfer from the CDH assumption." 2017 Currently supports only 1-N scenarios """ def __init__(self): super(HauckObliviousTransfer, self).__init__() self.tec_arithmetic = TwistedEdwardsCurveArithmetic() self.hash = Sha256() self.mac = None # the MAC's init needs a key self.transfer_variable = ObliviousTransferTransferVariable() def _gen_random_scalar(self): """ Generate a random integer over [0, q - 1], where q is the order of the Galois field used :return: """ return random.randint(0, self.tec_arithmetic.get_field_order() - 1) def _hash_tec_element(self, element): """ Hash a Twisted Edwards Curve element :param element: TwistedEdwardsCurveElement :return: -1 if hash fails, otherwise the correct TwistedEdwardsCurveElement """ element_bytes = self.tec_arithmetic.encode(element) element_digest = self.hash.digest(element_bytes) return self.tec_arithmetic.decode(element_digest) def _init_mac(self, s, r): """ Init the MAC with key = (S, R) :param s, r :return: """ key = self.tec_arithmetic.encode(s) + self.tec_arithmetic.encode(r) self.mac = Sha256MAC(key) def _mac_tec_element(self, element, decode_output=False): """ MAC a Twisted Edwards Curve element If decode_output = True, decode the 256-bit bytes to a TEC element, otherwise output 32byte bytes :param element: TwistedEdwardsCurveElement :return: -1 or the correct TwistedEdwardsCurveElement if decode_output = True, otherwise 32-byte bytes """ element_bytes = self.tec_arithmetic.encode(element) if self.mac is None: raise ValueError("MAC not initialized") element_digest = self.mac.digest(element_bytes) if decode_output: return self.tec_arithmetic.decode(element_digest) else: return element_digest
3442
39.505882
114
py
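_init_mac above keys the MAC with encode(S) || encode(R), and _mac_tec_element then digests each curve point's encoding. FATE's Sha256MAC wrapper is not shown in this section, so the sketch below substitutes HMAC-SHA256 (an assumption; the wrapper's exact construction may differ) purely to show the key/message layout:

# Key/message layout of the OT MAC, with HMAC-SHA256 as an assumed
# stand-in for FATE's Sha256MAC wrapper.
import hashlib
import hmac


def init_mac(s_bytes: bytes, r_bytes: bytes):
    # key = encode(S) || encode(R), as in _init_mac above
    key = s_bytes + r_bytes
    return lambda msg: hmac.new(key, msg, hashlib.sha256).digest()


mac = init_mac(b"encoded-point-S", b"encoded-point-R")
tag = mac(b"encoded-point-xS")  # 32-byte digest, as in _mac_tec_element
assert len(tag) == 32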
FATE
FATE-master/python/federatedml/secureprotol/oblivious_transfer/hauck_oblivious_transfer/hauck_oblivious_transfer_receiver.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.secureprotol.number_theory.group.twisted_edwards_curve_group import TwistedEdwardsCurveElement from federatedml.secureprotol.oblivious_transfer.base_oblivious_transfer import ObliviousTransferKey from federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer import \ HauckObliviousTransfer from federatedml.util import consts, LOGGER class HauckObliviousTransferReceiver(HauckObliviousTransfer): """ Hauck-OT for the receiver (guest) """ def __init__(self): super(HauckObliviousTransferReceiver, self).__init__() def key_derivation(self, target): """ Generate a key the corresponds to target :param target: k int >= 0 in k-N OT :return: ObliviousTransferKey """ LOGGER.info("enter receiver key derivation phase for target = {}".format(target)) # 1. Choose a random scalar from Z^q x = self._gen_random_scalar() # x LOGGER.info("randomly generated scalar x") # 2. Get S = yG from the sender and check its legality attempt_count = 0 while True: s = self.transfer_variable.s.get(idx=0, suffix=(attempt_count,)) # s = federation.get(name=self.transfer_variable.s.name, # tag=self.transfer_variable.generate_transferid(self.transfer_variable.s, attempt_count), # idx=0) LOGGER.info("got from host S = " + s.output()) s_legal, t = self._check_s_t_legal(s) self.transfer_variable.s_legal.remote(s_legal, suffix=(attempt_count,), role=consts.HOST, idx=0) # federation.remote(obj=s_legal, # name=self.transfer_variable.s_legal.name, # tag=self.transfer_variable.generate_transferid(self.transfer_variable.s_legal, # attempt_count), # role=consts.HOST, # idx=0) if s_legal: LOGGER.info("S is legal at {} attempt".format(attempt_count)) break else: LOGGER.info("S is illegal at {} attempt, will retry to get a legal S".format(attempt_count)) attempt_count += 1 # 3. Slack LOGGER.info("S is hashed to get T = " + t.output()) # 4. Compute and send to the sender R = cT + xG, also init the MAC c = target ct = self.tec_arithmetic.mul(scalar=c, a=t) # cT xg = self.tec_arithmetic.mul(scalar=x, a=self.tec_arithmetic.get_generator()) # xG r = self.tec_arithmetic.add(a=ct, b=xg) # R = cT + xG self.transfer_variable.r.remote(r, role=consts.HOST, idx=0) # federation.remote(obj=r, # name=self.transfer_variable.r.name, # tag=self.transfer_variable.generate_transferid(self.transfer_variable.r), # role=consts.HOST, # idx=0) LOGGER.info("sent to host R = " + r.output()) self._init_mac(s, r) # 5. 
MAC and output the correct key xs = self.tec_arithmetic.mul(scalar=x, a=s) # LOGGER.info("target index = " + str(target)) # LOGGER.info("target key before MAC = " + xs.output()) target_key = self._mac_tec_element(xs) # LOGGER.info("target key = {}".format(target_key)) return ObliviousTransferKey(target, target_key) def _check_s_t_legal(self, s): """ Check if s is in the TEC group and t is valid :param s: TwistedEdwardsCurveElement :return: """ t = self._hash_tec_element(s) return self.tec_arithmetic.is_in_group(s) and isinstance(t, TwistedEdwardsCurveElement), t
4818
43.211009
121
py
FATE
FATE-master/python/federatedml/secureprotol/oblivious_transfer/hauck_oblivious_transfer/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
663
33.947368
75
py
FATE
FATE-master/python/federatedml/secureprotol/oblivious_transfer/hauck_oblivious_transfer/hauck_oblivious_transfer_sender.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.secureprotol.oblivious_transfer.base_oblivious_transfer import ObliviousTransferKey from federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer import \ HauckObliviousTransfer from federatedml.util import consts, LOGGER class HauckObliviousTransferSender(HauckObliviousTransfer): """ Hauck-OT for the sender (host) """ def __init__(self): super(HauckObliviousTransferSender, self).__init__() def key_derivation(self, target_num): """ Derive a list of keys for encryption and transmission :param target_num: N in k-N OT :return: List[ObliviousTransferKey] """ LOGGER.info("enter sender key derivation phase for target num = {}".format(target_num)) # 1. Choose a random scalar (y) from Z^q, calculate S and T to verify its legality y, s, t = self._gen_legal_y_s_t() # 2. Send S to the receiver, if it is illegal addressed by the receiver, regenerate y, S, T attempt_count = 0 while True: self.transfer_variable.s.remote(s, suffix=(attempt_count,), role=consts.GUEST, idx=0) # federation.remote(obj=s, # name=self.transfer_variable.s.name, # tag=self.transfer_variable.generate_transferid(self.transfer_variable.s, attempt_count), # role=consts.GUEST, # idx=0) LOGGER.info("sent S to guest for the {}-th time".format(attempt_count)) s_legal = self.transfer_variable.s_legal.get(idx=0, suffix=(attempt_count,)) # s_legal = federation.get(name=self.transfer_variable.s_legal.name, # tag=self.transfer_variable.generate_transferid(self.transfer_variable.s_legal, # attempt_count), # idx=0) if s_legal: LOGGER.info("receiver confirms the legality of S at {} attempt, will proceed".format(attempt_count)) break else: LOGGER.info("receiver rejects this S at {} attempt, will regenerate S".format(attempt_count)) y, s, t = self._gen_legal_y_s_t() attempt_count += 1 # 3. Wait for the receiver to hash S to get T LOGGER.info("waiting for the receiver to hash S to get T") # 4. Get R = cT + xG from the receiver, also init the MAC r = self.transfer_variable.r.get(idx=0) # r = federation.get(name=self.transfer_variable.r.name, # tag=self.transfer_variable.generate_transferid(self.transfer_variable.r), # idx=0) LOGGER.info("got from guest R = " + r.output()) self._init_mac(s, r) # 5. 
MAC and output the key list key_list = [] yt = self.tec_arithmetic.mul(scalar=y, a=t) # yT yr = self.tec_arithmetic.mul(scalar=y, a=r) # yR for i in range(target_num): iyt = self.tec_arithmetic.mul(scalar=i, a=yt) # iyT diff = self.tec_arithmetic.sub(a=yr, b=iyt) # yR - iyT key = self._mac_tec_element(diff) # LOGGER.info("{}-th key generated".format(i)) # LOGGER.info("key before MAC = " + diff.output()) # LOGGER.info("key = {}".format(key)) key_list.append(ObliviousTransferKey(i, key)) LOGGER.info("all keys successfully generated") return key_list def _gen_legal_y_s_t(self): while True: y = self._gen_random_scalar() s = self.tec_arithmetic.mul(scalar=y, a=self.tec_arithmetic.get_generator()) # S = yG t = self._hash_tec_element(s) if self.tec_arithmetic.is_in_group(s) and not isinstance(t, int): # Both S and T are legal LOGGER.info("randomly generated y, S, T") return y, s, t
4916
44.527778
120
py
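Putting the receiver and sender records above together: the receiver ends with MAC(xS) = MAC(xyG), while the sender's i-th key is MAC(yR - iyT) = MAC((c - i)yT + xyG), so exactly the key at index i = c matches. A toy run in a multiplicative group mod p, where exponentiation stands in for Twisted Edwards scalar multiplication (every constant is illustrative, not ed25519):

# Toy 1-out-of-N OT key agreement in a prime field; pow() stands in for
# curve scalar multiplication. All parameters are illustrative.
import hashlib
import random

p = 2 ** 127 - 1  # a Mersenne prime, used only as a toy modulus
g = 3


def hash_to_group(x):
    e = int.from_bytes(hashlib.sha256(str(x).encode()).digest(), "big")
    return pow(g, e % (p - 1), p)


def mac(x):
    return hashlib.sha256(str(x).encode()).digest()


y = random.randrange(1, p - 1)       # sender scalar
s = pow(g, y, p)                     # S = yG
t = hash_to_group(s)                 # T = Hash(S)

c = 2                                # receiver's choice index
x = random.randrange(1, p - 1)       # receiver scalar
r = pow(t, c, p) * pow(g, x, p) % p  # R = cT + xG

receiver_key = mac(pow(s, x, p))     # MAC(xS)
sender_keys = [
    # yR - iyT becomes R^y * (T^(iy))^-1 in multiplicative notation
    mac(pow(r, y, p) * pow(pow(t, i * y, p), p - 2, p) % p)
    for i in range(4)
]
assert sender_keys[c] == receiver_key  # only index i == c matches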
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/py_aes_encryption.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import numpy as np from federatedml.secureprotol.symmetric_encryption.py_aes_core import AESModeOfOperationOFB from federatedml.secureprotol.symmetric_encryption.symmetric_encryption import SymmetricKey from federatedml.util import conversion class AESKey(SymmetricKey): """ Note that a key cannot used for both encryption and decryption scenarios """ def __init__(self, key, nonce=None): """ :param key: bytes, must be 16, 24 or 32 bytes long :param nonce: bytes, must be 16 bytes long """ super(AESKey, self).__init__() if nonce is None: self.nonce = os.urandom(16) self.key = key self.cipher_core = AESModeOfOperationOFB(key=self.key, iv=self.nonce) else: self.nonce = nonce self.key = key self.cipher_core = AESModeOfOperationOFB(key=self.key, iv=self.nonce) def _renew(self): """ Self renew cipher_core after encryption and decryption :return: """ self.cipher_core = AESModeOfOperationOFB(key=self.key, iv=self.nonce) class AESEncryptKey(AESKey): """ AES encryption scheme Note that the ciphertext size is affected only by that of the plaintext, instead of the key length """ def __init__(self, key): super(AESEncryptKey, self).__init__(key=key) def encrypt(self, plaintext): if isinstance(plaintext, list): return [self.encrypt_single_val(p) for p in plaintext] else: return self.encrypt_single_val(plaintext) def encrypt_single_val(self, plaintext): if not isinstance(plaintext, bytes): plaintext = self._all_to_bytes(plaintext) elif isinstance(plaintext, bytes): pass else: raise TypeError("AES encryptor supports bytes/int/float/str") ciphertext = self.cipher_core.encrypt(plaintext) self._renew() return ciphertext @staticmethod def _all_to_bytes(message): """ Convert an int/float/str to bytes, e.g., 1.65 -> b'1.65', 'hello -> b'hello' :param message: int/float/str :return: -1 if type error, otherwise str """ if isinstance(message, int) or isinstance(message, float): return conversion.str_to_bytes(str(message)) elif isinstance(message, str): return conversion.str_to_bytes(message) else: return -1 def get_nonce(self): return self.nonce class AESDecryptKey(AESKey): """ AES decryption scheme """ def __init__(self, key, nonce): super(AESDecryptKey, self).__init__(key=key, nonce=nonce) def decrypt(self, ciphertext): if isinstance(ciphertext, list): return np.array([self.decrypt_single_val(p) for p in ciphertext]) else: return self.decrypt_single_val(ciphertext) def decrypt_single_val(self, ciphertext): """ :param ciphertext: bytes :return: str """ if not isinstance(ciphertext, bytes): raise TypeError("AES decryptor supports bytes only") plaintext = conversion.bytes_to_str(self.cipher_core.decrypt(ciphertext)) self._renew() return plaintext
3973
30.792
102
py
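A round-trip sketch for the OFB-mode wrappers above, assuming the FATE package is importable; note the decryptor must be constructed with the encryptor's nonce, as the class docstrings require:

# Round trip of the pure-Python OFB wrappers (assumes FATE is importable).
import os

from federatedml.secureprotol.symmetric_encryption.py_aes_encryption import (
    AESDecryptKey, AESEncryptKey)

key = os.urandom(16)  # 16-, 24- or 32-byte AES key
enc = AESEncryptKey(key)
ciphertext = enc.encrypt("hello fate")  # str is converted to bytes internally
dec = AESDecryptKey(key, nonce=enc.get_nonce())  # decryptor reuses the nonce
assert dec.decrypt(ciphertext) == "hello fate"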
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/symmetric_encryption.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


class SymmetricKey(object):
    """
    Symmetric encryption key
    """

    def __init__(self):
        pass

    def encrypt(self, plaintext):
        """
        Encryption method
        :param plaintext:
        :return:
        """
        pass

    def decrypt(self, ciphertext):
        """
        Decryption method
        :param ciphertext:
        :return:
        """
        pass


class SymmetricCiphertext(object):
    def __init__(self):
        pass
1132
23.106383
75
py
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/xor_encryption.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.secureprotol.symmetric_encryption.symmetric_encryption import SymmetricKey, SymmetricCiphertext from federatedml.util import conversion class XorCipherKey(SymmetricKey): """ key = (self.alpha, self.beta), expected to be 256 bits Enc(m) = (m XOR self.alpha, self.beta) Dec(c) = c.message XOR alpha if c.verifier == self.beta, None otherwise Note that the maximum size of the plaintext supported is principally determined by len(key) // 2 """ def __init__(self, key): """ self.alpha and self.beta are str-typed binaries, e.g., '1010' :param key: bytes """ super(XorCipherKey, self).__init__() self.alpha = conversion.bytes_to_bin(key[:(len(key) // 2)]) # binary string self.beta = conversion.bytes_to_bin(key[(len(key) // 2):]) # binary string if len(self.beta) % 8 != 0: raise ValueError("XOR encryption invalid key") self.beta_string = conversion.bin_to_str(self.beta) # unicode-string def encrypt(self, plaintext): """ :param plaintext: int/float/str :return: XorCiphertext """ plaintext_bin = self._all_to_bin(plaintext) if plaintext_bin == -1: raise TypeError('Xor encryption only supports int/float/str plaintext') ciphertext_bin = self._xor(plaintext_bin, self.alpha) ciphertext = self._bin_to_str(ciphertext_bin) return XorCiphertext(ciphertext, self.beta_string[:len(ciphertext)]) def decrypt(self, ciphertext): """ :param ciphertext: XorCiphertext :return: str """ if ciphertext.verifier != self.beta_string[:len(ciphertext.verifier)]: raise ValueError("XOR encryption invalid ciphertext") ciphertext_bin = self._all_to_bin(ciphertext.message) plaintext_bin = self._xor(ciphertext_bin, self.alpha) return self._bin_to_str(plaintext_bin) @staticmethod def _xor(str1, str2): """ Compute the bit-wise XOR result of two binary numbers in string, e.g., 01011010 = _xor('10101010', '11110010') If two string are different in length, XOR starts applying from highest (left-most) bit, and abandons the longer one's mantissa :param str1: str, whose length must be a multiple of 8 :param str2: str, whose length must be a multiple of 8 :return: str, whose length must be a multiple of 8 """ res = '' for i in range(min(len(str1), len(str2))): res += XorCipherKey._xor_bit(str1[i], str2[i]) return res @staticmethod def _xor_bit(char1, char2): """ Compute the XOR result of two bits in string, e.g., '1' = _xor_bit('0', '1') :param char1: str :param char2: str :return: str """ return '0' if char1 == char2 else '1' @staticmethod def _all_to_bin(message): """ Convert an int/float/str to a binary number in string, e.g., 1.65 -> '110001101110110110110101' :param message: int/float/str :return: -1 if type error, otherwise str """ if isinstance(message, int) or isinstance(message, float): return conversion.str_to_bin(str(message)) elif isinstance(message, str): return conversion.str_to_bin(message) else: return -1 @staticmethod def _bin_to_str(message): """ Convert a binary number in 
string to Unicode string :param message: str, whose length must be a multiple of 8 :return: str """ return conversion.bin_to_str(message) class XorCiphertext(SymmetricCiphertext): """ ciphertext = (self.message, self.verifier) """ def __init__(self, message, verifier): super(XorCiphertext, self).__init__() self.message = message self.verifier = verifier
4,617
35.650794
120
py
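XorCipherKey is a one-time pad with a built-in verifier: alpha masks the message, beta authenticates the ciphertext, and the plaintext may be at most len(key) // 2 bytes. Round-trip sketch (assumes FATE is importable):

# Round trip for XorCipherKey (assumes FATE is importable); the key must
# satisfy: plaintext bytes <= len(key) // 2.
import os

from federatedml.secureprotol.symmetric_encryption.xor_encryption import XorCipherKey

cipher = XorCipherKey(os.urandom(64))  # alpha and beta are 32 bytes each
ct = cipher.encrypt("id_12345")        # -> XorCiphertext(message, verifier)
assert cipher.decrypt(ct) == "id_12345"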
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/cryptor_executor.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import functools class CryptoExecutor(object): def __init__(self, cipher_core): self.cipher_core = cipher_core def init(self): self.cipher_core.init() def renew(self, cipher_core): self.cipher_core = cipher_core def map_hash_encrypt(self, plaintable, mode, hash_operator, salt): """ Process the input Table as (k, v) (k, enc_k) for mode == 0 (enc_k, -1) for mode == 1 (enc_k, v) for mode == 2 (k, (enc_k, v)) for mode == 3 (enc_k, k) for mode == 4 (enc_k, (k, v)) for mode == 5 :param plaintable: Table :param mode: int :return: Table """ if mode == 0: return plaintable.map( lambda k, v: ( k, self.cipher_core.encrypt( hash_operator.compute( k, suffix_salt=salt)))) elif mode == 1: return plaintable.map( lambda k, v: ( self.cipher_core.encrypt( hash_operator.compute( k, suffix_salt=salt)), -1)) elif mode == 2: return plaintable.map( lambda k, v: ( self.cipher_core.encrypt( hash_operator.compute( k, suffix_salt=salt)), v)) elif mode == 3: return plaintable.map( lambda k, v: ( k, (self.cipher_core.encrypt( hash_operator.compute( k, suffix_salt=salt)), v))) elif mode == 4: return plaintable.map( lambda k, v: ( self.cipher_core.encrypt( hash_operator.compute( k, suffix_salt=salt)), k)) elif mode == 5: return plaintable.map( lambda k, v: (self.cipher_core.encrypt(hash_operator.compute(k, suffix_salt=salt)), (k, v))) else: raise ValueError("Unsupported mode for crypto_executor map encryption") def map_encrypt(self, plaintable, mode): """ Process the input Table as (k, v) (k, enc_k) for mode == 0 (enc_k, -1) for mode == 1 (enc_k, v) for mode == 2 (k, (enc_k, v)) for mode == 3 (enc_k, k) for mode == 4 (enc_k, (k, v)) for mode == 5 :param plaintable: Table :param mode: int :return: Table """ if mode == 0: return plaintable.map(lambda k, v: (k, self.cipher_core.encrypt(k))) elif mode == 1: return plaintable.map(lambda k, v: (self.cipher_core.encrypt(k), -1)) elif mode == 2: return plaintable.map(lambda k, v: (self.cipher_core.encrypt(k), v)) elif mode == 3: return plaintable.map(lambda k, v: (k, (self.cipher_core.encrypt(k), v))) elif mode == 4: return plaintable.map(lambda k, v: (self.cipher_core.encrypt(k), k)) elif mode == 5: return plaintable.map(lambda k, v: (self.cipher_core.encrypt(k), (k, v))) else: raise ValueError("Unsupported mode for crypto_executor map encryption") def map_values_encrypt(self, plaintable, mode): """ Process the input Table as v enc_v if mode == 0 :param plaintable: Table :param mode: int :return: """ if mode == 0: return plaintable.mapValues(lambda v: self.cipher_core.encrypt(v)) else: raise ValueError("Unsupported mode for crypto_executor map_values encryption") def map_decrypt(self, ciphertable, mode): """ Process the input Table as (k, v) (k, dec_k) for mode == 0 (dec_k, -1) for mode == 1 (dec_k, v) for mode == 2 (k, (dec_k, v)) for mode == 3 :param ciphertable: Table :param mode: int :return: Table """ if mode == 0: return 
ciphertable.map(lambda k, v: (k, self.cipher_core.decrypt(k))) elif mode == 1: return ciphertable.map(lambda k, v: (self.cipher_core.decrypt(k), -1)) elif mode == 2: return ciphertable.map(lambda k, v: (self.cipher_core.decrypt(k), v)) elif mode == 3: return ciphertable.map(lambda k, v: (k, (self.cipher_core.decrypt(k), v))) elif mode == 4: return ciphertable.map(lambda k, v: (self.cipher_core.decrypt(k), v)) elif mode == 5: return ciphertable.map(lambda k, v: (self.cipher_core.decrypt(k), v)) else: raise ValueError("Unsupported mode for crypto_executor map decryption") def map_values_decrypt(self, ciphertable, mode): """ Process the input Table as v dec_v if mode == 0 decode(dec_v) if mode == 1 :param ciphertable: Table :param mode: int :return: """ if mode == 0: return ciphertable.mapValues(lambda v: self.cipher_core.decrypt(v)) elif mode == 1: f = functools.partial(self.cipher_core.decrypt, decode_output=True) return ciphertable.mapValues(lambda v: f(v)) else: raise ValueError("Unsupported mode for crypto_executor map_values encryption") def get_nonce(self): return self.cipher_core.get_nonce()
6129
35.058824
108
py
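CryptoExecutor is cipher-agnostic: any object exposing encrypt/decrypt can be plugged in, and the mode flag only decides how each (k, v) pair is rearranged. A dict-backed stand-in showing the mode-2 shape, i.e. (k, v) -> (enc_k, v); all names here are illustrative:

# Dict-backed stand-in for a FATE Table, showing the mode-2 rearrangement
# performed by map_encrypt above.
class ToyTable:
    def __init__(self, d):
        self.d = d

    def map(self, f):
        return ToyTable(dict(f(k, v) for k, v in self.d.items()))


class ToyCipher:
    def encrypt(self, k):
        return f"enc({k})"


def map_encrypt(cipher, table, mode):
    if mode == 2:
        return table.map(lambda k, v: (cipher.encrypt(k), v))
    raise ValueError("only mode 2 in this sketch")


t = ToyTable({"alice": 1, "bob": 2})
assert map_encrypt(ToyCipher(), t, mode=2).d == {"enc(alice)": 1, "enc(bob)": 2}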
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
663
33.947368
75
py
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/aes_encryption.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from Cryptodome.Cipher import AES from federatedml.secureprotol.symmetric_encryption.symmetric_encryption import SymmetricKey from federatedml.util import conversion class AESKey(SymmetricKey): """ Note that a key cannot used for both encryption and decryption scenarios """ def __init__(self, key, nonce=None): """ :param key: bytes, must be 16, 24 or 32 bytes long """ super(AESKey, self).__init__() if nonce is None: self.cipher_core = AES.new(key, AES.MODE_EAX) self.nonce = self.cipher_core.nonce # noise, generated by the encryptor, must be provided to the decryptor else: self.cipher_core = AES.new(key, AES.MODE_EAX, nonce=nonce) self.nonce = nonce class AESEncryptKey(AESKey): """ AES encryption scheme Note that the ciphertext size is affected only by that of the plaintext, instead of the key length """ def __init__(self, key): super(AESEncryptKey, self).__init__(key=key) def encrypt(self, plaintext): """ :param plaintext: bytes/int/float/str :return: bytes """ if not isinstance(plaintext, bytes): plaintext = self._all_to_bytes(plaintext) elif isinstance(plaintext, bytes): pass else: raise TypeError("AES encryptor supports bytes/int/float/str") return self.cipher_core.encrypt(plaintext) def get_nonce(self): return self.nonce @staticmethod def _all_to_bytes(message): """ Convert an int/float/str to bytes, e.g., 1.65 -> b'1.65', 'hello -> b'hello' :param message: int/float/str :return: -1 if type error, otherwise str """ if isinstance(message, int) or isinstance(message, float): return conversion.str_to_bytes(str(message)) elif isinstance(message, str): return conversion.str_to_bytes(message) else: return -1 class AESDecryptKey(AESKey): """ AES decryption scheme """ def __init__(self, key, nonce): super(AESDecryptKey, self).__init__(key=key, nonce=nonce) def decrypt(self, ciphertext): """ :param ciphertext: bytes :return: str """ if not isinstance(ciphertext, bytes): raise TypeError("AES decryptor supports bytes only") return conversion.bytes_to_str(self.cipher_core.decrypt(ciphertext))
3153
29.921569
119
py
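The Cryptodome-backed AESKey above delegates everything to AES.new in EAX mode; the nonce generated at encryption time must be handed to the decryptor. Minimal round trip using the same calls the wrapper makes (requires pycryptodomex):

# Minimal EAX round trip with the same calls AESEncryptKey/AESDecryptKey make.
from Cryptodome.Cipher import AES

key = b"0123456789abcdef"  # 16-byte key
enc = AES.new(key, AES.MODE_EAX)
ciphertext = enc.encrypt(b"1.65")
dec = AES.new(key, AES.MODE_EAX, nonce=enc.nonce)
assert dec.decrypt(ciphertext) == b"1.65"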
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/py_aes_core.py
# The MIT License (MIT) # # Copyright (c) 2014 Richard Moore # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # This is a pure-Python implementation of the AES algorithm and AES common # modes of operation. # See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard # Honestly, the best description of the modes of operations are the wonderful # diagrams on Wikipedia. They explain in moments what my words could never # achieve. Hence the inline documentation here is sparer than I'd prefer. # See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation # Also useful, PyCrypto, a crypto library implemented in C with Python bindings: # https://www.dlitz.net/software/pycrypto/ # Supported key sizes: # 128-bit # 192-bit # 256-bit # Supported modes of operation: # ECB - Electronic Codebook # CBC - Cipher-Block Chaining # CFB - Cipher Feedback # OFB - Output Feedback # CTR - Counter # See the README.md for API details and general information. import copy import struct __all__ = ["AES", "AESModeOfOperationCTR", "AESModeOfOperationCBC", "AESModeOfOperationCFB", "AESModeOfOperationECB", "AESModeOfOperationOFB", "AESModesOfOperation", "Counter"] def _compact_word(word): return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3] def _string_to_bytes(text): return list(ord(c) for c in text) def _bytes_to_string(binary): return "".join(chr(b) for b in binary) def _concat_list(a, b): return a + b # Python 3 compatibility try: xrange except Exception: xrange = range # Python 3 supports bytes, which is already an array of integers def _string_to_bytes(text): if isinstance(text, bytes): return text return [ord(c) for c in text] # In Python 3, we return bytes def _bytes_to_string(binary): return bytes(binary) # Python 3 cannot concatenate a list onto a bytes, so we bytes-ify it first def _concat_list(a, b): return a + bytes(b) # Based *largely* on the Rijndael implementation # See: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf class AES(object): '''Encapsulates the AES block cipher. You generally should not need this. 
Use the AESModeOfOperation classes below instead.''' # Number of rounds by keysize number_of_rounds = {16: 10, 24: 12, 32: 14} # Round constant words rcon = [ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91 ] # S-box and Inverse S-box (S is for Substitution) S = [ 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 ] Si =[ 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d ] # Transformations for encryption T1 = [ 0xc66363a5, 0xf87c7c84, 0xee777799, 
0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a ] T2 = [ 0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc, 0x5c683434, 0xf451a5a5, 
0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616 ] T3 = [ 0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384, 0x53f5a653, 
0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16 ] T4 = [ 0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1, 
0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c ] # Transformations for decryption T5 = [ 0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 
0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4, 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 ] T6 = [ 0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 
0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857 ] T7 = [ 0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47, 0xdf599cd2, 0x733f55f2, 
0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8 ] T8 = [ 0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0 ] # Transformations for decryption key expansion U1 = [ 0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 
0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3 ] U2 = [ 0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 
0xe7038f5f, 0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697 ] U3 = [ 0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 
0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46 ] U4 = [ 0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 
0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710, 0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c, 0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358, 0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f, 0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b, 0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7, 0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3, 0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f, 0x548db591, 0x4f9aa883, 0x4697a38d ] def __init__(self, key): if len(key) not in (16, 24, 32): raise ValueError('Invalid key size') rounds = self.number_of_rounds[len(key)] # Encryption round keys self._Ke = [[0] * 4 for i in xrange(rounds + 1)] # Decryption round keys self._Kd = [[0] * 4 for i in xrange(rounds + 1)] round_key_count = (rounds + 1) * 4 KC = len(key) // 4 # Convert the key into ints tk = [ struct.unpack('>i', key[i:i + 4])[0] for i in xrange(0, len(key), 4) ] # Copy values into round key arrays for i in xrange(0, KC): self._Ke[i // 4][i % 4] = tk[i] self._Kd[rounds - (i // 4)][i % 4] = tk[i] # Key expansion (fips-197 section 5.2) rconpointer = 0 t = KC while t < round_key_count: tt = tk[KC - 1] tk[0] ^= ((self.S[(tt >> 16) & 0xFF] << 24) ^ (self.S[(tt >> 8) & 0xFF] << 16) ^ (self.S[ tt & 0xFF] << 8) ^ self.S[(tt >> 24) & 0xFF] ^ (self.rcon[rconpointer] << 24)) rconpointer += 1 if KC != 8: for i in xrange(1, KC): tk[i] ^= tk[i - 1] # Key expansion for 256-bit keys is "slightly different" (fips-197) else: for i in xrange(1, KC // 2): tk[i] ^= tk[i - 1] tt = tk[KC // 2 - 1] tk[KC // 2] ^= (self.S[ tt & 0xFF] ^ (self.S[(tt >> 8) & 0xFF] << 8) ^ (self.S[(tt >> 16) & 0xFF] << 16) ^ (self.S[(tt >> 24) & 0xFF] << 24)) for i in xrange(KC // 2 + 1, KC): tk[i] ^= tk[i - 1] # Copy values into round key arrays j = 0 while j < KC and t < round_key_count: self._Ke[t // 4][t % 4] = tk[j] self._Kd[rounds - (t // 4)][t % 4] = tk[j] j += 1 t += 1 # Inverse-Cipher-ify the decryption round key (fips-197 section 5.3) for r in xrange(1, rounds): for j in xrange(0, 4): tt = self._Kd[r][j] self._Kd[r][j] = (self.U1[(tt >> 24) & 0xFF] ^ self.U2[(tt >> 16) & 0xFF] ^ self.U3[(tt >> 8) & 0xFF] ^ self.U4[ tt & 0xFF]) def encrypt(self, plaintext): 'Encrypt a block of plain text using the AES block cipher.' 
if len(plaintext) != 16: raise ValueError('wrong block length') rounds = len(self._Ke) - 1 (s1, s2, s3) = [1, 2, 3] a = [0, 0, 0, 0] # Convert plaintext to (ints ^ key) t = [(_compact_word(plaintext[4 * i:4 * i + 4]) ^ self._Ke[0][i]) for i in xrange(0, 4)] # Apply round transforms for r in xrange(1, rounds): for i in xrange(0, 4): a[i] = (self.T1[(t[ i ] >> 24) & 0xFF] ^ self.T2[(t[(i + s1) % 4] >> 16) & 0xFF] ^ self.T3[(t[(i + s2) % 4] >> 8) & 0xFF] ^ self.T4[ t[(i + s3) % 4] & 0xFF] ^ self._Ke[r][i]) t = copy.copy(a) # The last round is special result = [ ] for i in xrange(0, 4): tt = self._Ke[rounds][i] result.append((self.S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) result.append((self.S[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) result.append((self.S[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) result.append((self.S[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) return result def decrypt(self, ciphertext): 'Decrypt a block of cipher text using the AES block cipher.' if len(ciphertext) != 16: raise ValueError('wrong block length') rounds = len(self._Kd) - 1 (s1, s2, s3) = [3, 2, 1] a = [0, 0, 0, 0] # Convert ciphertext to (ints ^ key) t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in xrange(0, 4)] # Apply round transforms for r in xrange(1, rounds): for i in xrange(0, 4): a[i] = (self.T5[(t[ i ] >> 24) & 0xFF] ^ self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^ self.T7[(t[(i + s2) % 4] >> 8) & 0xFF] ^ self.T8[ t[(i + s3) % 4] & 0xFF] ^ self._Kd[r][i]) t = copy.copy(a) # The last round is special result = [ ] for i in xrange(0, 4): tt = self._Kd[rounds][i] result.append((self.Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) result.append((self.Si[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) result.append((self.Si[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) return result class Counter(object): '''A counter object for the Counter (CTR) mode of operation. To create a custom counter, you can usually just override the increment method.''' def __init__(self, initial_value = 1): # Convert the value into an array of bytes long self._counter = [ ((initial_value >> i) % 256) for i in xrange(128 - 8, -1, -8) ] value = property(lambda s: s._counter) def increment(self): '''Increment the counter (overflow rolls back to 0).''' for i in xrange(len(self._counter) - 1, -1, -1): self._counter[i] += 1 if self._counter[i] < 256: break # Carry the one self._counter[i] = 0 # Overflow else: self._counter = [ 0 ] * len(self._counter) class AESBlockModeOfOperation(object): '''Super-class for AES modes of operation that require blocks.''' def __init__(self, key): self._aes = AES(key) def decrypt(self, ciphertext): raise Exception('not implemented') def encrypt(self, plaintext): raise Exception('not implemented') class AESStreamModeOfOperation(AESBlockModeOfOperation): '''Super-class for AES modes of operation that are stream-ciphers.''' class AESSegmentModeOfOperation(AESStreamModeOfOperation): '''Super-class for AES modes of operation that segment data.''' segment_bytes = 16 class AESModeOfOperationECB(AESBlockModeOfOperation): '''AES Electronic Codebook Mode of Operation. o Block-cipher, so data must be padded to 16 byte boundaries Security Notes: o This mode is not recommended o Any two identical blocks produce identical encrypted values, exposing data patterns. 
         (See the image of Tux on Wikipedia)

       Also see:

       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Electronic_codebook_.28ECB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.1'''

    name = "Electronic Codebook (ECB)"

    def encrypt(self, plaintext):
        if len(plaintext) != 16:
            raise ValueError('plaintext block must be 16 bytes')

        plaintext = _string_to_bytes(plaintext)
        return _bytes_to_string(self._aes.encrypt(plaintext))

    def decrypt(self, ciphertext):
        if len(ciphertext) != 16:
            raise ValueError('ciphertext block must be 16 bytes')

        ciphertext = _string_to_bytes(ciphertext)
        return _bytes_to_string(self._aes.decrypt(ciphertext))


class AESModeOfOperationCBC(AESBlockModeOfOperation):
    '''AES Cipher-Block Chaining Mode of Operation.

       o The Initialization Vector (IV) must be 16 bytes, and should be
         unpredictable and unique per message
       o Block-cipher, so data must be padded to 16 byte boundaries
       o An incorrect initialization vector will only cause the first
         block to be corrupt; all other blocks will be intact
       o A corrupt bit in the cipher text will cause a block to be
         corrupted, and the next block to be inverted, but all other
         blocks will be intact.

       Security Notes:
       o This method (and CTR) are recommended.

       Also see:

       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher-block_chaining_.28CBC.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.2'''

    name = "Cipher-Block Chaining (CBC)"

    def __init__(self, key, iv = None):
        if iv is None:
            self._last_cipherblock = [ 0 ] * 16
        elif len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')
        else:
            self._last_cipherblock = _string_to_bytes(iv)

        AESBlockModeOfOperation.__init__(self, key)

    def encrypt(self, plaintext):
        if len(plaintext) != 16:
            raise ValueError('plaintext block must be 16 bytes')

        plaintext = _string_to_bytes(plaintext)
        precipherblock = [ (p ^ l) for (p, l) in zip(plaintext, self._last_cipherblock) ]
        self._last_cipherblock = self._aes.encrypt(precipherblock)

        return _bytes_to_string(self._last_cipherblock)

    def decrypt(self, ciphertext):
        if len(ciphertext) != 16:
            raise ValueError('ciphertext block must be 16 bytes')

        cipherblock = _string_to_bytes(ciphertext)
        plaintext = [ (p ^ l) for (p, l) in zip(self._aes.decrypt(cipherblock), self._last_cipherblock) ]
        self._last_cipherblock = cipherblock

        return _bytes_to_string(plaintext)


class AESModeOfOperationCFB(AESSegmentModeOfOperation):
    '''AES Cipher Feedback Mode of Operation.
       o A stream-cipher, so input does not need to be padded to blocks,
         but does need to be padded to segment_size

       Also see:

       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_feedback_.28CFB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.3'''

    name = "Cipher Feedback (CFB)"

    def __init__(self, key, iv, segment_size = 1):
        if segment_size == 0: segment_size = 1

        if iv is None:
            self._shift_register = [ 0 ] * 16
        elif len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')
        else:
            self._shift_register = _string_to_bytes(iv)

        self._segment_bytes = segment_size

        AESBlockModeOfOperation.__init__(self, key)

    segment_bytes = property(lambda s: s._segment_bytes)

    def encrypt(self, plaintext):
        if len(plaintext) % self._segment_bytes != 0:
            raise ValueError('plaintext block must be a multiple of segment_size')

        plaintext = _string_to_bytes(plaintext)

        # Break block into segments
        encrypted = [ ]
        for i in xrange(0, len(plaintext), self._segment_bytes):
            plaintext_segment = plaintext[i: i + self._segment_bytes]
            xor_segment = self._aes.encrypt(self._shift_register)[:len(plaintext_segment)]
            cipher_segment = [ (p ^ x) for (p, x) in zip(plaintext_segment, xor_segment) ]

            # Shift the top bits out and the ciphertext in
            self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment)

            encrypted.extend(cipher_segment)

        return _bytes_to_string(encrypted)

    def decrypt(self, ciphertext):
        if len(ciphertext) % self._segment_bytes != 0:
            raise ValueError('ciphertext block must be a multiple of segment_size')

        ciphertext = _string_to_bytes(ciphertext)

        # Break block into segments
        decrypted = [ ]
        for i in xrange(0, len(ciphertext), self._segment_bytes):
            cipher_segment = ciphertext[i: i + self._segment_bytes]
            xor_segment = self._aes.encrypt(self._shift_register)[:len(cipher_segment)]
            plaintext_segment = [ (p ^ x) for (p, x) in zip(cipher_segment, xor_segment) ]

            # Shift the top bits out and the ciphertext in
            self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment)

            decrypted.extend(plaintext_segment)

        return _bytes_to_string(decrypted)


class AESModeOfOperationOFB(AESStreamModeOfOperation):
    '''AES Output Feedback Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         allowing arbitrary length data.
       o A bit twiddled in the cipher text twiddles the same bit in the
         plain text, which can be useful for error correction techniques.
       Also see:

       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Output_feedback_.28OFB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.4'''

    name = "Output Feedback (OFB)"

    def __init__(self, key, iv = None):
        if iv is None:
            self._last_precipherblock = [ 0 ] * 16
        elif len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')
        else:
            self._last_precipherblock = _string_to_bytes(iv)

        self._remaining_block = [ ]

        AESBlockModeOfOperation.__init__(self, key)

    def encrypt(self, plaintext):
        encrypted = [ ]
        for p in _string_to_bytes(plaintext):
            if len(self._remaining_block) == 0:
                self._remaining_block = self._aes.encrypt(self._last_precipherblock)
                self._last_precipherblock = [ ]
            precipherbyte = self._remaining_block.pop(0)
            self._last_precipherblock.append(precipherbyte)
            cipherbyte = p ^ precipherbyte
            encrypted.append(cipherbyte)

        return _bytes_to_string(encrypted)

    def decrypt(self, ciphertext):
        # AES-OFB is symmetric
        return self.encrypt(ciphertext)


class AESModeOfOperationCTR(AESStreamModeOfOperation):
    '''AES Counter Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         allowing arbitrary length data.
       o The counter must produce a value the same size as the block size
         (16 bytes), regardless of the key size
       o Each block is independent of the others, so a corrupt byte will
         not damage future blocks.
       o Each block has a unique counter value associated with it, which
         contributes to the encrypted value, so no data patterns are
         leaked.
       o Also known as: Counter Mode (CM), Integer Counter Mode (ICM) and
         Segmented Integer Counter (SIC)

       Security Notes:
       o This method (and CBC) are recommended.
       o Each message block is associated with a counter value which must be
         unique for ALL messages with the same key. Otherwise security may be
         compromised.

       Also see:

       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Counter_.28CTR.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.5
         and Appendix B for managing the initial counter'''

    name = "Counter (CTR)"

    def __init__(self, key, counter = None):
        AESBlockModeOfOperation.__init__(self, key)

        if counter is None:
            counter = Counter()

        self._counter = counter
        self._remaining_counter = [ ]

    def encrypt(self, plaintext):
        while len(self._remaining_counter) < len(plaintext):
            self._remaining_counter += self._aes.encrypt(self._counter.value)
            self._counter.increment()

        plaintext = _string_to_bytes(plaintext)

        encrypted = [ (p ^ c) for (p, c) in zip(plaintext, self._remaining_counter) ]
        self._remaining_counter = self._remaining_counter[len(encrypted):]

        return _bytes_to_string(encrypted)

    def decrypt(self, crypttext):
        # AES-CTR is symmetric
        return self.encrypt(crypttext)


# Simple lookup table for each mode
AESModesOfOperation = dict(
    ctr = AESModeOfOperationCTR,
    cbc = AESModeOfOperationCBC,
    cfb = AESModeOfOperationCFB,
    ecb = AESModeOfOperationECB,
    ofb = AESModeOfOperationOFB,
)
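
# A minimal usage sketch (an editorial illustration, not part of the original
# module): a CTR-mode round trip using only the classes defined above. The
# 32-byte key and the plaintext are hypothetical values chosen for this demo;
# encryption and decryption must start from counters with the same initial
# value.
if __name__ == "__main__":
    key = b"This_32_byte_key_is_for_demo_use"  # 32 bytes -> AES-256 (hypothetical key)
    plaintext = b"Arbitrary-length data, no padding needed in CTR mode."

    encrypter = AESModeOfOperationCTR(key, counter=Counter(initial_value=1))
    ciphertext = encrypter.encrypt(plaintext)

    # A fresh mode object (and hence a fresh counter) is required to decrypt
    decrypter = AESModeOfOperationCTR(key, counter=Counter(initial_value=1))
    assert decrypter.decrypt(ciphertext) == plaintext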
60,310
101.222034
3,083
py
FATE
FATE-master/python/federatedml/secureprotol/symmetric_encryption/pohlig_hellman_encryption.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#  Copyright 2019 The FATE Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#

import random

from federatedml.secureprotol.gmpy_math import is_prime, invert, gcd, powmod
from federatedml.secureprotol.symmetric_encryption.symmetric_encryption import SymmetricKey, SymmetricCiphertext
from federatedml.secureprotol.diffie_hellman import DiffieHellman
from federatedml.util import conversion


class PohligHellmanCipherKey(SymmetricKey):
    """
    A commutative encryption scheme inspired by Pohlig, Stephen, and Martin Hellman.
    "An improved algorithm for computing logarithms over GF(p) and its cryptographic significance." 1978
    Enc(x) = x^a mod p, with public knowledge p being a prime and satisfying that (p - 1) / 2 is also a prime
    Dec(y) = y^(a^(-1) mod phi(p)) mod p
    """

    def __init__(self, mod_base, exponent=None):
        """
        :param exponent: int
        :param mod_base: int
        """
        super(PohligHellmanCipherKey, self).__init__()
        self.mod_base = mod_base  # p
        if exponent is not None and gcd(exponent, mod_base - 1) != 1:
            raise ValueError("In Pohlig-Hellman, the exponent and the totient of the modulo base must be coprime")
        self.exponent = exponent  # a
        self.exponent_inverse = None if exponent is None else invert(exponent, mod_base - 1)

    @staticmethod
    def generate_key(key_size=1024):
        """
        Generate a self-typed object with public mod_base and vacant exponent
        :param key_size: int
        :return: PohligHellmanCipherKey
        """
        mod_base, _ = DiffieHellman.generate_oakley_group_key_pair(num_bits=key_size)
        return PohligHellmanCipherKey(mod_base)

    def init(self):
        """
        Init self.exponent
        :return:
        """
        while True:
            self.exponent = random.randint(2, self.mod_base)
            if gcd(self.exponent, self.mod_base - 1) == 1:
                self.exponent_inverse = invert(self.exponent, self.mod_base - 1)
                break

    def encrypt(self, plaintext):
        if isinstance(plaintext, list):
            return self.encrypt_list(plaintext)

        return self.encrypt_single_val(plaintext)

    def encrypt_single_val(self, plaintext):
        """
        :param plaintext: int >= 0 / str / PohligHellmanCiphertext
        :return: PohligHellmanCiphertext
        """
        if isinstance(plaintext, str):
            plaintext = conversion.str_to_int(plaintext)
        elif isinstance(plaintext, PohligHellmanCiphertext):
            plaintext = plaintext.message
        elif not isinstance(plaintext, int):
            plaintext = conversion.str_to_int(str(plaintext))

        ciphertext = powmod(plaintext, self.exponent, self.mod_base)
        return PohligHellmanCiphertext(ciphertext)

    def encrypt_list(self, list_plaintext):
        ciphertext = [self.encrypt_single_val(p) for p in list_plaintext]
        return ciphertext

    def decrypt(self, ciphertext, decode_output=False):
        if isinstance(ciphertext, list):
            return self.decrypt_list(ciphertext, decode_output)

        return self.decrypt_single_val(ciphertext, decode_output)

    def decrypt_single_val(self, ciphertext, decode_output=False):
        """
        If decode, then call int_to_str() method to decode the output plaintext
        :param ciphertext: PohligHellmanCiphertext
        :param decode_output: bool
        :return: PohligHellmanCiphertext / str
        """
        if isinstance(ciphertext, PohligHellmanCiphertext):
            ciphertext = ciphertext.message
        elif isinstance(ciphertext, str):
            ciphertext = conversion.str_to_int(ciphertext)

        if decode_output:
            return conversion.int_to_str(powmod(ciphertext, self.exponent_inverse, self.mod_base))
        else:
            return PohligHellmanCiphertext(powmod(ciphertext, self.exponent_inverse, self.mod_base))

    def decrypt_list(self, ciphertext, decode_output):
        decrypt_result = [self.decrypt_single_val(c, decode_output) for c in ciphertext]
        return decrypt_result


class PohligHellmanCiphertext(SymmetricCiphertext):
    """
    A Pohlig-Hellman ciphertext, i.e., an integer modulo p
    """

    def __init__(self, message):
        super(PohligHellmanCiphertext, self).__init__()
        self.message = message

    def __hash__(self):
        return self.message.__hash__()

    def __eq__(self, other):
        if not isinstance(other, PohligHellmanCiphertext):
            raise TypeError("Can only compare two PohligHellmanCiphertext objects")
        return self.message == other.message
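
# A minimal commutativity sketch (an editorial illustration, not part of the
# original module). Both parties share the same public modulus p but hold
# private exponents, so Enc_a(Enc_b(x)) == Enc_b(Enc_a(x)); the message value
# below is hypothetical and must be smaller than p.
if __name__ == "__main__":
    shared = PohligHellmanCipherKey.generate_key(key_size=1024)  # public p only
    key_a = PohligHellmanCipherKey(shared.mod_base)
    key_a.init()  # draws a private exponent coprime to p - 1
    key_b = PohligHellmanCipherKey(shared.mod_base)
    key_b.init()

    message = 123456789
    ab = key_b.encrypt(key_a.encrypt(message))
    ba = key_a.encrypt(key_b.encrypt(message))
    assert ab == ba  # the order of encryption does not matter

    # Peeling the layers off in either order recovers the message
    assert key_a.decrypt(key_b.decrypt(ab)).message == message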
5,223
36.049645
112
py
FATE
FATE-master/python/federatedml/secure_information_retrieval/secure_information_retrieval_host.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.secure_information_retrieval.base_secure_information_retrieval import \ BaseSecureInformationRetrieval from federatedml.param.sir_param import SecureInformationRetrievalParam from federatedml.param.intersect_param import IntersectParam from federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer_sender import \ HauckObliviousTransferSender from federatedml.secureprotol.symmetric_encryption.py_aes_encryption import AESEncryptKey from federatedml.secureprotol.symmetric_encryption.cryptor_executor import CryptoExecutor from federatedml.statistic import data_overview from federatedml.statistic.intersect import DhIntersectionHost from federatedml.util import consts, abnormal_detection, LOGGER MODEL_PARAM_NAME = 'SecureInformationRetrievalParam' MODEL_META_NAME = 'SecureInformationRetrievalMeta' class SecureInformationRetrievalHost(BaseSecureInformationRetrieval): def __init__(self): super(SecureInformationRetrievalHost, self).__init__() self.oblivious_transfer = None self.target_indexes = None def _init_model(self, param: SecureInformationRetrievalParam): self._init_base_model(param) self.intersection_obj = DhIntersectionHost() self.intersection_obj.role = consts.HOST intersect_param = IntersectParam(dh_params=self.dh_params) self.intersection_obj.load_params(intersect_param) self.intersection_obj.host_party_id_list = self.component_properties.host_party_idlist self.intersection_obj.guest_party_id = self.component_properties.guest_partyid if self.model_param.oblivious_transfer_protocol == consts.OT_HAUCK.lower(): self.oblivious_transfer = HauckObliviousTransferSender() else: raise ValueError("SIR only supports Hauck's OT") def fit(self, data_inst): """ :param data_inst: Table :return: """ # LOGGER.info("data count = {}".format(data_inst.count())) abnormal_detection.empty_table_detection(data_inst) self._update_target_indexes(data_inst.schema) match_data = data_inst if data_overview.check_with_inst_id(data_inst): match_data = self._recover_match_id(data_inst) # 0. Raw retrieval if self.model_param.raw_retrieval or self.security_level == 0: LOGGER.info("enter raw information retrieval host") # abnormal_detection.empty_table_detection(data_inst) self._raw_information_retrieval(match_data) self._display_result(block_num='N/A') self._sync_coverage(data_inst) return data_inst # 1. Data pre-processing LOGGER.info("enter secure information retrieval host") # abnormal_detection.empty_table_detection(data_inst) self._parse_security_level(match_data) if not self._check_oblivious_transfer_condition(): self._failure_response() # 2. Guest find intersection self.intersection_obj.get_intersect_doubly_encrypted_id(match_data) id_list_host_first = self.intersection_obj.id_list_local_first # 3. Get the re-indexed doubly encrypted ID from guest id_blocks = self._iteratively_get_id_blocks() # 4. 
Restore value for the intersection id_blocks = _restore_value(id_list_host_first, id_blocks, self.target_indexes, self.need_label) # List[(Ei, val)] LOGGER.info("interested values restored") # 8. Execute OT as sender LOGGER.info("enter oblivious transfer protocol as a sender") key_list = self.oblivious_transfer.key_derivation(self.block_num) LOGGER.info("oblivious transfer key derived") # 9. Encrypt and transmit self._non_committing_encrypt(id_blocks, key_list) # List[(Ei, Eval)] LOGGER.info("non-committing encryption and transmission completed") # 10. Slack self._sync_coverage(data_inst) self._display_result() LOGGER.info("secure information retrieval finished") return data_inst def _sync_nonce_list(self, nonce, time): self.transfer_variable.nonce_list.remote(nonce, suffix=(time,), role=consts.GUEST, idx=0) LOGGER.info("sent {}-th nonce to guest".format(time)) def _transmit_value_ciphertext(self, id_block, time): self.transfer_variable.id_blocks_ciphertext.remote(id_block, suffix=(time,), role=consts.GUEST, idx=0) LOGGER.info("sent {}-th id block ciphertext to guest".format(time)) def _non_committing_encrypt(self, id_blocks, key_list): """ Use non-committing cipher to encrypt id blocks :param id_blocks: List[(Ei, val)] :param key_list: List[ObliviousTransferKey] :return: """ for i in range(self.block_num): if self.model_param.non_committing_encryption == consts.AES.lower(): aes_key = CryptoExecutor(AESEncryptKey(key_list[i].key)) else: raise ValueError("only supports AES cipher for non-committing encryption") self._transmit_value_ciphertext(aes_key.map_values_encrypt(id_blocks[i], mode=0), time=i) self._sync_nonce_list(aes_key.get_nonce(), time=i) block_confirm = self.transfer_variable.block_confirm.get(idx=0, suffix=(i,)) if block_confirm: continue def _update_target_indexes(self, schema): self.need_label = self._check_need_label() if self.need_label: return header = schema["header"] target_indexes = [] for col_name in self.target_cols: try: i = header.index(col_name) target_indexes.append(i) except ValueError: raise ValueError(f"{col_name} does not exist in table header. 
Please check.") self.target_indexes = target_indexes @staticmethod def extract_value(instance, target_indexes, need_label): if need_label: return instance.label features = [instance.features[i] for i in target_indexes] return features def _sync_natural_indexation(self, id_list=None, time=None): id_list_natural_indexation = self.transfer_variable.natural_indexation.get(idx=0, suffix=(time,)) LOGGER.info(f"got naturally indexed block {time} from guest") return id_list_natural_indexation def _parse_security_level(self, data_instance): self._sync_block_num() def _sync_block_num(self): self.block_num = self.transfer_variable.block_num.get(idx=0) LOGGER.info("got block num {} from guest".format(self.block_num)) def _raw_information_retrieval(self, data_instance): id_list_guest = self.transfer_variable.raw_id_list.get(idx=0) LOGGER.info("got raw id list from guest") target_indexes, need_label = self.target_indexes, self.need_label id_intersect = data_instance.join(id_list_guest, lambda v, u: SecureInformationRetrievalHost.extract_value(v, target_indexes, need_label)) self.transfer_variable.raw_value_list.remote(id_intersect, role=consts.GUEST, idx=0) LOGGER.info("sent raw value list to guest") # self._sync_coverage(data_instance) def _sync_coverage(self, data_instance): self.coverage = self.transfer_variable.coverage.get(idx=0) / data_instance.count() LOGGER.info(f"got coverage {self.coverage} from guest") def _iteratively_get_id_blocks(self): """ :return: List[Table] """ id_blocks = [None for _ in range(self.block_num)] for i in range(self.block_num): id_block = self._sync_natural_indexation(time=i) # get List[(Ei, -1)] id_blocks[i] = id_block return id_blocks def _restore_value(id_list_host, id_blocks, target_indexes, need_label): """ :param id_list_host: (h, (Eh, Instance)) :param id_blocks: List[(Ei, -1)] :return: """ id_value_blocks = [] for i in range(len(id_blocks)): restored_table = id_list_host.join(id_blocks[i], lambda v, u: SecureInformationRetrievalHost.extract_value(v[1], target_indexes, need_label)) id_value_blocks.append(restored_table) return id_value_blocks
10,186
42.348936
115
py
FATE
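The host-side flow above hinges on one idea: each of the N restored id blocks is encrypted under its own OT-derived key, so the guest, who obtained exactly one key through 1-N oblivious transfer, can open exactly one block. A minimal sketch of that pattern, with a SHA-256 XOR keystream standing in for the AES cipher wrapped by CryptoExecutor (key_list and id_blocks here are illustrative stand-ins, not the real FATE objects):

import hashlib

def toy_encrypt(key: bytes, value: bytes) -> bytes:
    # XOR against a hash-derived keystream; symmetric, so it also decrypts
    stream = hashlib.sha256(key).digest()
    return bytes(b ^ stream[i % len(stream)] for i, b in enumerate(value))

key_list = [b"key-0", b"key-1", b"key-2"]          # one key per block, from key derivation
id_blocks = [b"block-0", b"block-1", b"block-2"]   # serialized (Ei, val) blocks
ciphertexts = [toy_encrypt(k, blk) for k, blk in zip(key_list, id_blocks)]

target = 1  # the only index whose key the guest holds
assert toy_encrypt(key_list[target], ciphertexts[target]) == id_blocks[target]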
FATE-master/python/federatedml/secure_information_retrieval/base_secure_information_retrieval.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.model_base import Metric, MetricMeta from federatedml.model_base import ModelBase from federatedml.param.sir_param import SecureInformationRetrievalParam from federatedml.protobuf.generated import sir_meta_pb2, sir_param_pb2 from federatedml.statistic.intersect.match_id_process import MatchIDIntersect from federatedml.transfer_variable.transfer_class.secure_information_retrieval_transfer_variable import \ SecureInformationRetrievalTransferVariable from federatedml.util import consts, abnormal_detection MODEL_PARAM_NAME = 'SecureInformationRetrievalParam' MODEL_META_NAME = 'SecureInformationRetrievalMeta' class BaseSecureInformationRetrieval(ModelBase): """ """ def __init__(self): super(BaseSecureInformationRetrieval, self).__init__() self.model_param = SecureInformationRetrievalParam() self.security_level = None self.commutative_cipher = None self.transfer_variable = None self.block_num = None # N in 1-N OT self.coverage = None # the percentage of transactions whose values are successfully retrieved self.dh_params = None self.intersection_obj = None self.proc_obj = None self.with_inst_id = None self.need_label = False self.target_cols = None # For callback self.metric_name = "sir" self.metric_namespace = "train" self.metric_type = "SIR" def _init_base_model(self, param: SecureInformationRetrievalParam): self.transfer_variable = SecureInformationRetrievalTransferVariable() self._init_transfer_variable() self.model_param = param self.security_level = self.model_param.security_level self.dh_params = self.model_param.dh_params self.target_cols = self.model_param.target_cols def _init_transfer_variable(self): self.transfer_variable.natural_indexation.disable_auto_clean() self.transfer_variable.id_blocks_ciphertext.disable_auto_clean() @staticmethod def _abnormal_detection(data_instances): """ Make sure input data_instances is valid. 
""" abnormal_detection.empty_table_detection(data_instances) abnormal_detection.empty_feature_detection(data_instances) """ @staticmethod def record_original_id(k, v): if isinstance(k, str): restored_id = conversion.int_to_str(conversion.str_to_int(k)) else: restored_id = k return (restored_id, k) """ def _check_need_label(self): return len(self.target_cols) == 0 def _recover_match_id(self, data_instance): self.proc_obj = MatchIDIntersect(sample_id_generator=consts.GUEST, role=self.intersection_obj.role) self.proc_obj.new_join_id = False self.proc_obj.use_sample_id() match_data = self.proc_obj.recover(data=data_instance) return match_data def _restore_sample_id(self, data_instances): restore_data = self.proc_obj.expand(data_instances, owner_only=True) return restore_data def _raw_information_retrieval(self, data_instance): """ If security_level == 0, then perform raw information retrieval :param data_instance: :return: """ pass def _parse_security_level(self, data_instance): """ Cooperatively parse the security level index :param data_instance: :return: """ pass def _sync_natural_index(self, id_list_arr): """ guest -> host :param id_list_arr: :return: """ def _sync_natural_indexation(self, id_list, time): """ guest -> host :param id_list: :param time :return: """ def _sync_block_num(self): """ guest -> host :param :return: """ def _transmit_value_ciphertext(self, id_block, time): """ host -> guest :param id_block: :param time: int :return: """ def _check_oblivious_transfer_condition(self): """ 1-N OT with N no smaller than 2 is supported :return: """ return self.block_num >= 2 def _failure_response(self): """ If even 1-2 OT cannot be performed, make failure response :return: """ raise ValueError("Cannot perform even 1-2 OT, recommend use raw retrieval") def _sync_coverage(self, data_instance): """ guest -> host :param data_instance: :return: """ pass def _sync_nonce_list(self, nonce, time): """ host -> guest :param nonce: :return: """ pass def export_model(self): if self.model_output is not None: return self.model_output meta_obj = self._get_meta() param_obj = self._get_param() result = { MODEL_META_NAME: meta_obj, MODEL_PARAM_NAME: param_obj } self.model_output = result return result def _get_meta(self): return sir_meta_pb2.SecureInformationRetrievalMeta( security_level=self.security_level, oblivious_transfer_protocol=self.model_param.oblivious_transfer_protocol, commutative_encryption=self.model_param.commutative_encryption, non_committing_encryption=self.model_param.non_committing_encryption, key_size=self.model_param.key_size, raw_retrieval=self.model_param.raw_retrieval ) def _get_param(self): return sir_param_pb2.SecureInformationRetrievalParam( coverage=self.coverage, block_num=self.block_num ) def _display_result(self, block_num=None): if block_num is None: self.callback_metric(metric_name=self.metric_name, metric_namespace=self.metric_namespace, metric_data=[Metric("Coverage", self.coverage), Metric("Block number", self.block_num)]) self.tracker.set_metric_meta(metric_namespace=self.metric_namespace, metric_name=self.metric_name, metric_meta=MetricMeta(self.metric_name, metric_type="INTERSECTION")) else: self.callback_metric(metric_name=self.metric_name, metric_namespace=self.metric_namespace, metric_data=[Metric("Coverage", self.coverage), Metric("Block number", block_num)]) self.tracker.set_metric_meta(metric_namespace=self.metric_namespace, metric_name=self.metric_name, metric_meta=MetricMeta(self.metric_name, metric_type="INTERSECTION")) """ @staticmethod def 
_set_schema(data_instance, id_name=None, label_name=None, feature_name=None): if id_name is not None: data_instance.schema['sid_name'] = id_name if label_name is not None: data_instance.schema['label_name'] = label_name if feature_name is not None: data_instance.schema['header'] = feature_name return data_instance @staticmethod def log_table(tab, mode=0): # tab_col = tab.collect() if mode == 0: LOGGER.debug("mode 0: k, v") elif mode == 1: LOGGER.debug("mode 1: k, v.label") elif mode == 2: LOGGER.debug("mode 2: k, v.id, v.label") elif mode == 3: LOGGER.debug("mode 3: k, v.id, v.features, v.label") @staticmethod def log_schema(tab): LOGGER.debug("tab schema = {}".format(tab.schema)) """
8,594
32.313953
110
py
FATE
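Two invariants from this base class can be stated directly: _check_oblivious_transfer_condition requires at least two blocks (1-2 OT is the smallest instance), and coverage is the fraction of rows whose values were actually retrieved, reported through the Coverage metric. A tiny sketch with made-up counts:

block_num = 45
assert block_num >= 2, "Cannot perform even 1-2 OT, recommend use raw retrieval"

retrieved_count, total_count = 980, 1000
coverage = retrieved_count / total_count   # reported via the Coverage metric
print(coverage)                            # 0.98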
FATE-master/python/federatedml/secure_information_retrieval/secure_information_retrieval_guest.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import random import numpy as np from federatedml.feature.instance import Instance from federatedml.secure_information_retrieval.base_secure_information_retrieval import \ BaseSecureInformationRetrieval from federatedml.param.sir_param import SecureInformationRetrievalParam from federatedml.param.intersect_param import IntersectParam from federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer_receiver import \ HauckObliviousTransferReceiver from federatedml.secureprotol.symmetric_encryption.py_aes_encryption import AESDecryptKey from federatedml.secureprotol.symmetric_encryption.cryptor_executor import CryptoExecutor from federatedml.statistic import data_overview # from federatedml.secureprotol.symmetric_encryption.pohlig_hellman_encryption import PohligHellmanCipherKey from federatedml.statistic.intersect import DhIntersectionGuest from federatedml.util import consts, abnormal_detection, LOGGER MODEL_PARAM_NAME = 'SecureInformationRetrievalParam' MODEL_META_NAME = 'SecureInformationRetrievalMeta' class SecureInformationRetrievalGuest(BaseSecureInformationRetrieval): def __init__(self): super(SecureInformationRetrievalGuest, self).__init__() self.oblivious_transfer = None self.target_block_index = None # k-th block message is expected to obtain, with k in {0, 1, ..., N-1} # The following parameter restricts the range of the block number self.security_scale = np.log(500) # block_num = 2 * exp(security_scale * security_level) def _init_model(self, param: SecureInformationRetrievalParam): self._init_base_model(param) self.intersection_obj = DhIntersectionGuest() self.intersection_obj.role = consts.GUEST intersect_param = IntersectParam(dh_params=self.dh_params) self.intersection_obj.load_params(intersect_param) self.intersection_obj.host_party_id_list = self.component_properties.host_party_idlist self.intersection_obj.guest_party_id = self.component_properties.guest_partyid if self.model_param.oblivious_transfer_protocol == consts.OT_HAUCK.lower(): self.oblivious_transfer = HauckObliviousTransferReceiver() else: raise ValueError("SIR only supports Hauck's OT") def fit(self, data_inst): """ :param data_inst: Table, only the key column of the Table is used :return: """ abnormal_detection.empty_table_detection(data_inst) # 0. Raw retrieval match_data = data_inst self.with_inst_id = data_overview.check_with_inst_id(data_inst) if self.with_inst_id: match_data = self._recover_match_id(data_inst) if self.model_param.raw_retrieval or self.security_level == 0: LOGGER.info("enter raw information retrieval guest") # abnormal_detection.empty_table_detection(data_inst) data_output = self._raw_information_retrieval(match_data) self._display_result(block_num='N/A') if self.with_inst_id: data_output = self._restore_sample_id(data_output) data_output = self._compensate_set_difference(data_inst, data_output) return data_output # 1. 
Data pre-processing LOGGER.info("enter secure information retrieval guest") self.need_label = self._check_need_label() # abnormal_detection.empty_table_detection(data_inst) self._parse_security_level(match_data) if not self._check_oblivious_transfer_condition(): self._failure_response() # 2. Find intersection id_list_intersect = self.intersection_obj.get_intersect_doubly_encrypted_id(match_data)[0] id_list_host_second_only = self.intersection_obj.id_list_remote_second[0] # 3. Send the re-indexed doubly encrypted ID to host self._fake_blocks(id_list_intersect, id_list_host_second_only) # List[(EEi, -1)] LOGGER.info("faked blocks for obfuscation") # 4. Wait for host to restore value for the intersection LOGGER.info("waiting for host to restore interested values for the intersection") # 5. Execute OT as receiver LOGGER.info("enter oblivious transfer protocol as a receiver") target_key = self.oblivious_transfer.key_derivation(self.target_block_index) LOGGER.info("oblivious transfer key derived") # 6. Wait for host to encrypt and transmit, and then receive the encrypted interested values id_block_ciphertext, nonce = self._iteratively_get_encrypted_values() LOGGER.info("got encrypted interested values and nonce") target_block_cipher_id = self._non_committing_decrypt( id_block_ciphertext, nonce, target_key) # (Eright, val) LOGGER.info("used the right key to decrypt the wanted values") # 7. Get (EEright, instance) target_block_cipher_cipher_id = self.intersection_obj.map_raw_id_to_encrypt_id(target_block_cipher_id, id_list_host_second_only, keep_value=True) # 8. Get (EEright, Eright_guest) id_local_first = self.intersection_obj.id_local_first # (Eright_guest, id) id_list_local_second = self.intersection_obj.id_list_local_second[0] # (EEright, Eright_guest) # 9. 
Merge result # (Eright_guest, instance) id_list_cipher = self._merge_instance(target_block_cipher_cipher_id, id_list_local_second, self.need_label) data_output = self._merge(id_list_cipher, id_local_first) if self.with_inst_id: data_output = self._restore_sample_id(data_output) data_output = self._compensate_set_difference(data_inst, data_output) self._display_result() LOGGER.info("secure information retrieval finished") return data_output def _sync_nonce_list(self, nonce=None, time=0): nonce_list_result = self.transfer_variable.nonce_list.get(idx=0, suffix=(time,)) LOGGER.info("Got {}-th nonce list from host".format(time)) return nonce_list_result @staticmethod def _merge_instance(id_map1, id_map2, need_label): """ :param id_map1: (a, b) :param id_map2: (a, c) :return: (c, b) """ merge_table = id_map1.join(id_map2, lambda v, u: (u, v)) if need_label: return merge_table.map(lambda k, v: (v[0], Instance(label=v[1], features=[]))) else: return merge_table.map(lambda k, v: (v[0], Instance(features=v[1]))) @staticmethod def _merge(id_map1, id_map2): """ :param id_map1: (a, b) :param id_map2: (a, c) :return: (c, b) """ merge_table = id_map1.join(id_map2, lambda v, u: (u, v)) return merge_table.map(lambda k, v: (v[0], v[1])) def _composite_decrypt(self, id_list): """ k, v -> k, Dv :param id_list: :return: """ commutative_cipher = self.intersection_obj.commutative_cipher[0] return commutative_cipher.map_values_decrypt(id_list, mode=1) def _composite_encrypt(self, id_list): """ k, v -> Ek, v :param id_list: :return: """ commutative_cipher = self.intersection_obj.commutative_cipher[0] return commutative_cipher.map_encrypt(id_list, mode=2) def _decrypt_value(self, id_list): """ :param id_list: :return: """ def _non_committing_decrypt(self, id_block_ciphertext, nonce, target_key): """ Use non-committing cipher to encrypt id blocks :param id_block_ciphertext: (Ei, Eval) :param nonce: bytes :param target_key: ObliviousTransferKey :return: """ if self.model_param.non_committing_encryption == consts.AES.lower(): aes_key = CryptoExecutor(AESDecryptKey(key=target_key.key, nonce=nonce)) else: raise ValueError("only supports AES cipher for non-committing decryption") return aes_key.map_values_decrypt(id_block_ciphertext, mode=0) def _transmit_value_ciphertext(self, id_block=None, time=0): id_blocks = self.transfer_variable.id_blocks_ciphertext.get(idx=0, suffix=(time,)) LOGGER.info("got {}-th id block ciphertext from host".format(time)) return id_blocks def _decrypt_id_list(self, id_list): """ :param id_list: (EEe, v) :return: (Ee, v) """ commutative_cipher = self.intersection_obj.commutative_cipher[0] return commutative_cipher.map_decrypt(id_list, mode=2) def _sync_natural_indexation(self, id_list, time): self.transfer_variable.natural_indexation.remote(id_list, suffix=(time,), role=consts.HOST, idx=0) LOGGER.info("sent naturally indexed block {} to host".format(time)) def _fake_blocks(self, id_list_intersect, id_list_host, replacement=True): """ Randomly sample self.block_num - 1 blocks with the same size as id_list_intersect from id_list_host :param id_list_intersect: Table in the form (intersect_ENC_id, -1) :param id_list_host: Table in the form (ENC_id, -1) :param replacement: bool :return: id_list_array: List[Table] with disjoint (ENC_id, -1) Tables """ intersect_count = id_list_intersect.count() self.target_block_index = random.SystemRandom().randint(0, self.block_num - 1) for i in range(self.block_num): if i == self.target_block_index: id_block = id_list_intersect.join(id_list_host, lambda x, y: y) 
else: id_block = self.take_exact_sample(data_inst=id_list_host, exact_num=intersect_count) if not replacement: id_list_host = id_list_host.subtractByKey(id_block) # id_block = self._decrypt_id_list(id_block) id_block = id_block.map(lambda k, v: (v, -1)) self._sync_natural_indexation(id_block, time=i) @staticmethod def _id_list_array_indexation(id_list_array): """ :param id_list_array: List(Table) :return: """ for i in range(len(id_list_array)): # keep the mapped table and bind i eagerly; the original discarded the mapValues result id_list_array[i] = id_list_array[i].mapValues(lambda v, idx=i: idx) return id_list_array def _parse_security_level(self, data_instance): # data_count_guest = data_instance.count() # block_num = 2 * exp(scale * level) self.block_num = int(np.ceil(2 * np.exp(self.security_scale * self.security_level))) LOGGER.info("parsed block num = {}".format(self.block_num)) self._sync_block_num() def _raw_information_retrieval(self, data_instance): self.transfer_variable.raw_id_list.remote(data_instance.map(lambda k, v: (k, -1)), role=consts.HOST, idx=0) LOGGER.info("sent raw id list to host") data_output = self.transfer_variable.raw_value_list.get(idx=0) LOGGER.info("got raw value list from host") # data_output = self._compensate_set_difference(data_instance, data_output) return data_output @staticmethod def take_exact_sample(data_inst, exact_num): """ Sample an exact number of instances from a Table :param data_inst: Table :param exact_num: int :return: Table """ sample_inst = data_inst.sample(num=exact_num) return sample_inst def _sync_block_num(self): self.transfer_variable.block_num.remote(self.block_num, role=consts.HOST, idx=0) LOGGER.info("sent block num {} to host".format(self.block_num)) def _compensate_set_difference(self, original_data, data_output): self.coverage = data_output.count() / original_data.count() import copy schema = copy.deepcopy(original_data.schema) if self.need_label: original_data = original_data.mapValues(lambda v: Instance(label="unretrieved", features=[], inst_id=v.inst_id)) else: feature_count = len(self.target_cols) features = np.array(["unretrieved"] * feature_count) original_data = original_data.mapValues(lambda v: Instance(features=features, inst_id=v.inst_id)) # LOGGER.debug(f"original data features is {list(original_data.collect())[0][1].features}") # LOGGER.debug(f"original data label is {list(original_data.collect())[0][1].label}") data_output = original_data.union(data_output, lambda v, u: u) # LOGGER.debug(f"data_output features after union is {list(data_output.collect())[0][1].features}") # LOGGER.debug(f"data_output label after union is {list(data_output.collect())[0][1].label}") if self.need_label: schema["label_name"] = "retrieved_value" schema["header"] = [] data_output.schema = schema else: schema["label_name"] = None schema["header"] = self.target_cols data_output.schema = schema self._sync_coverage(original_data) return data_output def _sync_coverage(self, data_instance): coverage = self.coverage * data_instance.count() self.transfer_variable.coverage.remote(coverage, role=consts.HOST, idx=0) LOGGER.info(f"sent coverage {coverage} to host") def _iteratively_get_encrypted_values(self): """ :return: Table, bytes """ id_block_ciphertext = None nonce = None for i in range(self.block_num): id_block = self._transmit_value_ciphertext(time=i) # List[(Ei, Eval)] nonce_inst = self._sync_nonce_list(time=i) if i == self.target_block_index: id_block_ciphertext = id_block nonce = nonce_inst self.transfer_variable.block_confirm.remote(True, suffix=(i,), role=consts.HOST, idx=0) return id_block_ciphertext, nonce
15,708
42.156593
116
py
FATE
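The guest's _parse_security_level above converts the user-facing security_level into the number of obfuscation blocks as block_num = ceil(2 * exp(security_scale * security_level)) with security_scale = ln(500), i.e. block_num = ceil(2 * 500**level). Checking a few levels directly:

import numpy as np

security_scale = np.log(500)
for level in (0.1, 0.5, 1.0):
    block_num = int(np.ceil(2 * np.exp(security_scale * level)))
    print(level, block_num)   # 0.1 -> 4, 0.5 -> 45, 1.0 -> 1000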
FATE-master/python/federatedml/secure_information_retrieval/__init__.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #
616
37.5625
75
py
FATE
FATE-master/python/federatedml/param/ftl_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import collections import copy from federatedml.param.intersect_param import IntersectParam from types import SimpleNamespace from federatedml.param.base_param import BaseParam, deprecated_param from federatedml.util import consts from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.predict_param import PredictParam from federatedml.param.callback_param import CallbackParam deprecated_param_list = ["validation_freqs", "metrics"] @deprecated_param(*deprecated_param_list) class FTLParam(BaseParam): def __init__(self, alpha=1, tol=0.000001, n_iter_no_change=False, validation_freqs=None, optimizer={'optimizer': 'Adam', 'learning_rate': 0.01}, nn_define={}, epochs=1, intersect_param=IntersectParam(consts.RSA), config_type='keras', batch_size=-1, encrypte_param=EncryptParam(), encrypted_mode_calculator_param=EncryptedModeCalculatorParam(mode="confusion_opt"), predict_param=PredictParam(), mode='plain', communication_efficient=False, local_round=5, callback_param=CallbackParam()): """ Parameters ---------- alpha : float a loss coefficient defined in paper, it defines the importance of alignment loss tol : float loss tolerance n_iter_no_change : bool check loss convergence or not validation_freqs : None or positive integer or container object in python Do validation in training process or Not. if equals None, will not do validation in train process; if equals positive integer, will validate data every validation_freqs epochs passes; if container object in python, will validate data if epochs belong to this container. e.g. validation_freqs = [10, 15], will validate data when epoch equals to 10 and 15. The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds. When it is larger than 1, a number which is divisible by "epochs" is recommended, otherwise, you will miss the validation scores of last training epoch. optimizer : str or dict optimizer method, accept following types: 1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD" 2. a dict, with a required key-value pair keyed by "optimizer", with optional key-value pairs such as learning rate. defaults to "SGD" nn_define : dict a dict represents the structure of neural network, it can be output by tf-keras epochs : int epochs num intersect_param define the intersect method config_type : {'tf-keras'} config type batch_size : int batch size when computing transformed feature embedding, -1 use full data. 
encrypte_param encrypt param encrypted_mode_calculator_param encrypted mode calculator param predict_param predict param mode: {"plain", "encrypted"} plain: will not use any encrypt algorithms, data exchanged in plaintext encrypted: use paillier to encrypt gradients communication_efficient: bool whether to use communication-efficient updating or not. when communication-efficient updating is enabled, FTL model will update gradients by several local rounds using intermediate data local_round: int local update rounds when using communication-efficient mode """ super(FTLParam, self).__init__() self.alpha = alpha self.tol = tol self.n_iter_no_change = n_iter_no_change self.validation_freqs = validation_freqs self.optimizer = optimizer self.nn_define = nn_define self.epochs = epochs self.intersect_param = copy.deepcopy(intersect_param) self.config_type = config_type self.batch_size = batch_size self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param) self.encrypt_param = copy.deepcopy(encrypte_param) self.predict_param = copy.deepcopy(predict_param) self.mode = mode self.communication_efficient = communication_efficient self.local_round = local_round self.callback_param = copy.deepcopy(callback_param) def check(self): self.intersect_param.check() self.encrypt_param.check() self.encrypted_mode_calculator_param.check() self.optimizer = self._parse_optimizer(self.optimizer) supported_config_type = ["keras"] if self.config_type not in supported_config_type: raise ValueError(f"config_type should be one of {supported_config_type}") if not isinstance(self.tol, (int, float)): raise ValueError("tol should be numeric") if not isinstance(self.epochs, int) or self.epochs <= 0: raise ValueError("epochs should be a positive integer") if self.nn_define and not isinstance(self.nn_define, dict): raise ValueError("nn_define should be a dict defining the structure of neural network") if self.batch_size != -1: if not isinstance(self.batch_size, int) \ or self.batch_size < consts.MIN_BATCH_SIZE: raise ValueError( "batch_size {} not supported, should be no less than {} or -1 to use all data".format( self.batch_size, consts.MIN_BATCH_SIZE)) for p in deprecated_param_list: # if self._warn_to_deprecate_param(p, "", ""): if self._deprecated_params_set.get(p): if "callback_param" in self.get_user_feeded(): raise ValueError(f"{p} and callback param should not be set simultaneously," f"{self._deprecated_params_set}, {self.get_user_feeded()}") else: self.callback_param.callbacks = ["PerformanceEvaluate"] break descr = "ftl's" if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"): self.callback_param.validation_freqs = self.validation_freqs if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"): self.callback_param.metrics = self.metrics if self.validation_freqs is None: pass elif isinstance(self.validation_freqs, int): if self.validation_freqs < 1: raise ValueError("validation_freqs should be larger than 0 when it's integer") elif not isinstance(self.validation_freqs, (list, tuple, set)): raise ValueError("validation_freqs should be None or positive integer or container") assert isinstance(self.communication_efficient, bool), 'communication_efficient must be a boolean' assert self.mode in [ 'encrypted', 'plain'], 'mode options: encrypted or plain, but {} is offered'.format( self.mode) self.check_positive_integer(self.epochs, 'epochs') self.check_positive_number(self.alpha, 'alpha') self.check_positive_integer(self.local_round, 'local round') @staticmethod def 
_parse_optimizer(opt): """ Examples: 1. "optimizer": "SGD" 2. "optimizer": { "optimizer": "SGD", "learning_rate": 0.05 } """ kwargs = {} if isinstance(opt, str): return SimpleNamespace(optimizer=opt, kwargs=kwargs) elif isinstance(opt, dict): optimizer = opt.get("optimizer") if not optimizer: raise ValueError(f"optimizer config: {opt} invalid") kwargs = {k: v for k, v in opt.items() if k != "optimizer"} return SimpleNamespace(optimizer=optimizer, kwargs=kwargs) else: raise ValueError(f"invalid type for optimizer: {type(opt)}")
8,927
44.090909
120
py
FATE
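The two optimizer formats accepted by FTLParam normalize to the same SimpleNamespace shape; below is a stand-alone re-implementation of _parse_optimizer's happy paths, slightly simplified from the method above, for illustration only:

from types import SimpleNamespace

def parse_optimizer(opt):
    # string form: optimizer name only, no extra hyper-parameters
    if isinstance(opt, str):
        return SimpleNamespace(optimizer=opt, kwargs={})
    # dict form: the "optimizer" key is required, everything else is kwargs
    if isinstance(opt, dict):
        optimizer = opt.get("optimizer")
        if not optimizer:
            raise ValueError(f"optimizer config: {opt} invalid")
        kwargs = {k: v for k, v in opt.items() if k != "optimizer"}
        return SimpleNamespace(optimizer=optimizer, kwargs=kwargs)
    raise ValueError(f"invalid type for optimizer: {type(opt)}")

print(parse_optimizer("SGD"))
print(parse_optimizer({"optimizer": "Adam", "learning_rate": 0.01}))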
FATE-master/python/federatedml/param/glm_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.base_param import BaseParam, deprecated_param from federatedml.param.callback_param import CallbackParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.init_model_param import InitParam from federatedml.param.stepwise_param import StepwiseParam from federatedml.util import consts @deprecated_param("validation_freqs", "metrics", "early_stopping_rounds", "use_first_metric_only") class LinearModelParam(BaseParam): """ Parameters used for GLM. Parameters ---------- penalty : {'L2' or 'L1'} Penalty method used in LinR. Please note that, when using encrypted version in HeteroLinR, 'L1' is not supported. tol : float, default: 1e-4 The tolerance of convergence alpha : float, default: 1.0 Regularization strength coefficient. optimizer : {'sgd', 'rmsprop', 'adam', 'sqn', 'adagrad', 'nesterov_momentum_sgd'} Optimize method batch_size : int, default: -1 Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. learning_rate : float, default: 0.01 Learning rate max_iter : int, default: 100 The maximum iteration for training. init_param: InitParam object, default: default InitParam object Init param method object. early_stop : {'diff', 'abs', 'weight_diff'} Method used to judge convergence. a) diff: Use difference of loss between two iterations to judge whether converge. b) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < tol, it is converged. c) weight_diff: Use difference between weights of two consecutive iterations encrypt_param: EncryptParam object, default: default EncryptParam object encrypt param cv_param: CrossValidationParam object, default: default CrossValidationParam object cv param decay: int or float, default: 1 Decay rate for learning rate. Learning rate will follow this decay schedule: lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t) where t is the iter number. decay_sqrt: Bool, default: True lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t) validation_freqs: int, list, tuple, set, or None validation frequency during training, required when using early stopping. The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds. When it is larger than 1, a number which is divisible by "max_iter" is recommended, otherwise, you will miss the validation scores of the last training iteration. early_stopping_rounds: int, default: None If positive number specified, at every specified training rounds, program checks for early stopping criteria. Validation_freqs must also be set when using early stopping. 
metrics: list or None, default: None Specify which metrics to be used when performing evaluation during training process. If metrics have not improved at early_stopping rounds, training stops before convergence. If set as empty, default metrics will be used. For regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error'] use_first_metric_only: bool, default: False Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement. floating_point_precision: None or integer if not None, use floating_point_precision-bit to speed up calculation, e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide the result by 2**floating_point_precision in the end. callback_param: CallbackParam object callback param """ def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='sgd', batch_size=-1, learning_rate=0.01, init_param=InitParam(), max_iter=100, early_stop='diff', encrypt_param=EncryptParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, validation_freqs=None, early_stopping_rounds=None, stepwise_param=StepwiseParam(), metrics=None, use_first_metric_only=False, floating_point_precision=23, callback_param=CallbackParam()): super(LinearModelParam, self).__init__() self.penalty = penalty self.tol = tol self.alpha = alpha self.optimizer = optimizer self.batch_size = batch_size self.learning_rate = learning_rate self.init_param = copy.deepcopy(init_param) self.max_iter = max_iter self.early_stop = early_stop self.encrypt_param = encrypt_param self.cv_param = copy.deepcopy(cv_param) self.decay = decay self.decay_sqrt = decay_sqrt self.validation_freqs = validation_freqs self.early_stopping_rounds = early_stopping_rounds self.stepwise_param = copy.deepcopy(stepwise_param) self.metrics = metrics or [] self.use_first_metric_only = use_first_metric_only self.floating_point_precision = floating_point_precision self.callback_param = copy.deepcopy(callback_param) def check(self): descr = "linear model param's " if self.penalty is None: self.penalty = 'NONE' elif type(self.penalty).__name__ != "str": raise ValueError( descr + "penalty {} not supported, should be str type".format(self.penalty)) self.penalty = self.penalty.upper() if self.penalty not in [consts.L1_PENALTY, consts.L2_PENALTY, consts.NONE.upper()]: raise ValueError( "penalty {} not supported, penalty should be 'L1', 'L2' or 'NONE'".format(self.penalty)) if type(self.tol).__name__ not in ["int", "float"]: raise ValueError( descr + "tol {} not supported, should be float type".format(self.tol)) if type(self.alpha).__name__ not in ["int", "float"]: raise ValueError( descr + "alpha {} not supported, should be float type".format(self.alpha)) if type(self.optimizer).__name__ != "str": raise ValueError( descr + "optimizer {} not supported, should be str type".format(self.optimizer)) else: self.optimizer = self.optimizer.lower() if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad', 'sqn', 'nesterov_momentum_sgd']: raise ValueError( descr + "optimizer not supported, optimizer should be" " 'sgd', 'rmsprop', 'adam', 'sqn', 'adagrad', or 'nesterov_momentum_sgd'") if type(self.batch_size).__name__ not in ["int", "long"]: raise ValueError( descr + "batch_size {} not supported, should be int type".format(self.batch_size)) if self.batch_size != -1: if type(self.batch_size).__name__ not in ["int", "long"] \ or self.batch_size < consts.MIN_BATCH_SIZE: raise ValueError(descr + " {} not supported, should be larger than {} or " "-1 
to use all data".format(self.batch_size, consts.MIN_BATCH_SIZE)) if type(self.learning_rate).__name__ not in ["int", "float"]: raise ValueError( descr + "learning_rate {} not supported, should be float type".format( self.learning_rate)) self.init_param.check() if type(self.max_iter).__name__ != "int": raise ValueError( descr + "max_iter {} not supported, should be int type".format(self.max_iter)) elif self.max_iter <= 0: raise ValueError( descr + "max_iter must be greater than or equal to 1") if type(self.early_stop).__name__ != "str": raise ValueError( descr + "early_stop {} not supported, should be str type".format( self.early_stop)) else: self.early_stop = self.early_stop.lower() if self.early_stop not in ['diff', 'abs', 'weight_diff']: raise ValueError( descr + "early_stop not supported, early_stop should be 'weight_diff', 'diff' or 'abs'") self.encrypt_param.check() if type(self.decay).__name__ not in ["int", "float"]: raise ValueError( descr + "decay {} not supported, should be 'int' or 'float'".format(self.decay) ) if type(self.decay_sqrt).__name__ not in ["bool"]: raise ValueError( descr + "decay_sqrt {} not supported, should be 'bool'".format(self.decay_sqrt) ) self.stepwise_param.check() for p in ["early_stopping_rounds", "validation_freqs", "metrics", "use_first_metric_only"]: if self._warn_to_deprecate_param(p, "", ""): if "callback_param" in self.get_user_feeded(): raise ValueError(f"{p} and callback param should not be set simultaneously") else: self.callback_param.callbacks = ["PerformanceEvaluate"] break if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"): self.callback_param.validation_freqs = self.validation_freqs if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"): self.callback_param.early_stopping_rounds = self.early_stopping_rounds if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"): self.callback_param.metrics = self.metrics if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"): self.callback_param.use_first_metric_only = self.use_first_metric_only if self.floating_point_precision is not None and \ (not isinstance(self.floating_point_precision, int) or self.floating_point_precision < 0 or self.floating_point_precision > 64): raise ValueError("floating point precision should be None or an integer between 0 and 64") self.callback_param.check() return True
11,308
49.262222
182
py
FATE
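The decay schedule documented above is easy to sanity-check in isolation; a minimal sketch of lr = lr0 / (1 + decay*t) and its decay_sqrt variant lr = lr0 / sqrt(1 + decay*t):

import math

def decayed_lr(lr0, decay, t, decay_sqrt=True):
    factor = math.sqrt(1 + decay * t) if decay_sqrt else (1 + decay * t)
    return lr0 / factor

print([round(decayed_lr(0.01, 1, t), 5) for t in range(4)])
# [0.01, 0.00707, 0.00577, 0.005]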
FATE-master/python/federatedml/param/sir_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam, deprecated_param from federatedml.param.intersect_param import DHParam from federatedml.util import consts, LOGGER @deprecated_param("key_size", "raw_retrieval") class SecureInformationRetrievalParam(BaseParam): """ Parameters ---------- security_level: float, default 0.5 security level, should set value in [0, 1]; a security_level of 0.0 means raw data retrieval oblivious_transfer_protocol: {"OT_Hauck"} OT type, only supports OT_Hauck commutative_encryption : {"CommutativeEncryptionPohligHellman"} the commutative encryption scheme used non_committing_encryption : {"aes"} the non-committing encryption scheme used dh_params params for Pohlig-Hellman Encryption key_size: int, value >= 1024 the key length of the commutative cipher; note that this param will be deprecated in the future, please specify key_length in DHParam instead. raw_retrieval: bool perform raw retrieval if True target_cols: str or list of str target cols to retrieve; any values not retrieved will be marked as "unretrieved"; if target_cols is None, the label will be retrieved, same behavior as in previous versions; default None """ def __init__(self, security_level=0.5, oblivious_transfer_protocol=consts.OT_HAUCK, commutative_encryption=consts.CE_PH, non_committing_encryption=consts.AES, key_size=consts.DEFAULT_KEY_LENGTH, dh_params=DHParam(), raw_retrieval=False, target_cols=None): super(SecureInformationRetrievalParam, self).__init__() self.security_level = security_level self.oblivious_transfer_protocol = oblivious_transfer_protocol self.commutative_encryption = commutative_encryption self.non_committing_encryption = non_committing_encryption self.dh_params = dh_params self.key_size = key_size self.raw_retrieval = raw_retrieval self.target_cols = target_cols def check(self): descr = "secure information retrieval param's " self.check_decimal_float(self.security_level, descr + "security_level") self.oblivious_transfer_protocol = self.check_and_change_lower(self.oblivious_transfer_protocol, [consts.OT_HAUCK.lower()], descr + "oblivious_transfer_protocol") self.commutative_encryption = self.check_and_change_lower(self.commutative_encryption, [consts.CE_PH.lower()], descr + "commutative_encryption") self.non_committing_encryption = self.check_and_change_lower(self.non_committing_encryption, [consts.AES.lower()], descr + "non_committing_encryption") if self._warn_to_deprecate_param("key_size", descr, "dh_param's key_length"): self.dh_params.key_length = self.key_size self.dh_params.check() if self._warn_to_deprecate_param("raw_retrieval", descr, "dh_param's security_level = 0"): self.check_boolean(self.raw_retrieval, descr) self.target_cols = [] if self.target_cols is None else self.target_cols if not isinstance(self.target_cols, list): self.target_cols = [self.target_cols] for col in 
self.target_cols: self.check_string(col, descr + "target_cols") if len(self.target_cols) == 0: LOGGER.warning("'target_cols' is empty. Label will be retrieved.")
4,682
46.785714
109
py
FATE
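A usage sketch of the class above, assuming a working FATE installation: check() folds the deprecated key_size into dh_params.key_length when it was user-fed, and normalizes a scalar target_cols into a list:

from federatedml.param.sir_param import SecureInformationRetrievalParam

param = SecureInformationRetrievalParam(security_level=0.5, target_cols="y1")
param.check()
print(param.target_cols)   # ["y1"]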
FATE-master/python/federatedml/param/homo_onehot_encoder_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # added by jsweng # param class for OHE with alignment # from federatedml.param.base_param import BaseParam class HomoOneHotParam(BaseParam): """ Parameters ---------- transform_col_indexes: list or int, default: -1 Specify which columns need to be calculated. -1 represents all columns. transform_col_names: list of str, default: None Specify which columns need to be transformed, by header name. need_run: bool, default True Indicate if this module needed to be run need_alignment: bool, default True Indicate whether alignment of features is turned on """ def __init__(self, transform_col_indexes=-1, transform_col_names=None, need_run=True, need_alignment=True): super(HomoOneHotParam, self).__init__() self.transform_col_indexes = transform_col_indexes self.transform_col_names = transform_col_names self.need_run = need_run self.need_alignment = need_alignment def check(self): descr = "One-hot encoder with alignment param's" self.check_defined_type(self.transform_col_indexes, descr, ['list', 'int']) self.check_boolean(self.need_run, descr) self.check_boolean(self.need_alignment, descr) self.transform_col_names = [] if self.transform_col_names is None else self.transform_col_names return True
1,299
32.333333
111
py
FATE
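A quick check-usage sketch for the class above (hypothetical values; -1 would keep the all-columns default):

from federatedml.param.homo_onehot_encoder_param import HomoOneHotParam

param = HomoOneHotParam(transform_col_indexes=[0, 2], need_alignment=True)
param.check()                      # validates types, fills transform_col_names
print(param.transform_col_names)   # [] when not supplied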
FATE-master/python/federatedml/param/scale_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import consts, LOGGER class ScaleParam(BaseParam): """ Define the feature scale parameters. Parameters ---------- method : {"standard_scale", "min_max_scale"} like scale in sklearn, now it support "min_max_scale" and "standard_scale", and will support other scale method soon. Default standard_scale, which will do nothing for scale mode : {"normal", "cap"} for mode is "normal", the feat_upper and feat_lower is the normal value like "10" or "3.1" and for "cap", feat_upper and feature_lower will between 0 and 1, which means the percentile of the column. Default "normal" feat_upper : int or float or list of int or float the upper limit in the column. If use list, mode must be "normal", and list length should equal to the number of features to scale. If the scaled value is larger than feat_upper, it will be set to feat_upper feat_lower: int or float or list of int or float the lower limit in the column. If use list, mode must be "normal", and list length should equal to the number of features to scale. If the scaled value is less than feat_lower, it will be set to feat_lower scale_col_indexes: list the idx of column in scale_column_idx will be scaled, while the idx of column is not in, it will not be scaled. scale_names : list of string Specify which columns need to scaled. Each element in the list represent for a column name in header. default: [] with_mean : bool used for "standard_scale". Default True. with_std : bool used for "standard_scale". Default True. The standard scale of column x is calculated as : $z = (x - u) / s$ , where $u$ is the mean of the column and $s$ is the standard deviation of the column. if with_mean is False, $u$ will be 0, and if with_std is False, $s$ will be 1. 
need_run : bool Indicate if this module needed to be run, default True """ def __init__( self, method="standard_scale", mode="normal", scale_col_indexes=-1, scale_names=None, feat_upper=None, feat_lower=None, with_mean=True, with_std=True, need_run=True): super().__init__() self.scale_names = [] if scale_names is None else scale_names self.method = method self.mode = mode self.feat_upper = feat_upper # LOGGER.debug("self.feat_upper:{}, type:{}".format(self.feat_upper, type(self.feat_upper))) self.feat_lower = feat_lower self.scale_col_indexes = scale_col_indexes self.with_mean = with_mean self.with_std = with_std self.need_run = need_run def check(self): if self.method is not None: descr = "scale param's method" self.method = self.check_and_change_lower(self.method, [consts.MINMAXSCALE, consts.STANDARDSCALE], descr) descr = "scale param's mode" self.mode = self.check_and_change_lower(self.mode, [consts.NORMAL, consts.CAP], descr) # LOGGER.debug("self.feat_upper:{}, type:{}".format(self.feat_upper, type(self.feat_upper))) # if type(self.feat_upper).__name__ not in ["float", "int"]: # raise ValueError("scale param's feat_upper {} not supported, should be float or int".format( # self.feat_upper)) if self.scale_col_indexes != -1 and not isinstance(self.scale_col_indexes, list): raise ValueError("scale_col_indexes should be -1 or a list") if self.scale_names is None: self.scale_names = [] if not isinstance(self.scale_names, list): raise ValueError("scale_names should be a list of strings") else: for e in self.scale_names: if not isinstance(e, str): raise ValueError("scale_names should be a list of strings") self.check_boolean(self.with_mean, "scale_param with_mean") self.check_boolean(self.with_std, "scale_param with_std") self.check_boolean(self.need_run, "scale_param need_run") LOGGER.debug("Finish scale parameter check!") return True
5,196
43.042373
162
py
FATE
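The standard-scale formula from the docstring, z = (x - u) / s, with the with_mean / with_std switches behaving as described (u -> 0 and s -> 1 when disabled); a self-contained sketch, not the FATE implementation itself:

import numpy as np

def standard_scale(col, with_mean=True, with_std=True):
    u = col.mean() if with_mean else 0.0
    s = col.std() if with_std else 1.0
    return (col - u) / s

x = np.array([1.0, 2.0, 3.0, 4.0])
print(standard_scale(x))                  # zero mean, unit variance
print(standard_scale(x, with_std=False))  # centered only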
FATE-master/python/federatedml/param/encrypted_mode_calculation_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import LOGGER class EncryptedModeCalculatorParam(BaseParam): """ Define the encrypted_mode_calculator parameters. Parameters ---------- mode: {'strict', 'fast', 'balance', 'confusion_opt'} encrypted mode, default: strict re_encrypted_rate: float or int numeric number in [0, 1], used when mode equals 'balance', default: 1 """ def __init__(self, mode="strict", re_encrypted_rate=1): self.mode = mode self.re_encrypted_rate = re_encrypted_rate def check(self): descr = "encrypted_mode_calculator param" self.mode = self.check_and_change_lower(self.mode, ["strict", "fast", "balance", "confusion_opt", "confusion_opt_balance"], descr) if self.mode != "strict": LOGGER.warning("encrypted_mode_calculator will be removed in a later version; " "it can still be set in the current version, but only strict mode is supported, " "so other modes will be reset to strict for compatibility") self.mode = "strict" return True
1,926
36.057692
120
py
FATE
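The compatibility behavior in check() is worth seeing once: any non-strict mode passes validation but is then reset, so downstream code only ever observes 'strict'. A usage sketch assuming a FATE environment:

from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam

param = EncryptedModeCalculatorParam(mode="fast")
param.check()       # logs the deprecation warning
print(param.mode)   # "strict"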
FATE-master/python/federatedml/param/feldman_verifiable_sum_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class FeldmanVerifiableSumParam(BaseParam): """ Define which columns to sum and the decimal precision to carry Parameters ---------- sum_cols : list of column index, default: None Specify which columns need to be summed. If None, all columns will be summed. q_n : int, positive integer less than or equal to 16, default: 6 q_n is the number of significant decimal digits. If the data type is a float, the maximum number of significant digits is 16. The sum of integer and significant decimal digits should be less than or equal to 16. """ def __init__(self, sum_cols=None, q_n=6): self.sum_cols = sum_cols self.q_n = q_n def check(self): self.sum_cols = [] if self.sum_cols is None else self.sum_cols if isinstance(self.sum_cols, list): for idx in self.sum_cols: if not isinstance(idx, int): raise ValueError(f"type mismatch, column_indexes with element {idx}(type is {type(idx)})") if not isinstance(self.q_n, int): raise ValueError(f"Init param's q_n {self.q_n} not supported, should be int type, type is {type(self.q_n)}") if self.q_n < 0: raise ValueError(f"param's q_n {self.q_n} not supported, should be non-negative int value") elif self.q_n > 16: raise ValueError(f"param's q_n {self.q_n} not supported, should be less than or equal to 16")
2,149
38.814815
120
py
FATE
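The q_n cap exists because values are carried with q_n significant decimal digits while a Python float holds roughly 16 significant digits in total, so integer digits plus q_n must not exceed 16. A hypothetical fixed-point encode/decode pair illustrating the idea (the real encoding lives in the FeldmanVerifiableSum module, not in this param class):

def encode(x, q_n=6):
    # shift q_n decimal digits into the integer part
    return int(round(x * 10 ** q_n))

def decode(n, q_n=6):
    return n / 10 ** q_n

print(encode(3.141592))   # 3141592
print(decode(3141592))    # 3.141592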
FATE-master/python/federatedml/param/sample_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam import collections.abc class SampleParam(BaseParam): """ Define the sample method Parameters ---------- mode: {'random', 'stratified', 'exact_by_weight'} specify sample mode to use, default: 'random' method: {'downsample', 'upsample'}, default: 'downsample' specify sample method fractions: None or float or list if mode equals 'random', it should be a float number greater than 0, otherwise a list of elements of pairs like [label_i, sample_rate_i], e.g. [[0, 0.5], [1, 0.8], [2, 0.3]]. default: None random_state: int, RandomState instance or None, default: None random state task_type: str, default: 'hetero' federated task type need_run: bool, default True Indicate if this module needed to be run """ def __init__(self, mode="random", method="downsample", fractions=None, random_state=None, task_type="hetero", need_run=True): self.mode = mode self.method = method self.fractions = fractions self.random_state = random_state self.task_type = task_type self.need_run = need_run def check(self): descr = "sample param" self.mode = self.check_and_change_lower(self.mode, ["random", "stratified", "exact_by_weight"], descr) self.method = self.check_and_change_lower(self.method, ["upsample", "downsample"], descr) if self.mode == "stratified" and self.fractions is not None: if not isinstance(self.fractions, list): raise ValueError("fractions of sample param when using stratified should be list") for ele in self.fractions: if not isinstance(ele, collections.abc.Container) or len(ele) != 2: raise ValueError( "element in fractions of sample param using stratified should be a pair like [label_i, rate_i]") return True
2,788
36.186667
120
py
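A short, hedged example of the SampleParam class above, using the stratified mode described in its docstring; the label/rate pairs are illustrative.

from federatedml.param.sample_param import SampleParam

# keep 50% of label-0 rows and 80% of label-1 rows (illustrative rates)
param = SampleParam(mode="stratified", method="downsample", fractions=[[0, 0.5], [1, 0.8]])
param.check()  # lower-cases mode/method and validates each [label, rate] pair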
FATE
FATE-master/python/federatedml/param/base_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import builtins import json import os from federatedml.util import LOGGER, consts _FEEDED_DEPRECATED_PARAMS = "_feeded_deprecated_params" _DEPRECATED_PARAMS = "_deprecated_params" _USER_FEEDED_PARAMS = "_user_feeded_params" _IS_RAW_CONF = "_is_raw_conf" def deprecated_param(*names): def _decorator(cls: "BaseParam"): deprecated = cls._get_or_init_deprecated_params_set() for name in names: deprecated.add(name) return cls return _decorator class _StaticDefaultMeta(type): """ hook object creation, copy all default parameters in `__init__` """ def __call__(cls, *args, **kwargs): obj = cls.__new__(cls) import inspect import copy signature = inspect.signature(obj.__init__).bind(*args, **kwargs) signature.apply_defaults() args = copy.deepcopy(signature.args) kwargs = copy.deepcopy(signature.kwargs) obj.__init__(*args, **kwargs) return obj class BaseParam(metaclass=_StaticDefaultMeta): def __init__(self): pass def set_name(self, name: str): self._name = name return self def check(self): raise NotImplementedError("Parameter Object should be checked.") @classmethod def _get_or_init_deprecated_params_set(cls): if not hasattr(cls, _DEPRECATED_PARAMS): setattr(cls, _DEPRECATED_PARAMS, set()) return getattr(cls, _DEPRECATED_PARAMS) def _get_or_init_feeded_deprecated_params_set(self, conf=None): if not hasattr(self, _FEEDED_DEPRECATED_PARAMS): if conf is None: setattr(self, _FEEDED_DEPRECATED_PARAMS, set()) else: setattr( self, _FEEDED_DEPRECATED_PARAMS, set(conf[_FEEDED_DEPRECATED_PARAMS]), ) return getattr(self, _FEEDED_DEPRECATED_PARAMS) def _get_or_init_user_feeded_params_set(self, conf=None): if not hasattr(self, _USER_FEEDED_PARAMS): if conf is None: setattr(self, _USER_FEEDED_PARAMS, set()) else: setattr(self, _USER_FEEDED_PARAMS, set(conf[_USER_FEEDED_PARAMS])) return getattr(self, _USER_FEEDED_PARAMS) def get_user_feeded(self): return self._get_or_init_user_feeded_params_set() def get_feeded_deprecated_params(self): return self._get_or_init_feeded_deprecated_params_set() @property def _deprecated_params_set(self): return {name: True for name in self.get_feeded_deprecated_params()} def as_dict(self): def _recursive_convert_obj_to_dict(obj): ret_dict = {} for attr_name in list(obj.__dict__): # get attr attr = getattr(obj, attr_name) if attr and type(attr).__name__ not in dir(builtins): ret_dict[attr_name] = _recursive_convert_obj_to_dict(attr) else: ret_dict[attr_name] = attr return ret_dict return _recursive_convert_obj_to_dict(self) def update(self, conf, allow_redundant=False): update_from_raw_conf = conf.get(_IS_RAW_CONF, True) if update_from_raw_conf: deprecated_params_set = self._get_or_init_deprecated_params_set() feeded_deprecated_params_set = ( self._get_or_init_feeded_deprecated_params_set() ) user_feeded_params_set = self._get_or_init_user_feeded_params_set() setattr(self, _IS_RAW_CONF, False) else: feeded_deprecated_params_set = ( 
self._get_or_init_feeded_deprecated_params_set(conf) ) user_feeded_params_set = self._get_or_init_user_feeded_params_set(conf) def _recursive_update_param(param, config, depth, prefix): if depth > consts.PARAM_MAXDEPTH: raise ValueError("Param definition nested too deep, cannot parse it") inst_variables = param.__dict__ redundant_attrs = [] for config_key, config_value in config.items(): # redundant attr if config_key not in inst_variables: if not update_from_raw_conf and config_key.startswith("_"): setattr(param, config_key, config_value) else: redundant_attrs.append(config_key) continue full_config_key = f"{prefix}{config_key}" if update_from_raw_conf: # add user feeded params user_feeded_params_set.add(full_config_key) # update user feeded deprecated param set if full_config_key in deprecated_params_set: feeded_deprecated_params_set.add(full_config_key) # supported attr attr = getattr(param, config_key) if type(attr).__name__ in dir(builtins) or attr is None: setattr(param, config_key, config_value) else: # recursive set obj attr sub_params = _recursive_update_param( attr, config_value, depth + 1, prefix=f"{prefix}{config_key}." ) setattr(param, config_key, sub_params) if not allow_redundant and redundant_attrs: raise ValueError( f"cpn `{getattr(self, '_name', type(self))}` has redundant parameters: `{redundant_attrs}`" ) return param return _recursive_update_param(param=self, config=conf, depth=0, prefix="") def extract_not_builtin(self): def _get_not_builtin_types(obj): ret_dict = {} for variable in obj.__dict__: attr = getattr(obj, variable) if attr and type(attr).__name__ not in dir(builtins): ret_dict[variable] = _get_not_builtin_types(attr) return ret_dict return _get_not_builtin_types(self) def validate(self): self.builtin_types = dir(builtins) self.func = { "ge": self._greater_equal_than, "le": self._less_equal_than, "in": self._in, "not_in": self._not_in, "range": self._range, } home_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) param_validation_path_prefix = home_dir + "/param_validation/" param_name = type(self).__name__ param_validation_path = "/".join( [param_validation_path_prefix, param_name + ".json"] ) validation_json = None try: with open(param_validation_path, "r") as fin: validation_json = json.loads(fin.read()) except BaseException: return self._validate_param(self, validation_json) def _validate_param(self, param_obj, validation_json): default_section = type(param_obj).__name__ var_list = param_obj.__dict__ for variable in var_list: attr = getattr(param_obj, variable) if type(attr).__name__ in self.builtin_types or attr is None: if variable not in validation_json: continue validation_dict = validation_json[default_section][variable] value = getattr(param_obj, variable) value_legal = False for op_type in validation_dict: if self.func[op_type](value, validation_dict[op_type]): value_legal = True break if not value_legal: raise ValueError( "Please check runtime conf, {} = {} does not match user-parameter restriction".format( variable, value ) ) elif variable in validation_json: self._validate_param(attr, validation_json) @staticmethod def check_string(param, descr): if type(param).__name__ not in ["str"]: raise ValueError( descr + " {} not supported, should be string type".format(param) ) @staticmethod def check_positive_integer(param, descr): if type(param).__name__ not in ["int", "long"] or param <= 0: raise ValueError( descr + " {} not supported, should be positive integer".format(param) ) @staticmethod def check_positive_number(param,
descr): if type(param).__name__ not in ["float", "int", "long"] or param <= 0: raise ValueError( descr + " {} not supported, should be positive numeric".format(param) ) @staticmethod def check_nonnegative_number(param, descr): if type(param).__name__ not in ["float", "int", "long"] or param < 0: raise ValueError( descr + " {} not supported, should be non-negative numeric".format(param) ) @staticmethod def check_decimal_float(param, descr): if type(param).__name__ not in ["float", "int"] or param < 0 or param > 1: raise ValueError( descr + " {} not supported, should be a float number in range [0, 1]".format( param ) ) @staticmethod def check_boolean(param, descr): if type(param).__name__ != "bool": raise ValueError( descr + " {} not supported, should be bool type".format(param) ) @staticmethod def check_open_unit_interval(param, descr): if type(param).__name__ not in ["float"] or param <= 0 or param >= 1: raise ValueError( descr + " should be a numeric number between 0 and 1 exclusively" ) @staticmethod def check_valid_value(param, descr, valid_values): if param not in valid_values: raise ValueError( descr + " {} is not supported, it should be in {}".format(param, valid_values) ) @staticmethod def check_defined_type(param, descr, types): if type(param).__name__ not in types: raise ValueError( descr + " {} not supported, should be one of {}".format(param, types) ) @staticmethod def check_and_change_lower(param, valid_list, descr=""): if type(param).__name__ != "str": raise ValueError( descr + " {} not supported, should be one of {}".format(param, valid_list) ) lower_param = param.lower() if lower_param in valid_list: return lower_param else: raise ValueError( descr + " {} not supported, should be one of {}".format(param, valid_list) ) @staticmethod def _greater_equal_than(value, limit): return value >= limit - consts.FLOAT_ZERO @staticmethod def _less_equal_than(value, limit): return value <= limit + consts.FLOAT_ZERO @staticmethod def _range(value, ranges): in_range = False for left_limit, right_limit in ranges: if ( left_limit - consts.FLOAT_ZERO <= value <= right_limit + consts.FLOAT_ZERO ): in_range = True break return in_range @staticmethod def _in(value, right_value_list): return value in right_value_list @staticmethod def _not_in(value, wrong_value_list): return value not in wrong_value_list def _warn_deprecated_param(self, param_name, descr): if self._deprecated_params_set.get(param_name): LOGGER.warning( f"{descr} {param_name} is deprecated and ignored in this version." ) def _warn_to_deprecate_param(self, param_name, descr, new_param): if self._deprecated_params_set.get(param_name): LOGGER.warning( f"{descr} {param_name} will be deprecated in future release; " f"please use {new_param} instead." ) return True return False
13,151
33.610526
113
py
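To illustrate how BaseParam.update() merges a runtime config dict into a param object, here is a hedged sketch built around a hypothetical ToyParam subclass (not part of FATE):

from federatedml.param.base_param import BaseParam

class ToyParam(BaseParam):  # hypothetical subclass for illustration only
    def __init__(self, learning_rate=0.01):
        self.learning_rate = learning_rate

    def check(self):
        self.check_positive_number(self.learning_rate, "toy param's learning_rate")
        return True

param = ToyParam()
param.update({"learning_rate": 0.1})  # recursively copies matching keys onto attributes
param.check()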
FATE
FATE-master/python/federatedml/param/column_expand_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import consts from federatedml.util import LOGGER class ColumnExpandParam(BaseParam): """ Define method used for expanding column Parameters ---------- append_header : None or str or List[str], default: None Name(s) for appended feature(s). If None is given, module outputs the original input value without any operation. method : str, default: 'manual' If method is 'manual', use user-specified `fill_value` to fill in new features. fill_value : int or float or str or List[int] or List[float] or List[str], default: 1e-8 Used for filling expanded feature columns. If given a list, length of the list must match that of `append_header` need_run: bool, default: True Indicate if this module needed to be run. """ def __init__(self, append_header=None, method="manual", fill_value=consts.FLOAT_ZERO, need_run=True): super(ColumnExpandParam, self).__init__() self.append_header = append_header self.method = method self.fill_value = fill_value self.need_run = need_run def check(self): descr = "column_expand param's " if not isinstance(self.method, str): raise ValueError(f"{descr}method {self.method} not supported, should be str type") else: user_input = self.method.lower() if user_input == "manual": self.method = consts.MANUAL else: raise ValueError(f"{descr} method {user_input} not supported") BaseParam.check_boolean(self.need_run, descr=descr) self.append_header = [] if self.append_header is None else self.append_header if not isinstance(self.append_header, list): raise ValueError(f"{descr} append_header must be None or list of str. " f"Received {type(self.append_header)} instead.") for feature_name in self.append_header: BaseParam.check_string(feature_name, descr + "append_header values") if isinstance(self.fill_value, list): if len(self.append_header) != len(self.fill_value): raise ValueError( f"{descr} `fill value` is set to be list, " f"and param `append_header` must also be list of the same length.") else: self.fill_value = [self.fill_value] for value in self.fill_value: if type(value).__name__ not in ["float", "int", "long", "str"]: raise ValueError( f"{descr} fill value(s) must be float, int, or str. Received type {type(value)} instead.") LOGGER.debug("Finish column expand parameter check!") return True
3,472
40.843373
121
py
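A hedged usage sketch of the ColumnExpandParam class above; the column names and fill values are illustrative.

from federatedml.param.column_expand_param import ColumnExpandParam

# append two constant feature columns; fill_value length must match append_header
param = ColumnExpandParam(append_header=["c1", "c2"], method="manual", fill_value=[0.0, 1.0])
param.check()  # enforces matching lengths of append_header and fill_value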
FATE
FATE-master/python/federatedml/param/local_baseline_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.base_param import BaseParam from federatedml.param.predict_param import PredictParam class LocalBaselineParam(BaseParam): """ Define the local baseline model param Parameters ---------- model_name : str sklearn model used to train the baseline model model_opts : dict or None, default: None Param to be used as input into baseline model predict_param : PredictParam object, default: default PredictParam object predict param need_run: bool, default True Indicate if this module needed to be run """ def __init__(self, model_name="LogisticRegression", model_opts=None, predict_param=PredictParam(), need_run=True): super(LocalBaselineParam, self).__init__() self.model_name = model_name self.model_opts = model_opts self.predict_param = copy.deepcopy(predict_param) self.need_run = need_run def check(self): descr = "local baseline param" self.model_name = self.check_and_change_lower(self.model_name, ["logisticregression"], descr) self.check_boolean(self.need_run, descr) if self.model_opts is not None: if not isinstance(self.model_opts, dict): raise ValueError(descr + " model_opts must be None or dict.") if self.model_opts is None: self.model_opts = {} self.predict_param.check() return True
2,222
33.734375
118
py
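A hedged sketch of the LocalBaselineParam class above; the sklearn option dict is illustrative.

from federatedml.param.local_baseline_param import LocalBaselineParam

# run a local sklearn LogisticRegression baseline with custom options
param = LocalBaselineParam(model_name="LogisticRegression", model_opts={"C": 0.5, "max_iter": 200})
param.check()  # lower-cases model_name and verifies model_opts is a dict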
FATE
FATE-master/python/federatedml/param/evaluation_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.util import consts, LOGGER from federatedml.param.base_param import BaseParam class EvaluateParam(BaseParam): """ Define the evaluation method of binary/multiple classification and regression Parameters ---------- eval_type : {'binary', 'regression', 'multi'} support 'binary' for HomoLR, HeteroLR and Secureboosting, support 'regression' for Secureboosting; 'multi' is not supported in this version unfold_multi_result : bool unfold multi result and get several one-vs-rest binary classification results pos_label : int or float or str specify the positive label, depending on the data's label type; this parameter is effective only for 'binary' need_run: bool, default True Indicate if this module needed to be run """ def __init__(self, eval_type="binary", pos_label=1, need_run=True, metrics=None, run_clustering_arbiter_metric=False, unfold_multi_result=False): super().__init__() self.eval_type = eval_type self.pos_label = pos_label self.need_run = need_run self.metrics = metrics self.unfold_multi_result = unfold_multi_result self.run_clustering_arbiter_metric = run_clustering_arbiter_metric self.default_metrics = { consts.BINARY: consts.ALL_BINARY_METRICS, consts.MULTY: consts.ALL_MULTI_METRICS, consts.REGRESSION: consts.ALL_REGRESSION_METRICS, consts.CLUSTERING: consts.ALL_CLUSTER_METRICS } self.allowed_metrics = { consts.BINARY: consts.ALL_BINARY_METRICS, consts.MULTY: consts.ALL_MULTI_METRICS, consts.REGRESSION: consts.ALL_REGRESSION_METRICS, consts.CLUSTERING: consts.ALL_CLUSTER_METRICS } def _use_single_value_default_metrics(self): self.default_metrics = { consts.BINARY: consts.DEFAULT_BINARY_METRIC, consts.MULTY: consts.DEFAULT_MULTI_METRIC, consts.REGRESSION: consts.DEFAULT_REGRESSION_METRIC, consts.CLUSTERING: consts.DEFAULT_CLUSTER_METRIC } def _check_valid_metric(self, metrics_list): metric_list = consts.ALL_METRIC_NAME alias_name: dict = consts.ALIAS full_name_list = [] metrics_list = [str.lower(i) for i in metrics_list] for metric in metrics_list: if metric in metric_list: if metric not in full_name_list: full_name_list.append(metric) continue valid_flag = False for alias, full_name in alias_name.items(): if metric in alias: if full_name not in full_name_list: full_name_list.append(full_name) valid_flag = True break if not valid_flag: raise ValueError('metric {} is not supported'.format(metric)) allowed_metrics = self.allowed_metrics[self.eval_type] for m in full_name_list: if m not in allowed_metrics: raise ValueError('metric {} is not used for {} task'.format(m, self.eval_type)) if consts.RECALL in full_name_list and consts.PRECISION not in full_name_list: full_name_list.append(consts.PRECISION) if consts.RECALL not in full_name_list and consts.PRECISION in full_name_list: full_name_list.append(consts.RECALL) return full_name_list def check(self): descr = "evaluate param's " self.eval_type = self.check_and_change_lower(self.eval_type,
[consts.BINARY, consts.MULTY, consts.REGRESSION, consts.CLUSTERING], descr) if type(self.pos_label).__name__ not in ["str", "float", "int"]: raise ValueError( "evaluate param's pos_label {} not supported, should be str or float or int type".format( self.pos_label)) if type(self.need_run).__name__ != "bool": raise ValueError( "evaluate param's need_run {} not supported, should be bool".format( self.need_run)) if self.metrics is None or len(self.metrics) == 0: self.metrics = self.default_metrics[self.eval_type] LOGGER.warning('use default metric {} for eval type {}'.format(self.metrics, self.eval_type)) self.check_boolean(self.unfold_multi_result, 'multi_result_unfold') self.metrics = self._check_valid_metric(self.metrics) return True def check_single_value_default_metric(self): self._use_single_value_default_metrics() # in validation strategy, psi, f1-score, confusion-mat and pr-quantile are not supported in the current version if self.metrics is None or len(self.metrics) == 0: self.metrics = self.default_metrics[self.eval_type] LOGGER.warning('use default metric {} for eval type {}'.format(self.metrics, self.eval_type)) ban_metric = [consts.PSI, consts.F1_SCORE, consts.CONFUSION_MAT, consts.QUANTILE_PR] # rebuild the list instead of removing while iterating, which would skip elements self.metrics = [metric for metric in self.metrics if metric not in ban_metric] self.check()
6,061
37.858974
109
py
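A hedged sketch of the EvaluateParam class above for a binary task; the metric list is illustrative.

from federatedml.param.evaluation_param import EvaluateParam

param = EvaluateParam(eval_type="binary", pos_label=1, metrics=["auc", "ks"])
param.check()  # maps metric aliases to full names and pairs precision with recall if needed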
FATE
FATE-master/python/federatedml/param/hetero_sshe_lr_param.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import copy from federatedml.param.logistic_regression_param import LogisticParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.callback_param import CallbackParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.init_model_param import InitParam from federatedml.param.predict_param import PredictParam from federatedml.util import consts class HeteroSSHELRParam(LogisticParam): """ Parameters used for Hetero SSHE Logistic Regression Parameters ---------- penalty : str, 'L1', 'L2' or None. default: 'L2' Penalty method used in LR. If it is not None, weights are required to be reconstructed every iteration. tol : float, default: 1e-4 The tolerance of convergence alpha : float, default: 1.0 Regularization strength coefficient. optimizer : str, 'sgd', 'rmsprop', 'adam', 'nesterov_momentum_sgd', or 'adagrad', default: 'sgd' Optimizer batch_size : int, default: -1 Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. learning_rate : float, default: 0.01 Learning rate max_iter : int, default: 100 The maximum iteration for training. early_stop : str, 'diff', 'weight_diff' or 'abs', default: 'diff' Method used to judge converge or not. a) diff: Use difference of loss between two iterations to judge whether converge. b) weight_diff: Use difference between weights of two consecutive iterations c) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < eps, it is converged. decay: int or float, default: 1 Decay rate for learning rate. learning rate will follow the following decay schedule. lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t) where t is the iter number. decay_sqrt: Bool, default: True lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t) encrypt_param: EncryptParam object, default: default EncryptParam object encrypt param predict_param: PredictParam object, default: default PredictParam object predict param cv_param: CrossValidationParam object, default: default CrossValidationParam object cv param multi_class: str, 'ovr', default: 'ovr' If it is a multi_class task, indicate what strategy to use. Currently, support 'ovr' short for one_vs_rest only. reveal_strategy: str, "respectively", "encrypted_reveal_in_host", default: "respectively" "respectively": Means guest and host can reveal their own part of weights only. "encrypted_reveal_in_host": Means the host's weights are revealed to it in encrypted form, while the guest's weights are revealed in plaintext. reveal_every_iter: bool, default: False Whether to reconstruct model weights every iteration. If so, regularization is available, and performance is better as well since the algorithm process is simplified.
""" def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='sgd', batch_size=-1, learning_rate=0.01, init_param=InitParam(), max_iter=100, early_stop='diff', encrypt_param=EncryptParam(), predict_param=PredictParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, multi_class='ovr', use_mix_rand=True, reveal_strategy="respectively", reveal_every_iter=False, callback_param=CallbackParam(), encrypted_mode_calculator_param=EncryptedModeCalculatorParam() ): super(HeteroSSHELRParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer, batch_size=batch_size, learning_rate=learning_rate, init_param=init_param, max_iter=max_iter, early_stop=early_stop, predict_param=predict_param, cv_param=cv_param, decay=decay, decay_sqrt=decay_sqrt, multi_class=multi_class, encrypt_param=encrypt_param, callback_param=callback_param) self.use_mix_rand = use_mix_rand self.reveal_strategy = reveal_strategy self.reveal_every_iter = reveal_every_iter self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param) def check(self): descr = "logistic_param's" super(HeteroSSHELRParam, self).check() self.check_boolean(self.reveal_every_iter, descr) if self.penalty is None: pass elif type(self.penalty).__name__ != "str": raise ValueError( "logistic_param's penalty {} not supported, should be str type".format(self.penalty)) else: self.penalty = self.penalty.upper() """ if self.penalty not in [consts.L1_PENALTY, consts.L2_PENALTY, consts.NONE.upper()]: raise ValueError( "logistic_param's penalty not supported, penalty should be 'L1', 'L2' or 'NONE'") """ if not self.reveal_every_iter: if self.penalty not in [consts.L2_PENALTY, consts.NONE.upper()]: raise ValueError( f"penalty should be 'L2' or 'none', when reveal_every_iter is False" ) if type(self.optimizer).__name__ != "str": raise ValueError( "logistic_param's optimizer {} not supported, should be str type".format(self.optimizer)) else: self.optimizer = self.optimizer.lower() if self.reveal_every_iter: if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad', 'nesterov_momentum_sgd']: raise ValueError( "When reveal_every_iter is True, " "sshe logistic_param's optimizer not supported, optimizer should be" " 'sgd', 'rmsprop', 'adam', 'nesterov_momentum_sgd', or 'adagrad'") else: if self.optimizer not in ['sgd', 'nesterov_momentum_sgd']: raise ValueError("When reveal_every_iter is False, " "sshe logistic_param's optimizer not supported, optimizer should be" " 'sgd', 'nesterov_momentum_sgd'") if self.encrypt_param.method not in [consts.PAILLIER, None]: raise ValueError( "logistic_param's encrypted method support 'Paillier' or None only") if self.callback_param.validation_freqs is not None: if self.reveal_every_iter is False: raise ValueError(f"When reveal_every_iter is False, validation every iter" f" is not supported.") self.reveal_strategy = self.check_and_change_lower(self.reveal_strategy, ["respectively", "encrypted_reveal_in_host"], f"{descr} reveal_strategy") if self.reveal_strategy == "encrypted_reveal_in_host" and self.reveal_every_iter: raise PermissionError("reveal strategy: encrypted_reveal_in_host mode is not allow to reveal every iter.") self.encrypted_mode_calculator_param.check() return True
8,460
52.89172
135
py
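A hedged sketch of the HeteroSSHELRParam class above; note from its checks that optimizers other than sgd/nesterov_momentum_sgd require reveal_every_iter=True.

from federatedml.param.hetero_sshe_lr_param import HeteroSSHELRParam

param = HeteroSSHELRParam(penalty="L2", optimizer="rmsprop", reveal_every_iter=True)
param.check()  # would raise if optimizer were 'rmsprop' with reveal_every_iter=False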
FATE
FATE-master/python/federatedml/param/hetero_nn_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from types import SimpleNamespace from federatedml.param.base_param import BaseParam from federatedml.param.base_param import deprecated_param from federatedml.param.callback_param import CallbackParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.predict_param import PredictParam from federatedml.util import consts class DatasetParam(BaseParam): def __init__(self, dataset_name=None, **kwargs): super(DatasetParam, self).__init__() self.dataset_name = dataset_name self.param = kwargs def check(self): if self.dataset_name is not None: self.check_string(self.dataset_name, 'dataset_name') def to_dict(self): ret = {'dataset_name': self.dataset_name, 'param': self.param} return ret class SelectorParam(object): """ Parameters ---------- method: None or str back propagation select method, accepts "relative" only, default: None selective_size: int deque size to use, store the most recent selective_size historical loss, default: 1024 beta: int sample whose selective probability >= power(np.random, beta) will be selected min_prob: Numeric selective probability is max(min_prob, rank_rate) """ def __init__(self, method=None, beta=1, selective_size=consts.SELECTIVE_SIZE, min_prob=0, random_state=None): self.method = method self.selective_size = selective_size self.beta = beta self.min_prob = min_prob self.random_state = random_state def check(self): if self.method is not None and self.method not in ["relative"]: raise ValueError('selective method should be None or "relative"') if not isinstance(self.selective_size, int) or self.selective_size <= 0: raise ValueError("selective size should be a positive integer") if not isinstance(self.beta, int): raise ValueError("beta should be integer") if not isinstance(self.min_prob, (float, int)): raise ValueError("min_prob should be numeric") class CoAEConfuserParam(BaseParam): """ A label protect mechanism proposed in paper: "Batch Label Inference and Replacement Attacks in Black-Boxed Vertical Federated Learning" paper link: https://arxiv.org/abs/2112.05409 Convert true labels to fake soft labels by using an auto-encoder. Args: enable: boolean run CoAE or not epoch: None or int auto-encoder training epochs lr: float auto-encoder learning rate lambda1: float parameter to control the difference between true labels and fake soft labels. The larger the parameter, the more attention the autoencoder gives to making true labels and fake soft labels different.
lambda2: float parameter to control entropy loss, see original paper for details verbose: boolean print loss log while training auto encoder """ def __init__(self, enable=False, epoch=50, lr=0.001, lambda1=1.0, lambda2=2.0, verbose=False): super(CoAEConfuserParam, self).__init__() self.enable = enable self.epoch = epoch self.lr = lr self.lambda1 = lambda1 self.lambda2 = lambda2 self.verbose = verbose def check(self): self.check_boolean(self.enable, 'enable') if not isinstance(self.epoch, int) or self.epoch <= 0: raise ValueError("epoch should be a positive integer") if not isinstance(self.lr, float): raise ValueError('lr should be a float number') if not isinstance(self.lambda1, float): raise ValueError('lambda1 should be a float number') if not isinstance(self.lambda2, float): raise ValueError('lambda2 should be a float number') self.check_boolean(self.verbose, 'verbose') @deprecated_param("validation_freqs", "early_stopping_rounds", "metrics", "use_first_metric_only") class HeteroNNParam(BaseParam): """ Parameters used for Hetero Neural Network. Parameters ---------- task_type: str, task type of hetero nn model, one of 'classification', 'regression'. bottom_nn_define: a dict represents the structure of bottom neural network. interactive_layer_define: a dict represents the structure of interactive layer. interactive_layer_lr: float, the learning rate of interactive layer. top_nn_define: a dict represents the structure of top neural network. optimizer: optimizer method, accept following types: 1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD" 2. a dict, with a required key-value pair keyed by "optimizer", with optional key-value pairs such as learning rate. defaults to "SGD". loss: str, a string to define loss function used epochs: int, the maximum iteration for aggregation in training. batch_size : int, batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. defaults to -1. early_stop : str, accept 'diff' only in this version, default: 'diff' Method used to judge converge or not. a) diff: Use difference of loss between two iterations to judge whether converge. floating_point_precision: None or integer, if not None, means use floating_point_precision-bit to speed up calculation, e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide the result by 2**floating_point_precision in the end. 
callback_param: CallbackParam object """ def __init__(self, task_type='classification', bottom_nn_define=None, top_nn_define=None, interactive_layer_define=None, interactive_layer_lr=0.9, config_type='pytorch', optimizer='SGD', loss=None, epochs=100, batch_size=-1, early_stop="diff", tol=1e-5, seed=100, encrypt_param=EncryptParam(), encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), predict_param=PredictParam(), cv_param=CrossValidationParam(), validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=True, selector_param=SelectorParam(), floating_point_precision=23, callback_param=CallbackParam(), coae_param=CoAEConfuserParam(), dataset=DatasetParam() ): super(HeteroNNParam, self).__init__() self.task_type = task_type self.bottom_nn_define = bottom_nn_define self.interactive_layer_define = interactive_layer_define self.interactive_layer_lr = interactive_layer_lr self.top_nn_define = top_nn_define self.batch_size = batch_size self.epochs = epochs self.early_stop = early_stop self.tol = tol self.optimizer = optimizer self.loss = loss self.validation_freqs = validation_freqs self.early_stopping_rounds = early_stopping_rounds self.metrics = metrics or [] self.use_first_metric_only = use_first_metric_only self.encrypt_param = copy.deepcopy(encrypt_param) self.encrypted_model_calculator_param = encrypted_mode_calculator_param self.predict_param = copy.deepcopy(predict_param) self.cv_param = copy.deepcopy(cv_param) self.selector_param = selector_param self.floating_point_precision = floating_point_precision self.callback_param = copy.deepcopy(callback_param) self.coae_param = coae_param self.dataset = dataset self.seed = seed self.config_type = 'pytorch' # pytorch only def check(self): assert isinstance(self.dataset, DatasetParam), 'dataset must be a DatasetParam()' self.dataset.check() self.check_positive_integer(self.seed, 'seed') if self.task_type not in ["classification", "regression"]: raise ValueError("task_type should be classification or regression") if not isinstance(self.tol, (int, float)): raise ValueError("tol should be numeric") if not isinstance(self.epochs, int) or self.epochs <= 0: raise ValueError("epochs should be a positive integer") if self.bottom_nn_define and not isinstance(self.bottom_nn_define, dict): raise ValueError("bottom_nn_define should be a dict defining the structure of neural network") if self.top_nn_define and not isinstance(self.top_nn_define, dict): raise ValueError("top_nn_define should be a dict defining the structure of neural network") if self.interactive_layer_define is not None and not isinstance(self.interactive_layer_define, dict): raise ValueError( "the interactive_layer_define should be a dict defining the structure of interactive layer") if self.batch_size != -1: if not isinstance(self.batch_size, int) \ or self.batch_size < consts.MIN_BATCH_SIZE: raise ValueError( " {} not supported, should be larger than 10 or -1 to represent all data".format(self.batch_size)) if self.early_stop != "diff": raise ValueError("early stop should be diff in this version") if self.metrics is not None and not isinstance(self.metrics, list): raise ValueError("metrics should be a list") if self.floating_point_precision is not None and \ (not isinstance(self.floating_point_precision, int) or self.floating_point_precision < 0 or self.floating_point_precision > 63): raise ValueError("floating point precision should be null or an integer between 0 and 63") self.encrypt_param.check() self.encrypted_model_calculator_param.check()
self.predict_param.check() self.selector_param.check() self.coae_param.check() descr = "hetero nn param's " for p in ["early_stopping_rounds", "validation_freqs", "use_first_metric_only"]: if self._deprecated_params_set.get(p): if "callback_param" in self.get_user_feeded(): raise ValueError(f"{p} and callback param should not be set simultaneously," f"{self._deprecated_params_set}, {self.get_user_feeded()}") else: self.callback_param.callbacks = ["PerformanceEvaluate"] break if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"): self.callback_param.validation_freqs = self.validation_freqs if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"): self.callback_param.early_stopping_rounds = self.early_stopping_rounds if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"): if self.metrics: self.callback_param.metrics = self.metrics if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"): self.callback_param.use_first_metric_only = self.use_first_metric_only
12,596
41.557432
139
py
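A hedged sketch of the HeteroNNParam class above; the nn-structure dicts are left as None for brevity, which its checks permit.

from federatedml.param.hetero_nn_param import HeteroNNParam

param = HeteroNNParam(task_type="classification", epochs=10, batch_size=-1)
param.check()  # validates task_type, epochs, batch_size and the nested sub-params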
FATE
FATE-master/python/federatedml/param/dataio_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class DataIOParam(BaseParam): """ Define dataio parameters that are used in federated ml. Parameters ---------- input_format : {'dense', 'sparse', 'tag'} please have a look at this tutorial at "DataIO" section of federatedml/util/README.md. Formally, dense input format data should be set to "dense", svm-light input format data should be set to "sparse", tag or tag:value input format data should be set to "tag". delimitor : str the delimitor of data input, default: ',' data_type : {'float64', 'float', 'int', 'int64', 'str', 'long'} the data type of data input exclusive_data_type : dict the key of dict is col_name, the value is data_type, used to specify special data types of some features. tag_with_value: bool use if input_format is 'tag', if tag_with_value is True, input column data format should be tag[delimitor]value, otherwise is tag only tag_value_delimitor: str use if input_format is 'tag' and 'tag_with_value' is True, delimitor of tag[delimitor]value column value. missing_fill : bool need to fill missing value or not, accepted only True/False, default: False default_value : None or object or list the value to replace missing value. if None, it will use default value defined in federatedml/feature/imputer.py, if single object, will fill missing value with this object, if list, its length should equal the feature dimension of the input data, meaning that if some column happens to have missing values, they will be replaced by the element in the identical position of this list. missing_fill_method : {None, 'min', 'max', 'mean', 'designated'} the method to replace missing value missing_impute: None or list element of list can be any type, or auto generated if value is None, defines which values are to be considered as missing outlier_replace: bool need to replace outlier value or not, accepted only True/False, default: False outlier_replace_method : {None, 'min', 'max', 'mean', 'designated'} the method to replace outlier value outlier_impute: None or list element of list can be any type, defines which values should be regarded as outliers, default: None outlier_replace_value : None or object or list the value to replace outlier. if None, it will use default value defined in federatedml/feature/imputer.py, if single object, will replace outlier with this object, if list, its length should equal the feature dimension of the input data, meaning that if some column happens to have outliers, they will be replaced by the element in the identical position of this list. with_label : bool True if input data consists of label, False otherwise. default: False label_name : str column_name of the column where label locates, only use in dense-inputformat. default: 'y' label_type : {'int', 'int64', 'float', 'float64', 'long', 'str'} use when with_label is True.
output_format : {'dense', 'sparse'} output format """ def __init__(self, input_format="dense", delimitor=',', data_type='float64', exclusive_data_type=None, tag_with_value=False, tag_value_delimitor=":", missing_fill=False, default_value=0, missing_fill_method=None, missing_impute=None, outlier_replace=False, outlier_replace_method=None, outlier_impute=None, outlier_replace_value=0, with_label=False, label_name='y', label_type='int', output_format='dense', need_run=True): self.input_format = input_format self.delimitor = delimitor self.data_type = data_type self.exclusive_data_type = exclusive_data_type self.tag_with_value = tag_with_value self.tag_value_delimitor = tag_value_delimitor self.missing_fill = missing_fill self.default_value = default_value self.missing_fill_method = missing_fill_method self.missing_impute = missing_impute self.outlier_replace = outlier_replace self.outlier_replace_method = outlier_replace_method self.outlier_impute = outlier_impute self.outlier_replace_value = outlier_replace_value self.with_label = with_label self.label_name = label_name self.label_type = label_type self.output_format = output_format self.need_run = need_run def check(self): descr = "dataio param's" self.input_format = self.check_and_change_lower(self.input_format, ["dense", "sparse", "tag"], descr) self.output_format = self.check_and_change_lower(self.output_format, ["dense", "sparse"], descr) self.data_type = self.check_and_change_lower(self.data_type, ["int", "int64", "float", "float64", "str", "long"], descr) if type(self.missing_fill).__name__ != 'bool': raise ValueError("dataio param's missing_fill {} not supported".format(self.missing_fill)) if self.missing_fill_method is not None: self.missing_fill_method = self.check_and_change_lower(self.missing_fill_method, ['min', 'max', 'mean', 'designated'], descr) if self.outlier_replace_method is not None: self.outlier_replace_method = self.check_and_change_lower(self.outlier_replace_method, ['min', 'max', 'mean', 'designated'], descr) if type(self.with_label).__name__ != 'bool': raise ValueError("dataio param's with_label {} not supported".format(self.with_label)) if self.with_label: if not isinstance(self.label_name, str): raise ValueError("dataio param's label_name {} should be str".format(self.label_name)) self.label_type = self.check_and_change_lower(self.label_type, ["int", "int64", "float", "float64", "str", "long"], descr) if self.exclusive_data_type is not None and not isinstance(self.exclusive_data_type, dict): raise ValueError("exclusive_data_type should be None or a dict") return True
7,738
48.929032
122
py
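A hedged sketch of the DataIOParam class above for dense labeled input; the label settings are illustrative.

from federatedml.param.dataio_param import DataIOParam

param = DataIOParam(input_format="dense", with_label=True, label_name="y", label_type="int")
param.check()  # lower-cases the format/type fields and validates the label settings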
FATE
FATE-master/python/federatedml/param/psi_param.py
from federatedml.param.base_param import BaseParam from federatedml.util import consts class PSIParam(BaseParam): def __init__(self, max_bin_num=20, need_run=True, dense_missing_val=None, binning_error=consts.DEFAULT_RELATIVE_ERROR): super(PSIParam, self).__init__() self.max_bin_num = max_bin_num self.need_run = need_run self.dense_missing_val = dense_missing_val self.binning_error = binning_error def check(self): assert isinstance(self.max_bin_num, int) and self.max_bin_num > 0, 'max bin must be an integer larger than 0' assert isinstance(self.need_run, bool) if self.dense_missing_val is not None: assert isinstance(self.dense_missing_val, str) or isinstance(self.dense_missing_val, int) or \ isinstance(self.dense_missing_val, float), \ 'missing value type {} not supported'.format(type(self.dense_missing_val)) self.check_decimal_float(self.binning_error, "psi's param")
1,028
40.16
117
py
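A hedged sketch of the PSIParam class above; the missing-value marker is illustrative.

from federatedml.param.psi_param import PSIParam

param = PSIParam(max_bin_num=20, dense_missing_val=0)
param.check()  # asserts a positive bin count and a str/int/float missing-value marker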
FATE
FATE-master/python/federatedml/param/onehot_encoder_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class OneHotEncoderParam(BaseParam): """ Parameters ---------- transform_col_indexes: list or int, default: -1 Specify which columns need to be calculated. -1 represents all columns. transform_col_names : list of string, default: [] Specify which columns need to be calculated. Each element in the list represents a column name in the header. need_run: bool, default True Indicate if this module needed to be run """ def __init__(self, transform_col_indexes=-1, transform_col_names=None, need_run=True): super(OneHotEncoderParam, self).__init__() if transform_col_names is None: transform_col_names = [] self.transform_col_indexes = transform_col_indexes self.transform_col_names = transform_col_names self.need_run = need_run def check(self): descr = "One-hot encoder param's" self.check_defined_type(self.transform_col_indexes, descr, ['list', 'int', 'NoneType']) self.check_defined_type(self.transform_col_names, descr, ['list', 'NoneType']) return True
1,816
36.854167
113
py
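A hedged sketch of the OneHotEncoderParam class above; the column indices are illustrative.

from federatedml.param.onehot_encoder_param import OneHotEncoderParam

param = OneHotEncoderParam(transform_col_indexes=[0, 3])  # encode columns 0 and 3
param.check()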
FATE
FATE-master/python/federatedml/param/logistic_regression_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.base_param import deprecated_param from federatedml.param.glm_param import LinearModelParam from federatedml.param.callback_param import CallbackParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.init_model_param import InitParam from federatedml.param.predict_param import PredictParam from federatedml.param.sqn_param import StochasticQuasiNewtonParam from federatedml.param.stepwise_param import StepwiseParam from federatedml.util import consts class LogisticParam(LinearModelParam): """ Parameters used for Logistic Regression both for Homo mode or Hetero mode. Parameters ---------- penalty : {'L2', 'L1' or None} Penalty method used in LR. Please note that, when using encrypted version in HomoLR, 'L1' is not supported. tol : float, default: 1e-4 The tolerance of convergence alpha : float, default: 1.0 Regularization strength coefficient. optimizer : {'rmsprop', 'sgd', 'adam', 'nesterov_momentum_sgd', 'adagrad'}, default: 'rmsprop' Optimize method. batch_strategy : str, {'full', 'random'}, default: "full" Strategy to generate batch data. a) full: use full data to generate batch_data, batch_nums every iteration is ceil(data_size / batch_size) b) random: select data randomly from full data, batch_num will be 1 every iteration. batch_size : int, default: -1 Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. shuffle : bool, default: True Work only in hetero logistic regression, batch data will be shuffle in every iteration. masked_rate: int, float: default: 5 Use masked data to enhance security of hetero logistic regression learning_rate : float, default: 0.01 Learning rate max_iter : int, default: 100 The maximum iteration for training. early_stop : {'diff', 'weight_diff', 'abs'}, default: 'diff' Method used to judge converge or not. a) diff: Use difference of loss between two iterations to judge whether converge. b) weight_diff: Use difference between weights of two consecutive iterations c) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < eps, it is converged. Please note that for hetero-lr multi-host situation, this parameter support "weight_diff" only. In homo-lr, weight_diff is not supported decay: int or float, default: 1 Decay rate for learning rate. learning rate will follow the following decay schedule. lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t) where t is the iter number. 
decay_sqrt: bool, default: True lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t) encrypt_param: EncryptParam object, default: default EncryptParam object encrypt param predict_param: PredictParam object, default: default PredictParam object predict param callback_param: CallbackParam object callback param cv_param: CrossValidationParam object, default: default CrossValidationParam object cv param multi_class: {'ovr'}, default: 'ovr' If it is a multi_class task, indicate what strategy to use. Currently, support 'ovr' short for one_vs_rest only. validation_freqs: int or list or tuple or set, or None, default None validation frequency during training. early_stopping_rounds: int, default: None Will stop training if one metric doesn’t improve in last early_stopping_round rounds metrics: list or None, default: None Indicate when executing evaluation during train process, which metrics will be used. If set as empty, default metrics for specific task type will be used. As for binary classification, default metrics are ['auc', 'ks'] use_first_metric_only: bool, default: False Indicate whether use the first metric only for early stopping judgement. floating_point_precision: None or integer if not None, use floating_point_precision-bit to speed up calculation, e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide the result by 2**floating_point_precision in the end. """ def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='rmsprop', batch_size=-1, shuffle=True, batch_strategy="full", masked_rate=5, learning_rate=0.01, init_param=InitParam(), max_iter=100, early_stop='diff', encrypt_param=EncryptParam(), predict_param=PredictParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, multi_class='ovr', validation_freqs=None, early_stopping_rounds=None, stepwise_param=StepwiseParam(), floating_point_precision=23, metrics=None, use_first_metric_only=False, callback_param=CallbackParam() ): super(LogisticParam, self).__init__() self.penalty = penalty self.tol = tol self.alpha = alpha self.optimizer = optimizer self.batch_size = batch_size self.learning_rate = learning_rate self.init_param = copy.deepcopy(init_param) self.max_iter = max_iter self.early_stop = early_stop self.encrypt_param = encrypt_param self.shuffle = shuffle self.batch_strategy = batch_strategy self.masked_rate = masked_rate self.predict_param = copy.deepcopy(predict_param) self.cv_param = copy.deepcopy(cv_param) self.decay = decay self.decay_sqrt = decay_sqrt self.multi_class = multi_class self.validation_freqs = validation_freqs self.stepwise_param = copy.deepcopy(stepwise_param) self.early_stopping_rounds = early_stopping_rounds self.metrics = metrics or [] self.use_first_metric_only = use_first_metric_only self.floating_point_precision = floating_point_precision self.callback_param = copy.deepcopy(callback_param) def check(self): descr = "logistic_param's" super(LogisticParam, self).check() self.predict_param.check() if self.encrypt_param.method not in [consts.PAILLIER, consts.PAILLIER_IPCL, None]: raise ValueError( "logistic_param's encrypted method support 'Paillier' or None only") self.multi_class = self.check_and_change_lower( self.multi_class, ["ovr"], f"{descr}") if not isinstance(self.masked_rate, (float, int)) or self.masked_rate < 0: raise ValueError( "masked rate should be non-negative numeric number") if not isinstance(self.batch_strategy, str) or self.batch_strategy.lower() not in ["full", "random"]: raise ValueError("batch 
strategy should be full or random") self.batch_strategy = self.batch_strategy.lower() if not isinstance(self.shuffle, bool): raise ValueError("shuffle should be boolean type") return True class HomoLogisticParam(LogisticParam): """ Parameters ---------- aggregate_iters : int, default: 1 Indicate how many iterations are aggregated once. """ def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='rmsprop', batch_size=-1, learning_rate=0.01, init_param=InitParam(), max_iter=100, early_stop='diff', predict_param=PredictParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, aggregate_iters=1, multi_class='ovr', validation_freqs=None, metrics=['auc', 'ks'], callback_param=CallbackParam() ): super(HomoLogisticParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer, batch_size=batch_size, learning_rate=learning_rate, init_param=init_param, max_iter=max_iter, early_stop=early_stop, predict_param=predict_param, cv_param=cv_param, multi_class=multi_class, validation_freqs=validation_freqs, decay=decay, decay_sqrt=decay_sqrt, metrics=metrics, callback_param=callback_param) self.aggregate_iters = aggregate_iters def check(self): super().check() if not isinstance(self.aggregate_iters, int): raise ValueError( "logistic_param's aggregate_iters {} not supported, should be int type".format( self.aggregate_iters)) return True class HeteroLogisticParam(LogisticParam): def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='rmsprop', batch_size=-1, shuffle=True, batch_strategy="full", masked_rate=5, learning_rate=0.01, init_param=InitParam(), max_iter=100, early_stop='diff', encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), predict_param=PredictParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, sqn_param=StochasticQuasiNewtonParam(), multi_class='ovr', validation_freqs=None, early_stopping_rounds=None, metrics=['auc', 'ks'], floating_point_precision=23, encrypt_param=EncryptParam(), use_first_metric_only=False, stepwise_param=StepwiseParam(), callback_param=CallbackParam() ): super(HeteroLogisticParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer, batch_size=batch_size, shuffle=shuffle, batch_strategy=batch_strategy, masked_rate=masked_rate, learning_rate=learning_rate, init_param=init_param, max_iter=max_iter, early_stop=early_stop, predict_param=predict_param, cv_param=cv_param, decay=decay, decay_sqrt=decay_sqrt, multi_class=multi_class, validation_freqs=validation_freqs, early_stopping_rounds=early_stopping_rounds, metrics=metrics, floating_point_precision=floating_point_precision, encrypt_param=encrypt_param, use_first_metric_only=use_first_metric_only, stepwise_param=stepwise_param, callback_param=callback_param) self.encrypted_mode_calculator_param = copy.deepcopy( encrypted_mode_calculator_param) self.sqn_param = copy.deepcopy(sqn_param) def check(self): super().check() self.encrypted_mode_calculator_param.check() self.sqn_param.check() return True
12,665
50.909836
120
py
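A hedged sketch of the homo variant defined above; the hyperparameter values are illustrative.

from federatedml.param.logistic_regression_param import HomoLogisticParam

param = HomoLogisticParam(penalty="L2", optimizer="rmsprop", max_iter=50, aggregate_iters=2)
param.check()  # runs the shared LogisticParam checks plus the aggregate_iters type check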
FATE
FATE-master/python/federatedml/param/data_transform_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class DataTransformParam(BaseParam): """ Define data transform parameters that are used in federated ml. Parameters ---------- input_format : {'dense', 'sparse', 'tag'} please have a look at this tutorial at "DataTransform" section of federatedml/util/README.md. Formally, dense input format data should be set to "dense", svm-light input format data should be set to "sparse", tag or tag:value input format data should be set to "tag". Note: in fate's version >= 1.9.0, this param can be used in uploading/binding data's meta delimitor : str the delimitor of data input, default: ',' data_type : {'float64','float','int','int64','str','long'} the data type of data input exclusive_data_type : dict the key of dict is col_name, the value is data_type, used to specify special data types of some features. tag_with_value: bool use if input_format is 'tag', if tag_with_value is True, input column data format should be tag[delimitor]value, otherwise is tag only tag_value_delimitor: str use if input_format is 'tag' and 'tag_with_value' is True, delimitor of tag[delimitor]value column value. missing_fill : bool need to fill missing value or not, accepted only True/False, default: False default_value : None or object or list the value to replace missing value. if None, it will use default value defined in federatedml/feature/imputer.py, if single object, will fill missing value with this object, if list, its length should equal the feature dimension of the input data, meaning that if some column happens to have missing values, they will be replaced by the element in the identical position of this list. missing_fill_method: None or str the method to replace missing value, should be one of [None, 'min', 'max', 'mean', 'designated'] missing_impute: None or list element of list can be any type, or auto generated if value is None, defines which values are to be considered as missing outlier_replace: bool need to replace outlier value or not, accepted only True/False, default: False outlier_replace_method: None or str the method to replace outlier value, should be one of [None, 'min', 'max', 'mean', 'designated'] outlier_impute: None or list element of list can be any type, defines which values should be regarded as outliers outlier_replace_value: None or object or list the value to replace outlier. if None, it will use default value defined in federatedml/feature/imputer.py, if single object, will replace outlier with this object, if list, its length should equal the feature dimension of the input data, meaning that if some column happens to have outliers, they will be replaced by the element in the identical position of this list. with_label : bool True if input data consists of label, False otherwise.
        default: False
        Note: in fate's version >= 1.9.0, this param can be used in uploading/binding data's meta

    label_name : str
        column name of the column where the label locates, only used in dense input format.
        default: 'y'

    label_type : {'int','int64','float','float64','long','str'}
        used when with_label is True

    output_format : {'dense', 'sparse'}
        output format

    with_match_id: bool
        True if dataset has match_id, default: False
        Note: in fate's version >= 1.9.0, this param can be used in uploading/binding data's meta

    match_id_name: str
        Valid if input_format is "dense", and multiple columns are considered as match_ids;
        the name of the match_id to be used in the current job
        Note: in fate's version >= 1.9.0, this param can be used in uploading/binding data's meta

    match_id_index: int
        Valid if input_format is "tag" or "sparse", and multiple columns are considered as match_ids;
        the index of the match_id, default: 0
        This param works only when data meta has been set with uploading/binding.

    """

    def __init__(self, input_format="dense", delimitor=',', data_type='float64',
                 exclusive_data_type=None,
                 tag_with_value=False, tag_value_delimitor=":",
                 missing_fill=False, default_value=0, missing_fill_method=None,
                 missing_impute=None, outlier_replace=False, outlier_replace_method=None,
                 outlier_impute=None, outlier_replace_value=0,
                 with_label=False, label_name='y',
                 label_type='int', output_format='dense', need_run=True,
                 with_match_id=False, match_id_name='', match_id_index=0):
        self.input_format = input_format
        self.delimitor = delimitor
        self.data_type = data_type
        self.exclusive_data_type = exclusive_data_type
        self.tag_with_value = tag_with_value
        self.tag_value_delimitor = tag_value_delimitor
        self.missing_fill = missing_fill
        self.default_value = default_value
        self.missing_fill_method = missing_fill_method
        self.missing_impute = missing_impute
        self.outlier_replace = outlier_replace
        self.outlier_replace_method = outlier_replace_method
        self.outlier_impute = outlier_impute
        self.outlier_replace_value = outlier_replace_value
        self.with_label = with_label
        self.label_name = label_name
        self.label_type = label_type
        self.output_format = output_format
        self.need_run = need_run
        self.with_match_id = with_match_id
        self.match_id_name = match_id_name
        self.match_id_index = match_id_index

    def check(self):
        descr = "data_transform param's"
        self.input_format = self.check_and_change_lower(self.input_format,
                                                        ["dense", "sparse", "tag"],
                                                        descr)
        self.output_format = self.check_and_change_lower(self.output_format,
                                                         ["dense", "sparse"],
                                                         descr)
        self.data_type = self.check_and_change_lower(self.data_type,
                                                     ["int", "int64", "float", "float64", "str", "long"],
                                                     descr)
        if type(self.missing_fill).__name__ != 'bool':
            raise ValueError("data_transform param's missing_fill {} not supported".format(self.missing_fill))

        if self.missing_fill_method is not None:
            self.missing_fill_method = self.check_and_change_lower(self.missing_fill_method,
                                                                   ['min', 'max', 'mean', 'designated'],
                                                                   descr)

        if self.outlier_replace_method is not None:
            self.outlier_replace_method = self.check_and_change_lower(self.outlier_replace_method,
                                                                      ['min', 'max', 'mean', 'designated'],
                                                                      descr)

        if type(self.with_label).__name__ != 'bool':
            raise ValueError("data_transform param's with_label {} not supported".format(self.with_label))

        if self.with_label:
            if not isinstance(self.label_name, str):
                raise ValueError("data transform param's label_name {} should be str".format(self.label_name))
            self.label_type = self.check_and_change_lower(self.label_type,
                                                          ["int", "int64", "float", "float64", "str", "long"],
                                                          descr)
        if self.exclusive_data_type is not None and not isinstance(self.exclusive_data_type, dict):
            raise ValueError("exclusive_data_type should be None or a dict")

        if not isinstance(self.with_match_id, bool):
            raise ValueError("with_match_id should be a boolean variable, but {} found".format(self.with_match_id))

        if not isinstance(self.match_id_index, int) or self.match_id_index < 0:
            raise ValueError("match_id_index should be a non-negative integer")

        if self.match_id_name is not None and not isinstance(self.match_id_name, str):
            raise ValueError("match_id_name should be str")

        return True
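A minimal usage sketch for the class above (not part of the original file; the module path is assumed from the file path shown in this record):

from federatedml.param.data_transform_param import DataTransformParam

# tag:value input with designated missing-value filling.
dt_param = DataTransformParam(input_format="tag", tag_with_value=True,
                              tag_value_delimitor=":",
                              missing_fill=True, missing_fill_method="designated",
                              default_value=0, with_label=False)
dt_param.check()   # lower-cases and validates input_format, output_format, data_type, ...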
9,351
50.384615
122
py
FATE
FATE-master/python/federatedml/param/sqn_param.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from federatedml.param.base_param import BaseParam


class StochasticQuasiNewtonParam(BaseParam):
    """
    Parameters used for the stochastic quasi-Newton method.

    Parameters
    ----------
    update_interval_L : int, default: 3
        Set how many iterations to run between two updates of the Hessian matrix

    memory_M : int, default: 5
        Stack size of curvature information, i.e. y_k and s_k in the paper.

    sample_size : int, default: 5000
        Sample size of the data that is used to update the Hessian matrix

    random_seed : int or None, default: None
        Seed used when sampling data; if set, it should be a positive integer
    """

    def __init__(self, update_interval_L=3, memory_M=5, sample_size=5000, random_seed=None):
        super().__init__()
        self.update_interval_L = update_interval_L
        self.memory_M = memory_M
        self.sample_size = sample_size
        self.random_seed = random_seed

    def check(self):
        descr = "hetero sqn param's"
        self.check_positive_integer(self.update_interval_L, descr)
        self.check_positive_integer(self.memory_M, descr)
        self.check_positive_integer(self.sample_size, descr)
        if self.random_seed is not None:
            self.check_positive_integer(self.random_seed, descr)
        return True
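A minimal usage sketch (not part of the original file; module path assumed from this record's file path):

from federatedml.param.sqn_param import StochasticQuasiNewtonParam

sqn = StochasticQuasiNewtonParam(update_interval_L=3, memory_M=5,
                                 sample_size=5000, random_seed=42)
sqn.check()   # all values are positive integers, so this passes

# StochasticQuasiNewtonParam(memory_M=0).check() would be rejected by
# check_positive_integer, since memory_M must be a positive integer.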
1,815
33.923077
92
py
FATE
FATE-master/python/federatedml/param/encrypt_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import consts, LOGGER class EncryptParam(BaseParam): """ Define encryption method that used in federated ml. Parameters ---------- method : {'Paillier'} If method is 'Paillier', Paillier encryption will be used for federated ml. To use non-encryption version in HomoLR, set this to None. For detail of Paillier encryption, please check out the paper mentioned in README file. key_length : int, default: 1024 Used to specify the length of key in this encryption method. """ def __init__(self, method=consts.PAILLIER, key_length=1024): super(EncryptParam, self).__init__() self.method = method self.key_length = key_length def check(self): if self.method is not None and type(self.method).__name__ != "str": raise ValueError( "encrypt_param's method {} not supported, should be str type".format( self.method)) elif self.method is None: pass else: user_input = self.method.lower() if user_input == "paillier": self.method = consts.PAILLIER elif user_input == consts.ITERATIVEAFFINE.lower() or user_input == consts.RANDOM_ITERATIVEAFFINE: LOGGER.warning('Iterative Affine and Random Iterative Affine are not supported in version>=1.7.1 ' 'due to safety concerns, encrypt method will be reset to Paillier') self.method = consts.PAILLIER elif user_input == "ipcl": self.method = consts.PAILLIER_IPCL else: raise ValueError( "encrypt_param's method {} not supported".format(user_input)) if type(self.key_length).__name__ != "int": raise ValueError( "encrypt_param's key_length {} not supported, should be int type".format(self.key_length)) elif self.key_length <= 0: raise ValueError( "encrypt_param's key_length must be greater or equal to 1") LOGGER.debug("Finish encrypt parameter check!") return True
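A small sketch of the normalization behavior implemented by check() above (not part of the original file; module path assumed):

from federatedml.param.encrypt_param import EncryptParam

enc = EncryptParam(method="Paillier", key_length=2048)
enc.check()       # "Paillier" is lower-cased and mapped to consts.PAILLIER

legacy = EncryptParam(method="IterativeAffine")
legacy.check()    # logs a warning and resets the method to Paillier, per the code above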
2,897
38.69863
114
py
FATE
FATE-master/python/federatedml/param/feature_selection_param.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy

from federatedml.param.base_param import BaseParam, deprecated_param
from federatedml.util import consts


class UniqueValueParam(BaseParam):
    """
    Judge columns by the difference between their max value and min value.

    Parameters
    ----------
    eps : float, default: 1e-5
        A column will be filtered if its max-min difference is smaller than eps.
    """

    def __init__(self, eps=1e-5):
        self.eps = eps

    def check(self):
        descr = "Unique value param's"
        self.check_positive_number(self.eps, descr)
        return True


class IVValueSelectionParam(BaseParam):
    """
    Use information values to select features.

    Parameters
    ----------
    value_threshold: float, default: 0.0
        Used if the iv_value_thres method is used in feature selection.

    host_thresholds: List of float or None, default: None
        Set threshold for different host. If None, use same threshold as guest. If provided,
        the order should map to the host id setting.
    """

    def __init__(self, value_threshold=0.0, host_thresholds=None, local_only=False):
        super().__init__()
        self.value_threshold = value_threshold
        self.host_thresholds = host_thresholds
        self.local_only = local_only

    def check(self):
        if not isinstance(self.value_threshold, (float, int)):
            raise ValueError("IV selection param's value_threshold should be float or int")

        if self.host_thresholds is not None:
            if not isinstance(self.host_thresholds, list):
                raise ValueError("IV selection param's host_threshold should be list or None")

        if not isinstance(self.local_only, bool):
            raise ValueError("IV selection param's local_only should be bool")

        return True


class IVPercentileSelectionParam(BaseParam):
    """
    Use information values to select features.

    Parameters
    ----------
    percentile_threshold: float
        0 <= percentile_threshold <= 1.0, default: 1.0,
        Percentile threshold for the iv_percentile method
    """

    def __init__(self, percentile_threshold=1.0, local_only=False):
        super().__init__()
        self.percentile_threshold = percentile_threshold
        self.local_only = local_only

    def check(self):
        descr = "IV selection param's"
        # run the range check unless the threshold is exactly 0 or 1
        if self.percentile_threshold != 0 and self.percentile_threshold != 1:
            self.check_decimal_float(self.percentile_threshold, descr)
        self.check_boolean(self.local_only, descr)
        return True


class IVTopKParam(BaseParam):
    """
    Use information values to select features.

    Parameters
    ----------
    k: int
        should be greater than 0, default: 10,
        number of top-iv features to keep for the iv_top_k method
    """

    def __init__(self, k=10, local_only=False):
        super().__init__()
        self.k = k
        self.local_only = local_only

    def check(self):
        descr = "IV selection param's"
        self.check_positive_integer(self.k, descr)
        self.check_boolean(self.local_only, descr)
        return True


class VarianceOfCoeSelectionParam(BaseParam):
    """
    Use coefficient of variation to select features. When judging, the absolute value
    will be used.

    Parameters
    ----------
    value_threshold: float, default: 1.0
        Used if the coefficient_of_variation_value_thres method is used in feature selection.
        Filter those columns whose coefficient of variance is smaller than the threshold.

    """

    def __init__(self, value_threshold=1.0):
        self.value_threshold = value_threshold

    def check(self):
        descr = "Coff of Variances param's"
        self.check_positive_number(self.value_threshold, descr)
        return True


class OutlierColsSelectionParam(BaseParam):
    """
    Given a percentile and a threshold, judge whether the value at this quantile point
    is larger than the threshold, and filter out those columns where it is.

    Parameters
    ----------
    percentile: float, [0., 1.] default: 1.0
        The percentile point to compare.

    upper_threshold: float, default: 1.0
        The upper threshold that the quantile value is compared against.
    """

    def __init__(self, percentile=1.0, upper_threshold=1.0):
        self.percentile = percentile
        self.upper_threshold = upper_threshold

    def check(self):
        descr = "Outlier Filter param's"
        self.check_decimal_float(self.percentile, descr)
        self.check_defined_type(self.upper_threshold, descr, ['float', 'int'])
        return True


class CommonFilterParam(BaseParam):
    """
    All of the following parameters can be set with a single value or a list of values.
    Setting a single value means using only one metric to filter, while a list represents
    using multiple metrics. Please note that if some of the following values have been set
    as lists, all of them should have the same length; otherwise, an error will be raised.
    And if any parameter is of list type, the metrics should be of list type as well.

    Parameters
    ----------
    metrics: str or list, default: depends on the specific filter
        Indicate which metrics are used in this filter

    filter_type: str, default: threshold
        Should be one of "threshold", "top_k" or "top_percentile"

    take_high: bool, default: True
        When filtering, whether to take the highest values or not.

    threshold: float or int, default: 1
        If filter_type is "threshold", this is the threshold value.
        If it is "top_k", this is the k value.
        If it is "top_percentile", this is the percentile threshold.

    host_thresholds: List of float or List of List of float or None, default: None
        Set threshold for different host. If None, use same threshold as guest. If provided,
        the order should map to the host id setting.
    select_federated: bool, default: True
        Whether to perform federated selection together with other parties,
        or selection based only on local variables
    """

    def __init__(self, metrics, filter_type='threshold', take_high=True, threshold=1,
                 host_thresholds=None, select_federated=True):
        super().__init__()
        self.metrics = metrics
        self.filter_type = filter_type
        self.take_high = take_high
        self.threshold = threshold
        self.host_thresholds = host_thresholds
        self.select_federated = select_federated

    def check(self):
        self._convert_to_list(param_names=["filter_type", "take_high",
                                           "threshold", "select_federated"])

        for v in self.filter_type:
            if v not in ["threshold", "top_k", "top_percentile"]:
                raise ValueError('filter_type should be one of '
                                 '"threshold", "top_k", "top_percentile"')

        descr = "hetero feature selection param's"
        for v in self.take_high:
            self.check_boolean(v, descr)

        for idx, v in enumerate(self.threshold):
            if self.filter_type[idx] == "threshold":
                if not isinstance(v, (float, int)):
                    raise ValueError(descr + f"{v} should be a float or int")
            elif self.filter_type[idx] == 'top_k':
                self.check_positive_integer(v, descr)
            else:
                if not (v == 0 or v == 1):
                    self.check_decimal_float(v, descr)

        if self.host_thresholds is not None:
            if not isinstance(self.host_thresholds, list):
                self.host_thresholds = [self.host_thresholds]
                # raise ValueError("selection param's host_thresholds should be list or None")

        assert isinstance(self.select_federated, list)
        for v in self.select_federated:
            self.check_boolean(v, descr)

    def _convert_to_list(self, param_names):
        if not isinstance(self.metrics, list):
            for value_name in param_names:
                v = getattr(self, value_name)
                if isinstance(v, list):
                    raise ValueError(f"{value_name}: {v} should not be a list when "
                                     f"metrics: {self.metrics} is not a list")
                setattr(self, value_name, [v])
            setattr(self, "metrics", [self.metrics])
        else:
            expected_length = len(self.metrics)
            for value_name in param_names:
                v = getattr(self, value_name)
                if isinstance(v, list):
                    if len(v) != expected_length:
                        raise ValueError(f"The parameter {v} should have the same length "
                                         f"as metrics")
                else:
                    new_v = [v] * expected_length
                    setattr(self, value_name, new_v)


class IVFilterParam(CommonFilterParam):
    """
    Parameters
    ----------
    mul_class_merge_type: str or list, default: "average"
        Indicate how to merge multi-class iv results. Supports "average", "min" and "max".
    """

    def __init__(self, filter_type='threshold', threshold=1,
                 host_thresholds=None, select_federated=True, mul_class_merge_type="average"):
        super().__init__(metrics='iv', filter_type=filter_type, take_high=True, threshold=threshold,
                         host_thresholds=host_thresholds, select_federated=select_federated)
        self.mul_class_merge_type = mul_class_merge_type

    def check(self):
        super(IVFilterParam, self).check()
        self._convert_to_list(param_names=["mul_class_merge_type"])


class CorrelationFilterParam(BaseParam):
    """
    This filter follows these specific rules:
        1. Sort all the columns from high to low based on a specific metric, e.g. iv.
        2. Traverse each sorted column. If there exist other columns with which the absolute
           values of correlation are larger than the threshold, they will be filtered.

    Parameters
    ----------
    sort_metric: str, default: iv
        Specify which metric is used to sort features.
    threshold: float or int, default: 0.1
        Correlation threshold

    select_federated: bool, default: True
        Whether to perform federated selection together with other parties,
        or selection based only on local variables
    """

    def __init__(self, sort_metric='iv', threshold=0.1, select_federated=True):
        super().__init__()
        self.sort_metric = sort_metric
        self.threshold = threshold
        self.select_federated = select_federated

    def check(self):
        descr = "Correlation Filter param's"
        self.sort_metric = self.sort_metric.lower()
        support_metrics = ['iv']
        if self.sort_metric not in support_metrics:
            raise ValueError(f"sort_metric in Correlation Filter should be one of {support_metrics}")

        self.check_positive_number(self.threshold, descr)


class PercentageValueParam(BaseParam):
    """
    Filter the columns that have a value whose frequency exceeds a certain percentage.

    Parameters
    ----------
    upper_pct: float, [0.1, 1.], default: 1.0
        The upper percentage threshold for filtering; upper_pct should not be less than 0.1.
    """

    def __init__(self, upper_pct=1.0):
        super().__init__()
        self.upper_pct = upper_pct

    def check(self):
        descr = "Percentage Filter param's"
        if self.upper_pct not in [0, 1]:
            self.check_decimal_float(self.upper_pct, descr)
        if self.upper_pct < consts.PERCENTAGE_VALUE_LIMIT:
            raise ValueError(descr + f" {self.upper_pct} not supported,"
                                     f" should not be smaller than {consts.PERCENTAGE_VALUE_LIMIT}")
        return True


class ManuallyFilterParam(BaseParam):
    """
    Specify columns that need to be filtered. If a specified column exists, it will be
    filtered directly; otherwise, it is ignored.

    Both the filter_out and left parameters only work for this specific filter. For instance,
    if you set some columns as left in this filter but those columns are filtered by other
    filters, those columns will NOT be left in the final result.

    Please note that (left_col_indexes & left_col_names) cannot be used together with
    (filter_out_indexes & filter_out_names) simultaneously.

    Parameters
    ----------
    filter_out_indexes: list of int, default: None
        Specify columns' indexes to be filtered out
        Note that columns specified by `filter_out_indexes` and `filter_out_names` will be combined.

    filter_out_names : list of string, default: None
        Specify columns' names to be filtered out
        Note that columns specified by `filter_out_indexes` and `filter_out_names` will be combined.

    left_col_indexes: list of int, default: None
        Specify left_col_index
        Note that columns specified by `left_col_indexes` and `left_col_names` will be combined.

    left_col_names: list of string, default: None
        Specify left col names
        Note that columns specified by `left_col_indexes` and `left_col_names` will be combined.
""" def __init__(self, filter_out_indexes=None, filter_out_names=None, left_col_indexes=None, left_col_names=None): super().__init__() self.filter_out_indexes = filter_out_indexes self.filter_out_names = filter_out_names self.left_col_indexes = left_col_indexes self.left_col_names = left_col_names def check(self): descr = "Manually Filter param's" self.check_defined_type(self.filter_out_indexes, descr, ['list', 'NoneType']) self.check_defined_type(self.filter_out_names, descr, ['list', 'NoneType']) self.check_defined_type(self.left_col_indexes, descr, ['list', 'NoneType']) self.check_defined_type(self.left_col_names, descr, ['list', 'NoneType']) if (self.filter_out_indexes or self.filter_out_names) is not None and \ (self.left_col_names or self.left_col_indexes) is not None: raise ValueError("(left_col_indexes & left_col_names) cannot use with" " (filter_out_indexes & filter_out_names) simultaneously") return True deprecated_param_list = ["iv_value_param", "iv_percentile_param", "iv_top_k_param", "variance_coe_param", "unique_param", "outlier_param"] @deprecated_param(*deprecated_param_list) class FeatureSelectionParam(BaseParam): """ Define the feature selection parameters. Parameters ---------- select_col_indexes: list or int, default: -1 Specify which columns need to calculated. -1 represent for all columns. Note tha columns specified by `select_col_indexes` and `select_names` will be combined. select_names : list of string, default: [] Specify which columns need to calculated. Each element in the list represent for a column name in header. Note tha columns specified by `select_col_indexes` and `select_names` will be combined. filter_methods: list of ["manually", "iv_filter", "statistic_filter", "psi_filter", “hetero_sbt_filter", "homo_sbt_filter", "hetero_fast_sbt_filter", "percentage_value", "vif_filter", "correlation_filter"], default: ["manually"]. The following methods will be deprecated in future version: "unique_value", "iv_value_thres", "iv_percentile", "coefficient_of_variation_value_thres", "outlier_cols" Specify the filter methods used in feature selection. The orders of filter used is depended on this list. Please be notified that, if a percentile method is used after some certain filter method, the percentile represent for the ratio of rest features. e.g. If you have 10 features at the beginning. After first filter method, you have 8 rest. Then, you want top 80% highest iv feature. Here, we will choose floor(0.8 * 8) = 6 features instead of 8. unique_param: UniqueValueParam filter the columns if all values in this feature is the same iv_value_param: IVValueSelectionParam Use information value to filter columns. If this method is set, a float threshold need to be provided. Filter those columns whose iv is smaller than threshold. Will be deprecated in the future. iv_percentile_param: IVPercentileSelectionParam Use information value to filter columns. If this method is set, a float ratio threshold need to be provided. Pick floor(ratio * feature_num) features with higher iv. If multiple features around the threshold are same, all those columns will be keep. Will be deprecated in the future. variance_coe_param: VarianceOfCoeSelectionParam Use coefficient of variation to judge whether filtered or not. Will be deprecated in the future. outlier_param: OutlierColsSelectionParam Filter columns whose certain percentile value is larger than a threshold. Will be deprecated in the future. 
percentage_value_param: PercentageValueParam Filter the columns that have a value that exceeds a certain percentage. iv_param: IVFilterParam Setting how to filter base on iv. It support take high mode only. All of "threshold", "top_k" and "top_percentile" are accepted. Check more details in CommonFilterParam. To use this filter, hetero-feature-binning module has to be provided. statistic_param: CommonFilterParam Setting how to filter base on statistic values. All of "threshold", "top_k" and "top_percentile" are accepted. Check more details in CommonFilterParam. To use this filter, data_statistic module has to be provided. psi_param: CommonFilterParam Setting how to filter base on psi values. All of "threshold", "top_k" and "top_percentile" are accepted. Its take_high properties should be False to choose lower psi features. Check more details in CommonFilterParam. To use this filter, data_statistic module has to be provided. use_anonymous: bool, default False whether to interpret 'select_names' as anonymous names. need_run: bool, default True Indicate if this module needed to be run """ def __init__(self, select_col_indexes=-1, select_names=None, filter_methods=None, unique_param=UniqueValueParam(), iv_value_param=IVValueSelectionParam(), iv_percentile_param=IVPercentileSelectionParam(), iv_top_k_param=IVTopKParam(), variance_coe_param=VarianceOfCoeSelectionParam(), outlier_param=OutlierColsSelectionParam(), manually_param=ManuallyFilterParam(), percentage_value_param=PercentageValueParam(), iv_param=IVFilterParam(), statistic_param=CommonFilterParam(metrics=consts.MEAN), psi_param=CommonFilterParam(metrics=consts.PSI, take_high=False), vif_param=CommonFilterParam(metrics=consts.VIF, threshold=5.0, take_high=False), sbt_param=CommonFilterParam(metrics=consts.FEATURE_IMPORTANCE), correlation_param=CorrelationFilterParam(), use_anonymous=False, need_run=True ): super(FeatureSelectionParam, self).__init__() self.correlation_param = correlation_param self.vif_param = vif_param self.select_col_indexes = select_col_indexes if select_names is None: self.select_names = [] else: self.select_names = select_names if filter_methods is None: self.filter_methods = [consts.MANUALLY_FILTER] else: self.filter_methods = filter_methods # deprecate in the future self.unique_param = copy.deepcopy(unique_param) self.iv_value_param = copy.deepcopy(iv_value_param) self.iv_percentile_param = copy.deepcopy(iv_percentile_param) self.iv_top_k_param = copy.deepcopy(iv_top_k_param) self.variance_coe_param = copy.deepcopy(variance_coe_param) self.outlier_param = copy.deepcopy(outlier_param) self.percentage_value_param = copy.deepcopy(percentage_value_param) self.manually_param = copy.deepcopy(manually_param) self.iv_param = copy.deepcopy(iv_param) self.statistic_param = copy.deepcopy(statistic_param) self.psi_param = copy.deepcopy(psi_param) self.sbt_param = copy.deepcopy(sbt_param) self.need_run = need_run self.use_anonymous = use_anonymous def check(self): descr = "hetero feature selection param's" self.check_defined_type(self.filter_methods, descr, ['list']) for idx, method in enumerate(self.filter_methods): method = method.lower() self.check_valid_value(method, descr, [consts.UNIQUE_VALUE, consts.IV_VALUE_THRES, consts.IV_PERCENTILE, consts.COEFFICIENT_OF_VARIATION_VALUE_THRES, consts.OUTLIER_COLS, consts.MANUALLY_FILTER, consts.PERCENTAGE_VALUE, consts.IV_FILTER, consts.STATISTIC_FILTER, consts.IV_TOP_K, consts.PSI_FILTER, consts.HETERO_SBT_FILTER, consts.HOMO_SBT_FILTER, consts.HETERO_FAST_SBT_FILTER, 
consts.VIF_FILTER, consts.CORRELATION_FILTER]) self.filter_methods[idx] = method self.check_defined_type(self.select_col_indexes, descr, ['list', 'int']) self.unique_param.check() self.iv_value_param.check() self.iv_percentile_param.check() self.iv_top_k_param.check() self.variance_coe_param.check() self.outlier_param.check() self.manually_param.check() self.percentage_value_param.check() self.iv_param.check() for th in self.iv_param.take_high: if not th: raise ValueError("Iv filter should take higher iv features") for m in self.iv_param.metrics: if m != consts.IV: raise ValueError("For iv filter, metrics should be 'iv'") self.statistic_param.check() self.psi_param.check() for th in self.psi_param.take_high: if th: raise ValueError("PSI filter should take lower psi features") for m in self.psi_param.metrics: if m != consts.PSI: raise ValueError("For psi filter, metrics should be 'psi'") self.sbt_param.check() for th in self.sbt_param.take_high: if not th: raise ValueError("SBT filter should take higher feature_importance features") for m in self.sbt_param.metrics: if m != consts.FEATURE_IMPORTANCE: raise ValueError("For SBT filter, metrics should be 'feature_importance'") self.vif_param.check() for m in self.vif_param.metrics: if m != consts.VIF: raise ValueError("For VIF filter, metrics should be 'vif'") self.correlation_param.check() self.check_boolean(self.use_anonymous, f"{descr} use_anonymous") self._warn_to_deprecate_param("iv_value_param", descr, "iv_param") self._warn_to_deprecate_param("iv_percentile_param", descr, "iv_param") self._warn_to_deprecate_param("iv_top_k_param", descr, "iv_param") self._warn_to_deprecate_param("variance_coe_param", descr, "statistic_param") self._warn_to_deprecate_param("unique_param", descr, "statistic_param") self._warn_to_deprecate_param("outlier_param", descr, "statistic_param")
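A minimal usage sketch for the selection parameters above (not part of the original file). The module path and the exact string constants behind consts.IV_FILTER / consts.MANUALLY_FILTER are assumptions based on the docstring's method list:

from federatedml.param.feature_selection_param import FeatureSelectionParam, IVFilterParam

fs_param = FeatureSelectionParam(
    select_col_indexes=-1,                     # consider all columns
    filter_methods=["manually", "iv_filter"],  # filters are applied in this order
    iv_param=IVFilterParam(filter_type="top_k", threshold=20),
)
fs_param.check()   # note: the iv filter additionally requires an upstream
                   # hetero-feature-binning component in a real pipeline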
24,528
42.184859
128
py
FATE
FATE-master/python/federatedml/param/boosting_param.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from federatedml.param.base_param import BaseParam, deprecated_param
from federatedml.param.encrypt_param import EncryptParam
from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from federatedml.param.cross_validation_param import CrossValidationParam
from federatedml.param.predict_param import PredictParam
from federatedml.param.callback_param import CallbackParam
from federatedml.util import consts, LOGGER
import copy
import collections

hetero_deprecated_param_list = ["early_stopping_rounds", "validation_freqs", "metrics",
                                "use_first_metric_only"]
homo_deprecated_param_list = ["validation_freqs", "metrics"]


class ObjectiveParam(BaseParam):
    """
    Define objective parameters that are used in federated ml.

    Parameters
    ----------
    objective : {None, 'cross_entropy', 'lse', 'lae', 'log_cosh', 'tweedie', 'fair', 'huber'}
        None in host's config, should be str in guest's config.
        when task_type is classification, only 'cross_entropy' is supported;
        the other 6 types are supported in regression tasks

    params : None or list
        should be a non-empty list when objective is 'tweedie', 'fair' or 'huber';
        the first element of the list should be a float-number larger than 0.0 when objective
        is 'fair' or 'huber', and a float-number in [1.0, 2.0) when objective is 'tweedie'
    """

    def __init__(self, objective='cross_entropy', params=None):
        self.objective = objective
        self.params = params

    def check(self, task_type=None):
        if self.objective is None:
            return True

        descr = "objective param's"

        LOGGER.debug('check objective {}'.format(self.objective))

        if task_type not in [consts.CLASSIFICATION, consts.REGRESSION]:
            self.objective = self.check_and_change_lower(self.objective,
                                                         ["cross_entropy", "lse", "lae", "huber", "fair",
                                                          "log_cosh", "tweedie"],
                                                         descr)

        if task_type == consts.CLASSIFICATION:
            if self.objective != "cross_entropy":
                raise ValueError("objective param's objective {} not supported".format(self.objective))

        elif task_type == consts.REGRESSION:
            self.objective = self.check_and_change_lower(self.objective,
                                                         ["lse", "lae", "huber", "fair", "log_cosh", "tweedie"],
                                                         descr)

            params = self.params
            if self.objective in ["huber", "fair", "tweedie"]:
                if type(params).__name__ != 'list' or len(params) < 1:
                    raise ValueError(
                        "objective param's params {} not supported, should be non-empty list".format(params))

                if type(params[0]).__name__ not in ["float", "int", "long"]:
                    raise ValueError("objective param's params[0] {} not supported".format(self.params[0]))

                if self.objective == 'tweedie':
                    if params[0] < 1 or params[0] >= 2:
                        raise ValueError("in tweedie regression, objective params[0] should be between [1, 2)")

                if self.objective in ('fair', 'huber'):
                    if params[0] <= 0.0:
                        raise ValueError("in {} regression, objective params[0] should be greater than 0.0".format(
                            self.objective))
        return True


class DecisionTreeParam(BaseParam):
    """
    Define decision tree parameters that are used in federated ml.
    Parameters
    ----------
    criterion_method : {"xgboost"}, default: "xgboost"
        the criterion function to use

    criterion_params: list or dict
        should be non-empty with float-number elements.
        if a list is offered, the first element is the l2 regularization value and the second
        is the l1 regularization value; if a dict is offered, make sure it contains the keys
        'l1' and 'l2'. l1 and l2 regularization values are non-negative floats.
        default: [0.1, 0] or {'l1': 0, 'l2': 0.1}

    max_depth: positive integer
        the max depth of a decision tree, default: 3

    min_sample_split: int
        the minimum number of samples required to split a node, default: 2

    min_impurity_split: float
        the least gain a single split needs to reach, default: 1e-3

    min_child_weight: float
        sum of hessian needed in child nodes. default is 0

    min_leaf_node: int
        when a node has no more than min_leaf_node samples, it becomes a leaf, default: 1

    max_split_nodes: positive integer
        we will use no more than max_split_nodes nodes when finding their splits in parallel
        in a batch, for memory considerations. default is 65536

    feature_importance_type: {'split', 'gain'}
        if 'split', feature_importances are calculated by feature split times;
        if 'gain', feature_importances are calculated by feature split gain.
        default: 'split'

        Due to safety concerns, we adjusted the training strategy of Hetero-SBT in FATE-1.8:
        when running Hetero-SBT, this parameter is now abandoned.
        In Hetero-SBT of FATE-1.8, the guest side computes split and gain of local features,
        and receives anonymous feature importance results from hosts. Hosts compute split
        importance of local features.

    use_missing: bool, accepted True, False only, default: False
        use missing value in training process or not.

    zero_as_missing: bool
        regard 0 as missing value or not; will be used only if use_missing=True, default: False

    deterministic: bool
        ensure stability when computing histograms. Set this to True to ensure a stable result
        when using the same data and the same parameters. But it may slow down computation.
""" def __init__(self, criterion_method="xgboost", criterion_params=[0.1, 0], max_depth=3, min_sample_split=2, min_impurity_split=1e-3, min_leaf_node=1, max_split_nodes=consts.MAX_SPLIT_NODES, feature_importance_type='split', n_iter_no_change=True, tol=0.001, min_child_weight=0, use_missing=False, zero_as_missing=False, deterministic=False): super(DecisionTreeParam, self).__init__() self.criterion_method = criterion_method self.criterion_params = criterion_params self.max_depth = max_depth self.min_sample_split = min_sample_split self.min_impurity_split = min_impurity_split self.min_leaf_node = min_leaf_node self.min_child_weight = min_child_weight self.max_split_nodes = max_split_nodes self.feature_importance_type = feature_importance_type self.n_iter_no_change = n_iter_no_change self.tol = tol self.use_missing = use_missing self.zero_as_missing = zero_as_missing self.deterministic = deterministic def check(self): descr = "decision tree param" self.criterion_method = self.check_and_change_lower(self.criterion_method, ["xgboost"], descr) if len(self.criterion_params) == 0: raise ValueError("decisition tree param's criterio_params should be non empty") if isinstance(self.criterion_params, list): assert len(self.criterion_params) == 2, 'length of criterion_param should be 2: l1, l2 regularization ' \ 'values are needed' self.check_nonnegative_number(self.criterion_params[0], 'l2 reg value') self.check_nonnegative_number(self.criterion_params[1], 'l1 reg value') elif isinstance(self.criterion_params, dict): assert 'l1' in self.criterion_params and 'l2' in self.criterion_params, 'l1 and l2 keys are needed in ' \ 'criterion_params dict' self.criterion_params = [self.criterion_params['l2'], self.criterion_params['l1']] else: raise ValueError('criterion_params should be a dict or a list contains l1, l2 reg value') if type(self.max_depth).__name__ not in ["int", "long"]: raise ValueError("decision tree param's max_depth {} not supported, should be integer".format( self.max_depth)) if self.max_depth < 1: raise ValueError("decision tree param's max_depth should be positive integer, no less than 1") if type(self.min_sample_split).__name__ not in ["int", "long"]: raise ValueError("decision tree param's min_sample_split {} not supported, should be integer".format( self.min_sample_split)) if type(self.min_impurity_split).__name__ not in ["int", "long", "float"]: raise ValueError("decision tree param's min_impurity_split {} not supported, should be numeric".format( self.min_impurity_split)) if type(self.min_leaf_node).__name__ not in ["int", "long"]: raise ValueError("decision tree param's min_leaf_node {} not supported, should be integer".format( self.min_leaf_node)) if type(self.max_split_nodes).__name__ not in ["int", "long"] or self.max_split_nodes < 1: raise ValueError("decision tree param's max_split_nodes {} not supported, " + "should be positive integer between 1 and {}".format(self.max_split_nodes, consts.MAX_SPLIT_NODES)) if type(self.n_iter_no_change).__name__ != "bool": raise ValueError("decision tree param's n_iter_no_change {} not supported, should be bool type".format( self.n_iter_no_change)) if type(self.tol).__name__ not in ["float", "int", "long"]: raise ValueError("decision tree param's tol {} not supported, should be numeric".format(self.tol)) self.feature_importance_type = self.check_and_change_lower(self.feature_importance_type, ["split", "gain"], descr) self.check_nonnegative_number(self.min_child_weight, 'min_child_weight') self.check_boolean(self.deterministic, 'deterministic') 
return True class BoostingParam(BaseParam): """ Basic parameter for Boosting Algorithms Parameters ---------- task_type : {'classification', 'regression'}, default: 'classification' task type objective_param : ObjectiveParam Object, default: ObjectiveParam() objective param learning_rate : float, int or long the learning rate of secure boost. default: 0.3 num_trees : int or float the max number of boosting round. default: 5 subsample_feature_rate : float a float-number in [0, 1], default: 1.0 n_iter_no_change : bool, when True and residual error less than tol, tree building process will stop. default: True bin_num: positive integer greater than 1 bin number use in quantile. default: 32 validation_freqs: None or positive integer or container object in python Do validation in training process or Not. if equals None, will not do validation in train process; if equals positive integer, will validate data every validation_freqs epochs passes; if container object in python, will validate data if epochs belong to this container. e.g. validation_freqs = [10, 15], will validate data when epoch equals to 10 and 15. Default: None """ def __init__(self, task_type=consts.CLASSIFICATION, objective_param=ObjectiveParam(), learning_rate=0.3, num_trees=5, subsample_feature_rate=1, n_iter_no_change=True, tol=0.0001, bin_num=32, predict_param=PredictParam(), cv_param=CrossValidationParam(), validation_freqs=None, metrics=None, random_seed=100, binning_error=consts.DEFAULT_RELATIVE_ERROR): super(BoostingParam, self).__init__() self.task_type = task_type self.objective_param = copy.deepcopy(objective_param) self.learning_rate = learning_rate self.num_trees = num_trees self.subsample_feature_rate = subsample_feature_rate self.n_iter_no_change = n_iter_no_change self.tol = tol self.bin_num = bin_num self.predict_param = copy.deepcopy(predict_param) self.cv_param = copy.deepcopy(cv_param) self.validation_freqs = validation_freqs self.metrics = metrics self.random_seed = random_seed self.binning_error = binning_error def check(self): descr = "boosting tree param's" if self.task_type not in [consts.CLASSIFICATION, consts.REGRESSION]: raise ValueError("boosting_core tree param's task_type {} not supported, should be {} or {}".format( self.task_type, consts.CLASSIFICATION, consts.REGRESSION)) self.objective_param.check(self.task_type) if type(self.learning_rate).__name__ not in ["float", "int", "long"]: raise ValueError("boosting_core tree param's learning_rate {} not supported, should be numeric".format( self.learning_rate)) if type(self.subsample_feature_rate).__name__ not in ["float", "int", "long"] or \ self.subsample_feature_rate < 0 or self.subsample_feature_rate > 1: raise ValueError( "boosting_core tree param's subsample_feature_rate should be a numeric number between 0 and 1") if type(self.n_iter_no_change).__name__ != "bool": raise ValueError("boosting_core tree param's n_iter_no_change {} not supported, should be bool type".format( self.n_iter_no_change)) if type(self.tol).__name__ not in ["float", "int", "long"]: raise ValueError("boosting_core tree param's tol {} not supported, should be numeric".format(self.tol)) if type(self.bin_num).__name__ not in ["int", "long"] or self.bin_num < 2: raise ValueError( "boosting_core tree param's bin_num {} not supported, should be positive integer greater than 1".format( self.bin_num)) if self.validation_freqs is None: pass elif isinstance(self.validation_freqs, int): if self.validation_freqs < 1: raise ValueError("validation_freqs should be larger than 0 when it's 
integer") elif not isinstance(self.validation_freqs, collections.Container): raise ValueError("validation_freqs should be None or positive integer or container") if self.metrics is not None and not isinstance(self.metrics, list): raise ValueError("metrics should be a list") if self.random_seed is not None: assert isinstance(self.random_seed, int) and self.random_seed >= 0, 'random seed must be an integer >= 0' self.check_decimal_float(self.binning_error, descr) return True class HeteroBoostingParam(BoostingParam): """ Parameters ---------- encrypt_param : EncodeParam Object encrypt method use in secure boost, default: EncryptParam() encrypted_mode_calculator_param: EncryptedModeCalculatorParam object the calculation mode use in secureboost, default: EncryptedModeCalculatorParam() """ def __init__(self, task_type=consts.CLASSIFICATION, objective_param=ObjectiveParam(), learning_rate=0.3, num_trees=5, subsample_feature_rate=1, n_iter_no_change=True, tol=0.0001, encrypt_param=EncryptParam(), bin_num=32, encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), predict_param=PredictParam(), cv_param=CrossValidationParam(), validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=False, random_seed=100, binning_error=consts.DEFAULT_RELATIVE_ERROR): super(HeteroBoostingParam, self).__init__(task_type, objective_param, learning_rate, num_trees, subsample_feature_rate, n_iter_no_change, tol, bin_num, predict_param, cv_param, validation_freqs, metrics=metrics, random_seed=random_seed, binning_error=binning_error) self.encrypt_param = copy.deepcopy(encrypt_param) self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param) self.early_stopping_rounds = early_stopping_rounds self.use_first_metric_only = use_first_metric_only def check(self): super(HeteroBoostingParam, self).check() self.encrypted_mode_calculator_param.check() self.encrypt_param.check() if self.early_stopping_rounds is None: pass elif isinstance(self.early_stopping_rounds, int): if self.early_stopping_rounds < 1: raise ValueError("early stopping rounds should be larger than 0 when it's integer") if self.validation_freqs is None: raise ValueError("validation freqs must be set when early stopping is enabled") if not isinstance(self.use_first_metric_only, bool): raise ValueError("use_first_metric_only should be a boolean") return True @deprecated_param(*hetero_deprecated_param_list) class HeteroSecureBoostParam(HeteroBoostingParam): """ Define boosting tree parameters that used in federated ml. Parameters ---------- task_type : {'classification', 'regression'}, default: 'classification' task type tree_param : DecisionTreeParam Object, default: DecisionTreeParam() tree param objective_param : ObjectiveParam Object, default: ObjectiveParam() objective param learning_rate : float, int or long the learning rate of secure boost. default: 0.3 num_trees : int or float the max number of trees to build. default: 5 subsample_feature_rate : float a float-number in [0, 1], default: 1.0 random_seed: int seed that controls all random functions n_iter_no_change : bool, when True and residual error less than tol, tree building process will stop. default: True encrypt_param : EncodeParam Object encrypt method use in secure boost, default: EncryptParam(), this parameter is only for hetero-secureboost bin_num: positive integer greater than 1 bin number use in quantile. 
        default: 32

    encrypted_mode_calculator_param: EncryptedModeCalculatorParam object
        the calculation mode used in secureboost, default: EncryptedModeCalculatorParam(),
        only for hetero-secureboost

    use_missing: bool
        use missing value in training process or not. default: False

    zero_as_missing: bool
        regard 0 as missing value or not; will be used only if use_missing=True, default: False

    validation_freqs: None or positive integer or container object in python
        Do validation in training process or Not.
        if equals None, will not do validation in train process;
        if equals positive integer, will validate data every validation_freqs epochs passes;
        if container object in python, will validate data if epochs belong to this container.
        e.g. validation_freqs = [10, 15], will validate data when epoch equals to 10 and 15.
        Default: None
        The default value is None, 1 is suggested. You can set it to a number larger than 1
        in order to speed up training by skipping validation rounds. When it is larger than 1,
        a number which is divisible by "num_trees" is recommended; otherwise, you will miss
        the validation scores of the last training iteration.

    early_stopping_rounds: integer larger than 0
        will stop training if one metric on the validation data doesn't improve in the last
        early_stopping_rounds rounds; validation freqs need to be set, and early stopping is
        checked at every validation epoch.

    metrics: list, default: []
        Specify which metrics to be used when performing evaluation during the training process.
        If set as empty, default metrics will be used. For regression tasks, default metrics are
        ['root_mean_squared_error', 'mean_absolute_error']; for binary-classification tasks,
        default metrics are ['auc', 'ks']; for multi-classification tasks, default metrics are
        ['accuracy', 'precision', 'recall']

    use_first_metric_only: bool
        use only the first metric for early stopping

    complete_secure: int, default: 0
        when complete_secure is set to a positive integer, the first 'complete_secure' trees
        will be built using only guest features

    sparse_optimization:
        this parameter is abandoned in FATE-1.7.1

    run_goss: bool
        activate Gradient-based One-Side Sampling, which selects large-gradient and
        small-gradient samples using top_rate and other_rate.

    top_rate: float
        the retain ratio of large-gradient data, used when run_goss is True

    other_rate: float
        the retain ratio of small-gradient data, used when run_goss is True

    cipher_compress_error:
        this param is now abandoned

    cipher_compress: bool
        default is True; use cipher compressing to reduce computation cost and transfer cost

    boosting_strategy: str
        std: standard sbt setting;
        mix: alternate using guest/host features to build trees. For example, the first
        'tree_num_per_party' trees use guest features, the next 'tree_num_per_party' trees
        use host features, and so on;
        layered: only supports 2 parties. When running in layered mode, the first 'host_depth'
        layers will use host features, and the next 'guest_depth' layers will only use
        guest features

    work_mode: str
        This parameter has the same function as boosting_strategy, but is deprecated

    tree_num_per_party: int
        every party will alternately build 'tree_num_per_party' trees until the max tree num
        is reached; this param is valid when boosting_strategy is mix

    guest_depth: int
        guest will build the last guest_depth layers of a decision tree using guest features;
        valid when boosting_strategy is layered

    host_depth: int
        host will build the first host_depth layers of a decision tree using host features;
        valid when boosting_strategy is layered

    multi_mode: str
        decides which mode to use when running a multi-classification task:
        single_output: standard gbdt multi-classification strategy;
        multi_output: every leaf gives a multi-dimensional prediction; using multi_mode can
        save time by learning a model with fewer trees.

    EINI_inference: bool
        default is False; this option changes the inference algorithm used in predict tasks:
        a secure prediction method that hides the decision path to enhance security in the
        inference step. This method is inspired by the EINI inference algorithm.

    EINI_random_mask: bool
        default is False;
        multiply the predict result by a random float number to confuse the original predict
        result. This operation further enhances the security of the naive EINI algorithm.

    EINI_complexity_check: bool
        default is False;
        check the complexity of tree models when running EINI algorithms. Complex models can
        easily hide their decision paths, while simple tree models cannot; therefore, if a
        tree model is too simple, it is not allowed to run EINI predict algorithms.
""" def __init__(self, tree_param: DecisionTreeParam = DecisionTreeParam(), task_type=consts.CLASSIFICATION, objective_param=ObjectiveParam(), learning_rate=0.3, num_trees=5, subsample_feature_rate=1.0, n_iter_no_change=True, tol=0.0001, encrypt_param=EncryptParam(), bin_num=32, encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), predict_param=PredictParam(), cv_param=CrossValidationParam(), validation_freqs=None, early_stopping_rounds=None, use_missing=False, zero_as_missing=False, complete_secure=0, metrics=None, use_first_metric_only=False, random_seed=100, binning_error=consts.DEFAULT_RELATIVE_ERROR, sparse_optimization=False, run_goss=False, top_rate=0.2, other_rate=0.1, cipher_compress_error=None, cipher_compress=True, new_ver=True, boosting_strategy=consts.STD_TREE, work_mode=None, tree_num_per_party=1, guest_depth=2, host_depth=3, callback_param=CallbackParam(), multi_mode=consts.SINGLE_OUTPUT, EINI_inference=False, EINI_random_mask=False, EINI_complexity_check=False): super(HeteroSecureBoostParam, self).__init__(task_type, objective_param, learning_rate, num_trees, subsample_feature_rate, n_iter_no_change, tol, encrypt_param, bin_num, encrypted_mode_calculator_param, predict_param, cv_param, validation_freqs, early_stopping_rounds, metrics=metrics, use_first_metric_only=use_first_metric_only, random_seed=random_seed, binning_error=binning_error) self.tree_param = copy.deepcopy(tree_param) self.zero_as_missing = zero_as_missing self.use_missing = use_missing self.complete_secure = complete_secure self.sparse_optimization = sparse_optimization self.run_goss = run_goss self.top_rate = top_rate self.other_rate = other_rate self.cipher_compress_error = cipher_compress_error self.cipher_compress = cipher_compress self.new_ver = new_ver self.EINI_inference = EINI_inference self.EINI_random_mask = EINI_random_mask self.EINI_complexity_check = EINI_complexity_check self.boosting_strategy = boosting_strategy self.work_mode = work_mode self.tree_num_per_party = tree_num_per_party self.guest_depth = guest_depth self.host_depth = host_depth self.callback_param = copy.deepcopy(callback_param) self.multi_mode = multi_mode def check(self): super(HeteroSecureBoostParam, self).check() self.tree_param.check() if not isinstance(self.use_missing, bool): raise ValueError('use missing should be bool type') if not isinstance(self.zero_as_missing, bool): raise ValueError('zero as missing should be bool type') self.check_boolean(self.run_goss, 'run goss') self.check_decimal_float(self.top_rate, 'top rate') self.check_decimal_float(self.other_rate, 'other rate') self.check_positive_number(self.other_rate, 'other_rate') self.check_positive_number(self.top_rate, 'top_rate') self.check_boolean(self.new_ver, 'code version switcher') self.check_boolean(self.cipher_compress, 'cipher compress') self.check_boolean(self.EINI_inference, 'eini inference') self.check_boolean(self.EINI_random_mask, 'eini random mask') self.check_boolean(self.EINI_complexity_check, 'eini complexity check') assert isinstance(self.complete_secure, int) and self.complete_secure >= 0, "complete secure should be an int >= 0" if self.EINI_inference and self.EINI_random_mask: LOGGER.warning('To protect the inference decision path, notice that current setting will multiply' ' predict result by a random number, hence SecureBoost will return confused predict scores' ' that is not the same as the original predict scores') if self.work_mode == consts.MIX_TREE and self.EINI_inference: LOGGER.warning('Mix tree mode does not support EINI, 
use default predict setting') if self.work_mode is not None: self.boosting_strategy = self.work_mode if self.multi_mode not in [consts.SINGLE_OUTPUT, consts.MULTI_OUTPUT]: raise ValueError('unsupported multi-classification mode') if self.multi_mode == consts.MULTI_OUTPUT: if self.boosting_strategy != consts.STD_TREE: raise ValueError('MO trees only works when boosting strategy is std tree') if not self.cipher_compress: raise ValueError('Mo trees only works when cipher compress is enabled') if self.boosting_strategy not in [consts.STD_TREE, consts.LAYERED_TREE, consts.MIX_TREE]: raise ValueError('unknown sbt boosting strategy{}'.format(self.boosting_strategy)) for p in ["early_stopping_rounds", "validation_freqs", "metrics", "use_first_metric_only"]: # if self._warn_to_deprecate_param(p, "", ""): if self._deprecated_params_set.get(p): if "callback_param" in self.get_user_feeded(): raise ValueError(f"{p} and callback param should not be set simultaneously," f"{self._deprecated_params_set}, {self.get_user_feeded()}") else: self.callback_param.callbacks = ["PerformanceEvaluate"] break descr = "boosting_param's" if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"): self.callback_param.validation_freqs = self.validation_freqs if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"): self.callback_param.early_stopping_rounds = self.early_stopping_rounds if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"): self.callback_param.metrics = self.metrics if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"): self.callback_param.use_first_metric_only = self.use_first_metric_only if self.top_rate + self.other_rate >= 1: raise ValueError('sum of top rate and other rate should be smaller than 1') return True @deprecated_param(*homo_deprecated_param_list) class HomoSecureBoostParam(BoostingParam): """ Parameters ---------- backend: {'distributed', 'memory'} decides which backend to use when computing histograms for homo-sbt """ def __init__(self, tree_param: DecisionTreeParam = DecisionTreeParam(), task_type=consts.CLASSIFICATION, objective_param=ObjectiveParam(), learning_rate=0.3, num_trees=5, subsample_feature_rate=1, n_iter_no_change=True, tol=0.0001, bin_num=32, predict_param=PredictParam(), cv_param=CrossValidationParam(), validation_freqs=None, use_missing=False, zero_as_missing=False, random_seed=100, binning_error=consts.DEFAULT_RELATIVE_ERROR, backend=consts.DISTRIBUTED_BACKEND, callback_param=CallbackParam(), multi_mode=consts.SINGLE_OUTPUT): super(HomoSecureBoostParam, self).__init__(task_type=task_type, objective_param=objective_param, learning_rate=learning_rate, num_trees=num_trees, subsample_feature_rate=subsample_feature_rate, n_iter_no_change=n_iter_no_change, tol=tol, bin_num=bin_num, predict_param=predict_param, cv_param=cv_param, validation_freqs=validation_freqs, random_seed=random_seed, binning_error=binning_error ) self.use_missing = use_missing self.zero_as_missing = zero_as_missing self.tree_param = copy.deepcopy(tree_param) self.backend = backend self.callback_param = copy.deepcopy(callback_param) self.multi_mode = multi_mode def check(self): super(HomoSecureBoostParam, self).check() self.tree_param.check() if not isinstance(self.use_missing, bool): raise ValueError('use missing should be bool type') if not isinstance(self.zero_as_missing, bool): raise ValueError('zero as missing should be bool type') 
if self.backend not in [consts.MEMORY_BACKEND, consts.DISTRIBUTED_BACKEND]: raise ValueError('unsupported backend') if self.multi_mode not in [consts.SINGLE_OUTPUT, consts.MULTI_OUTPUT]: raise ValueError('unsupported multi-classification mode') for p in ["validation_freqs", "metrics"]: # if self._warn_to_deprecate_param(p, "", ""): if self._deprecated_params_set.get(p): if "callback_param" in self.get_user_feeded(): raise ValueError(f"{p} and callback param should not be set simultaneously," f"{self._deprecated_params_set}, {self.get_user_feeded()}") else: self.callback_param.callbacks = ["PerformanceEvaluate"] break descr = "boosting_param's" if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"): self.callback_param.validation_freqs = self.validation_freqs if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"): self.callback_param.metrics = self.metrics if self.multi_mode not in [consts.SINGLE_OUTPUT, consts.MULTI_OUTPUT]: raise ValueError('unsupported multi-classification mode') if self.multi_mode == consts.MULTI_OUTPUT: if self.task_type == consts.REGRESSION: raise ValueError('regression tasks not support multi-output trees') return True
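A minimal usage sketch for the boosting parameters above (not part of the original file; module path assumed from this record's file path):

from federatedml.param.boosting_param import DecisionTreeParam, HeteroSecureBoostParam

sbt_param = HeteroSecureBoostParam(
    tree_param=DecisionTreeParam(max_depth=4, criterion_params=[0.1, 0]),
    num_trees=50, learning_rate=0.3,
    run_goss=True, top_rate=0.2, other_rate=0.1,   # check() enforces top_rate + other_rate < 1
    cipher_compress=True,
)
sbt_param.check()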
35,803
51.268613
130
py
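The HomoSecureBoostParam record above routes the deprecated early-stopping arguments into callback_param and validates the histogram backend and multi-output constraints. A minimal construction sketch (assumes a FATE install on the PYTHONPATH; the module path matches the package __init__ further below, and the values are illustrative, not recommendations):

from federatedml.param.boosting_param import HomoSecureBoostParam

# default backend is the distributed one; 'memory' computes histograms locally
param = HomoSecureBoostParam(num_trees=10, learning_rate=0.3, bin_num=32,
                             use_missing=True)
param.check()  # returns True, or raises ValueError (e.g. for a non-bool use_missing)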
FATE
FATE-master/python/federatedml/param/scorecard_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import consts, LOGGER class ScorecardParam(BaseParam): """ Define method used for transforming prediction score to credit score Parameters ---------- method : {"credit"}, default: 'credit' score method, currently only supports "credit" offset : int or float, default: 500 score baseline factor : int or float, default: 20 scoring step, when odds double, result score increases by this factor factor_base : int or float, default: 2 factor base, value ln(factor_base) is used for calculating result score upper_limit_ratio : int or float, default: 3 upper bound for odds, credit score upper bound is upper_limit_ratio * offset lower_limit_value : int or float, default: 0 lower bound for result score need_run : bool, default: True Indicate if this module needs to be run. """ def __init__( self, method="credit", offset=500, factor=20, factor_base=2, upper_limit_ratio=3, lower_limit_value=0, need_run=True): super(ScorecardParam, self).__init__() self.method = method self.offset = offset self.factor = factor self.factor_base = factor_base self.upper_limit_ratio = upper_limit_ratio self.lower_limit_value = lower_limit_value self.need_run = need_run def check(self): descr = "scorecard param" if not isinstance(self.method, str): raise ValueError(f"{descr} method {self.method} not supported, should be str type") else: user_input = self.method.lower() if user_input == "credit": self.method = consts.CREDIT else: raise ValueError(f"{descr} method {user_input} not supported") if type(self.offset).__name__ not in ["int", "long", "float"]: raise ValueError(f"{descr} offset must be numeric, " f"received {type(self.offset)} instead.") if type(self.factor).__name__ not in ["int", "long", "float"]: raise ValueError(f"{descr} factor must be numeric, " f"received {type(self.factor)} instead.") if type(self.factor_base).__name__ not in ["int", "long", "float"]: raise ValueError(f"{descr} factor_base must be numeric, " f"received {type(self.factor_base)} instead.") if type(self.upper_limit_ratio).__name__ not in ["int", "long", "float"]: raise ValueError(f"{descr} upper_limit_ratio must be numeric, " f"received {type(self.upper_limit_ratio)} instead.") if type(self.lower_limit_value).__name__ not in ["int", "long", "float"]: raise ValueError(f"{descr} lower_limit_value must be numeric, " f"received {type(self.lower_limit_value)} instead.") BaseParam.check_boolean(self.need_run, descr=descr + " need_run ") LOGGER.debug("Finish Scorecard parameter check!") return True
3,868
37.306931
94
py
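ScorecardParam's fields describe a standard log-odds scorecard: offset is the score at even odds, and the score moves by factor each time the odds multiply by factor_base. A self-contained sketch of that transform, as read from the docstring above (an illustration of the parameter semantics, not necessarily FATE's exact implementation):

import math

def to_credit_score(predict_score, offset=500, factor=20, factor_base=2,
                    upper_limit_ratio=3, lower_limit_value=0):
    # odds of the positive class; score rises by `factor` per factor_base-fold odds increase
    odds = predict_score / (1 - predict_score)
    score = offset + factor / math.log(factor_base) * math.log(odds)
    # clamp into [lower_limit_value, upper_limit_ratio * offset]
    return max(lower_limit_value, min(upper_limit_ratio * offset, score))

print(to_credit_score(0.5))    # odds = 1 -> 500.0, the baseline
print(to_credit_score(2 / 3))  # odds = 2 -> ~520, one doubling adds `factor`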
FATE
FATE-master/python/federatedml/param/poisson_regression_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.glm_param import LinearModelParam from federatedml.param.callback_param import CallbackParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.init_model_param import InitParam from federatedml.param.stepwise_param import StepwiseParam from federatedml.util import consts class PoissonParam(LinearModelParam): """ Parameters used for Poisson Regression. Parameters ---------- penalty : {'L2', 'L1'}, default: 'L2' Penalty method used in Poisson. Please note that, when using encrypted version in HeteroPoisson, 'L1' is not supported. tol : float, default: 1e-4 The tolerance of convergence alpha : float, default: 1.0 Regularization strength coefficient. optimizer : {'rmsprop', 'sgd', 'adam', 'adagrad'}, default: 'rmsprop' Optimize method batch_size : int, default: -1 Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. learning_rate : float, default: 0.01 Learning rate max_iter : int, default: 20 The maximum iteration for training. init_param: InitParam object, default: default InitParam object Init param method object. early_stop : str, 'weight_diff', 'diff' or 'abs', default: 'diff' Method used to judge convergence. a) diff: Use difference of loss between two iterations to judge whether converge. b) weight_diff: Use difference between weights of two consecutive iterations c) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < eps, it is converged. exposure_colname: str or None, default: None Name of optional exposure variable in dTable. encrypt_param: EncryptParam object, default: default EncryptParam object encrypt param encrypted_mode_calculator_param: EncryptedModeCalculatorParam object, default: default EncryptedModeCalculatorParam object encrypted mode calculator param cv_param: CrossValidationParam object, default: default CrossValidationParam object cv param stepwise_param: StepwiseParam object, default: default StepwiseParam object stepwise param decay: int or float, default: 1 Decay rate for learning rate. learning rate will follow the following decay schedule. lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t) where t is the iter number. decay_sqrt: bool, default: True lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t) validation_freqs: int, list, tuple, set, or None validation frequency during training, required when using early stopping. The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds. 
When it is larger than 1, a number which is divisible by "max_iter" is recommended, otherwise, you will miss the validation scores of the last training iteration. early_stopping_rounds: int, default: None If positive number specified, at every specified training rounds, program checks for early stopping criteria. Validation_freqs must also be set when using early stopping. metrics: list or None, default: None Specify which metrics to be used when performing evaluation during training process. If metrics have not improved at early_stopping rounds, training stops before convergence. If set as empty, default metrics will be used. For regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error'] use_first_metric_only: bool, default: False Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement. floating_point_precision: None or integer if not None, use floating_point_precision-bit to speed up calculation, e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide the result by 2**floating_point_precision in the end. callback_param: CallbackParam object callback param """ def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='rmsprop', batch_size=-1, learning_rate=0.01, init_param=InitParam(), max_iter=20, early_stop='diff', exposure_colname=None, encrypt_param=EncryptParam(), encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), cv_param=CrossValidationParam(), stepwise_param=StepwiseParam(), decay=1, decay_sqrt=True, validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=False, floating_point_precision=23, callback_param=CallbackParam()): super(PoissonParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer, batch_size=batch_size, learning_rate=learning_rate, init_param=init_param, max_iter=max_iter, early_stop=early_stop, cv_param=cv_param, decay=decay, decay_sqrt=decay_sqrt, validation_freqs=validation_freqs, early_stopping_rounds=early_stopping_rounds, metrics=metrics, floating_point_precision=floating_point_precision, encrypt_param=encrypt_param, use_first_metric_only=use_first_metric_only, stepwise_param=stepwise_param, callback_param=callback_param) self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param) self.exposure_colname = exposure_colname def check(self): descr = "poisson_regression_param's " super(PoissonParam, self).check() if self.encrypt_param.method != consts.PAILLIER: raise ValueError( descr + "encrypt method supports 'Paillier' only") if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad']: raise ValueError( descr + "optimizer not supported, optimizer should be" " 'sgd', 'rmsprop', 'adam', or 'adagrad'") if self.exposure_colname is not None: if type(self.exposure_colname).__name__ != "str": raise ValueError( descr + "exposure_colname {} not supported, should be string type".format(self.exposure_colname)) self.encrypted_mode_calculator_param.check() return True
7,753
55.188406
182
py
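A quick validation sketch for PoissonParam above (import path matches the package __init__ further below; "exposure" is a hypothetical column name, and all values are illustrative):

from federatedml.param.poisson_regression_param import PoissonParam

param = PoissonParam(penalty="L2", optimizer="rmsprop", max_iter=20,
                     exposure_colname="exposure")
param.check()  # enforces Paillier encryption, a known optimizer, and a str exposure column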
FATE
FATE-master/python/federatedml/param/intersect_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.base_param import BaseParam, deprecated_param from federatedml.util import consts, LOGGER DEFAULT_RANDOM_BIT = 128 class EncodeParam(BaseParam): """ Define the hash method for raw intersect method Parameters ---------- salt: str the src id will be str = str + salt, default by empty string encode_method: {"none", "md5", "sha1", "sha224", "sha256", "sha384", "sha512", "sm3"} the hash method of src id, support md5, sha1, sha224, sha256, sha384, sha512, sm3, default by 'none' base64: bool if True, the result of hash will be changed to base64, default by False """ def __init__(self, salt='', encode_method='none', base64=False): super().__init__() self.salt = salt self.encode_method = encode_method self.base64 = base64 def check(self): if type(self.salt).__name__ != "str": raise ValueError( "encode param's salt {} not supported, should be str type".format( self.salt)) descr = "encode param's " self.encode_method = self.check_and_change_lower(self.encode_method, ["none", consts.MD5, consts.SHA1, consts.SHA224, consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], descr) if type(self.base64).__name__ != "bool": raise ValueError( "hash param's base64 {} not supported, should be bool type".format(self.base64)) LOGGER.debug("Finish EncodeParam check!") LOGGER.warning(f"'EncodeParam' will be replaced by 'RAWParam' in future release."
f"Please do not rely on current param naming in application.") return True class RAWParam(BaseParam): """ Note: This param is deprecated """ def __init__(self, use_hash=False, salt='', hash_method='none', base64=False, join_role=consts.GUEST): super().__init__() self.use_hash = use_hash self.salt = salt self.hash_method = hash_method self.base64 = base64 self.join_role = join_role def check(self): descr = "raw param's " self.check_boolean(self.use_hash, f"{descr}use_hash") self.check_string(self.salt, f"{descr}salt") self.hash_method = self.check_and_change_lower(self.hash_method, ["none", consts.MD5, consts.SHA1, consts.SHA224, consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], f"{descr}hash_method") self.check_boolean(self.base64, f"{descr}base_64") self.join_role = self.check_and_change_lower(self.join_role, [consts.GUEST, consts.HOST], f"{descr}join_role") LOGGER.debug("Finish RAWParam check!") return True class RSAParam(BaseParam): """ Specify parameters for RSA intersect method Parameters ---------- salt: str the src id will be str = str + salt, default '' hash_method: str the hash method of src id, support sha256, sha384, sha512, sm3, default sha256 final_hash_method: str the hash method of result data string, support md5, sha1, sha224, sha256, sha384, sha512, sm3, default sha256 split_calculation: bool if True, Host & Guest split operations for faster performance, recommended on large data set random_base_fraction: positive float if not None, generate (fraction * public key id count) of r for encryption and reuse generated r; note that value greater than 0.99 will be taken as 1, and value less than 0.01 will be rounded up to 0.01 key_length: int value >= 1024, bit count of rsa key, default 1024 random_bit: positive int it will define the size of blinding factor in rsa algorithm, default 128 """ def __init__(self, salt='', hash_method='sha256', final_hash_method='sha256', split_calculation=False, random_base_fraction=None, key_length=consts.DEFAULT_KEY_LENGTH, random_bit=DEFAULT_RANDOM_BIT): super().__init__() self.salt = salt self.hash_method = hash_method self.final_hash_method = final_hash_method self.split_calculation = split_calculation self.random_base_fraction = random_base_fraction self.key_length = key_length self.random_bit = random_bit def check(self): descr = "rsa param's " self.check_string(self.salt, f"{descr}salt") self.hash_method = self.check_and_change_lower(self.hash_method, [consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], f"{descr}hash_method") self.final_hash_method = self.check_and_change_lower(self.final_hash_method, [consts.MD5, consts.SHA1, consts.SHA224, consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], f"{descr}final_hash_method") self.check_boolean(self.split_calculation, f"{descr}split_calculation") if self.random_base_fraction: self.check_positive_number(self.random_base_fraction, descr) self.check_decimal_float(self.random_base_fraction, f"{descr}random_base_fraction") self.check_positive_integer(self.key_length, f"{descr}key_length") if self.key_length < 1024: raise ValueError(f"key length must be >= 1024") self.check_positive_integer(self.random_bit, f"{descr}random_bit") LOGGER.debug("Finish RSAParam parameter check!") return True class DHParam(BaseParam): """ Define the hash method for DH intersect method Parameters ---------- salt: str the src id will be str = str + salt, default '' hash_method: str the hash method of src id, support none, md5, sha1, sha 224, sha256, sha384, sha512, sm3, default sha256 key_length: int, 
value >= 1024 the key length of the commutative cipher p, default 1024 """ def __init__(self, salt='', hash_method='sha256', key_length=consts.DEFAULT_KEY_LENGTH): super().__init__() self.salt = salt self.hash_method = hash_method self.key_length = key_length def check(self): descr = "dh param's " self.check_string(self.salt, f"{descr}salt") self.hash_method = self.check_and_change_lower(self.hash_method, ["none", consts.MD5, consts.SHA1, consts.SHA224, consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], f"{descr}hash_method") self.check_positive_integer(self.key_length, f"{descr}key_length") if self.key_length < 1024: raise ValueError(f"key length must be >= 1024") LOGGER.debug("Finish DHParam parameter check!") return True class ECDHParam(BaseParam): """ Define the hash method for ECDH intersect method Parameters ---------- salt: str the src id will be str = str + salt, default '' hash_method: str the hash method of src id, support sha256, sha384, sha512, sm3, default sha256 curve: str the name of curve, currently only support 'curve25519', which offers 128 bits of security """ def __init__(self, salt='', hash_method='sha256', curve=consts.CURVE25519): super().__init__() self.salt = salt self.hash_method = hash_method self.curve = curve def check(self): descr = "ecdh param's " self.check_string(self.salt, f"{descr}salt") self.hash_method = self.check_and_change_lower(self.hash_method, [consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], f"{descr}hash_method") self.curve = self.check_and_change_lower(self.curve, [consts.CURVE25519], f"{descr}curve") LOGGER.debug("Finish ECDHParam parameter check!") return True class IntersectCache(BaseParam): def __init__(self, use_cache=False, id_type=consts.PHONE, encrypt_type=consts.SHA256): """ Parameters ---------- use_cache: bool whether to use cached ids; with ver1.7 and above, this param is ignored id_type with ver1.7 and above, this param is ignored encrypt_type with ver1.7 and above, this param is ignored """ super().__init__() self.use_cache = use_cache self.id_type = id_type self.encrypt_type = encrypt_type def check(self): descr = "intersect_cache param's " # self.check_boolean(self.use_cache, f"{descr}use_cache") self.check_and_change_lower(self.id_type, [consts.PHONE, consts.IMEI], f"{descr}id_type") self.check_and_change_lower(self.encrypt_type, [consts.MD5, consts.SHA256], f"{descr}encrypt_type") class IntersectPreProcessParam(BaseParam): """ Specify parameters for pre-processing and cardinality-only mode Parameters ---------- false_positive_rate: float initial target false positive rate when creating Bloom Filter, must be <= 0.5, default 1e-3 encrypt_method: str encrypt method for encrypting id when performing cardinality_only task, supports rsa only, default rsa; specify rsa parameter setting with RSAParam hash_method: str the hash method for inserting ids, support md5, sha1, sha224, sha256, sha384, sha512, sm3, default sha256 preprocess_method: str the hash method for encoding ids before insertion into filter, default sha256, only effective for preprocessing preprocess_salt: str salt to be appended to hash result by preprocess_method before insertion into filter, default '', only effective for preprocessing random_state: int seed for random salt generator when constructing hash functions, salt is appended to hash result by hash_method when performing insertion, default None filter_owner: str role that constructs filter, either guest or host, default guest, only effective for preprocessing """ def __init__(self,
false_positive_rate=1e-3, encrypt_method=consts.RSA, hash_method='sha256', preprocess_method='sha256', preprocess_salt='', random_state=None, filter_owner=consts.GUEST): super().__init__() self.false_positive_rate = false_positive_rate self.encrypt_method = encrypt_method self.hash_method = hash_method self.preprocess_method = preprocess_method self.preprocess_salt = preprocess_salt self.random_state = random_state self.filter_owner = filter_owner def check(self): descr = "intersect preprocess param's false_positive_rate " self.check_decimal_float(self.false_positive_rate, descr) self.check_positive_number(self.false_positive_rate, descr) if self.false_positive_rate > 0.5: raise ValueError(f"{descr} must be positive float no greater than 0.5") descr = "intersect preprocess param's encrypt_method " self.encrypt_method = self.check_and_change_lower(self.encrypt_method, [consts.RSA], descr) descr = "intersect preprocess param's random_state " if self.random_state: self.check_nonnegative_number(self.random_state, descr) descr = "intersect preprocess param's hash_method " self.hash_method = self.check_and_change_lower(self.hash_method, [consts.MD5, consts.SHA1, consts.SHA224, consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], descr) descr = "intersect preprocess param's preprocess_salt " self.check_string(self.preprocess_salt, descr) descr = "intersect preprocess param's preprocess_method " self.preprocess_method = self.check_and_change_lower(self.preprocess_method, [consts.MD5, consts.SHA1, consts.SHA224, consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3], descr) descr = "intersect preprocess param's filter_owner " self.filter_owner = self.check_and_change_lower(self.filter_owner, [consts.GUEST, consts.HOST], descr) LOGGER.debug("Finish IntersectPreProcessParam parameter check!") return True @deprecated_param("random_bit", "join_role", "with_encode", "encode_params", "intersect_cache_param", "raw_params", "repeated_id_process", "repeated_id_owner", "allow_info_share", "info_owner", "with_sample_id") class IntersectParam(BaseParam): """ Define the intersect method Parameters ---------- intersect_method: str it supports 'rsa', 'raw', 'dh', 'ecdh', default by 'rsa' random_bit: positive int it will define the size of blinding factor in rsa algorithm, default 128 note that this param will be deprecated in future, please use random_bit in RSAParam instead sync_intersect_ids: bool In rsa, 'sync_intersect_ids' is True means guest or host will send intersect results to the others, and False will not. while in raw, 'sync_intersect_ids' is True means the role of "join_role" will send intersect results and the others will get them. Default by True. join_role: str role who joins ids, supports "guest" and "host" only and effective only for raw. If it is "guest", the host will send its ids to guest and find the intersection of ids in guest; if it is "host", the guest will send its ids to host. 
Default by "guest"; note this param will be deprecated in future version, please use 'join_role' in raw_params instead only_output_key: bool if false, the results of intersection will include key and value which from input data; if true, it will just include key from input data and the value will be empty or filled by uniform string like "intersect_id" with_encode: bool if True, it will use hash method for intersect ids, effective for raw method only; note that this param will be deprecated in future version, please use 'use_hash' in raw_params; currently if this param is set to True, specification by 'encode_params' will be taken instead of 'raw_params'. encode_params: EncodeParam effective only when with_encode is True; this param will be deprecated in future version, use 'raw_params' in future implementation raw_params: RAWParam this param is deprecated rsa_params: RSAParam effective for rsa method only, this param is deprecated dh_params: DHParam effective for dh method only ecdh_params: ECDHParam effective for ecdh method only join_method: {'inner_join', 'left_join'} if 'left_join', participants will all include sample_id_generator's (imputed) ids in output, default 'inner_join' new_sample_id: bool whether to generate new id for sample_id_generator's ids, only effective when join_method is 'left_join' or when input data are instance with match id, default False sample_id_generator: str role whose ids are to be kept, effective only when join_method is 'left_join' or when input data are instance with match id, default 'guest' intersect_cache_param: IntersectCacheParam specification for cache generation, with ver1.7 and above, this param is ignored. run_cache: bool whether to store Host's encrypted ids, only valid when intersect method is 'rsa', 'dh', 'ecdh', default False cardinality_only: bool whether to output estimated intersection count(cardinality); if sync_cardinality is True, then sync cardinality count with host(s) cardinality_method: string specify which intersect method to use for coutning cardinality, default "ecdh"; note that with "rsa", estimated cardinality will be produced; while "dh" and "ecdh" method output exact cardinality, it only supports single-host task sync_cardinality: bool whether to sync cardinality with all participants, default False, only effective when cardinality_only set to True run_preprocess: bool whether to run preprocess process, default False intersect_preprocess_params: IntersectPreProcessParam used for preprocessing and cardinality_only mode repeated_id_process: bool if true, intersection will process the ids which can be repeatable; in ver 1.7 and above,repeated id process will be automatically applied to data with instance id, this param will be ignored repeated_id_owner: str which role has the repeated id; in ver 1.7 and above, this param is ignored allow_info_share: bool in ver 1.7 and above, this param is ignored info_owner: str in ver 1.7 and above, this param is ignored with_sample_id: bool data with sample id or not, default False; in ver 1.7 and above, this param is ignored """ def __init__(self, intersect_method: str = consts.RSA, random_bit=DEFAULT_RANDOM_BIT, sync_intersect_ids=True, join_role=consts.GUEST, only_output_key: bool = False, with_encode=False, encode_params=EncodeParam(), raw_params=RAWParam(), rsa_params=RSAParam(), dh_params=DHParam(), ecdh_params=ECDHParam(), join_method=consts.INNER_JOIN, new_sample_id: bool = False, sample_id_generator=consts.GUEST, intersect_cache_param=IntersectCache(), run_cache: bool = False, 
cardinality_only: bool = False, sync_cardinality: bool = False, cardinality_method=consts.ECDH, run_preprocess: bool = False, intersect_preprocess_params=IntersectPreProcessParam(), repeated_id_process=False, repeated_id_owner=consts.GUEST, with_sample_id=False, allow_info_share: bool = False, info_owner=consts.GUEST): super().__init__() self.intersect_method = intersect_method self.random_bit = random_bit self.sync_intersect_ids = sync_intersect_ids self.join_role = join_role self.with_encode = with_encode self.encode_params = copy.deepcopy(encode_params) self.raw_params = copy.deepcopy(raw_params) self.rsa_params = copy.deepcopy(rsa_params) self.only_output_key = only_output_key self.sample_id_generator = sample_id_generator self.intersect_cache_param = copy.deepcopy(intersect_cache_param) self.run_cache = run_cache self.repeated_id_process = repeated_id_process self.repeated_id_owner = repeated_id_owner self.allow_info_share = allow_info_share self.info_owner = info_owner self.with_sample_id = with_sample_id self.join_method = join_method self.new_sample_id = new_sample_id self.dh_params = copy.deepcopy(dh_params) self.cardinality_only = cardinality_only self.sync_cardinality = sync_cardinality self.cardinality_method = cardinality_method self.run_preprocess = run_preprocess self.intersect_preprocess_params = copy.deepcopy(intersect_preprocess_params) self.ecdh_params = copy.deepcopy(ecdh_params) def check(self): descr = "intersect param's " if self.intersect_method.lower() == consts.RAW.lower(): self.intersect_method = consts.ECDH LOGGER.warning("Raw intersect method is not supported, it will be replaced by ECDH") self.intersect_method = self.check_and_change_lower(self.intersect_method, [consts.RSA, consts.RAW, consts.DH, consts.ECDH], f"{descr}intersect_method") if self._warn_to_deprecate_param("random_bit", descr, "rsa_params' 'random_bit'"): if "rsa_params.random_bit" in self.get_user_feeded(): raise ValueError(f"random_bit and rsa_params.random_bit should not be set simultaneously") self.rsa_params.random_bit = self.random_bit self.check_boolean(self.sync_intersect_ids, f"{descr}intersect_ids") if self._warn_to_deprecate_param("encode_param", "", ""): if "raw_params" in self.get_user_feeded(): raise ValueError(f"encode_param and raw_params should not be set simultaneously") if self._warn_to_deprecate_param("join_role", descr, "raw_params' 'join_role'"): if "raw_params.join_role" in self.get_user_feeded(): raise ValueError(f"join_role and raw_params.join_role should not be set simultaneously") self.raw_params.join_role = self.join_role self.check_boolean(self.only_output_key, f"{descr}only_output_key") self.join_method = self.check_and_change_lower(self.join_method, [consts.INNER_JOIN, consts.LEFT_JOIN], f"{descr}join_method") self.check_boolean(self.new_sample_id, f"{descr}new_sample_id") self.sample_id_generator = self.check_and_change_lower(self.sample_id_generator, [consts.GUEST, consts.HOST], f"{descr}sample_id_generator") if self.join_method == consts.LEFT_JOIN: if not self.sync_intersect_ids: raise ValueError(f"Cannot perform left join without sync intersect ids") self.check_boolean(self.run_cache, f"{descr} run_cache") if self._warn_to_deprecate_param("encode_params", descr, "raw_params") or \ self._warn_to_deprecate_param("with_encode", descr, "raw_params' 'use_hash'"): # self.encode_params.check() if "with_encode" in self.get_user_feeded() and "raw_params.use_hash" in self.get_user_feeded(): raise
ValueError(f"'raw_params' and 'encode_params' should not be set simultaneously.") if "raw_params" in self.get_user_feeded() and "encode_params" in self.get_user_feeded(): raise ValueError(f"'raw_params' and 'encode_params' should not be set simultaneously.") LOGGER.warning(f"Param values from 'encode_params' will override 'raw_params' settings.") self.raw_params.use_hash = self.with_encode self.raw_params.hash_method = self.encode_params.encode_method self.raw_params.salt = self.encode_params.salt self.raw_params.base64 = self.encode_params.base64 self.raw_params.check() self.rsa_params.check() self.dh_params.check() self.ecdh_params.check() self.check_boolean(self.cardinality_only, f"{descr}cardinality_only") self.check_boolean(self.sync_cardinality, f"{descr}sync_cardinality") self.check_boolean(self.run_preprocess, f"{descr}run_preprocess") self.intersect_preprocess_params.check() if self.cardinality_only: if self.cardinality_method not in [consts.RSA, consts.DH, consts.ECDH]: raise ValueError(f"cardinality-only mode only support rsa, dh, ecdh.") if self.cardinality_method == consts.RSA and self.rsa_params.split_calculation: raise ValueError(f"cardinality-only mode only supports unified calculation.") if self.run_preprocess: if self.intersect_preprocess_params.false_positive_rate < 0.01: raise ValueError(f"for preprocessing ids, false_positive_rate must be no less than 0.01") if self.cardinality_only: raise ValueError(f"cardinality_only mode cannot run preprocessing.") if self.run_cache: if self.intersect_method not in [consts.RSA, consts.DH, consts.ECDH]: raise ValueError(f"Only rsa, dh, or ecdh method supports cache.") if self.intersect_method == consts.RSA and self.rsa_params.split_calculation: raise ValueError(f"RSA split_calculation does not support cache.") if self.cardinality_only: raise ValueError(f"cache is not available for cardinality_only mode.") if self.run_preprocess: raise ValueError(f"Preprocessing does not support cache.") deprecated_param_list = ["repeated_id_process", "repeated_id_owner", "intersect_cache_param", "allow_info_share", "info_owner", "with_sample_id"] for param in deprecated_param_list: self._warn_deprecated_param(param, descr) LOGGER.debug("Finish intersect parameter check!") return True
27,174
46.508741
140
py
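A configuration sketch for RSA-based PSI with caching, exercising the constraints checked above (assumes the lower-case string constants such as "rsa" used throughout the module; values are illustrative):

from federatedml.param.intersect_param import IntersectParam, RSAParam

param = IntersectParam(intersect_method="rsa",
                       rsa_params=RSAParam(key_length=2048, final_hash_method="sha256"),
                       only_output_key=True,
                       run_cache=True)  # allowed: rsa without split_calculation
param.check()  # normalizes names to lower case and enforces key_length >= 1024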
FATE
FATE-master/python/federatedml/param/stepwise_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from federatedml.param.base_param import BaseParam from federatedml.util import consts class StepwiseParam(BaseParam): """ Define stepwise params Parameters ---------- score_name: {"AIC", "BIC"}, default: 'AIC' Specify which model selection criterion to be used mode: {"Hetero", "Homo"}, default: 'Hetero' Indicate what mode is current task role: {"Guest", "Host", "Arbiter"}, default: 'Guest' Indicate what role is current party direction: {"both", "forward", "backward"}, default: 'both' Indicate which direction to go for stepwise. 'forward' means forward selection; 'backward' means elimination; 'both' means possible models of both directions are examined at each step. max_step: int, default: '10' Specify total number of steps to run before forced stop. nvmin: int, default: '2' Specify the min subset size of final model, cannot be lower than 2. When nvmin > 2, the final model size may be smaller than nvmin due to max_step limit. nvmax: int, default: None Specify the max subset size of final model, 2 <= nvmin <= nvmax. The final model size may be larger than nvmax due to max_step limit. need_stepwise: bool, default False Indicate if this module needed to be run """ def __init__(self, score_name="AIC", mode=consts.HETERO, role=consts.GUEST, direction="both", max_step=10, nvmin=2, nvmax=None, need_stepwise=False): super(StepwiseParam, self).__init__() self.score_name = score_name self.mode = mode self.role = role self.direction = direction self.max_step = max_step self.nvmin = nvmin self.nvmax = nvmax self.need_stepwise = need_stepwise def check(self): model_param_descr = "stepwise param's" self.score_name = self.check_and_change_lower(self.score_name, ["aic", "bic"], model_param_descr) self.check_valid_value(self.mode, model_param_descr, valid_values=[consts.HOMO, consts.HETERO]) self.check_valid_value(self.role, model_param_descr, valid_values=[consts.HOST, consts.GUEST, consts.ARBITER]) self.direction = self.check_and_change_lower(self.direction, ["forward", "backward", "both"], model_param_descr) self.check_positive_integer(self.max_step, model_param_descr) self.check_positive_integer(self.nvmin, model_param_descr) if self.nvmin < 2: raise ValueError(model_param_descr + " nvmin must be no less than 2.") if self.nvmax is not None: self.check_positive_integer(self.nvmax, model_param_descr) if self.nvmin > self.nvmax: raise ValueError(model_param_descr + " nvmax must be greater than nvmin.") self.check_boolean(self.need_stepwise, model_param_descr)
3,510
44.597403
161
py
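A usage sketch for StepwiseParam above: backward elimination under BIC with a bounded model size (illustrative values):

from federatedml.param.stepwise_param import StepwiseParam

param = StepwiseParam(score_name="BIC", direction="backward",
                      max_step=5, nvmin=2, nvmax=10, need_stepwise=True)
param.check()  # lower-cases score_name/direction and enforces 2 <= nvmin <= nvmax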
FATE
FATE-master/python/federatedml/param/feature_imputation_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class FeatureImputationParam(BaseParam): """ Define feature imputation parameters Parameters ---------- default_value : None or single object type or list the value to replace missing value. if None, it will use default value defined in federatedml/feature/imputer.py, if single object, will fill missing value with this object, if list, its length should be the same as the input data's feature dimension, meaning that if some column happens to have missing values, it will replace the missing value with the element in the identical position of this list. missing_fill_method : [None, 'min', 'max', 'mean', 'designated'] the method to replace missing value col_missing_fill_method: None or dict of (column name, missing_fill_method) pairs specifies method to replace missing value for each column; any column not specified will take missing_fill_method, if missing_fill_method is None, unspecified column will not be imputed; missing_impute : None or list element of list can be any type, or auto generated if value is None, defines which values to consider as missing, default: None need_run: bool, default True need run or not """ def __init__(self, default_value=0, missing_fill_method=None, col_missing_fill_method=None, missing_impute=None, need_run=True): super(FeatureImputationParam, self).__init__() self.default_value = default_value self.missing_fill_method = missing_fill_method self.col_missing_fill_method = col_missing_fill_method self.missing_impute = missing_impute self.need_run = need_run def check(self): descr = "feature imputation param's " self.check_boolean(self.need_run, descr + "need_run") if self.missing_fill_method is not None: self.missing_fill_method = self.check_and_change_lower(self.missing_fill_method, ['min', 'max', 'mean', 'designated'], f"{descr}missing_fill_method ") if self.col_missing_fill_method: if not isinstance(self.col_missing_fill_method, dict): raise ValueError(f"{descr}col_missing_fill_method should be a dict") for k, v in self.col_missing_fill_method.items(): if not isinstance(k, str): raise ValueError(f"{descr}col_missing_fill_method should contain str key(s) only") v = self.check_and_change_lower(v, ['min', 'max', 'mean', 'designated'], f"per column method specified in {descr} col_missing_fill_method dict") self.col_missing_fill_method[k] = v if self.missing_impute: if not isinstance(self.missing_impute, list): raise ValueError(f"{descr}missing_impute must be None or list.") return True
3,808
45.45122
137
py
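A sketch of per-column imputation settings for the record above (the module path is inferred from the file path; the column names "x0"/"x1" are hypothetical):

from federatedml.param.feature_imputation_param import FeatureImputationParam

param = FeatureImputationParam(
    missing_fill_method="mean",                   # fallback for unlisted columns
    col_missing_fill_method={"x0": "designated",  # per-column overrides
                             "x1": "max"},
    default_value=0)                              # value used by "designated"
param.check()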
FATE
FATE-master/python/federatedml/param/hetero_kmeans_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class KmeansParam(BaseParam): """ Parameters ---------- k : int, default 5 The number of centroids to generate; should be larger than 1 and less than 100 in this version max_iter : int, default 300. Maximum number of iterations of the hetero-k-means algorithm to run. tol : float, default 0.001. convergence tolerance random_stat : None or int random seed """ def __init__(self, k=5, max_iter=300, tol=0.001, random_stat=None): super(KmeansParam, self).__init__() self.k = k self.max_iter = max_iter self.tol = tol self.random_stat = random_stat def check(self): descr = "Kmeans_param's " if not isinstance(self.k, int): raise ValueError( descr + "k {} not supported, should be int type".format(self.k)) elif self.k <= 1: raise ValueError( descr + "k {} not supported, should be larger than 1".format(self.k)) elif self.k > 100: raise ValueError( descr + "k {} not supported, should be less than 100 in this version".format(self.k)) if not isinstance(self.max_iter, int): raise ValueError( descr + "max_iter {} not supported, should be int type".format(self.max_iter)) elif self.max_iter <= 0: raise ValueError( descr + "max_iter {} not supported, should be larger than 0".format(self.max_iter)) if not isinstance(self.tol, (float, int)): raise ValueError( descr + "tol {} not supported, should be float type".format(self.tol)) elif self.tol < 0: raise ValueError( descr + "tol {} not supported, should be larger than or equal to 0".format(self.tol)) if self.random_stat is not None: if not isinstance(self.random_stat, int): raise ValueError(descr + "random_stat {} not supported, should be int type".format(self.random_stat)) elif self.random_stat < 0: raise ValueError( descr + "random_stat {} not supported, should be larger than/equal to 0".format(self.random_stat))
2,896
36.141026
115
py
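The check() above is plain range validation, so both paths are easy to demonstrate (values illustrative):

from federatedml.param.hetero_kmeans_param import KmeansParam

KmeansParam(k=8, max_iter=100, tol=0.001, random_stat=42).check()  # ok: 1 < k <= 100

try:
    KmeansParam(k=1).check()
except ValueError as e:
    print(e)  # k 1 not supported, should be larger than 1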
FATE
FATE-master/python/federatedml/param/positive_unlabeled_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.util import consts from federatedml.param.base_param import BaseParam class PositiveUnlabeledParam(BaseParam): """ Parameters used for positive unlabeled. Parameters ---------- strategy: {"probability", "quantity", "proportion", "distribution"} The strategy of converting unlabeled value. threshold: int or float, default: 0.9 The threshold in labeling strategy. """ def __init__(self, strategy="probability", threshold=0.9): super(PositiveUnlabeledParam, self).__init__() self.strategy = strategy self.threshold = threshold def check(self): base_descr = "Positive Unlabeled Param's " float_descr = "Probability or Proportion Strategy Param's " int_descr = "Quantity Strategy Param's " numeric_descr = "Distribution Strategy Param's " self.check_valid_value(self.strategy, base_descr, [consts.PROBABILITY, consts.QUANTITY, consts.PROPORTION, consts.DISTRIBUTION]) self.check_defined_type(self.threshold, base_descr, [consts.INT, consts.FLOAT]) if self.strategy == consts.PROBABILITY or self.strategy == consts.PROPORTION: self.check_decimal_float(self.threshold, float_descr) if self.strategy == consts.QUANTITY: self.check_positive_integer(self.threshold, int_descr) if self.strategy == consts.DISTRIBUTION: self.check_positive_number(self.threshold, numeric_descr) return True
2,178
35.316667
109
py
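Each strategy pairs with a different threshold type, as the check above encodes; a sketch (assuming the consts values are the lower-case strings listed in the docstring):

from federatedml.param.positive_unlabeled_param import PositiveUnlabeledParam

PositiveUnlabeledParam(strategy="probability", threshold=0.9).check()  # decimal float
PositiveUnlabeledParam(strategy="quantity", threshold=100).check()     # positive integer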
FATE
FATE-master/python/federatedml/param/secure_add_example_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class SecureAddExampleParam(BaseParam): def __init__(self, seed=None, partition=1, data_num=1000): super(SecureAddExampleParam, self).__init__() self.seed = seed self.partition = partition self.data_num = data_num def check(self): if self.seed is not None and type(self.seed).__name__ != "int": raise ValueError("random seed should be None or integers") if type(self.partition).__name__ != "int" or self.partition < 1: raise ValueError("partition should be an integer larger than 0") if type(self.data_num).__name__ != "int" or self.data_num < 1: raise ValueError("data_num should be an integer larger than 0")
1,374
36.162162
75
py
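Usage sketch for the toy secure-add job parameters (values illustrative):

from federatedml.param.secure_add_example_param import SecureAddExampleParam

param = SecureAddExampleParam(seed=42, partition=4, data_num=10000)
param.check()  # seed may be None or an int; partition and data_num must be >= 1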
FATE
FATE-master/python/federatedml/param/union_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import LOGGER class UnionParam(BaseParam): """ Define the union method for combining multiple dTables and keeping entries with the same id Parameters ---------- need_run: bool, default True Indicate if this module needs to be run allow_missing: bool, default False Whether to allow mismatch between feature length and header length in the result. Note that empty tables will always be skipped regardless of this param setting. keep_duplicate: bool, default False Whether to keep entries with duplicated keys. If set to True, a new id will be generated for duplicated entry in the format {id}_{table_name}. """ def __init__(self, need_run=True, allow_missing=False, keep_duplicate=False): super().__init__() self.need_run = need_run self.allow_missing = allow_missing self.keep_duplicate = keep_duplicate def check(self): descr = "union param's " if type(self.need_run).__name__ != "bool": raise ValueError( descr + "need_run {} not supported, should be bool".format( self.need_run)) if type(self.allow_missing).__name__ != "bool": raise ValueError( descr + "allow_missing {} not supported, should be bool".format( self.allow_missing)) if type(self.keep_duplicate).__name__ != "bool": raise ValueError( descr + "keep_duplicate {} not supported, should be bool".format( self.keep_duplicate)) LOGGER.info("Finish union parameter check!") return True
2,375
36.125
166
py
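A sketch showing the duplicate-key option; per the docstring above, a colliding id is re-keyed as {id}_{table_name} when keep_duplicate is on:

from federatedml.param.union_param import UnionParam

param = UnionParam(allow_missing=False, keep_duplicate=True)
param.check()  # all three fields must be bool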
FATE
FATE-master/python/federatedml/param/statistics_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import re from federatedml.param.base_param import BaseParam from federatedml.util import consts import copy class StatisticsParam(BaseParam): """ Define statistics params Parameters ---------- statistics: list, string, default "summary" Specify the statistic types to be computed. "summary" represents list: [consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION, consts.MEDIAN, consts.MIN, consts.MAX, consts.MISSING_COUNT, consts.SKEWNESS, consts.KURTOSIS] column_names: list of string, default [] Specify columns to be used for statistic computation by column names in header column_indexes: list of int, default -1 Specify columns to be used for statistic computation by column order in header -1 indicates to compute statistics over all columns bias: bool, default: True If False, the calculations of skewness and kurtosis are corrected for statistical bias. need_run: bool, default True Indicate whether to run this modules """ LEGAL_STAT = [consts.COUNT, consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION, consts.MEDIAN, consts.MIN, consts.MAX, consts.VARIANCE, consts.COEFFICIENT_OF_VARIATION, consts.MISSING_COUNT, consts.MISSING_RATIO, consts.SKEWNESS, consts.KURTOSIS] BASIC_STAT = [consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION, consts.MEDIAN, consts.MIN, consts.MAX, consts.MISSING_RATIO, consts.MISSING_COUNT, consts.SKEWNESS, consts.KURTOSIS, consts.COEFFICIENT_OF_VARIATION] LEGAL_QUANTILE = re.compile("^(100)|([1-9]?[0-9])%$") def __init__(self, statistics="summary", column_names=None, column_indexes=-1, need_run=True, abnormal_list=None, quantile_error=consts.DEFAULT_RELATIVE_ERROR, bias=True): super().__init__() self.statistics = statistics self.column_names = column_names self.column_indexes = column_indexes self.abnormal_list = abnormal_list self.need_run = need_run self.quantile_error = quantile_error self.bias = bias # @staticmethod # def extend_statistics(statistic_name): # basic_metrics = [consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION, # consts.MEDIAN, consts.MIN, consts.MAX, consts.MISSING_RATIO, # consts.MISSING_COUNT, consts.SKEWNESS, consts.KURTOSIS, # consts.COEFFICIENT_OF_VARIATION] # if statistic_name == "summary": # return basic_metrics # # if statistic_name == "describe": # return [consts.COUNT, consts.MEAN, consts.STANDARD_DEVIATION, # consts.MIN, consts.MAX] @staticmethod def find_stat_name_match(stat_name): if stat_name in StatisticsParam.LEGAL_STAT or StatisticsParam.LEGAL_QUANTILE.match(stat_name): return True return False # match_result = [legal_name == stat_name for legal_name in StatisticsParam.LEGAL_STAT] # match_result.append(0 if LEGAL_QUANTILE.match(stat_name) is None else True) # match_found = sum(match_result) > 0 # return match_found def check(self): model_param_descr = "Statistics's param statistics" BaseParam.check_boolean(self.need_run, model_param_descr) statistics = copy.copy(self.BASIC_STAT) if not 
isinstance(self.statistics, list): if self.statistics in [consts.SUMMARY]: self.statistics = statistics else: if self.statistics not in statistics: statistics.append(self.statistics) self.statistics = statistics else: for s in self.statistics: if s not in statistics: statistics.append(s) self.statistics = statistics for stat_name in self.statistics: match_found = StatisticsParam.find_stat_name_match(stat_name) if not match_found: raise ValueError(f"Illegal statistics name provided: {stat_name}.") self.column_names = [] if self.column_names is None else self.column_names self.column_indexes = [] if self.column_indexes is None else self.column_indexes self.abnormal_list = [] if self.abnormal_list is None else self.abnormal_list model_param_descr = "Statistics's param column_names" if not isinstance(self.column_names, list): raise ValueError(f"column_names should be list of string.") for col_name in self.column_names: BaseParam.check_string(col_name, model_param_descr) model_param_descr = "Statistics's param column_indexes" if not isinstance(self.column_indexes, list) and self.column_indexes != -1: raise ValueError(f"column_indexes should be list of int or -1.") if self.column_indexes != -1: for col_index in self.column_indexes: if not isinstance(col_index, int): raise ValueError(f"{model_param_descr} should be int or list of int") if col_index < -consts.FLOAT_ZERO: raise ValueError(f"{model_param_descr} should be non-negative int value(s)") if not isinstance(self.abnormal_list, list): raise ValueError(f"abnormal_list should be list of int or string.") self.check_decimal_float(self.quantile_error, "Statistics's param quantile_error ") self.check_boolean(self.bias, "Statistics's param bias ") return True
6,353
43.125
102
py
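The statistics list above accepts both named metrics and quantile strings matched by LEGAL_QUANTILE; a sketch (assuming the consts are the plain metric names such as "mean"):

from federatedml.param.statistics_param import StatisticsParam

param = StatisticsParam(statistics=["mean", "95%"], column_indexes=-1)
param.check()  # "95%" passes the quantile pattern; an unknown name raises ValueError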
FATE
FATE-master/python/federatedml/param/linear_regression_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.glm_param import LinearModelParam from federatedml.param.callback_param import CallbackParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.init_model_param import InitParam from federatedml.param.sqn_param import StochasticQuasiNewtonParam from federatedml.param.stepwise_param import StepwiseParam from federatedml.util import consts class LinearParam(LinearModelParam): """ Parameters used for Linear Regression. Parameters ---------- penalty : {'L2' or 'L1'} Penalty method used in LinR. Please note that, when using encrypted version in HeteroLinR, 'L1' is not supported. When using Homo-LR, 'L1' is not supported tol : float, default: 1e-4 The tolerance of convergence alpha : float, default: 1.0 Regularization strength coefficient. optimizer : {'sgd', 'rmsprop', 'adam', 'sqn', 'adagrad'} Optimize method batch_size : int, default: -1 Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. learning_rate : float, default: 0.01 Learning rate max_iter : int, default: 20 The maximum iteration for training. init_param: InitParam object, default: default InitParam object Init param method object. early_stop : {'diff', 'abs', 'weight_diff'} Method used to judge convergence. a) diff: Use difference of loss between two iterations to judge whether converge. b) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < tol, it is converged. c) weight_diff: Use difference between weights of two consecutive iterations encrypt_param: EncryptParam object, default: default EncryptParam object encrypt param encrypted_mode_calculator_param: EncryptedModeCalculatorParam object, default: default EncryptedModeCalculatorParam object encrypted mode calculator param cv_param: CrossValidationParam object, default: default CrossValidationParam object cv param decay: int or float, default: 1 Decay rate for learning rate. learning rate will follow the following decay schedule. lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t) where t is the iter number. decay_sqrt: Bool, default: True lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t) validation_freqs: int, list, tuple, set, or None validation frequency during training, required when using early stopping. The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds. When it is larger than 1, a number which is divisible by "max_iter" is recommended, otherwise, you will miss the validation scores of the last training iteration.
early_stopping_rounds: int, default: None If positive number specified, at every specified training rounds, program checks for early stopping criteria. Validation_freqs must also be set when using early stopping. metrics: list or None, default: None Specify which metrics to be used when performing evaluation during training process. If metrics have not improved at early_stopping rounds, training stops before convergence. If set as empty, default metrics will be used. For regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error'] use_first_metric_only: bool, default: False Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement. floating_point_precision: None or integer if not None, use floating_point_precision-bit to speed up calculation, e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide the result by 2**floating_point_precision in the end. callback_param: CallbackParam object callback param """ def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='sgd', batch_size=-1, learning_rate=0.01, init_param=InitParam(), max_iter=20, early_stop='diff', encrypt_param=EncryptParam(), sqn_param=StochasticQuasiNewtonParam(), encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, validation_freqs=None, early_stopping_rounds=None, stepwise_param=StepwiseParam(), metrics=None, use_first_metric_only=False, floating_point_precision=23, callback_param=CallbackParam()): super(LinearParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer, batch_size=batch_size, learning_rate=learning_rate, init_param=init_param, max_iter=max_iter, early_stop=early_stop, encrypt_param=encrypt_param, cv_param=cv_param, decay=decay, decay_sqrt=decay_sqrt, validation_freqs=validation_freqs, early_stopping_rounds=early_stopping_rounds, stepwise_param=stepwise_param, metrics=metrics, use_first_metric_only=use_first_metric_only, floating_point_precision=floating_point_precision, callback_param=callback_param) self.sqn_param = copy.deepcopy(sqn_param) self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param) def check(self): descr = "linear_regression_param's " super(LinearParam, self).check() if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad', 'sqn']: raise ValueError( descr + "optimizer not supported, optimizer should be" " 'sgd', 'rmsprop', 'adam', 'sqn' or 'adagrad'") self.sqn_param.check() if self.encrypt_param.method != consts.PAILLIER: raise ValueError( descr + "encrypt method supports 'Paillier' only") return True
7,255
55.6875
182
py
FATE
FATE-master/python/federatedml/param/__init__.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.boosting_param import BoostingParam from federatedml.param.boosting_param import DecisionTreeParam from federatedml.param.boosting_param import ObjectiveParam from federatedml.param.column_expand_param import ColumnExpandParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.data_split_param import DataSplitParam from federatedml.param.dataio_param import DataIOParam from federatedml.param.data_transform_param import DataTransformParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.evaluation_param import EvaluateParam from federatedml.param.feature_binning_param import FeatureBinningParam from federatedml.param.feature_selection_param import FeatureSelectionParam from federatedml.param.feldman_verifiable_sum_param import FeldmanVerifiableSumParam from federatedml.param.ftl_param import FTLParam from federatedml.param.hetero_kmeans_param import KmeansParam from federatedml.param.hetero_nn_param import HeteroNNParam from federatedml.param.homo_nn_param import HomoNNParam from federatedml.param.homo_onehot_encoder_param import HomoOneHotParam from federatedml.param.init_model_param import InitParam from federatedml.param.intersect_param import IntersectParam from federatedml.param.intersect_param import EncodeParam from federatedml.param.intersect_param import RSAParam from federatedml.param.linear_regression_param import LinearParam from federatedml.param.local_baseline_param import LocalBaselineParam from federatedml.param.logistic_regression_param import LogisticParam from federatedml.param.one_vs_rest_param import OneVsRestParam from federatedml.param.pearson_param import PearsonParam from federatedml.param.poisson_regression_param import PoissonParam from federatedml.param.positive_unlabeled_param import PositiveUnlabeledParam from federatedml.param.predict_param import PredictParam from federatedml.param.psi_param import PSIParam from federatedml.param.sample_param import SampleParam from federatedml.param.sample_weight_param import SampleWeightParam from federatedml.param.scale_param import ScaleParam from federatedml.param.scorecard_param import ScorecardParam from federatedml.param.secure_add_example_param import SecureAddExampleParam from federatedml.param.sir_param import SecureInformationRetrievalParam from federatedml.param.sqn_param import StochasticQuasiNewtonParam from federatedml.param.statistics_param import StatisticsParam from federatedml.param.stepwise_param import StepwiseParam from federatedml.param.union_param import UnionParam __all__ = [ "BoostingParam", "ObjectiveParam", "DecisionTreeParam", "CrossValidationParam", "DataSplitParam", "DataIOParam", "DataTransformParam", "EncryptParam", "EncryptedModeCalculatorParam", "FeatureBinningParam", "FeatureSelectionParam", "FTLParam", 
"HeteroNNParam", "HomoNNParam", "HomoOneHotParam", "InitParam", "IntersectParam", "EncodeParam", "RSAParam", "LinearParam", "LocalBaselineParam", "LogisticParam", "OneVsRestParam", "PearsonParam", "PoissonParam", "PositiveUnlabeledParam", "PredictParam", "PSIParam", "SampleParam", "ScaleParam", "SecureAddExampleParam", "StochasticQuasiNewtonParam", "StatisticsParam", "StepwiseParam", "UnionParam", "ColumnExpandParam", "KmeansParam", "ScorecardParam", "SecureInformationRetrievalParam", "SampleWeightParam", "FeldmanVerifiableSumParam", "EvaluateParam" ]
4,289
40.25
91
py
FATE
FATE-master/python/federatedml/param/data_split_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import LOGGER class DataSplitParam(BaseParam): """ Define data split param used in data split. Parameters ---------- random_state : None or int, default: None Specify the random state for shuffle. test_size : float or int or None, default: 0.0 Specify test data set size. float value specifies fraction of input data set, int value specifies exact number of data instances train_size : float or int or None, default: 0.8 Specify train data set size. float value specifies fraction of input data set, int value specifies exact number of data instances validate_size : float or int or None, default: 0.2 Specify validate data set size. float value specifies fraction of input data set, int value specifies exact number of data instances stratified : bool, default: False Define whether sampling should be stratified, according to label value. shuffle : bool, default: True Define whether to do shuffle before splitting or not. split_points : None or list, default : None Specify the point(s) by which continuous label values are bucketed into bins for stratified split.
e.g. [0.2] for two bins or [0.1, 1, 3] for 4 bins need_run: bool, default: True Specify whether to run data split """ def __init__(self, random_state=None, test_size=None, train_size=None, validate_size=None, stratified=False, shuffle=True, split_points=None, need_run=True): super(DataSplitParam, self).__init__() self.random_state = random_state self.test_size = test_size self.train_size = train_size self.validate_size = validate_size self.stratified = stratified self.shuffle = shuffle self.split_points = split_points self.need_run = need_run def check(self): model_param_descr = "data split param's " if self.random_state is not None: if not isinstance(self.random_state, int): raise ValueError(f"{model_param_descr} random state should be int type") BaseParam.check_nonnegative_number(self.random_state, f"{model_param_descr} random_state ") if self.test_size is not None: BaseParam.check_nonnegative_number(self.test_size, f"{model_param_descr} test_size ") if isinstance(self.test_size, float): BaseParam.check_decimal_float(self.test_size, f"{model_param_descr} test_size ") if self.train_size is not None: BaseParam.check_nonnegative_number(self.train_size, f"{model_param_descr} train_size ") if isinstance(self.train_size, float): BaseParam.check_decimal_float(self.train_size, f"{model_param_descr} train_size ") if self.validate_size is not None: BaseParam.check_nonnegative_number(self.validate_size, f"{model_param_descr} validate_size ") if isinstance(self.validate_size, float): BaseParam.check_decimal_float(self.validate_size, f"{model_param_descr} validate_size ") # use default size values if none given if self.test_size is None and self.train_size is None and self.validate_size is None: self.test_size = 0.0 self.train_size = 0.8 self.validate_size = 0.2 BaseParam.check_boolean(self.stratified, f"{model_param_descr} stratified ") BaseParam.check_boolean(self.shuffle, f"{model_param_descr} shuffle ") BaseParam.check_boolean(self.need_run, f"{model_param_descr} need run ") if self.split_points is not None: if not isinstance(self.split_points, list): raise ValueError(f"{model_param_descr} split_points should be list type") LOGGER.debug("Finish data_split parameter check!") return True
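A hedged usage sketch for the splitter config above (values are illustrative):

from federatedml.param.data_split_param import DataSplitParam

# 60/20/20 split, stratified by label, with a fixed shuffle seed.
param = DataSplitParam(
    random_state=42,
    train_size=0.6,
    validate_size=0.2,
    test_size=0.2,
    stratified=True,
    shuffle=True,
)
param.check()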
4,605
45.525253
112
py
FATE
FATE-master/python/federatedml/param/feature_binning_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.base_param import BaseParam from federatedml.param.encrypt_param import EncryptParam from federatedml.util import consts, LOGGER class TransformParam(BaseParam): """ Define how to transform the cols Parameters ---------- transform_cols : list of column index, default: -1 Specify which columns need to be transformed. If column index is None, none of the columns will be transformed. If it is -1, it will use same columns as cols in binning module. Note that columns specified by `transform_cols` and `transform_names` will be combined. transform_names: list of string, default: [] Specify which columns need to be calculated. Each element in the list represents a column name in header. Note that columns specified by `transform_cols` and `transform_names` will be combined. transform_type: {'bin_num', 'woe', None} Specify which value these columns are going to be replaced with. 1. bin_num: Transfer original feature value to the bin index this value belongs to. 2. woe: This is valid for guest party only. It will replace the original value with its woe value. 3. None: nothing will be replaced. """ def __init__(self, transform_cols=-1, transform_names=None, transform_type="bin_num"): super(TransformParam, self).__init__() self.transform_cols = transform_cols self.transform_names = transform_names self.transform_type = transform_type def check(self): descr = "Transform Param's " if self.transform_cols is not None and self.transform_cols != -1: self.check_defined_type(self.transform_cols, descr, ['list']) self.check_defined_type(self.transform_names, descr, ['list', "NoneType"]) if self.transform_names is not None: for name in self.transform_names: if not isinstance(name, str): raise ValueError("Elements in transform_names should be string type") self.check_valid_value(self.transform_type, descr, ['bin_num', 'woe', None]) class OptimalBinningParam(BaseParam): """ Indicate optimal binning params Parameters ---------- metric_method: str, default: "iv" The algorithm metric method. Support iv, gini, ks, chi-square min_bin_pct: float, default: 0.05 The minimum percentage of each bucket max_bin_pct: float, default: 1.0 The maximum percentage of each bucket init_bin_nums: int, default: 1000 Number of bins when initializing mixture: bool, default: True Whether each bucket needs event and non-event records init_bucket_method: str, default: quantile Init bucket methods. Accept quantile and bucket.
""" def __init__(self, metric_method='iv', min_bin_pct=0.05, max_bin_pct=1.0, init_bin_nums=1000, mixture=True, init_bucket_method='quantile'): super().__init__() self.init_bucket_method = init_bucket_method self.metric_method = metric_method self.max_bin = None self.mixture = mixture self.max_bin_pct = max_bin_pct self.min_bin_pct = min_bin_pct self.init_bin_nums = init_bin_nums self.adjustment_factor = None def check(self): descr = "hetero binning's optimal binning param's" self.check_string(self.metric_method, descr) self.metric_method = self.metric_method.lower() if self.metric_method in ['chi_square', 'chi-square']: self.metric_method = 'chi_square' self.check_valid_value(self.metric_method, descr, ['iv', 'gini', 'chi_square', 'ks']) self.check_positive_integer(self.init_bin_nums, descr) self.init_bucket_method = self.init_bucket_method.lower() self.check_valid_value(self.init_bucket_method, descr, ['quantile', 'bucket']) if self.max_bin_pct not in [1, 0]: self.check_decimal_float(self.max_bin_pct, descr) if self.min_bin_pct not in [1, 0]: self.check_decimal_float(self.min_bin_pct, descr) if self.min_bin_pct > self.max_bin_pct: raise ValueError("Optimal binning's min_bin_pct should less or equal than max_bin_pct") self.check_boolean(self.mixture, descr) self.check_positive_integer(self.init_bin_nums, descr) class FeatureBinningParam(BaseParam): """ Define the feature binning method Parameters ---------- method : str, 'quantile', 'bucket' or 'optimal', default: 'quantile' Binning method. compress_thres: int, default: 10000 When the number of saved summaries exceed this threshold, it will call its compress function head_size: int, default: 10000 The buffer size to store inserted observations. When head list reach this buffer size, the QuantileSummaries object start to generate summary(or stats) and insert into its sampled list. error: float, 0 <= error < 1 default: 0.001 The error of tolerance of binning. The final split point comes from original data, and the rank of this value is close to the exact rank. More precisely, floor((p - 2 * error) * N) <= rank(x) <= ceil((p + 2 * error) * N) where p is the quantile in float, and N is total number of data. bin_num: int, bin_num > 0, default: 10 The max bin number for binning bin_indexes : list of int or int, default: -1 Specify which columns need to be binned. -1 represent for all columns. If you need to indicate specific cols, provide a list of header index instead of -1. Note tha columns specified by `bin_indexes` and `bin_names` will be combined. bin_names : list of string, default: [] Specify which columns need to calculated. Each element in the list represent for a column name in header. Note tha columns specified by `bin_indexes` and `bin_names` will be combined. adjustment_factor : float, default: 0.5 the adjustment factor when calculating WOE. This is useful when there is no event or non-event in a bin. Please note that this parameter will NOT take effect for setting in host. category_indexes : list of int or int, default: [] Specify which columns are category features. -1 represent for all columns. List of int indicate a set of such features. For category features, bin_obj will take its original values as split_points and treat them as have been binned. If this is not what you expect, please do NOT put it into this parameters. The number of categories should not exceed bin_num set above. Note tha columns specified by `category_indexes` and `category_names` will be combined. 
category_names : list of string, default: [] Use column names to specify category features. Each element in the list represents a column name in header. Note that columns specified by `category_indexes` and `category_names` will be combined. local_only : bool, default: False Whether to just provide the binning method to the guest party. If true, the host party will do nothing. Warning: this parameter will be deprecated in a future version. transform_param: TransformParam Define how to transform the binned data. need_run: bool, default True Indicate if this module needs to be run skip_static: bool, default False If true, binning will not calculate iv, woe etc. In this case, optimal-binning will not be supported. """ def __init__(self, method=consts.QUANTILE, compress_thres=consts.DEFAULT_COMPRESS_THRESHOLD, head_size=consts.DEFAULT_HEAD_SIZE, error=consts.DEFAULT_RELATIVE_ERROR, bin_num=consts.G_BIN_NUM, bin_indexes=-1, bin_names=None, adjustment_factor=0.5, transform_param=TransformParam(), local_only=False, category_indexes=None, category_names=None, need_run=True, skip_static=False): super(FeatureBinningParam, self).__init__() self.method = method self.compress_thres = compress_thres self.head_size = head_size self.error = error self.adjustment_factor = adjustment_factor self.bin_num = bin_num self.bin_indexes = bin_indexes self.bin_names = bin_names self.category_indexes = category_indexes self.category_names = category_names self.transform_param = copy.deepcopy(transform_param) self.need_run = need_run self.skip_static = skip_static self.local_only = local_only def check(self): descr = "Binning param's" self.check_string(self.method, descr) self.method = self.method.lower() self.check_positive_integer(self.compress_thres, descr) self.check_positive_integer(self.head_size, descr) self.check_decimal_float(self.error, descr) self.check_positive_integer(self.bin_num, descr) if self.bin_indexes != -1: self.check_defined_type(self.bin_indexes, descr, ['list', 'RepeatedScalarContainer', "NoneType"]) self.check_defined_type(self.bin_names, descr, ['list', "NoneType"]) self.check_defined_type(self.category_indexes, descr, ['list', "NoneType"]) self.check_defined_type(self.category_names, descr, ['list', "NoneType"]) self.check_open_unit_interval(self.adjustment_factor, descr) self.check_boolean(self.local_only, descr) class HeteroFeatureBinningParam(FeatureBinningParam): """ split_points_by_index: dict, default None Manually specified split points for local features; key should be feature index, value should be split points in sorted list; along with `split_points_by_col_name`, keys should cover all local features, including categorical features; note that each split point list should have length equal to the desired bin num (n), with the first (n-1) entries equal to the maximum value (inclusive) of each of the first (n-1) bins, and the nth value the max of the current feature. split_points_by_col_name: dict, default None Manually specified split points for local features; key should be feature name, value should be split points in sorted list; along with `split_points_by_index`, keys should cover all local features, including categorical features; note that each split point list should have length equal to the desired bin num (n), with the first (n-1) entries equal to the maximum value (inclusive) of each of the first (n-1) bins, and the nth value the max of the current feature.
""" def __init__(self, method=consts.QUANTILE, compress_thres=consts.DEFAULT_COMPRESS_THRESHOLD, head_size=consts.DEFAULT_HEAD_SIZE, error=consts.DEFAULT_RELATIVE_ERROR, bin_num=consts.G_BIN_NUM, bin_indexes=-1, bin_names=None, adjustment_factor=0.5, transform_param=TransformParam(), optimal_binning_param=OptimalBinningParam(), local_only=False, category_indexes=None, category_names=None, encrypt_param=EncryptParam(), need_run=True, skip_static=False, split_points_by_index=None, split_points_by_col_name=None): super(HeteroFeatureBinningParam, self).__init__(method=method, compress_thres=compress_thres, head_size=head_size, error=error, bin_num=bin_num, bin_indexes=bin_indexes, bin_names=bin_names, adjustment_factor=adjustment_factor, transform_param=transform_param, category_indexes=category_indexes, category_names=category_names, need_run=need_run, local_only=local_only, skip_static=skip_static) self.optimal_binning_param = copy.deepcopy(optimal_binning_param) self.encrypt_param = encrypt_param self.split_points_by_index = split_points_by_index self.split_points_by_col_name = split_points_by_col_name def check(self): descr = "Hetero Binning param's" super(HeteroFeatureBinningParam, self).check() self.check_valid_value(self.method, descr, [consts.QUANTILE, consts.BUCKET, consts.OPTIMAL]) self.optimal_binning_param.check() self.encrypt_param.check() if self.encrypt_param.method != consts.PAILLIER: raise ValueError("Feature Binning support Paillier encrypt method only.") if self.skip_static and self.method == consts.OPTIMAL: raise ValueError("When skip_static, optimal binning is not supported.") self.transform_param.check() if self.skip_static and self.transform_param.transform_type == 'woe': raise ValueError("To use woe transform, skip_static should set as False") if self.split_points_by_index is not None: LOGGER.warning(f"When manually setting binning split points, 'method' will be ignored.") if not isinstance(self.split_points_by_index, dict): raise ValueError(f"{descr} `split_points_by_index` should be a dict") for k, v in self.split_points_by_index.items(): if not isinstance(k, str): raise ValueError(f"{descr} `split_points_by_index`'s keys should be str") if not isinstance(v, list): raise ValueError(f"{descr} `split_points_by_index`'s values should be given in list format") if sorted(v) != v: raise ValueError(f"{k}'s split points({v}) should be given in sorted order.") if self.split_points_by_col_name is not None: LOGGER.warning(f"When manually setting binning split points, 'method' will be ignored.") if not isinstance(self.split_points_by_col_name, dict): raise ValueError(f"{descr} `split_points_by_col_name` should be a dict") for k, v in self.split_points_by_col_name.items(): if not isinstance(k, str): raise ValueError(f"{descr} `split_points_by_col_name`'s keys should be str") if not isinstance(v, list): raise ValueError(f"{descr} `split_points_by_col_name`'s values should be given in list format") if sorted(v) != v: raise ValueError(f"{k}'s split points({v}) should be given in sorted order.") class HomoFeatureBinningParam(FeatureBinningParam): def __init__(self, method=consts.VIRTUAL_SUMMARY, compress_thres=consts.DEFAULT_COMPRESS_THRESHOLD, head_size=consts.DEFAULT_HEAD_SIZE, error=consts.DEFAULT_RELATIVE_ERROR, sample_bins=100, bin_num=consts.G_BIN_NUM, bin_indexes=-1, bin_names=None, adjustment_factor=0.5, transform_param=TransformParam(), category_indexes=None, category_names=None, need_run=True, skip_static=False, max_iter=100): super(HomoFeatureBinningParam, 
self).__init__(method=method, compress_thres=compress_thres, head_size=head_size, error=error, bin_num=bin_num, bin_indexes=bin_indexes, bin_names=bin_names, adjustment_factor=adjustment_factor, transform_param=transform_param, category_indexes=category_indexes, category_names=category_names, need_run=need_run, skip_static=skip_static) self.sample_bins = sample_bins self.max_iter = max_iter def check(self): descr = "homo binning param's" super(HomoFeatureBinningParam, self).check() self.check_string(self.method, descr) self.method = self.method.lower() self.check_valid_value(self.method, descr, [consts.VIRTUAL_SUMMARY, consts.RECURSIVE_QUERY]) self.check_positive_integer(self.max_iter, descr) if self.max_iter > 100: raise ValueError("Max iter is not allowed to exceed 100")
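A sketch of configuring hetero quantile binning with a WOE transform on the guest side (illustrative values; assumes the default Paillier encrypt settings pass validation):

from federatedml.param.feature_binning_param import HeteroFeatureBinningParam, TransformParam
from federatedml.util import consts

param = HeteroFeatureBinningParam(
    method=consts.QUANTILE,
    bin_num=10,
    bin_indexes=-1,  # -1 bins every column
    transform_param=TransformParam(transform_type='woe'),
)
param.check()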
17,379
52.80805
119
py
FATE
FATE-master/python/federatedml/param/one_vs_rest_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import LOGGER class OneVsRestParam(BaseParam): """ Define the one_vs_rest parameters. Parameters ---------- need_one_vs_rest: bool, default: False Whether to enable the one_vs_rest strategy. has_arbiter: bool, default: True Some algorithms, for instance SecureBoost in FATE, may not have an arbiter; for these algorithms, it should be set to False. """ def __init__(self, need_one_vs_rest=False, has_arbiter=True): super().__init__() self.need_one_vs_rest = need_one_vs_rest self.has_arbiter = has_arbiter def check(self): if type(self.has_arbiter).__name__ != "bool": raise ValueError( "one_vs_rest param's has_arbiter {} not supported, should be bool type".format( self.has_arbiter)) LOGGER.debug("Finish one_vs_rest parameter check!") return True
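A minimal sketch (illustrative) enabling one-vs-rest for an algorithm without an arbiter, such as the SecureBoost case mentioned in the docstring:

from federatedml.param.one_vs_rest_param import OneVsRestParam

param = OneVsRestParam(need_one_vs_rest=True, has_arbiter=False)
param.check()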
1,555
31.416667
95
py
FATE
FATE-master/python/federatedml/param/label_transform_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import LOGGER class LabelTransformParam(BaseParam): """ Define label transform param used in label transform. Parameters ---------- label_encoder : None or dict, default : None Specify (label, encoded label) key-value pairs for transforming labels to new values. e.g. {"Yes": 1, "No": 0}; **new in ver 1.9: during training, input labels not found in `label_encoder` will retain their original values label_list : None or list, default : None List all input labels, used for matching types of original keys in label_encoder dict, length should match key count in label_encoder, e.g. ["Yes", "No"]; **new in ver 1.9: given non-empty `label_encoder`, when `label_list` not provided, the module will infer label types from input data need_run: bool, default: True Specify whether to run label transform """ def __init__(self, label_encoder=None, label_list=None, need_run=True): super(LabelTransformParam, self).__init__() self.label_encoder = label_encoder self.label_list = label_list self.need_run = need_run def check(self): model_param_descr = "label transform param's " BaseParam.check_boolean(self.need_run, f"{model_param_descr} need run ") if self.label_encoder is not None: if not isinstance(self.label_encoder, dict): raise ValueError(f"{model_param_descr} label_encoder should be dict type") if len(self.label_encoder) == 0: self.label_encoder = None if self.label_list is not None: if not isinstance(self.label_list, list): raise ValueError(f"{model_param_descr} label_list should be list type") if self.label_encoder and self.label_list and len(self.label_list) != len(self.label_encoder.keys()): raise ValueError(f"label_list's length not matching label_encoder key count.") if len(self.label_list) == 0: self.label_list = None LOGGER.debug("Finish label transformer parameter check!") return True
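A hedged sketch mapping string labels to binary values with the class above (the mapping values are illustrative):

from federatedml.param.label_transform_param import LabelTransformParam

param = LabelTransformParam(
    label_encoder={"Yes": 1, "No": 0},
    label_list=["Yes", "No"],  # matches the key types of label_encoder
)
param.check()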
2,882
39.605634
115
py
FATE
FATE-master/python/federatedml/param/init_model_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from federatedml.param.base_param import BaseParam class InitParam(BaseParam): """ Parameters used in initializing a model. Parameters ---------- init_method : {'random_uniform', 'random_normal', 'ones', 'zeros' or 'const'} Initial method. init_const : int or float, default: 1 Required when init_method is 'const'. Specify the constant. fit_intercept : bool, default: True Whether to initialize the intercept or not. random_seed : None or int, default: None Specify the random seed used in initialization. """ def __init__(self, init_method='random_uniform', init_const=1, fit_intercept=True, random_seed=None): super().__init__() self.init_method = init_method self.init_const = init_const self.fit_intercept = fit_intercept self.random_seed = random_seed def check(self): if type(self.init_method).__name__ != "str": raise ValueError( "Init param's init_method {} not supported, should be str type".format(self.init_method)) else: self.init_method = self.init_method.lower() if self.init_method not in ['random_uniform', 'random_normal', 'ones', 'zeros', 'const']: raise ValueError( "Init param's init_method {} not supported, init_method should be in 'random_uniform'," " 'random_normal' 'ones', 'zeros' or 'const'".format(self.init_method)) if type(self.init_const).__name__ not in ['int', 'float']: raise ValueError( "Init param's init_const {} not supported, should be int or float type".format(self.init_const)) if type(self.fit_intercept).__name__ != 'bool': raise ValueError( "Init param's fit_intercept {} not supported, should be bool type".format(self.fit_intercept)) if self.random_seed is not None: if type(self.random_seed).__name__ != 'int': raise ValueError( "Init param's random_seed {} not supported, should be int type".format(self.random_seed)) return True
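A minimal sketch (illustrative values):

from federatedml.param.init_model_param import InitParam

# Zero-initialized weights, with an intercept and a fixed seed.
param = InitParam(init_method='zeros', fit_intercept=True, random_seed=107)
param.check()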
2,742
39.338235
118
py
FATE
FATE-master/python/federatedml/param/callback_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class CallbackParam(BaseParam): """ Define callback methods used in federated ml. Parameters ---------- callbacks : list, default: [] Indicate what kinds of callback functions are desired during the training process. Accepted values: {'EarlyStopping', 'ModelCheckpoint', 'PerformanceEvaluate'} validation_freqs: {None, int, list, tuple, set} validation frequency during training. early_stopping_rounds: None or int Will stop training if one metric doesn't improve in the last early_stopping_rounds rounds metrics: None, or list Indicate which metrics will be used when executing evaluation during the training process. If set as empty, default metrics for the specific task type will be used. As for binary classification, default metrics are ['auc', 'ks'] use_first_metric_only: bool, default: False Indicate whether to use the first metric only for early stopping judgement. save_freq: int, default: 1 The callbacks save the model every save_freq epochs """ def __init__(self, callbacks=None, validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=False, save_freq=1): super(CallbackParam, self).__init__() self.callbacks = callbacks or [] self.validation_freqs = validation_freqs self.early_stopping_rounds = early_stopping_rounds self.metrics = metrics or [] self.use_first_metric_only = use_first_metric_only self.save_freq = save_freq def check(self): self.callbacks = [] if self.callbacks is None else self.callbacks self.metrics = [] if self.metrics is None else self.metrics if self.early_stopping_rounds is None: pass elif isinstance(self.early_stopping_rounds, int): if self.early_stopping_rounds < 1: raise ValueError("early stopping rounds should be larger than 0 when it's integer") if self.validation_freqs is None: raise ValueError("validation freqs must be set when early stopping is enabled") if self.validation_freqs is not None: if type(self.validation_freqs).__name__ not in ["int", "list", "tuple", "set"]: raise ValueError( "validation strategy param's validate_freqs's type not supported," " should be int or list or tuple or set" ) if type(self.validation_freqs).__name__ == "int" and \ self.validation_freqs <= 0: raise ValueError("validation strategy param's validate_freqs should be greater than 0") if self.metrics is not None and not isinstance(self.metrics, list): raise ValueError("metrics should be a list") if not isinstance(self.use_first_metric_only, bool): raise ValueError("use_first_metric_only should be a boolean") return True
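A hedged sketch of early stopping on AUC, evaluated every iteration (the callback names come from the docstring above; metric names are illustrative):

from federatedml.param.callback_param import CallbackParam

param = CallbackParam(
    callbacks=['EarlyStopping', 'PerformanceEvaluate'],
    validation_freqs=1,
    early_stopping_rounds=5,
    metrics=['auc', 'ks'],
    use_first_metric_only=True,
)
param.check()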
3,687
42.904762
110
py
FATE
FATE-master/python/federatedml/param/homo_nn_param.py
from federatedml.param.base_param import BaseParam class TrainerParam(BaseParam): def __init__(self, trainer_name=None, **kwargs): super(TrainerParam, self).__init__() self.trainer_name = trainer_name self.param = kwargs def check(self): if self.trainer_name is not None: self.check_string(self.trainer_name, 'trainer_name') def to_dict(self): ret = {'trainer_name': self.trainer_name, 'param': self.param} return ret class DatasetParam(BaseParam): def __init__(self, dataset_name=None, **kwargs): super(DatasetParam, self).__init__() self.dataset_name = dataset_name self.param = kwargs def check(self): if self.dataset_name is not None: self.check_string(self.dataset_name, 'dataset_name') def to_dict(self): ret = {'dataset_name': self.dataset_name, 'param': self.param} return ret class HomoNNParam(BaseParam): def __init__(self, trainer: TrainerParam = TrainerParam(), dataset: DatasetParam = DatasetParam(), torch_seed: int = 100, nn_define: dict = None, loss: dict = None, optimizer: dict = None, ds_config: dict = None ): super(HomoNNParam, self).__init__() self.trainer = trainer self.dataset = dataset self.torch_seed = torch_seed self.nn_define = nn_define self.loss = loss self.optimizer = optimizer self.ds_config = ds_config def check(self): assert isinstance(self.trainer, TrainerParam), 'trainer must be a TrainerParam()' assert isinstance(self.dataset, DatasetParam), 'dataset must be a DatasetParam()' self.trainer.check() self.dataset.check() # torch seed >= 0 if isinstance(self.torch_seed, int): assert self.torch_seed >= 0, 'torch seed should be an int >=0' else: raise ValueError('torch seed should be an int >=0') if self.nn_define is not None: assert isinstance(self.nn_define, dict), 'nn define should be a dict defining model structures' if self.loss is not None: assert isinstance(self.loss, dict), 'loss parameter should be a loss config dict' if self.optimizer is not None: assert isinstance(self.optimizer, dict), 'optimizer parameter should be a config dict'
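A construction sketch; the trainer/dataset names and the shapes of the loss/optimizer dicts below are assumptions for illustration and must match what the deployed trainer and dataset registry actually accept:

from federatedml.param.homo_nn_param import HomoNNParam, TrainerParam, DatasetParam

param = HomoNNParam(
    trainer=TrainerParam(trainer_name='fedavg_trainer', epochs=10),  # name assumed
    dataset=DatasetParam(dataset_name='table'),                      # name assumed
    torch_seed=100,
    loss={'loss_fn': 'BCELoss'},                  # dict shape assumed
    optimizer={'optimizer': 'Adam', 'lr': 0.01},  # dict shape assumed
)
param.check()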
2,502
31.506494
107
py
FATE
FATE-master/python/federatedml/param/predict_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ################################################################################ from federatedml.param.base_param import BaseParam from federatedml.util import LOGGER class PredictParam(BaseParam): """ Define the predict method of HomoLR, HeteroLR, SecureBoosting Parameters ---------- threshold: float or int The threshold used to separate positive and negative classes. Normally, it should be in (0, 1) """ def __init__(self, threshold=0.5): self.threshold = threshold def check(self): if type(self.threshold).__name__ not in ["float", "int"]: raise ValueError("predict param's threshold {} not supported, should be float or int".format( self.threshold)) LOGGER.debug("Finish predict parameter check!") return True
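A one-line sketch raising the positive-class decision threshold from the 0.5 default (value illustrative):

from federatedml.param.predict_param import PredictParam

param = PredictParam(threshold=0.7)
param.check()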
1,487
30.659574
109
py
FATE
FATE-master/python/federatedml/param/sample_weight_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam from federatedml.util import consts, LOGGER class SampleWeightParam(BaseParam): """ Define sample weight parameters Parameters ---------- class_weight : str or dict, or None, default None class weight dictionary or class weight computation mode, string value only accepts 'balanced'; if dict provided, key should be class (label), and weight will not be normalized, e.g.: {'0': 1, '1': 2}; if both class_weight and sample_weight_name are None, return original input data. sample_weight_name : str name of the column which specifies sample weight; if both class_weight and sample_weight_name are None, return original input data normalize : bool, default False whether to normalize sample weight extracted from `sample_weight_name` column need_run : bool, default True whether to run this module or not """ def __init__(self, class_weight=None, sample_weight_name=None, normalize=False, need_run=True): self.class_weight = class_weight self.sample_weight_name = sample_weight_name self.normalize = normalize self.need_run = need_run def check(self): descr = "sample weight param's" if self.class_weight: if not isinstance(self.class_weight, str) and not isinstance(self.class_weight, dict): raise ValueError(f"{descr} class_weight must be str, dict, or None.") if isinstance(self.class_weight, str): self.class_weight = self.check_and_change_lower(self.class_weight, [consts.BALANCED], f"{descr} class_weight") if isinstance(self.class_weight, dict): for k, v in self.class_weight.items(): if v < 0: LOGGER.warning(f"Negative value {v} provided for class {k} as class_weight.") if self.sample_weight_name: self.check_string(self.sample_weight_name, f"{descr} sample_weight_name") self.check_boolean(self.need_run, f"{descr} need_run") self.check_boolean(self.normalize, f"{descr} normalize") return True
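A hedged sketch up-weighting the minority class with an explicit per-label dictionary (labels and weights illustrative; weights given this way are not normalized):

from federatedml.param.sample_weight_param import SampleWeightParam

param = SampleWeightParam(class_weight={'0': 1, '1': 3})
param.check()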
3,002
39.581081
119
py
FATE
FATE-master/python/federatedml/param/cross_validation_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from federatedml.param.base_param import BaseParam # from federatedml.param.evaluation_param import EvaluateParam from federatedml.util import consts class CrossValidationParam(BaseParam): """ Define cross validation params Parameters ---------- n_splits: int, default: 5 Specify how many splits used in KFold mode: str, default: 'Hetero' Indicate the mode of the current task role: {'Guest', 'Host', 'Arbiter'}, default: 'Guest' Indicate the role of the current party shuffle: bool, default: True Define whether to do shuffle before KFold or not. random_seed: int, default: 1 Specify the random seed for numpy shuffle need_cv: bool, default False Indicate if this module needs to be run output_fold_history: bool, default True Indicate whether to output a table of ids used by each fold, else return original input data. Returned ids are formatted as: {original_id}#fold{fold_num}#{train/validate} history_value_type: {'score', 'instance'}, default: score Indicate whether to include original instance or predict score in the output fold history, only effective when output_fold_history is set to True """ def __init__(self, n_splits=5, mode=consts.HETERO, role=consts.GUEST, shuffle=True, random_seed=1, need_cv=False, output_fold_history=True, history_value_type="score"): super(CrossValidationParam, self).__init__() self.n_splits = n_splits self.mode = mode self.role = role self.shuffle = shuffle self.random_seed = random_seed # self.evaluate_param = copy.deepcopy(evaluate_param) self.need_cv = need_cv self.output_fold_history = output_fold_history self.history_value_type = history_value_type def check(self): model_param_descr = "cross validation param's " self.check_positive_integer(self.n_splits, model_param_descr) self.check_valid_value(self.mode, model_param_descr, valid_values=[consts.HOMO, consts.HETERO]) self.check_valid_value(self.role, model_param_descr, valid_values=[consts.HOST, consts.GUEST, consts.ARBITER]) self.check_boolean(self.shuffle, model_param_descr) self.check_boolean(self.output_fold_history, model_param_descr) self.history_value_type = self.check_and_change_lower( self.history_value_type, ["instance", "score"], model_param_descr) if self.random_seed is not None: self.check_positive_integer(self.random_seed, model_param_descr)
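A sketch of 5-fold hetero cross validation on the guest side, keeping per-fold id history (illustrative values):

from federatedml.param.cross_validation_param import CrossValidationParam
from federatedml.util import consts

param = CrossValidationParam(
    n_splits=5,
    mode=consts.HETERO,
    role=consts.GUEST,
    shuffle=True,
    random_seed=1,
    need_cv=True,
    output_fold_history=True,
    history_value_type='score',
)
param.check()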
3,252
41.802632
118
py
FATE
FATE-master/python/federatedml/param/pearson_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.param.base_param import BaseParam class PearsonParam(BaseParam): """ param for pearson correlation Parameters ---------- column_names : list of string list of column names column_indexes : list of int list of column indexes cross_parties : bool, default: True if True, calculate correlation of columns from both parties need_run : bool set False to skip this party use_mix_rand : bool, default: False mix system random and pseudo random for quicker calculation calc_local_vif : bool, default: True calculate VIF for local columns """ def __init__( self, column_names=None, column_indexes=None, cross_parties=True, need_run=True, use_mix_rand=False, calc_local_vif=True, ): super().__init__() self.column_names = column_names self.column_indexes = column_indexes self.cross_parties = cross_parties self.need_run = need_run self.use_mix_rand = use_mix_rand self.calc_local_vif = calc_local_vif def check(self): if not isinstance(self.use_mix_rand, bool): raise ValueError( f"use_mix_rand accept bool type only, {type(self.use_mix_rand)} got" ) if self.cross_parties and (not self.need_run): raise ValueError( f"need_run should be True (which is default) when cross_parties is True." ) self.column_indexes = [] if self.column_indexes is None else self.column_indexes self.column_names = [] if self.column_names is None else self.column_names if not isinstance(self.column_names, list): raise ValueError( f"type mismatch, column_names with type {type(self.column_names)}" ) for name in self.column_names: if not isinstance(name, str): raise ValueError( f"type mismatch, column_names with element {name}(type is {type(name)})" ) if isinstance(self.column_indexes, list): for idx in self.column_indexes: if not isinstance(idx, int): raise ValueError( f"type mismatch, column_indexes with element {idx}(type is {type(idx)})" ) if isinstance(self.column_indexes, int) and self.column_indexes != -1: raise ValueError( f"column_indexes with type int and value {self.column_indexes}(only -1 allowed)" ) if self.need_run: if isinstance(self.column_indexes, list) and isinstance( self.column_names, list ): if len(self.column_indexes) == 0 and len(self.column_names) == 0: raise ValueError(f"provide at least one column")
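A minimal sketch correlating all local columns with the other party's (illustrative; -1 selects every column, mirroring the convention used elsewhere in this package):

from federatedml.param.pearson_param import PearsonParam

param = PearsonParam(column_indexes=-1, cross_parties=True, calc_local_vif=True)
param.check()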
3,573
34.74
96
py
FATE
FATE-master/python/federatedml/param/hetero_sshe_linr_param.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy from federatedml.param.glm_param import LinearModelParam from federatedml.param.callback_param import CallbackParam from federatedml.param.encrypt_param import EncryptParam from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam from federatedml.param.cross_validation_param import CrossValidationParam from federatedml.param.init_model_param import InitParam from federatedml.util import consts class HeteroSSHELinRParam(LinearModelParam): """ Parameters used for Hetero SSHE Linear Regression. Parameters ---------- penalty : {'L2' or 'L1'} Penalty method used in LinR. Please note that, when using encrypted version in HeteroLinR, 'L1' is not supported. tol : float, default: 1e-4 The tolerance of convergence alpha : float, default: 1.0 Regularization strength coefficient. optimizer : {'sgd', 'rmsprop', 'adam', 'adagrad'} Optimize method batch_size : int, default: -1 Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. learning_rate : float, default: 0.01 Learning rate max_iter : int, default: 20 The maximum iteration for training. init_param: InitParam object, default: default InitParam object Init param method object. early_stop : {'diff', 'abs', 'weight_diff'} Method used to judge convergence. a) diff: Use difference of loss between two iterations to judge whether converge. b) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < tol, it is converged. c) weight_diff: Use difference between weights of two consecutive iterations encrypt_param: EncryptParam object, default: default EncryptParam object encrypt param encrypted_mode_calculator_param: EncryptedModeCalculatorParam object, default: default EncryptedModeCalculatorParam object encrypted mode calculator param cv_param: CrossValidationParam object, default: default CrossValidationParam object cv param decay: int or float, default: 1 Decay rate for learning rate. learning rate will follow the following decay schedule. lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t) where t is the iter number. decay_sqrt: Bool, default: True lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t) callback_param: CallbackParam object callback param reveal_strategy: str, "respectively", "encrypted_reveal_in_host", default: "respectively" "respectively": Means guest and host can reveal their own part of weights only. "encrypted_reveal_in_host": Means the host's weights will be revealed in encrypted mode, and the guest's weights will be revealed in normal mode. reveal_every_iter: bool, default: False Whether to reconstruct model weights every iteration. If so, regularization is available. The performance will be better as well since the algorithm process is simplified.
""" def __init__(self, penalty='L2', tol=1e-4, alpha=1.0, optimizer='sgd', batch_size=-1, learning_rate=0.01, init_param=InitParam(), max_iter=20, early_stop='diff', encrypt_param=EncryptParam(), encrypted_mode_calculator_param=EncryptedModeCalculatorParam(), cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, callback_param=CallbackParam(), use_mix_rand=True, reveal_strategy="respectively", reveal_every_iter=False ): super(HeteroSSHELinRParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer, batch_size=batch_size, learning_rate=learning_rate, init_param=init_param, max_iter=max_iter, early_stop=early_stop, encrypt_param=encrypt_param, cv_param=cv_param, decay=decay, decay_sqrt=decay_sqrt, callback_param=callback_param) self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param) self.use_mix_rand = use_mix_rand self.reveal_strategy = reveal_strategy self.reveal_every_iter = reveal_every_iter def check(self): descr = "sshe linear_regression_param's " super(HeteroSSHELinRParam, self).check() if self.encrypt_param.method != consts.PAILLIER: raise ValueError( descr + "encrypt method supports 'Paillier' only") self.check_boolean(self.reveal_every_iter, descr) if self.penalty is None: pass elif type(self.penalty).__name__ != "str": raise ValueError( f"{descr} penalty {self.penalty} not supported, should be str type") else: self.penalty = self.penalty.upper() """ if self.penalty not in [consts.L1_PENALTY, consts.L2_PENALTY]: raise ValueError( "logistic_param's penalty not supported, penalty should be 'L1', 'L2' or 'none'") """ if not self.reveal_every_iter: if self.penalty not in [consts.L2_PENALTY, consts.NONE.upper()]: raise ValueError( f"penalty should be 'L2' or 'none', when reveal_every_iter is False" ) if type(self.optimizer).__name__ != "str": raise ValueError( f"{descr} optimizer {self.optimizer} not supported, should be str type") else: self.optimizer = self.optimizer.lower() if self.reveal_every_iter: if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad']: raise ValueError( "When reveal_every_iter is True, " f"{descr} optimizer not supported, optimizer should be" " 'sgd', 'rmsprop', 'adam', or 'adagrad'") else: if self.optimizer not in ['sgd']: raise ValueError("When reveal_every_iter is False, " f"{descr} optimizer not supported, optimizer should be" " 'sgd'") if self.callback_param.validation_freqs is not None: if self.reveal_every_iter is False: raise ValueError(f"When reveal_every_iter is False, validation every iter" f" is not supported.") self.reveal_strategy = self.check_and_change_lower(self.reveal_strategy, ["respectively", "encrypted_reveal_in_host"], f"{descr} reveal_strategy") if self.reveal_strategy == "encrypted_reveal_in_host" and self.reveal_every_iter: raise PermissionError("reveal strategy: encrypted_reveal_in_host mode is not allow to reveal every iter.") return True
8,007
49.683544
135
py
FATE
FATE-master/python/federatedml/param/test/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
661
35.777778
75
py
FATE
FATE-master/python/federatedml/param/test/param_json_test.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import os import unittest from federatedml.param.feature_binning_param import FeatureBinningParam from federatedml.util.param_extract import ParamExtract home_dir = os.path.split(os.path.realpath(__file__))[0] class TestParamExtract(unittest.TestCase): def setUp(self): self.param = FeatureBinningParam() json_config_file = home_dir + '/param_feature_binning.json' self.config_path = json_config_file with open(json_config_file, 'r', encoding='utf-8') as load_f: role_config = json.load(load_f) self.config_json = role_config # def tearDown(self): # os.system("rm -r " + self.config_path) def test_directly_extract(self): param_obj = FeatureBinningParam() extractor = ParamExtract() param_obj = extractor.parse_param_from_config(param_obj, self.config_json) self.assertTrue(param_obj.method == "quantile") self.assertTrue(param_obj.transform_param.transform_type == 'bin_num') if __name__ == '__main__': unittest.main()
1,718
32.705882
82
py
FATE
FATE-master/python/federatedml/transfer_variable/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
661
35.777778
75
py
FATE
FATE-master/python/federatedml/transfer_variable/base_transfer_variable.py
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from fate_arch.federation.transfer_variable import BaseTransferVariables, Variable
700
37.944444
82
py
FATE
FATE-master/python/federatedml/transfer_variable/transfer_class/secret_share_transfer_variable.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ################################################################################ # # AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY # ################################################################################ from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables # noinspection PyAttributeOutsideInit class SecretShareTransferVariable(BaseTransferVariables): def __init__(self, flowid=0): super().__init__(flowid) self.multiply_triplets_cross = self._create_variable( name='multiply_triplets_cross', src=[ 'guest', 'host'], dst=[ 'guest', 'host']) self.multiply_triplets_encrypted = self._create_variable( name='multiply_triplets_encrypted', src=[ 'guest', 'host'], dst=[ 'guest', 'host']) self.rescontruct = self._create_variable(name='rescontruct', src=['guest', 'host'], dst=['guest', 'host']) self.share = self._create_variable(name='share', src=['guest', 'host'], dst=['guest', 'host']) self.encrypted_share_matrix = self._create_variable( name='encrypted_share_matrix', src=[ 'guest', "host"], dst=[ 'host', "guest"]) self.q_field = self._create_variable( name='q_field', src=[ 'guest', "host"], dst=[ 'host', "guest"])
2,075
39.705882
114
py
FATE
FATE-master/python/federatedml/transfer_variable/transfer_class/one_vs_rest_transfer_variable.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY
#
################################################################################

from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables


# noinspection PyAttributeOutsideInit
class OneVsRestTransferVariable(BaseTransferVariables):
    def __init__(self, flowid=0):
        super().__init__(flowid)
        self.aggregate_classes = self._create_variable(name='aggregate_classes', src=['guest'], dst=['host', 'arbiter'])
        self.host_classes = self._create_variable(name='host_classes', src=['host'], dst=['guest'])
1358
37.828571
120
py
FATE
FATE-master/python/federatedml/transfer_variable/transfer_class/hetero_kmeans_transfer_variable.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY
#
################################################################################

from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables


# noinspection PyAttributeOutsideInit
class HeteroKmeansTransferVariable(BaseTransferVariables):
    def __init__(self, flowid=0):
        super().__init__(flowid)
        self.arbiter_tol = self._create_variable(name='arbiter_tol', src=['arbiter'], dst=['host', 'guest'])
        self.cluster_result = self._create_variable(name='cluster_result', src=['arbiter'], dst=['host', 'guest'])
        self.cluster_evaluation = self._create_variable(
            name='cluster_evaluation', src=['arbiter'], dst=['host', 'guest'])
        self.guest_dist = self._create_variable(name='guest_dist', src=['guest'], dst=['arbiter'])
        self.guest_tol = self._create_variable(name='guest_tol', src=['guest'], dst=['arbiter'])
        self.host_dist = self._create_variable(name='host_dist', src=['host'], dst=['arbiter'])
        self.host_tol = self._create_variable(name='host_tol', src=['host'], dst=['arbiter'])
        self.centroid_list = self._create_variable(name='centroid_list', src=['guest'], dst=['host'])
1988
46.357143
114
py
FATE
FATE-master/python/federatedml/transfer_variable/transfer_class/rsa_intersect_transfer_variable.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY
#
################################################################################

from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables


# noinspection PyAttributeOutsideInit
class RsaIntersectTransferVariable(BaseTransferVariables):
    def __init__(self, flowid=0):
        super().__init__(flowid)
        self.guest_pubkey_ids = self._create_variable(name='guest_pubkey_ids', src=['guest'], dst=['host'])
        self.host_pubkey_ids = self._create_variable(name='host_pubkey_ids', src=['host'], dst=['guest'])
        self.host_prvkey_ids = self._create_variable(name='host_prvkey_ids', src=['host'], dst=['guest'])
        self.guest_prvkey_ids = self._create_variable(name='guest_prvkey_ids', src=['guest'], dst=['host'])
        self.host_sign_guest_ids = self._create_variable(name='host_sign_guest_ids', src=['host'], dst=['guest'])
        self.guest_sign_host_ids = self._create_variable(name='guest_sign_host_ids', src=['guest'], dst=['host'])
        self.intersect_ids = self._create_variable(name='intersect_ids', src=['guest'], dst=['host'])
        self.host_intersect_ids = self._create_variable(name='host_intersect_ids', src=['host'], dst=['guest'])
        self.host_pubkey = self._create_variable(name='host_pubkey', src=['host'], dst=['guest'])
        self.guest_pubkey = self._create_variable(name='guest_pubkey', src=['guest'], dst=['host'])
        self.cache_version_info = self._create_variable(name='cache_version_info', src=['guest'], dst=['host'])
        self.cache_version_match_info = self._create_variable(
            name='cache_version_match_info', src=['host'], dst=['guest'])
        self.intersect_guest_ids = self._create_variable(name='intersect_guest_ids', src=['guest'], dst=['host'])
        self.intersect_guest_ids_process = self._create_variable(
            name='intersect_guest_ids_process', src=['host'], dst=['guest'])
        self.intersect_host_ids_process = self._create_variable(
            name='intersect_host_ids_process', src=['host'], dst=['guest'])
        self.rsa_pubkey = self._create_variable(name='rsa_pubkey', src=['host'], dst=['guest'])
        self.cardinality = self._create_variable(name='cardinality', src=['guest'], dst=['host'])
        self.host_filter = self._create_variable(name='host_filter', src=['host'], dst=['guest'])
3307
52.354839
114
py
FATE
FATE-master/python/federatedml/transfer_variable/transfer_class/homo_transfer_variable.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY
#
################################################################################

from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables


# noinspection PyAttributeOutsideInit
class HomoTransferVariable(BaseTransferVariables):
    def __init__(self, flowid=0):
        super().__init__(flowid)
        self.aggregated_model = self._create_variable(name='aggregated_model', src=['arbiter'], dst=['guest', 'host'])
        self.dh_ciphertext_bc = self._create_variable(name='dh_ciphertext_bc', src=['arbiter'], dst=['guest', 'host'])
        self.dh_ciphertext_guest = self._create_variable(name='dh_ciphertext_guest', src=['guest'], dst=['arbiter'])
        self.dh_ciphertext_host = self._create_variable(name='dh_ciphertext_host', src=['host'], dst=['arbiter'])
        self.dh_pubkey = self._create_variable(name='dh_pubkey', src=['arbiter'], dst=['guest', 'host'])
        self.guest_loss = self._create_variable(name='guest_loss', src=['guest'], dst=['arbiter'])
        self.guest_model = self._create_variable(name='guest_model', src=['guest'], dst=['arbiter'])
        self.guest_party_weight = self._create_variable(name='guest_party_weight', src=['guest'], dst=['arbiter'])
        self.guest_uuid = self._create_variable(name='guest_uuid', src=['guest'], dst=['arbiter'])
        self.host_loss = self._create_variable(name='host_loss', src=['host'], dst=['arbiter'])
        self.host_model = self._create_variable(name='host_model', src=['host'], dst=['arbiter'])
        self.host_party_weight = self._create_variable(name='host_party_weight', src=['host'], dst=['arbiter'])
        self.host_uuid = self._create_variable(name='host_uuid', src=['host'], dst=['arbiter'])
        self.is_converge = self._create_variable(name='is_converge', src=['arbiter'], dst=['guest', 'host'])
        self.model_re_encrypted = self._create_variable(name='model_re_encrypted', src=['arbiter'], dst=['host'])
        self.model_to_re_encrypt = self._create_variable(name='model_to_re_encrypt', src=['host'], dst=['arbiter'])
        self.paillier_pubkey = self._create_variable(name='paillier_pubkey', src=['arbiter'], dst=['host'])
        self.re_encrypt_times = self._create_variable(name='re_encrypt_times', src=['host'], dst=['arbiter'])
        self.re_encrypted_model = self._create_variable(name='re_encrypted_model', src=['arbiter'], dst=['host'])
        self.to_encrypt_model = self._create_variable(name='to_encrypt_model', src=['host'], dst=['arbiter'])
        self.use_encrypt = self._create_variable(name='use_encrypt', src=['host'], dst=['arbiter'])
        self.uuid_conflict_flag = self._create_variable(
            name='uuid_conflict_flag', src=['arbiter'], dst=['guest', 'host'])
3539
62.214286
118
py
FATE
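All of these classes accept a flowid argument, which BaseTransferVariables uses to namespace the underlying federation tags, so two instances of the same component in one job do not consume each other's messages. A minimal illustration (the tag format itself is an internal detail and is assumed here, not taken from FATE's public API):

# Two instances of the same class, isolated from each other by flowid.
tv_first = HomoTransferVariable(flowid=0)
tv_second = HomoTransferVariable(flowid=1)
# An object sent through tv_first.guest_model can only be received by a
# variable created under the same flowid; tv_second never observes it.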
FATE-master/python/federatedml/transfer_variable/transfer_class/hetero_lr_transfer_variable.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# AUTO GENERATED TRANSFER VARIABLE CLASS. DO NOT MODIFY
#
################################################################################

from federatedml.transfer_variable.base_transfer_variable import BaseTransferVariables


# noinspection PyAttributeOutsideInit
class HeteroLRTransferVariable(BaseTransferVariables):
    def __init__(self, flowid=0):
        super().__init__(flowid)
        self.batch_data_index = self._create_variable(name='batch_data_index', src=['guest'], dst=['host'])
        self.batch_info = self._create_variable(name='batch_info', src=['guest'], dst=['host', 'arbiter'])
        self.batch_validate_info = self._create_variable(name='batch_validate_info', src=['host'], dst=['guest'])
        self.converge_flag = self._create_variable(name='converge_flag', src=['arbiter'], dst=['host', 'guest'])
        self.fore_gradient = self._create_variable(name='fore_gradient', src=['guest'], dst=['host'])
        self.forward_hess = self._create_variable(name='forward_hess', src=['guest'], dst=['host'])
        self.guest_gradient = self._create_variable(name='guest_gradient', src=['guest'], dst=['arbiter'])
        self.guest_hess_vector = self._create_variable(name='guest_hess_vector', src=['guest'], dst=['arbiter'])
        self.guest_optim_gradient = self._create_variable(name='guest_optim_gradient', src=['arbiter'], dst=['guest'])
        self.host_forward_dict = self._create_variable(name='host_forward_dict', src=['host'], dst=['guest'])
        self.host_gradient = self._create_variable(name='host_gradient', src=['host'], dst=['arbiter'])
        self.host_hess_vector = self._create_variable(name='host_hess_vector', src=['host'], dst=['arbiter'])
        self.host_loss_regular = self._create_variable(name='host_loss_regular', src=['host'], dst=['guest'])
        self.host_optim_gradient = self._create_variable(name='host_optim_gradient', src=['arbiter'], dst=['host'])
        self.host_prob = self._create_variable(name='host_prob', src=['host'], dst=['guest'])
        self.host_sqn_forwards = self._create_variable(name='host_sqn_forwards', src=['host'], dst=['guest'])
        self.loss = self._create_variable(name='loss', src=['guest'], dst=['arbiter'])
        self.loss_intermediate = self._create_variable(name='loss_intermediate', src=['host'], dst=['guest'])
        self.paillier_pubkey = self._create_variable(name='paillier_pubkey', src=['arbiter'], dst=['host', 'guest'])
        self.sqn_sample_index = self._create_variable(name='sqn_sample_index', src=['guest'], dst=['host'])
        self.use_async = self._create_variable(name='use_async', src=['guest'], dst=['host'])
3389
61.777778
118
py