repo | file | code | file_length | avg_line_length | max_line_length | extension_type
---|---|---|---|---|---|---
FATE | FATE-master/python/fate_client/pipeline/utils/tools.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import typing
from pathlib import Path
from ruamel import yaml
def merge_dict(dict1, dict2):
merge_ret = {}
keyset = dict1.keys() | dict2.keys()
for key in keyset:
if key in dict1 and key in dict2:
val1 = dict1.get(key)
val2 = dict2.get(key)
assert type(val1).__name__ == type(val2).__name__
if isinstance(val1, dict):
merge_ret[key] = merge_dict(val1, val2)
else:
merge_ret[key] = val2
elif key in dict1:
merge_ret[key] = dict1.get(key)
else:
merge_ret[key] = dict2.get(key)
return merge_ret
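# Illustrative usage (added example, not part of the original module):
# merge_dict merges recursively, with dict2 taking precedence on key conflicts:
#     merge_dict({"a": 1, "b": {"x": 1}}, {"b": {"y": 2}, "c": 3})
#     -> {"a": 1, "b": {"x": 1, "y": 2}, "c": 3}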
def extract_explicit_parameter(func):
def wrapper(*args, **kwargs):
explict_kwargs = {"explict_parameters": kwargs}
return func(*args, **explict_kwargs)
return wrapper
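# Illustrative effect (added note): the decorator repacks all keyword arguments
# under the single key "explict_parameters" (spelling kept as in the source), so
# a call obj.method(a=1) reaches the wrapped function as
# obj.method(explict_parameters={"a": 1}).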
def load_job_config(path):
config = JobConfig.load(path)
return config
class Parties(object):
def __init__(self, parties):
self.host = parties.get("host", None)
self.guest = parties.get("guest", None)
self.arbiter = parties.get("arbiter", None)
class JobConfig(object):
def __init__(self, config):
self.parties = Parties(config.get("parties", {}))
self.backend = config.get("backend", 0)
self.work_mode = config.get("work_mode", 0)
self.data_base_dir = config.get("data_base_dir", "")
self.system_setting = config.get("system_setting", {})
@staticmethod
def load(path: typing.Union[str, Path]):
conf = JobConfig.load_from_file(path)
return JobConfig(conf)
@staticmethod
def load_from_file(path: typing.Union[str, Path]):
"""
Loads conf content from json or yaml file. Used to read in parameter configuration
Parameters
----------
path: str, path to conf file, should be absolute path
Returns
-------
dict, parameter configuration in dictionary format
"""
if isinstance(path, str):
path = Path(path)
config = {}
if path is not None:
file_type = path.suffix
with path.open("r") as f:
if file_type == ".yaml":
config.update(yaml.safe_load(f))
elif file_type == ".json":
config.update(json.load(f))
else:
raise ValueError(f"Cannot load conf from file type {file_type}")
return config
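# Illustrative usage (added sketch; the path below is a hypothetical example).
# JobConfig.load accepts either a .yaml or a .json file:
#     job_config = load_job_config("/data/projects/fate/examples/config.yaml")
#     print(job_config.parties.guest, job_config.work_mode)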
| 3,127 | 29.368932 | 90 | py |
FATE | FATE-master/python/fate_client/pipeline/utils/__init__.py | | 0 | 0 | 0 | py |
FATE | FATE-master/python/fate_client/pipeline/utils/invoker/job_submitter.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os
import tempfile
import time
from datetime import timedelta
from pathlib import Path
from flow_sdk.client import FlowClient
from pipeline.backend import config as conf
from pipeline.backend.config import JobStatus
from pipeline.backend.config import StatusCode
from pipeline.utils.logger import LOGGER
class JobInvoker(object):
def __init__(self):
self.client = FlowClient(ip=conf.PipelineConfig.IP, port=conf.PipelineConfig.PORT, version=conf.SERVER_VERSION,
app_key=conf.PipelineConfig.APP_KEY, secret_key=conf.PipelineConfig.SECRET_KEY)
def submit_job(self, dsl=None, submit_conf=None, callback_func=None):
LOGGER.debug(f"submit dsl is: \n {json.dumps(dsl, indent=4, ensure_ascii=False)}")
LOGGER.debug(f"submit conf is: \n {json.dumps(submit_conf, indent=4, ensure_ascii=False)}")
result = self.run_job_with_retry(self.client.job.submit, params=dict(config_data=submit_conf,
dsl_data=dsl))
# result = self.client.job.submit(config_data=submit_conf, dsl_data=dsl)
if callback_func is not None:
callback_func(result)
try:
if 'retcode' not in result or result["retcode"] != 0:
raise ValueError(f"retcode err, callback result is {result}")
if "jobId" not in result:
raise ValueError(f"jobID not in result: {result}")
job_id = result["jobId"]
data = result["data"]
except (ValueError, KeyError):
raise ValueError("job submit failed, err msg: {}".format(result))
return job_id, data
def upload_data(self, submit_conf=None, drop=0):
result = self.client.data.upload(config_data=submit_conf, verbose=1, drop=drop)
try:
if 'retcode' not in result or result["retcode"] != 0:
raise ValueError(f"retcode err, upload result is {result}")
if "jobId" not in result:
raise ValueError(f"jobId not in result: {result}")
job_id = result["jobId"]
data = result["data"]
except BaseException:
raise ValueError("job submit failed, err msg: {}".format(result))
return job_id, data
def monitor_job_status(self, job_id, role, party_id, previous_status=None):
if previous_status in [StatusCode.SUCCESS, StatusCode.CANCELED]:
if previous_status == StatusCode.SUCCESS:
status = JobStatus.SUCCESS
else:
status = JobStatus.CANCELED
raise ValueError(f"Previous fit status is {status}, please don't fit again")
party_id = str(party_id)
start_time = time.time()
pre_cpn = None
LOGGER.info(f"Job id is {job_id}\n")
while True:
ret_code, ret_msg, data = self.query_job(job_id, role, party_id)
status = data["f_status"]
if status == JobStatus.SUCCESS:
elapse_seconds = timedelta(seconds=int(time.time() - start_time))
LOGGER.info(f"Job is success!!! Job id is {job_id}")
LOGGER.info(f"Total time: {elapse_seconds}")
return StatusCode.SUCCESS
elif status == JobStatus.FAILED:
raise ValueError(f"Job is failed, please check out job {job_id} by fate board or fate_flow cli")
elif status == JobStatus.WAITING:
elapse_seconds = timedelta(seconds=int(time.time() - start_time))
LOGGER.info(f"\x1b[80D\x1b[1A\x1b[KJob is still waiting, time elapse: {elapse_seconds}")
elif status == JobStatus.CANCELED:
elapse_seconds = timedelta(seconds=int(time.time() - start_time))
LOGGER.info(f"Job is canceled, time elapse: {elapse_seconds}\r")
return StatusCode.CANCELED
elif status == JobStatus.TIMEOUT:
elapse_seconds = timedelta(seconds=int(time.time() - start_time))
raise ValueError(f"Job is timeout, time elapse: {elapse_seconds}\r")
elif status == JobStatus.RUNNING:
ret_code, _, data = self.query_task(job_id=job_id, role=role, party_id=party_id,
status=JobStatus.RUNNING)
if ret_code != 0 or len(data) == 0:
time.sleep(conf.TIME_QUERY_FREQS)
continue
elapse_seconds = timedelta(seconds=int(time.time() - start_time))
if len(data) == 1:
cpn = data[0]["f_component_name"]
else:
cpn = []
for cpn_data in data:
cpn.append(cpn_data["f_component_name"])
if cpn != pre_cpn:
LOGGER.info(f"\r")
pre_cpn = cpn
LOGGER.info(f"\x1b[80D\x1b[1A\x1b[KRunning component {cpn}, time elapse: {elapse_seconds}")
else:
raise ValueError(f"Unknown status: {status}")
time.sleep(conf.TIME_QUERY_FREQS)
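# Illustrative flow (added sketch; the dsl/conf dicts are hypothetical
# placeholders): submit a job, then block until it reaches a terminal state.
#     invoker = JobInvoker()
#     job_id, _ = invoker.submit_job(dsl={}, submit_conf={"job_parameters": {}})
#     status = invoker.monitor_job_status(job_id, role="guest", party_id=9999)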
def query_job(self, job_id, role, party_id):
party_id = str(party_id)
result = self.run_job_with_retry(self.client.job.query, params=dict(job_id=job_id, role=role,
party_id=party_id))
# result = self.client.job.query(job_id=job_id, role=role, party_id=party_id)
try:
if 'retcode' not in result or result["retcode"] != 0:
raise ValueError("can not query_job")
ret_code = result["retcode"]
ret_msg = result["retmsg"]
data = result["data"][0]
return ret_code, ret_msg, data
except (ValueError, KeyError, IndexError):
raise ValueError("query job result is {}, can not parse useful info".format(result))
def get_output_data_table(self, job_id, cpn_name, role, party_id):
"""
Parameters
----------
job_id: str
cpn_name: str
role: str
party_id: int
Returns
-------
dict
single output example:
{
table_name: [],
table_namespace: []
}
multiple output example:
{
train_data: {
table_name: [],
table_namespace: []
},
validate_data: {
table_name: [],
table_namespace: []
}
test_data: {
table_name: [],
table_namespace: []
}
}
"""
party_id = str(party_id)
result = self.client.component.output_data_table(job_id=job_id, role=role,
party_id=party_id, component_name=cpn_name)
data = {}
try:
if 'retcode' not in result or result["retcode"] != 0:
raise ValueError(f"No retcode found in result: {result}")
if "data" not in result:
raise ValueError(f"No data returned: {result}")
all_data = result["data"]
n = len(all_data)
# single data table
if n == 1:
single_data = all_data[0]
del single_data["data_name"]
data = single_data
# multiple data table
elif n > 1:
for single_data in all_data:
data_name = single_data["data_name"]
del single_data["data_name"]
data[data_name] = single_data
# no data table obtained
else:
LOGGER.info(f"No output data table found in {result}")
except ValueError:
raise ValueError(f"Job submit failed, err msg: {result}")
return data
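# Illustrative call (added sketch; identifiers below are hypothetical):
#     tables = invoker.get_output_data_table(
#         job_id="202201010000000000000", cpn_name="hetero_lr_0",
#         role="guest", party_id=9999)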
def query_task(self, job_id, role, party_id, status=None):
party_id = str(party_id)
result = self.client.task.query(job_id=job_id, role=role,
party_id=party_id, status=status)
try:
if 'retcode' not in result:
raise ValueError("Cannot query task status of job {}".format(job_id))
ret_code = result["retcode"]
ret_msg = result["retmsg"]
if ret_code != 0:
data = None
else:
data = result["data"]
return ret_code, ret_msg, data
except ValueError:
raise ValueError("Query task result is {}, cannot parse useful info".format(result))
def get_output_data(self, job_id, cpn_name, role, party_id, limits=None, to_pandas=True):
"""
Parameters
----------
job_id: str
cpn_name: str
role: str
party_id: int
limits: int, None, default None. Maximum number of lines returned, including header. If None, return all lines.
to_pandas: bool, default True. Change data output to pandas or not.
Returns
-------
single output example: pandas.DataFrame
multiple output example:
{
train_data: tran_data_df,
validate_data: validate_data_df,
test_data: test_data_df
}
"""
party_id = str(party_id)
with tempfile.TemporaryDirectory() as job_dir:
result = self.client.component.output_data(job_id=job_id, role=role, output_path=job_dir,
party_id=party_id, component_name=cpn_name)
output_dir = result["directory"]
n = 0
data_files = []
for file in os.listdir(output_dir):
if file.endswith("csv"):
n += 1
data_files.append(file[:-4])
if n > 0:
data_dict = {}
for data_name in data_files:
curr_data_dict = JobInvoker.create_data_meta_dict(data_name, output_dir, limits)
if curr_data_dict is not None:
if to_pandas:
data_dict[data_name] = self.to_pandas(curr_data_dict)
else:
data_dict[data_name] = curr_data_dict
# no output data obtained
else:
raise ValueError(f"No output data found in directory{output_dir}")
if len(data_dict) == 1:
return list(data_dict.values())[0]
return data_dict
@staticmethod
def create_data_meta_dict(data_name, output_dir, limits):
data_file = f"{data_name}.csv"
meta_file = f"{data_name}.meta"
output_data = os.path.join(output_dir, data_file)
output_meta = os.path.join(output_dir, meta_file)
if not Path(output_data).resolve().exists():
return
data = JobInvoker.extract_output_data(output_data, limits)
meta = JobInvoker.extract_output_meta(output_meta)
data_dict = {"data": data, "meta": meta}
return data_dict
@staticmethod
def to_pandas(data_dict):
import pandas as pd
data = data_dict["data"]
meta = data_dict["meta"]
if JobInvoker.is_normal_predict_task(meta):
"""ignore the first line
"""
rows = []
for i in range(1, len(data)):
cols = data[i].split(",", -1)
predict_detail = json.loads(",".join(cols[len(meta) - 2: -1])[1:-1].replace("\'", "\""))
value = cols[: len(meta) - 2] + [predict_detail] + cols[-1:]
rows.append(value)
return pd.DataFrame(rows, columns=meta)
else:
rows = []
for i in range(1, len(data)):
cols = data[i].split(",", -1)
rows.append(cols)
return pd.DataFrame(rows, columns=meta)
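# Illustrative row format (added note): predict output embeds a dict in the
# predict_detail column, so the naive comma split is re-joined before JSON
# parsing. With meta = ["id", "label", "predict_result", "predict_score",
# "predict_detail", "type"], a data line such as
#     0,1,1,0.93,"{'0': 0.07, '1': 0.93}",train
# yields predict_detail == {"0": 0.07, "1": 0.93}.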
@staticmethod
def is_normal_predict_task(col_names):
if len(col_names) <= 5:
return False
template_col_names = ["label", "predict_result", "predict_score", "predict_detail", "type"]
for i in range(5):
if template_col_names[i] != col_names[-5 + i]:
return False
return True
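# Illustrative checks (added note): a "normal" predict task is recognized by
# its last five header columns:
#     is_normal_predict_task(["id", "label", "predict_result",
#                             "predict_score", "predict_detail", "type"])  # True
#     is_normal_predict_task(["id", "label"])                              # False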
@staticmethod
def extract_output_data(output_data, limits):
data = []
with open(output_data, "r") as fin:
for i, line in enumerate(fin):
if i == limits:
break
data.append(line.strip())
return data
@staticmethod
def extract_output_meta(output_meta):
with open(output_meta, "r") as fin:
try:
meta_dict = json.load(fin)
meta = meta_dict["header"]
except ValueError as e:
raise ValueError(f"Cannot get output data meta. err msg: {e}")
return meta
def get_model_param(self, job_id, cpn_name, role, party_id):
result = None
party_id = str(party_id)
try:
result = self.client.component.output_model(job_id=job_id, role=role,
party_id=party_id, component_name=cpn_name)
if "data" not in result:
raise ValueError(f"{result['retmsg']} job {job_id}, component {cpn_name} has no output model param")
return result["data"]
except BaseException:
raise ValueError(f"Cannot get output model, err msg: {result}")
def get_metric(self, job_id, cpn_name, role, party_id):
result = None
party_id = str(party_id)
try:
result = self.client.component.metric_all(job_id=job_id, role=role,
party_id=party_id, component_name=cpn_name)
if "data" not in result:
raise ValueError(f"job {job_id}, component {cpn_name} has no output metric")
return result["data"]
except BaseException:
raise ValueError(f"Cannot get output model, err msg: {result}")
# raise
def get_summary(self, job_id, cpn_name, role, party_id):
result = None
party_id = str(party_id)
try:
result = self.client.component.get_summary(job_id=job_id, role=role,
party_id=party_id, component_name=cpn_name)
if "data" not in result:
raise ValueError(f"Job {job_id}, component {cpn_name} has no output metric")
return result["data"]
except BaseException:
raise ValueError(f"Cannot get output model, err msg: {result}")
def model_deploy(self, model_id, model_version, cpn_list=None, predict_dsl=None, components_checkpoint=None):
if cpn_list:
result = self.client.model.deploy(model_id=model_id, model_version=model_version, cpn_list=cpn_list)
elif predict_dsl:
result = self.client.model.deploy(model_id=model_id, model_version=model_version,
predict_dsl=predict_dsl, components_checkpoint=components_checkpoint)
else:
result = self.client.model.deploy(model_id=model_id, model_version=model_version,
components_checkpoint=components_checkpoint)
if result is None or 'retcode' not in result:
raise ValueError("Call flow deploy is failed, check if fate_flow server is up!")
elif result["retcode"] != 0:
raise ValueError(f"Cannot deploy components, error msg is {result['data']}")
else:
return result["data"]
def get_predict_dsl(self, model_id, model_version):
result = self.client.model.get_predict_dsl(model_id=model_id, model_version=model_version)
if result is None or 'retcode' not in result:
raise ValueError("Call flow get predict dsl is failed, check if fate_flow server is up!")
elif result["retcode"] != 0:
raise ValueError("Cannot get predict dsl, error msg is {}".format(result["retmsg"]))
else:
return result["data"]
def load_model(self, load_conf):
result = self.client.model.load(load_conf)
if result is None or 'retcode' not in result:
raise ValueError("Call flow load failed, check if fate_flow server is up!")
elif result["retcode"] != 0:
raise ValueError("Cannot load model, error msg is {}".format(result["retmsg"]))
else:
return result["data"]
def bind_model(self, bind_conf):
result = self.client.model.bind(bind_conf)
if result is None or 'retcode' not in result:
raise ValueError("Call flow bind failed, check if fate_flow server is up!")
elif result["retcode"] != 0:
raise ValueError("Cannot bind model, error msg is {}".format(result["retmsg"]))
else:
return result["retmsg"]
def convert_homo_model(self, convert_conf):
result = self.client.model.homo_convert(convert_conf)
if result is None or 'retcode' not in result:
raise ValueError("Call flow homo convert failed, check if fate_flow server is up!")
elif result["retcode"] != 0:
raise ValueError("Cannot convert homo model, error msg is {}".format(result["retmsg"]))
else:
return result["data"]
def bind_table(self, **kwargs):
result = self.client.table.bind(**kwargs)
if result is None or 'retcode' not in result:
raise ValueError("Call flow table bind is failed, check if fate_flow server is up!")
elif result["retcode"] != 0:
raise ValueError(f"Cannot bind table, error msg is {result['retmsg']}")
else:
return result["data"]
@staticmethod
def run_job_with_retry(api_func, params):
for i in range(conf.MAX_RETRY + 1):
try:
result = api_func(**params)
if result is None or "retmsg" not in result:
return result
if i == conf.MAX_RETRY:
return result
ret_msg = result["retmsg"]
if "connection refused" in ret_msg.lower() \
or "max retries" in ret_msg.lower():
pass
else:
return result
except AttributeError:
pass
time.sleep(conf.TIME_QUERY_FREQS * (i + 1))
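# Illustrative retry behavior (added note): run_job_with_retry makes up to
# conf.MAX_RETRY + 1 attempts, retrying only when retmsg indicates a transient
# transport error ("connection refused" / "max retries"), and sleeps
# conf.TIME_QUERY_FREQS * (i + 1) seconds between attempts, i.e. linear backoff.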
| 19,264 | 39.219207 | 119 | py |
FATE | FATE-master/python/fate_client/pipeline/utils/invoker/__init__.py | | 0 | 0 | 0 | py |
FATE | FATE-master/python/fate_client/pipeline/param/ftl_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections.abc
import copy
from pipeline.param.intersect_param import IntersectParam
from types import SimpleNamespace
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.predict_param import PredictParam
from pipeline.param.callback_param import CallbackParam
class FTLParam(BaseParam):
def __init__(self, alpha=1, tol=0.000001,
n_iter_no_change=False, validation_freqs=None, optimizer={'optimizer': 'Adam', 'learning_rate': 0.01},
nn_define={}, epochs=1, intersect_param=IntersectParam(consts.RSA), config_type='keras', batch_size=-1,
encrypte_param=EncryptParam(),
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(mode="confusion_opt"),
predict_param=PredictParam(), mode='plain', communication_efficient=False,
local_round=5, callback_param=CallbackParam()):
"""
Args:
alpha: float, a loss coefficient defined in paper, it defines the importance of alignment loss
tol: float, loss tolerance
n_iter_no_change: bool, check loss convergence or not
validation_freqs: None or positive integer or container object in python. Whether to do validation during training.
if None, no validation is done during training;
if a positive integer, data is validated every validation_freqs epochs;
if a container object, data is validated at the epochs contained in it,
e.g. validation_freqs = [10, 15] validates at epochs 10 and 15.
Default: None. A value of 1 is suggested; a larger value speeds up training by
skipping validation rounds, but a divisor of "epochs" is then recommended,
otherwise the validation score of the last training epoch is missed.
optimizer: optimizer method, accept following types:
1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD"
2. a dict, with a required key-value pair keyed by "optimizer",
with optional key-value pairs such as learning rate.
defaults to "SGD"
nn_define: dict, a dict represents the structure of neural network, it can be output by tf-keras
epochs: int, epochs num
intersect_param: define the intersect method
config_type: now only 'tf-keras' is supported
batch_size: batch size when computing transformed feature embedding, -1 use full data.
encrypte_param: encrypt param
encrypted_mode_calculator_param: params for the encrypted mode calculator
predict_param: predict param
mode:
plain: will not use any encrypt algorithms, data exchanged in plaintext
encrypted: use paillier to encrypt gradients
communication_efficient:
bool, whether to use the communication-efficient strategy. When enabled, the FTL model
updates gradients over several local rounds using intermediate data
local_round: local update round when using communication efficient
"""
super(FTLParam, self).__init__()
self.alpha = alpha
self.tol = tol
self.n_iter_no_change = n_iter_no_change
self.validation_freqs = validation_freqs
self.optimizer = optimizer
self.nn_define = nn_define
self.epochs = epochs
self.intersect_param = copy.deepcopy(intersect_param)
self.config_type = config_type
self.batch_size = batch_size
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
self.encrypt_param = copy.deepcopy(encrypte_param)
self.predict_param = copy.deepcopy(predict_param)
self.mode = mode
self.communication_efficient = communication_efficient
self.local_round = local_round
self.callback_param = copy.deepcopy(callback_param)
def check(self):
self.intersect_param.check()
self.encrypt_param.check()
self.encrypted_mode_calculator_param.check()
self.optimizer = self._parse_optimizer(self.optimizer)
supported_config_type = ["keras"]
if self.config_type not in supported_config_type:
raise ValueError(f"config_type should be one of {supported_config_type}")
if not isinstance(self.tol, (int, float)):
raise ValueError("tol should be numeric")
if not isinstance(self.epochs, int) or self.epochs <= 0:
raise ValueError("epochs should be a positive integer")
if self.nn_define and not isinstance(self.nn_define, dict):
raise ValueError("bottom_nn_define should be a dict defining the structure of neural network")
if self.batch_size != -1:
if not isinstance(self.batch_size, int) \
or self.batch_size < consts.MIN_BATCH_SIZE:
raise ValueError(
"batch_size {} not supported, should be no less than {} or -1 to use all data".format(self.batch_size, consts.MIN_BATCH_SIZE))
if self.validation_freqs is None:
pass
elif isinstance(self.validation_freqs, int):
if self.validation_freqs < 1:
raise ValueError("validation_freqs should be larger than 0 when it's integer")
elif not isinstance(self.validation_freqs, collections.abc.Container):
raise ValueError("validation_freqs should be None or positive integer or container")
assert isinstance(self.communication_efficient, bool), 'communication efficient must be a boolean'
assert self.mode in [
'encrypted', 'plain'], 'mode options: encrypted or plain, but {} is offered'.format(
self.mode)
self.check_positive_integer(self.epochs, 'epochs')
self.check_positive_number(self.alpha, 'alpha')
self.check_positive_integer(self.local_round, 'local round')
@staticmethod
def _parse_optimizer(opt):
"""
Examples:
1. "optimize": "SGD"
2. "optimize": {
"optimizer": "SGD",
"learning_rate": 0.05
}
"""
kwargs = {}
if isinstance(opt, str):
return SimpleNamespace(optimizer=opt, kwargs=kwargs)
elif isinstance(opt, dict):
optimizer = opt.get("optimizer", kwargs)
if not optimizer:
raise ValueError(f"optimizer config: {opt} invalid")
kwargs = {k: v for k, v in opt.items() if k != "optimizer"}
return SimpleNamespace(optimizer=optimizer, kwargs=kwargs)
else:
raise ValueError(f"invalid type for optimize: {type(opt)}")
| 7,810 | 47.216049 | 127 | py |
FATE | FATE-master/python/fate_client/pipeline/param/glm_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.base_param import BaseParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.init_model_param import InitParam
from pipeline.param.stepwise_param import StepwiseParam
from pipeline.param import consts
class LinearModelParam(BaseParam):
"""
Parameters used for GLM.
Parameters
----------
penalty : {'L2', 'L1', 'NONE'}, default: 'L2'
Penalty method used in LinR. Please note that, when using the encrypted version in HeteroLinR,
'L1' is not supported.
tol : float, default: 1e-4
The tolerance of convergence
alpha : float, default: 1.0
Regularization strength coefficient.
optimizer : {'sgd', 'rmsprop', 'adam', 'sqn', 'adagrad', 'nesterov_momentum_sgd'}
Optimize method
batch_size : int, default: -1
Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
learning_rate : float, default: 0.01
Learning rate
max_iter : int, default: 100
The maximum iteration for training.
init_param: InitParam object, default: default InitParam object
Init param method object.
early_stop : {'diff', 'abs', 'weight_diff'}
Method used to judge convergence.
a) diff: Use difference of loss between two iterations to judge whether converge.
b) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < tol, it is converged.
c) weight_diff: Use difference between weights of two consecutive iterations
encrypt_param: EncryptParam object, default: default EncryptParam object
encrypt param
cv_param: CrossValidationParam object, default: default CrossValidationParam object
cv param
decay: int or float, default: 1
Decay rate for learning rate. learning rate will follow the following decay schedule.
lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t)
where t is the iter number.
decay_sqrt: Bool, default: True
lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t)
validation_freqs: int, list, tuple, set, or None
validation frequency during training, required when using early stopping.
The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds.
When it is larger than 1, a number which is divisible by "max_iter" is recommended, otherwise, you will miss the validation scores of the last training iteration.
early_stopping_rounds: int, default: None
If positive number specified, at every specified training rounds, program checks for early stopping criteria.
Validation_freqs must also be set when using early stopping.
metrics: list or None, default: None
Specify which metrics to be used when performing evaluation during training process. If metrics have not improved at early_stopping rounds, training stops before convergence.
If set as empty, default metrics will be used. For regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error']
use_first_metric_only: bool, default: False
Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement.
floating_point_precision: None or integer
if not None, use floating_point_precision-bit to speed up calculation,
e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
the result by 2**floating_point_precision in the end.
callback_param: CallbackParam object
callback param
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='sgd',
batch_size=-1, learning_rate=0.01, init_param=InitParam(),
max_iter=100, early_stop='diff',
encrypt_param=EncryptParam(),
cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, validation_freqs=None,
early_stopping_rounds=None, stepwise_param=StepwiseParam(), metrics=None, use_first_metric_only=False,
floating_point_precision=23, callback_param=CallbackParam()):
super(LinearModelParam, self).__init__()
self.penalty = penalty
self.tol = tol
self.alpha = alpha
self.optimizer = optimizer
self.batch_size = batch_size
self.learning_rate = learning_rate
self.init_param = copy.deepcopy(init_param)
self.max_iter = max_iter
self.early_stop = early_stop
self.encrypt_param = encrypt_param
self.cv_param = copy.deepcopy(cv_param)
self.decay = decay
self.decay_sqrt = decay_sqrt
self.validation_freqs = validation_freqs
self.early_stopping_rounds = early_stopping_rounds
self.stepwise_param = copy.deepcopy(stepwise_param)
self.metrics = metrics or []
self.use_first_metric_only = use_first_metric_only
self.floating_point_precision = floating_point_precision
self.callback_param = copy.deepcopy(callback_param)
def check(self):
descr = "linear model param's "
if self.penalty is None:
self.penalty = 'NONE'
elif type(self.penalty).__name__ != "str":
raise ValueError(
descr + "penalty {} not supported, should be str type".format(self.penalty))
self.penalty = self.penalty.upper()
if self.penalty not in ['L1', 'L2', 'NONE']:
raise ValueError(
"penalty {} not supported, penalty should be 'L1', 'L2' or 'none'".format(self.penalty))
if type(self.tol).__name__ not in ["int", "float"]:
raise ValueError(
descr + "tol {} not supported, should be float type".format(self.tol))
if type(self.alpha).__name__ not in ["int", "float"]:
raise ValueError(
descr + "alpha {} not supported, should be float type".format(self.alpha))
if type(self.optimizer).__name__ != "str":
raise ValueError(
descr + "optimizer {} not supported, should be str type".format(self.optimizer))
else:
self.optimizer = self.optimizer.lower()
if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad', 'sqn', 'nesterov_momentum_sgd']:
raise ValueError(
descr + "optimizer not supported, optimizer should be"
" 'sgd', 'rmsprop', 'adam', 'sqn', 'adagrad', or 'nesterov_momentum_sgd'")
if type(self.batch_size).__name__ not in ["int", "long"]:
raise ValueError(
descr + "batch_size {} not supported, should be int type".format(self.batch_size))
if self.batch_size != -1:
if type(self.batch_size).__name__ not in ["int", "long"] \
or self.batch_size < consts.MIN_BATCH_SIZE:
raise ValueError(descr + " {} not supported, should be larger than {} or "
"-1 represent for all data".format(self.batch_size, consts.MIN_BATCH_SIZE))
if type(self.learning_rate).__name__ not in ["int", "float"]:
raise ValueError(
descr + "learning_rate {} not supported, should be float type".format(
self.learning_rate))
self.init_param.check()
if type(self.max_iter).__name__ != "int":
raise ValueError(
descr + "max_iter {} not supported, should be int type".format(self.max_iter))
elif self.max_iter <= 0:
raise ValueError(
descr + "max_iter must be greater or equal to 1")
if type(self.early_stop).__name__ != "str":
raise ValueError(
descr + "early_stop {} not supported, should be str type".format(
self.early_stop))
else:
self.early_stop = self.early_stop.lower()
if self.early_stop not in ['diff', 'abs', 'weight_diff']:
raise ValueError(
descr + "early_stop not supported, early_stop should be 'weight_diff', 'diff' or 'abs'")
self.encrypt_param.check()
if type(self.decay).__name__ not in ["int", "float"]:
raise ValueError(
descr + "decay {} not supported, should be 'int' or 'float'".format(self.decay)
)
if type(self.decay_sqrt).__name__ not in ["bool"]:
raise ValueError(
descr + "decay_sqrt {} not supported, should be 'bool'".format(self.decay)
)
self.stepwise_param.check()
self.callback_param.check()
return True
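# Illustrative decay schedule (added note): with learning_rate=0.01 and decay=1,
# the effective rate at iteration t is
#     lr_t = 0.01 / (1 + t)        # decay_sqrt=False
#     lr_t = 0.01 / sqrt(1 + t)    # decay_sqrt=True (default)
# e.g. at t=3 the rate is 0.0025 and 0.005 respectively.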
| 9,637 | 43.827907 | 182 | py |
FATE | FATE-master/python/fate_client/pipeline/param/onehot_encoder_with_alignment_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# added by jsweng
# param class for OHE with alignment
#
from pipeline.param.base_param import BaseParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param import consts
class OHEAlignmentParam(BaseParam):
"""
Parameters
----------
transform_col_indexes: list or int, default: -1
Specify which columns need to be calculated. -1 represents all columns.
need_run: bool, default True
Indicate if this module needed to be run
need_alignment: bool, default True
Indicated whether alignment of features is turned on
encrypt_param: EncryptParam object, default: default EncryptParam object
"""
def __init__(self, transform_col_indexes=-1, transform_col_names=None, need_run=True, need_alignment=True,
encrypt_param=EncryptParam()):
super(OHEAlignmentParam, self).__init__()
if transform_col_names is None:
transform_col_names = []
self.transform_col_indexes = transform_col_indexes
self.transform_col_names = transform_col_names
self.need_run = need_run
self.need_alignment = need_alignment
self.encrypt_param = encrypt_param
def check(self):
descr = "One-hot encoder with alignment param's"
self.check_defined_type(self.transform_col_indexes, descr, ['list', 'int'])
self.encrypt_param.check()
if self.encrypt_param.method not in [consts.PAILLIER, None]:
raise ValueError(
"encrypted method support 'Paillier' or None only")
return True
| 1,633 | 29.259259 | 110 | py |
FATE | FATE-master/python/fate_client/pipeline/param/sir_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param.intersect_param import DHParam
from pipeline.param import consts
class SecureInformationRetrievalParam(BaseParam):
"""
Parameters
----------
security_level: float, default 0.5
security level, should be a value in [0, 1];
a security_level of 0.0 means raw data retrieval
oblivious_transfer_protocol: {"OT_Hauck"}
OT type, only supports OT_Hauck
commutative_encryption : {"CommutativeEncryptionPohligHellman"}
the commutative encryption scheme used
non_committing_encryption : {"aes"}
the non-committing encryption scheme used
dh_params
params for Pohlig-Hellman Encryption
key_size: int, value >= 1024
the key length of the commutative cipher;
note that this param will be deprecated in the future, please specify key_length in DHParam instead.
raw_retrieval: bool
perform raw retrieval if raw_retrieval
target_cols: str or list of str
target cols to retrieve;
any values not retrieved will be marked as "unretrieved",
if target_cols is None, label will be retrieved, same behavior as in previous version
default None
"""
def __init__(self, security_level=0.5,
oblivious_transfer_protocol=consts.OT_HAUCK,
commutative_encryption=consts.CE_PH,
non_committing_encryption=consts.AES,
key_size=consts.DEFAULT_KEY_LENGTH,
dh_params=DHParam(),
raw_retrieval=False,
target_cols=None):
super(SecureInformationRetrievalParam, self).__init__()
self.security_level = security_level
self.oblivious_transfer_protocol = oblivious_transfer_protocol
self.commutative_encryption = commutative_encryption
self.non_committing_encryption = non_committing_encryption
self.dh_params = dh_params
self.key_size = key_size
self.raw_retrieval = raw_retrieval
self.target_cols = [] if target_cols is None else target_cols
def check(self):
descr = "secure information retrieval param's "
self.check_decimal_float(self.security_level, descr + "security_level")
self.oblivious_transfer_protocol = self.check_and_change_lower(self.oblivious_transfer_protocol,
[consts.OT_HAUCK.lower()],
descr + "oblivious_transfer_protocol")
self.commutative_encryption = self.check_and_change_lower(self.commutative_encryption,
[consts.CE_PH.lower()],
descr + "commutative_encryption")
self.non_committing_encryption = self.check_and_change_lower(self.non_committing_encryption,
[consts.AES.lower()],
descr + "non_committing_encryption")
self.dh_params.check()
if self.key_size:
self.check_positive_integer(self.key_size, descr + "key_size")
if self.key_size < 1024:
raise ValueError(f"key size must be >= 1024")
self.check_boolean(self.raw_retrieval, descr)
if not isinstance(self.target_cols, list):
self.target_cols = [self.target_cols]
for col in self.target_cols:
self.check_string(col, descr + "target_cols")
| 4,318 | 42.19 | 109 | py |
FATE | FATE-master/python/fate_client/pipeline/param/homo_onehot_encoder_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# added by jsweng
# param class for OHE with alignment
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class HomoOneHotParam(BaseParam):
"""
Parameters
----------
transform_col_indexes: list or int, default: -1
Specify which columns need to be calculated. -1 represents all columns.
need_run: bool, default True
Indicate if this module needed to be run
need_alignment: bool, default True
Indicated whether alignment of features is turned on
"""
def __init__(self, transform_col_indexes=-1, transform_col_names=None, need_run=True, need_alignment=True):
super(HomoOneHotParam, self).__init__()
if transform_col_names is None:
transform_col_names = []
self.transform_col_indexes = transform_col_indexes
self.transform_col_names = transform_col_names
self.need_run = need_run
self.need_alignment = need_alignment
def check(self):
descr = "One-hot encoder with alignment param's"
self.check_defined_type(self.transform_col_indexes, descr, ['list', 'int'])
self.check_boolean(self.need_run, descr)
self.check_boolean(self.need_alignment, descr)
return True
| 1,307 | 28.066667 | 111 | py |
FATE | FATE-master/python/fate_client/pipeline/param/scale_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class ScaleParam(BaseParam):
"""
Define the feature scale parameters.
Parameters
----------
method : {"standard_scale", "min_max_scale"}
like scale in sklearn, now it support "min_max_scale" and "standard_scale", and will support other scale method soon.
Default standard_scale, which will do nothing for scale
mode : {"normal", "cap"}
for mode is "normal", the feat_upper and feat_lower is the normal value like "10" or "3.1"
and for "cap", feat_upper and feature_lower will between 0 and 1, which means the percentile of the column. Default "normal"
feat_upper : int or float or list of int or float
the upper limit in the column.
If use list, mode must be "normal", and list length should equal to the number of features to scale.
If the scaled value is larger than feat_upper, it will be set to feat_upper
feat_lower: int or float or list of int or float
the lower limit in the column.
If use list, mode must be "normal", and list length should equal to the number of features to scale.
If the scaled value is less than feat_lower, it will be set to feat_lower
scale_col_indexes: list
the idx of column in scale_column_idx will be scaled, while the idx of column is not in, it will not be scaled.
scale_names : list of string
Specify which columns need to be scaled. Each element in the list represents a column name in the header. default: []
with_mean : bool
used for "standard_scale". Default True.
with_std : bool
used for "standard_scale". Default True.
The standard scale of column x is calculated as : $z = (x - u) / s$ , where $u$ is the mean of the column and $s$ is the standard deviation of the column.
if with_mean is False, $u$ will be 0, and if with_std is False, $s$ will be 1.
need_run : bool
Indicate if this module needed to be run, default True
"""
def __init__(self, method="standard_scale", mode="normal", scale_col_indexes=-1, scale_names=None, feat_upper=None,
feat_lower=None, with_mean=True, with_std=True, need_run=True):
super().__init__()
self.scale_names = [] if scale_names is None else scale_names
self.method = method
self.mode = mode
self.feat_upper = feat_upper
# LOGGER.debug("self.feat_upper:{}, type:{}".format(self.feat_upper, type(self.feat_upper)))
self.feat_lower = feat_lower
self.scale_col_indexes = scale_col_indexes
self.with_mean = with_mean
self.with_std = with_std
self.need_run = need_run
def check(self):
if self.method is not None:
descr = "scale param's method"
self.method = self.check_and_change_lower(self.method,
[consts.MINMAXSCALE, consts.STANDARDSCALE],
descr)
descr = "scale param's mode"
self.mode = self.check_and_change_lower(self.mode,
[consts.NORMAL, consts.CAP],
descr)
# LOGGER.debug("self.feat_upper:{}, type:{}".format(self.feat_upper, type(self.feat_upper)))
# if type(self.feat_upper).__name__ not in ["float", "int"]:
# raise ValueError("scale param's feat_upper {} not supported, should be float or int".format(
# self.feat_upper))
if self.scale_col_indexes != -1 and not isinstance(self.scale_col_indexes, list):
raise ValueError("scale_col_indexes is should be -1 or a list")
if self.scale_names is None:
self.scale_names = []
if not isinstance(self.scale_names, list):
raise ValueError("scale_names is should be a list of string")
else:
for e in self.scale_names:
if not isinstance(e, str):
raise ValueError("scale_names is should be a list of string")
self.check_boolean(self.with_mean, "scale_param with_mean")
self.check_boolean(self.with_std, "scale_param with_std")
self.check_boolean(self.need_run, "scale_param need_run")
return True
| 5,074 | 41.647059 | 162 | py |
FATE | FATE-master/python/fate_client/pipeline/param/cache_loader_param.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class CacheLoaderParam:
def __init__(self, cache_key=None, job_id=None, component_name=None, cache_name=None):
super().__init__()
self.cache_key = cache_key
self.job_id = job_id
self.component_name = component_name
self.cache_name = cache_name
def check(self):
return True
| 948 | 32.892857 | 90 | py |
FATE | FATE-master/python/fate_client/pipeline/param/encrypted_mode_calculation_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class EncryptedModeCalculatorParam(BaseParam):
"""
Define the encrypted_mode_calculator parameters.
Parameters
----------
mode: {'strict', 'fast', 'balance', 'confusion_opt', 'confusion_opt_balance'}
encrypted mode, default: strict
re_encrypted_rate: float or int
numeric number in [0, 1], used when mode equals 'balance' or 'confusion_opt_balance', default: 1
"""
def __init__(self, mode="strict", re_encrypted_rate=1):
self.mode = mode
self.re_encrypted_rate = re_encrypted_rate
def check(self):
descr = "encrypted_mode_calculator param"
self.mode = self.check_and_change_lower(self.mode,
["strict", "fast", "balance", "confusion_opt", "confusion_opt_balance"],
descr)
if self.mode in ["balance", "confusion_opt_balance"]:
if type(self.re_encrypted_rate).__name__ not in ["int", "long", "float"]:
raise ValueError("re_encrypted_rate should be a numeric number")
if not 0.0 <= self.re_encrypted_rate <= 1:
raise ValueError("re_encrypted_rate should in [0, 1]")
return True
| 1,896 | 34.792453 | 120 | py |
FATE | FATE-master/python/fate_client/pipeline/param/feldman_verifiable_sum_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class FeldmanVerifiableSumParam(BaseParam):
"""
Define how to transfer the cols
Parameters
----------
sum_cols : list of column indexes, default: None
Specify which columns need to be summed. If None, every column will be summed.
q_n : int, non-negative integer less than or equal to 16, default: 6
q_n is the number of significant decimal digits. If the data type is float,
the maximum number of significant digits is 16; the sum of integer digits and
significant decimal digits should be less than or equal to 16.
"""
def __init__(self, sum_cols=None, q_n=6):
self.sum_cols = sum_cols
if sum_cols is None:
self.sum_cols = []
self.q_n = q_n
def check(self):
if isinstance(self.sum_cols, list):
for idx in self.sum_cols:
if not isinstance(idx, int):
raise ValueError(f"type mismatch, column_indexes with element {idx}(type is {type(idx)})")
if not isinstance(self.q_n, int):
raise ValueError(f"Init param's q_n {self.q_n} not supported, should be int type", type is {type(self.q_n)})
if self.q_n < 0:
raise ValueError(f"param's q_n {self.q_n} not supported, should be non-negative int value")
elif self.q_n > 16:
raise ValueError(f"param's q_n {self.q_n} not supported, should be less than or equal to 16")
| 2,173 | 37.140351 | 120 | py |
FATE | FATE-master/python/fate_client/pipeline/param/sample_param.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
import collections.abc
class SampleParam(BaseParam):
"""
Define the sample method
Parameters
----------
mode: str, accepted 'random', 'stratified', 'exact_by_weight'; the sampling mode to use, default: 'random'
method: str, accepted 'downsample', 'upsample' only in this version, default: 'downsample'
fractions: None or float or list; if mode equals random, it should be a float number greater than 0,
otherwise a list of pairs like [label_i, sample_rate_i],
e.g. [[0, 0.5], [1, 0.8], [2, 0.3]]. default: None
random_state: int, RandomState instance or None, default: None
task_type: str, accepted 'hetero', 'homo', default: 'hetero'
need_run: bool, default True
Indicate if this module needed to be run
"""
def __init__(self, mode="random", method="downsample", fractions=None,
random_state=None, task_type="hetero", need_run=True):
self.mode = mode
self.method = method
self.fractions = fractions
self.random_state = random_state
self.task_type = task_type
self.need_run = need_run
def check(self):
descr = "sample param"
self.mode = self.check_and_change_lower(self.mode,
["random", "stratified", "exact_by_weight"],
descr)
self.method = self.check_and_change_lower(self.method,
["upsample", "downsample"],
descr)
if self.mode == "stratified" and self.fractions is not None:
if not isinstance(self.fractions, list):
raise ValueError("fractions of sample param when using stratified should be list")
for ele in self.fractions:
if not isinstance(ele, collections.abc.Container) or len(ele) != 2:
raise ValueError(
"element in fractions of sample param using stratified should be a pair like [label_i, rate_i]")
return True
| 2,756 | 37.830986 | 120 | py |
FATE | FATE-master/python/fate_client/pipeline/param/consts.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
ARBITER = 'arbiter'
HOST = 'host'
GUEST = 'guest'
MODEL_AGG = "model_agg"
GRAD_AGG = "grad_agg"
BINARY = 'binary'
MULTY = 'multi'
CLASSIFICATION = "classification"
REGRESSION = 'regression'
CLUSTERING = 'clustering'
ONE_VS_REST = 'one_vs_rest'
PAILLIER = 'Paillier'
RANDOM_PADS = "RandomPads"
NONE = "None"
AFFINE = 'Affine'
ITERATIVEAFFINE = 'IterativeAffine'
RANDOM_ITERATIVEAFFINE = 'RandomIterativeAffine'
L1_PENALTY = 'L1'
L2_PENALTY = 'L2'
FLOAT_ZERO = 1e-8
OVERFLOW_THRESHOLD = 1e8
OT_HAUCK = 'OT_Hauck'
CE_PH = 'CommutativeEncryptionPohligHellman'
XOR = 'xor'
AES = 'aes'
PARAM_MAXDEPTH = 5
MAX_CLASSNUM = 1000
MIN_BATCH_SIZE = 10
SPARSE_VECTOR = "SparseVector"
HETERO = "hetero"
HOMO = "homo"
RAW = "raw"
RSA = "rsa"
DH = "dh"
ECDH = "ecdh"
# evaluation
AUC = "auc"
KS = "ks"
LIFT = "lift"
GAIN = "gain"
PRECISION = "precision"
RECALL = "recall"
ACCURACY = "accuracy"
EXPLAINED_VARIANCE = "explained_variance"
MEAN_ABSOLUTE_ERROR = "mean_absolute_error"
MEAN_SQUARED_ERROR = "mean_squared_error"
MEAN_SQUARED_LOG_ERROR = "mean_squared_log_error"
MEDIAN_ABSOLUTE_ERROR = "median_absolute_error"
R2_SCORE = "r2_score"
ROOT_MEAN_SQUARED_ERROR = "root_mean_squared_error"
ROC = "roc"
F1_SCORE = 'f1_score'
CONFUSION_MAT = 'confusion_mat'
PSI = 'psi'
VIF = 'vif'
PEARSON = 'pearson'
FEATURE_IMPORTANCE = 'feature_importance'
QUANTILE_PR = 'quantile_pr'
JACCARD_SIMILARITY_SCORE = 'jaccard_similarity_score'
FOWLKES_MALLOWS_SCORE = 'fowlkes_mallows_score'
ADJUSTED_RAND_SCORE = 'adjusted_rand_score'
DAVIES_BOULDIN_INDEX = 'davies_bouldin_index'
DISTANCE_MEASURE = 'distance_measure'
CONTINGENCY_MATRIX = 'contingency_matrix'
# evaluation alias metric
ALL_METRIC_NAME = [AUC, KS, LIFT, GAIN, PRECISION, RECALL, ACCURACY, EXPLAINED_VARIANCE, MEAN_ABSOLUTE_ERROR,
MEAN_SQUARED_ERROR, MEAN_SQUARED_LOG_ERROR, MEDIAN_ABSOLUTE_ERROR, R2_SCORE, ROOT_MEAN_SQUARED_ERROR,
ROC, F1_SCORE, CONFUSION_MAT, PSI, QUANTILE_PR, JACCARD_SIMILARITY_SCORE, FOWLKES_MALLOWS_SCORE,
ADJUSTED_RAND_SCORE, DAVIES_BOULDIN_INDEX, DISTANCE_MEASURE, CONTINGENCY_MATRIX]
ALIAS = {
('l1', 'mae', 'regression_l1'): MEAN_ABSOLUTE_ERROR,
('l2', 'mse', 'regression_l2', 'regression'): MEAN_SQUARED_ERROR,
('l2_root', 'rmse'): ROOT_MEAN_SQUARED_ERROR,
('msle', ): MEAN_SQUARED_LOG_ERROR,
('r2', ): R2_SCORE,
('acc', ): ACCURACY,
('DBI', ): DAVIES_BOULDIN_INDEX,
('FMI', ): FOWLKES_MALLOWS_SCORE,
('RI', ): ADJUSTED_RAND_SCORE,
('jaccard', ): JACCARD_SIMILARITY_SCORE
}
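# Illustrative lookup (added sketch; the helper name is hypothetical). ALIAS is
# keyed by tuples, so resolving a user-supplied alias means scanning those keys:
# def resolve_metric_alias(name):
#     for aliases, canonical in ALIAS.items():
#         if name in aliases:
#             return canonical
#     return name  # already canonical, e.g. "auc"
# resolve_metric_alias("rmse")  # -> "root_mean_squared_error"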
# default evaluation metrics
DEFAULT_BINARY_METRIC = [AUC, KS]
DEFAULT_REGRESSION_METRIC = [ROOT_MEAN_SQUARED_ERROR, MEAN_ABSOLUTE_ERROR]
DEFAULT_MULTI_METRIC = [ACCURACY, PRECISION, RECALL]
DEFAULT_CLUSTER_METRIC = [DAVIES_BOULDIN_INDEX]
# allowed metrics for different tasks
ALL_BINARY_METRICS = [
AUC,
KS,
LIFT,
GAIN,
ACCURACY,
PRECISION,
RECALL,
ROC,
CONFUSION_MAT,
PSI,
F1_SCORE,
QUANTILE_PR
]
ALL_REGRESSION_METRICS = [
EXPLAINED_VARIANCE,
MEAN_ABSOLUTE_ERROR,
MEAN_SQUARED_ERROR,
MEDIAN_ABSOLUTE_ERROR,
R2_SCORE,
ROOT_MEAN_SQUARED_ERROR
]
ALL_MULTI_METRICS = [
ACCURACY,
PRECISION,
RECALL
]
ALL_CLUSTER_METRICS = [
JACCARD_SIMILARITY_SCORE,
FOWLKES_MALLOWS_SCORE,
ADJUSTED_RAND_SCORE,
DAVIES_BOULDIN_INDEX,
DISTANCE_MEASURE,
CONTINGENCY_MATRIX
]
# single value metrics
REGRESSION_SINGLE_VALUE_METRICS = [
EXPLAINED_VARIANCE,
MEAN_ABSOLUTE_ERROR,
MEAN_SQUARED_ERROR,
MEAN_SQUARED_LOG_ERROR,
MEDIAN_ABSOLUTE_ERROR,
R2_SCORE,
ROOT_MEAN_SQUARED_ERROR,
]
BINARY_SINGLE_VALUE_METRIC = [
AUC,
KS
]
MULTI_SINGLE_VALUE_METRIC = [
PRECISION,
RECALL,
ACCURACY
]
CLUSTER_SINGLE_VALUE_METRIC = [
JACCARD_SIMILARITY_SCORE,
FOWLKES_MALLOWS_SCORE,
ADJUSTED_RAND_SCORE,
DAVIES_BOULDIN_INDEX
]
# workflow
TRAIN_DATA = "train_data"
TEST_DATA = "test_data"
# initialize method
RANDOM_NORMAL = "random_normal"
RANDOM_UNIFORM = 'random_uniform'
ONES = 'ones'
ZEROS = 'zeros'
CONST = 'const'
# decision tree
MAX_SPLIT_NODES = 2 ** 16
MAX_SPLITINFO_TO_COMPUTE = 2 ** 10
NORMAL_TREE = 'normal'
COMPLETE_SECURE_TREE = 'complete_secure'
STD_TREE = 'std'
MIX_TREE = 'mix'
LAYERED_TREE = 'layered'
SINGLE_OUTPUT = 'single_output'
MULTI_OUTPUT = 'multi_output'
TRAIN_EVALUATE = 'train_evaluate'
VALIDATE_EVALUATE = 'validate_evaluate'
# Feature engineering
G_BIN_NUM = 10
DEFAULT_COMPRESS_THRESHOLD = 10000
DEFAULT_HEAD_SIZE = 10000
DEFAULT_RELATIVE_ERROR = 1e-4
ONE_HOT_LIMIT = 1024  # no more than 1024 possible values
PERCENTAGE_VALUE_LIMIT = 0.1
SECURE_AGG_AMPLIFY_FACTOR = 1000
QUANTILE = 'quantile'
BUCKET = 'bucket'
OPTIMAL = 'optimal'
VIRTUAL_SUMMARY = 'virtual_summary'
RECURSIVE_QUERY = 'recursive_query'
# Feature selection methods
UNIQUE_VALUE = 'unique_value'
IV_VALUE_THRES = 'iv_value_thres'
IV_PERCENTILE = 'iv_percentile'
IV_TOP_K = 'iv_top_k'
COEFFICIENT_OF_VARIATION_VALUE_THRES = 'coefficient_of_variation_value_thres'
# COEFFICIENT_OF_VARIATION_PERCENTILE = 'coefficient_of_variation_percentile'
OUTLIER_COLS = 'outlier_cols'
MANUALLY_FILTER = 'manually'
PERCENTAGE_VALUE = 'percentage_value'
IV_FILTER = 'iv_filter'
STATISTIC_FILTER = 'statistic_filter'
PSI_FILTER = 'psi_filter'
VIF_FILTER = 'vif_filter'
CORRELATION_FILTER = 'correlation_filter'
SECUREBOOST = 'sbt'
HETERO_SBT_FILTER = 'hetero_sbt_filter'
HOMO_SBT_FILTER = 'homo_sbt_filter'
HETERO_FAST_SBT_FILTER = 'hetero_fast_sbt_filter'
IV = 'iv'
# Selection Pre-model
STATISTIC_MODEL = 'statistic_model'
BINNING_MODEL = 'binning_model'
# imputer
MIN = 'min'
MAX = 'max'
MEAN = 'mean'
DESIGNATED = 'designated'
STR = 'str'
FLOAT = 'float'
INT = 'int'
ORIGIN = 'origin'
MEDIAN = 'median'
# min_max_scaler
NORMAL = 'normal'
CAP = 'cap'
MINMAXSCALE = 'min_max_scale'
STANDARDSCALE = 'standard_scale'
ALL = 'all'
COL = 'col'
# intersection cache
PHONE = 'phone'
IMEI = 'imei'
MD5 = 'md5'
SHA1 = 'sha1'
SHA224 = 'sha224'
SHA256 = 'sha256'
SHA384 = 'sha384'
SHA512 = 'sha512'
SM3 = 'sm3'
INTERSECT_CACHE_TAG = 'Za'
SHARE_INFO_COL_NAME = "share_info"
# statistics
COUNT = 'count'
STANDARD_DEVIATION = 'stddev'
SUMMARY = 'summary'
DESCRIBE = 'describe'
SUM = 'sum'
COVARIANCE = 'cov'
CORRELATION = 'corr'
VARIANCE = 'variance'
COEFFICIENT_OF_VARIATION = 'coefficient_of_variance'
MISSING_COUNT = "missing_count"
MISSING_RATIO = "missing_ratio"
SKEWNESS = 'skewness'
KURTOSIS = 'kurtosis'
# adapters model name
HOMO_SBT = 'homo_sbt'
HETERO_SBT = 'hetero_sbt'
HETERO_FAST_SBT = 'hetero_fast_sbt'
HETERO_FAST_SBT_MIX = 'hetero_fast_sbt_mix'
HETERO_FAST_SBT_LAYERED = 'hetero_fast_sbt_layered'
# tree protobuf model name
HETERO_SBT_GUEST_MODEL = 'HeteroSecureBoostingTreeGuest'
HETERO_SBT_HOST_MODEL = 'HeteroSecureBoostingTreeHost'
HETERO_FAST_SBT_GUEST_MODEL = "HeteroFastSecureBoostingTreeGuest"
HETERO_FAST_SBT_HOST_MODEL = "HeteroFastSecureBoostingTreeHost"
HOMO_SBT_GUEST_MODEL = "HomoSecureBoostingTreeGuest"
HOMO_SBT_HOST_MODEL = "HomoSecureBoostingTreeHost"
# tree decimal round to prevent float error
TREE_DECIMAL_ROUND = 10
# homo sbt backend
MEMORY_BACKEND = 'memory'
DISTRIBUTED_BACKEND = 'distributed'
# column_expand
MANUAL = 'manual'
# scorecard
CREDIT = 'credit'
# sample weight
BALANCED = 'balanced'
# min r base fraction
MIN_BASE_FRACTION = 0.01
MAX_BASE_FRACTION = 0.99
MAX_SAMPLE_OUTPUT_LIMIT = 10 ** 6
# Hetero NN Selective BP Strategy
SELECTIVE_SIZE = 1024
# intersect join methods
INNER_JOIN = "inner_join"
LEFT_JOIN = "left_join"
DEFAULT_KEY_LENGTH = 1024
MIN_HASH_FUNC_COUNT = 4
MAX_HASH_FUNC_COUNT = 32
# curve names
CURVE25519 = 'curve25519'
# positive unlabeled
PROBABILITY = "probability"
QUANTITY = "quantity"
PROPORTION = "proportion"
DISTRIBUTION = "distribution"
| 8,409 | 22.824363 | 120 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/base_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import builtins
import json
import os
from pipeline.param import consts
class BaseParam(object):
def __init__(self):
pass
def check(self):
raise NotImplementedError("Parameter Object should be checked.")
def validate(self):
self.builtin_types = dir(builtins)
self.func = {"ge": self._greater_equal_than,
"le": self._less_equal_than,
"in": self._in,
"not_in": self._not_in,
"range": self._range
}
home_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
param_validation_path_prefix = home_dir + "/param_validation/"
param_name = type(self).__name__
param_validation_path = "/".join([param_validation_path_prefix, param_name + ".json"])
validation_json = None
print("param validation path is {}".format(home_dir))
try:
with open(param_validation_path, "r") as fin:
validation_json = json.loads(fin.read())
except BaseException:
return
self._validate_param(self, validation_json)
def _validate_param(self, param_obj, validation_json):
default_section = type(param_obj).__name__
var_list = param_obj.__dict__
for variable in var_list:
attr = getattr(param_obj, variable)
if type(attr).__name__ in self.builtin_types or attr is None:
if variable not in validation_json:
continue
validation_dict = validation_json[default_section][variable]
value = getattr(param_obj, variable)
value_legal = False
for op_type in validation_dict:
if self.func[op_type](value, validation_dict[op_type]):
value_legal = True
break
if not value_legal:
raise ValueError(
"Plase check runtime conf, {} = {} does not match user-parameter restriction".format(
variable, value))
elif variable in validation_json:
self._validate_param(attr, validation_json)
@staticmethod
def check_string(param, descr):
if type(param).__name__ not in ["str"]:
raise ValueError(descr + " {} not supported, should be string type".format(param))
@staticmethod
def check_positive_integer(param, descr):
if type(param).__name__ not in ["int", "long"] or param <= 0:
raise ValueError(descr + " {} not supported, should be positive integer".format(param))
@staticmethod
def check_positive_number(param, descr):
if type(param).__name__ not in ["float", "int", "long"] or param <= 0:
raise ValueError(descr + " {} not supported, should be positive numeric".format(param))
@staticmethod
def check_nonnegative_number(param, descr):
if type(param).__name__ not in ["float", "int", "long"] or param < 0:
raise ValueError(descr + " {} not supported, should be non-negative numeric".format(param))
@staticmethod
def check_decimal_float(param, descr):
if type(param).__name__ not in ["float"] or param < 0 or param > 1:
raise ValueError(descr + " {} not supported, should be a float number in range [0, 1]".format(param))
@staticmethod
def check_boolean(param, descr):
if type(param).__name__ != "bool":
raise ValueError(descr + " {} not supported, should be bool type".format(param))
@staticmethod
def check_open_unit_interval(param, descr):
if type(param).__name__ not in ["float"] or param <= 0 or param >= 1:
raise ValueError(descr + " should be a numeric number between 0 and 1 exclusively")
@staticmethod
def check_valid_value(param, descr, valid_values):
if param not in valid_values:
raise ValueError(descr + " {} is not supported, it should be in {}".format(param, valid_values))
@staticmethod
def check_defined_type(param, descr, types):
if type(param).__name__ not in types:
raise ValueError(descr + " {} not supported, should be one of {}".format(param, types))
@staticmethod
def check_and_change_lower(param, valid_list, descr=''):
if type(param).__name__ != 'str':
raise ValueError(descr + " {} not supported, should be one of {}".format(param, valid_list))
lower_param = param.lower()
if lower_param in valid_list:
return lower_param
else:
raise ValueError(descr + " {} not supported, should be one of {}".format(param, valid_list))
@staticmethod
def _greater_equal_than(value, limit):
return value >= limit - consts.FLOAT_ZERO
@staticmethod
def _less_equal_than(value, limit):
return value <= limit + consts.FLOAT_ZERO
@staticmethod
def _range(value, ranges):
in_range = False
for left_limit, right_limit in ranges:
if left_limit - consts.FLOAT_ZERO <= value <= right_limit + consts.FLOAT_ZERO:
in_range = True
break
return in_range
@staticmethod
def _in(value, right_value_list):
return value in right_value_list
@staticmethod
def _not_in(value, wrong_value_list):
return value not in wrong_value_list
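# --- Illustrative usage sketch (not part of the original module) ---
# A minimal subclass showing how the static checkers are typically combined
# inside check(); `DemoParam` and its fields are hypothetical.
if __name__ == "__main__":
    class DemoParam(BaseParam):
        def __init__(self, max_iter=100, tol=1e-4):
            super().__init__()
            self.max_iter = max_iter
            self.tol = tol

        def check(self):
            descr = "demo param's "
            self.check_positive_integer(self.max_iter, descr + "max_iter")
            self.check_positive_number(self.tol, descr + "tol")
            return True

    DemoParam(max_iter=10, tol=1e-5).check()
    print("demo param check passed")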
| 6,130 | 35.933735 | 113 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/column_expand_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class ColumnExpandParam(BaseParam):
"""
Define method used for expanding column
Parameters
----------
append_header : None or str or List[str], default: None
Name(s) for appended feature(s). If None is given, module outputs the original input value without any operation.
method : str, default: 'manual'
If method is 'manual', use user-specified `fill_value` to fill in new features.
fill_value : int or float or str or List[int] or List[float] or List[str], default: 1e-8
Used for filling expanded feature columns. If given a list, length of the list must match that of `append_header`
need_run: bool, default: True
        Indicate whether this module needs to be run.
"""
def __init__(self, append_header=None, method="manual",
fill_value=consts.FLOAT_ZERO, need_run=True):
super(ColumnExpandParam, self).__init__()
self.append_header = [] if append_header is None else append_header
self.method = method
self.fill_value = fill_value
self.need_run = need_run
def check(self):
descr = "column_expand param's "
if not isinstance(self.method, str):
raise ValueError(f"{descr}method {self.method} not supported, should be str type")
else:
user_input = self.method.lower()
if user_input == "manual":
self.method = consts.MANUAL
else:
raise ValueError(f"{descr} method {user_input} not supported")
BaseParam.check_boolean(self.need_run, descr=descr)
if not isinstance(self.append_header, list):
raise ValueError(f"{descr} append_header must be None or list of str. "
f"Received {type(self.append_header)} instead.")
for feature_name in self.append_header:
BaseParam.check_string(feature_name, descr + "append_header values")
if isinstance(self.fill_value, list):
if len(self.append_header) != len(self.fill_value):
raise ValueError(
f"{descr} `fill value` is set to be list, "
f"and param `append_header` must also be list of the same length.")
else:
self.fill_value = [self.fill_value]
for value in self.fill_value:
if type(value).__name__ not in ["float", "int", "long", "str"]:
raise ValueError(
f"{descr} fill value(s) must be float, int, or str. Received type {type(value)} instead.")
return True
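# --- Illustrative usage sketch (not part of the original module) ---
# Appending two constant columns; the names and values below are made up.
if __name__ == "__main__":
    param = ColumnExpandParam(append_header=["bias_0", "bias_1"],
                              fill_value=[1.0, 0.0])
    param.check()  # verifies types and that fill_value matches append_header
    print(param.append_header, param.fill_value)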
| 3,321 | 38.082353 | 121 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/local_baseline_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.base_param import BaseParam
from pipeline.param.predict_param import PredictParam
class LocalBaselineParam(BaseParam):
"""
Define the local baseline model param
Parameters
----------
model_name : str
sklearn model used to train on baseline model
model_opts : dict or none, default None
Param to be used as input into baseline model
predict_param : PredictParam object, default: default PredictParam object
predict param
need_run: bool, default True
        Indicate whether this module needs to be run
"""
def __init__(self, model_name="LogisticRegression", model_opts=None, predict_param=PredictParam(), need_run=True):
super(LocalBaselineParam, self).__init__()
self.model_name = model_name
self.model_opts = model_opts
self.predict_param = copy.deepcopy(predict_param)
self.need_run = need_run
def check(self):
descr = "local baseline param"
self.model_name = self.check_and_change_lower(self.model_name,
["logisticregression"],
descr)
self.check_boolean(self.need_run, descr)
if self.model_opts is not None:
if not isinstance(self.model_opts, dict):
raise ValueError(descr + " model_opts must be None or dict.")
if self.model_opts is None:
self.model_opts = {}
self.predict_param.check()
return True
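# --- Illustrative usage sketch (not part of the original module) ---
# model_opts mirrors sklearn LogisticRegression keyword arguments;
# the specific options are illustrative only.
if __name__ == "__main__":
    param = LocalBaselineParam(model_name="LogisticRegression",
                               model_opts={"penalty": "l2", "C": 1.0})
    param.check()  # lowers model_name and validates model_opts / predict_param
    print(param.model_name, param.model_opts)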
| 2,219 | 32.134328 | 118 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/evaluation_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging

from pipeline.param import consts
from pipeline.param.base_param import BaseParam

# module-level logger; check() below logs warnings and info via this name
LOGGER = logging.getLogger(__name__)
class EvaluateParam(BaseParam):
"""
Define the evaluation method of binary/multiple classification and regression
Parameters
----------
    eval_type: string, supports 'binary' for HomoLR, HeteroLR and SecureBoost, and 'regression' for SecureBoost; 'multi' is not supported in this version
    unfold_multi_result: bool, whether to unfold a multi-class result into several one-vs-rest binary classification results
    pos_label: the positive label; may be int, float or str depending on the data's label, and takes effect only when eval_type is 'binary'
need_run: bool, default True
        Indicate whether this module needs to be run
"""
def __init__(self, eval_type="binary", pos_label=1, need_run=True, metrics=None,
run_clustering_arbiter_metric=False, unfold_multi_result=False):
super().__init__()
self.eval_type = eval_type
self.pos_label = pos_label
self.need_run = need_run
self.metrics = metrics
self.unfold_multi_result = unfold_multi_result
self.run_clustering_arbiter_metric = run_clustering_arbiter_metric
self.default_metrics = {
consts.BINARY: consts.ALL_BINARY_METRICS,
consts.MULTY: consts.ALL_MULTI_METRICS,
consts.REGRESSION: consts.ALL_REGRESSION_METRICS,
consts.CLUSTERING: consts.ALL_CLUSTER_METRICS
}
self.allowed_metrics = {
consts.BINARY: consts.ALL_BINARY_METRICS,
consts.MULTY: consts.ALL_MULTI_METRICS,
consts.REGRESSION: consts.ALL_REGRESSION_METRICS,
consts.CLUSTERING: consts.ALL_CLUSTER_METRICS
}
def _use_single_value_default_metrics(self):
self.default_metrics = {
consts.BINARY: consts.DEFAULT_BINARY_METRIC,
consts.MULTY: consts.DEFAULT_MULTI_METRIC,
consts.REGRESSION: consts.DEFAULT_REGRESSION_METRIC,
consts.CLUSTERING: consts.DEFAULT_CLUSTER_METRIC
}
def _check_valid_metric(self, metrics_list):
metric_list = consts.ALL_METRIC_NAME
alias_name: dict = consts.ALIAS
full_name_list = []
metrics_list = [str.lower(i) for i in metrics_list]
for metric in metrics_list:
if metric in metric_list:
if metric not in full_name_list:
full_name_list.append(metric)
continue
valid_flag = False
for alias, full_name in alias_name.items():
if metric in alias:
if full_name not in full_name_list:
full_name_list.append(full_name)
valid_flag = True
break
if not valid_flag:
raise ValueError('metric {} is not supported'.format(metric))
allowed_metrics = self.allowed_metrics[self.eval_type]
for m in full_name_list:
if m not in allowed_metrics:
raise ValueError('metric {} is not used for {} task'.format(m, self.eval_type))
if consts.RECALL in full_name_list and consts.PRECISION not in full_name_list:
full_name_list.append(consts.PRECISION)
if consts.RECALL not in full_name_list and consts.PRECISION in full_name_list:
full_name_list.append(consts.RECALL)
return full_name_list
def check(self):
descr = "evaluate param's "
self.eval_type = self.check_and_change_lower(self.eval_type,
[consts.BINARY, consts.MULTY, consts.REGRESSION,
consts.CLUSTERING],
descr)
if type(self.pos_label).__name__ not in ["str", "float", "int"]:
raise ValueError(
"evaluate param's pos_label {} not supported, should be str or float or int type".format(
self.pos_label))
if type(self.need_run).__name__ != "bool":
raise ValueError(
"evaluate param's need_run {} not supported, should be bool".format(
self.need_run))
if self.metrics is None or len(self.metrics) == 0:
self.metrics = self.default_metrics[self.eval_type]
LOGGER.warning('use default metric {} for eval type {}'.format(self.metrics, self.eval_type))
self.check_boolean(self.unfold_multi_result, 'multi_result_unfold')
self.metrics = self._check_valid_metric(self.metrics)
LOGGER.info("Finish evaluation parameter check!")
return True
def check_single_value_default_metric(self):
self._use_single_value_default_metrics()
# in validation strategy, psi f1-score and confusion-mat pr-quantile are not supported in cur version
if self.metrics is None or len(self.metrics) == 0:
self.metrics = self.default_metrics[self.eval_type]
LOGGER.warning('use default metric {} for eval type {}'.format(self.metrics, self.eval_type))
ban_metric = [consts.PSI, consts.F1_SCORE, consts.CONFUSION_MAT, consts.QUANTILE_PR]
        # filter instead of removing while iterating, which would skip elements
        self.metrics = [metric for metric in self.metrics if metric not in ban_metric]
self.check()
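# --- Illustrative usage sketch (not part of the original module) ---
# Shows metric validation and the precision/recall pairing rule in check();
# assumes consts.ALL_METRIC_NAME and consts.ALIAS from the full consts module.
if __name__ == "__main__":
    param = EvaluateParam(eval_type="binary", metrics=["auc", "recall"])
    param.check()
    # 'precision' is appended automatically because 'recall' was requested
    print(param.metrics)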
| 6,055 | 37.573248 | 158 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/hetero_sshe_lr_param.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from pipeline.param.logistic_regression_param import LogisticParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.init_model_param import InitParam
from pipeline.param.predict_param import PredictParam
from pipeline.param import consts
class HeteroSSHELRParam(LogisticParam):
"""
Parameters used for Hetero SSHE Logistic Regression
Parameters
----------
penalty : str, 'L1', 'L2' or None. default: 'L2'
Penalty method used in LR. If it is not None, weights are required to be reconstruct every iter.
tol : float, default: 1e-4
The tolerance of convergence
alpha : float, default: 1.0
Regularization strength coefficient.
optimizer : str, 'sgd', 'rmsprop', 'adam', 'nesterov_momentum_sgd', or 'adagrad', default: 'sgd'
Optimizer
batch_size : int, default: -1
Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
learning_rate : float, default: 0.01
Learning rate
max_iter : int, default: 100
The maximum iteration for training.
early_stop : str, 'diff', 'weight_diff' or 'abs', default: 'diff'
Method used to judge converge or not.
a) diff: Use difference of loss between two iterations to judge whether converge.
b) weight_diff: Use difference between weights of two consecutive iterations
c) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < eps, it is converged.
decay: int or float, default: 1
Decay rate for learning rate. learning rate will follow the following decay schedule.
lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t)
where t is the iter number.
decay_sqrt: Bool, default: True
lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t)
encrypt_param: EncryptParam object, default: default EncryptParam object
encrypt param
predict_param: PredictParam object, default: default PredictParam object
predict param
cv_param: CrossValidationParam object, default: default CrossValidationParam object
cv param
multi_class: str, 'ovr', default: 'ovr'
If it is a multi_class task, indicate what strategy to use. Currently, support 'ovr' short for one_vs_rest only.
reveal_strategy: str, "respectively", "encrypted_reveal_in_host", default: "respectively"
"respectively": Means guest and host can reveal their own part of weights only.
"encrypted_reveal_in_host": Means host can be revealed his weights in encrypted mode, and guest can be revealed in normal mode.
reveal_every_iter: bool, default: False
Whether reconstruct model weights every iteration. If so, Regularization is available.
The performance will be better as well since the algorithm process is simplified.
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='sgd',
batch_size=-1, learning_rate=0.01, init_param=InitParam(),
max_iter=100, early_stop='diff', encrypt_param=EncryptParam(),
predict_param=PredictParam(), cv_param=CrossValidationParam(),
decay=1, decay_sqrt=True,
multi_class='ovr', use_mix_rand=True,
reveal_strategy="respectively",
reveal_every_iter=False,
callback_param=CallbackParam(),
encrypted_mode_calculator_param=EncryptedModeCalculatorParam()
):
super(HeteroSSHELRParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer,
batch_size=batch_size,
learning_rate=learning_rate,
init_param=init_param, max_iter=max_iter, early_stop=early_stop,
predict_param=predict_param, cv_param=cv_param,
decay=decay,
decay_sqrt=decay_sqrt, multi_class=multi_class,
encrypt_param=encrypt_param, callback_param=callback_param)
self.use_mix_rand = use_mix_rand
self.reveal_strategy = reveal_strategy
self.reveal_every_iter = reveal_every_iter
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
def check(self):
descr = "logistic_param's"
super(HeteroSSHELRParam, self).check()
self.check_boolean(self.reveal_every_iter, descr)
if self.penalty is None:
pass
elif type(self.penalty).__name__ != "str":
raise ValueError(
"logistic_param's penalty {} not supported, should be str type".format(self.penalty))
else:
self.penalty = self.penalty.upper()
"""
if self.penalty not in [consts.L1_PENALTY, consts.L2_PENALTY]:
raise ValueError(
"logistic_param's penalty not supported, penalty should be 'L1', 'L2' or 'none'")
"""
if not self.reveal_every_iter:
if self.penalty not in [consts.L2_PENALTY, consts.NONE.upper()]:
raise ValueError(
f"penalty should be 'L2' or 'none', when reveal_every_iter is False"
)
if type(self.optimizer).__name__ != "str":
raise ValueError(
"logistic_param's optimizer {} not supported, should be str type".format(self.optimizer))
else:
self.optimizer = self.optimizer.lower()
if self.reveal_every_iter:
if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad', 'nesterov_momentum_sgd']:
raise ValueError(
"When reveal_every_iter is True, "
"sshe logistic_param's optimizer not supported, optimizer should be"
" 'sgd', 'rmsprop', 'adam', 'nesterov_momentum_sgd', or 'adagrad'")
else:
if self.optimizer not in ['sgd', 'nesterov_momentum_sgd']:
raise ValueError("When reveal_every_iter is False, "
"sshe logistic_param's optimizer not supported, optimizer should be"
" 'sgd', 'nesterov_momentum_sgd'")
if self.encrypt_param.method not in [consts.PAILLIER, None]:
raise ValueError(
"logistic_param's encrypted method support 'Paillier' or None only")
if self.callback_param.validation_freqs is not None:
if self.reveal_every_iter is False:
raise ValueError(f"When reveal_every_iter is False, validation every iter"
f" is not supported.")
self.reveal_strategy = self.check_and_change_lower(self.reveal_strategy,
["respectively", "encrypted_reveal_in_host"],
f"{descr} reveal_strategy")
if self.reveal_strategy == "encrypted_reveal_in_host" and self.reveal_every_iter:
            raise PermissionError("reveal strategy: encrypted_reveal_in_host mode is not allowed to reveal weights every iter.")
self.encrypted_mode_calculator_param.check()
return True
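# --- Illustrative usage sketch (not part of the original module) ---
# With reveal_every_iter=False, only 'L2'/None penalty and the 'sgd' family
# of optimizers pass check(); values are illustrative, and the example
# assumes the full pipeline.param package (including glm_param) is importable.
if __name__ == "__main__":
    param = HeteroSSHELRParam(penalty="L2", optimizer="sgd",
                              reveal_strategy="respectively",
                              reveal_every_iter=False)
    param.check()
    print(param.penalty, param.optimizer, param.reveal_strategy)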
| 8,432 | 47.745665 | 135 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/hetero_nn_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import collections
from types import SimpleNamespace
from pipeline.param.base_param import BaseParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.predict_param import PredictParam
from pipeline.param import consts
class DatasetParam(BaseParam):
def __init__(self, dataset_name=None, **kwargs):
super(DatasetParam, self).__init__()
self.dataset_name = dataset_name
self.param = kwargs
def check(self):
if self.dataset_name is not None:
self.check_string(self.dataset_name, 'dataset_name')
def to_dict(self):
ret = {'dataset_name': self.dataset_name, 'param': self.param}
return ret
class SelectorParam(object):
"""
Parameters
----------
method: None or str
        back propagation selection method, accepts "relative" only, default: None
selective_size: int
deque size to use, store the most recent selective_size historical loss, default: 1024
beta: int
sample whose selective probability >= power(np.random, beta) will be selected
min_prob: Numeric
selective probability is max(min_prob, rank_rate)
"""
def __init__(self, method=None, beta=1, selective_size=consts.SELECTIVE_SIZE, min_prob=0, random_state=None):
self.method = method
self.selective_size = selective_size
self.beta = beta
self.min_prob = min_prob
self.random_state = random_state
def check(self):
if self.method is not None and self.method not in ["relative"]:
            raise ValueError('selective method should be None or "relative"')
if not isinstance(self.selective_size, int) or self.selective_size <= 0:
raise ValueError("selective size should be a positive integer")
if not isinstance(self.beta, int):
raise ValueError("beta should be integer")
if not isinstance(self.min_prob, (float, int)):
raise ValueError("min_prob should be numeric")
class CoAEConfuserParam(BaseParam):
"""
    A label protection mechanism proposed in the paper "Batch Label Inference and Replacement Attacks in Black-Boxed Vertical Federated Learning"
paper link: https://arxiv.org/abs/2112.05409
Convert true labels to fake soft labels by using an auto-encoder.
Args:
enable: boolean
run CoAE or not
epoch: None or int
auto-encoder training epochs
lr: float
auto-encoder learning rate
lambda1: float
        parameter to control the difference between true labels and fake soft labels. The larger the parameter,
        the more attention the autoencoder pays to making true labels and fake soft labels different.
lambda2: float
parameter to control entropy loss, see original paper for details
verbose: boolean
print loss log while training auto encoder
"""
def __init__(self, enable=False, epoch=50, lr=0.001, lambda1=1.0, lambda2=2.0, verbose=False):
super(CoAEConfuserParam, self).__init__()
self.enable = enable
self.epoch = epoch
self.lr = lr
self.lambda1 = lambda1
self.lambda2 = lambda2
self.verbose = verbose
def check(self):
self.check_boolean(self.enable, 'enable')
if not isinstance(self.epoch, int) or self.epoch <= 0:
raise ValueError("epoch should be a positive integer")
if not isinstance(self.lr, float):
raise ValueError('lr should be a float number')
if not isinstance(self.lambda1, float):
raise ValueError('lambda1 should be a float number')
if not isinstance(self.lambda2, float):
raise ValueError('lambda2 should be a float number')
self.check_boolean(self.verbose, 'verbose')
class HeteroNNParam(BaseParam):
"""
Parameters used for Hetero Neural Network.
Parameters
----------
task_type: str, task type of hetero nn model, one of 'classification', 'regression'.
bottom_nn_define: a dict represents the structure of bottom neural network.
interactive_layer_define: a dict represents the structure of interactive layer.
interactive_layer_lr: float, the learning rate of interactive layer.
top_nn_define: a dict represents the structure of top neural network.
    optimizer: optimizer method, accepts the following types:
1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD"
2. a dict, with a required key-value pair keyed by "optimizer",
with optional key-value pairs such as learning rate.
defaults to "SGD".
loss: str, a string to define loss function used
epochs: int, the maximum iteration for aggregation in training.
batch_size : int, batch size when updating model.
-1 means use all data in a batch. i.e. Not to use mini-batch strategy.
defaults to -1.
early_stop : str, accept 'diff' only in this version, default: 'diff'
Method used to judge converge or not.
a) diff: Use difference of loss between two iterations to judge whether converge.
floating_point_precision: None or integer, if not None, means use floating_point_precision-bit to speed up calculation,
e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
the result by 2**floating_point_precision in the end.
callback_param: CallbackParam object
"""
def __init__(self,
task_type='classification',
bottom_nn_define=None,
top_nn_define=None,
config_type='pytorch',
interactive_layer_define=None,
interactive_layer_lr=0.9,
optimizer='SGD',
loss=None,
epochs=100,
batch_size=-1,
early_stop="diff",
tol=1e-5,
encrypt_param=EncryptParam(),
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
predict_param=PredictParam(),
cv_param=CrossValidationParam(),
validation_freqs=None,
early_stopping_rounds=None,
metrics=None,
use_first_metric_only=True,
selector_param=SelectorParam(),
floating_point_precision=23,
callback_param=CallbackParam(),
coae_param=CoAEConfuserParam(),
dataset=DatasetParam()
):
super(HeteroNNParam, self).__init__()
self.task_type = task_type
self.bottom_nn_define = bottom_nn_define
self.interactive_layer_define = interactive_layer_define
self.interactive_layer_lr = interactive_layer_lr
self.top_nn_define = top_nn_define
self.batch_size = batch_size
self.epochs = epochs
self.early_stop = early_stop
self.tol = tol
self.optimizer = optimizer
self.loss = loss
self.validation_freqs = validation_freqs
self.early_stopping_rounds = early_stopping_rounds
self.metrics = metrics or []
self.use_first_metric_only = use_first_metric_only
self.encrypt_param = copy.deepcopy(encrypt_param)
self.encrypted_model_calculator_param = encrypted_mode_calculator_param
self.predict_param = copy.deepcopy(predict_param)
self.cv_param = copy.deepcopy(cv_param)
self.selector_param = selector_param
self.floating_point_precision = floating_point_precision
self.callback_param = copy.deepcopy(callback_param)
self.coae_param = coae_param
self.dataset = dataset
self.config_type = 'pytorch' # pytorch only
def check(self):
assert isinstance(self.dataset, DatasetParam), 'dataset must be a DatasetParam()'
self.dataset.check()
if self.task_type not in ["classification", "regression"]:
raise ValueError("config_type should be classification or regression")
if not isinstance(self.tol, (int, float)):
raise ValueError("tol should be numeric")
if not isinstance(self.epochs, int) or self.epochs <= 0:
raise ValueError("epochs should be a positive integer")
if self.bottom_nn_define and not isinstance(self.bottom_nn_define, dict):
raise ValueError("bottom_nn_define should be a dict defining the structure of neural network")
if self.top_nn_define and not isinstance(self.top_nn_define, dict):
raise ValueError("top_nn_define should be a dict defining the structure of neural network")
if self.interactive_layer_define is not None and not isinstance(self.interactive_layer_define, dict):
raise ValueError(
"the interactive_layer_define should be a dict defining the structure of interactive layer")
if self.batch_size != -1:
if not isinstance(self.batch_size, int) \
or self.batch_size < consts.MIN_BATCH_SIZE:
raise ValueError(
" {} not supported, should be larger than 10 or -1 represent for all data".format(self.batch_size))
if self.early_stop != "diff":
raise ValueError("early stop should be diff in this version")
if self.metrics is not None and not isinstance(self.metrics, list):
raise ValueError("metrics should be a list")
if self.floating_point_precision is not None and \
(not isinstance(self.floating_point_precision, int) or
self.floating_point_precision < 0 or self.floating_point_precision > 63):
raise ValueError("floating point precision should be null or a integer between 0 and 63")
self.encrypt_param.check()
self.encrypted_model_calculator_param.check()
self.predict_param.check()
self.selector_param.check()
self.coae_param.check()
descr = "hetero nn param's "
for p in ["early_stopping_rounds", "validation_freqs",
"use_first_metric_only"]:
if self._deprecated_params_set.get(p):
if "callback_param" in self.get_user_feeded():
raise ValueError(f"{p} and callback param should not be set simultaneously,"
f"{self._deprecated_params_set}, {self.get_user_feeded()}")
else:
self.callback_param.callbacks = ["PerformanceEvaluate"]
break
if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"):
self.callback_param.validation_freqs = self.validation_freqs
if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"):
self.callback_param.early_stopping_rounds = self.early_stopping_rounds
if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"):
if self.metrics:
self.callback_param.metrics = self.metrics
if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"):
self.callback_param.use_first_metric_only = self.use_first_metric_only
| 12,330 | 41.37457 | 139 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/dataio_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class DataIOParam(BaseParam):
"""
Define dataio parameters that used in federated ml.
This module is not supported to use in training task since Fate-v1.9.0, use data transform instead.
Parameters
----------
input_format : str, accepted 'dense','sparse' 'tag' only in this version. default: 'dense'.
please have a look at this tutorial at "DataIO" section of federatedml/util/README.md.
Formally,
dense input format data should be set to "dense",
svm-light input format data should be set to "sparse",
tag or tag:value input format data should be set to "tag".
delimitor : str, the delimitor of data input, default: ','
data_type : str, the data type of data input, accepted 'float','float64','int','int64','str','long'
"default: "float64"
exclusive_data_type : dict, the key of dict is col_name, the value is data_type, use to specified special data type
of some features.
tag_with_value: bool, use if input_format is 'tag', if tag_with_value is True,
input column data format should be tag[delimitor]value, otherwise is tag only
tag_value_delimitor: str, use if input_format is 'tag' and 'tag_with_value' is True,
delimitor of tag[delimitor]value column value.
    missing_fill : bool, need to fill missing value or not, accepted only True/False, default: False
default_value : None or single object type or list, the value to replace missing value.
                    if None, it will use the default value defined in federatedml/feature/imputer.py,
                    if a single object, missing values will be filled with this object,
                    if a list, its length should equal the feature dimension of the input data,
                    meaning that if some column happens to have missing values, they will be
                    replaced by the element in the identical position of this list.
default: None
missing_fill_method: None or str, the method to replace missing value, should be one of [None, 'min', 'max', 'mean', 'designated'], default: None
missing_impute: None or list, element of list can be any type, or auto generated if value is None, define which values to be consider as missing, default: None
    outlier_replace: bool, need to replace outlier value or not, accepted only True/False, default: False
outlier_replace_method: None or str, the method to replace missing value, should be one of [None, 'min', 'max', 'mean', 'designated'], default: None
outlier_impute: None or list, element of list can be any type, which values should be regard as missing value, default: None
outlier_replace_value: None or single object type or list, the value to replace outlier.
                           if None, it will use the default value defined in federatedml/feature/imputer.py,
                           if a single object, outliers will be replaced with this object,
                           if a list, its length should equal the feature dimension of the input data,
                           meaning that if some column happens to have outliers, they will be
                           replaced by the element in the identical position of this list.
default: None
    with_label : bool, True if input data contains a label column, False otherwise. default: False
label_name : str, column_name of the column where label locates, only use in dense-inputformat. default: 'y'
label_type : object, accepted 'int','int64','float','float64','long','str' only,
                use when with_label is True. default: 'int'
output_format : str, accepted 'dense','sparse' only in this version. default: 'dense'
"""
def __init__(self, input_format="dense", delimitor=',', data_type='float64',
exclusive_data_type=None,
tag_with_value=False, tag_value_delimitor=":",
missing_fill=False, default_value=0, missing_fill_method=None,
missing_impute=None, outlier_replace=False, outlier_replace_method=None,
outlier_impute=None, outlier_replace_value=0,
with_label=False, label_name='y',
label_type='int', output_format='dense', need_run=True):
self.input_format = input_format
self.delimitor = delimitor
self.data_type = data_type
self.exclusive_data_type = exclusive_data_type
self.tag_with_value = tag_with_value
self.tag_value_delimitor = tag_value_delimitor
self.missing_fill = missing_fill
self.default_value = default_value
self.missing_fill_method = missing_fill_method
self.missing_impute = missing_impute
self.outlier_replace = outlier_replace
self.outlier_replace_method = outlier_replace_method
self.outlier_impute = outlier_impute
self.outlier_replace_value = outlier_replace_value
self.with_label = with_label
self.label_name = label_name
self.label_type = label_type
self.output_format = output_format
self.need_run = need_run
def check(self):
descr = "dataio param's"
self.input_format = self.check_and_change_lower(self.input_format,
["dense", "sparse", "tag"],
descr)
self.output_format = self.check_and_change_lower(self.output_format,
["dense", "sparse"],
descr)
self.data_type = self.check_and_change_lower(self.data_type,
["int", "int64", "float", "float64", "str", "long"],
descr)
if type(self.missing_fill).__name__ != 'bool':
raise ValueError("dataio param's missing_fill {} not supported".format(self.missing_fill))
if self.missing_fill_method is not None:
self.missing_fill_method = self.check_and_change_lower(self.missing_fill_method,
['min', 'max', 'mean', 'designated'],
descr)
if self.outlier_replace_method is not None:
self.outlier_replace_method = self.check_and_change_lower(self.outlier_replace_method,
['min', 'max', 'mean', 'designated'],
descr)
if type(self.with_label).__name__ != 'bool':
raise ValueError("dataio param's with_label {} not supported".format(self.with_label))
if self.with_label:
if not isinstance(self.label_name, str):
raise ValueError("dataio param's label_name {} should be str".format(self.label_name))
self.label_type = self.check_and_change_lower(self.label_type,
["int", "int64", "float", "float64", "str", "long"],
descr)
if self.exclusive_data_type is not None and not isinstance(self.exclusive_data_type, dict):
raise ValueError("exclusive_data_type is should be None or a dict")
return True
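# --- Illustrative usage sketch (not part of the original module) ---
# Dense input with a label column named 'y'; settings are illustrative.
if __name__ == "__main__":
    param = DataIOParam(input_format="dense", with_label=True,
                        label_name="y", label_type="int",
                        output_format="dense")
    param.check()
    print(param.input_format, "->", param.output_format)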
| 8,289 | 50.490683 | 163 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/psi_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class PSIParam(BaseParam):
def __init__(self, max_bin_num=20, need_run=True, dense_missing_val=None):
super(PSIParam, self).__init__()
self.max_bin_num = max_bin_num
self.need_run = need_run
self.dense_missing_val = dense_missing_val
def check(self):
assert isinstance(self.max_bin_num, int) and self.max_bin_num > 0, 'max bin must be an integer larger than 0'
assert isinstance(self.need_run, bool)
if self.dense_missing_val is not None:
assert isinstance(self.dense_missing_val, str) or isinstance(self.dense_missing_val, int) or \
isinstance(self.dense_missing_val, float), \
'missing value type {} not supported'.format(type(self.dense_missing_val))
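# --- Illustrative usage sketch (not part of the original module) ---
# PSI binning with 20 bins and a custom dense missing-value token;
# the token -999 is made up for the demo.
if __name__ == "__main__":
    param = PSIParam(max_bin_num=20, need_run=True, dense_missing_val=-999)
    param.check()
    print(param.max_bin_num, param.dense_missing_val)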
| 1,480 | 36.025 | 117 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/onehot_encoder_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class OneHotEncoderParam(BaseParam):
"""
Parameters
----------
transform_col_indexes: list or int, default: -1
        Specify which columns need to be calculated. -1 represents all columns.
transform_col_names : list of string, default: []
        Specify which columns need to be calculated. Each element in the list represents a column name in the header.
need_run: bool, default True
        Indicate whether this module needs to be run
"""
def __init__(self, transform_col_indexes=-1, transform_col_names=None, need_run=True):
super(OneHotEncoderParam, self).__init__()
if transform_col_names is None:
transform_col_names = []
self.transform_col_indexes = transform_col_indexes
self.transform_col_names = transform_col_names
self.need_run = need_run
def check(self):
descr = "One-hot encoder param's"
self.check_defined_type(self.transform_col_indexes, descr, ['list', 'int', 'NoneType'])
self.check_defined_type(self.transform_col_names, descr, ['list', 'NoneType'])
return True
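# --- Illustrative usage sketch (not part of the original module) ---
# Encode two columns selected by name; indexes set to None in that case.
if __name__ == "__main__":
    param = OneHotEncoderParam(transform_col_indexes=None,
                               transform_col_names=["x0", "x1"])
    param.check()
    print(param.transform_col_names)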
| 1,819 | 32.703704 | 113 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/logistic_regression_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.glm_param import LinearModelParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.init_model_param import InitParam
from pipeline.param.predict_param import PredictParam
from pipeline.param.sqn_param import StochasticQuasiNewtonParam
from pipeline.param.stepwise_param import StepwiseParam
from pipeline.param import consts
class LogisticParam(LinearModelParam):
"""
Parameters used for Logistic Regression both for Homo mode or Hetero mode.
Parameters
----------
penalty : {'L2', 'L1' or None}
Penalty method used in LR. Please note that, when using encrypted version in HomoLR,
'L1' is not supported.
tol : float, default: 1e-4
The tolerance of convergence
alpha : float, default: 1.0
Regularization strength coefficient.
optimizer : {'rmsprop', 'sgd', 'adam', 'nesterov_momentum_sgd', 'sqn', 'adagrad'}, default: 'rmsprop'
Optimize method, if 'sqn' has been set, sqn_param will take effect. Currently, 'sqn' support hetero mode only.
batch_size : int, default: -1
Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
learning_rate : float, default: 0.01
Learning rate
max_iter : int, default: 100
The maximum iteration for training.
early_stop : {'diff', 'weight_diff', 'abs'}, default: 'diff'
Method used to judge converge or not.
a) diff: Use difference of loss between two iterations to judge whether converge.
b) weight_diff: Use difference between weights of two consecutive iterations
c) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < eps, it is converged.
        Please note that for hetero-lr with multiple hosts, this parameter supports "weight_diff" only.
decay: int or float, default: 1
Decay rate for learning rate. learning rate will follow the following decay schedule.
lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t)
where t is the iter number.
decay_sqrt: bool, default: True
lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t)
encrypt_param: EncryptParam object, default: default EncryptParam object
encrypt param
predict_param: PredictParam object, default: default PredictParam object
predict param
callback_param: CallbackParam object
callback param
cv_param: CrossValidationParam object, default: default CrossValidationParam object
cv param
multi_class: {'ovr'}, default: 'ovr'
If it is a multi_class task, indicate what strategy to use. Currently, support 'ovr' short for one_vs_rest only.
validation_freqs: int or list or tuple or set, or None, default None
validation frequency during training.
early_stopping_rounds: int, default: None
        Will stop training if one metric doesn't improve within the last early_stopping_rounds rounds
metrics: list or None, default: None
Indicate when executing evaluation during train process, which metrics will be used. If set as empty,
default metrics for specific task type will be used. As for binary classification, default metrics are
['auc', 'ks']
use_first_metric_only: bool, default: False
Indicate whether use the first metric only for early stopping judgement.
floating_point_precision: None or integer
if not None, use floating_point_precision-bit to speed up calculation,
e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
the result by 2**floating_point_precision in the end.
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='rmsprop',
batch_size=-1, shuffle=True, batch_strategy="full", masked_rate=5,
learning_rate=0.01, init_param=InitParam(),
max_iter=100, early_stop='diff', encrypt_param=EncryptParam(),
predict_param=PredictParam(), cv_param=CrossValidationParam(),
decay=1, decay_sqrt=True,
multi_class='ovr', validation_freqs=None, early_stopping_rounds=None,
stepwise_param=StepwiseParam(), floating_point_precision=23,
metrics=None,
use_first_metric_only=False,
callback_param=CallbackParam()
):
super(LogisticParam, self).__init__()
self.penalty = penalty
self.tol = tol
self.alpha = alpha
self.optimizer = optimizer
self.batch_size = batch_size
self.learning_rate = learning_rate
self.init_param = copy.deepcopy(init_param)
self.max_iter = max_iter
self.early_stop = early_stop
self.encrypt_param = encrypt_param
self.shuffle = shuffle
self.batch_strategy = batch_strategy
self.masked_rate = masked_rate
self.predict_param = copy.deepcopy(predict_param)
self.cv_param = copy.deepcopy(cv_param)
self.decay = decay
self.decay_sqrt = decay_sqrt
self.multi_class = multi_class
self.validation_freqs = validation_freqs
self.stepwise_param = copy.deepcopy(stepwise_param)
self.early_stopping_rounds = early_stopping_rounds
self.metrics = metrics or []
self.use_first_metric_only = use_first_metric_only
self.floating_point_precision = floating_point_precision
self.callback_param = copy.deepcopy(callback_param)
def check(self):
descr = "logistic_param's"
super(LogisticParam, self).check()
self.predict_param.check()
if self.encrypt_param.method not in [consts.PAILLIER, None]:
raise ValueError(
"logistic_param's encrypted method support 'Paillier' or None only")
self.multi_class = self.check_and_change_lower(self.multi_class, ["ovr"], f"{descr}")
return True
class HomoLogisticParam(LogisticParam):
"""
Parameters
----------
aggregate_iters : int, default: 1
Indicate how many iterations are aggregated once.
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='rmsprop',
batch_size=-1, learning_rate=0.01, init_param=InitParam(),
max_iter=100, early_stop='diff',
predict_param=PredictParam(), cv_param=CrossValidationParam(),
decay=1, decay_sqrt=True,
aggregate_iters=1, multi_class='ovr', validation_freqs=None,
metrics=['auc', 'ks'],
callback_param=CallbackParam()
):
super(HomoLogisticParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer,
batch_size=batch_size,
learning_rate=learning_rate,
init_param=init_param, max_iter=max_iter, early_stop=early_stop,
predict_param=predict_param,
cv_param=cv_param, multi_class=multi_class,
validation_freqs=validation_freqs,
decay=decay, decay_sqrt=decay_sqrt,
metrics=metrics,
callback_param=callback_param)
self.aggregate_iters = aggregate_iters
def check(self):
super().check()
if not isinstance(self.aggregate_iters, int):
raise ValueError(
"logistic_param's aggregate_iters {} not supported, should be int type".format(
self.aggregate_iters))
return True
class HeteroLogisticParam(LogisticParam):
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='rmsprop',
batch_size=-1, shuffle=True, batch_strategy="full", masked_rate=5,
learning_rate=0.01, init_param=InitParam(),
max_iter=100, early_stop='diff',
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
predict_param=PredictParam(), cv_param=CrossValidationParam(),
decay=1, decay_sqrt=True, sqn_param=StochasticQuasiNewtonParam(),
multi_class='ovr', validation_freqs=None, early_stopping_rounds=None,
metrics=['auc', 'ks'], floating_point_precision=23,
encrypt_param=EncryptParam(),
use_first_metric_only=False, stepwise_param=StepwiseParam(),
callback_param=CallbackParam()
):
super(
HeteroLogisticParam,
self).__init__(
penalty=penalty,
tol=tol,
alpha=alpha,
optimizer=optimizer,
batch_size=batch_size,
shuffle=shuffle,
batch_strategy=batch_strategy,
masked_rate=masked_rate,
learning_rate=learning_rate,
init_param=init_param,
max_iter=max_iter,
early_stop=early_stop,
predict_param=predict_param,
cv_param=cv_param,
decay=decay,
decay_sqrt=decay_sqrt,
multi_class=multi_class,
validation_freqs=validation_freqs,
early_stopping_rounds=early_stopping_rounds,
metrics=metrics,
floating_point_precision=floating_point_precision,
encrypt_param=encrypt_param,
use_first_metric_only=use_first_metric_only,
stepwise_param=stepwise_param,
callback_param=callback_param)
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
self.sqn_param = copy.deepcopy(sqn_param)
def check(self):
super().check()
self.encrypted_mode_calculator_param.check()
self.sqn_param.check()
return True
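# --- Illustrative usage sketch (not part of the original module) ---
# A hetero-LR parameter set; hyper-parameter values are illustrative only,
# and the example assumes the full pipeline.param package (including
# glm_param) is importable so that the chained check() calls resolve.
if __name__ == "__main__":
    param = HeteroLogisticParam(penalty="L2", optimizer="rmsprop",
                                learning_rate=0.15, max_iter=30,
                                early_stop="weight_diff")
    param.check()
    print(param.penalty, param.optimizer, param.early_stop)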
| 11,129 | 42.307393 | 120 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/data_transform_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class DataTransformParam(BaseParam):
"""
Define data_transform parameters that used in federated ml.
Parameters
----------
input_format : str, accepted 'dense','sparse' 'tag' only in this version. default: 'dense'.
please have a look at this tutorial at "DataTransform" section of federatedml/util/README.md.
Formally,
dense input format data should be set to "dense",
svm-light input format data should be set to "sparse",
tag or tag:value input format data should be set to "tag".
delimitor : str, the delimitor of data input, default: ','
data_type : str, the data type of data input, accepted 'float','float64','int','int64','str','long'
"default: "float64"
exclusive_data_type : dict, the key of dict is col_name, the value is data_type, use to specified special data type
of some features.
tag_with_value: bool, use if input_format is 'tag', if tag_with_value is True,
input column data format should be tag[delimitor]value, otherwise is tag only
tag_value_delimitor: str, use if input_format is 'tag' and 'tag_with_value' is True,
delimitor of tag[delimitor]value column value.
    missing_fill : bool, need to fill missing value or not, accepted only True/False, default: False
default_value : None or single object type or list, the value to replace missing value.
                    if None, it will use the default value defined in federatedml/feature/imputer.py,
                    if a single object, missing values will be filled with this object,
                    if a list, its length should equal the feature dimension of the input data,
                    meaning that if some column happens to have missing values, they will be
                    replaced by the element in the identical position of this list.
default: None
missing_fill_method: None or str, the method to replace missing value, should be one of [None, 'min', 'max', 'mean', 'designated'], default: None
missing_impute: None or list, element of list can be any type, or auto generated if value is None, define which values to be consider as missing, default: None
    outlier_replace: bool, need to replace outlier value or not, accepted only True/False, default: False
outlier_replace_method: None or str, the method to replace missing value, should be one of [None, 'min', 'max', 'mean', 'designated'], default: None
outlier_impute: None or list, element of list can be any type, which values should be regard as missing value, default: None
outlier_replace_value: None or single object type or list, the value to replace outlier.
                           if None, it will use the default value defined in federatedml/feature/imputer.py,
                           if a single object, outliers will be replaced with this object,
                           if a list, its length should equal the feature dimension of the input data,
                           meaning that if some column happens to have outliers, they will be
                           replaced by the element in the identical position of this list.
default: None
    with_label : bool, True if input data contains a label column, False otherwise. default: False
label_name : str, column_name of the column where label locates, only use in dense-inputformat. default: 'y'
label_type : object, accepted 'int','int64','float','float64','long','str' only,
                use when with_label is True. default: 'int'
output_format : str, accepted 'dense','sparse' only in this version. default: 'dense'
with_match_id: bool, True if dataset has match_id, default: False
"""
def __init__(self, input_format="dense", delimitor=',', data_type='float64',
exclusive_data_type=None,
tag_with_value=False, tag_value_delimitor=":",
missing_fill=False, default_value=0, missing_fill_method=None,
missing_impute=None, outlier_replace=False, outlier_replace_method=None,
outlier_impute=None, outlier_replace_value=0,
with_label=False, label_name='y',
label_type='int', output_format='dense', need_run=True,
with_match_id=False, match_id_name='', match_id_index=0):
self.input_format = input_format
self.delimitor = delimitor
self.data_type = data_type
self.exclusive_data_type = exclusive_data_type
self.tag_with_value = tag_with_value
self.tag_value_delimitor = tag_value_delimitor
self.missing_fill = missing_fill
self.default_value = default_value
self.missing_fill_method = missing_fill_method
self.missing_impute = missing_impute
self.outlier_replace = outlier_replace
self.outlier_replace_method = outlier_replace_method
self.outlier_impute = outlier_impute
self.outlier_replace_value = outlier_replace_value
self.with_label = with_label
self.label_name = label_name
self.label_type = label_type
self.output_format = output_format
self.need_run = need_run
self.with_match_id = with_match_id
self.match_id_name = match_id_name
self.match_id_index = match_id_index
def check(self):
descr = "data_transform param's"
self.input_format = self.check_and_change_lower(self.input_format,
["dense", "sparse", "tag"],
descr)
self.output_format = self.check_and_change_lower(self.output_format,
["dense", "sparse"],
descr)
self.data_type = self.check_and_change_lower(self.data_type,
["int", "int64", "float", "float64", "str", "long"],
descr)
if type(self.missing_fill).__name__ != 'bool':
raise ValueError("data_transform param's missing_fill {} not supported".format(self.missing_fill))
if self.missing_fill_method is not None:
self.missing_fill_method = self.check_and_change_lower(self.missing_fill_method,
['min', 'max', 'mean', 'designated'],
descr)
if self.outlier_replace_method is not None:
self.outlier_replace_method = self.check_and_change_lower(self.outlier_replace_method,
['min', 'max', 'mean', 'designated'],
descr)
if type(self.with_label).__name__ != 'bool':
raise ValueError("data_transform param's with_label {} not supported".format(self.with_label))
if self.with_label:
if not isinstance(self.label_name, str):
raise ValueError("data_transform param's label_name {} should be str".format(self.label_name))
self.label_type = self.check_and_change_lower(self.label_type,
["int", "int64", "float", "float64", "str", "long"],
descr)
if self.exclusive_data_type is not None and not isinstance(self.exclusive_data_type, dict):
raise ValueError("exclusive_data_type is should be None or a dict")
return True
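# --- Editor's illustrative sketch (not part of the original module) ---
# A minimal usage example for the parameter class above. The enclosing class
# name DataTransformParam and all chosen values are assumptions for illustration.
def _example_data_transform_param():
    # Parse tag:value formatted input, fill missing values with column means,
    # and output dense data with a label column named "y".
    param = DataTransformParam(input_format="tag", tag_with_value=True,
                               tag_value_delimitor=":",
                               missing_fill=True, missing_fill_method="mean",
                               with_label=True, label_name="y",
                               output_format="dense")
    param.check()  # normalizes casing and validates the settings
    return param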
| 8,515 | 50.301205 | 163 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/sqn_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.param.base_param import BaseParam
class StochasticQuasiNewtonParam(BaseParam):
"""
Parameters used for stochastic quasi-newton method.
Parameters
----------
    update_interval_L : int, default: 3
        Set how many iterations to wait before updating the Hessian matrix
    memory_M : int, default: 5
        Stack size of curvature information, i.e. y_k and s_k in the paper.
    sample_size : int, default: 5000
        Sample size of data that is used to update the Hessian matrix
    random_seed : int or None, default: None
        Random seed used when sampling data; must be a positive integer if provided.
"""
def __init__(self, update_interval_L=3, memory_M=5, sample_size=5000, random_seed=None):
super().__init__()
self.update_interval_L = update_interval_L
self.memory_M = memory_M
self.sample_size = sample_size
self.random_seed = random_seed
def check(self):
descr = "hetero sqn param's"
self.check_positive_integer(self.update_interval_L, descr)
self.check_positive_integer(self.memory_M, descr)
self.check_positive_integer(self.sample_size, descr)
if self.random_seed is not None:
self.check_positive_integer(self.random_seed, descr)
return True
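# --- Editor's illustrative sketch (not part of the original module) ---
# Construct the SQN hyper-parameters: refresh curvature information every 3
# iterations, keep the last 5 (y_k, s_k) pairs, and sample 5000 rows to
# estimate the Hessian. The random_seed value here is an assumption.
def _example_sqn_param():
    sqn_param = StochasticQuasiNewtonParam(update_interval_L=3, memory_M=5,
                                           sample_size=5000, random_seed=42)
    sqn_param.check()  # each field must be a positive integer
    return sqn_param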
| 1,814 | 32.611111 | 92 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/encrypt_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class EncryptParam(BaseParam):
"""
Define encryption method that used in federated ml.
Parameters
----------
method : {'Paillier'}
If method is 'Paillier', Paillier encryption will be used for federated ml.
To use non-encryption version in HomoLR, set this to None.
For detail of Paillier encryption, please check out the paper mentioned in README file.
key_length : int, default: 1024
Used to specify the length of key in this encryption method.
"""
def __init__(self, method=consts.PAILLIER, key_length=1024):
super(EncryptParam, self).__init__()
self.method = method
self.key_length = key_length
def check(self):
return True
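# --- Editor's illustrative sketch (not part of the original module) ---
# Build an encryption config with a longer Paillier key for stronger security;
# 2048 is an assumed choice, traded off against slower ciphertext arithmetic.
def _example_encrypt_param():
    encrypt_param = EncryptParam(method=consts.PAILLIER, key_length=2048)
    encrypt_param.check()
    return encrypt_param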
| 1,481 | 30.531915 | 95 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/feature_selection_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class UniqueValueParam(BaseParam):
"""
Use the difference between max-value and min-value to judge.
Parameters
----------
eps: float, default: 1e-5
        A column will be filtered if the difference between its max and min values is smaller than eps.
"""
def __init__(self, eps=1e-5):
self.eps = eps
def check(self):
descr = "Unique value param's"
self.check_positive_number(self.eps, descr)
return True
class IVValueSelectionParam(BaseParam):
"""
Use information values to select features.
Parameters
----------
    value_threshold: float, default: 0.0
        Used if iv_value_thres method is used in feature selection.
    host_thresholds: List of float or None, default: None
        Set thresholds for different hosts. If None, use the same threshold as guest. If provided, the order should
        match the host id setting.
    local_only: bool, default: False
        Whether to apply this filter locally only, without federated selection.
"""
def __init__(self, value_threshold=0.0, host_thresholds=None, local_only=False):
super().__init__()
self.value_threshold = value_threshold
self.host_thresholds = host_thresholds
self.local_only = local_only
def check(self):
if not isinstance(self.value_threshold, (float, int)):
raise ValueError("IV selection param's value_threshold should be float or int")
if self.host_thresholds is not None:
if not isinstance(self.host_thresholds, list):
raise ValueError("IV selection param's host_threshold should be list or None")
if not isinstance(self.local_only, bool):
raise ValueError("IV selection param's local_only should be bool")
return True
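# --- Editor's illustrative sketch (not part of the original module) ---
# Keep features whose IV exceeds 0.02 on guest, with per-host thresholds
# ordered to match the host id setting; all numbers here are assumptions.
def _example_iv_value_param():
    iv_value_param = IVValueSelectionParam(value_threshold=0.02,
                                           host_thresholds=[0.02, 0.05],
                                           local_only=False)
    iv_value_param.check()
    return iv_value_param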
class IVPercentileSelectionParam(BaseParam):
"""
Use information values to select features.
Parameters
----------
percentile_threshold: float, 0 <= percentile_threshold <= 1.0, default: 1.0
Percentile threshold for iv_percentile method
"""
def __init__(self, percentile_threshold=1.0, local_only=False):
super().__init__()
self.percentile_threshold = percentile_threshold
self.local_only = local_only
def check(self):
descr = "IV selection param's"
self.check_decimal_float(self.percentile_threshold, descr)
self.check_boolean(self.local_only, descr)
return True
class IVTopKParam(BaseParam):
"""
Use information values to select features.
Parameters
----------
k: int, should be greater than 0, default: 10
        Number of top features to select, ranked by information value
"""
def __init__(self, k=10, local_only=False):
super().__init__()
self.k = k
self.local_only = local_only
def check(self):
descr = "IV selection param's"
self.check_positive_integer(self.k, descr)
self.check_boolean(self.local_only, descr)
return True
class VarianceOfCoeSelectionParam(BaseParam):
"""
Use coefficient of variation to select features. When judging, the absolute value will be used.
Parameters
----------
value_threshold: float, default: 1.0
        Used if coefficient_of_variation_value_thres method is used in feature selection. Filter those
        columns whose coefficient of variation is smaller than the threshold.
"""
def __init__(self, value_threshold=1.0):
self.value_threshold = value_threshold
def check(self):
descr = "Coff of Variances param's"
self.check_positive_number(self.value_threshold, descr)
return True
class OutlierColsSelectionParam(BaseParam):
"""
    Given a percentile and a threshold, judge whether the value at the given quantile point is larger than the
    threshold; filter out those columns for which it is.
Parameters
----------
percentile: float, [0., 1.] default: 1.0
The percentile points to compare.
upper_threshold: float, default: 1.0
Percentile threshold for coefficient_of_variation_percentile method
"""
def __init__(self, percentile=1.0, upper_threshold=1.0):
self.percentile = percentile
self.upper_threshold = upper_threshold
def check(self):
descr = "Outlier Filter param's"
self.check_decimal_float(self.percentile, descr)
self.check_defined_type(self.upper_threshold, descr, ['float', 'int'])
return True
class CommonFilterParam(BaseParam):
"""
    All of the following parameters can be set with a single value or a list of values.
    Setting one single value means using only one metric to filter, while
    a list represents using multiple metrics.
    Please note that if some of the following values are set as lists, all of them
    should have the same length. Otherwise, an error will be raised. And if there exists a list
    type parameter, the metrics should be in list type as well.
Parameters
----------
metrics: str or list, default: depends on the specific filter
Indicate what metrics are used in this filter
filter_type: str, default: threshold
Should be one of "threshold", "top_k" or "top_percentile"
take_high: bool, default: True
When filtering, taking highest values or not.
threshold: float or int, default: 1
If filter type is threshold, this is the threshold value.
If it is "top_k", this is the k value.
If it is top_percentile, this is the percentile threshold.
host_thresholds: List of float or List of List of float or None, default: None
        Set thresholds for different hosts. If None, use the same threshold as guest. If provided, the order should
        match the host id setting.
select_federated: bool, default: True
Whether select federated with other parties or based on local variables
"""
def __init__(self, metrics, filter_type='threshold', take_high=True, threshold=1,
host_thresholds=None, select_federated=True):
super().__init__()
self.metrics = metrics
self.filter_type = filter_type
self.take_high = take_high
self.threshold = threshold
self.host_thresholds = host_thresholds
self.select_federated = select_federated
def check(self):
if not isinstance(self.metrics, list):
for value_name in ["filter_type", "take_high",
"threshold", "select_federated"]:
v = getattr(self, value_name)
if isinstance(v, list):
raise ValueError(f"{value_name}: {v} should not be a list when "
f"metrics: {self.metrics} is not a list")
setattr(self, value_name, [v])
setattr(self, "metrics", [self.metrics])
else:
expected_length = len(self.metrics)
for value_name in ["filter_type", "take_high",
"threshold", "select_federated"]:
v = getattr(self, value_name)
if isinstance(v, list):
if len(v) != expected_length:
raise ValueError(f"The parameter {v} should have same length "
f"with metrics")
else:
new_v = [v] * expected_length
setattr(self, value_name, new_v)
for v in self.filter_type:
if v not in ["threshold", "top_k", "top_percentile"]:
raise ValueError('filter_type should be one of '
'"threshold", "top_k", "top_percentile"')
descr = "hetero feature selection param's"
for v in self.take_high:
self.check_boolean(v, descr)
for idx, v in enumerate(self.threshold):
if self.filter_type[idx] == "threshold":
if not isinstance(v, (float, int)):
raise ValueError(descr + f"{v} should be a float or int")
elif self.filter_type[idx] == 'top_k':
self.check_positive_integer(v, descr)
else:
if not (v == 0 or v == 1):
self.check_decimal_float(v, descr)
if self.host_thresholds is not None:
if not isinstance(self.host_thresholds, list):
self.host_thresholds = [self.host_thresholds]
# raise ValueError("selection param's host_threshold should be list or None")
assert isinstance(self.select_federated, list)
for v in self.select_federated:
self.check_boolean(v, descr)
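# --- Editor's illustrative sketch (not part of the original module) ---
# check() broadcasts scalar settings to lists of the same length as `metrics`,
# which is what lets one CommonFilterParam drive several metrics at once.
# The metric names and thresholds below are assumptions for illustration.
def _example_common_filter_param():
    flt = CommonFilterParam(metrics=["iv", "statistic"],
                            filter_type="threshold",  # scalar: broadcast to both metrics
                            take_high=True,
                            threshold=[0.02, 10])     # list: must match len(metrics)
    flt.check()
    assert flt.filter_type == ["threshold", "threshold"]
    return flt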
class CorrelationFilterParam(BaseParam):
"""
    This filter follows these specific rules:
        1. Sort all the columns from high to low based on a specific metric, e.g. iv.
        2. Traverse each sorted column. If there exist other columns whose absolute
           correlation with it is larger than the threshold, they will be filtered.
Parameters
----------
sort_metric: str, default: iv
Specify which metric to be used to sort features.
threshold: float or int, default: 0.1
Correlation threshold
select_federated: bool, default: True
Whether select federated with other parties or based on local variables
"""
def __init__(self, sort_metric='iv', threshold=0.1, select_federated=True):
super().__init__()
self.sort_metric = sort_metric
self.threshold = threshold
self.select_federated = select_federated
def check(self):
descr = "Correlation Filter param's"
self.sort_metric = self.sort_metric.lower()
support_metrics = ['iv']
if self.sort_metric not in support_metrics:
raise ValueError(f"sort_metric in Correlation Filter should be one of {support_metrics}")
self.check_positive_number(self.threshold, descr)
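# --- Editor's illustrative sketch (not part of the original module) ---
# Plain-Python restatement of the rule documented above: walk columns sorted by
# the metric (high to low) and keep a column only if it is not too correlated
# with an already-kept one. `corr` is an assumed {(col_a, col_b): value}
# lookup, not a FATE API.
def _correlation_filter_rule(cols_sorted_by_metric, corr, threshold=0.1):
    kept = []
    for col in cols_sorted_by_metric:
        if all(abs(corr[(k, col)]) <= threshold for k in kept):
            kept.append(col)
    return kept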
class PercentageValueParam(BaseParam):
"""
Filter the columns that have a value that exceeds a certain percentage.
Parameters
----------
upper_pct: float, [0.1, 1.], default: 1.0
The upper percentage threshold for filtering, upper_pct should not be less than 0.1.
"""
def __init__(self, upper_pct=1.0):
super().__init__()
self.upper_pct = upper_pct
def check(self):
descr = "Percentage Filter param's"
if self.upper_pct not in [0, 1]:
self.check_decimal_float(self.upper_pct, descr)
if self.upper_pct < consts.PERCENTAGE_VALUE_LIMIT:
raise ValueError(descr + f" {self.upper_pct} not supported,"
f" should not be smaller than {consts.PERCENTAGE_VALUE_LIMIT}")
return True
class ManuallyFilterParam(BaseParam):
"""
    Specify columns that need to be filtered. If a column exists, it will be filtered directly; otherwise, it is ignored.
    Both the filter_out and left parameters only work for this specific filter. For instance, if you set some columns
    as left in this filter but those columns are filtered by other filters, those columns will NOT be left in the end.
    Please note that (left_col_indexes & left_col_names) cannot be used with
    (filter_out_indexes & filter_out_names) simultaneously.
Parameters
----------
    filter_out_indexes: list of int, default: None
        Specify columns' indexes to be filtered out
        Note that columns specified by `filter_out_indexes` and `filter_out_names` will be combined.
    filter_out_names : list of string, default: None
        Specify columns' names to be filtered out
        Note that columns specified by `filter_out_indexes` and `filter_out_names` will be combined.
    left_col_indexes: list of int, default: None
        Specify left_col_index
        Note that columns specified by `left_col_indexes` and `left_col_names` will be combined.
    left_col_names: list of string, default: None
        Specify left col names
        Note that columns specified by `left_col_indexes` and `left_col_names` will be combined.
"""
def __init__(self, filter_out_indexes=None, filter_out_names=None, left_col_indexes=None,
left_col_names=None):
super().__init__()
self.filter_out_indexes = filter_out_indexes
self.filter_out_names = filter_out_names
self.left_col_indexes = left_col_indexes
self.left_col_names = left_col_names
def check(self):
descr = "Manually Filter param's"
self.check_defined_type(self.filter_out_indexes, descr, ['list', 'NoneType'])
self.check_defined_type(self.filter_out_names, descr, ['list', 'NoneType'])
self.check_defined_type(self.left_col_indexes, descr, ['list', 'NoneType'])
self.check_defined_type(self.left_col_names, descr, ['list', 'NoneType'])
if (self.filter_out_indexes or self.filter_out_names) is not None and \
(self.left_col_names or self.left_col_indexes) is not None:
raise ValueError("(left_col_indexes & left_col_names) cannot use with"
" (filter_out_indexes & filter_out_names) simultaneously")
return True
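# --- Editor's illustrative sketch (not part of the original module) ---
# Filter out two columns by name; remember that filter_out_* and left_col_*
# cannot be combined in one instance. Column names here are assumptions.
def _example_manually_filter_param():
    manually_param = ManuallyFilterParam(filter_out_names=["x3", "x5"])
    manually_param.check()
    return manually_param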
class FeatureSelectionParam(BaseParam):
"""
Define the feature selection parameters.
Parameters
----------
    select_col_indexes: list or int, default: -1
        Specify which columns need to be calculated. -1 represents all columns.
        Note that columns specified by `select_col_indexes` and `select_names` will be combined.
    select_names : list of string, default: []
        Specify which columns need to be calculated. Each element in the list represents a column name in the header.
        Note that columns specified by `select_col_indexes` and `select_names` will be combined.
filter_methods: list, ["manually", "iv_filter", "statistic_filter",
"psi_filter", “hetero_sbt_filter", "homo_sbt_filter",
"hetero_fast_sbt_filter", "percentage_value",
"vif_filter", "correlation_filter"],
default: ["manually"]
The following methods will be deprecated in future version:
"unique_value", "iv_value_thres", "iv_percentile",
"coefficient_of_variation_value_thres", "outlier_cols"
        Specify the filter methods used in feature selection. The order of filters applied depends on this list.
        Please note that, if a percentile method is used after some other filter method,
        the percentile represents the ratio of the remaining features.
        e.g. If you have 10 features at the beginning and 8 remain after the first filter method, then asking for
        the top 80% highest-iv features will choose floor(0.8 * 8) = 6 features instead of 8.
    unique_param: filter the columns if all values in this feature are the same
    iv_value_param: Use information value to filter columns. If this method is set, a float threshold needs to be
        provided. Filter those columns whose iv is smaller than the threshold. Will be deprecated in the future.
    iv_percentile_param: Use information value to filter columns. If this method is set, a float ratio threshold
        needs to be provided. Pick floor(ratio * feature_num) features with higher iv. If multiple features around
        the threshold are the same, all those columns will be kept. Will be deprecated in the future.
    variance_coe_param: Use coefficient of variation to judge whether to filter or not.
        Will be deprecated in the future.
    outlier_param: Filter columns whose certain percentile value is larger than a threshold.
        Will be deprecated in the future.
    percentage_value_param: Filter the columns that have a value that exceeds a certain percentage.
    iv_param: Setting how to filter based on iv. It supports take-high mode only. All of "threshold",
        "top_k" and "top_percentile" are accepted. Check more details in CommonFilterParam. To
        use this filter, a hetero-feature-binning module has to be provided.
    statistic_param: Setting how to filter based on statistic values. All of "threshold",
        "top_k" and "top_percentile" are accepted. Check more details in CommonFilterParam.
        To use this filter, a data_statistic module has to be provided.
    psi_param: Setting how to filter based on psi values. All of "threshold",
        "top_k" and "top_percentile" are accepted. Its take_high property should be False
        to choose lower-psi features. Check more details in CommonFilterParam.
        To use this filter, a data_statistic module has to be provided.
use_anonymous: bool, default False
whether to interpret 'select_names' as anonymous names.
need_run: bool, default True
Indicate if this module needed to be run
"""
def __init__(self, select_col_indexes=-1, select_names=None, filter_methods=None,
unique_param=UniqueValueParam(),
iv_value_param=IVValueSelectionParam(),
iv_percentile_param=IVPercentileSelectionParam(),
iv_top_k_param=IVTopKParam(),
variance_coe_param=VarianceOfCoeSelectionParam(),
outlier_param=OutlierColsSelectionParam(),
manually_param=ManuallyFilterParam(),
percentage_value_param=PercentageValueParam(),
iv_param=CommonFilterParam(metrics=consts.IV),
statistic_param=CommonFilterParam(metrics=consts.MEAN),
psi_param=CommonFilterParam(metrics=consts.PSI,
take_high=False),
vif_param=CommonFilterParam(metrics=consts.VIF,
threshold=5.0,
take_high=False),
sbt_param=CommonFilterParam(metrics=consts.FEATURE_IMPORTANCE),
correlation_param=CorrelationFilterParam(),
use_anonymous=False,
need_run=True
):
super(FeatureSelectionParam, self).__init__()
self.correlation_param = correlation_param
self.vif_param = vif_param
self.select_col_indexes = select_col_indexes
if select_names is None:
self.select_names = []
else:
self.select_names = select_names
if filter_methods is None:
self.filter_methods = [consts.MANUALLY_FILTER]
else:
self.filter_methods = filter_methods
# deprecate in the future
self.unique_param = copy.deepcopy(unique_param)
self.iv_value_param = copy.deepcopy(iv_value_param)
self.iv_percentile_param = copy.deepcopy(iv_percentile_param)
self.iv_top_k_param = copy.deepcopy(iv_top_k_param)
self.variance_coe_param = copy.deepcopy(variance_coe_param)
self.outlier_param = copy.deepcopy(outlier_param)
self.percentage_value_param = copy.deepcopy(percentage_value_param)
self.manually_param = copy.deepcopy(manually_param)
self.iv_param = copy.deepcopy(iv_param)
self.statistic_param = copy.deepcopy(statistic_param)
self.psi_param = copy.deepcopy(psi_param)
self.sbt_param = copy.deepcopy(sbt_param)
self.need_run = need_run
self.use_anonymous = use_anonymous
def check(self):
descr = "hetero feature selection param's"
self.check_defined_type(self.filter_methods, descr, ['list'])
for idx, method in enumerate(self.filter_methods):
method = method.lower()
self.check_valid_value(method, descr, [consts.UNIQUE_VALUE, consts.IV_VALUE_THRES, consts.IV_PERCENTILE,
consts.COEFFICIENT_OF_VARIATION_VALUE_THRES, consts.OUTLIER_COLS,
consts.MANUALLY_FILTER, consts.PERCENTAGE_VALUE,
consts.IV_FILTER, consts.STATISTIC_FILTER, consts.IV_TOP_K,
consts.PSI_FILTER, consts.HETERO_SBT_FILTER,
consts.HOMO_SBT_FILTER, consts.HETERO_FAST_SBT_FILTER,
consts.VIF_FILTER, consts.CORRELATION_FILTER])
self.filter_methods[idx] = method
self.check_defined_type(self.select_col_indexes, descr, ['list', 'int'])
self.unique_param.check()
self.iv_value_param.check()
self.iv_percentile_param.check()
self.iv_top_k_param.check()
self.variance_coe_param.check()
self.outlier_param.check()
self.manually_param.check()
self.percentage_value_param.check()
self.iv_param.check()
for th in self.iv_param.take_high:
if not th:
raise ValueError("Iv filter should take higher iv features")
for m in self.iv_param.metrics:
if m != consts.IV:
raise ValueError("For iv filter, metrics should be 'iv'")
self.statistic_param.check()
self.psi_param.check()
for th in self.psi_param.take_high:
if th:
raise ValueError("PSI filter should take lower psi features")
for m in self.psi_param.metrics:
if m != consts.PSI:
raise ValueError("For psi filter, metrics should be 'psi'")
self.sbt_param.check()
for th in self.sbt_param.take_high:
if not th:
raise ValueError("SBT filter should take higher feature_importance features")
for m in self.sbt_param.metrics:
if m != consts.FEATURE_IMPORTANCE:
raise ValueError("For SBT filter, metrics should be 'feature_importance'")
self.vif_param.check()
for m in self.vif_param.metrics:
if m != consts.VIF:
raise ValueError("For VIF filter, metrics should be 'vif'")
self.correlation_param.check()
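# --- Editor's illustrative sketch (not part of the original module) ---
# Compose a selection pipeline: manual filtering first, then an IV threshold
# filter over all columns. Metric/threshold values are assumptions; the
# iv_filter additionally requires a hetero-feature-binning module upstream.
def _example_feature_selection_param():
    selection_param = FeatureSelectionParam(
        select_col_indexes=-1,
        filter_methods=[consts.MANUALLY_FILTER, consts.IV_FILTER],
        manually_param=ManuallyFilterParam(filter_out_names=["x0"]),
        iv_param=CommonFilterParam(metrics=consts.IV, filter_type="threshold",
                                   threshold=0.02))
    selection_param.check()
    return selection_param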
| 22,712 | 39.631485 | 122 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/boosting_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.param.base_param import BaseParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.predict_param import PredictParam
from pipeline.param import consts
from pipeline.param.callback_param import CallbackParam
import copy
import collections.abc
class ObjectiveParam(BaseParam):
"""
Define objective parameters that used in federated ml.
Parameters
----------
objective : {None, 'cross_entropy', 'lse', 'lae', 'log_cosh', 'tweedie', 'fair', 'huber'}
        None in host's config, should be str in guest's config.
        when task_type is classification, only 'cross_entropy' is supported;
        the other 6 types are supported in regression tasks
    params : None or list
        should be a non-empty list when objective is 'tweedie', 'fair' or 'huber',
        the first element of the list should be a float number larger than 0.0 when objective is 'fair' or 'huber',
        the first element of the list should be a float number in [1.0, 2.0) when objective is 'tweedie'
"""
def __init__(self, objective='cross_entropy', params=None):
self.objective = objective
self.params = params
def check(self, task_type=None):
if self.objective is None:
return True
descr = "objective param's"
if task_type not in [consts.CLASSIFICATION, consts.REGRESSION]:
self.objective = self.check_and_change_lower(self.objective,
["cross_entropy", "lse", "lae", "huber", "fair",
"log_cosh", "tweedie"],
descr)
if task_type == consts.CLASSIFICATION:
if self.objective != "cross_entropy":
raise ValueError("objective param's objective {} not supported".format(self.objective))
elif task_type == consts.REGRESSION:
self.objective = self.check_and_change_lower(self.objective,
["lse", "lae", "huber", "fair", "log_cosh", "tweedie"],
descr)
params = self.params
if self.objective in ["huber", "fair", "tweedie"]:
if type(params).__name__ != 'list' or len(params) < 1:
raise ValueError(
"objective param's params {} not supported, should be non-empty list".format(params))
if type(params[0]).__name__ not in ["float", "int", "long"]:
raise ValueError("objective param's params[0] {} not supported".format(self.params[0]))
if self.objective == 'tweedie':
                if params[0] < 1 or params[0] >= 2:
                    raise ValueError("in tweedie regression, objective params[0] should be in [1, 2)")
            if self.objective in ('fair', 'huber'):
                if params[0] <= 0.0:
                    raise ValueError("in {} regression, objective params[0] should be greater than 0.0".format(
                        self.objective))
return True
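# --- Editor's illustrative sketch (not part of the original module) ---
# A tweedie regression objective: params[0] is the tweedie variance power and
# must lie in [1.0, 2.0). The value 1.5 is an assumption.
def _example_objective_param():
    objective_param = ObjectiveParam(objective='tweedie', params=[1.5])
    objective_param.check(task_type=consts.REGRESSION)
    return objective_param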
class DecisionTreeParam(BaseParam):
"""
Define decision tree parameters that used in federated ml.
Parameters
----------
criterion_method : {"xgboost"}, default: "xgboost"
the criterion function to use
criterion_params: list or dict
        should be non-empty and its elements should be float numbers.
        if a list is offered, the first one is the l2 regularization value, and the second one is
        the l1 regularization value.
        if a dict is offered, make sure it contains the keys 'l1' and 'l2'.
        l1, l2 regularization values are non-negative floats.
        default: [0.1, 0] or {'l1': 0, 'l2': 0.1}
max_depth: positive integer
the max depth of a decision tree, default: 3
min_sample_split: int
least quantity of nodes to split, default: 2
min_impurity_split: float
least gain of a single split need to reach, default: 1e-3
min_child_weight: float
sum of hessian needed in child nodes. default is 0
    min_leaf_node: int
        when a node has no more samples than min_leaf_node, it becomes a leaf. default: 1
    max_split_nodes: positive integer
        no more than max_split_nodes nodes will have their splits computed in
        parallel in one batch, for memory considerations. default is 65536
feature_importance_type: {'split', 'gain'}
        if 'split', feature importances are calculated by feature split times,
        if 'gain', feature importances are calculated by feature split gain.
        default: 'split'
        Due to safety concerns, we adjusted the training strategy of Hetero-SBT in FATE-1.8;
        when running Hetero-SBT, this parameter is now abandoned.
In Hetero-SBT of FATE-1.8, guest side will compute split, gain of local features,
and receive anonymous feature importance results from hosts. Hosts will compute split
importance of local features.
use_missing: bool, accepted True, False only, use missing value in training process or not. default: False
zero_as_missing: bool
regard 0 as missing value or not,
will be use only if use_missing=True, default: False
deterministic: bool
ensure stability when computing histogram. Set this to true to ensure stable result when using
same data and same parameter. But it may slow down computation.
"""
def __init__(self, criterion_method="xgboost", criterion_params=[0.1, 0], max_depth=3,
min_sample_split=2, min_impurity_split=1e-3, min_leaf_node=1,
max_split_nodes=consts.MAX_SPLIT_NODES, feature_importance_type='split',
n_iter_no_change=True, tol=0.001, min_child_weight=0,
use_missing=False, zero_as_missing=False, deterministic=False):
super(DecisionTreeParam, self).__init__()
self.criterion_method = criterion_method
self.criterion_params = criterion_params
self.max_depth = max_depth
self.min_sample_split = min_sample_split
self.min_impurity_split = min_impurity_split
self.min_leaf_node = min_leaf_node
self.min_child_weight = min_child_weight
self.max_split_nodes = max_split_nodes
self.feature_importance_type = feature_importance_type
self.n_iter_no_change = n_iter_no_change
self.tol = tol
self.use_missing = use_missing
self.zero_as_missing = zero_as_missing
self.deterministic = deterministic
def check(self):
descr = "decision tree param"
self.criterion_method = self.check_and_change_lower(self.criterion_method,
["xgboost"],
descr)
if len(self.criterion_params) == 0:
raise ValueError("decisition tree param's criterio_params should be non empty")
if isinstance(self.criterion_params, list):
assert len(self.criterion_params) == 2, 'length of criterion_param should be 2: l1, l2 regularization ' \
'values are needed'
self.check_nonnegative_number(self.criterion_params[0], 'l2 reg value')
self.check_nonnegative_number(self.criterion_params[1], 'l1 reg value')
elif isinstance(self.criterion_params, dict):
assert 'l1' in self.criterion_params and 'l2' in self.criterion_params, 'l1 and l2 keys are needed in ' \
'criterion_params dict'
self.criterion_params = [self.criterion_params['l2'], self.criterion_params['l1']]
else:
raise ValueError('criterion_params should be a dict or a list contains l1, l2 reg value')
if type(self.max_depth).__name__ not in ["int", "long"]:
raise ValueError("decision tree param's max_depth {} not supported, should be integer".format(
self.max_depth))
if self.max_depth < 1:
raise ValueError("decision tree param's max_depth should be positive integer, no less than 1")
if type(self.min_sample_split).__name__ not in ["int", "long"]:
raise ValueError("decision tree param's min_sample_split {} not supported, should be integer".format(
self.min_sample_split))
if type(self.min_impurity_split).__name__ not in ["int", "long", "float"]:
raise ValueError("decision tree param's min_impurity_split {} not supported, should be numeric".format(
self.min_impurity_split))
if type(self.min_leaf_node).__name__ not in ["int", "long"]:
raise ValueError("decision tree param's min_leaf_node {} not supported, should be integer".format(
self.min_leaf_node))
if type(self.max_split_nodes).__name__ not in ["int", "long"] or self.max_split_nodes < 1:
raise ValueError("decision tree param's max_split_nodes {} not supported, " +
"should be positive integer between 1 and {}".format(self.max_split_nodes,
consts.MAX_SPLIT_NODES))
if type(self.n_iter_no_change).__name__ != "bool":
raise ValueError("decision tree param's n_iter_no_change {} not supported, should be bool type".format(
self.n_iter_no_change))
if type(self.tol).__name__ not in ["float", "int", "long"]:
raise ValueError("decision tree param's tol {} not supported, should be numeric".format(self.tol))
self.feature_importance_type = self.check_and_change_lower(self.feature_importance_type,
["split", "gain"],
descr)
self.check_nonnegative_number(self.min_child_weight, 'min_child_weight')
self.check_boolean(self.deterministic, 'deterministic')
return True
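# --- Editor's illustrative sketch (not part of the original module) ---
# criterion_params may be given as a dict; check() rewrites it into the
# [l2, l1] list form used internally. The regularization values are assumptions.
def _example_decision_tree_param():
    tree_param = DecisionTreeParam(criterion_params={'l1': 0, 'l2': 0.1},
                                   max_depth=4, min_leaf_node=10)
    tree_param.check()
    assert tree_param.criterion_params == [0.1, 0]  # [l2, l1]
    return tree_param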
class BoostingParam(BaseParam):
"""
Basic parameter for Boosting Algorithms
Parameters
----------
task_type : {'classification', 'regression'}, default: 'classification'
task type
objective_param : ObjectiveParam Object, default: ObjectiveParam()
objective param
learning_rate : float, int or long
the learning rate of secure boost. default: 0.3
num_trees : int or float
        the max number of boosting rounds. default: 5
subsample_feature_rate : float
a float-number in [0, 1], default: 1.0
n_iter_no_change : bool,
when True and residual error less than tol, tree building process will stop. default: True
    bin_num: positive integer greater than 1
        bin number used in quantile binning. default: 32
validation_freqs: None or positive integer or container object in python
        Do validation in training process or not.
        if equals None, will not do validation in train process;
        if equals positive integer, will validate data every validation_freqs epochs;
        if container object in python, will validate data if epochs belong to this container.
        e.g. validation_freqs = [10, 15] will validate data when the epoch equals 10 or 15.
Default: None
"""
def __init__(self, task_type=consts.CLASSIFICATION,
objective_param=ObjectiveParam(),
learning_rate=0.3, num_trees=5, subsample_feature_rate=1, n_iter_no_change=True,
tol=0.0001, bin_num=32,
predict_param=PredictParam(), cv_param=CrossValidationParam(),
validation_freqs=None, metrics=None, random_seed=100,
binning_error=consts.DEFAULT_RELATIVE_ERROR):
super(BoostingParam, self).__init__()
self.task_type = task_type
self.objective_param = copy.deepcopy(objective_param)
self.learning_rate = learning_rate
self.num_trees = num_trees
self.subsample_feature_rate = subsample_feature_rate
self.n_iter_no_change = n_iter_no_change
self.tol = tol
self.bin_num = bin_num
self.predict_param = copy.deepcopy(predict_param)
self.cv_param = copy.deepcopy(cv_param)
self.validation_freqs = validation_freqs
self.metrics = metrics
self.random_seed = random_seed
self.binning_error = binning_error
def check(self):
descr = "boosting tree param's"
if self.task_type not in [consts.CLASSIFICATION, consts.REGRESSION]:
raise ValueError("boosting_core tree param's task_type {} not supported, should be {} or {}".format(
self.task_type, consts.CLASSIFICATION, consts.REGRESSION))
self.objective_param.check(self.task_type)
if type(self.learning_rate).__name__ not in ["float", "int", "long"]:
raise ValueError("boosting_core tree param's learning_rate {} not supported, should be numeric".format(
self.learning_rate))
if type(self.subsample_feature_rate).__name__ not in ["float", "int", "long"] or \
self.subsample_feature_rate < 0 or self.subsample_feature_rate > 1:
raise ValueError(
"boosting_core tree param's subsample_feature_rate should be a numeric number between 0 and 1")
if type(self.n_iter_no_change).__name__ != "bool":
raise ValueError("boosting_core tree param's n_iter_no_change {} not supported, should be bool type".format(
self.n_iter_no_change))
if type(self.tol).__name__ not in ["float", "int", "long"]:
raise ValueError("boosting_core tree param's tol {} not supported, should be numeric".format(self.tol))
if type(self.bin_num).__name__ not in ["int", "long"] or self.bin_num < 2:
raise ValueError(
"boosting_core tree param's bin_num {} not supported, should be positive integer greater than 1".format(
self.bin_num))
if self.validation_freqs is None:
pass
elif isinstance(self.validation_freqs, int):
if self.validation_freqs < 1:
raise ValueError("validation_freqs should be larger than 0 when it's integer")
        elif not isinstance(self.validation_freqs, collections.abc.Container):
raise ValueError("validation_freqs should be None or positive integer or container")
if self.metrics is not None and not isinstance(self.metrics, list):
raise ValueError("metrics should be a list")
if self.random_seed is not None:
assert isinstance(self.random_seed, int) and self.random_seed >= 0, 'random seed must be an integer >= 0'
self.check_decimal_float(self.binning_error, descr)
return True
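# --- Editor's illustrative sketch (not part of the original module) ---
# validation_freqs accepts None, a positive int, or a container of epochs;
# here validation runs at epochs 10 and 15 only. All values are assumptions.
def _example_boosting_param():
    boosting_param = BoostingParam(task_type=consts.CLASSIFICATION,
                                   num_trees=20, learning_rate=0.3,
                                   validation_freqs=[10, 15])
    boosting_param.check()
    return boosting_param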
class HeteroBoostingParam(BoostingParam):
"""
Parameters
----------
    encrypt_param : EncryptParam Object
        encrypt method used in secure boost, default: EncryptParam()
encrypted_mode_calculator_param: EncryptedModeCalculatorParam object
the calculation mode use in secureboost,
default: EncryptedModeCalculatorParam()
"""
def __init__(self, task_type=consts.CLASSIFICATION,
objective_param=ObjectiveParam(),
learning_rate=0.3, num_trees=5, subsample_feature_rate=1, n_iter_no_change=True,
tol=0.0001, encrypt_param=EncryptParam(),
bin_num=32,
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
predict_param=PredictParam(), cv_param=CrossValidationParam(),
validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=False,
random_seed=100, binning_error=consts.DEFAULT_RELATIVE_ERROR):
super(HeteroBoostingParam, self).__init__(task_type, objective_param, learning_rate, num_trees,
subsample_feature_rate, n_iter_no_change, tol, bin_num,
predict_param, cv_param, validation_freqs, metrics=metrics,
random_seed=random_seed,
binning_error=binning_error)
self.encrypt_param = copy.deepcopy(encrypt_param)
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
self.early_stopping_rounds = early_stopping_rounds
self.use_first_metric_only = use_first_metric_only
def check(self):
super(HeteroBoostingParam, self).check()
self.encrypted_mode_calculator_param.check()
self.encrypt_param.check()
if self.early_stopping_rounds is None:
pass
elif isinstance(self.early_stopping_rounds, int):
if self.early_stopping_rounds < 1:
raise ValueError("early stopping rounds should be larger than 0 when it's integer")
if self.validation_freqs is None:
raise ValueError("validation freqs must be set when early stopping is enabled")
if not isinstance(self.use_first_metric_only, bool):
raise ValueError("use_first_metric_only should be a boolean")
return True
class HeteroSecureBoostParam(HeteroBoostingParam):
"""
Define boosting tree parameters that used in federated ml.
Parameters
----------
task_type : {'classification', 'regression'}, default: 'classification'
task type
tree_param : DecisionTreeParam Object, default: DecisionTreeParam()
tree param
objective_param : ObjectiveParam Object, default: ObjectiveParam()
objective param
learning_rate : float, int or long
the learning rate of secure boost. default: 0.3
num_trees : int or float
the max number of trees to build. default: 5
subsample_feature_rate : float
a float-number in [0, 1], default: 1.0
random_seed: int
seed that controls all random functions
n_iter_no_change : bool,
when True and residual error less than tol, tree building process will stop. default: True
    encrypt_param : EncryptParam Object
        encrypt method used in secure boost, default: EncryptParam(), this parameter
        is only for hetero-secureboost
bin_num: positive integer greater than 1
bin number use in quantile. default: 32
encrypted_mode_calculator_param: EncryptedModeCalculatorParam object
the calculation mode use in secureboost, default: EncryptedModeCalculatorParam(), only for hetero-secureboost
use_missing: bool
use missing value in training process or not. default: False
zero_as_missing: bool
regard 0 as missing value or not, will be use only if use_missing=True, default: False
validation_freqs: None or positive integer or container object in python
        Do validation in training process or not.
        if equals None, will not do validation in train process;
        if equals positive integer, will validate data every validation_freqs epochs;
        if container object in python, will validate data if epochs belong to this container.
        e.g. validation_freqs = [10, 15] will validate data when the epoch equals 10 or 15.
Default: None
The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to
speed up training by skipping validation rounds. When it is larger than 1, a number which is
divisible by "num_trees" is recommended, otherwise, you will miss the validation scores
        of the last training iteration.
    early_stopping_rounds: integer larger than 0
        will stop training if one metric of one validation data
        doesn't improve in the last early_stopping_rounds rounds.
        validation_freqs must also be set; early stopping is checked at every validation epoch.
metrics: list, default: []
Specify which metrics to be used when performing evaluation during training process.
If set as empty, default metrics will be used. For regression tasks, default metrics are
        ['root_mean_squared_error', 'mean_absolute_error']. For binary-classification tasks, default metrics
are ['auc', 'ks']. For multi-classification tasks, default metrics are ['accuracy', 'precision', 'recall']
use_first_metric_only: bool
use only the first metric for early stopping
    complete_secure: int, default: 0
        if complete_secure > 0, the first 'complete_secure' trees will be built using only guest features
sparse_optimization:
this parameter is abandoned in FATE-1.7.1
run_goss: bool
activate Gradient-based One-Side Sampling, which selects large gradient and small
gradient samples using top_rate and other_rate.
top_rate: float, the retain ratio of large gradient data, used when run_goss is True
other_rate: float, the retain ratio of small gradient data, used when run_goss is True
cipher_compress_error: This param is now abandoned
cipher_compress: bool, default is True, use cipher compressing to reduce computation cost and transfer cost
    boosting_strategy: str
        std: standard sbt setting
        mix: alternate using guest/host features to build trees. For example, the first 'tree_num_per_party' trees
            use guest features, the next 'tree_num_per_party' trees use host features, and so on
        layered: only supports 2 parties. When running layered mode, the first 'host_depth' layers will use host
            features, and the next 'guest_depth' layers will only use guest features
work_mode: str
This parameter has the same function as boosting_strategy, but is deprecated
    tree_num_per_party: int, every party will alternately build 'tree_num_per_party' trees until the max tree number
        is reached; this param is valid when boosting_strategy is mix
    guest_depth: int, guest will build the last guest_depth layers of a decision tree using guest features; valid
        when boosting_strategy is layered
    host_depth: int, host will build the first host_depth layers of a decision tree using host features; valid when
        boosting_strategy is layered
    multi_mode: str, decide which mode to use when running a multi-classification task:
        single_output: standard gbdt multi-classification strategy
        multi_output: every leaf gives a multi-dimensional prediction; using multi_output can save time
            by learning a model with fewer trees.
EINI_inference: bool
        default is False. This option changes the inference algorithm used in predict tasks to
        a secure prediction method that hides the decision path to enhance security in the inference
        step. This method is inspired by the EINI inference algorithm.
EINI_random_mask: bool
default is False
        multiply the predict result by a random float number to obscure the original predict result. This operation
        further enhances the security of the naive EINI algorithm.
EINI_complexity_check: bool
default is False
        check the complexity of tree models when running EINI algorithms. Complex models can easily hide their
        decision paths, while simple tree models cannot; therefore, if a tree model is too simple, it is not
        allowed to run the EINI predict algorithm.
"""
def __init__(self, tree_param: DecisionTreeParam = DecisionTreeParam(), task_type=consts.CLASSIFICATION,
objective_param=ObjectiveParam(),
learning_rate=0.3, num_trees=5, subsample_feature_rate=1.0, n_iter_no_change=True,
tol=0.0001, encrypt_param=EncryptParam(),
bin_num=32,
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
predict_param=PredictParam(), cv_param=CrossValidationParam(),
validation_freqs=None, early_stopping_rounds=None, use_missing=False, zero_as_missing=False,
                 complete_secure=0, metrics=None, use_first_metric_only=False, random_seed=100,
binning_error=consts.DEFAULT_RELATIVE_ERROR,
sparse_optimization=False, run_goss=False, top_rate=0.2, other_rate=0.1,
                 cipher_compress_error=None, cipher_compress=True, new_ver=True, boosting_strategy=consts.STD_TREE,
work_mode=None, tree_num_per_party=1, guest_depth=2, host_depth=3, callback_param=CallbackParam(),
multi_mode=consts.SINGLE_OUTPUT, EINI_inference=False, EINI_random_mask=False,
EINI_complexity_check=False):
super(HeteroSecureBoostParam, self).__init__(task_type, objective_param, learning_rate, num_trees,
subsample_feature_rate, n_iter_no_change, tol, encrypt_param,
bin_num, encrypted_mode_calculator_param, predict_param, cv_param,
validation_freqs, early_stopping_rounds, metrics=metrics,
use_first_metric_only=use_first_metric_only,
random_seed=random_seed,
binning_error=binning_error)
self.tree_param = copy.deepcopy(tree_param)
self.zero_as_missing = zero_as_missing
self.use_missing = use_missing
self.complete_secure = complete_secure
self.sparse_optimization = sparse_optimization
self.run_goss = run_goss
self.top_rate = top_rate
self.other_rate = other_rate
self.cipher_compress_error = cipher_compress_error
self.cipher_compress = cipher_compress
self.new_ver = new_ver
self.EINI_inference = EINI_inference
self.EINI_random_mask = EINI_random_mask
self.EINI_complexity_check = EINI_complexity_check
self.boosting_strategy = boosting_strategy
self.work_mode = work_mode
self.tree_num_per_party = tree_num_per_party
self.guest_depth = guest_depth
self.host_depth = host_depth
self.callback_param = copy.deepcopy(callback_param)
self.multi_mode = multi_mode
def check(self):
super(HeteroSecureBoostParam, self).check()
self.tree_param.check()
if not isinstance(self.use_missing, bool):
raise ValueError('use missing should be bool type')
if not isinstance(self.zero_as_missing, bool):
raise ValueError('zero as missing should be bool type')
self.check_boolean(self.run_goss, 'run goss')
self.check_decimal_float(self.top_rate, 'top rate')
self.check_decimal_float(self.other_rate, 'other rate')
self.check_positive_number(self.other_rate, 'other_rate')
self.check_positive_number(self.top_rate, 'top_rate')
self.check_boolean(self.new_ver, 'code version switcher')
self.check_boolean(self.cipher_compress, 'cipher compress')
self.check_boolean(self.EINI_inference, 'eini inference')
self.check_boolean(self.EINI_random_mask, 'eini random mask')
self.check_boolean(self.EINI_complexity_check, 'eini complexity check')
assert isinstance(self.complete_secure,
int) and self.complete_secure >= 0, "complete secure should be an int >= 0"
if self.work_mode is not None:
self.boosting_strategy = self.work_mode
if self.multi_mode not in [consts.SINGLE_OUTPUT, consts.MULTI_OUTPUT]:
raise ValueError('unsupported multi-classification mode')
if self.multi_mode == consts.MULTI_OUTPUT:
if self.boosting_strategy != consts.STD_TREE:
raise ValueError('MO trees only works when boosting strategy is std tree')
if not self.cipher_compress:
raise ValueError('Mo trees only works when cipher compress is enabled')
if self.boosting_strategy not in [consts.STD_TREE, consts.LAYERED_TREE, consts.MIX_TREE]:
raise ValueError('unknown sbt boosting strategy{}'.format(self.boosting_strategy))
for p in ["early_stopping_rounds", "validation_freqs", "metrics",
"use_first_metric_only"]:
# if self._warn_to_deprecate_param(p, "", ""):
if self._deprecated_params_set.get(p):
if "callback_param" in self.get_user_feeded():
raise ValueError(f"{p} and callback param should not be set simultaneously,"
f"{self._deprecated_params_set}, {self.get_user_feeded()}")
else:
self.callback_param.callbacks = ["PerformanceEvaluate"]
break
descr = "boosting_param's"
if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"):
self.callback_param.validation_freqs = self.validation_freqs
if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"):
self.callback_param.early_stopping_rounds = self.early_stopping_rounds
if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"):
self.callback_param.metrics = self.metrics
if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"):
self.callback_param.use_first_metric_only = self.use_first_metric_only
if self.top_rate + self.other_rate >= 1:
raise ValueError('sum of top rate and other rate should be smaller than 1')
return True
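# --- Editor's illustrative sketch (not part of the original module) ---
# Enable GOSS sampling: keep the top 20% large-gradient samples plus a random
# 10% of the rest; top_rate + other_rate must stay below 1. The tree count and
# learning rate are assumptions.
def _example_hetero_sbt_param():
    sbt_param = HeteroSecureBoostParam(num_trees=50, learning_rate=0.3,
                                       run_goss=True, top_rate=0.2,
                                       other_rate=0.1)
    sbt_param.check()
    return sbt_param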
class HomoSecureBoostParam(BoostingParam):
"""
Parameters
----------
backend: {'distributed', 'memory'}
decides which backend to use when computing histograms for homo-sbt
"""
def __init__(self, tree_param: DecisionTreeParam = DecisionTreeParam(), task_type=consts.CLASSIFICATION,
objective_param=ObjectiveParam(),
learning_rate=0.3, num_trees=5, subsample_feature_rate=1, n_iter_no_change=True,
tol=0.0001, bin_num=32, predict_param=PredictParam(), cv_param=CrossValidationParam(),
validation_freqs=None, use_missing=False, zero_as_missing=False, random_seed=100,
binning_error=consts.DEFAULT_RELATIVE_ERROR, backend=consts.DISTRIBUTED_BACKEND,
callback_param=CallbackParam(), multi_mode=consts.SINGLE_OUTPUT):
super(HomoSecureBoostParam, self).__init__(task_type=task_type,
objective_param=objective_param,
learning_rate=learning_rate,
num_trees=num_trees,
subsample_feature_rate=subsample_feature_rate,
n_iter_no_change=n_iter_no_change,
tol=tol,
bin_num=bin_num,
predict_param=predict_param,
cv_param=cv_param,
validation_freqs=validation_freqs,
random_seed=random_seed,
binning_error=binning_error
)
self.use_missing = use_missing
self.zero_as_missing = zero_as_missing
self.tree_param = copy.deepcopy(tree_param)
self.backend = backend
self.callback_param = copy.deepcopy(callback_param)
self.multi_mode = multi_mode
def check(self):
super(HomoSecureBoostParam, self).check()
self.tree_param.check()
if not isinstance(self.use_missing, bool):
raise ValueError('use missing should be bool type')
if not isinstance(self.zero_as_missing, bool):
raise ValueError('zero as missing should be bool type')
if self.backend not in [consts.MEMORY_BACKEND, consts.DISTRIBUTED_BACKEND]:
raise ValueError('unsupported backend')
if self.multi_mode not in [consts.SINGLE_OUTPUT, consts.MULTI_OUTPUT]:
raise ValueError('unsupported multi-classification mode')
for p in ["validation_freqs", "metrics"]:
# if self._warn_to_deprecate_param(p, "", ""):
if self._deprecated_params_set.get(p):
if "callback_param" in self.get_user_feeded():
raise ValueError(f"{p} and callback param should not be set simultaneously,"
f"{self._deprecated_params_set}, {self.get_user_feeded()}")
else:
self.callback_param.callbacks = ["PerformanceEvaluate"]
break
descr = "boosting_param's"
if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"):
self.callback_param.validation_freqs = self.validation_freqs
if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"):
self.callback_param.metrics = self.metrics
if self.multi_mode == consts.MULTI_OUTPUT:
if self.task_type == consts.REGRESSION:
raise ValueError('regression tasks not support multi-output trees')
return True
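# --- Editor's illustrative sketch (not part of the original module) ---
# Homo-SBT with the in-memory histogram backend, which can be preferable for
# small datasets over the distributed backend; the choice here is an assumption.
def _example_homo_sbt_param():
    homo_param = HomoSecureBoostParam(num_trees=10,
                                      backend=consts.MEMORY_BACKEND)
    homo_param.check()
    return homo_param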
| 35,176 | 47.253772 | 134 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/scorecard_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class ScorecardParam(BaseParam):
"""
Define method used for transforming prediction score to credit score
Parameters
----------
method : {"credit"}, default: 'credit'
score method, currently only supports "credit"
offset : int or float, default: 500
score baseline
factor : int or float, default: 20
scoring step, when odds double, result score increases by this factor
factor_base : int or float, default: 2
factor base, value ln(factor_base) is used for calculating result score
upper_limit_ratio : int or float, default: 3
upper bound for odds, credit score upper bound is upper_limit_ratio * offset
lower_limit_value : int or float, default: 0
lower bound for result score
need_run : bool, default: True
Indicate if this module needs to be run.
"""
def __init__(
self,
method="credit",
offset=500,
factor=20,
factor_base=2,
upper_limit_ratio=3,
lower_limit_value=0,
need_run=True):
super(ScorecardParam, self).__init__()
self.method = method
self.offset = offset
self.factor = factor
self.factor_base = factor_base
self.upper_limit_ratio = upper_limit_ratio
self.lower_limit_value = lower_limit_value
self.need_run = need_run
def check(self):
descr = "scorecard param"
if not isinstance(self.method, str):
raise ValueError(f"{descr}method {self.method} not supported, should be str type")
else:
user_input = self.method.lower()
if user_input == "credit":
self.method = consts.CREDIT
else:
raise ValueError(f"{descr} method {user_input} not supported")
if type(self.offset).__name__ not in ["int", "long", "float"]:
raise ValueError(f"{descr} offset must be numeric,"
f"received {type(self.offset)} instead.")
if type(self.factor).__name__ not in ["int", "long", "float"]:
raise ValueError(f"{descr} factor must be numeric,"
f"received {type(self.factor)} instead.")
if type(self.factor_base).__name__ not in ["int", "long", "float"]:
raise ValueError(f"{descr} factor_base must be numeric,"
f"received {type(self.factor_base)} instead.")
if type(self.upper_limit_ratio).__name__ not in ["int", "long", "float"]:
raise ValueError(f"{descr} upper_limit_ratio must be numeric,"
f"received {type(self.upper_limit_ratio)} instead.")
if type(self.lower_limit_value).__name__ not in ["int", "long", "float"]:
raise ValueError(f"{descr} lower_limit_value must be numeric,"
f"received {type(self.lower_limit_value)} instead.")
BaseParam.check_boolean(self.need_run, descr=descr + "need_run ")
return True
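# --- Editor's illustrative sketch (not part of the original module) ---
# A plain restatement of the credit-score mapping implied by the parameters
# above: the score rises by `factor` each time the odds grow by `factor_base`,
# then gets clipped into [lower_limit_value, upper_limit_ratio * offset].
# This is an assumed reconstruction for illustration (including the odds
# definition), not the FATE implementation itself.
def _example_credit_score(predict_score, offset=500, factor=20, factor_base=2,
                          upper_limit_ratio=3, lower_limit_value=0):
    import math
    # predict_score must lie strictly in (0, 1); odds of being a "good" sample
    odds = (1 - predict_score) / predict_score
    score = offset + factor / math.log(factor_base) * math.log(odds)
    return max(lower_limit_value, min(score, upper_limit_ratio * offset))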
| 3,804 | 34.560748 | 94 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/poisson_regression_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.glm_param import LinearModelParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.init_model_param import InitParam
from pipeline.param.stepwise_param import StepwiseParam
from pipeline.param import consts
class PoissonParam(LinearModelParam):
"""
Parameters used for Poisson Regression.
Parameters
----------
penalty : {'L2', 'L1'}, default: 'L2'
Penalty method used in Poisson. Please note that, when using encrypted version in HeteroPoisson,
'L1' is not supported.
tol : float, default: 1e-4
The tolerance of convergence
alpha : float, default: 1.0
Regularization strength coefficient.
optimizer : {'rmsprop', 'sgd', 'adam', 'adagrad'}, default: 'rmsprop'
Optimize method
batch_size : int, default: -1
Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
learning_rate : float, default: 0.01
Learning rate
max_iter : int, default: 20
The maximum iteration for training.
init_param: InitParam object, default: default InitParam object
Init param method object.
early_stop : str, 'weight_diff', 'diff' or 'abs', default: 'diff'
Method used to judge convergence.
a) diff: Use difference of loss between two iterations to judge whether converge.
b) weight_diff: Use difference between weights of two consecutive iterations
c) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < eps, it is converged.
exposure_colname: str or None, default: None
Name of optional exposure variable in dTable.
encrypt_param: EncryptParam object, default: default EncryptParam object
encrypt param
encrypted_mode_calculator_param: EncryptedModeCalculatorParam object, default: default EncryptedModeCalculatorParam object
encrypted mode calculator param
cv_param: CrossValidationParam object, default: default CrossValidationParam object
cv param
stepwise_param: StepwiseParam object, default: default StepwiseParam object
stepwise param
decay: int or float, default: 1
Decay rate for learning rate. learning rate will follow the following decay schedule.
lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t)
where t is the iter number.
decay_sqrt: bool, default: True
lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t)
validation_freqs: int, list, tuple, set, or None
validation frequency during training, required when using early stopping.
The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds.
When it is larger than 1, a number which is divisible by "max_iter" is recommended, otherwise, you will miss the validation scores of the last training iteration.
early_stopping_rounds: int, default: None
If positive number specified, at every specified training rounds, program checks for early stopping criteria.
Validation_freqs must also be set when using early stopping.
metrics: list or None, default: None
        Specify which metrics to be used when performing evaluation during training process. If metrics have not improved at early_stopping rounds, training stops before convergence.
If set as empty, default metrics will be used. For regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error']
use_first_metric_only: bool, default: False
Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement.
floating_point_precision: None or integer
if not None, use floating_point_precision-bit to speed up calculation,
e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
the result by 2**floating_point_precision in the end.
callback_param: CallbackParam object
callback param
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='rmsprop',
batch_size=-1, learning_rate=0.01, init_param=InitParam(),
max_iter=20, early_stop='diff',
exposure_colname=None,
encrypt_param=EncryptParam(),
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
cv_param=CrossValidationParam(), stepwise_param=StepwiseParam(),
decay=1, decay_sqrt=True,
validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=False,
floating_point_precision=23, callback_param=CallbackParam()):
super(PoissonParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer,
batch_size=batch_size, learning_rate=learning_rate,
init_param=init_param, max_iter=max_iter,
early_stop=early_stop, cv_param=cv_param, decay=decay,
decay_sqrt=decay_sqrt, validation_freqs=validation_freqs,
early_stopping_rounds=early_stopping_rounds, metrics=metrics,
floating_point_precision=floating_point_precision,
encrypt_param=encrypt_param,
use_first_metric_only=use_first_metric_only,
stepwise_param=stepwise_param,
callback_param=callback_param)
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
self.exposure_colname = exposure_colname
def check(self):
descr = "poisson_regression_param's "
super(PoissonParam, self).check()
if self.encrypt_param.method != consts.PAILLIER:
raise ValueError(
descr + "encrypt method supports 'Paillier' only")
if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad']:
raise ValueError(
descr + "optimizer not supported, optimizer should be"
" 'sgd', 'rmsprop', 'adam', or 'adagrad'")
if self.exposure_colname is not None:
if type(self.exposure_colname).__name__ != "str":
raise ValueError(
descr + "exposure_colname {} not supported, should be string type".format(self.exposure_colname))
self.encrypted_mode_calculator_param.check()
return True
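
# A minimal configuration sketch, not part of the original module: the values
# below are illustrative only, and "exposure" is a hypothetical column name.
if __name__ == "__main__":
    param = PoissonParam(
        penalty="L2",
        optimizer="rmsprop",
        max_iter=20,
        exposure_colname="exposure",  # hypothetical exposure column in the input table
    )
    param.check()  # raises ValueError on any unsupported setting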
| 7,751 | 47.754717 | 182 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/intersect_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
DEFAULT_RANDOM_BIT = 128
class EncodeParam(BaseParam):
"""
Define the hash method for raw intersect method
Parameters
----------
salt: str
the src data string will be str = str + salt, default by empty string
encode_method: {"none", "md5", "sha1", "sha224", "sha256", "sha384", "sha512", "sm3"}
        the hash method of src data string, support md5, sha1, sha224, sha256, sha384, sha512, sm3, default 'none'
base64: bool
if True, the result of hash will be changed to base64, default by False
"""
def __init__(self, salt='', encode_method='none', base64=False):
super().__init__()
self.salt = salt
self.encode_method = encode_method
self.base64 = base64
def check(self):
if type(self.salt).__name__ != "str":
raise ValueError(
"encode param's salt {} not supported, should be str type".format(
self.salt))
descr = "encode param's "
self.encode_method = self.check_and_change_lower(self.encode_method,
["none", consts.MD5, consts.SHA1, consts.SHA224,
consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
descr)
        if type(self.base64).__name__ != "bool":
            raise ValueError(
                "encode param's base64 {} not supported, should be bool type".format(self.base64))
return True
class RAWParam(BaseParam):
"""
Specify parameters for raw intersect method
Parameters
----------
use_hash: bool
whether to hash ids for raw intersect
salt: str
the src data string will be str = str + salt, default by empty string
hash_method: str
        the hash method of src data string, support md5, sha1, sha224, sha256, sha384, sha512, sm3, default 'none'
base64: bool
if True, the result of hash will be changed to base64, default by False
join_role: {"guest", "host"}
role who joins ids, supports "guest" and "host" only and effective only for raw.
If it is "guest", the host will send its ids to guest and find the intersection of
ids in guest; if it is "host", the guest will send its ids to host. Default by "guest";
"""
def __init__(self, use_hash=False, salt='', hash_method='none', base64=False, join_role=consts.GUEST):
super().__init__()
self.use_hash = use_hash
self.salt = salt
self.hash_method = hash_method
self.base64 = base64
self.join_role = join_role
def check(self):
descr = "raw param's "
self.check_boolean(self.use_hash, f"{descr}use_hash")
self.check_string(self.salt, f"{descr}salt")
self.hash_method = self.check_and_change_lower(self.hash_method,
["none", consts.MD5, consts.SHA1, consts.SHA224,
consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
f"{descr}hash_method")
self.check_boolean(self.base64, f"{descr}base_64")
self.join_role = self.check_and_change_lower(self.join_role, [consts.GUEST, consts.HOST], f"{descr}join_role")
return True
class RSAParam(BaseParam):
"""
Specify parameters for RSA intersect method
Parameters
----------
salt: str
the src data string will be str = str + salt, default ''
hash_method: str
the hash method of src data string, support sha256, sha384, sha512, sm3, default sha256
final_hash_method: str
the hash method of result data string, support md5, sha1, sha224, sha256, sha384, sha512, sm3, default sha256
split_calculation: bool
if True, Host & Guest split operations for faster performance, recommended on large data set
random_base_fraction: positive float
if not None, generate (fraction * public key id count) of r for encryption and reuse generated r;
note that value greater than 0.99 will be taken as 1, and value less than 0.01 will be rounded up to 0.01
key_length: int
value >= 1024, bit count of rsa key, default 1024
random_bit: positive int
it will define the size of blinding factor in rsa algorithm, default 128
"""
def __init__(self, salt='', hash_method='sha256', final_hash_method='sha256',
split_calculation=False, random_base_fraction=None, key_length=consts.DEFAULT_KEY_LENGTH,
random_bit=DEFAULT_RANDOM_BIT):
super().__init__()
self.salt = salt
self.hash_method = hash_method
self.final_hash_method = final_hash_method
self.split_calculation = split_calculation
self.random_base_fraction = random_base_fraction
self.key_length = key_length
self.random_bit = random_bit
def check(self):
descr = "rsa param's "
self.check_string(self.salt, f"{descr}salt")
self.hash_method = self.check_and_change_lower(self.hash_method,
[consts.SHA256, consts.SHA384, consts.SHA512, consts.SM3],
f"{descr}hash_method")
self.final_hash_method = self.check_and_change_lower(self.final_hash_method,
[consts.MD5, consts.SHA1, consts.SHA224,
consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
f"{descr}final_hash_method")
self.check_boolean(self.split_calculation, f"{descr}split_calculation")
if self.random_base_fraction:
self.check_positive_number(self.random_base_fraction, descr)
self.check_decimal_float(self.random_base_fraction, f"{descr}random_base_fraction")
self.check_positive_integer(self.key_length, f"{descr}key_length")
if self.key_length < 1024:
raise ValueError(f"key length must be >= 1024")
self.check_positive_integer(self.random_bit, f"{descr}random_bit")
return True
class DHParam(BaseParam):
"""
Define the hash method for DH intersect method
Parameters
----------
salt: str
the src data string will be str = str + salt, default ''
hash_method: str
        the hash method of src data string, support none, md5, sha1, sha224, sha256, sha384, sha512, sm3, default sha256
key_length: int, value >= 1024
the key length of the commutative cipher p, default 1024
"""
def __init__(self, salt='', hash_method='sha256', key_length=consts.DEFAULT_KEY_LENGTH):
super().__init__()
self.salt = salt
self.hash_method = hash_method
self.key_length = key_length
def check(self):
descr = "dh param's "
self.check_string(self.salt, f"{descr}salt")
self.hash_method = self.check_and_change_lower(self.hash_method,
["none", consts.MD5, consts.SHA1, consts.SHA224,
consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
f"{descr}hash_method")
self.check_positive_integer(self.key_length, f"{descr}key_length")
if self.key_length < 1024:
raise ValueError(f"key length must be >= 1024")
return True
class ECDHParam(BaseParam):
"""
Define the hash method for ECDH intersect method
Parameters
----------
salt: str
the src id will be str = str + salt, default ''
hash_method: str
the hash method of src id, support sha256, sha384, sha512, sm3, default sha256
curve: str
the name of curve, currently only support 'curve25519', which offers 128 bits of security
"""
def __init__(self, salt='', hash_method='sha256', curve=consts.CURVE25519):
super().__init__()
self.salt = salt
self.hash_method = hash_method
self.curve = curve
def check(self):
descr = "ecdh param's "
self.check_string(self.salt, f"{descr}salt")
self.hash_method = self.check_and_change_lower(self.hash_method,
[consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
f"{descr}hash_method")
self.curve = self.check_and_change_lower(self.curve, [consts.CURVE25519], f"{descr}curve")
return True
class IntersectCache(BaseParam):
def __init__(self, use_cache=False, id_type=consts.PHONE, encrypt_type=consts.SHA256):
"""
Parameters
----------
use_cache: whether to use cached ids; with ver1.7 and above, this param is ignored
id_type: with ver1.7 and above, this param is ignored
encrypt_type: with ver1.7 and above, this param is ignored
"""
super().__init__()
self.use_cache = use_cache
self.id_type = id_type
self.encrypt_type = encrypt_type
def check(self):
descr = "intersect_cache param's "
# self.check_boolean(self.use_cache, f"{descr}use_cache")
self.check_and_change_lower(self.id_type,
[consts.PHONE, consts.IMEI],
f"{descr}id_type")
self.check_and_change_lower(self.encrypt_type,
[consts.MD5, consts.SHA256],
f"{descr}encrypt_type")
class IntersectPreProcessParam(BaseParam):
"""
Specify parameters for pre-processing and cardinality-only mode
Parameters
----------
false_positive_rate: float
initial target false positive rate when creating Bloom Filter,
must be <= 0.5, default 1e-3
encrypt_method: str
encrypt method for encrypting id when performing cardinality_only task,
supports rsa only, default rsa;
specify rsa parameter setting with RSAParam
hash_method: str
        the hash method for inserting ids, support md5, sha1, sha224, sha256, sha384, sha512, sm3,
default sha256
preprocess_method: str
the hash method for encoding ids before insertion into filter, default sha256,
only effective for preprocessing
preprocess_salt: str
salt to be appended to hash result by preprocess_method before insertion into filter,
default '', only effective for preprocessing
random_state: int
seed for random salt generator when constructing hash functions,
salt is appended to hash result by hash_method when performing insertion, default None
filter_owner: str
role that constructs filter, either guest or host, default guest,
only effective for preprocessing
"""
def __init__(self, false_positive_rate=1e-3, encrypt_method=consts.RSA, hash_method='sha256',
preprocess_method='sha256', preprocess_salt='', random_state=None, filter_owner=consts.GUEST):
super().__init__()
self.false_positive_rate = false_positive_rate
self.encrypt_method = encrypt_method
self.hash_method = hash_method
self.preprocess_method = preprocess_method
self.preprocess_salt = preprocess_salt
self.random_state = random_state
self.filter_owner = filter_owner
def check(self):
descr = "intersect preprocess param's false_positive_rate "
self.check_decimal_float(self.false_positive_rate, descr)
self.check_positive_number(self.false_positive_rate, descr)
if self.false_positive_rate > 0.5:
raise ValueError(f"{descr} must be positive float no greater than 0.5")
descr = "intersect preprocess param's encrypt_method "
self.encrypt_method = self.check_and_change_lower(self.encrypt_method, [consts.RSA], descr)
descr = "intersect preprocess param's random_state "
if self.random_state:
self.check_nonnegative_number(self.random_state, descr)
descr = "intersect preprocess param's hash_method "
self.hash_method = self.check_and_change_lower(self.hash_method,
[consts.MD5, consts.SHA1, consts.SHA224,
consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
descr)
descr = "intersect preprocess param's preprocess_salt "
self.check_string(self.preprocess_salt, descr)
descr = "intersect preprocess param's preprocess_method "
self.preprocess_method = self.check_and_change_lower(self.preprocess_method,
[consts.MD5, consts.SHA1, consts.SHA224,
consts.SHA256, consts.SHA384, consts.SHA512,
consts.SM3],
descr)
descr = "intersect preprocess param's filter_owner "
self.filter_owner = self.check_and_change_lower(self.filter_owner,
[consts.GUEST, consts.HOST],
descr)
return True
class IntersectParam(BaseParam):
"""
Define the intersect method
Parameters
----------
intersect_method: str
it supports 'rsa', 'raw', 'dh', default by 'rsa'
random_bit: positive int
it will define the size of blinding factor in rsa algorithm, default 128
note that this param will be deprecated in future, please use random_bit in RSAParam instead
sync_intersect_ids: bool
In rsa, 'sync_intersect_ids' is True means guest or host will send intersect results to the others, and False will not.
while in raw, 'sync_intersect_ids' is True means the role of "join_role" will send intersect results and the others will get them.
Default by True.
join_role: str
role who joins ids, supports "guest" and "host" only and effective only for raw.
If it is "guest", the host will send its ids to guest and find the intersection of
ids in guest; if it is "host", the guest will send its ids to host. Default by "guest";
note this param will be deprecated in future version, please use 'join_role' in raw_params instead
only_output_key: bool
if false, the results of intersection will include key and value which from input data; if true, it will just include key from input
data and the value will be empty or filled by uniform string like "intersect_id"
with_encode: bool
if True, it will use hash method for intersect ids, effective for raw method only;
note that this param will be deprecated in future version, please use 'use_hash' in raw_params;
currently if this param is set to True,
specification by 'encode_params' will be taken instead of 'raw_params'.
encode_params: EncodeParam
effective only when with_encode is True;
this param will be deprecated in future version, use 'raw_params' in future implementation
raw_params: RAWParam
effective for raw method only
rsa_params: RSAParam
effective for rsa method only
dh_params: DHParam
effective for dh method only
ecdh_params: ECDHParam
effective for ecdh method only
join_method: {'inner_join', 'left_join'}
if 'left_join', participants will all include sample_id_generator's (imputed) ids in output,
default 'inner_join'
new_sample_id: bool
whether to generate new id for sample_id_generator's ids,
only effective when join_method is 'left_join' or when input data are instance with match id,
default False
sample_id_generator: str
role whose ids are to be kept,
effective only when join_method is 'left_join' or when input data are instance with match id,
default 'guest'
intersect_cache_param: IntersectCacheParam
specification for cache generation,
with ver1.7 and above, this param is ignored.
run_cache: bool
whether to store Host's encrypted ids, only valid when intersect method is 'rsa', 'dh', or 'ecdh', default False
cardinality_only: bool
whether to output intersection count(cardinality);
if sync_cardinality is True, then sync cardinality count with host(s)
cardinality_method: string
        specify which intersect method to use for counting cardinality, default "ecdh";
        note that with "rsa", estimated cardinality will be produced;
        while "dh" method outputs exact cardinality, it only supports single-host tasks
sync_cardinality: bool
whether to sync cardinality with all participants, default False,
only effective when cardinality_only set to True
run_preprocess: bool
whether to run preprocess process, default False
intersect_preprocess_params: IntersectPreProcessParam
used for preprocessing and cardinality_only mode
repeated_id_process: bool
if true, intersection will process the ids which can be repeatable;
        in ver 1.7 and above, repeated id process
will be automatically applied to data with instance id, this param will be ignored
repeated_id_owner: str
which role has the repeated id; in ver 1.7 and above, this param is ignored
allow_info_share: bool
in ver 1.7 and above, this param is ignored
info_owner: str
in ver 1.7 and above, this param is ignored
with_sample_id: bool
data with sample id or not, default False; in ver 1.7 and above, this param is ignored
"""
def __init__(self, intersect_method: str = consts.RSA, random_bit=DEFAULT_RANDOM_BIT, sync_intersect_ids=True,
join_role=consts.GUEST, only_output_key: bool = False,
with_encode=False, encode_params=EncodeParam(),
raw_params=RAWParam(), rsa_params=RSAParam(), dh_params=DHParam(), ecdh_params=ECDHParam(),
join_method=consts.INNER_JOIN, new_sample_id: bool = False, sample_id_generator=consts.GUEST,
intersect_cache_param=IntersectCache(), run_cache: bool = False,
cardinality_only: bool = False, sync_cardinality: bool = False, cardinality_method=consts.ECDH,
run_preprocess: bool = False,
intersect_preprocess_params=IntersectPreProcessParam(),
repeated_id_process=False, repeated_id_owner=consts.GUEST,
with_sample_id=False, allow_info_share: bool = False, info_owner=consts.GUEST):
super().__init__()
self.intersect_method = intersect_method
self.random_bit = random_bit
self.sync_intersect_ids = sync_intersect_ids
self.join_role = join_role
self.with_encode = with_encode
self.encode_params = copy.deepcopy(encode_params)
self.raw_params = copy.deepcopy(raw_params)
self.rsa_params = copy.deepcopy(rsa_params)
self.only_output_key = only_output_key
self.sample_id_generator = sample_id_generator
self.intersect_cache_param = copy.deepcopy(intersect_cache_param)
self.run_cache = run_cache
self.repeated_id_process = repeated_id_process
self.repeated_id_owner = repeated_id_owner
self.allow_info_share = allow_info_share
self.info_owner = info_owner
self.with_sample_id = with_sample_id
self.join_method = join_method
self.new_sample_id = new_sample_id
self.dh_params = copy.deepcopy(dh_params)
self.cardinality_only = cardinality_only
self.sync_cardinality = sync_cardinality
self.cardinality_method = cardinality_method
self.run_preprocess = run_preprocess
self.intersect_preprocess_params = copy.deepcopy(intersect_preprocess_params)
self.ecdh_params = copy.deepcopy(ecdh_params)
def check(self):
descr = "intersect param's "
self.intersect_method = self.check_and_change_lower(self.intersect_method,
[consts.RSA, consts.RAW, consts.DH, consts.ECDH],
f"{descr}intersect_method")
self.check_positive_integer(self.random_bit, f"{descr}random_bit")
self.check_boolean(self.sync_intersect_ids, f"{descr}intersect_ids")
self.join_role = self.check_and_change_lower(self.join_role,
[consts.GUEST, consts.HOST],
f"{descr}join_role")
self.check_boolean(self.with_encode, f"{descr}with_encode")
self.check_boolean(self.only_output_key, f"{descr}only_output_key")
self.join_method = self.check_and_change_lower(self.join_method, [consts.INNER_JOIN, consts.LEFT_JOIN],
f"{descr}join_method")
self.check_boolean(self.new_sample_id, f"{descr}new_sample_id")
self.sample_id_generator = self.check_and_change_lower(self.sample_id_generator,
[consts.GUEST, consts.HOST],
f"{descr}sample_id_generator")
if self.join_method == consts.LEFT_JOIN:
if not self.sync_intersect_ids:
raise ValueError(f"Cannot perform left join without sync intersect ids")
self.check_boolean(self.run_cache, f"{descr} run_cache")
self.encode_params.check()
self.raw_params.check()
self.rsa_params.check()
self.dh_params.check()
self.ecdh_params.check()
self.check_boolean(self.cardinality_only, f"{descr}cardinality_only")
self.check_boolean(self.sync_cardinality, f"{descr}sync_cardinality")
self.check_boolean(self.run_preprocess, f"{descr}run_preprocess")
self.intersect_preprocess_params.check()
if self.cardinality_only:
if self.cardinality_method not in [consts.RSA, consts.DH, consts.ECDH]:
raise ValueError(f"cardinality-only mode only support rsa, dh, ecdh.")
if self.cardinality_method == consts.RSA and self.rsa_params.split_calculation:
raise ValueError(f"cardinality-only mode only supports unified calculation.")
if self.run_preprocess:
if self.intersect_preprocess_params.false_positive_rate < 0.01:
raise ValueError(f"for preprocessing ids, false_positive_rate must be no less than 0.01")
if self.cardinality_only:
raise ValueError(f"cardinality_only mode cannot run preprocessing.")
if self.run_cache:
if self.intersect_method not in [consts.RSA, consts.DH, consts.ECDH]:
raise ValueError(f"Only rsa, dh, ecdh method supports cache.")
if self.intersect_method == consts.RSA and self.rsa_params.split_calculation:
raise ValueError(f"RSA split_calculation does not support cache.")
if self.cardinality_only:
raise ValueError(f"Cache is not available for cardinality_only mode.")
if self.run_preprocess:
raise ValueError(f"Preprocessing does not support cache.")
return True
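
# A minimal configuration sketch, not part of the original module: shows how
# the sub-param objects above are composed; all values are illustrative.
if __name__ == "__main__":
    param = IntersectParam(
        intersect_method=consts.RSA,
        only_output_key=True,
        rsa_params=RSAParam(hash_method="sha256", final_hash_method="sha256", key_length=2048),
    )
    param.check()  # lower-cases string fields and validates every sub-param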
| 25,007 | 41.895369 | 140 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/stepwise_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class StepwiseParam(BaseParam):
"""
Define stepwise params
Parameters
----------
score_name: {"AIC", "BIC"}, default: 'AIC'
Specify which model selection criterion to be used
mode: {"Hetero", "Homo"}, default: 'Hetero'
Indicate what mode is current task
role: {"Guest", "Host", "Arbiter"}, default: 'Guest'
Indicate what role is current party
direction: {"both", "forward", "backward"}, default: 'both'
Indicate which direction to go for stepwise.
'forward' means forward selection; 'backward' means elimination; 'both' means possible models of both directions are examined at each step.
    max_step: int, default: 10
        Specify total number of steps to run before forced stop.
    nvmin: int, default: 2
        Specify the min subset size of final model, cannot be lower than 2. When nvmin > 2, the final model size may be smaller than nvmin due to max_step limit.
nvmax: int, default: None
Specify the max subset size of final model, 2 <= nvmin <= nvmax. The final model size may be larger than nvmax due to max_step limit.
    need_stepwise: bool, default False
        Indicate if this module needs to be run
"""
def __init__(self, score_name="AIC", mode=consts.HETERO, role=consts.GUEST, direction="both",
max_step=10, nvmin=2, nvmax=None, need_stepwise=False):
super(StepwiseParam, self).__init__()
self.score_name = score_name
self.mode = mode
self.role = role
self.direction = direction
self.max_step = max_step
self.nvmin = nvmin
self.nvmax = nvmax
self.need_stepwise = need_stepwise
def check(self):
model_param_descr = "stepwise param's"
self.score_name = self.check_and_change_lower(self.score_name, ["aic", "bic"], model_param_descr)
self.check_valid_value(self.mode, model_param_descr, valid_values=[consts.HOMO, consts.HETERO])
self.check_valid_value(self.role, model_param_descr, valid_values=[consts.HOST, consts.GUEST, consts.ARBITER])
self.direction = self.check_and_change_lower(self.direction, ["forward", "backward", "both"], model_param_descr)
self.check_positive_integer(self.max_step, model_param_descr)
self.check_positive_integer(self.nvmin, model_param_descr)
if self.nvmin < 2:
raise ValueError(model_param_descr + " nvmin must be no less than 2.")
if self.nvmax is not None:
self.check_positive_integer(self.nvmax, model_param_descr)
if self.nvmin > self.nvmax:
raise ValueError(model_param_descr + " nvmax must be greater than nvmin.")
self.check_boolean(self.need_stepwise, model_param_descr)
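
# A minimal configuration sketch, not part of the original module: runs
# backward elimination with illustrative bounds on the final model size.
if __name__ == "__main__":
    param = StepwiseParam(score_name="AIC", mode=consts.HETERO, role=consts.GUEST,
                          direction="backward", max_step=5, nvmin=2, nvmax=8,
                          need_stepwise=True)
    param.check()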
| 3,512 | 40.821429 | 161 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/feature_imputation_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class FeatureImputationParam(BaseParam):
"""
Define feature imputation parameters
Parameters
----------
default_value : None or single object type or list
the value to replace missing value.
if None, it will use default value defined in federatedml/feature/imputer.py,
if single object, will fill missing value with this object,
        if list, its length should be the same as the input data's feature dimension,
        meaning that if some column happens to have missing values, they will be
        replaced by the element in the identical position of this list.
missing_fill_method : [None, 'min', 'max', 'mean', 'designated']
the method to replace missing value
col_missing_fill_method: None or dict of (column name, missing_fill_method) pairs
specifies method to replace missing value for each column;
any column not specified will take missing_fill_method,
if missing_fill_method is None, unspecified column will not be imputed;
missing_impute : None or list
        elements of the list can be any type, or auto-generated if value is None; defines which values are to be considered as missing, default: None
need_run: bool, default True
need run or not
"""
def __init__(self, default_value=0, missing_fill_method=None, col_missing_fill_method=None,
missing_impute=None, need_run=True):
super(FeatureImputationParam, self).__init__()
self.default_value = default_value
self.missing_fill_method = missing_fill_method
self.col_missing_fill_method = col_missing_fill_method
self.missing_impute = missing_impute
self.need_run = need_run
def check(self):
descr = "feature imputation param's "
self.check_boolean(self.need_run, descr + "need_run")
if self.missing_fill_method is not None:
self.missing_fill_method = self.check_and_change_lower(self.missing_fill_method,
['min', 'max', 'mean', 'designated'],
f"{descr}missing_fill_method ")
if self.col_missing_fill_method:
if not isinstance(self.col_missing_fill_method, dict):
raise ValueError(f"{descr}col_missing_fill_method should be a dict")
for k, v in self.col_missing_fill_method.items():
if not isinstance(k, str):
raise ValueError(f"{descr}col_missing_fill_method should contain str key(s) only")
v = self.check_and_change_lower(v,
['min', 'max', 'mean', 'designated'],
f"per column method specified in {descr} col_missing_fill_method dict")
self.col_missing_fill_method[k] = v
if self.missing_impute:
if not isinstance(self.missing_impute, list):
raise ValueError(f"{descr}missing_impute must be None or list.")
return True
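
# A minimal configuration sketch, not part of the original module: "x0"/"x1"
# are hypothetical column names used only for illustration.
if __name__ == "__main__":
    param = FeatureImputationParam(
        default_value=0,
        missing_fill_method="mean",  # fallback for columns not listed below
        col_missing_fill_method={"x0": "max", "x1": "designated"},
        missing_impute=["", "NA"],  # values to treat as missing
    )
    param.check()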
| 3,811 | 42.318182 | 137 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/hetero_kmeans_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class KmeansParam(BaseParam):
"""
Parameters used for K-means.
----------
    k : int, should be larger than 1, default 5.
The number of the centroids to generate.
max_iter : int, default 300.
Maximum number of iterations of the hetero-k-means algorithm to run.
    tol : float, default 0.001.
        The tolerance of convergence.
random_stat: int, random state, default is None
"""
def __init__(self, k=5, max_iter=300, tol=0.001, random_stat=None):
super(KmeansParam, self).__init__()
self.k = k
self.max_iter = max_iter
self.tol = tol
self.random_stat = random_stat
def check(self):
descr = "Kmeans_param's"
if not isinstance(self.k, int):
raise ValueError(
descr + "k {} not supported, should be int type".format(self.k))
elif self.k <= 1:
raise ValueError(
descr + "k {} not supported, should be larger than 1")
if not isinstance(self.max_iter, int):
raise ValueError(
descr + "max_iter not supported, should be int type".format(self.max_iter))
elif self.max_iter <= 0:
raise ValueError(
descr + "max_iter not supported, should be larger than 0".format(self.max_iter))
if not isinstance(self.tol, (float, int)):
raise ValueError(
descr + "tol not supported, should be float type".format(self.tol))
elif self.tol < 0:
raise ValueError(
descr + "tol not supported, should be larger than or equal to 0".format(self.tol))
| 2,318 | 34.136364 | 98 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/positive_unlabeled_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param import consts
from pipeline.param.base_param import BaseParam
class PositiveUnlabeledParam(BaseParam):
"""
Parameters used for positive unlabeled.
----------
strategy: {"probability", "quantity", "proportion", "distribution"}
The strategy of converting unlabeled value.
threshold: int or float, default: 0.9
The threshold in labeling strategy.
"""
def __init__(self, strategy="probability", threshold=0.9):
super(PositiveUnlabeledParam, self).__init__()
self.strategy = strategy
self.threshold = threshold
def check(self):
base_descr = "Positive Unlabeled Param's "
float_descr = "Probability or Proportion Strategy Param's "
int_descr = "Quantity Strategy Param's "
numeric_descr = "Distribution Strategy Param's "
self.check_valid_value(self.strategy, base_descr,
[consts.PROBABILITY, consts.QUANTITY, consts.PROPORTION, consts.DISTRIBUTION])
self.check_defined_type(self.threshold, base_descr, [consts.INT, consts.FLOAT])
if self.strategy == consts.PROBABILITY or self.strategy == consts.PROPORTION:
self.check_decimal_float(self.threshold, float_descr)
if self.strategy == consts.QUANTITY:
self.check_positive_integer(self.threshold, int_descr)
if self.strategy == consts.DISTRIBUTION:
self.check_positive_number(self.threshold, numeric_descr)
return True
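
# A minimal configuration sketch, not part of the original module: under the
# "quantity" strategy the threshold is read as a positive integer count, while
# "probability"/"proportion" expect a decimal fraction. This assumes the
# consts values match the lowercase strategy names used here.
if __name__ == "__main__":
    PositiveUnlabeledParam(strategy="probability", threshold=0.9).check()
    PositiveUnlabeledParam(strategy="quantity", threshold=100).check()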
| 2,173 | 35.233333 | 109 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/secure_add_example_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class SecureAddExampleParam(BaseParam):
def __init__(self, seed=None, partition=1, data_num=1000):
self.seed = seed
self.partition = partition
self.data_num = data_num
def check(self):
        if self.seed is not None and type(self.seed).__name__ != "int":
            raise ValueError("random seed should be None or an integer")
        if type(self.partition).__name__ != "int" or self.partition < 1:
            raise ValueError("partition should be an integer larger than 0")
        if type(self.data_num).__name__ != "int" or self.data_num < 1:
            raise ValueError("data_num should be an integer larger than 0")
| 1,371 | 36.081081 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/model_loader_param.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.param.base_param import BaseParam
class CheckpointParam(BaseParam):
def __init__(self, model_id: str = None, model_version: str = None, component_name: str = None,
step_index: int = None, step_name: str = None):
super().__init__()
self.model_id = model_id
self.model_version = model_version
self.component_name = component_name
self.step_index = step_index
self.step_name = step_name
if self.step_index is not None:
self.step_index = int(self.step_index)
def check(self):
for i in ('model_id', 'model_version', 'component_name'):
if getattr(self, i) is None:
return False
# do not set step_index and step_name at the same time
if self.step_index is not None:
return self.step_name is None
return self.step_name is not None
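
# A minimal usage sketch, not part of the original module: the identifiers
# below are placeholders. check() returns False unless model_id,
# model_version and component_name are all set, and it requires exactly one
# of step_index / step_name.
if __name__ == "__main__":
    param = CheckpointParam(model_id="guest-9999#host-10000#model",    # placeholder
                            model_version="20220101000000000000000",  # placeholder
                            component_name="hetero_lr_0",
                            step_index=5)
    assert param.check()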
| 1,523 | 35.285714 | 99 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/union_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class UnionParam(BaseParam):
"""
    Define the union method for combining multiple dTables and keeping entries with the same id
Parameters
----------
    need_run: bool, default True
        Indicate if this module needs to be run
    allow_missing: bool, default False
        Whether to allow mismatch between feature length and header length in the result. Note that empty tables will always be skipped regardless of this param setting.
keep_duplicate: bool, default False
Whether to keep entries with duplicated keys. If set to True, a new id will be generated for duplicated entry in the format {id}_{table_name}.
"""
def __init__(self, need_run=True, allow_missing=False, keep_duplicate=False):
super().__init__()
self.need_run = need_run
self.allow_missing = allow_missing
self.keep_duplicate = keep_duplicate
def check(self):
descr = "union param's "
if type(self.need_run).__name__ != "bool":
raise ValueError(
descr + "need_run {} not supported, should be bool".format(
self.need_run))
if type(self.allow_missing).__name__ != "bool":
raise ValueError(
descr + "allow_missing {} not supported, should be bool".format(
self.allow_missing))
if type(self.keep_duplicate).__name__ != "bool":
raise ValueError(
descr + "keep_duplicate {} not supported, should be bool".format(
self.keep_duplicate))
return True
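
# A minimal configuration sketch, not part of the original module: with
# keep_duplicate=True, duplicated keys are kept by renaming them (per the
# docstring, as {id}_{table_name}).
if __name__ == "__main__":
    param = UnionParam(need_run=True, allow_missing=False, keep_duplicate=True)
    param.check()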
| 2,285 | 34.71875 | 166 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/reader_param.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class ReaderParam:
def __init__(self, table=None):
self.table = table
def check(self):
return True
| 742 | 29.958333 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/statistics_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class StatisticsParam(BaseParam):
"""
Define statistics params
Parameters
----------
statistics: list, string, default "summary"
Specify the statistic types to be computed.
"summary" represents list: [consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION,
consts.MEDIAN, consts.MIN, consts.MAX,
consts.MISSING_COUNT, consts.SKEWNESS, consts.KURTOSIS]
"describe" represents list: [consts.COUNT, consts.MEAN,
consts.STANDARD_DEVIATION, consts.MIN, consts.MAX]
column_names: list of string, default []
Specify columns to be used for statistic computation by column names in header
column_indexes: list of int, default -1
Specify columns to be used for statistic computation by column order in header
-1 indicates to compute statistics over all columns
bias: bool, default: True
If False, the calculations of skewness and kurtosis are corrected for statistical bias.
need_run: bool, default True
Indicate whether to run this modules
"""
LEGAL_STAT = [consts.COUNT, consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION,
consts.MEDIAN, consts.MIN, consts.MAX, consts.VARIANCE,
consts.COEFFICIENT_OF_VARIATION, consts.MISSING_COUNT,
consts.SKEWNESS, consts.KURTOSIS]
    LEGAL_QUANTILE = re.compile("^(100|[1-9]?[0-9])%$")
def __init__(self, statistics="summary", column_names=None,
column_indexes=-1, need_run=True, abnormal_list=None,
quantile_error=consts.DEFAULT_RELATIVE_ERROR, bias=True):
super().__init__()
self.statistics = statistics
self.column_names = column_names
self.column_indexes = column_indexes
self.abnormal_list = abnormal_list
self.need_run = need_run
self.quantile_error = quantile_error
self.bias = bias
if column_names is None:
self.column_names = []
if column_indexes is None:
self.column_indexes = []
if abnormal_list is None:
self.abnormal_list = []
@staticmethod
def extend_statistics(statistic_name):
if statistic_name == "summary":
return [consts.SUM, consts.MEAN, consts.STANDARD_DEVIATION,
consts.MEDIAN, consts.MIN, consts.MAX,
consts.MISSING_COUNT, consts.SKEWNESS, consts.KURTOSIS,
consts.COEFFICIENT_OF_VARIATION]
if statistic_name == "describe":
return [consts.COUNT, consts.MEAN, consts.STANDARD_DEVIATION, consts.MIN, consts.MAX]
@staticmethod
def find_stat_name_match(stat_name):
if stat_name in StatisticsParam.LEGAL_STAT or StatisticsParam.LEGAL_QUANTILE.match(stat_name):
return True
return False
# match_result = [legal_name == stat_name for legal_name in StatisticsParam.LEGAL_STAT]
# match_result.append(0 if LEGAL_QUANTILE.match(stat_name) is None else True)
# match_found = sum(match_result) > 0
# return match_found
def check(self):
model_param_descr = "Statistics's param statistics"
BaseParam.check_boolean(self.need_run, model_param_descr)
if not isinstance(self.statistics, list):
if self.statistics in [consts.DESCRIBE, consts.SUMMARY]:
self.statistics = StatisticsParam.extend_statistics(self.statistics)
else:
self.statistics = [self.statistics]
for stat_name in self.statistics:
match_found = StatisticsParam.find_stat_name_match(stat_name)
if not match_found:
raise ValueError(f"Illegal statistics name provided: {stat_name}.")
model_param_descr = "Statistics's param column_names"
if not isinstance(self.column_names, list):
raise ValueError(f"column_names should be list of string.")
for col_name in self.column_names:
BaseParam.check_string(col_name, model_param_descr)
model_param_descr = "Statistics's param column_indexes"
if not isinstance(self.column_indexes, list) and self.column_indexes != -1:
raise ValueError(f"column_indexes should be list of int or -1.")
if self.column_indexes != -1:
for col_index in self.column_indexes:
if not isinstance(col_index, int):
raise ValueError(f"{model_param_descr} should be int or list of int")
if col_index < -consts.FLOAT_ZERO:
raise ValueError(f"{model_param_descr} should be non-negative int value(s)")
if not isinstance(self.abnormal_list, list):
raise ValueError(f"abnormal_list should be list of int or string.")
self.check_decimal_float(self.quantile_error, "Statistics's param quantile_error ")
self.check_boolean(self.bias, "Statistics's param bias ")
return True
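
# A minimal configuration sketch, not part of the original module: mixes a
# named statistic with a quantile in the "N%" form accepted by
# LEGAL_QUANTILE; "mean" is assumed to equal consts.MEAN.
if __name__ == "__main__":
    param = StatisticsParam(statistics=["mean", "95%"],
                            column_indexes=-1,  # -1 means all columns
                            bias=False)
    param.check()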
| 5,750 | 41.286765 | 102 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/linear_regression_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.glm_param import LinearModelParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.init_model_param import InitParam
from pipeline.param.sqn_param import StochasticQuasiNewtonParam
from pipeline.param.stepwise_param import StepwiseParam
from pipeline.param import consts
class LinearParam(LinearModelParam):
"""
Parameters used for Linear Regression.
Parameters
----------
penalty : {'L2' or 'L1'}
Penalty method used in LinR. Please note that, when using encrypted version in HeteroLinR,
'L1' is not supported.
tol : float, default: 1e-4
The tolerance of convergence
alpha : float, default: 1.0
Regularization strength coefficient.
optimizer : {'sgd', 'rmsprop', 'adam', 'sqn', 'adagrad'}
Optimize method
batch_size : int, default: -1
Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
learning_rate : float, default: 0.01
Learning rate
max_iter : int, default: 20
The maximum iteration for training.
init_param: InitParam object, default: default InitParam object
Init param method object.
    early_stop : {'diff', 'abs', 'weight_diff'}
Method used to judge convergence.
a) diff: Use difference of loss between two iterations to judge whether converge.
b) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < tol, it is converged.
c) weight_diff: Use difference between weights of two consecutive iterations
encrypt_param: EncryptParam object, default: default EncryptParam object
encrypt param
encrypted_mode_calculator_param: EncryptedModeCalculatorParam object, default: default EncryptedModeCalculatorParam object
encrypted mode calculator param
cv_param: CrossValidationParam object, default: default CrossValidationParam object
cv param
decay: int or float, default: 1
Decay rate for learning rate. learning rate will follow the following decay schedule.
lr = lr0/(1+decay*t) if decay_sqrt is False. If decay_sqrt is True, lr = lr0 / sqrt(1+decay*t)
where t is the iter number.
decay_sqrt: Bool, default: True
lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t)
validation_freqs: int, list, tuple, set, or None
validation frequency during training, required when using early stopping.
The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to speed up training by skipping validation rounds.
When it is larger than 1, a number which is divisible by "max_iter" is recommended, otherwise, you will miss the validation scores of the last training iteration.
early_stopping_rounds: int, default: None
If positive number specified, at every specified training rounds, program checks for early stopping criteria.
Validation_freqs must also be set when using early stopping.
metrics: list or None, default: None
        Specify which metrics to be used when performing evaluation during training process. If metrics have not improved at early_stopping rounds, training stops before convergence.
If set as empty, default metrics will be used. For regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error']
use_first_metric_only: bool, default: False
Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement.
floating_point_precision: None or integer
if not None, use floating_point_precision-bit to speed up calculation,
e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
the result by 2**floating_point_precision in the end.
callback_param: CallbackParam object
callback param
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='sgd',
batch_size=-1, learning_rate=0.01, init_param=InitParam(),
max_iter=20, early_stop='diff',
encrypt_param=EncryptParam(), sqn_param=StochasticQuasiNewtonParam(),
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
cv_param=CrossValidationParam(), decay=1, decay_sqrt=True, validation_freqs=None,
early_stopping_rounds=None, stepwise_param=StepwiseParam(), metrics=None, use_first_metric_only=False,
floating_point_precision=23, callback_param=CallbackParam()):
super(LinearParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer,
batch_size=batch_size, learning_rate=learning_rate,
init_param=init_param, max_iter=max_iter, early_stop=early_stop,
encrypt_param=encrypt_param, cv_param=cv_param, decay=decay,
decay_sqrt=decay_sqrt, validation_freqs=validation_freqs,
early_stopping_rounds=early_stopping_rounds,
stepwise_param=stepwise_param, metrics=metrics,
use_first_metric_only=use_first_metric_only,
floating_point_precision=floating_point_precision,
callback_param=callback_param)
self.sqn_param = copy.deepcopy(sqn_param)
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
def check(self):
descr = "linear_regression_param's "
super(LinearParam, self).check()
if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad', 'sqn']:
raise ValueError(
descr + "optimizer not supported, optimizer should be"
" 'sgd', 'rmsprop', 'adam', 'sqn' or 'adagrad'")
self.sqn_param.check()
if self.encrypt_param.method != consts.PAILLIER:
raise ValueError(
descr + "encrypt method supports 'Paillier' only")
return True
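
# A minimal configuration sketch, not part of the original module: the
# hyper-parameter values are illustrative only.
if __name__ == "__main__":
    param = LinearParam(penalty="L2", optimizer="sgd", max_iter=50, early_stop="diff")
    param.check()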
| 7,205 | 48.356164 | 182 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/__init__.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.boosting_param import HeteroSecureBoostParam, HomoSecureBoostParam
from pipeline.param.column_expand_param import ColumnExpandParam
from pipeline.param.data_split_param import DataSplitParam
from pipeline.param.dataio_param import DataIOParam
from pipeline.param.data_transform_param import DataTransformParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.evaluation_param import EvaluateParam
from pipeline.param.feature_binning_param import FeatureBinningParam
from pipeline.param.feldman_verifiable_sum_param import FeldmanVerifiableSumParam
from pipeline.param.ftl_param import FTLParam
from pipeline.param.hetero_kmeans_param import KmeansParam
from pipeline.param.hetero_nn_param import HeteroNNParam
from pipeline.param.homo_nn_param import HomoNNParam
from pipeline.param.homo_onehot_encoder_param import HomoOneHotParam
from pipeline.param.init_model_param import InitParam
from pipeline.param.intersect_param import IntersectParam
from pipeline.param.linear_regression_param import LinearParam
from pipeline.param.local_baseline_param import LocalBaselineParam
from pipeline.param.logistic_regression_param import HeteroLogisticParam, HomoLogisticParam
from pipeline.param.pearson_param import PearsonParam
from pipeline.param.poisson_regression_param import PoissonParam
from pipeline.param.psi_param import PSIParam
from pipeline.param.sample_param import SampleParam
from pipeline.param.sample_weight_param import SampleWeightParam
from pipeline.param.scale_param import ScaleParam
from pipeline.param.scorecard_param import ScorecardParam
from pipeline.param.statistics_param import StatisticsParam
from pipeline.param.union_param import UnionParam
from pipeline.param.boosting_param import ObjectiveParam
from pipeline.param.boosting_param import DecisionTreeParam
from pipeline.param.predict_param import PredictParam
from pipeline.param.feature_imputation_param import FeatureImputationParam
from pipeline.param.label_transform_param import LabelTransformParam
from pipeline.param.sir_param import SecureInformationRetrievalParam
from pipeline.param.cache_loader_param import CacheLoaderParam
from pipeline.param.hetero_sshe_lr_param import HeteroSSHELRParam
from pipeline.param.hetero_sshe_linr_param import HeteroSSHELinRParam
from pipeline.param.positive_unlabeled_param import PositiveUnlabeledParam
__all__ = ["HeteroSecureBoostParam", "HomoSecureBoostParam",
"ColumnExpandParam", "DataSplitParam", "DataIOParam", "EncryptParam",
"EvaluateParam", "FeatureBinningParam", "FeldmanVerifiableSumParam", "FTLParam",
"KmeansParam", "HeteroNNParam", "HomoNNParam", "HomoOneHotParam", "InitParam",
"IntersectParam", "LinearParam", "LocalBaselineParam", "HeteroLogisticParam",
"HomoLogisticParam", "PearsonParam", "PoissonParam", "PSIParam", "SampleParam",
"SampleWeightParam", "ScaleParam", "ScorecardParam",
"UnionParam", "ObjectiveParam", "DecisionTreeParam", "PredictParam",
"FeatureImputationParam", "LabelTransformParam",
"SecureInformationRetrievalParam", "CacheLoaderParam", "HeteroSSHELRParam",
"HeteroSSHELinRParam", "PositiveUnlabeledParam"]
| 3,842 | 56.358209 | 91 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/data_split_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class DataSplitParam(BaseParam):
"""
Define data split param that used in data split.
Parameters
----------
random_state : None or int, default: None
Specify the random state for shuffle.
test_size : float or int or None, default: 0.0
Specify test data set size.
float value specifies fraction of input data set, int value specifies exact number of data instances
train_size : float or int or None, default: 0.8
Specify train data set size.
float value specifies fraction of input data set, int value specifies exact number of data instances
validate_size : float or int or None, default: 0.2
Specify validate data set size.
float value specifies fraction of input data set, int value specifies exact number of data instances
stratified : bool, default: False
Define whether sampling should be stratified, according to label value.
shuffle : bool, default: True
        Define whether to shuffle before splitting or not.
split_points : None or list, default : None
Specify the point(s) by which continuous label values are bucketed into bins for stratified split.
        e.g. [0.2] for two bins or [0.1, 1, 3] for 4 bins
need_run: bool, default: True
Specify whether to run data split
"""
def __init__(self, random_state=None, test_size=None, train_size=None, validate_size=None, stratified=False,
shuffle=True, split_points=None, need_run=True):
super(DataSplitParam, self).__init__()
self.random_state = random_state
self.test_size = test_size
self.train_size = train_size
self.validate_size = validate_size
self.stratified = stratified
self.shuffle = shuffle
self.split_points = split_points
self.need_run = need_run
def check(self):
model_param_descr = "data split param's "
if self.random_state is not None:
if not isinstance(self.random_state, int):
raise ValueError(f"{model_param_descr} random state should be int type")
BaseParam.check_nonnegative_number(self.random_state, f"{model_param_descr} random_state ")
if self.test_size is not None:
BaseParam.check_nonnegative_number(self.test_size, f"{model_param_descr} test_size ")
if isinstance(self.test_size, float):
BaseParam.check_decimal_float(self.test_size, f"{model_param_descr} test_size ")
if self.train_size is not None:
BaseParam.check_nonnegative_number(self.train_size, f"{model_param_descr} train_size ")
if isinstance(self.train_size, float):
BaseParam.check_decimal_float(self.train_size, f"{model_param_descr} train_size ")
if self.validate_size is not None:
BaseParam.check_nonnegative_number(self.validate_size, f"{model_param_descr} validate_size ")
if isinstance(self.validate_size, float):
BaseParam.check_decimal_float(self.validate_size, f"{model_param_descr} validate_size ")
# use default size values if none given
if self.test_size is None and self.train_size is None and self.validate_size is None:
self.test_size = 0.0
self.train_size = 0.8
self.validate_size = 0.2
BaseParam.check_boolean(self.stratified, f"{model_param_descr} stratified ")
BaseParam.check_boolean(self.shuffle, f"{model_param_descr} shuffle ")
BaseParam.check_boolean(self.need_run, f"{model_param_descr} need run ")
if self.split_points is not None:
if not isinstance(self.split_points, list):
raise ValueError(f"{model_param_descr} split_points should be list type")
return True
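# Hedged usage sketch (illustrative values, not from the source): float sizes are
# fractions of the input data and should describe a full partition; check() fills
# in the default 0.0/0.8/0.2 split when all three sizes are left as None.
if __name__ == "__main__":
    example_param = DataSplitParam(test_size=0.2, train_size=0.6, validate_size=0.2,
                                   stratified=False, shuffle=True, random_state=42)
    example_param.check()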
| 4,515 | 42.009524 | 112 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/feature_binning_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.base_param import BaseParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param import consts
class TransformParam(BaseParam):
"""
    Define how to transform the columns
Parameters
----------
    transform_cols : list of column index, default: -1
        Specify which columns need to be transformed. If column index is None, none of the columns will be transformed.
        If it is -1, it will use the same columns as cols in the binning module.
        Note that columns specified by `transform_cols` and `transform_names` will be combined.
    transform_names: list of string, default: []
        Specify which columns need to be transformed by name. Each element in the list represents a column name in the header.
        Note that columns specified by `transform_cols` and `transform_names` will be combined.
    transform_type: str, 'bin_num' or 'woe' or None, default: 'bin_num'
        Specify which values will replace the original ones in these columns.
        1. bin_num: Transform the original feature value to the index of the bin it belongs to.
        2. woe: This is valid for the guest party only. It will replace the original value with its WOE value.
        3. None: nothing will be replaced.
"""
def __init__(self, transform_cols=-1, transform_names=None, transform_type="bin_num"):
super(TransformParam, self).__init__()
self.transform_cols = transform_cols
self.transform_names = transform_names
self.transform_type = transform_type
def check(self):
descr = "Transform Param's "
if self.transform_cols is not None and self.transform_cols != -1:
self.check_defined_type(self.transform_cols, descr, ['list'])
self.check_defined_type(self.transform_names, descr, ['list', "NoneType"])
if self.transform_names is not None:
for name in self.transform_names:
if not isinstance(name, str):
raise ValueError("Elements in transform_names should be string type")
self.check_valid_value(self.transform_type, descr, ['bin_num', 'woe', None])
class OptimalBinningParam(BaseParam):
"""
Indicate optimal binning params
Parameters
----------
metric_method: str, default: "iv"
The algorithm metric method. Support iv, gini, ks, chi-square
min_bin_pct: float, default: 0.05
The minimum percentage of each bucket
max_bin_pct: float, default: 1.0
The maximum percentage of each bucket
    init_bin_nums: int, default: 1000
        Number of bins at initialization
    mixture: bool, default: True
        Whether each bucket needs event and non-event records
    init_bucket_method: str, default: 'quantile'
        Initial bucketing method. Accepts 'quantile' and 'bucket'.
"""
def __init__(self, metric_method='iv', min_bin_pct=0.05, max_bin_pct=1.0,
init_bin_nums=1000, mixture=True, init_bucket_method='quantile'):
super().__init__()
self.init_bucket_method = init_bucket_method
self.metric_method = metric_method
self.max_bin = None
self.mixture = mixture
self.max_bin_pct = max_bin_pct
self.min_bin_pct = min_bin_pct
self.init_bin_nums = init_bin_nums
self.adjustment_factor = None
def check(self):
descr = "hetero binning's optimal binning param's"
self.check_string(self.metric_method, descr)
self.metric_method = self.metric_method.lower()
if self.metric_method in ['chi_square', 'chi-square']:
self.metric_method = 'chi_square'
self.check_valid_value(self.metric_method, descr, ['iv', 'gini', 'chi_square', 'ks'])
self.check_positive_integer(self.init_bin_nums, descr)
self.init_bucket_method = self.init_bucket_method.lower()
self.check_valid_value(self.init_bucket_method, descr, ['quantile', 'bucket'])
if self.max_bin_pct not in [1, 0]:
self.check_decimal_float(self.max_bin_pct, descr)
if self.min_bin_pct not in [1, 0]:
self.check_decimal_float(self.min_bin_pct, descr)
if self.min_bin_pct > self.max_bin_pct:
raise ValueError("Optimal binning's min_bin_pct should less or equal than max_bin_pct")
self.check_boolean(self.mixture, descr)
self.check_positive_integer(self.init_bin_nums, descr)
class FeatureBinningParam(BaseParam):
"""
Define the feature binning method
Parameters
----------
method : str, 'quantile', 'bucket' or 'optimal', default: 'quantile'
Binning method.
compress_thres: int, default: 10000
        When the number of saved summaries exceeds this threshold, it will call its compress function
    head_size: int, default: 10000
        The buffer size to store inserted observations. When the head list reaches this buffer size, the
        QuantileSummaries object starts to generate summaries (or stats) and insert them into its sampled list.
    error: float, 0 <= error < 1, default: 0.001
        The error tolerance of binning. The final split points come from the original data, and the rank
        of each split point is close to the exact rank. More precisely,
        floor((p - 2 * error) * N) <= rank(x) <= ceil((p + 2 * error) * N)
        where p is the quantile as a float, and N is the total number of data instances.
    bin_num: int, bin_num > 0, default: 10
        The max bin number for binning
    bin_indexes : list of int or int, default: -1
        Specify which columns need to be binned. -1 represents all columns. If you need to indicate specific
        cols, provide a list of header indexes instead of -1.
        Note that columns specified by `bin_indexes` and `bin_names` will be combined.
    bin_names : list of string, default: []
        Specify which columns need to be binned by name. Each element in the list represents a column name in the header.
        Note that columns specified by `bin_indexes` and `bin_names` will be combined.
    adjustment_factor : float, default: 0.5
        the adjustment factor when calculating WOE. This is useful when there is no event or non-event in
        a bin. Please note that this parameter will NOT take effect for the host.
    category_indexes : list of int or int, default: []
        Specify which columns are category features. -1 represents all columns. A list of ints indicates a set of
        such features. For category features, bin_obj will take their original values as split_points and treat them
        as having been binned. If this is not what you expect, please do NOT put them into this parameter.
        The number of categories should not exceed the bin_num set above.
        Note that columns specified by `category_indexes` and `category_names` will be combined.
    category_names : list of string, default: []
        Use column names to specify category features. Each element in the list represents a column name in the header.
        Note that columns specified by `category_indexes` and `category_names` will be combined.
    local_only : bool, default: False
        Whether to provide the binning method to the guest party only. If true, the host party will do nothing.
        Warnings: This parameter will be deprecated in a future version.
    transform_param: TransformParam
        Define how to transform the binned data.
need_run: bool, default True
Indicate if this module needed to be run
skip_static: bool, default False
If true, binning will not calculate iv, woe etc. In this case, optimal-binning
will not be supported.
"""
def __init__(self, method=consts.QUANTILE,
compress_thres=consts.DEFAULT_COMPRESS_THRESHOLD,
head_size=consts.DEFAULT_HEAD_SIZE,
error=consts.DEFAULT_RELATIVE_ERROR,
bin_num=consts.G_BIN_NUM, bin_indexes=-1, bin_names=None, adjustment_factor=0.5,
transform_param=TransformParam(),
local_only=False,
category_indexes=None, category_names=None,
need_run=True, skip_static=False):
super(FeatureBinningParam, self).__init__()
self.method = method
self.compress_thres = compress_thres
self.head_size = head_size
self.error = error
self.adjustment_factor = adjustment_factor
self.bin_num = bin_num
self.bin_indexes = bin_indexes
self.bin_names = bin_names
self.category_indexes = category_indexes
self.category_names = category_names
self.transform_param = copy.deepcopy(transform_param)
self.need_run = need_run
self.skip_static = skip_static
self.local_only = local_only
def check(self):
descr = "Binning param's"
self.check_string(self.method, descr)
self.method = self.method.lower()
self.check_positive_integer(self.compress_thres, descr)
self.check_positive_integer(self.head_size, descr)
self.check_decimal_float(self.error, descr)
self.check_positive_integer(self.bin_num, descr)
if self.bin_indexes != -1:
self.check_defined_type(self.bin_indexes, descr, ['list', 'RepeatedScalarContainer', "NoneType"])
self.check_defined_type(self.bin_names, descr, ['list', "NoneType"])
self.check_defined_type(self.category_indexes, descr, ['list', "NoneType"])
self.check_defined_type(self.category_names, descr, ['list', "NoneType"])
self.check_open_unit_interval(self.adjustment_factor, descr)
self.check_boolean(self.local_only, descr)
class HeteroFeatureBinningParam(FeatureBinningParam):
"""
split_points_by_index: dict, default None
Manually specified split points for local features;
key should be feature index, value should be split points in sorted list;
along with `split_points_by_col_name`, keys should cover all local features, including categorical features;
        note that each split point list should have length equal to the desired bin num (n),
        with the first (n-1) entries equal to the maximum value (inclusive) of the first (n-1) bins,
        and the nth value equal to the max of the current feature.
    split_points_by_col_name: dict, default None
        Manually specified split points for local features;
        key should be feature name, value should be split points in sorted list;
        along with `split_points_by_index`, keys should cover all local features, including categorical features;
        note that each split point list should have length equal to the desired bin num (n),
        with the first (n-1) entries equal to the maximum value (inclusive) of the first (n-1) bins,
        and the nth value equal to the max of the current feature.
"""
def __init__(self, method=consts.QUANTILE, compress_thres=consts.DEFAULT_COMPRESS_THRESHOLD,
head_size=consts.DEFAULT_HEAD_SIZE,
error=consts.DEFAULT_RELATIVE_ERROR,
bin_num=consts.G_BIN_NUM, bin_indexes=-1, bin_names=None, adjustment_factor=0.5,
transform_param=TransformParam(), optimal_binning_param=OptimalBinningParam(),
local_only=False, category_indexes=None, category_names=None,
encrypt_param=EncryptParam(),
need_run=True, skip_static=False,
split_points_by_index=None, split_points_by_col_name=None):
super(HeteroFeatureBinningParam, self).__init__(method=method, compress_thres=compress_thres,
head_size=head_size, error=error,
bin_num=bin_num, bin_indexes=bin_indexes,
bin_names=bin_names, adjustment_factor=adjustment_factor,
transform_param=transform_param,
category_indexes=category_indexes,
category_names=category_names,
need_run=need_run, local_only=local_only,
skip_static=skip_static)
self.optimal_binning_param = copy.deepcopy(optimal_binning_param)
self.encrypt_param = encrypt_param
self.split_points_by_index = split_points_by_index
self.split_points_by_col_name = split_points_by_col_name
def check(self):
descr = "Hetero Binning param's"
super(HeteroFeatureBinningParam, self).check()
self.check_valid_value(self.method, descr, [consts.QUANTILE, consts.BUCKET, consts.OPTIMAL])
self.optimal_binning_param.check()
self.encrypt_param.check()
if self.encrypt_param.method != consts.PAILLIER:
raise ValueError("Feature Binning support Paillier encrypt method only.")
if self.skip_static and self.method == consts.OPTIMAL:
raise ValueError("When skip_static, optimal binning is not supported.")
self.transform_param.check()
if self.skip_static and self.transform_param.transform_type == 'woe':
raise ValueError("To use woe transform, skip_static should set as False")
class HomoFeatureBinningParam(FeatureBinningParam):
def __init__(self, method=consts.VIRTUAL_SUMMARY,
compress_thres=consts.DEFAULT_COMPRESS_THRESHOLD,
head_size=consts.DEFAULT_HEAD_SIZE,
error=consts.DEFAULT_RELATIVE_ERROR,
sample_bins=100,
bin_num=consts.G_BIN_NUM, bin_indexes=-1, bin_names=None, adjustment_factor=0.5,
transform_param=TransformParam(),
category_indexes=None, category_names=None,
need_run=True, skip_static=False, max_iter=100):
super(HomoFeatureBinningParam, self).__init__(method=method, compress_thres=compress_thres,
head_size=head_size, error=error,
bin_num=bin_num, bin_indexes=bin_indexes,
bin_names=bin_names, adjustment_factor=adjustment_factor,
transform_param=transform_param,
category_indexes=category_indexes, category_names=category_names,
need_run=need_run,
skip_static=skip_static)
self.sample_bins = sample_bins
self.max_iter = max_iter
def check(self):
descr = "homo binning param's"
super(HomoFeatureBinningParam, self).check()
self.check_string(self.method, descr)
self.method = self.method.lower()
self.check_valid_value(self.method, descr, [consts.VIRTUAL_SUMMARY, consts.RECURSIVE_QUERY])
self.check_positive_integer(self.max_iter, descr)
if self.max_iter > 100:
raise ValueError("Max iter is not allowed exceed 100")
| 15,819 | 46.939394 | 119 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/one_vs_rest_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class OneVsRestParam(BaseParam):
"""
Define the one_vs_rest parameters.
Parameters
----------
has_arbiter: bool, default: true
        Some algorithms, such as SecureBoost in FATE, may not have an arbiter;
        for these algorithms, it should be set to false.
"""
def __init__(self, need_one_vs_rest=False, has_arbiter=True):
super().__init__()
self.need_one_vs_rest = need_one_vs_rest
self.has_arbiter = has_arbiter
def check(self):
if type(self.has_arbiter).__name__ != "bool":
raise ValueError(
"one_vs_rest param's has_arbiter {} not supported, should be bool type".format(
self.has_arbiter))
return True
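# Hedged usage sketch (illustrative; per the docstring, algorithms without an
# arbiter, such as SecureBoost, should set has_arbiter=False):
if __name__ == "__main__":
    OneVsRestParam(need_one_vs_rest=True, has_arbiter=False).check()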
| 1,456 | 30.673913 | 95 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/label_transform_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class LabelTransformParam(BaseParam):
"""
Define label transform param that used in label transform.
Parameters
----------
label_encoder : None or dict, default : None
Specify (label, encoded label) key-value pairs for transforming labels to new values.
e.g. {"Yes": 1, "No": 0};
        **new in ver 1.9: during training, input labels not found in `label_encoder` will retain their original values
    label_list : None or list, default : None
        List all input labels, used for matching the types of the original keys in the label_encoder dict;
        length should match key count in label_encoder, e.g. ["Yes", "No"];
        **new in ver 1.9: given a non-empty `label_encoder`, when `label_list` is not provided,
        the module will infer label types from the input data
need_run: bool, default: True
Specify whether to run label transform
"""
def __init__(self, label_encoder=None, label_list=None, need_run=True):
super(LabelTransformParam, self).__init__()
self.label_encoder = label_encoder
self.label_list = label_list
self.need_run = need_run
def check(self):
model_param_descr = "label transform param's "
BaseParam.check_boolean(self.need_run, f"{model_param_descr} need run ")
if self.label_encoder is not None:
if not isinstance(self.label_encoder, dict):
raise ValueError(f"{model_param_descr} label_encoder should be dict type")
if len(self.label_encoder) == 0:
self.label_encoder = None
if self.label_list is not None:
if not isinstance(self.label_list, list):
raise ValueError(f"{model_param_descr} label_list should be list type")
if self.label_encoder and self.label_list and len(self.label_list) != len(self.label_encoder.keys()):
raise ValueError(f"label_list's length not matching label_encoder key count.")
if len(self.label_list) == 0:
self.label_list = None
return True
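# Hedged usage sketch (values follow the docstring's {"Yes": 1, "No": 0} example):
if __name__ == "__main__":
    example_param = LabelTransformParam(label_encoder={"Yes": 1, "No": 0},
                                        label_list=["Yes", "No"])
    example_param.check()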
| 2,780 | 37.625 | 115 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/init_model_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.param.base_param import BaseParam
class InitParam(BaseParam):
"""
Initialize Parameters used in initializing a model.
Parameters
----------
init_method : {'random_uniform', 'random_normal', 'ones', 'zeros' or 'const'}
Initial method.
init_const : int or float, default: 1
Required when init_method is 'const'. Specify the constant.
fit_intercept : bool, default: True
Whether to initialize the intercept or not.
"""
def __init__(self, init_method='random_uniform', init_const=1, fit_intercept=True, random_seed=None):
super().__init__()
self.init_method = init_method
self.init_const = init_const
self.fit_intercept = fit_intercept
self.random_seed = random_seed
def check(self):
if type(self.init_method).__name__ != "str":
raise ValueError(
"Init param's init_method {} not supported, should be str type".format(self.init_method))
else:
self.init_method = self.init_method.lower()
if self.init_method not in ['random_uniform', 'random_normal', 'ones', 'zeros', 'const']:
raise ValueError(
"Init param's init_method {} not supported, init_method should in 'random_uniform',"
" 'random_normal' 'ones', 'zeros' or 'const'".format(self.init_method))
if type(self.init_const).__name__ not in ['int', 'float']:
raise ValueError(
"Init param's init_const {} not supported, should be int or float type".format(self.init_const))
if type(self.fit_intercept).__name__ != 'bool':
raise ValueError(
"Init param's fit_intercept {} not supported, should be bool type".format(self.fit_intercept))
if self.random_seed is not None:
if type(self.random_seed).__name__ != 'int':
raise ValueError(
"Init param's random_seed {} not supported, should be int or float type".format(self.random_seed))
return True
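# Hedged usage sketch (illustrative values, not from the source): a constant
# initializer requires init_const; random_seed must be an int when given.
if __name__ == "__main__":
    InitParam(init_method="const", init_const=0.5, fit_intercept=True,
              random_seed=1).check()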
| 2,742 | 37.633803 | 118 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/callback_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class CallbackParam(BaseParam):
"""
Define callback method that used in federated ml.
Parameters
----------
callbacks : list, default: []
        Indicate what kinds of callback functions are desired during the training process.
        Accepted values: {'EarlyStopping', 'ModelCheckpoint', 'PerformanceEvaluate'}
    validation_freqs: {None, int, list, tuple, set}
        validation frequency during training.
    early_stopping_rounds: None or int
        Will stop training if one metric doesn't improve in the last early_stopping_rounds rounds
    metrics: None, or list, default None
        Indicate which metrics will be used when evaluation is executed during the training process. If set as empty,
        default metrics for the specific task type will be used. As for binary classification, default metrics are
        ['auc', 'ks']
    use_first_metric_only: bool, default: False
        Indicate whether to use the first metric only for early stopping judgement.
    save_freq: int, default: 1
        The callbacks save the model every save_freq epochs
"""
def __init__(self, callbacks=None, validation_freqs=None, early_stopping_rounds=None,
metrics=None, use_first_metric_only=False, save_freq=1):
super(CallbackParam, self).__init__()
self.callbacks = callbacks or []
self.validation_freqs = validation_freqs
self.early_stopping_rounds = early_stopping_rounds
self.metrics = metrics or []
self.use_first_metric_only = use_first_metric_only
self.save_freq = save_freq
def check(self):
if self.early_stopping_rounds is None:
pass
elif isinstance(self.early_stopping_rounds, int):
if self.early_stopping_rounds < 1:
raise ValueError("early stopping rounds should be larger than 0 when it's integer")
if self.validation_freqs is None:
raise ValueError("validation freqs must be set when early stopping is enabled")
if self.validation_freqs is not None:
if type(self.validation_freqs).__name__ not in ["int", "list", "tuple", "set"]:
raise ValueError(
"validation strategy param's validate_freqs's type not supported ,"
" should be int or list or tuple or set"
)
if type(self.validation_freqs).__name__ == "int" and \
self.validation_freqs <= 0:
raise ValueError("validation strategy param's validate_freqs should greater than 0")
if self.metrics is not None and not isinstance(self.metrics, list):
raise ValueError("metrics should be a list")
if not isinstance(self.use_first_metric_only, bool):
raise ValueError("use_first_metric_only should be a boolean")
return True
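# Hedged usage sketch (illustrative values, not from the source): early stopping
# requires validation_freqs to be set, as enforced in check() above.
if __name__ == "__main__":
    example_param = CallbackParam(callbacks=["EarlyStopping", "PerformanceEvaluate"],
                                  validation_freqs=1, early_stopping_rounds=5,
                                  metrics=["auc", "ks"], use_first_metric_only=True)
    example_param.check()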
| 3,563 | 39.044944 | 110 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/homo_nn_param.py
|
from pipeline.param.base_param import BaseParam
class TrainerParam(BaseParam):
def __init__(self, trainer_name=None, **kwargs):
super(TrainerParam, self).__init__()
self.trainer_name = trainer_name
self.param = kwargs
def check(self):
if self.trainer_name is not None:
self.check_string(self.trainer_name, 'trainer_name')
def to_dict(self):
ret = {'trainer_name': self.trainer_name, 'param': self.param}
return ret
class DatasetParam(BaseParam):
def __init__(self, dataset_name=None, **kwargs):
super(DatasetParam, self).__init__()
self.dataset_name = dataset_name
self.param = kwargs
def check(self):
if self.dataset_name is not None:
self.check_string(self.dataset_name, 'dataset_name')
def to_dict(self):
ret = {'dataset_name': self.dataset_name, 'param': self.param}
return ret
class HomoNNParam(BaseParam):
def __init__(self,
trainer: TrainerParam = TrainerParam(),
dataset: DatasetParam = DatasetParam(),
torch_seed: int = 100,
nn_define: dict = None,
loss: dict = None,
optimizer: dict = None,
ds_config: dict = None
):
super(HomoNNParam, self).__init__()
self.trainer = trainer
self.dataset = dataset
self.torch_seed = torch_seed
self.nn_define = nn_define
self.loss = loss
self.optimizer = optimizer
self.ds_config = ds_config
def check(self):
assert isinstance(self.trainer, TrainerParam), 'trainer must be a TrainerParam()'
assert isinstance(self.dataset, DatasetParam), 'dataset must be a DatasetParam()'
self.trainer.check()
self.dataset.check()
self.check_positive_integer(self.torch_seed, 'torch seed')
if self.nn_define is not None:
assert isinstance(self.nn_define, dict), 'nn define should be a dict defining model structures'
if self.loss is not None:
assert isinstance(self.loss, dict), 'loss parameter should be a loss config dict'
if self.optimizer is not None:
assert isinstance(self.optimizer, dict), 'optimizer parameter should be a config dict'
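# Hedged usage sketch (trainer/dataset names and extra kwargs are illustrative
# assumptions, not from the source): TrainerParam and DatasetParam forward any
# extra kwargs untouched via their `param` dict.
if __name__ == "__main__":
    example_param = HomoNNParam(
        trainer=TrainerParam(trainer_name="fedavg_trainer", epochs=10),
        dataset=DatasetParam(dataset_name="table"),
        torch_seed=100,
    )
    example_param.check()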
| 2,340 | 31.971831 | 107 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/predict_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
from pipeline.param.base_param import BaseParam
class PredictParam(BaseParam):
"""
Define the predict method of HomoLR, HeteroLR, SecureBoosting
Parameters
----------
threshold: float or int
        The threshold used to separate positive and negative classes. Normally, it should be in (0, 1)
"""
def __init__(self, threshold=0.5):
self.threshold = threshold
def check(self):
if type(self.threshold).__name__ not in ["float", "int"]:
raise ValueError("predict param's predict_param {} not supported, should be float or int".format(
self.threshold))
return True
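# Hedged usage sketch (illustrative threshold, not from the source):
if __name__ == "__main__":
    PredictParam(threshold=0.3).check()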
| 1,391 | 30.636364 | 109 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/sample_weight_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
from pipeline.param import consts
class SampleWeightParam(BaseParam):
"""
Define sample weight parameters.
Parameters
----------
class_weight : str or dict, default None
        class weight dictionary or class weight computation mode; string value only accepts 'balanced';
        if a dict is provided, keys should be classes (labels), and weights will not be normalized, e.g.: {'0': 1, '1': 2};
        if both class_weight and sample_weight_name are None, return original input data
    sample_weight_name : str
        name of the column which specifies sample weight;
        if both class_weight and sample_weight_name are None, return original input data
normalize : bool, default False
whether to normalize sample weight extracted from `sample_weight_name` column
need_run : bool, default True
whether to run this module or not
"""
def __init__(self, class_weight=None, sample_weight_name=None, normalize=False, need_run=True):
self.class_weight = class_weight
self.sample_weight_name = sample_weight_name
self.normalize = normalize
self.need_run = need_run
def check(self):
descr = "sample weight param's"
if self.class_weight:
if not isinstance(self.class_weight, str) and not isinstance(self.class_weight, dict):
raise ValueError(f"{descr} class_weight must be str, dict, or None.")
if isinstance(self.class_weight, str):
self.class_weight = self.check_and_change_lower(self.class_weight,
[consts.BALANCED],
f"{descr} class_weight")
if self.sample_weight_name:
self.check_string(self.sample_weight_name, f"{descr} sample_weight_name")
self.check_boolean(self.need_run, f"{descr} need_run")
self.check_boolean(self.normalize, f"{descr} normalize")
return True
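# Hedged usage sketch (dict form follows the docstring's {'0': 1, '1': 2} example):
if __name__ == "__main__":
    SampleWeightParam(class_weight={"0": 1, "1": 2}).check()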
| 2,737 | 37.027778 | 119 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/cross_validation_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline.param.base_param import BaseParam
# from pipeline.param.evaluation_param import EvaluateParam
from pipeline.param import consts
class CrossValidationParam(BaseParam):
"""
Define cross validation params
Parameters
----------
n_splits: int, default: 5
Specify how many splits used in KFold
mode: str, default: 'Hetero'
Indicate what mode is current task
role: {'Guest', 'Host', 'Arbiter'}, default: 'Guest'
Indicate what role is current party
shuffle: bool, default: True
        Define whether to shuffle before KFold or not.
    random_seed: int, default: 1
        Specify the random seed for numpy shuffle
    need_cv: bool, default False
        Indicate if this module needs to be run
    output_fold_history: bool, default True
        Indicate whether to output a table of ids used by each fold, else return original input data;
        returned ids are formatted as: {original_id}#fold{fold_num}#{train/validate}
    history_value_type: {'score', 'instance'}, default score
        Indicate whether to include original instance or predict score in the output fold history,
        only effective when output_fold_history is set to True
"""
def __init__(self, n_splits=5, mode=consts.HETERO, role=consts.GUEST, shuffle=True, random_seed=1,
need_cv=False, output_fold_history=True, history_value_type="score"):
super(CrossValidationParam, self).__init__()
self.n_splits = n_splits
self.mode = mode
self.role = role
self.shuffle = shuffle
self.random_seed = random_seed
# self.evaluate_param = copy.deepcopy(evaluate_param)
self.need_cv = need_cv
self.output_fold_history = output_fold_history
self.history_value_type = history_value_type
def check(self):
model_param_descr = "cross validation param's "
self.check_positive_integer(self.n_splits, model_param_descr)
self.check_valid_value(self.mode, model_param_descr, valid_values=[consts.HOMO, consts.HETERO])
self.check_valid_value(self.role, model_param_descr, valid_values=[consts.HOST, consts.GUEST, consts.ARBITER])
self.check_boolean(self.shuffle, model_param_descr)
self.check_boolean(self.output_fold_history, model_param_descr)
self.history_value_type = self.check_and_change_lower(
self.history_value_type, ["instance", "score"], model_param_descr)
if self.random_seed is not None:
self.check_positive_integer(self.random_seed, model_param_descr)
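# Hedged usage sketch (illustrative values, not from the source): with
# output_fold_history enabled, ids come back as
# {original_id}#fold{fold_num}#{train/validate}.
if __name__ == "__main__":
    example_param = CrossValidationParam(n_splits=5, shuffle=True, random_seed=1,
                                         need_cv=True, history_value_type="score")
    example_param.check()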
| 3,251 | 38.180723 | 118 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/pearson_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.base_param import BaseParam
class PearsonParam(BaseParam):
def __init__(
self,
column_names=None,
column_indexes=None,
cross_parties=True,
need_run=True,
use_mix_rand=False,
calc_local_vif=True,
):
super().__init__()
self.column_names = column_names
self.column_indexes = column_indexes
self.cross_parties = cross_parties
self.need_run = need_run
self.use_mix_rand = use_mix_rand
if column_names is None:
self.column_names = []
if column_indexes is None:
self.column_indexes = []
self.calc_local_vif = calc_local_vif
def check(self):
if not isinstance(self.use_mix_rand, bool):
raise ValueError(
f"use_mix_rand accept bool type only, {type(self.use_mix_rand)} got"
)
if self.cross_parties and (not self.need_run):
raise ValueError(
f"need_run should be True(which is default) when cross_parties is True."
)
if not isinstance(self.column_names, list):
raise ValueError(
f"type mismatch, column_names with type {type(self.column_names)}"
)
for name in self.column_names:
if not isinstance(name, str):
raise ValueError(
f"type mismatch, column_names with element {name}(type is {type(name)})"
)
if isinstance(self.column_indexes, list):
for idx in self.column_indexes:
if not isinstance(idx, int):
raise ValueError(
f"type mismatch, column_indexes with element {idx}(type is {type(idx)})"
)
if isinstance(self.column_indexes, int) and self.column_indexes != -1:
raise ValueError(
f"column_indexes with type int and value {self.column_indexes}(only -1 allowed)"
)
if self.need_run:
if isinstance(self.column_indexes, list) and isinstance(
self.column_names, list
):
if len(self.column_indexes) == 0 and len(self.column_names) == 0:
raise ValueError(f"provide at least one column")
| 2,979 | 35.341463 | 96 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/param/hetero_sshe_linr_param.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.param.glm_param import LinearModelParam
from pipeline.param.callback_param import CallbackParam
from pipeline.param.encrypt_param import EncryptParam
from pipeline.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from pipeline.param.cross_validation_param import CrossValidationParam
from pipeline.param.init_model_param import InitParam
from pipeline.param import consts
class HeteroSSHELinRParam(LinearModelParam):
"""
Parameters used for Hetero SSHE Linear Regression.
Parameters
----------
penalty : {'L2' or 'L1'}
Penalty method used in LinR. Please note that, when using encrypted version in HeteroLinR,
'L1' is not supported.
tol : float, default: 1e-4
The tolerance of convergence
alpha : float, default: 1.0
Regularization strength coefficient.
optimizer : {'sgd', 'rmsprop', 'adam', 'adagrad', 'nesterov_momentum_sgd'}
Optimize method
batch_size : int, default: -1
Batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
learning_rate : float, default: 0.01
Learning rate
max_iter : int, default: 20
The maximum iteration for training.
init_param: InitParam object, default: default InitParam object
Init param method object.
    early_stop : {'diff', 'abs', 'weight_diff'}
Method used to judge convergence.
a) diff: Use difference of loss between two iterations to judge whether converge.
b) abs: Use the absolute value of loss to judge whether converge. i.e. if loss < tol, it is converged.
c) weight_diff: Use difference between weights of two consecutive iterations
encrypt_param: EncryptParam object, default: default EncryptParam object
encrypt param
encrypted_mode_calculator_param: EncryptedModeCalculatorParam object, default: default EncryptedModeCalculatorParam object
encrypted mode calculator param
cv_param: CrossValidationParam object, default: default CrossValidationParam object
cv param
decay: int or float, default: 1
        Decay rate for the learning rate. The learning rate will follow this decay schedule:
        lr = lr0 / (1 + decay * t) if decay_sqrt is False; if decay_sqrt is True, lr = lr0 / sqrt(1 + decay * t),
        where t is the iter number.
decay_sqrt: Bool, default: True
lr = lr0/(1+decay*t) if decay_sqrt is False, otherwise, lr = lr0 / sqrt(1+decay*t)
callback_param: CallbackParam object
callback param
reveal_strategy: str, "respectively", "encrypted_reveal_in_host", default: "respectively"
"respectively": Means guest and host can reveal their own part of weights only.
"encrypted_reveal_in_host": Means host can be revealed his weights in encrypted mode, and guest can be revealed in normal mode.
reveal_every_iter: bool, default: False
Whether reconstruct model weights every iteration. If so, Regularization is available.
The performance will be better as well since the algorithm process is simplified.
"""
def __init__(self, penalty='L2',
tol=1e-4, alpha=1.0, optimizer='sgd',
batch_size=-1, learning_rate=0.01, init_param=InitParam(),
max_iter=20, early_stop='diff',
encrypt_param=EncryptParam(),
encrypted_mode_calculator_param=EncryptedModeCalculatorParam(),
cv_param=CrossValidationParam(), decay=1, decay_sqrt=True,
callback_param=CallbackParam(),
use_mix_rand=True,
reveal_strategy="respectively",
reveal_every_iter=False
):
super(HeteroSSHELinRParam, self).__init__(penalty=penalty, tol=tol, alpha=alpha, optimizer=optimizer,
batch_size=batch_size, learning_rate=learning_rate,
init_param=init_param, max_iter=max_iter, early_stop=early_stop,
encrypt_param=encrypt_param, cv_param=cv_param, decay=decay,
decay_sqrt=decay_sqrt,
callback_param=callback_param)
self.encrypted_mode_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
self.use_mix_rand = use_mix_rand
self.reveal_strategy = reveal_strategy
self.reveal_every_iter = reveal_every_iter
def check(self):
descr = "sshe linear_regression_param's "
super(HeteroSSHELinRParam, self).check()
if self.encrypt_param.method != consts.PAILLIER:
raise ValueError(
descr + "encrypt method supports 'Paillier' only")
self.check_boolean(self.reveal_every_iter, descr)
if self.penalty is None:
pass
elif type(self.penalty).__name__ != "str":
raise ValueError(
f"{descr} penalty {self.penalty} not supported, should be str type")
else:
self.penalty = self.penalty.upper()
"""
if self.penalty not in [consts.L1_PENALTY, consts.L2_PENALTY]:
raise ValueError(
f"{descr} penalty not supported, penalty should be 'L1', 'L2' or 'none'")
"""
if not self.reveal_every_iter:
if self.penalty not in [consts.L2_PENALTY, consts.NONE.upper()]:
raise ValueError(
f"penalty should be 'L2' or 'none', when reveal_every_iter is False"
)
if type(self.optimizer).__name__ != "str":
raise ValueError(
f"{descr} optimizer {self.optimizer} not supported, should be str type")
else:
self.optimizer = self.optimizer.lower()
if self.reveal_every_iter:
if self.optimizer not in ['sgd', 'rmsprop', 'adam', 'adagrad']:
raise ValueError(
"When reveal_every_iter is True, "
f"{descr} optimizer not supported, optimizer should be"
" 'sgd', 'rmsprop', 'adam', or 'adagrad'")
else:
if self.optimizer not in ['sgd']:
raise ValueError("When reveal_every_iter is False, "
f"{descr} optimizer not supported, optimizer should be"
" 'sgd'")
if self.callback_param.validation_freqs is not None:
if self.reveal_every_iter is False:
raise ValueError(f"When reveal_every_iter is False, validation every iter"
f" is not supported.")
self.reveal_strategy = self.check_and_change_lower(self.reveal_strategy,
["respectively", "encrypted_reveal_in_host"],
f"{descr} reveal_strategy")
if self.reveal_strategy == "encrypted_reveal_in_host" and self.reveal_every_iter:
raise PermissionError("reveal strategy: encrypted_reveal_in_host mode is not allow to reveal every iter.")
return True
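# Hedged usage sketch (illustrative values, not from the source): with
# reveal_every_iter=False, only the 'sgd' optimizer and an 'L2'/'none' penalty
# pass the checks above.
if __name__ == "__main__":
    example_param = HeteroSSHELinRParam(penalty="L2", optimizer="sgd",
                                        learning_rate=0.05, max_iter=30,
                                        reveal_strategy="respectively",
                                        reveal_every_iter=False)
    example_param.check()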
| 8,022 | 44.585227 | 135 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/parser/__init__.py
| 0 | 0 | 0 |
py
|
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_ftl.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.tools import extract_explicit_parameter
from pipeline.param import consts
try:
from pipeline.component.component_base import FateComponent
from pipeline.component.nn.models.sequantial import Sequential
import numpy as np
except Exception as e:
print(e)
    print('Import NN components in HeteroFTL module failed; \
this may be caused by torch/keras not being installed; \
please install them to use this module')
# Recursively walk a nested dict and convert numpy float32/float64 leaves to
# native Python floats, so the resulting config can be serialized.
def find_and_convert_float32_in_dict(d, path=""):
for k, v in d.items():
new_path = f"{path}.{k}" if path else k
if isinstance(v, dict):
find_and_convert_float32_in_dict(v, new_path)
elif isinstance(v, np.float32) or isinstance(v, np.float64):
d[k] = float(v)
class HeteroFTL(FateComponent):
@extract_explicit_parameter
def __init__(self, epochs=1, batch_size=-1,
encrypt_param=None, predict_param=None, cv_param=None,
intersect_param={'intersect_method': consts.RSA},
validation_freqs=None, early_stopping_rounds=None, use_first_metric_only=None,
mode='plain', communication_efficient=False, n_iter_no_change=False, tol=1e-5,
local_round=5,
**kwargs):
explicit_parameters = kwargs["explict_parameters"]
explicit_parameters["optimizer"] = None
# explicit_parameters["loss"] = None
# explicit_parameters["metrics"] = None
explicit_parameters["nn_define"] = None
explicit_parameters["config_type"] = "keras"
FateComponent.__init__(self, **explicit_parameters)
if "name" in explicit_parameters:
del explicit_parameters["name"]
for param_key, param_value in explicit_parameters.items():
setattr(self, param_key, param_value)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name, data_type='single')
self._module_name = "FTL"
self.optimizer = None
self.loss = None
self.config_type = "keras"
self.metrics = None
self.bottom_nn_define = None
self.top_nn_define = None
self.interactive_layer_define = None
self._nn_model = Sequential()
self.nn_define = None
def add_nn_layer(self, layer):
self._nn_model.add(layer)
def compile(self, optimizer,):
self.optimizer = self._nn_model.get_optimizer_config(optimizer)
self.config_type = self._nn_model.get_layer_type()
self.nn_define = self._nn_model.get_network_config()
find_and_convert_float32_in_dict(self.nn_define)
find_and_convert_float32_in_dict(self.optimizer)
def __getstate__(self):
state = dict(self.__dict__)
del state["_nn_model"]
return state
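# Hedged usage sketch (assumes tensorflow.keras is available; layer shapes and
# optimizer settings are illustrative assumptions, not from the source):
if __name__ == "__main__":
    from tensorflow.keras.layers import Dense
    from tensorflow.keras import optimizers
    ftl = HeteroFTL(name='hetero_ftl_0', epochs=10, batch_size=-1, mode='plain')
    ftl.add_nn_layer(Dense(units=32, activation='relu'))
    ftl.compile(optimizer=optimizers.Adam(learning_rate=0.01))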
| 3,526 | 35.739583 | 95 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/intersection.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.intersect_param import IntersectParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class Intersection(FateComponent, IntersectParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
# print (self.name)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
IntersectParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False, has_cache=True)
self._module_name = "Intersection"
| 1,340 | 35.243243 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_sshe_linr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.hetero_sshe_linr_param import HeteroSSHELinRParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroSSHELinR(FateComponent, HeteroSSHELinRParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
# print (self.name)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HeteroSSHELinRParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HeteroSSHELinR"
| 1,352 | 35.567568 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/reader.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.component.component_base import FateFlowComponent
from pipeline.interface import Output
from pipeline.param.reader_param import ReaderParam
class Reader(FateFlowComponent, ReaderParam):
def __init__(self, **kwargs):
FateFlowComponent.__init__(self, **kwargs)
new_kwargs = self.erase_component_base_param(**kwargs)
ReaderParam.__init__(self, **new_kwargs)
self.output = Output(self.name, data_type='single', has_model=False)
self._module_name = "Reader"
| 1,133 | 34.4375 | 76 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/secure_information_retrieval.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.sir_param import SecureInformationRetrievalParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class SecureInformationRetrieval(FateComponent, SecureInformationRetrievalParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
# print (self.name)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
SecureInformationRetrievalParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="single")
self.output = Output(self.name)
self._module_name = "SecureInformationRetrieval"
| 1,400 | 36.864865 | 81 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/dataio.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.dataio_param import DataIOParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class DataIO(FateComponent, DataIOParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
#print (self.name)
LOGGER.debug(f"{self.name} component created")
LOGGER.warning("DataIO should not be use in training task since FATE-v1.9.0, use DataTransform instead")
new_kwargs = self.erase_component_base_param(**kwargs)
DataIOParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, data_type='single')
self._module_name = "DataIO"
| 1,415 | 36.263158 | 112 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/scorecard.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.scorecard_param import ScorecardParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class Scorecard(FateComponent, ScorecardParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
ScorecardParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, data_type='single', has_model=False)
self._module_name = "Scorecard"
| 1,310 | 35.416667 | 76 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_linr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.linear_regression_param import LinearParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroLinR(FateComponent, LinearParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
# print (self.name)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
LinearParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HeteroLinR"
| 1,321 | 34.72973 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/sampler.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.sample_param import SampleParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class FederatedSample(FateComponent, SampleParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
SampleParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False)
self._module_name = "FederatedSample"
| 1,317 | 35.611111 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/feldman_verifiable_sum.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.feldman_verifiable_sum_param import FeldmanVerifiableSumParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class FeldmanVerifiableSum(FateComponent, FeldmanVerifiableSumParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
FeldmanVerifiableSumParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False)
self._module_name = "FeldmanVerifiableSum"
| 1,358 | 36.75 | 81 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/union.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.union_param import UnionParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class Union(FateComponent, UnionParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
UnionParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False)
self._module_name = "Union"
| 1,294 | 34 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/column_expand.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.column_expand_param import ColumnExpandParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class ColumnExpand(FateComponent, ColumnExpandParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
ColumnExpandParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, data_type='single', has_model=False)
self._module_name = "ColumnExpand"
| 1,356 | 35.675676 | 76 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/scale.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.scale_param import ScaleParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class FeatureScale(FateComponent, ScaleParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
ScaleParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name)
self._module_name = "FeatureScale"
| 1,291 | 33.918919 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_secureboost.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.boosting_param import HeteroSecureBoostParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroSecureBoost(FateComponent, HeteroSecureBoostParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HeteroSecureBoostParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HeteroSecureBoost"
| 1,358 | 35.72973 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/homo_data_split.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.data_split_param import DataSplitParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HomoDataSplit(FateComponent, DataSplitParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
DataSplitParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False, data_type="multi")
self._module_name = "HomoDataSplit"
| 1,345 | 36.388889 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/homo_feature_binning.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.feature_binning_param import HomoFeatureBinningParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HomoFeatureBinning(FateComponent, HomoFeatureBinningParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HomoFeatureBinningParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HomoFeatureBinning"
| 1,371 | 36.081081 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/psi.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.psi_param import PSIParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class PSI(FateComponent, PSIParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
PSIParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name, has_model=True)
self._module_name = "PSI"
| 1,299 | 34.135135 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/model_loader.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.model_loader_param import CheckpointParam
from pipeline.component.component_base import FateFlowComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class ModelLoader(FateFlowComponent, CheckpointParam):
def __init__(self, **kwargs):
FateFlowComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
CheckpointParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=True, has_cache=False, has_data=False)
self._module_name = "ModelLoader"
| 1,372 | 36.108108 | 88 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_poisson.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.poisson_regression_param import PoissonParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroPoisson(FateComponent, PoissonParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
PoissonParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HeteroPoisson"
| 1,331 | 35 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/homo_nn.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.tools import extract_explicit_parameter
from pipeline.utils.logger import LOGGER
from pipeline.component.component_base import FateComponent
from pipeline.component.nn.interface import TrainerParam, DatasetParam
DEFAULT_PARAM_DICT = {}
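# use the real torch Optimizer type for the annotation below when torch is
# available; otherwise fall back to a string forward reference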
try:
import torch as t
OptimizerType = t.optim.Optimizer
except ImportError:
OptimizerType = 't.optim.Optimizer'
try:
import torch as t
from pipeline.component.nn.backend.torch.base import Sequential
from pipeline.component.nn.backend.torch import base
from pipeline.component.nn.backend.torch.cust import CustModel
# default parameter dict
DEFAULT_PARAM_DICT = {
'trainer': TrainerParam(trainer_name='fedavg_trainer'),
'dataset': DatasetParam(dataset_name='table'),
'torch_seed': 100,
'loss': None,
'optimizer': None,
'nn_define': None,
'ds_config': None
}
except Exception as e:
print(e)
print('Import NN components in HomoNN module failed, \
this may be caused by torch not being installed, \
please install torch to use this module')
Sequential = None
class HomoNN(FateComponent):
"""
Parameters
----------
name, name of this component
trainer, trainer param
dataset, dataset param
torch_seed, global random seed
loss, loss function from fate_torch
optimizer, optimizer from fate_torch
model, a fate torch sequential defining the model structure
"""
@extract_explicit_parameter
def __init__(self,
name=None,
trainer: TrainerParam = TrainerParam(trainer_name='fedavg_trainer', epochs=10, batch_size=512, # training parameter
early_stop=None, tol=0.0001, # early stop parameters
secure_aggregate=True, weighted_aggregation=True,
aggregate_every_n_epoch=None, # federation
cuda=False, pin_memory=True, shuffle=True, data_loader_worker=0, # GPU dataloader
validation_freqs=None),
dataset: DatasetParam = DatasetParam(dataset_name='table'),
torch_seed: int = 100,
loss=None,
optimizer: OptimizerType = None,
ds_config: dict = None,
model: Sequential = None, **kwargs):
explicit_parameters = copy.deepcopy(DEFAULT_PARAM_DICT)
if 'name' not in kwargs["explict_parameters"]:
raise RuntimeError('module name is not set')
explicit_parameters["name"] = kwargs["explict_parameters"]['name']
FateComponent.__init__(self, **explicit_parameters)
kwargs["explict_parameters"].pop('name')
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name, data_type='single')
self._module_name = "HomoNN"
self._updated = {'trainer': False, 'dataset': False,
'torch_seed': False, 'loss': False, 'optimizer': False, 'model': False}
self._set_param(kwargs["explict_parameters"])
self._check_parameters()
def _set_updated(self, attr, status=True):
if attr in self._updated:
self._updated[attr] = status
else:
raise ValueError('attr {} not in update status {}'.format(attr, self._updated))
def _set_param(self, params):
if "name" in params:
del params["name"]
for param_key, param_value in params.items():
setattr(self, param_key, param_value)
def _check_parameters(self):
if hasattr(self, 'trainer') and self.trainer is not None and not self._updated['trainer']:
assert isinstance(
self.trainer, TrainerParam), 'trainer must be a TrainerParam class'
self.trainer.check()
self.trainer = self.trainer.to_dict()  # TrainerParam is serialized to a dict config here
self._set_updated('trainer', True)
if hasattr(self, 'dataset') and self.dataset is not None and not self._updated['dataset']:
assert isinstance(
self.dataset, DatasetParam), 'dataset must be a DatasetParam class'
self.dataset.check()
self.dataset = self.dataset.to_dict()  # DatasetParam is serialized to a dict config here
self._set_updated('dataset', True)
if hasattr(self, 'model') and self.model is not None and not self._updated['model']:
if isinstance(self.model, Sequential):
self.nn_define = self.model.get_network_config()
elif isinstance(self.model, CustModel):
self.model = Sequential(self.model)
self.nn_define = self.model.get_network_config()
else:
raise RuntimeError('Model must be a fate-torch Sequential, but got {} '
'\n do remember to call fate_torch_hook():'
'\n import torch as t'
'\n fate_torch_hook(t)'.format(
type(self.model)))
self._set_updated('model', True)
if hasattr(self, 'optimizer') and self.optimizer is not None and not self._updated['optimizer']:
if not isinstance(self.optimizer, base.FateTorchOptimizer):
raise ValueError('please pass FateTorchOptimizer instances to Homo-nn components, got {}. '
'do remember to use fate_torch_hook():\n'
' import torch as t\n'
' fate_torch_hook(t)'.format(type(self.optimizer)))
optimizer_config = self.optimizer.to_dict()
self.optimizer = optimizer_config
self._set_updated('optimizer', True)
if hasattr(self, 'loss') and self.loss is not None and not self._updated['loss']:
if isinstance(self.loss, base.FateTorchLoss):
loss_config = self.loss.to_dict()
elif isinstance(self.loss, type) and issubclass(self.loss, base.FateTorchLoss):  # a loss class, not an instance
loss_config = self.loss().to_dict()
else:
raise ValueError('unable to parse loss function {}, loss must be an instance '
'of a FateTorchLoss subclass or a FateTorchLoss subclass itself, '
'do remember to use fate_torch_hook()'.format(self.loss))
self.loss = loss_config
self._set_updated('loss', True)
def component_param(self, **kwargs):
# reset parameters
used_attr = set()
setattr(self, 'model', None)
if 'model' in kwargs:
self.model = kwargs['model']
kwargs.pop('model')
self._set_updated('model', False)
for attr in self._component_parameter_keywords:
if attr in kwargs:
setattr(self, attr, kwargs[attr])
self._set_updated(attr, False)
used_attr.add(attr)
self._check_parameters()  # check and convert homo-nn parameters
not_use_attr = set(kwargs.keys()).difference(used_attr)
for attr in not_use_attr:
LOGGER.warning(f"key {attr}, value {kwargs[attr]} not use")
self._role_parameter_keywords |= used_attr
for attr in self.__dict__:
if attr not in self._component_parameter_keywords:
continue
else:
self._component_param[attr] = getattr(self, attr)
def __getstate__(self):
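# drop the live model object when pickling; its structure has already been
# serialized into nn_define by _check_parameters()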
state = dict(self.__dict__)
if "model" in state:
del state["model"]
return state
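# Usage sketch (illustrative only, kept as comments so nothing runs on import);
# assumes fate_torch_hook has patched torch, and the component name and parameter
# values below are hypothetical:
#
#   import torch as t
#   from pipeline import fate_torch_hook
#   fate_torch_hook(t)
#   model = t.nn.Sequential(t.nn.Linear(30, 1), t.nn.Sigmoid())
#   homo_nn_0 = HomoNN(name='homo_nn_0',
#                      model=model,
#                      loss=t.nn.BCELoss(),
#                      optimizer=t.optim.Adam(model.parameters(), lr=0.01),
#                      trainer=TrainerParam(trainer_name='fedavg_trainer', epochs=10),
#                      dataset=DatasetParam(dataset_name='table'))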
| 8,452 | 41.691919 | 136 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/homo_secureboost.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.boosting_param import HomoSecureBoostParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HomoSecureBoost(FateComponent, HomoSecureBoostParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HomoSecureBoostParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HomoSecureBoost"
| 1,349 | 35.486486 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/homo_onehot.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.homo_onehot_encoder_param import HomoOneHotParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HomoOneHotEncoder(FateComponent, HomoOneHotParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HomoOneHotParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name)
self._module_name = "HomoOneHotEncoder"
| 1,330 | 34.972973 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/positive_unlabeled.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.positive_unlabeled_param import PositiveUnlabeledParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class PositiveUnlabeled(FateComponent, PositiveUnlabeledParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
PositiveUnlabeledParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False)
self._module_name = "PositiveUnlabeled"
| 1,339 | 36.222222 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_pearson.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.pearson_param import PearsonParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroPearson(FateComponent, PearsonParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
PearsonParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name)
self._module_name = "HeteroPearson"
| 1,301 | 34.189189 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/data_statistics.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.statistics_param import StatisticsParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class DataStatistics(FateComponent, StatisticsParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
StatisticsParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=True, has_data=False)
self._module_name = "DataStatistics"
| 1,346 | 35.405405 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/homo_lr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.logistic_regression_param import HomoLogisticParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HomoLR(FateComponent, HomoLogisticParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HomoLogisticParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HomoLR"
| 1,333 | 35.054054 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_feature_binning.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.feature_binning_param import HeteroFeatureBinningParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroFeatureBinning(FateComponent, HeteroFeatureBinningParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HeteroFeatureBinningParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name)
self._module_name = "HeteroFeatureBinning"
| 1,362 | 35.837838 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_sshe_lr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.hetero_sshe_lr_param import HeteroSSHELRParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroSSHELR(FateComponent, HeteroSSHELRParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HeteroSSHELRParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HeteroSSHELR"
| 1,340 | 35.243243 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_fast_secureboost.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.boosting_param import HeteroSecureBoostParam
from pipeline.component.component_base import FateComponent
from pipeline.constant import ProviderType
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroFastSecureBoost(FateComponent, HeteroSecureBoostParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
HeteroSecureBoostParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "HeteroFastSecureBoost"
self._source_provider = ProviderType.FATE
| 1,460 | 35.525 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/feature_imputation.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.feature_imputation_param import FeatureImputationParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class FeatureImputation(FateComponent, FeatureImputationParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
FeatureImputationParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=True)
self._module_name = "FeatureImputation"
| 1,365 | 35.918919 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/local_baseline.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.local_baseline_param import LocalBaselineParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class LocalBaseline(FateComponent, LocalBaselineParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
LocalBaselineParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name)
self._module_name = "LocalBaseline"
| 1,345 | 35.378378 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/one_hot_encoder.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.onehot_encoder_param import OneHotEncoderParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class OneHotEncoder(FateComponent, OneHotEncoderParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
OneHotEncoderParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name)
self._module_name = "OneHotEncoder"
| 1,326 | 34.864865 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/sample_weight.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.sample_weight_param import SampleWeightParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
class SampleWeight(FateComponent, SampleWeightParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
new_kwargs = self.erase_component_base_param(**kwargs)
SampleWeightParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, data_type='single', has_model=True)
self._module_name = "SampleWeight"
| 1,232 | 35.264706 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/label_transform.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.label_transform_param import LabelTransformParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class LabelTransform(FateComponent, LabelTransformParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
LabelTransformParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, data_type='single', has_model=True)
self._module_name = "LabelTransform"
| 1,367 | 35.972973 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_kmeans.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.hetero_kmeans_param import KmeansParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroKmeans(FateComponent, KmeansParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
KmeansParam.__init__(self, **new_kwargs)
self.input = Input(self.name, data_type="multi")
self.output = Output(self.name, data_type="no_limit", output_unit=2)
self._module_name = "HeteroKmeans"
| 1,358 | 35.72973 | 76 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/component_base.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from pipeline.constant import ProviderType
from pipeline.utils.logger import LOGGER
class Component(object):
__instance = {}
def __init__(self, *args, **kwargs):
LOGGER.debug(f"kwargs: {kwargs}")
if "name" in kwargs:
self._component_name = kwargs["name"]
self.__party_instance = {}
self._component_parameter_keywords = set(kwargs.keys())
self._role_parameter_keywords = set()
self._module_name = None
self._component_param = {}
self._provider = None # deprecated, to compatible with fate-1.7.0
self._source_provider = None
self._provider_version = None
def __new__(cls, *args, **kwargs):
if cls.__name__.lower() not in cls.__instance:
cls.__instance[cls.__name__.lower()] = 0
new_cls = object.__new__(cls)
new_cls.set_name(cls.__instance[cls.__name__.lower()])
cls.__instance[cls.__name__.lower()] += 1
return new_cls
def set_name(self, idx):
self._component_name = self.__class__.__name__.lower() + "_" + str(idx)
LOGGER.debug(f"enter set name func {self._component_name}")
def reset_name(self, name):
self._component_name = name
@property
def provider(self):
return self._provider
@provider.setter
def provider(self, provider):
self._provider = provider
@property
def source_provider(self):
return self._source_provider
@property
def provider_version(self):
return self._provider_version
@provider_version.setter
def provider_version(self, provider_version):
self._provider_version = provider_version
def get_party_instance(self, role="guest", party_id=None) -> 'Component':
if role not in ["guest", "host", "arbiter"]:
raise ValueError("Role should be one of guest/host/arbiter")
if party_id is not None:
if isinstance(party_id, list):
for _id in party_id:
if not isinstance(_id, int) or _id <= 0:
raise ValueError("party id should be positive integer")
elif not isinstance(party_id, int) or party_id <= 0:
raise ValueError("party id should be positive integer")
if role not in self.__party_instance:
self.__party_instance[role] = {}
self.__party_instance[role]["party"] = {}
party_key = party_id
if isinstance(party_id, list):
party_key = "|".join(map(str, party_id))
if party_key not in self.__party_instance[role]["party"]:
self.__party_instance[role]["party"][party_key] = None
if not self.__party_instance[role]["party"][party_key]:
party_instance = copy.deepcopy(self)
self._decrease_instance_count()
self.__party_instance[role]["party"][party_key] = party_instance
LOGGER.debug(f"enter init")
return self.__party_instance[role]["party"][party_key]
@classmethod
def _decrease_instance_count(cls):
cls.__instance[cls.__name__.lower()] -= 1
LOGGER.debug(f"decrease instance count")
@property
def name(self):
return self._component_name
@property
def module(self):
return self._module_name
def component_param(self, **kwargs):
new_kwargs = copy.deepcopy(kwargs)
for attr in self.__dict__:
if attr in new_kwargs:
setattr(self, attr, new_kwargs[attr])
self._component_param[attr] = new_kwargs[attr]
del new_kwargs[attr]
for attr in new_kwargs:
LOGGER.warning(f"key {attr}, value {new_kwargs[attr]} not use")
self._role_parameter_keywords |= set(kwargs.keys())
def get_component_param(self):
return self._component_param
def get_common_param_conf(self):
"""
exclude_attr = ["_component_name", "__party_instance",
"_component_parameter_keywords", "_role_parameter_keywords"]
"""
common_param_conf = {}
for attr in self.__dict__:
if attr.startswith("_"):
continue
if attr in self._role_parameter_keywords:
continue
if attr not in self._component_parameter_keywords:
continue
common_param_conf[attr] = getattr(self, attr)
return common_param_conf
def get_role_param_conf(self, roles=None):
role_param_conf = {}
if not self.__party_instance:
return role_param_conf
for role in self.__party_instance:
role_param_conf[role] = {}
if None in self.__party_instance[role]["party"]:
role_all_party_conf = self.__party_instance[role]["party"][None].get_component_param()
if "all" not in role_param_conf:
role_param_conf[role]["all"] = {}
role_param_conf[role]["all"][self._component_name] = role_all_party_conf
valid_partyids = roles.get(role)
for party_id in self.__party_instance[role]["party"]:
if not party_id:
continue
if isinstance(party_id, int):
party_key = str(valid_partyids.index(party_id))
else:
party_list = list(map(int, party_id.split("|", -1)))
party_key = "|".join(map(str, [valid_partyids.index(party) for party in party_list]))
party_inst = self.__party_instance[role]["party"][party_id]
if party_key not in role_param_conf:
role_param_conf[role][party_key] = {}
role_param_conf[role][party_key][self._component_name] = party_inst.get_component_param()
# print ("role_param_conf {}".format(role_param_conf))
LOGGER.debug(f"role_param_conf {role_param_conf}")
return role_param_conf
@classmethod
def erase_component_base_param(cls, **kwargs):
new_kwargs = copy.deepcopy(kwargs)
if "name" in new_kwargs:
del new_kwargs["name"]
return new_kwargs
def get_config(self, *args, **kwargs):
"""need to implement"""
roles = kwargs["roles"]
common_param_conf = self.get_common_param_conf()
role_param_conf = self.get_role_param_conf(roles)
conf = {}
if common_param_conf:
conf['common'] = {self._component_name: common_param_conf}
if role_param_conf:
conf["role"] = role_param_conf
return conf
def _get_all_party_instance(self):
return self.__party_instance
class FateComponent(Component):
def __init__(self, *args, **kwargs):
super(FateComponent, self).__init__(*args, **kwargs)
self._source_provider = ProviderType.FATE
class FateFlowComponent(Component):
def __init__(self, *args, **kwargs):
super(FateFlowComponent, self).__init__(*args, **kwargs)
self._source_provider = ProviderType.FATE_FLOW
class FateSqlComponent(Component):
def __init__(self, *args, **kwargs):
super(FateSqlComponent, self).__init__(*args, **kwargs)
self._source_provider = ProviderType.FATE_SQL
class PlaceHolder(object):
pass
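# Usage sketch (illustrative only): role- or party-specific parameters are set
# through get_party_instance(); the component and party id below are hypothetical:
#
#   data_transform_0 = DataTransform(name='data_transform_0', with_label=True)
#   data_transform_0.get_party_instance(
#       role='host', party_id=10000).component_param(with_label=False)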
| 7,979 | 31.839506 | 105 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_data_split.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.data_split_param import DataSplitParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroDataSplit(FateComponent, DataSplitParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
DataSplitParam.__init__(self, **new_kwargs)
self.input = Input(self.name, )
self.output = Output(self.name, has_model=False, data_type="multi")
self._module_name = "HeteroDataSplit"
| 1,351 | 36.555556 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/__init__.py
|
from pipeline.component.column_expand import ColumnExpand
from pipeline.component.data_statistics import DataStatistics
from pipeline.component.dataio import DataIO
from pipeline.component.data_transform import DataTransform
from pipeline.component.evaluation import Evaluation
from pipeline.component.hetero_data_split import HeteroDataSplit
from pipeline.component.hetero_fast_secureboost import HeteroFastSecureBoost
from pipeline.component.hetero_feature_binning import HeteroFeatureBinning
from pipeline.component.hetero_feature_selection import HeteroFeatureSelection
from pipeline.component.hetero_linr import HeteroLinR
from pipeline.component.hetero_lr import HeteroLR
from pipeline.component.hetero_pearson import HeteroPearson
from pipeline.component.hetero_poisson import HeteroPoisson
from pipeline.component.hetero_secureboost import HeteroSecureBoost
from pipeline.component.homo_data_split import HomoDataSplit
from pipeline.component.homo_lr import HomoLR
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.homo_feature_binning import HomoFeatureBinning
from pipeline.component.intersection import Intersection
from pipeline.component.local_baseline import LocalBaseline
from pipeline.component.one_hot_encoder import OneHotEncoder
from pipeline.component.psi import PSI
from pipeline.component.reader import Reader
from pipeline.component.scorecard import Scorecard
from pipeline.component.sampler import FederatedSample
from pipeline.component.scale import FeatureScale
from pipeline.component.union import Union
from pipeline.component.feldman_verifiable_sum import FeldmanVerifiableSum
from pipeline.component.sample_weight import SampleWeight
from pipeline.component.feature_imputation import FeatureImputation
from pipeline.component.label_transform import LabelTransform
from pipeline.component.hetero_sshe_lr import HeteroSSHELR
from pipeline.component.secure_information_retrieval import SecureInformationRetrieval
from pipeline.component.cache_loader import CacheLoader
from pipeline.component.model_loader import ModelLoader
from pipeline.component.hetero_kmeans import HeteroKmeans
from pipeline.component.homo_onehot import HomoOneHotEncoder
from pipeline.component.hetero_sshe_linr import HeteroSSHELinR
from pipeline.component.positive_unlabeled import PositiveUnlabeled
try:
import torch
from pipeline.component.homo_nn import HomoNN
from pipeline.component.hetero_ftl import HeteroFTL
from pipeline.component.hetero_nn import HeteroNN
except BaseException:
print('Import torch failed; this may be caused by torch not being installed. HomoNN, HeteroNN and HeteroFTL are not available')
HomoNN, HeteroNN, HeteroFTL = None, None, None
__all__ = [
"DataStatistics",
"DataIO",
"Evaluation",
"HeteroDataSplit",
"HeteroFastSecureBoost",
"HeteroFeatureBinning",
"HeteroFeatureSelection",
"HeteroFTL",
"HeteroLinR",
"HeteroLR",
"HeteroNN",
"HeteroPearson",
"HeteroPoisson",
"HeteroSecureBoost",
"HomoDataSplit",
"HomoLR",
"HomoNN",
"HomoSecureBoost",
"HomoFeatureBinning",
"Intersection",
"LocalBaseline",
"OneHotEncoder",
"PSI",
"Reader",
"Scorecard",
"FederatedSample",
"FeatureScale",
"Union",
"ColumnExpand",
"FeldmanVerifiableSum",
"SampleWeight",
"DataTransform",
"FeatureImputation",
"LabelTransform",
"SecureInformationRetrieval",
"CacheLoader",
"ModelLoader",
"HeteroSSHELR",
"HeteroKmeans",
"HomoOneHotEncoder",
"HeteroSSHELinR",
"PositiveUnlabeled"]
| 3,628 | 37.606383 | 141 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/evaluation.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.evaluation_param import EvaluateParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class Evaluation(FateComponent, EvaluateParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
EvaluateParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, has_model=False)
self._module_name = "Evaluation"
| 1,317 | 35.611111 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/data_transform.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.data_transform_param import DataTransformParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class DataTransform(FateComponent, DataTransformParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
DataTransformParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name, data_type='single')
self._module_name = "DataTransform"
| 1,345 | 35.378378 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_feature_selection.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.feature_selection_param import FeatureSelectionParam
from pipeline.component.component_base import FateComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER
class HeteroFeatureSelection(FateComponent, FeatureSelectionParam):
def __init__(self, **kwargs):
FateComponent.__init__(self, **kwargs)
LOGGER.debug(f"{self.name} component created")
new_kwargs = self.erase_component_base_param(**kwargs)
FeatureSelectionParam.__init__(self, **new_kwargs)
self.input = Input(self.name)
self.output = Output(self.name)
self._module_name = "HeteroFeatureSelection"
| 1,356 | 35.675676 | 75 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/hetero_nn.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.tools import extract_explicit_parameter
from pipeline.component.nn.interface import DatasetParam

try:
    from pipeline.component.component_base import FateComponent
    from pipeline.component.nn.models.sequantial import Sequential
    from pipeline.component.nn.backend.torch.interactive import InteractiveLayer
except Exception as e:
    print(e)
    print('Importing NN components in the HeteroNN module failed. '
          'This is likely because torch is not installed; '
          'please install torch to use this module.')


class HeteroNN(FateComponent):

@extract_explicit_parameter
def __init__(self, task_type="classification", epochs=None, batch_size=-1, early_stop="diff",
tol=1e-5, encrypt_param=None, predict_param=None, cv_param=None, interactive_layer_lr=0.1,
validation_freqs=None, early_stopping_rounds=None, use_first_metric_only=None,
floating_point_precision=23, selector_param=None, seed=100,
dataset: DatasetParam = DatasetParam(dataset_name='table'), **kwargs
):
"""
Parameters used for Hetero Neural Network.
Parameters
----------
        task_type: str, task type of the hetero nn model, one of 'classification', 'regression'.
        interactive_layer_lr: float, the learning rate of the interactive layer.
        epochs: int, the maximum number of training epochs.
        batch_size : int, batch size when updating the model.
            -1 means use all data in one batch, i.e. do not use the mini-batch strategy.
            Defaults to -1.
        early_stop : str, accepts 'diff' only in this version, default: 'diff'.
            Method used to judge whether training has converged:
                a) diff: use the difference of loss between two iterations to judge convergence.
        tol: float, tolerance value for early stopping.
        floating_point_precision: None or integer. If not None, use floating_point_precision bits
            to speed up calculation, e.g. convert a value x to round(x * 2**floating_point_precision)
            during Paillier operations and divide the result by 2**floating_point_precision at the end.
        callback_param: dict, CallbackParam, see federatedml/param/callback_param.
        encrypt_param: dict, see federatedml/param/encrypt_param.
        dataset: DatasetParam, interface defining the dataset parameters.
        early_stopping_rounds: integer larger than 0.
            Will stop training if one metric on the validation data does not improve in the last
            early_stopping_rounds rounds; validation_freqs must be set, and early stopping is
            checked at every validation epoch.
        validation_freqs: None or positive integer or container object in python.
            Whether to run validation during training.
            If None, no validation is run during training;
            if a positive integer, validation runs every validation_freqs epochs;
            if a container object, validation runs when the current epoch is in the container,
            e.g. validation_freqs = [10, 15] validates when the epoch equals 10 or 15.
            Default: None
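
        Examples
        --------
        A minimal sketch (layer sizes, party ids and InteractiveLayer arguments are
        assumptions; fate_torch_hook(t) is assumed to have been applied to torch as t):

        >>> hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=10, batch_size=128)
        >>> guest_nn_0 = hetero_nn_0.get_party_instance(role="guest", party_id=9999)
        >>> guest_nn_0.add_bottom_model(t.nn.Sequential(t.nn.Linear(10, 8), t.nn.ReLU()))
        >>> guest_nn_0.add_top_model(t.nn.Sequential(t.nn.Linear(8, 1), t.nn.Sigmoid()))
        >>> host_nn_0 = hetero_nn_0.get_party_instance(role="host", party_id=10000)
        >>> host_nn_0.add_bottom_model(t.nn.Sequential(t.nn.Linear(20, 8), t.nn.ReLU()))
        >>> hetero_nn_0.set_interactive_layer(
        ...     t.nn.InteractiveLayer(out_dim=8, guest_dim=8, host_dim=8, host_num=1))
        >>> hetero_nn_0.compile(t.optim.Adam(lr=0.01), loss=t.nn.BCELoss())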
"""
        explicit_parameters = kwargs["explict_parameters"]
        explicit_parameters["optimizer"] = None
        explicit_parameters["bottom_nn_define"] = None
        explicit_parameters["top_nn_define"] = None
        explicit_parameters["interactive_layer_define"] = None
        explicit_parameters["loss"] = None
        FateComponent.__init__(self, **explicit_parameters)

        if "name" in explicit_parameters:
            del explicit_parameters["name"]
        for param_key, param_value in explicit_parameters.items():
            setattr(self, param_key, param_value)

        self.input = Input(self.name, data_type="multi")
        self.output = Output(self.name, data_type='single')
        self._module_name = "HeteroNN"
        self.optimizer = None
        self.bottom_nn_define = None
        self.top_nn_define = None
        self.interactive_layer_define = None

        # model holders
        self._bottom_nn_model = Sequential()
        self._interactive_layer = Sequential()
        self._top_nn_model = Sequential()

        # role
        self._role = 'common'  # common/guest/host

        if hasattr(self, 'dataset'):
            assert isinstance(self.dataset, DatasetParam), 'dataset must be a DatasetParam class'
            self.dataset.check()
            self.dataset: DatasetParam = self.dataset.to_dict()

    def set_role(self, role):
        self._role = role

    def get_party_instance(self, role="guest", party_id=None) -> 'Component':
        inst = super().get_party_instance(role, party_id)
        inst.set_role(role)
        return inst

    def add_dataset(self, dataset_param: DatasetParam):
        assert isinstance(dataset_param, DatasetParam), 'dataset must be a DatasetParam class'
        dataset_param.check()
        self.dataset: DatasetParam = dataset_param.to_dict()
        self._component_parameter_keywords.add("dataset")
        self._component_param["dataset"] = self.dataset

    def add_bottom_model(self, model):
        if not hasattr(self, "_bottom_nn_model"):
            setattr(self, "_bottom_nn_model", Sequential())
        self._bottom_nn_model.add(model)

    def set_interactive_layer(self, layer):
        if self._role == 'common' or self._role == 'guest':
            if not hasattr(self, "_interactive_layer"):
                setattr(self, "_interactive_layer", Sequential())

            assert isinstance(layer, InteractiveLayer), 'You need to add an InteractiveLayer instance.\n' \
                                                        'You can access InteractiveLayer via ' \
                                                        't.nn.InteractiveLayer after fate_torch_hook(t), ' \
                                                        'or from pipeline.component.nn.backend.torch.interactive ' \
                                                        'import InteractiveLayer'
            self._interactive_layer.add(layer)
        else:
            raise RuntimeError(
                'the interactive layer can only be set on a "common" or "guest" hetero nn component')

    def add_top_model(self, model):
        if self._role == 'host':
            raise RuntimeError('the top model is not allowed to be set on the host side')
        if not hasattr(self, "_top_nn_model"):
            setattr(self, "_top_nn_model", Sequential())
        self._top_nn_model.add(model)

    def _set_optimizer(self, opt):
        assert hasattr(opt, 'to_dict'), 'opt has no to_dict() method; remember to call fate_torch_hook(t)'
        self.optimizer = opt.to_dict()

    def _set_loss(self, loss):
        assert hasattr(loss, 'to_dict'), 'loss has no to_dict() method; remember to call fate_torch_hook(t)'
        loss_conf = loss.to_dict()
        setattr(self, "loss", loss_conf)

def compile(self, optimizer, loss):
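        # Persist the optimizer/loss configs, then collect the network definitions
        # held by this instance and by every per-party instance into the component
        # parameters that are submitted with the job configuration.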
self._set_optimizer(optimizer)
self._set_loss(loss)
self._compile_common_network_config()
self._compile_role_network_config()
self._compile_interactive_layer()

    def _compile_interactive_layer(self):
        if hasattr(self, "_interactive_layer") and not self._interactive_layer.is_empty():
            self.interactive_layer_define = self._interactive_layer.get_network_config()
            self._component_param["interactive_layer_define"] = self.interactive_layer_define

    def _compile_common_network_config(self):
        if hasattr(self, "_bottom_nn_model") and not self._bottom_nn_model.is_empty():
            self.bottom_nn_define = self._bottom_nn_model.get_network_config()
            self._component_param["bottom_nn_define"] = self.bottom_nn_define

        if hasattr(self, "_top_nn_model") and not self._top_nn_model.is_empty():
            self.top_nn_define = self._top_nn_model.get_network_config()
            self._component_param["top_nn_define"] = self.top_nn_define

    def _compile_role_network_config(self):
        all_party_instance = self._get_all_party_instance()
        for role in all_party_instance:
            for party in all_party_instance[role]["party"].keys():
                all_party_instance[role]["party"][party]._compile_common_network_config()
                all_party_instance[role]["party"][party]._compile_interactive_layer()

    def get_bottom_model(self):
        if hasattr(self, "_bottom_nn_model") and not getattr(self, "_bottom_nn_model").is_empty():
            return getattr(self, "_bottom_nn_model").get_model()

        bottom_models = {}
        all_party_instance = self._get_all_party_instance()
        for role in all_party_instance.keys():
            for party in all_party_instance[role]["party"].keys():
                party_inst = all_party_instance[role]["party"][party]
                if party_inst is not None:
                    btn_model = party_inst.get_bottom_model()
                    if btn_model is not None:
                        bottom_models[party] = btn_model

        return bottom_models if len(bottom_models) > 0 else None

    def get_top_model(self):
        if hasattr(self, "_top_nn_model") and not getattr(self, "_top_nn_model").is_empty():
            return getattr(self, "_top_nn_model").get_model()

        models = {}
        all_party_instance = self._get_all_party_instance()
        for role in all_party_instance.keys():
            for party in all_party_instance[role]["party"].keys():
                party_inst = all_party_instance[role]["party"][party]
                if party_inst is not None:
                    top_model = party_inst.get_top_model()
                    if top_model is not None:
                        models[party] = top_model

        return models if len(models) > 0 else None

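    # The Sequential model holders wrap live torch modules, which may not pickle
    # cleanly, so they are stripped from the state before serialization.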
def __getstate__(self):
state = dict(self.__dict__)
if "_bottom_nn_model" in state:
del state["_bottom_nn_model"]
if "_interactive_layer" in state:
del state["_interactive_layer"]
if "_top_nn_model" in state:
del state["_top_nn_model"]
return state
| 11,564 | 43.141221 | 134 |
py
|
FATE
|
FATE-master/python/fate_client/pipeline/component/cache_loader.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pipeline.param.cache_loader_param import CacheLoaderParam
from pipeline.component.component_base import FateFlowComponent
from pipeline.interface import Input
from pipeline.interface import Output
from pipeline.utils.logger import LOGGER


class CacheLoader(FateFlowComponent, CacheLoaderParam):
    def __init__(self, **kwargs):
        FateFlowComponent.__init__(self, **kwargs)
        LOGGER.debug(f"{self.name} component created")

        new_kwargs = self.erase_component_base_param(**kwargs)
        CacheLoaderParam.__init__(self, **new_kwargs)

        self.input = Input(self.name)
        self.output = Output(self.name, has_model=False, has_cache=True, has_data=False)
        self._module_name = "CacheLoader"
| 1,375 | 36.189189 | 88 |
py
|