repo | file | code | file_length | avg_line_length | max_line_length | extension_type
---|---|---|---|---|---|---|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-warm-start.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
homo_secureboost_1 = HomoSecureBoost(name="homo_secureboost_1",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
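    # reuse the transform model fitted by data_transform_0 so the validation
    # set is preprocessed consistently with the training set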
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
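    # warm start: homo_secureboost_1 receives homo_secureboost_0's trained
    # model as input and continues boosting from those trees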
pipeline.add_component(homo_secureboost_1, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data),
model=Model(model=homo_secureboost_0.output.model))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,385 | 43.883333 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-binary.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,564 | 41.663551 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-binary-with-missing-value.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "ionosphere_scale_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "ionosphere_scale_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "ionosphere_scale_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "ionosphere_scale_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_name="label")
data_transform_0.get_party_instance(
role='host',
party_id=host).component_param(
with_label=True,
output_format="dense",
label_name="label")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_name="label")
data_transform_1.get_party_instance(
role='host',
party_id=host).component_param(
with_label=True,
output_format="dense",
label_name="label")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
use_missing=True,
tree_param={
"max_depth": 3,
"use_missing": True
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,886 | 39.38843 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-regression-cv.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0 = DataTransform(name="data_transform_0")
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_type="float")
data_transform_0.get_party_instance(
role='host',
party_id=host).component_param(
with_label=True,
output_format="dense",
label_type="float")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='regression',
objective_param={"objective": "lse"},
tree_param={
"max_depth": 3
},
cv_param={
"need_cv": True,
"shuffle": False,
"n_splits": 5
}
)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,472 | 36.344086 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-binary-multi-host.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host_0 = parties.host[0]
host_1 = parties.host[1]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "default_credit_guest", "namespace": f"experiment{namespace}"}
host_train_data_0 = {"name": "default_credit_host1", "namespace": f"experiment{namespace}"}
host_train_data_1 = {"name": "default_credit_host2", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "default_credit_test", "namespace": f"experiment{namespace}"}
host_validate_data_0 = {"name": "default_credit_test", "namespace": f"experiment{namespace}"}
host_validate_data_1 = {"name": "default_credit_test", "namespace": f"experiment{namespace}"}
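    # two hosts join this job, so set_roles below receives a list of host party ids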
pipeline = PipeLine().set_initiator(
role='guest', party_id=guest).set_roles(
guest=guest, host=[
host_0, host_1], arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host_0).component_param(table=host_train_data_0)
reader_0.get_party_instance(role='host', party_id=host_1).component_param(table=host_train_data_1)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host_0).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host_1).component_param(
with_label=True, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host_0).component_param(table=host_validate_data_0)
reader_1.get_party_instance(role='host', party_id=host_1).component_param(table=host_validate_data_1)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host_0).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host_1).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,359 | 42.934426 | 119 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-regression.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "student_homo_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "student_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "student_homo_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "student_homo_test", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_type="float")
data_transform_0.get_party_instance(
role='host',
party_id=host).component_param(
with_label=True,
output_format="dense",
label_type="float")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_type="float")
data_transform_1.get_party_instance(
role='host',
party_id=host).component_param(
with_label=True,
output_format="dense",
label_type="float")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='regression',
objective_param={"objective": "lse"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='regression')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,734 | 38.789916 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-multi.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "vehicle_scale_homo_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "vehicle_scale_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_homo_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "vehicle_scale_homo_test", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='multi')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,591 | 41.915888 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-multi-cv.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "vehicle_scale_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_homo_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0 = DataTransform(name="data_transform_0")
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
cv_param={
"need_cv": True,
"shuffle": False,
"n_splits": 5
}
)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,412 | 38.229885 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-binary-with-predict.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
pipeline.fit()
# predict
# deploy required components
pipeline.deploy_component([data_transform_0, homo_secureboost_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_1)
# add selected components from train pipeline onto predict pipeline
# specify data source
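    # predict_input wires reader_1's output into the deployed
    # data_transform_0's input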
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_1.output.data}))
# run predict model
    predict_pipeline.predict()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,130 | 40.715447 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-binary-with-memory-backend.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
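    # same as the plain binary example except for backend="memory", which
    # switches homo SBT to its memory-based computing backend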
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
validation_freqs=1,
backend="memory"
)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
validate_data=data_transform_1.output.data
))
pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,623 | 41.814815 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/__init__.py
| 0 | 0 | 0 |
py
|
|
FATE
|
FATE-master/examples/pipeline/homo_sbt/pipeline-homo-sbt-binary-cv.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
data_transform_0 = DataTransform(name="data_transform_0")
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
num_trees=3,
task_type='classification',
objective_param={"objective": "cross_entropy"},
tree_param={
"max_depth": 3
},
cv_param={
"need_cv": True,
"shuffle": False,
"n_splits": 5
}
)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data))
pipeline.compile()
    pipeline.fit()


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,398 | 38.068966 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/scorecard/pipeline-scorecard.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import Scorecard
from pipeline.component import DataTransform
from pipeline.component import HeteroLR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role="host", party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role="guest", party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role="host", party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0", intersect_method="rsa",
sync_intersect_ids=True, only_output_key=False)
param = {
"penalty": "L2",
"optimizer": "nesterov_momentum_sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 5,
"early_stop": "weight_diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "random_uniform"
},
"sqn_param": {
"update_interval_L": 3,
"memory_M": 5,
"sample_size": 5000,
"random_seed": None
}
}
hetero_lr_0 = HeteroLR(name="hetero_lr_0", **param)
# define Scorecard component
scorecard_0 = Scorecard(name="scorecard_0")
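    # the credit method maps hetero-LR predicted probabilities to credit
    # scores on the guest side: offset sets the base score, factor and
    # factor_base scale the log-odds, and upper_limit_ratio /
    # lower_limit_value bound the final score; the host skips this component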
scorecard_0.get_party_instance(role="guest", party_id=guest).component_param(need_run=True,
method="credit",
offset=500,
factor=20,
factor_base=2,
upper_limit_ratio=3,
lower_limit_value=0)
scorecard_0.get_party_instance(role="host", party_id=host).component_param(need_run=False)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
# set data input sources of intersection components
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_lr_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(scorecard_0, data=Data(data=hetero_lr_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
    # print(pipeline.get_component("scorecard_0").get_summary())


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,283 | 39.030303 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_stepwise/pipeline-stepwise-lr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroLR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_mini_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_mini_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
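    # stepwise_param enables backward stepwise selection: starting from the
    # full feature set, candidate models are scored by AIC while dropping
    # variables, for at most max_step steps and keeping at least nvmin features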
hetero_lr_0 = HeteroLR(name="hetero_lr_0", early_stop="diff", max_iter=5,
penalty="None", optimizer="sgd", tol=0.001,
batch_size=-1, learning_rate=0.15, decay=0.0,
decay_sqrt=False,
init_param={"init_method": "zeros"},
stepwise_param={"score_name": "AIC", "direction": "backward",
"need_stepwise": True, "max_step": 2, "nvmin": 2
})
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_lr_0, data=Data(train_data=intersection_0.output.data))
pipeline.compile()
pipeline.fit()
    # print(pipeline.get_component("hetero_lr_0").get_summary())


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,380 | 39.25 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_stepwise/pipeline-stepwise-poisson.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroPoisson
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "dvisits_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "dvisits_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_name="doctorco",
label_type="float",
)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
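    # direction="both" lets each stepwise step consider adding variables back
    # as well as dropping them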
hetero_poisson_0 = HeteroPoisson(name="hetero_poisson_0", early_stop="diff", max_iter=5,
penalty="None", optimizer="sgd", tol=0.001,
batch_size=-1, learning_rate=0.15, decay=0.0,
decay_sqrt=False, alpha=0.01,
init_param={"init_method": "zeros"},
stepwise_param={"score_name": "AIC", "direction": "both",
"need_stepwise": True, "max_step": 1, "nvmin": 2
})
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_poisson_0, data=Data(train_data=intersection_0.output.data))
pipeline.compile()
pipeline.fit()
    # print(pipeline.get_component("hetero_poisson_0").get_summary())


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,560 | 39.465909 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_stepwise/__init__.py
| 0 | 0 | 0 |
py
|
|
FATE
|
FATE-master/examples/pipeline/hetero_stepwise/pipeline-stepwise-linr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroLinR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "motor_hetero_mini_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "motor_hetero_mini_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(
role='guest',
party_id=guest).component_param(
with_label=True,
output_format="dense",
label_name="motor_speed",
label_type="float",
)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
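    # backward AIC stepwise as in the LR example, here applied to linear
    # regression with max_step=3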
hetero_linr_0 = HeteroLinR(name="hetero_linr_0", early_stop="diff", max_iter=3,
penalty="None", optimizer="sgd", tol=0.001,
alpha=0.01, batch_size=-1, learning_rate=0.15,
decay=0.0, decay_sqrt=False,
init_param={"init_method": "zeros"},
stepwise_param={"score_name": "AIC", "direction": "backward",
"need_stepwise": True, "max_step": 3, "nvmin": 2
})
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_linr_0, data=Data(train_data=intersection_0.output.data))
pipeline.compile()
pipeline.fit()
    # print(pipeline.get_component("hetero_linr_0").get_summary())


if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,513 | 38.931818 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-binary-selective-bp.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
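# fate_torch_hook patches torch so that the modules, optimizers and losses
# created below can be recorded into the pipeline job configuration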
fate_torch_hook(t)


def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=5,
interactive_layer_lr=0.01, batch_size=128, validation_freqs=1, task_type='classification',
selector_param={"method": "relative"})
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 4),
nn.ReLU(),
nn.Dropout(p=0.2)
)
guest_top = t.nn.Sequential(
nn.Linear(4, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 4),
nn.ReLU(),
nn.Dropout(p=0.2)
)
# use interactive layer after fate_torch_hook
# add drop out in this layer
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=4, host_dim=4, host_num=1, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
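
# Sketch of the idea behind selector_param={"method": "relative"}: selective
# backpropagation trains on the "hard" samples, e.g. those whose loss is large
# relative to recent losses. A hedged toy, not FATE's internal implementation:
import torch

def select_relative(losses: torch.Tensor, recent_max: float, ratio: float = 0.5) -> torch.Tensor:
    # indices of samples whose loss exceeds a fraction of the recent maximum loss
    return (losses > ratio * recent_max).nonzero(as_tuple=True)[0]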
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-binary-drop-out.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=10,
interactive_layer_lr=0.01, batch_size=-1, validation_freqs=1, task_type='classification')
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 4),
nn.ReLU(),
nn.Dropout(p=0.2)
)
guest_top = t.nn.Sequential(
nn.Linear(4, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 4),
nn.ReLU(),
nn.Dropout(p=0.2)
)
# use interactive layer after fate_torch_hook
# add drop out in this layer
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=4, host_dim=4, host_num=1, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
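
# What dropout=0.2 in the layers above means, in standard torch semantics
# (a hedged aside, not FATE internals):
import torch

def dropout_demo():
    x = torch.ones(4, 4)
    drop = torch.nn.Dropout(p=0.2)
    drop.train()    # training mode: zeroes ~20% of units, scales the rest by 1/0.8
    train_out = drop(x)
    drop.eval()     # eval mode: identity, nothing is dropped
    eval_out = drop(x)
    return train_out, eval_out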
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-with-early-stop.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_hetero_host", "namespace": "experiment"}
guest_val_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_val_data = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_val_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_val_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
data_transform_1 = DataTransform(name="data_transform_1")
data_transform_1.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_1.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
intersection_1 = Intersection(name="intersection_1")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=100,
interactive_layer_lr=0.01, batch_size=-1, task_type='classification',
callback_param={
"callbacks": ["EarlyStopping"],
"validation_freqs": 1,
"early_stopping_rounds": 2,
"use_first_metric_only": True,
"metrics": ["AUC"]
}
)
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 2),
nn.ReLU()
)
guest_top = t.nn.Sequential(
nn.Linear(2, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 2),
nn.ReLU()
)
# use interactive layer after fate_torch_hook
interactive_layer = t.nn.InteractiveLayer(out_dim=2, guest_dim=2, host_dim=2, host_num=1)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(reader_1)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data,
validate_data=intersection_1.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
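
# Generic pattern behind the EarlyStopping callback configured above (a sketch of
# the usual bookkeeping, not FATE's exact implementation): stop once the first
# metric (AUC here) has not improved for `early_stopping_rounds` validations.
def should_stop(auc_history, rounds=2):
    if len(auc_history) <= rounds:
        return False
    best_before = max(auc_history[:-rounds])
    return all(auc <= best_before for auc in auc_history[-rounds:])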
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-binary.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=20,
interactive_layer_lr=0.01, batch_size=-1, validation_freqs=1, task_type='classification')
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 4),
nn.ReLU()
)
guest_top = t.nn.Sequential(
nn.Linear(4, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 4),
nn.ReLU()
)
# use interactive layer after fate_torch_hook
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=4, host_dim=4, host_num=1, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
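
# Why the guest top model ends in Sigmoid: BCELoss expects probabilities in (0, 1).
# With raw logits, plain torch would use BCEWithLogitsLoss instead; the two agree
# (a hedged aside, independent of the job above):
import torch

def bce_equivalence_demo():
    logits = torch.tensor([0.3, -1.2])
    target = torch.tensor([1.0, 0.0])
    a = torch.nn.BCELoss()(torch.sigmoid(logits), target)
    b = torch.nn.BCEWithLogitsLoss()(logits, target)
    assert torch.allclose(a, b)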
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-binary-coae.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from collections import OrderedDict
import torch as t
from torch import nn
from torch import optim
from pipeline import fate_torch as ft
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
# important: patch torch modules so that Sequential models can be parsed by the pipeline
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
# define network structure in torch style #
# define guest model
Linear = nn.Linear
ReLU = nn.ReLU
guest_bottom_a = Linear(10, 8, True)
seq = nn.Sequential(
OrderedDict([
('layer_0', guest_bottom_a),
('relu_0', ReLU())
])
)
seq2 = nn.Sequential(
ReLU(),
Linear(8, 2, True),
        nn.Softmax(dim=1)  # CoAE in a binary task needs 2 output units with softmax probabilities
    )  # so the loss can be computed from fake labels and the 2-dim outputs
# define host model
seq3 = nn.Sequential(
Linear(20, 8, True),
ReLU()
)
# use interactive layer after fate_torch_hook
interactive_layer = t.nn.InteractiveLayer(out_dim=8, guest_dim=8, host_dim=8, host_num=1)
    # loss function
    ce_loss_fn = nn.CrossEntropyLoss()
    # optimizer; after fate_torch_hook, the optimizer can be created without parameters
opt: ft.optim.Adam = optim.Adam(lr=0.01)
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=30, floating_point_precision=None,
interactive_layer_lr=0.1, batch_size=-1, early_stop="diff",
coae_param={'enable': True, 'epoch': 100})
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
guest_nn_0.add_bottom_model(seq)
guest_nn_0.add_top_model(seq2)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
host_nn_0.add_bottom_model(seq3)
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(opt, loss=ce_loss_fn)
hetero_nn_1 = HeteroNN(name="hetero_nn_1")
evaluation_0 = Evaluation(name="evaluation_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(hetero_nn_1, data=Data(test_data=intersection_0.output.data),
model=Model(model=hetero_nn_0.output.model))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
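
# Because the top model above outputs softmax probabilities rather than logits,
# the cross-entropy it minimizes is the NLL of the log-probabilities. A hedged
# plain-torch reference, not FATE's CoAE machinery:
import torch

def ce_on_probs_demo():
    probs = torch.tensor([[0.9, 0.1]])    # one sample, 2-class probabilities
    label = torch.tensor([0])
    return torch.nn.NLLLoss()(torch.log(probs), label)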
# File: FATE-master/examples/pipeline/hetero_nn/__init__.py (empty file)

# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-binary-multi-host.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data_0 = {"name": "breast_hetero_host", "namespace": "experiment"}
host_train_data_1 = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=hosts)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=hosts[0]).component_param(table=host_train_data_0)
reader_0.get_party_instance(role='host', party_id=hosts[1]).component_param(table=host_train_data_1)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=hosts[0]).component_param(with_label=False)
data_transform_0.get_party_instance(role='host', party_id=hosts[1]).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=20,
interactive_layer_lr=0.01, batch_size=-1, validation_freqs=1, task_type='classification')
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0_host_0 = hetero_nn_0.get_party_instance(role='host', party_id=hosts[0])
host_nn_0_host_1 = hetero_nn_0.get_party_instance(role='host', party_id=hosts[1])
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 4),
nn.ReLU()
)
guest_top = t.nn.Sequential(
nn.Linear(4, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 4),
nn.ReLU()
)
# use interactive layer after fate_torch_hook
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=4, host_dim=4, host_num=2, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0_host_0.add_bottom_model(host_bottom)
host_nn_0_host_1.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
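
# With host_num=2, the interactive layer receives one bottom-model output per host
# in addition to the guest's. A toy sketch that merges per-party projections by
# summation (an illustrative assumption, not FATE's exact aggregation):
import torch

def merge_party_outputs(guest_out, host_outs):
    # guest_out: (batch, dim); host_outs: list of (batch, dim), one per host
    return guest_out + sum(host_outs)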
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-with-check-point.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=20,
interactive_layer_lr=0.01, batch_size=-1, task_type='classification',
callback_param={
"validation_freqs": 1,
"callbacks": ["ModelCheckpoint"],
"save_freq": 1},
)
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 4),
nn.ReLU()
)
guest_top = t.nn.Sequential(
nn.Linear(4, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 4),
nn.ReLU()
)
# use interactive layer after fate_torch_hook
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=4, host_dim=4, host_num=1, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
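
# The ModelCheckpoint callback above persists model state every `save_freq` epochs
# on the FATE side. A single-process plain-torch analogue (hedged sketch):
import torch

def save_ckpt(model, optimizer, epoch, path_tmpl="ckpt_epoch_{}.pt"):
    torch.save({"epoch": epoch,
                "model": model.state_dict(),
                "optim": optimizer.state_dict()}, path_tmpl.format(epoch))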
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-with-warm_start.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data, Model
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=20,
interactive_layer_lr=0.01, batch_size=-1, validation_freqs=1, task_type='classification')
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(10, 4),
nn.ReLU()
)
guest_top = t.nn.Sequential(
nn.Linear(4, 1),
nn.Sigmoid()
)
host_bottom = t.nn.Sequential(
nn.Linear(20, 4),
nn.ReLU()
)
# use interactive layer after fate_torch_hook
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=4, host_dim=4, host_num=1, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.01) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.BCELoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
hetero_nn_1 = HeteroNN(name="hetero_nn_1", epochs=10,
interactive_layer_lr=0.01, batch_size=128, validation_freqs=1, task_type='classification')
evaluation_0 = Evaluation(name='eval_0', eval_type='binary')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(hetero_nn_1, data=Data(train_data=intersection_0.output.data),
model=Model(model=hetero_nn_0.output.model))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
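
# Warm start above: hetero_nn_1 takes hetero_nn_0's trained model as input and
# trains for 10 more epochs. The plain-torch analogue (hedged sketch):
import torch

def warm_start(model, optimizer, ckpt_path):
    state = torch.load(ckpt_path)
    model.load_state_dict(state["model"])
    optimizer.load_state_dict(state["optim"])
    return state.get("epoch", 0)    # resume epoch counting from here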
# File: FATE-master/examples/pipeline/hetero_nn/pipeline-hetero-nn-train-multi.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.component.nn import DatasetParam
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": "experiment"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": "experiment"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=15,
interactive_layer_lr=0.05, batch_size=256, validation_freqs=1, task_type='classification',
selector_param={"method": "relative"})
guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
# define model
guest_bottom = t.nn.Sequential(
nn.Linear(9, 9),
nn.ReLU(),
)
guest_top = t.nn.Sequential(
nn.Linear(4, 4),
nn.Softmax(dim=1)
)
host_bottom = t.nn.Sequential(
nn.Linear(9, 9),
nn.ReLU(),
)
# use interactive layer after fate_torch_hook
# add drop out in this layer
interactive_layer = t.nn.InteractiveLayer(out_dim=4, guest_dim=9, host_dim=9, host_num=1, dropout=0.2)
guest_nn_0.add_top_model(guest_top)
guest_nn_0.add_bottom_model(guest_bottom)
host_nn_0.add_bottom_model(host_bottom)
optimizer = t.optim.Adam(lr=0.05) # you can initialize optimizer without parameters after fate_torch_hook
loss = t.nn.CrossEntropyLoss()
hetero_nn_0.set_interactive_layer(interactive_layer)
    # add dataset param: CrossEntropyLoss needs flattened labels of dtype long,
    # so configure the table dataset (federatedml/nn/dataset/table.py) accordingly
hetero_nn_0.add_dataset(DatasetParam(dataset_name='table', flatten_label=True, label_dtype='long'))
hetero_nn_0.compile(optimizer=optimizer, loss=loss)
evaluation_0 = Evaluation(name='eval_0', eval_type='multi')
# define components IO
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
pipeline.fit()
print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
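
# Why flatten_label=True and label_dtype='long' above: torch CrossEntropyLoss
# expects class indices of dtype long with shape (N,), not (N, 1). A hedged aside:
import torch

def ce_label_shape_demo():
    logits = torch.randn(4, 4)
    labels = torch.tensor([[0], [3], [1], [2]])    # (N, 1), as stored in the table
    return torch.nn.CrossEntropyLoss()(logits, labels.flatten().long())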
# File: FATE-master/examples/pipeline/match_id_test/pipeline-homo-lr-sample-weights.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import SampleWeight
from pipeline.component import Evaluation
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment_sid{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True,
with_label=True, output_format="dense") # start component numbering at 0
scale_0 = FeatureScale(name='scale_0')
sample_weight_0 = SampleWeight(name="sample_weight_0", class_weight={"0": 1, "1": 2})
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"decay": 1.0,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"encrypt_param": {
"method": "Paillier"
},
"cv_param": {
"n_splits": 5,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
evaluation_0 = Evaluation(name='evaluation_0')
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
# set data input sources of intersection components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(sample_weight_0, data=Data(data=scale_0.output.data))
pipeline.add_component(homo_lr_0, data=Data(train_data=sample_weight_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
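
# Effect of class_weight={"0": 1, "1": 2} above: each sample's loss contribution is
# scaled by its class weight, so positives count twice as much as negatives. A
# minimal sketch of weighted log-loss (illustration only, not FATE's code):
import math

def weighted_logloss(y_true, y_prob, weights={0: 1.0, 1: 2.0}):
    total = sum(weights[y] * -(y * math.log(p) + (1 - y) * math.log(1 - p))
                for y, p in zip(y_true, y_prob))
    return total / sum(weights[y] for y in y_true)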
# File: FATE-master/examples/pipeline/match_id_test/secure-information-retrieval.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import Reader
from pipeline.component import DataTransform
from pipeline.component import SecureInformationRetrieval
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_host", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment_sid{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role="host", party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="datatransform_0", with_match_id=True)
data_transform_0.get_party_instance(
role="guest", party_id=guest).component_param(
with_label=False, output_format="dense")
data_transform_0.get_party_instance(role="host", party_id=host).component_param(with_label=True)
param = {
"security_level": 0.5,
"oblivious_transfer_protocol": "OT_Hauck",
"commutative_encryption": "CommutativeEncryptionPohligHellman",
"non_committing_encryption": "aes",
"dh_params": {
"key_length": 1024
},
"raw_retrieval": False,
"target_cols": ["x0", "x3"]
}
secure_information_retrieval_0 = SecureInformationRetrieval(name="secure_information_retrieval_0", **param)
# add components to pipeline, in order of task execution.
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(secure_information_retrieval_0, data=Data(data=data_transform_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
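
# The commutative encryption named above (Pohlig-Hellman style) satisfies
# E_a(E_b(x)) == E_b(E_a(x)), which is what lets two parties blind the same IDs in
# either order. A toy modular-exponentiation demo; the prime and keys below are
# illustrative, not the 1024-bit parameters configured above:
def commutativity_demo():
    p = 2 ** 127 - 1                  # a toy Mersenne prime
    a, b, x = 65537, 92821, 123456789
    enc = lambda m, k: pow(m, k, p)
    assert enc(enc(x, a), b) == enc(enc(x, b), a)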
# File: FATE-master/examples/pipeline/match_id_test/pipeline-hetero-lr-feature-engineering.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation, DataStatistics, HeteroPearson
from pipeline.component import HeteroLR, OneHotEncoder
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import FeatureScale
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment_sid{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).\
set_roles(guest=guest, host=host, arbiter=arbiter)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True)
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
feature_scale_0 = FeatureScale(name='feature_scale_0', method="standard_scale",
need_run=True)
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"adjustment_factor": 0.5,
"local_only": False,
"need_run": True,
"transform_param": {
"transform_cols": -1,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name='hetero_feature_binning_0',
**binning_param)
statistic_0 = DataStatistics(name='statistic_0', statistics=["95%"])
pearson_0 = HeteroPearson(name='pearson_0', column_indexes=-1)
onehot_0 = OneHotEncoder(name='onehot_0')
selection_param = {
"name": "hetero_feature_selection_0",
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"manually",
"unique_value",
"iv_filter",
"coefficient_of_variation_value_thres",
"outlier_cols"
],
"manually_param": {
"filter_out_indexes": [
0,
1,
2
],
"filter_out_names": [
"x3"
]
},
"unique_param": {
"eps": 1e-06
},
"iv_param": {
"metrics": ["iv", "iv", "iv"],
"filter_type": ["threshold", "top_k", "top_percentile"],
"threshold": [0.001, 100, 0.99]
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}}
hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)
lr_param = {
"name": "hetero_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros"
},
"sqn_param": {
"update_interval_L": 3,
"memory_M": 5,
"sample_size": 5000,
"random_seed": None
},
"cv_param": {
"n_splits": 5,
"shuffle": False,
"random_seed": 103,
"need_cv": False
}
}
hetero_lr_0 = HeteroLR(**lr_param)
evaluation_0 = Evaluation(name='evaluation_0')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(feature_scale_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_feature_binning_0, data=Data(data=feature_scale_0.output.data))
pipeline.add_component(statistic_0, data=Data(data=feature_scale_0.output.data))
pipeline.add_component(pearson_0, data=Data(data=feature_scale_0.output.data))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=hetero_feature_binning_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
pipeline.add_component(onehot_0, data=Data(data=hetero_feature_selection_0.output.data))
pipeline.add_component(hetero_lr_0, data=Data(train_data=onehot_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_lr_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
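
# The iv_filter above ranks features by Information Value:
# IV = sum over bins of (good_rate - bad_rate) * ln(good_rate / bad_rate).
# A minimal reference implementation (a sketch, not FATE's binning code):
import math

def information_value(good_dist, bad_dist):
    # good_dist / bad_dist: per-bin fractions of good and bad samples, each summing to 1
    return sum((g - b) * math.log(g / b) for g, b in zip(good_dist, bad_dist))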
# File: FATE-master/examples/pipeline/match_id_test/pipeline-feldman-verifiable-sum.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import Reader
from pipeline.component import DataTransform
from pipeline.component import FeldmanVerifiableSum
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host
guest_train_data = {"name": "breast_homo_test", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_homo_test", "namespace": f"experiment_sid{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role="host", party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True)
# get and configure DataTransform party instance of guest
data_transform_0.get_party_instance(
role="guest", party_id=guest).component_param(
with_label=False, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role="host", party_id=hosts).component_param(with_label=False)
# define FeldmanVerifiableSum components
feldmanverifiablesum_0 = FeldmanVerifiableSum(name="feldmanverifiablesum_0")
feldmanverifiablesum_0.get_party_instance(role="guest", party_id=guest).component_param(sum_cols=[1, 2, 3], q_n=6)
feldmanverifiablesum_0.get_party_instance(role="host", party_id=hosts).component_param(sum_cols=[1, 2, 3], q_n=6)
# add components to pipeline, in order of task execution.
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(feldmanverifiablesum_0, data=Data(data=data_transform_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
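
# Idea behind FeldmanVerifiableSum: each party secret-shares its column values so
# the total can be computed without revealing any input, and Feldman commitments
# make the shares verifiable. A toy additive-sharing sketch (not the protocol):
import random

def additive_shares(x, n=3, q=2 ** 61 - 1):
    parts = [random.randrange(q) for _ in range(n - 1)]
    return parts + [(x - sum(parts)) % q]    # the n shares sum to x modulo q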
# File: FATE-master/examples/pipeline/match_id_test/pipeline-label-transform.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import LabelTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroLR
from pipeline.component import DataTransform
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data, Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment_sid{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role="host", party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True) # start component numbering at 0
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role="guest", party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
data_transform_0.get_party_instance(role="host", party_id=host).component_param(with_label=False,
output_format="dense")
intersection_0 = Intersection(name="intersection_0", join_method="left_join", sample_id_generator="host")
label_transform_0 = LabelTransform(name="label_transform_0")
label_transform_0.get_party_instance(role="host", party_id=host).component_param(need_run=False)
hetero_lr_0 = HeteroLR(name="hetero_lr_0", penalty="L2", optimizer="sgd", tol=0.001,
alpha=0.01, max_iter=20, early_stop="weight_diff", batch_size=-1,
learning_rate=0.15, decay=0.0, decay_sqrt=False,
init_param={"init_method": "zeros"},
floating_point_precision=23)
label_transform_1 = LabelTransform(name="label_transform_1")
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary", pos_label=1)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(label_transform_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_lr_0, data=Data(train_data=label_transform_0.output.data))
pipeline.add_component(
label_transform_1, data=Data(
data=hetero_lr_0.output.data), model=Model(
label_transform_0.output.model))
pipeline.add_component(evaluation_0, data=Data(data=label_transform_1.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,538 | 43.5 | 115 |
py
|
FATE
|
FATE-master/examples/pipeline/match_id_test/pipeline-homo-lr-train-eval.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.component import Evaluation
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
import json
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment_sid{namespace}"}
guest_eval_data = {"name": "breast_homo_guest", "namespace": f"experiment_sid{namespace}"}
host_eval_data = {"name": "breast_homo_host", "namespace": f"experiment_sid{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_eval_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True,
with_label=True, output_format="dense")
    data_transform_1 = DataTransform(name="data_transform_1")  # reuses the transform model fitted by data_transform_0
scale_0 = FeatureScale(name='scale_0')
scale_1 = FeatureScale(name='scale_1')
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"validation_freqs": 1,
"init_param": {
"init_method": "zeros"
},
"encrypt_param": {
"method": None
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
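    # encrypt_param method None switches off encryption, so model aggregation
    # runs in plaintext here (fine for a demo run, not for production)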
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(reader_1)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data),
model=Model(data_transform_0.output.model))
    # set data input sources of feature scale components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(scale_1, data=Data(data=data_transform_1.output.data),
model=Model(scale_0.output.model))
pipeline.add_component(homo_lr_0, data=Data(train_data=scale_0.output.data,
validate_data=scale_1.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)
pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("homo_lr_0").get_summary(), indent=4, ensure_ascii=False))
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,229 | 37.740741 | 107 |
py
|
FATE
|
FATE-master/examples/pipeline/match_id_test/pipeline-hetero-lr.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroLR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment_sid{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).\
set_roles(guest=guest, host=host, arbiter=arbiter)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True)
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
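    # with_match_id=True preserves the match id column created at upload time
    # (hence the experiment_sid namespace), letting intersection align parties
    # on match ids instead of raw sample ids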
intersection_0 = Intersection(name="intersection_0")
lr_param = {
"name": "hetero_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros"
},
"sqn_param": {
"update_interval_L": 3,
"memory_M": 5,
"sample_size": 5000,
"random_seed": None
},
"cv_param": {
"n_splits": 5,
"shuffle": False,
"random_seed": 103,
"need_cv": False
}
}
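    # note: sqn_param only takes effect when optimizer is "sqn"; with rmsprop
    # configured above it is carried along but unused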
hetero_lr_0 = HeteroLR(**lr_param)
# evaluation_0 = Evaluation(name='evaluation_0')
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_lr_0, data=Data(train_data=intersection_0.output.data))
# pipeline.add_component(evaluation_0, data=Data(data=hetero_lr_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,570 | 34.009804 | 102 |
py
|
FATE
|
FATE-master/examples/pipeline/match_id_test/__init__.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 614 | 40 | 75 |
py
|
FATE
|
FATE-master/examples/pipeline/match_id_test/pipeline-kmeans.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroKmeans
from pipeline.component import Intersection
from pipeline.component import Evaluation
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment_sid{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True)
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
param = {
"k": 3,
"max_iter": 10
}
hetero_kmeans_0 = HeteroKmeans(name='hetero_kmeans_0', **param)
evaluation_0 = Evaluation(name='evaluation_0', eval_type='clustering')
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
# set data input sources of intersection components
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
    # set train data of hetero_kmeans_0 component
pipeline.add_component(hetero_kmeans_0, data=Data(train_data=intersection_0.output.data))
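    # HeteroKmeans exposes several data outputs; the first one (data[0]) holds
    # the per-sample cluster result consumed by the clustering Evaluation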
pipeline.add_component(evaluation_0, data=Data(data=hetero_kmeans_0.output.data.data[0]))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(pipeline.get_component("hetero_kmeans_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,943 | 37.666667 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/match_id_test/pipeline-data-transform-dense.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment_sid{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_match_id=True,
with_label=True)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 2,207 | 33.5 | 103 |
py
|
FATE
|
FATE-master/examples/pipeline/match_id_test/pipeline-feature-imputation-designated.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.utils.tools import load_job_config
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import FeatureImputation
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment_sid{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment_sid{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", with_label=False, with_match_id=True)
intersection_0 = Intersection(name="intersection_0")
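    # "designated" filling: values listed in missing_impute (here 0) are
    # treated as missing and replaced with the designated default_value (42)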
feature_imputation_0 = FeatureImputation(name="feature_imputation_0",
missing_fill_method="designated",
default_value=42, missing_impute=[0])
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(feature_imputation_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 2,741 | 38.73913 | 103 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-sample-weights.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import FeatureScale
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.component import SampleWeight
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
data_transform_0.get_party_instance(role="host", party_id=host).component_param(with_label=False)
intersect_0 = Intersection(name='intersect_0')
scale_0 = FeatureScale(name='scale_0', need_run=False)
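    # class_weight up-weights the positive class 2:1; labels live on the guest
    # only, so the host side is set to skip this component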
sample_weight_0 = SampleWeight(name="sample_weight_0", class_weight={"0": 1, "1": 2})
sample_weight_0.get_party_instance(role="host", party_id=host).component_param(need_run=False)
param = {
"penalty": None,
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "weight_diff",
"batch_size": 320,
"learning_rate": 0.15,
"decay": 0,
"decay_sqrt": True,
"init_param": {
"init_method": "ones"
},
"reveal_every_iter": False,
"reveal_strategy": "respectively"
}
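    # with reveal_every_iter False and strategy "respectively", weights remain
    # secret-shared throughout training and each party reconstructs only its
    # own sub-model when training finishes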
hetero_sshe_lr_0 = HeteroSSHELR(name='hetero_sshe_lr_0', **param)
evaluation_0 = Evaluation(name='evaluation_0')
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersect_0, data=Data(data=data_transform_0.output.data))
    # set data input source of the feature scale component
pipeline.add_component(scale_0, data=Data(data=intersect_0.output.data))
pipeline.add_component(sample_weight_0, data=Data(data=scale_0.output.data))
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=sample_weight_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,527 | 37.372881 | 107 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-compute-loss-not-reveal.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": None,
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": False,
"reveal_strategy": "respectively"
}
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,050 | 34.570423 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-cv.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"cv_param": {
"n_splits": 3,
"shuffle": False,
"random_seed": 103,
"need_cv": True
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
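    # need_cv True runs k-fold cross-validation instead of a single fit, which
    # is why no evaluation or prediction step follows below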
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
# pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
#
# predict_pipeline = PipeLine()
# # add data reader onto predict pipeline
# predict_pipeline.add_component(reader_0)
# # add selected components from train pipeline onto predict pipeline
# # specify data source
# predict_pipeline.add_component(pipeline,
# data=Data(predict_input={pipeline.data_transform_0.input.data: reader_0.output.data}))
# # run predict model
    # predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,970 | 34.255319 | 123 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-warm-start.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"reveal_strategy": "respectively",
"reveal_every_iter": True,
"callback_param": {
"callbacks": ["ModelCheckpoint"],
"metrics": None,
"use_first_metric_only": False,
"save_freq": 1
}
}
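    # warm start: hetero_sshe_lr_0 trains 3 iterations (checkpointed by the
    # ModelCheckpoint callback), then hetero_sshe_lr_1 resumes from its model
    # and continues for up to 30 iterations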
hetero_sshe_lr_0 = HeteroSSHELR(name="hetero_sshe_lr_0", max_iter=3, **lr_param)
hetero_sshe_lr_1 = HeteroSSHELR(name="hetero_sshe_lr_1", max_iter=30, **lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(hetero_sshe_lr_1, data=Data(train_data=intersection_0.output.data),
model=Model(model=hetero_sshe_lr_0.output.model))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_1.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("hetero_sshe_lr_1").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,047 | 36.117647 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-converged-weight-diff-encrypted-not-reveal.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "weight_diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": False,
"reveal_strategy": "encrypted_reveal_in_host"
}
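    # "encrypted_reveal_in_host": the guest recovers its sub-model in
    # plaintext while the host's weights are released only in encrypted form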
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,864 | 33.75 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-normal-not-fit-intercept.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "weight_diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,061 | 34.647887 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-ovr-none-penalty.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": None,
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.001,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": False,
"reveal_strategy": "respectively"
}
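    # vehicle_scale carries multi-class labels, so the LR trains one-vs-rest;
    # penalty None disables regularization, and the Evaluation below is
    # configured with eval_type "multi" accordingly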
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,065 | 34.676056 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-compute-loss.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,054 | 34.598592 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-large-init-w-compute-loss.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"tol": 0.0001,
"alpha": 10,
"max_iter": 30,
"early_stop": "weight_diff",
"batch_size": -1,
"learning_rate": 0.3,
"decay": 0.5,
"init_param": {
"init_method": "const",
"init_const": 200,
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
}
}
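    # weights are initialized to a large constant (init_const=200) rather than
    # zeros, matching the "large-init" scenario in this example's file name;
    # the learning-rate decay of 0.5 helps training recover from the poor start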
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,001 | 34.475177 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-ovr-l1.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L1",
"optimizer": "adam",
"tol": 0.0001,
"alpha": 0.001,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
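    # vehicle_scale is a multi-class dataset, so this job exercises SSHE-LR in
    # its one-vs-rest (ovr) mode with an L1 penalty; evaluation below is
    # therefore configured with eval_type="multi"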
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,065 | 34.676056 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-encrypted-reveal-in-host.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroFeatureSelection
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
selection_param = {
"select_col_indexes": -1,
"filter_methods": ["manually"]
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0",
**selection_param)
hetero_feature_selection_0.get_party_instance(role='guest', party_id=guest).component_param(
manually_param={"left_col_indexes": [0]}
)
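    # select_col_indexes=-1 submits all columns as filter candidates, while the
    # "manually" filter keeps only column 0 on the guest side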
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": None,
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "random_uniform"
},
"reveal_strategy": "encrypted_reveal_in_host",
"reveal_every_iter": False
}
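    # with reveal_strategy "encrypted_reveal_in_host", the host's sub-model is
    # delivered in encrypted form instead of plaintext; per FATE's SSHE-LR
    # documentation this strategy requires reveal_every_iter=False, as set here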
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=hetero_feature_selection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,814 | 35.203008 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-converged-weight-diff-respectively-not-reveal.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "weight_diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": False,
"reveal_strategy": "respectively"
}
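    # early_stop "weight_diff" with reveal_every_iter=False: convergence is
    # judged on the change in weights while the model stays secret-shared
    # between iterations (the "not-reveal" variant in this example's file name)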
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,852 | 33.664286 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/__init__.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 614 | 40 | 75 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-l1.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L1",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
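    # L1 penalty with the rmsprop optimizer; reveal_every_iter=True means both
    # parties reconstruct their plaintext sub-model weights after every
    # iteration, and fit_intercept=True adds a bias term to the model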
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,053 | 34.591549 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-ovr.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "adam",
"tol": 0.0001,
"alpha": 0.001,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
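    # same multi-class (ovr) setting as the L1 variant, here with an L2 penalty
    # and the adam optimizer; batch_size=-1 trains on the full batch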
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,065 | 34.676056 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-with-validate.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.evaluation import Evaluation
from pipeline.component.hetero_sshe_lr import HeteroSSHELR
from pipeline.component.intersection import Intersection
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
guest_eval_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_eval_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role='host', party_id=hosts).component_param(table=host_eval_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
data_transform_1 = DataTransform(name="data_transform_1", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
intersection_1 = Intersection(name="intersection_1")
pipeline.add_component(reader_0)
pipeline.add_component(reader_1)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data),
model=Model(data_transform_0.output.model))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"callback_param": {
"callbacks": [
"EarlyStopping",
"PerformanceEvaluate"
],
"validation_freqs": 1,
"early_stopping_rounds": 3
},
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros"
},
"reveal_strategy": "respectively",
"reveal_every_iter": True
}
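    # callback_param enables per-iteration validation (validation_freqs=1) and
    # early stopping: training halts if the validation metric fails to improve
    # for early_stopping_rounds=3 consecutive evaluations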
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data,
validate_data=intersection_1.output.data))
evaluation_data = [hetero_sshe_lr_0.output.data]
hetero_sshe_lr_1 = HeteroSSHELR(name='hetero_sshe_lr_1')
pipeline.add_component(hetero_sshe_lr_1, data=Data(test_data=intersection_1.output.data),
model=Model(hetero_sshe_lr_0.output.model))
evaluation_data.append(hetero_sshe_lr_1.output.data)
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=evaluation_data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,795 | 38.162162 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-normal.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": True,
"reveal_strategy": "respectively"
}
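    # batch_size=-1 uses the full intersected dataset in each iteration, and
    # the homomorphic-encryption key length of 1024 trades security margin for
    # speed in this demo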
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
    # add data reader onto predict pipeline
    predict_pipeline.add_component(reader_0)
    # add selected components from train pipeline onto predict pipeline
    # specify data source
    predict_pipeline.add_component(
        pipeline, data=Data(
            predict_input={
                pipeline.data_transform_0.input.data: reader_0.output.data}))
    # run predict model
    predict_pipeline.predict()
    return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,039 | 35.258993 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_sshe_lr/pipeline-hetero-lr-ovr-encrypted-reveal-in-host.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroFeatureSelection
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
guest_eval_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_eval_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role='host', party_id=hosts).component_param(table=host_eval_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
data_transform_1 = DataTransform(name="data_transform_1", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
intersection_1 = Intersection(name="intersection_1")
selection_param = {
"select_col_indexes": -1,
"filter_methods": ["manually"]
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0",
**selection_param)
hetero_feature_selection_0.get_party_instance(role='guest', party_id=guest).component_param(
manually_param={"left_col_indexes": [0]}
)
hetero_feature_selection_1 = HeteroFeatureSelection(name="hetero_feature_selection_1")
pipeline.add_component(reader_0)
pipeline.add_component(reader_1)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data),
model=Model(data_transform_0.output.model))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_feature_selection_1, data=Data(data=intersection_1.output.data),
model=Model(hetero_feature_selection_0.output.model))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": None,
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 1,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "random_uniform"
},
"reveal_strategy": "encrypted_reveal_in_host",
"reveal_every_iter": False
}
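    # max_iter=1 keeps this multi-class encrypted-reveal demo short; as in the
    # binary variant, the host-side weights stay encrypted, so
    # reveal_every_iter remains False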
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=hetero_feature_selection_0.output.data,
validate_data=hetero_feature_selection_1.output.data))
evaluation_data = [hetero_sshe_lr_0.output.data]
hetero_sshe_lr_1 = HeteroSSHELR(name='hetero_sshe_lr_1')
pipeline.add_component(hetero_sshe_lr_1, data=Data(test_data=hetero_feature_selection_1.output.data),
model=Model(hetero_sshe_lr_0.output.model))
evaluation_data.append(hetero_sshe_lr_1.output.data)
evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
pipeline.add_component(evaluation_0, data=Data(data=evaluation_data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 6,437 | 40.805195 | 109 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-warm-start.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import HomoLR
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense', with_label=True)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
lr_param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"decay": 1,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
},
"callback_param": {
"callbacks": ["ModelCheckpoint", "EarlyStopping"]
}
}
homo_lr_0 = HomoLR(name="homo_lr_0", max_iter=3, **lr_param)
homo_lr_1 = HomoLR(name="homo_lr_1", max_iter=30, **lr_param)
homo_lr_2 = HomoLR(name="homo_lr_2", max_iter=30, **lr_param)
pipeline.add_component(homo_lr_0, data=Data(train_data=data_transform_0.output.data))
pipeline.add_component(homo_lr_1, data=Data(train_data=data_transform_0.output.data),
model=Model(model=homo_lr_0.output.model))
pipeline.add_component(homo_lr_2, data=Data(train_data=data_transform_0.output.data))
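    # warm start: homo_lr_1 resumes from homo_lr_0's checkpoint via the Model
    # interface (3 + 30 epochs in total), while homo_lr_2 trains 30 epochs from
    # scratch; evaluation_0 below compares the two resulting models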
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=[homo_lr_1.output.data,
homo_lr_2.output.data]))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,190 | 33.073171 | 101 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-sample-weights.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HomoLR
from pipeline.component import Evaluation
from pipeline.component import SampleWeight
from pipeline.component import Reader
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
scale_0 = FeatureScale(name='scale_0')
sample_weight_0 = SampleWeight(name="sample_weight_0", class_weight={"0": 1, "1": 2})
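    # class_weight gives samples with label "1" twice the weight of label "0"
    # samples when computing the weighted loss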
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"decay": 1.0,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 5,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
evaluation_0 = Evaluation(name='evaluation_0')
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    # set data input sources of downstream components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(sample_weight_0, data=Data(data=scale_0.output.data))
pipeline.add_component(homo_lr_0, data=Data(train_data=sample_weight_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,156 | 34.529915 | 107 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-train.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
scale_0 = FeatureScale(name='scale_0')
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"decay": 1,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    # set data input sources of downstream components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(homo_lr_0, data=Data(train_data=scale_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)
pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
pipeline.fit()
deploy_components = [data_transform_0, scale_0, homo_lr_0]
pipeline.deploy_component(components=deploy_components)
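    # deploy_component marks the transformation, scaling and model components
    # so the predict pipeline below can reuse them for inference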
    predict_pipeline = PipeLine()
    # add data reader onto predict pipeline
    predict_pipeline.add_component(reader_0)
    # add selected components from train pipeline onto predict pipeline
    # specify data source
    predict_pipeline.add_component(
        pipeline, data=Data(
            predict_input={
                pipeline.data_transform_0.input.data: reader_0.output.data}))
    predict_pipeline.compile()
    predict_pipeline.predict()
    # export the predict dsl and conf for later reuse
    dsl_json = predict_pipeline.get_predict_dsl()
    conf_json = predict_pipeline.get_predict_conf()
    with open('./homo-lr-normal-predict-dsl.json', 'w') as f:
        json.dump(dsl_json, f, indent=4)
    with open('./homo-lr-normal-predict-conf.json', 'w') as f:
        json.dump(conf_json, f, indent=4)
# query component summary
print(json.dumps(pipeline.get_component("homo_lr_0").get_summary(), indent=4, ensure_ascii=False))
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,028 | 35.442029 | 107 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-one-vs-all.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "vehicle_scale_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense', with_label=True)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
lr_param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"decay": 1,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
},
"callback_param": {
"callbacks": ["ModelCheckpoint", "EarlyStopping"]
}
}
homo_lr_0 = HomoLR(name="homo_lr_0", max_iter=1, **lr_param)
homo_lr_1 = HomoLR(name="homo_lr_1")
pipeline.add_component(homo_lr_0, data=Data(train_data=data_transform_0.output.data))
pipeline.add_component(homo_lr_1, data=Data(test_data=data_transform_0.output.data),
model=Model(model=homo_lr_0.output.model))
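    # homo_lr_1 performs inference only: it takes homo_lr_0's trained model via
    # the Model interface and scores the same data as test_data, so
    # evaluation_0 can compare train-time and predict-time outputs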
evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
pipeline.add_component(evaluation_0, data=Data(data=[homo_lr_0.output.data,
homo_lr_1.output.data]))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,020 | 32.508333 | 101 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-eval.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
guest_eval_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_eval_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_eval_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
    data_transform_1 = DataTransform(name="data_transform_1")
scale_0 = FeatureScale(name='scale_0')
scale_1 = FeatureScale(name='scale_1')
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"validation_freqs": 1,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
homo_lr_1 = HomoLR(name='homo_lr_1')
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(reader_1)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data),
model=Model(data_transform_0.output.model))
    # set data input sources of feature scale components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(scale_1, data=Data(data=data_transform_1.output.data),
model=Model(scale_0.output.model))
pipeline.add_component(homo_lr_0, data=Data(train_data=scale_0.output.data,
validate_data=scale_1.output.data))
pipeline.add_component(homo_lr_1, data=Data(test_data=scale_1.output.data),
model=Model(homo_lr_0.output.model))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)
pipeline.add_component(evaluation_0, data=Data(data=[homo_lr_0.output.data,
homo_lr_1.output.data]))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("homo_lr_0").get_summary(), indent=4, ensure_ascii=False))
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
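# Hedged follow-up sketch: this script stops after fit; an offline predict
# flow could be built the way the pearson-predict example further below does,
# by deploying the fitted components and wiring a new reader (sketch only):
#     pipeline.deploy_component([data_transform_0, scale_0, homo_lr_0])
#     predict_pipeline = PipeLine()
#     predict_pipeline.add_component(reader_0)
#     predict_pipeline.add_component(pipeline, data=Data(predict_input={
#         pipeline.data_transform_0.input.data: reader_0.output.data}))
#     predict_pipeline.predict()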
| 5,417 | 38.26087 | 107 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-cv.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
scale_0 = FeatureScale(name='scale_0')
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"decay": 1.0,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 5,
"shuffle": True,
"random_seed": 33,
"need_cv": True
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    # set data input sources of feature scale components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(homo_lr_0, data=Data(train_data=scale_0.output.data))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("homo_lr_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
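# Hedged note: with "need_cv": True the component only runs 5-fold CV and does
# not leave a deployable model behind; to train a final model afterwards one
# could flip the flag and refit (sketch, following the other LR examples):
#     param["cv_param"]["need_cv"] = False
#     homo_lr_final = HomoLR(name="homo_lr_final", **param)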
| 3,756 | 32.846847 | 107 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-multi-host.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.component import FeatureScale
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
scale_0 = FeatureScale(name='scale_0')
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"decay": 1,
"decay_sqrt": True,
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    # set data input sources of feature scale components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(homo_lr_0, data=Data(train_data=scale_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)
pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("homo_lr_0").get_summary(), indent=4, ensure_ascii=False))
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
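# Hedged sketch: this example broadcasts one table to every host; per-host
# tables can be configured individually instead (the table names below are
# hypothetical):
#     for i, h in enumerate(host):
#         reader_0.get_party_instance(role='host', party_id=h).component_param(
#             table={"name": f"breast_homo_host_{i}", "namespace": "experiment"})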
| 4,147 | 34.758621 | 107 | py |
| FATE | FATE-master/examples/pipeline/homo_logistic_regression/pipeline-homo-lr-train-eval.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HomoLR
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.component import Evaluation
from pipeline.component import FeatureScale
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
guest_eval_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_eval_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_eval_data)
# define DataTransform components
data_transform_0 = DataTransform(
name="data_transform_0",
with_label=True,
output_format="dense") # start component numbering at 0
    data_transform_1 = DataTransform(name="data_transform_1")
scale_0 = FeatureScale(name='scale_0')
scale_1 = FeatureScale(name='scale_1')
param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 1e-05,
"alpha": 0.01,
"max_iter": 3,
"early_stop": "diff",
"batch_size": 320,
"learning_rate": 0.15,
"callback_param": {
"callbacks": ["EarlyStopping"],
"validation_freqs": 1
},
"init_param": {
"init_method": "zeros"
},
"cv_param": {
"n_splits": 4,
"shuffle": True,
"random_seed": 33,
"need_cv": False
}
}
homo_lr_0 = HomoLR(name='homo_lr_0', **param)
# add components to pipeline, in order of task execution
pipeline.add_component(reader_0)
pipeline.add_component(reader_1)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data),
model=Model(data_transform_0.output.model))
    # set data input sources of feature scale components
pipeline.add_component(scale_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(scale_1, data=Data(data=data_transform_1.output.data),
model=Model(scale_0.output.model))
pipeline.add_component(homo_lr_0, data=Data(train_data=scale_0.output.data,
validate_data=scale_1.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)
pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
print(json.dumps(pipeline.get_component("homo_lr_0").get_summary(), indent=4, ensure_ascii=False))
print(json.dumps(pipeline.get_component("evaluation_0").get_summary(), indent=4, ensure_ascii=False))
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
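# Hedged sketch: EarlyStopping is usually paired with a patience setting; the
# extra key below follows FATE's callback conventions but is an assumption in
# this context:
#     "callback_param": {
#         "callbacks": ["EarlyStopping"],
#         "validation_freqs": 1,
#         "early_stopping_rounds": 2   # assumed parameter name
#     }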
| 5,236 | 37.226277 | 107 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-pearson.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import HeteroPearson
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_pearson_0 = HeteroPearson(name='hetero_pearson_0', column_indexes=-1)
hetero_binning_0 = HeteroFeatureBinning(name='hetero_binning_0')
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"vif_filter",
"correlation_filter"
],
"vif_param": {
"threshold": 5
},
"correlation_param": {
"sort_metric": "iv",
"threshold": 0.5,
"select_federated": False
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_binning_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_pearson_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_pearson_0.output.model,
hetero_binning_0.output.model]))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
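# Hedged sketch: after fitting, the columns kept or dropped by the VIF and
# correlation filters can be inspected via the summary pattern used by the LR
# examples above:
#     print(pipeline.get_component("hetero_feature_selection_0").get_summary())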
| 3,818 | 38.78125 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-iv-selection.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name='hetero_feature_binning_0', **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"iv_filter"
],
"iv_param": {
"metrics": "iv",
"filter_type": "threshold",
"threshold": 0.1,
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=hetero_feature_binning_0.output.model))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
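# Hedged sketch: iv_param also accepts list-style multi-criteria filtering, as
# the multi-iso example further below shows, e.g. threshold plus top-k in one
# pass:
#     "iv_param": {
#         "metrics": ["iv", "iv"],
#         "filter_type": ["threshold", "top_k"],
#         "threshold": [0.1, 15]
#     }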
| 4,645 | 37.396694 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-single-predict.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import FeatureScale
from pipeline.component import FederatedSample
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
guest_eval_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_eval_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role="host", party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role="guest", party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role="host", party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role="guest", party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role="host", party_id=host).component_param(table=host_eval_data)
data_transform_1 = DataTransform(name="data_transform_1")
intersection_1 = Intersection(name="intersection_1")
pipeline.add_component(reader_1)
    pipeline.add_component(data_transform_1,
                           data=Data(data=reader_1.output.data),
                           model=Model(data_transform_0.output.model))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
sample_0 = FederatedSample(name="sample_0", fractions=0.9)
pipeline.add_component(sample_0, data=Data(data=intersection_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=sample_0.output.data))
hetero_feature_binning_1 = HeteroFeatureBinning(name="hetero_feature_binning_1")
pipeline.add_component(hetero_feature_binning_1,
data=Data(data=intersection_1.output.data),
model=Model(hetero_feature_binning_0.output.model))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"manually",
"iv_value_thres",
"iv_percentile"
],
"manually_param": {
"filter_out_indexes": [],
"filter_out_names": []
},
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0,
data=Data(data=hetero_feature_binning_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model]))
hetero_feature_selection_1 = HeteroFeatureSelection(name="hetero_feature_selection_1")
pipeline.add_component(hetero_feature_selection_1,
data=Data(data=hetero_feature_binning_1.output.data),
model=Model(hetero_feature_selection_0.output.model))
scale_0 = FeatureScale(name="scale_0")
scale_1 = FeatureScale(name="scale_1")
pipeline.add_component(scale_0, data=Data(data=hetero_feature_selection_0.output.data))
pipeline.add_component(scale_1, data=Data(data=hetero_feature_selection_1.output.data),
model=Model(scale_0.output.model))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
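# Hedged predict sketch (mirrors the pearson-predict example further below):
# despite its name, this script only fits; a single predict job could be
# issued with something like:
#     pipeline.deploy_component([data_transform_0, intersection_0,
#                                hetero_feature_binning_0,
#                                hetero_feature_selection_0, scale_0])
#     predict_pipeline = PipeLine()
#     predict_pipeline.add_component(reader_0)
#     predict_pipeline.add_component(pipeline, data=Data(predict_input={
#         pipeline.data_transform_0.input.data: reader_0.output.data}))
#     predict_pipeline.predict()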
| 7,015 | 38.863636 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-multi-iso.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import HeteroSecureBoost
from pipeline.component import Intersection
from pipeline.component import PSI
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
guest_eval_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_eval_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
reader_1 = Reader(name="reader_1")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
reader_1.get_party_instance(role='host', party_id=hosts).component_param(table=host_eval_data)
data_transform_1 = DataTransform(name="data_transform_1")
intersection_1 = Intersection(name="intersection_1")
pipeline.add_component(reader_1)
    pipeline.add_component(data_transform_1,
                           data=Data(data=reader_1.output.data),
                           model=Model(data_transform_0.output.model))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
psi_param = {
"name": "psi_0",
"max_bin_num": 20
}
psi_0 = PSI(**psi_param)
pipeline.add_component(psi_0, data=Data(train_data=intersection_0.output.data,
validate_data=intersection_1.output.data))
secureboost_param = {
"task_type": "classification",
"learning_rate": 0.1,
"num_trees": 5,
"subsample_feature_rate": 1,
"n_iter_no_change": False,
"tol": 0.0001,
"bin_num": 50,
"objective_param": {
"objective": "cross_entropy"
},
"encrypt_param": {
"method": "paillier"
},
"predict_param": {
"threshold": 0.5
}
}
secureboost_0 = HeteroSecureBoost(name="secureboost_0", **secureboost_param)
pipeline.add_component(secureboost_0, data=Data(train_data=intersection_0.output.data))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"iv_filter",
"statistic_filter",
"psi_filter",
"hetero_sbt_filter"
],
"iv_param": {
"metrics": ["iv", "iv", "iv"],
"filter_type": ["threshold", "top_k", "top_percentile"],
"take_high": True,
"threshold": [0.03, 15, 0.7],
"host_thresholds": [[0.15], None, None],
"select_federated": True
},
"statistic_param": {
"metrics": ["skewness", "skewness", "kurtosis", "median"],
"filter_type": "threshold",
"take_high": [True, False, False, True],
"threshold": [-10, 10, 2, -1.5]
},
"psi_param": {
"metrics": "psi",
"filter_type": "threshold",
"take_high": False,
"threshold": -0.1
},
"sbt_param": {
"metrics": "feature_importance",
"filter_type": "threshold",
"take_high": True,
"threshold": 0.03
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model,
psi_0.output.model,
secureboost_0.output.model]))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
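# Hedged note: each filter consumes its matching upstream model supplied via
# isometric_model -- iv_filter <- hetero_feature_binning_0, statistic_filter
# <- statistic_0, psi_filter <- psi_0, hetero_sbt_filter <- secureboost_0; a
# summary of what each filter kept can be printed as elsewhere in these
# examples:
#     import json
#     print(json.dumps(pipeline.get_component(
#         "hetero_feature_selection_0").get_summary(), indent=4))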
| 7,933 | 38.472637 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-select-anonymous-col-names.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroPearson
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_pearson_0 = HeteroPearson(name='hetero_pearson_0', column_indexes=-1)
selection_param = {
"name": "hetero_feature_selection_0",
"select_col_indexes": [],
"select_names": [],
"filter_methods": [
"vif_filter"
],
"vif_param": {
"threshold": 5
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)
host_param = {
"select_names": [
f"host_{host}_x0",
f"host_{host}_x1",
f"host_{host}_x3"
],
"use_anonymous": True}
hetero_feature_selection_0.get_party_instance(role="host", party_id=host).component_param(**host_param)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_pearson_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=hetero_pearson_0.output.model))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
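# Hedged note: with use_anonymous=True the host columns are addressed through
# FATE's anonymous naming scheme rather than raw headers, e.g.
#     f"host_{host}_x0"   # "host_10000_x0" when the host party id is 10000
# so the guest never needs to know the host's real column names.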
| 3,649 | 36.244898 | 107 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-fast-sbt.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFastSecureBoost
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
fast_sbt_param = {
"task_type": "classification",
"learning_rate": 0.1,
"num_trees": 4,
"subsample_feature_rate": 1,
"n_iter_no_change": False,
"work_mode": "layered",
"guest_depth": 2,
"host_depth": 3,
"tol": 0.0001,
"bin_num": 50,
"metrics": ["Recall", "ks", "auc", "roc"],
"objective_param": {
"objective": "cross_entropy"
},
"encrypt_param": {
"method": "paillier"
},
"predict_param": {
"threshold": 0.5
},
"validation_freqs": 1
}
fast_sbt_0 = HeteroFastSecureBoost(name="fast_secureboost_0", **fast_sbt_param)
pipeline.add_component(fast_sbt_0, data=Data(train_data=intersection_0.output.data))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"hetero_fast_sbt_filter"
],
"sbt_param": {
"metrics": "feature_importance",
"filter_type": "threshold",
"take_high": True,
"threshold": 0.03
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=fast_sbt_0.output.model))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
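# Hedged sketch: HeteroFastSecureBoost also supports a "mix" work mode, where
# whole trees rather than layers are assigned per party; the second key is
# assumed from FATE's fast-SBT parameters:
#     fast_sbt_param["work_mode"] = "mix"
#     fast_sbt_param["tree_num_per_party"] = 1   # assumed parameter name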
| 4,792 | 36.445313 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-multi-host.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev", "skewness"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"statistic_filter",
"iv_filter",
"coefficient_of_variation_value_thres"
],
"statistic_param": {
"filter_type": "threshold",
"metrics": ["kurtosis", "coefficient_of_variance", "stddev"],
"take_high": [False, True, True],
"threshold": [2, 0.3, 1e-3]
},
"iv_param": {
"metrics": ["iv", "iv"],
"filter_type": ["threshold", "top_percentile"],
"threshold": [0.1, 0.9]
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
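# Hedged sketch: in a multi-host job, per-host IV thresholds can be supplied
# the way the multi-iso example above does:
#     "iv_param": {"metrics": "iv", "filter_type": "threshold",
#                  "threshold": 0.1, "host_thresholds": [[0.15], [0.15]],
#                  "select_federated": True}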
| 5,463 | 38.028571 | 109 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-select-cols.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"name": "statistic_0",
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(**statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
selection_param = {
"select_names": [],
"filter_methods": [
"manually",
"unique_value",
"iv_value_thres",
"coefficient_of_variation_value_thres",
"iv_percentile",
"outlier_cols"
],
"manually_param": {
"filter_out_indexes": [],
"filter_out_names": []
},
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
hetero_feature_selection_0.get_party_instance(role="guest",
party_id=guest).component_param(select_col_indexes=[0, 1, 3])
hetero_feature_selection_0.get_party_instance(role="host",
party_id=host).component_param(select_col_indexes=[0, 2, 3])
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
    # compile pipeline once all components are added; this step forms the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
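# Hedged sketch: columns can equally be picked by name instead of index
# (column names here assume the default "x0", "x1", ... headers):
#     hetero_feature_selection_0.get_party_instance(
#         role="guest", party_id=guest).component_param(
#         select_names=["x0", "x1", "x3"])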
| 5,963 | 37.477419 | 111 | py |
| FATE | FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-pearson-predict.py |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import HeteroPearson
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_pearson_0 = HeteroPearson(name='hetero_pearson_0', column_indexes=-1)
hetero_binning_0 = HeteroFeatureBinning(name='hetero_binning_0')
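    # vif_filter and correlation_filter both rely on the HeteroPearson correlation
    # model, which is passed in below as an isometric model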
selection_param = {
"name": "hetero_feature_selection_0",
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"vif_filter",
"correlation_filter"
],
"vif_param": {
"threshold": 5
},
"correlation_param": {
"threshold": 0.5,
"select_federated": False
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_binning_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_pearson_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_pearson_0.output.model,
hetero_binning_0.output.model]))
pipeline.compile()
pipeline.fit()
# predict
# deploy required components
pipeline.deploy_component([data_transform_0, intersection_0,
hetero_binning_0, hetero_feature_selection_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
    # run the prediction job
predict_pipeline.predict()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4434 | 38.598214 | 103 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-manually-left.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
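    # manually filter in "left" mode: only the columns listed in
    # left_col_indexes / left_col_names are kept for each party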
guest_selection_param = {"select_col_indexes": -1,
"select_names": [],
"filter_methods": ["manually"],
"manually_param": {
"filter_out_indexes": None,
"filter_out_names": None,
"left_col_indexes": [0, 1, 2],
"left_col_names": ["x3"]
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0")
hetero_feature_selection_0.get_party_instance(role='guest', party_id=guest).component_param(**guest_selection_param)
host_selection_param = {"select_col_indexes": -1,
"select_names": [],
"filter_methods": ["manually"],
"manually_param": {
"filter_out_indexes": None,
"filter_out_names": None,
"left_col_indexes": [0, 1, 2]
}}
    hetero_feature_selection_0.get_party_instance(role='host', party_id=host).component_param(**host_selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4612 | 42.933333 | 120 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-multiclass.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=False, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
intersection_0 = Intersection(name="intersection_0")
intersection_1 = Intersection(name="intersection_1")
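    # quantile binning with optimal-binning settings; columns 0-2 are treated as categorical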
param = {
"method": "quantile",
"optimal_binning_param": {
"metric_method": "gini",
"min_bin_pct": 0.05,
"max_bin_pct": 0.8,
"init_bucket_method": "quantile",
"init_bin_nums": 100,
"mixture": True
},
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": [0, 1, 2],
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **param)
hetero_feature_binning_1 = HeteroFeatureBinning(name="hetero_feature_binning_1")
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": ["iv_filter"],
"iv_param": {
"filter_type": "threshold",
"threshold": 2,
"mul_class_merge_type": "max"
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
hetero_feature_selection_1 = HeteroFeatureSelection(name="hetero_feature_selection_1")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_feature_binning_1,
data=Data(data=intersection_1.output.data),
model=Model(hetero_feature_binning_0.output.model))
pipeline.add_component(hetero_feature_selection_0,
data=Data(data=hetero_feature_binning_0.output.data),
model=Model(isometric_model=hetero_feature_binning_0.output.model))
pipeline.add_component(hetero_feature_selection_1,
data=Data(data=hetero_feature_binning_1.output.data),
model=Model(hetero_feature_selection_0.output.model))
pipeline.compile()
pipeline.fit()
# predict
# deploy required components
    pipeline.deploy_component([data_transform_0, intersection_0,
                               hetero_feature_binning_0, hetero_feature_selection_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_1)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_1.output.data}))
    # run the prediction job
predict_pipeline.predict()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 6649 | 40.049383 | 119 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-local-only.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": True,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
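    # IV filter computed locally (select_federated=False), matching the local_only binning above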
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"iv_filter"
],
"iv_param": {
"metrics": "iv",
"filter_type": "threshold",
"select_federated": False,
"threshold": 1
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=hetero_feature_binning_0.output.model))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4685 | 37.097561 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/__init__.py
| 0 | 0 | 0 |
py
|
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-iv-top-k.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
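    # top_k IV filter: keep the 7 features with the highest IV ("threshold" holds k here)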
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"iv_filter"
],
"iv_param": {
"metrics": "iv",
"filter_type": "top_k",
"threshold": 7
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=hetero_feature_binning_0.output.model))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4639 | 37.032787 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-percentage-value.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
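    # percentage_value filter drops columns whose most frequent value covers
    # more than upper_pct (here 80%) of the samples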
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": ["percentage_value"],
"percentage_value_param": {
"upper_pct": 0.8
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3691 | 38.698925 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-vif.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureSelection
from pipeline.component import HeteroPearson
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_pearson_0 = HeteroPearson(name='hetero_pearson_0', column_indexes=-1)
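    # VIF filter: drop features whose variance inflation factor, derived from
    # the Pearson correlation model, exceeds the threshold of 5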
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"vif_filter"
],
"vif_param": {
"threshold": 5
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_pearson_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(hetero_feature_selection_0,
data=Data(data=intersection_0.output.data),
model=Model(isometric_model=hetero_pearson_0.output.model))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3377 | 36.955056 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-select-col-names.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
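    # candidate columns are supplied per party below via select_names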
selection_param = {
"select_col_indexes": [],
"filter_methods": [
"manually",
"unique_value",
"iv_value_thres",
"coefficient_of_variation_value_thres",
"iv_percentile",
"outlier_cols"
],
"manually_param": {
"filter_out_indexes": [],
"filter_out_names": []
},
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
hetero_feature_selection_0.get_party_instance(role="guest",
party_id=guest).component_param(select_names=["x0", "x1", "x3"])
hetero_feature_selection_0.get_party_instance(role="host",
party_id=host).component_param(select_names=["x0", "x2", "x3"])
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5970 | 37.275641 | 114 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-anonymous-manually-left.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"manually",
"unique_value",
"iv_value_thres",
"coefficient_of_variation_value_thres",
"iv_percentile",
"outlier_cols"
],
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
hetero_feature_selection_0.get_party_instance(role='guest',
party_id=guest).component_param(
manually_param={"filter_out_indexes": [
0,
1,
2
],
"filter_out_names": [
"x3"
]
})
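    # host addresses its columns by anonymous names (host_<party_id>_x*) since use_anonymous is enabled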
host_param = {
"manually_param":
{
"left_col_names": [
f"host_{host}_x3"
]
},
"use_anonymous": True}
hetero_feature_selection_0.get_party_instance(role="host", party_id=host).component_param(**host_param)
pipeline.add_component(hetero_feature_selection_0,
data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 6184 | 35.382353 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-selection.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role="host", party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role="guest", party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role="host", party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
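    # run all six filters: the IV filters consume the binning model, while the
    # statistic model feeds the coefficient-of-variation and outlier filters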
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"manually",
"unique_value",
"iv_value_thres",
"coefficient_of_variation_value_thres",
"iv_percentile",
"outlier_cols"
],
"manually_param": {
"filter_out_indexes": [],
"filter_out_names": []
},
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5635 | 36.573333 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-manually.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"manually",
"unique_value",
"iv_value_thres",
"coefficient_of_variation_value_thres",
"iv_percentile",
"outlier_cols"
],
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
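    # guest additionally drops the first three columns and x3 via the manually filter;
    # host keeps the default selection parameters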
hetero_feature_selection_0.get_party_instance(role='guest',
party_id=guest).component_param(
manually_param={"filter_out_indexes": [
0,
1,
2
],
"filter_out_names": [
"x3"
]
})
pipeline.add_component(hetero_feature_selection_0,
data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
    # compile pipeline once all components are added; this step generates the conf and dsl files for the job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5878 | 35.74375 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-multiclass-all-case.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
guest_validate_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
host_validate_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_0.get_party_instance(
role='host', party_id=host).component_param(
with_label=False, output_format="dense")
reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
data_transform_1.get_party_instance(
role='guest', party_id=guest).component_param(
with_label=True, output_format="dense")
data_transform_1.get_party_instance(
role='host', party_id=host).component_param(
with_label=True, output_format="dense")
intersection_0 = Intersection(name="intersection_0")
intersection_1 = Intersection(name="intersection_1")
param = {
"method": "quantile",
"optimal_binning_param": {
"metric_method": "gini",
"min_bin_pct": 0.05,
"max_bin_pct": 0.8,
"init_bucket_method": "quantile",
"init_bin_nums": 100,
"mixture": True
},
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": [0, 1, 2],
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **param)
hetero_feature_binning_1 = HeteroFeatureBinning(name='hetero_feature_binning_1')
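    # three IV filters applied in sequence (threshold, top_k, top_percentile),
    # each with its own multi-class merge strategy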
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": ["iv_filter"],
"iv_param": {
"metrics": ["iv", "iv", "iv"],
"filter_type": ["threshold", "top_k", "top_percentile"],
"threshold": [2, 10, 0.9],
"mul_class_merge_type": ["max", "min", "average"]
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
hetero_feature_selection_1 = HeteroFeatureSelection(name="hetero_feature_selection_1")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(reader_1)
pipeline.add_component(
data_transform_1, data=Data(
data=reader_1.output.data), model=Model(
data_transform_0.output.model))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(intersection_1, data=Data(data=data_transform_1.output.data))
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
pipeline.add_component(hetero_feature_binning_1, data=Data(data=intersection_1.output.data),
model=Model(hetero_feature_binning_0.output.model))
pipeline.add_component(hetero_feature_selection_0, data=Data(data=hetero_feature_binning_0.output.data),
model=Model(isometric_model=hetero_feature_binning_0.output.model))
pipeline.add_component(hetero_feature_selection_1, data=Data(data=hetero_feature_binning_1.output.data),
model=Model(hetero_feature_selection_0.output.model))
pipeline.compile()
pipeline.fit()
# predict
# deploy required components
    pipeline.deploy_component([data_transform_0, intersection_0,
                               hetero_feature_binning_0, hetero_feature_selection_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_1)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_1.output.data}))
    # run the prediction job
predict_pipeline.predict()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 6666 | 40.930818 | 119 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_selection/pipeline-hetero-feature-selection-anonymous-manually.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataStatistics
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import HeteroFeatureSelection
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=host)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
# define DataTransform components
data_transform_0 = DataTransform(name="data_transform_0") # start component numbering at 0
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
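    # quantile binning over all feature columns (bin_indexes: -1); the transform step
    # replaces each feature value with its bin index ("bin_num")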
binning_param = {
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **binning_param)
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
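    # compute the 95th percentile, coefficient of variance and stddev of every column;
    # this statistic model feeds the selection filters below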
statistic_param = {
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(name="statistic_0", **statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
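    # selection combines a manual filter with filters driven by the binning and
    # statistic results passed in later as isometric models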
selection_param = {
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"manually",
"unique_value",
"iv_value_thres",
"coefficient_of_variation_value_thres",
"iv_percentile",
"outlier_cols"
],
"unique_param": {
"eps": 1e-06
},
"iv_value_param": {
"value_threshold": 0.1
},
"iv_percentile_param": {
"percentile_threshold": 0.9
},
"variance_coe_param": {
"value_threshold": 0.3
},
"outlier_param": {
"percentile": 0.95,
"upper_threshold": 2.0
}}
hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0", **selection_param)
    hetero_feature_selection_0.get_party_instance(role='guest', party_id=guest).component_param(
        manually_param={
            "filter_out_indexes": [0, 1, 2],
            "filter_out_names": ["x3"]
        })
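    # with use_anonymous enabled, the host filters out its own feature by its anonymous
    # name (host_<party_id>_x3), which is how host columns appear to other parties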
host_param = {
"manually_param": {
"filter_out_names": [
f"host_{host}_x3"
]
},
"use_anonymous": True}
hetero_feature_selection_0.get_party_instance(role="host", party_id=host).component_param(**host_param)
pipeline.add_component(hetero_feature_selection_0,
data=Data(data=intersection_0.output.data),
model=Model(isometric_model=[hetero_feature_binning_0.output.model,
statistic_0.output.model]))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 6,158 | 35.443787 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/pipeline-hetero-lr-model-loader.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import HeteroLR
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.component import ModelLoader
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
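    # model_id/model_version must point at a completed training job (e.g. one produced by
    # pipeline-hetero-lr-train.py); step_index picks the saved checkpoint to warm-start from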
param = {
"model_id": "arbiter-9999#guest-10000#host-9999#model",
"model_version": "202108311438379703480",
"component_name": "hetero_lr_0",
"step_index": 2
}
model_loader_0 = ModelLoader(name="model_loader_0", **param)
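    # ModelCheckpoint with save_freq=1 stores a checkpoint after every iteration, so a
    # later job can resume training from any step_index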
lr_param = {
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"callback_param": {
"callbacks": ["ModelCheckpoint"],
"validation_freqs": 1,
"early_stopping_rounds": 1,
"metrics": None,
"use_first_metric_only": False,
"save_freq": 1
}
}
hetero_lr_0 = HeteroLR(name="hetero_lr_0", max_iter=30, **lr_param)
pipeline.add_component(model_loader_0)
pipeline.add_component(hetero_lr_0, data=Data(train_data=intersection_0.output.data),
model=Model(model=model_loader_0.output.model))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,158 | 35.076923 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/pipeline-hetero-lr-train.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import HeteroLR
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
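    # ModelCheckpoint with save_freq=1 saves a checkpoint after every iteration; the
    # companion model-loader example resumes training from one of these checkpoints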
lr_param = {
"penalty": "L2",
"optimizer": "rmsprop",
"tol": 0.0001,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": True
},
"encrypt_param": {
"key_length": 1024
},
"callback_param": {
"callbacks": ["ModelCheckpoint"],
"validation_freqs": 1,
"early_stopping_rounds": 1,
"metrics": None,
"use_first_metric_only": False,
"save_freq": 1
}
}
hetero_lr_0 = HeteroLR(name="hetero_lr_0", max_iter=3, **lr_param)
pipeline.add_component(hetero_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,695 | 34.575758 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/pipeline-hetero-binning-train.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
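    # quantile binning with WOE transformation on the first three guest columns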
param = {
"name": "hetero_feature_binning_0",
"method": "quantile",
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": [
0,
1,
2
],
"transform_names": None,
"transform_type": "woe"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(**param)
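    # the host holds no labels, so WOE values cannot be computed on its side;
    # disable the transform for the host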
hetero_feature_binning_0.get_party_instance(role="host", party_id=host).component_param(
transform_param={"transform_type": None}
)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,463 | 34.71134 | 103 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/pipeline-hetero-sshe-lr-train.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import HeteroSSHELR
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
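    # checkpoints are saved every iteration (save_freq=1) so the model-loader
    # example can later resume from them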
lr_param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 0.00001,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.1,
"init_param": {
"init_method": "random_uniform",
"fit_intercept": True
},
"callback_param": {
"callbacks": ["ModelCheckpoint"],
"validation_freqs": 1,
"early_stopping_rounds": 1,
"metrics": None,
"use_first_metric_only": False,
"save_freq": 1
}
}
hetero_sshe_lr_0 = HeteroSSHELR(name="hetero_sshe_lr_0", max_iter=8, **lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,458 | 34.110236 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/pipeline-hetero-sshe-lr-model-loader.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import HeteroSSHELR
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.component import ModelLoader
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
arbiter = parties.arbiter[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
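    # fill in model_id/model_version from a completed training job (e.g. one produced by
    # pipeline-hetero-sshe-lr-train.py) before running; step_index picks the checkpoint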
param = {
"model_id": "",
"model_version": "",
"component_name": "hetero_sshe_lr_0",
"step_index": 7
}
model_loader_0 = ModelLoader(name="model_loader_0", **param)
lr_param = {
"penalty": "L2",
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"early_stop": "diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "random_uniform",
"fit_intercept": True
},
"callback_param": {
"callbacks": ["ModelCheckpoint"],
"validation_freqs": 1,
"early_stopping_rounds": 1,
"metrics": None,
"use_first_metric_only": False,
"save_freq": 1
}
}
hetero_sshe_lr_0 = HeteroSSHELR(name="hetero_sshe_lr_0", max_iter=10, **lr_param)
pipeline.add_component(model_loader_0)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data),
model=Model(model=model_loader_0.output.model))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 5,071 | 35.228571 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/pipeline-hetero-binning-selection-model-loader.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureSelection
from pipeline.component import ModelLoader
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
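    # model_id/model_version must reference a completed job containing the binning model
    # reused below; step_index is left as None, which loads the component's final model
    # rather than an intermediate checkpoint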
param = {
"model_id": "guest-10000#host-9999#model",
"model_version": "202108301602196678300",
"component_name": "hetero_feature_binning_0",
"step_index": None
}
model_loader_0 = ModelLoader(name="model_loader_0", **param)
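    # iv_filter applies three IV-based rules in sequence: an absolute threshold,
    # a top-k cut, and a top-percentile cut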
selection_param = {
"name": "hetero_feature_selection_0",
"select_col_indexes": -1,
"select_names": [],
"filter_methods": [
"iv_filter"
],
"iv_param": {
"metrics": ["iv", "iv", "iv"],
"filter_type": ["threshold", "top_k", "top_percentile"],
"take_high": True,
"threshold": [0.03, 15, 0.7],
"host_thresholds": [[0.15], None, None],
"select_federated": True
}
}
hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(model_loader_0)
pipeline.add_component(hetero_feature_selection_0, data=Data(data=intersection_0.output.data),
model=Model(isometric_model=model_loader_0.output.model))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,716 | 36.928571 | 103 |
py
|
FATE
|
FATE-master/examples/pipeline/model_loader/__init__.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 614 | 40 | 75 |
py
|
FATE
|
FATE-master/examples/pipeline/data_statistics/pipeline-data-statistics-partial-column-missing.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.intersection import Intersection
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.data_statistics import DataStatistics
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "ionosphere_scale_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "ionosphere_scale_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense', missing_fill=False)
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True, label_name="label")
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
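    # missing values are kept as-is (missing_fill=False); statistics are computed on
    # columns picked both by index ([1, 2]) and by name ("x3")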
statistic_param = {
"name": "statistic_0",
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": [1, 2],
"column_names": ["x3"]
}
statistic_0 = DataStatistics(**statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("statistic_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,087 | 36.851852 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/data_statistics/pipeline-data-statistics-partial-column.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.component import DataStatistics
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
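    # restrict the statistics to the columns at indexes 1 and 2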
statistic_param = {
"name": "statistic_0",
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": [1, 2],
"column_names": []
}
statistic_0 = DataStatistics(**statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("statistic_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,923 | 35.672897 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/data_statistics/pipeline-data-statistics-partial-column-name.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.intersection import Intersection
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.data_statistics import DataStatistics
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
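    # pick columns both by index ([1, 2]) and by name ("x3")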
statistic_param = {
"name": "statistic_0",
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": [1, 2],
"column_names": ["x3"]
}
statistic_0 = DataStatistics(**statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("statistic_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 4,027 | 36.296296 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/data_statistics/__init__.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 614 | 40 | 75 |
py
|
FATE
|
FATE-master/examples/pipeline/data_statistics/pipeline-data-statistics-all-columns.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.component import DataStatistics
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def main(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
# host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
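    # column_indexes of -1 computes the statistics over every column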
statistic_param = {
"name": "statistic_0",
"statistics": ["95%", "coefficient_of_variance", "stddev"],
"column_indexes": -1,
"column_names": []
}
statistic_0 = DataStatistics(**statistic_param)
pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("statistic_0").get_summary())
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,919 | 35.635514 | 109 |
py
|
FATE
|
FATE-master/examples/pipeline/homo_graph/pipeline_homo_graph_sage.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
# torch
import torch as t
from torch import nn
from pipeline import fate_torch_hook
from pipeline.component.nn import TrainerParam
from pipeline.backend.pipeline import PipeLine
from pipeline.component import HomoNN, Evaluation
from pipeline.component.reader import Reader
from pipeline.interface import Data
from pipeline.component.nn import DatasetParam
import os
fate_torch_hook(t)
def main(config="../../config.yaml", namespace=""):
fate_project_path = os.getenv("FATE_PROJECT_BASE")
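    # FATE_PROJECT_BASE must be set for the data paths below; note that this example
    # hard-codes the party ids instead of reading them from the config file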
host = 10000
guest = 9999
arbiter = 10000
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host,
arbiter=arbiter)
data_0 = {"name": "cora_guest", "namespace": "experiment"}
data_1 = {"name": "cora_host", "namespace": "experiment"}
data_path_0 = fate_project_path + '/examples/data/cora4fate/guest'
data_path_1 = fate_project_path + '/examples/data/cora4fate/host'
pipeline.bind_table(name=data_0['name'], namespace=data_0['namespace'], path=data_path_0)
pipeline.bind_table(name=data_1['name'], namespace=data_1['namespace'], path=data_path_1)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=data_0)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=data_1)
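    # the "graph" dataset reads node features from feats.csv and the edge list from
    # adj.csv under each party's bound table path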
dataset_param = DatasetParam("graph",
id_col='id',
label_col='y',
feature_dtype='float',
label_dtype='long',
feats_name='feats.csv',
feats_dataset_col='dataset',
feats_dataset_train='train',
feats_dataset_vali='vali',
feats_dataset_test='test',
adj_name='adj.csv',
adj_src_col='node1',
adj_dst_col='node2')
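    # CustModel instantiates a user-defined network by module/class name, here a
    # GraphSAGE model (class Sage in the graphsage module of the nn model zoo)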
model = t.nn.Sequential(
t.nn.CustModel(module_name='graphsage', class_name='Sage', in_channels=1433, hidden_channels=64, class_num=7)
)
loss = nn.NLLLoss()
optimizer = t.optim.Adam(model.parameters(), lr=0.001)
homo_graph_0 = HomoNN(
name="homo_graph_0",
model=model,
loss=loss,
optimizer=optimizer,
dataset=dataset_param,
trainer=TrainerParam(trainer_name='fedavg_graph_trainer', epochs=10, batch_size=10,
validation_freqs=1, num_neighbors=[11, 11], task_type='multi'),
torch_seed=100
)
pipeline.add_component(reader_0)
pipeline.add_component(homo_graph_0, data=Data(train_data=reader_0.output.data))
pipeline.add_component(Evaluation(name='eval_0', eval_type='multi'), data=Data(data=homo_graph_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str, help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,919 | 38.59596 | 117 |
py
|
FATE
|
FATE-master/examples/pipeline/hetero_feature_binning/pipeline-hetero-binning-sparse-optimal-ks.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import HeteroFeatureBinning
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0")
data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=False,
output_format="sparse")
intersection_0 = Intersection(name="intersection_0")
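    # optimal KS binning: start from 100 bucket-initialized bins, then merge them under
    # the KS metric while keeping each bin between 5% and 80% of the samples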
param = {
"method": "optimal",
"optimal_binning_param": {
"metric_method": "ks",
"min_bin_pct": 0.05,
"max_bin_pct": 0.8,
"init_bucket_method": "bucket",
"init_bin_nums": 100,
"mixture": True
},
"compress_thres": 10000,
"head_size": 10000,
"error": 0.001,
"bin_num": 10,
"bin_indexes": -1,
"bin_names": None,
"category_indexes": None,
"category_names": None,
"adjustment_factor": 0.5,
"local_only": False,
"transform_param": {
"transform_cols": -1,
"transform_names": None,
"transform_type": "bin_num"
}
}
hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0", **param)
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
pipeline.add_component(hetero_feature_binning_0, data=Data(data=intersection_0.output.data))
pipeline.compile()
pipeline.fit()
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 3,618 | 35.555556 | 107 |
py
|