import random
from decentralizepy.sharing.PartialModel import PartialModel
class RandomAlphaIncremental(PartialModel):
    """
    This class implements partial model sharing with a random alpha drawn
    from a range whose upper bound increases each iteration.
    """

    def __init__(
        self,
        rank,
        machine_id,
        communication,
        mapping,
        graph,
        model,
        dataset,
        log_dir,
        dict_ordered=True,
        save_shared=False,
        metadata_cap=1.0,
        range_start=0.1,
        range_end=1.0,
    ):
        """
        Constructor

        Parameters
        ----------
        rank : int
            Local rank
        machine_id : int
            Global machine id
        communication : decentralizepy.communication.Communication
            Communication module used to send and receive messages
        mapping : decentralizepy.mappings.Mapping
            Mapping (rank, machine_id) -> uid
        graph : decentralizepy.graphs.Graph
            Graph representing neighbors
        model : decentralizepy.models.Model
            Model to train
        dataset : decentralizepy.datasets.Dataset
            Dataset for sharing data. Not implemented yet! TODO
        log_dir : str
            Location to write shared_params (only writing for 2 procs per machine)
        dict_ordered : bool
            Specifies if the python dict maintains the order of insertion
        save_shared : bool
            Specifies if the indices of shared parameters should be logged
        metadata_cap : float
            Share full model when self.alpha > metadata_cap
        range_start : float
            Lower bound of the uniform interval alpha is sampled from
        range_end : float
            Initial upper bound of the interval; grows each step, capped at 1.0
        """
        super().__init__(
            rank,
            machine_id,
            communication,
            mapping,
            graph,
            model,
            dataset,
            log_dir,
            1.0,  # placeholder alpha; overwritten on every step()
            dict_ordered,
            save_shared,
            metadata_cap,
        )
        # Seed a per-node RNG stream so each node draws an independent,
        # reproducible alpha sequence.
        random.seed(self.mapping.get_uid(self.rank, self.machine_id))
        self.range_start = range_start
        self.range_end = range_end

    def step(self):
        """
        Perform a sharing step. Implements D-PSGD with alpha randomly chosen
        from an increasing range.
        """
        # Draw this round's sharing fraction, then widen the interval's upper
        # bound by a random increment (at most 0.1), capped at 1.0.
        self.alpha = round(random.uniform(self.range_start, self.range_end), 2)
        self.range_end = min(1.0, self.range_end + round(random.uniform(0.0, 0.1), 2))
        # Delegate the actual partial-model exchange to the parent; without
        # this the method only updates alpha and never shares anything.
        super().step()