Skip to content
Snippets Groups Projects
Commit 161ef541 authored by Jeffrey Wigger's avatar Jeffrey Wigger
Browse files

run reddit dynamic jwins

parent f62d9eb3
No related tags found
No related merge requests found
...@@ -54,7 +54,7 @@ export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
# tests=("step_configs/config_reddit_sharing_topKdynamicGraph.ini")
# tests=("step_configs/config_reddit_sharing_topKsharingasyncrw.ini" "step_configs/config_reddit_sharing_topKdpsgdrwasync.ini" "step_configs/config_reddit_sharing_topKdpsgdrw.ini")
tests=("step_configs/config_reddit_sharing_dynamicGraphJwins.ini") # ("step_configs/config_reddit_sharing_dpsgdrwasync0.ini")
# tests=("step_configs/config_reddit_sharing_dpsgdrw.ini" "step_configs/config_reddit_sharing_dpsgdrwasync.ini" "step_configs/config_reddit_sharing_sharingasyncrw.ini" "step_configs/config_reddit_sharing_sharingrw.ini")
# Learning rates
lr="1"
...@@ -71,7 +71,7 @@ echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
# random_seeds=("90" "91" "92" "93" "94")
random_seeds=("90" "91" "92")
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
......
[DATASET]
; Dataset plumbing: class is resolved by name from the given package.
dataset_package = decentralizepy.datasets.Reddit
dataset_class = Reddit
; NOTE(review): this seed is separate from the random_seeds loop in the
; driver script — confirm whether 97 is intentionally fixed here.
random_seed = 97
model_class = RNN
; NFS paths to the preprocessed LEAF Reddit splits.
train_dir = /mnt/nfs/shared/leaf/data/reddit_new/per_user_data/train
test_dir = /mnt/nfs/shared/leaf/data/reddit_new/new_small_data/test
; python list of fractions below
; Left empty on purpose: "explicitly blank" rather than omitted.
sizes =
[OPTIMIZER_PARAMS]
; Optimizer class is resolved by name from torch.optim.
optimizer_package = torch.optim
optimizer_class = SGD
; Learning rate. NOTE(review): the driver script defines lr="1" — confirm
; which value actually takes effect for a run.
lr = 0.001
[TRAIN_PARAMS]
; Local-training driver; class resolved by name from the package.
training_package = decentralizepy.training.Training
training_class = Training
; With full_epochs = False below, "rounds" counts training steps per
; communication round rather than whole epochs — TODO confirm against the
; Training class.
rounds = 47
full_epochs = False
batch_size = 16
shuffle = True
; Loss class resolved by name from torch.nn.
loss_package = torch.nn
loss_class = CrossEntropyLoss
[COMMUNICATION]
; TCP transport with random-walk routing between peers.
comm_package = decentralizepy.communication.TCPRandomWalkRouting
comm_class = TCPRandomWalkRouting
; JSON file mapping machine ids to IP addresses (6-machine deployment).
addresses_filepath = ip_addr_6Machines.json
; Compression codec applied to exchanged messages when compress = True.
compression_package = decentralizepy.compression.Eliaszfplossy1
compression_class = Eliaszfplossy1
compress = True
; NOTE(review): "equi" presumably selects an equi-probability peer sampler —
; verify against the communication class.
sampler = equi
[SHARING]
; Jwins sharing on a dynamic graph; class resolved by name from the package.
sharing_package = decentralizepy.sharing.JwinsDynamicGraph
sharing_class = JwinsDynamicGraph
; Spacing normalized to the file-wide "key = value" convention; configparser
; strips whitespace around "=", so parsed values are unchanged.
; NOTE(review): alpha ~= 1/12 — presumably the fraction of model parameters
; (wavelet coefficients) shared per round; confirm against JwinsDynamicGraph.
alpha = 0.0833
lower_bound = 0.2
metro_hastings = False
change_based_selection = True
; Wavelet family name — looks like PyWavelets naming (sym2); verify.
wavelet = sym2
; Literal string "None": the application, not the parser, interprets it as
; "no fixed decomposition level" — TODO confirm.
level = None
accumulation = True
accumulate_averaging_changes = True
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment