Commit f4955dbf authored by Erick Lavoie

Added experiment files for IID vs non-IID figures

parent dff41cf6
#!/usr/bin/env bash
# D-PSGD on MNIST (linear model), fully-connected topology, IID data:
# 100 nodes, each holding examples from all 10 classes.
TOOLS=../../../../../learn-topology/tools; CWD="$(pwd)"; cd "$TOOLS"
BSZS='
128
'
LRS='
0.1
'
for BSZ in $BSZS;
do for LR in $LRS;
do python sgp-mnist.py --nb-nodes 100 --nb-epochs 100 --local-classes 10 --seed 1 --nodes-per-class 100 100 100 100 100 100 100 100 100 100 --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 --dist-optimization d-psgd --topology fully_connected --metric random --learning-momentum 0. --sync-per-mini-batch 1 --results-directory $CWD/all --learning-rate $LR --batch-size $BSZ "$@" --parallel-training --nb-workers 10 --dataset mnist --model linear
done;
done;
#!/usr/bin/env bash
# D-PSGD on MNIST (linear model), grid topology, IID data:
# 100 nodes, each holding examples from all 10 classes.
TOOLS=../../../../../learn-topology/tools; CWD="$(pwd)"; cd "$TOOLS"
BSZS='
128
'
LRS='
0.1
'
for BSZ in $BSZS;
do for LR in $LRS;
do python sgp-mnist.py --nb-nodes 100 --nb-epochs 100 --local-classes 10 --seed 1 --nodes-per-class 100 100 100 100 100 100 100 100 100 100 --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 --dist-optimization d-psgd --topology grid --metric random --learning-momentum 0. --sync-per-mini-batch 1 --results-directory $CWD/all --learning-rate $LR --batch-size $BSZ "$@" --parallel-training --nb-workers 10 --dataset mnist --model linear
done;
done;
#!/usr/bin/env bash
# D-PSGD on MNIST (linear model), grid topology, non-IID data:
# 100 nodes, each holding examples from a single class (10 nodes per class).
TOOLS=../../../../../learn-topology/tools; CWD="$(pwd)"; cd "$TOOLS"
BSZS='
128
'
LRS='
0.1
'
for BSZ in $BSZS;
do for LR in $LRS;
do python sgp-mnist.py --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 --nodes-per-class 10 10 10 10 10 10 10 10 10 10 --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 --dist-optimization d-psgd --topology grid --metric random --learning-momentum 0. --sync-per-mini-batch 1 --results-directory $CWD/all --learning-rate $LR --batch-size $BSZ "$@" --parallel-training --nb-workers 10 --dataset mnist --model linear
done;
done;
#!/usr/bin/env bash
# D-PSGD on MNIST (linear model), ring topology, IID data:
# 100 nodes, each holding examples from all 10 classes.
TOOLS=../../../../../learn-topology/tools; CWD="$(pwd)"; cd "$TOOLS"
BSZS='
128
'
LRS='
0.1
'
for BSZ in $BSZS;
do for LR in $LRS;
do python sgp-mnist.py --nb-nodes 100 --nb-epochs 100 --local-classes 10 --seed 1 --nodes-per-class 100 100 100 100 100 100 100 100 100 100 --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 --dist-optimization d-psgd --topology ring --metric random --learning-momentum 0. --sync-per-mini-batch 1 --results-directory $CWD/all --learning-rate $LR --batch-size $BSZ "$@" --parallel-training --nb-workers 10 --dataset mnist --model linear
done;
done;
#!/usr/bin/env bash
# D-PSGD on MNIST (linear model), ring topology, non-IID data:
# 100 nodes, each holding examples from a single class (10 nodes per class).
TOOLS=../../../../../learn-topology/tools; CWD="$(pwd)"; cd "$TOOLS"
BSZS='
128
'
LRS='
0.1
'
for BSZ in $BSZS;
do for LR in $LRS;
do python sgp-mnist.py --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 --nodes-per-class 10 10 10 10 10 10 10 10 10 10 --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 --dist-optimization d-psgd --topology ring --metric random --learning-momentum 0. --sync-per-mini-batch 1 --results-directory $CWD/all --learning-rate $LR --batch-size $BSZ "$@" --parallel-training --nb-workers 10 --dataset mnist --model linear
done;
done;
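
The five scripts above differ only in the --topology flag and in the data-distribution flags (--local-classes 10 with 100 nodes per class for IID, --local-classes 1 with 10 nodes per class for non-IID). A minimal consolidated sketch, assuming the same sgp-mnist.py interface and relative learn-topology/tools path as above, is shown below; the run_config helper and the single-file layout are hypothetical, and the "$@" pass-through of the original scripts is omitted for brevity.

#!/usr/bin/env bash
# Hypothetical consolidated driver (not part of the original commit): runs the
# same D-PSGD MNIST experiment once per topology/data-distribution pair above.
TOOLS=../../../../../learn-topology/tools; CWD="$(pwd)"; cd "$TOOLS"
BSZ=128
LR=0.1
IID_NODES="100 100 100 100 100 100 100 100 100 100"   # every node sees all 10 classes
NONIID_NODES="10 10 10 10 10 10 10 10 10 10"          # 10 nodes per class, 1 class per node

run_config () {
  TOPO=$1; CLASSES=$2; NODES_PER_CLASS=$3
  # NODES_PER_CLASS is intentionally unquoted below so it expands to ten arguments.
  python sgp-mnist.py --nb-nodes 100 --nb-epochs 100 --local-classes "$CLASSES" \
    --seed 1 --nodes-per-class $NODES_PER_CLASS \
    --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
    --dist-optimization d-psgd --topology "$TOPO" --metric random \
    --learning-momentum 0. --sync-per-mini-batch 1 \
    --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
    --parallel-training --nb-workers 10 --dataset mnist --model linear
}

run_config fully_connected 10 "$IID_NODES"
run_config grid            10 "$IID_NODES"
run_config grid            1  "$NONIID_NODES"
run_config ring            10 "$IID_NODES"
run_config ring            1  "$NONIID_NODES"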