Skip to content
Snippets Groups Projects
Commit 8bc441bd authored by Erick Lavoie's avatar Erick Lavoie
Browse files

Added MNIST LeNet experiments

parent 0d064232
No related branches found
No related tags found
No related merge requests found
......@@ -581,6 +581,15 @@ In addition, it is important that all nodes are initialized with the same model
\caption{\label{fig:d-cliques-cifar10-clique-clustering} CIFAR10: Effect of Relaxed Intra-Clique Connectivity.}
\end{figure}
\section{XP Suppl.}
\begin{itemize}
\item Test MNIST avec gn-lenet
\item Enlever les mentions à l'initialisation uniforme
\item Plot Scattering (Citer l'article décentralisé)
\item Test topology en n-log n https://dl.acm.org/doi/10.1145/335305.335325
\end{itemize}
\section{Related Work}
......
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# fully-connected-cliques topology, WITH clique gradient averaging and
# initial model averaging. Sweeps the batch sizes and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology fully-connected-cliques \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --clique-gradient --initial-averaging \
      --accuracy-logging-interval 1
  done
done
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# fully-connected-cliques topology, WITH initial model averaging but WITHOUT
# clique gradient averaging. Sweeps the batch sizes and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology fully-connected-cliques \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --accuracy-logging-interval 1 --initial-averaging
  done
done
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# fully-connected-cliques topology, baseline (no clique gradient, no initial
# averaging). Sweeps the batch sizes and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology fully-connected-cliques \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --accuracy-logging-interval 1
  done
done
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# fully-connected-cliques topology, WITH clique gradient averaging but WITHOUT
# initial model averaging. Sweeps the batch sizes and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology fully-connected-cliques \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --clique-gradient --accuracy-logging-interval 1
  done
done
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# greedy-diverse-10 topology, WITH unbiased gradient. Sweeps the batch sizes
# and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology greedy-diverse-10 \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --accuracy-logging-interval 1 --unbiased-gradient
  done
done
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# greedy-diverse-10 topology, baseline (no unbiased gradient). Sweeps the
# batch sizes and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology greedy-diverse-10 \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --accuracy-logging-interval 1
  done
done
#!/usr/bin/env bash
# D-Cliques MNIST experiment: 100 nodes (1 class each), GN-LeNet model,
# random-10 topology baseline. Sweeps the batch sizes and learning rates below.
# Any extra command-line arguments are forwarded verbatim to sgp-mnist.py.
set -euo pipefail

TOOLS=../../../../../learn-topology/tools
CWD="$(pwd)"
# Abort early if the tools checkout is missing, instead of running in the wrong dir.
cd "$TOOLS" || { echo "error: cannot cd to $TOOLS" >&2; exit 1; }

# Hyper-parameter sweep values (single-element arrays for this run).
BSZS=(20)
LRS=(0.002)

for BSZ in "${BSZS[@]}"; do
  for LR in "${LRS[@]}"; do
    python sgp-mnist.py \
      --nb-nodes 100 --nb-epochs 100 --local-classes 1 --seed 1 \
      --nodes-per-class 10 10 10 10 10 10 10 10 10 10 \
      --global-train-ratios 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 0.802568 \
      --dist-optimization d-psgd --topology random-10 \
      --metric dissimilarity --learning-momentum 0.9 --sync-per-mini-batch 1 \
      --results-directory "$CWD/all" --learning-rate "$LR" --batch-size "$BSZ" \
      "$@" \
      --single-process --nb-logging-processes 10 --dataset mnist \
      --model gn-lenet --accuracy-logging-interval 1
  done
done
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment