{ "cells": [ { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "from datasets.Femnist import Femnist\n", "from graphs import SmallWorld\n", "from collections import defaultdict\n", "import os\n", "import json\n", "import numpy as np\n" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "datasets.Femnist.Femnist" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "a = Femnist\n", "a" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "b = SmallWorld(6, 2, 2, 1)" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[{1, 2, 5, 9, 10},\n", " {0, 2, 3, 4, 5},\n", " {0, 1, 3, 8, 10},\n", " {1, 2, 4, 6, 7, 8, 10},\n", " {1, 3, 5, 8, 10},\n", " {0, 1, 4, 6, 9},\n", " {1, 3, 5, 7, 8, 10},\n", " {0, 2, 3, 6, 8, 9, 11},\n", " {1, 2, 3, 4, 6, 7, 11},\n", " {0, 2, 4, 5, 7, 10, 11},\n", " {0, 2, 3, 4, 5, 6, 9},\n", " {0, 4, 7, 8, 9}]" ] }, "execution_count": 9, "metadata": {}, "output_type": "execute_result" } ], "source": [ "b.adj_list" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "{1, 2, 5, 9, 10}\n", "{0, 2, 3, 4, 5}\n", "{0, 1, 3, 8, 10}\n", "{1, 2, 4, 6, 7, 8, 10}\n", "{1, 3, 5, 8, 10}\n", "{0, 1, 4, 6, 9}\n", "{1, 3, 5, 7, 8, 10}\n", "{0, 2, 3, 6, 8, 9, 11}\n", "{1, 2, 3, 4, 6, 7, 11}\n", "{0, 2, 4, 5, 7, 10, 11}\n", "{0, 2, 3, 4, 5, 6, 9}\n", "{0, 4, 7, 8, 9}\n" ] } ], "source": [ "for i in range(12):\n", " print(b.neighbors(i))" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "clients = []" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "num_samples = []\n", "data = defaultdict(lambda : None)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, 
"outputs": [], "source": [ "datadir = \"./leaf/data/femnist/data/train\"\n", "files = os.listdir(datadir)\n", "total_users=0\n", "users = set()" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "files = os.listdir(datadir)[0:1]" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "./leaf/data/femnist/data/train/all_data_6_niid_0_keep_0_train_9.json\n", "Current_Users: 100\n", "total_users: 200\n", "total_users: 100\n", "dict_keys(['f3408_47', 'f3327_11', 'f3417_01', 'f3339_15', 'f3580_22', 'f3414_29', 'f3328_45', 'f3592_19', 'f3516_45', 'f3130_44', 'f3321_36', 'f3284_38', 'f3232_11', 'f3547_04', 'f3265_08', 'f3500_08', 'f3243_44', 'f3349_22', 'f3118_09', 'f3179_39', 'f3381_42', 'f3198_32', 'f3299_12', 'f3237_27', 'f3593_26', 'f3133_33', 'f3591_14', 'f3231_19', 'f3478_49', 'f3447_20', 'f3442_00', 'f3464_12', 'f3293_30', 'f3111_05', 'f3227_14', 'f3146_14', 'f3165_11', 'f3440_33', 'f3379_03', 'f3529_11', 'f3441_24', 'f3253_11', 'f3238_40', 'f3583_09', 'f3256_38', 'f3325_08', 'f3512_31', 'f3214_03', 'f3572_03', 'f3457_40', 'f3419_33', 'f3496_38', 'f3582_25', 'f3205_40', 'f3353_33', 'f3115_25', 'f3517_27', 'f3567_49', 'f3230_21', 'f3336_15', 'f3415_33', 'f3280_34', 'f3294_06', 'f3171_30', 'f3363_42', 'f3105_03', 'f3545_06', 'f3426_23', 'f3102_36', 'f3164_09', 'f3202_01', 'f3365_46', 'f3450_19', 'f3573_02', 'f3290_01', 'f3443_42', 'f3471_02', 'f3136_07', 'f3553_12', 'f3434_00', 'f3537_23', 'f3479_08', 'f3578_27', 'f3286_40', 'f3155_15', 'f3494_34', 'f3460_47', 'f3595_18', 'f3518_46', 'f3433_10', 'f3538_29', 'f3266_12', 'f3375_30', 'f3390_07', 'f3261_00', 'f3221_05', 'f3139_09', 'f3234_23', 'f3341_29', 'f3485_45'])\n", "(155, 784)\n", "(155,)\n", "(164, 784)\n", "(164,)\n", "[ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 16 18 19 20 21 22 23 24 25\n", " 26 27 29 30 31 32 33 34 35 36 37 38 39 40 43 44 45 46 47 48 49 50 51 52\n", " 53 54 55 56 57 58 60 61]\n" 
] } ], "source": [ "for f in files:\n", " file_path = os.path.join(datadir, f)\n", " print(file_path)\n", " with open(file_path, 'r') as inf:\n", " client_data = json.load(inf)\n", " current_users = len(client_data['users'])\n", " print(\"Current_Users: \", current_users)\n", " total_users += current_users\n", " users.update(client_data['users'])\n", "\n", "print(\"total_users: \", total_users)\n", "print(\"total_users: \", len(users))\n", "print(client_data['user_data'].keys())\n", "print(np.array(client_data['user_data']['f3408_47']['x']).shape)\n", "print(np.array(client_data['user_data']['f3408_47']['y']).shape)\n", "print(np.array(client_data['user_data']['f3327_11']['x']).shape)\n", "print(np.array(client_data['user_data']['f3327_11']['y']).shape)\n", "print(np.unique(np.array(client_data['user_data']['f3327_11']['y'])))" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "import torch\n", "['import torch.multiprocessing as mp\\n', '\\n', '\\n', 'x = [1, 2]\\n', '\\n', 'def f(id, a):\\n', ' print(id, x)\\n', ' print(id, a)\\n', '\\n', \"if __name__ == '__main__':\\n\", ' x.append(3)\\n', ' mp.spawn(f, nprocs=2, args=(x, ))']\n" ] } ], "source": [ "file = 'run.py'\n", "with open(file, 'r') as inf:\n", " print(inf.readline().strip())\n", " print(inf.readlines())" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['a', 'a', 'a']\n", "['a', 'a', 'c']\n" ] } ], "source": [ "def f(l):\n", " l[2] = 'c'\n", "\n", "a = ['a', 'a', 'a']\n", "print(a)\n", "f(a)\n", "print(a)" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['a', 'b']\n" ] } ], "source": [ "l = ['a', 'b', 'c']\n", "print(l[:-1])" ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", 
"text": [ "Section: GRAPH\n", "('package', 'decentralizepy.graphs.SmallWorld')\n", "('graph_class', 'SmallWorld')\n", "Section: DATASET\n", "('package', 'decentralizepy.datasets.Femnist')\n", "('dataset_class', 'Femnist')\n", "('model_class', 'LogisticRegression')\n", "('n_procs', 1.0)\n", "('train_dir', '')\n", "('test_dir', '')\n", "('sizes', '[0.4, 0.2, 0.3, 0.1]')\n", "Section: MODEL_PARAMS\n", "('optimizer_package', 'torch.optim')\n", "('optimizer_class', 'SGD')\n", "('lr', 0.1)\n" ] } ], "source": [ "from localconfig import LocalConfig\n", "\n", "def read_ini(file_path):\n", " config = LocalConfig(file_path)\n", " for section in config:\n", " print(\"Section: \", section)\n", " for key, value in config.items(section):\n", " print((key, value))\n", " \n", "read_ini(\"config.ini\")" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "15\n" ] } ], "source": [ "def func(a = 1, b = 2, c = 3):\n", " print(a + b + c)\n", "\n", "l = [3, 5, 7]\n", "\n", "func(*l)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "ename": "TypeError", "evalue": "spawn() got an unexpected keyword argument 'kwargs'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m/tmp/ipykernel_52405/4231740097.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmultiprocessing\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mmp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mmp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mspawn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mfunc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnprocs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m'a'\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'b'\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'c'\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;36m6\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mTypeError\u001b[0m: spawn() got an unexpected keyword argument 'kwargs'" ] } ], "source": [ "from torch import multiprocessing as mp\n", "\n", "mp.spawn(fn = func, nprocs = 2, args = [], kwargs = {'a': 4, 'b': 5, 'c': 6})" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "list" ] }, "execution_count": 19, "metadata": {}, "output_type": "execute_result" } ], "source": [ "l = '[0.4, 0.2, 0.3, 0.1]'\n", "type(eval(l))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "interpreter": { "hash": "996934296aa9d79be6c3d800a38d8fdb7dfa8fe7bb07df178f1397cde2cb8742" }, "kernelspec": { "display_name": "Python 3.9.7 64-bit ('tff': conda)", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.7" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }