@article{turner_blockswap_2020,
  title = {{BlockSwap}: Fisher-guided Block Substitution for Network Compression on a Budget},
  shorttitle = {{BlockSwap}},
  url = {http://arxiv.org/abs/1906.04113},
  abstract = {The desire to map neural networks to varying-capacity devices has led to the development of a wealth of compression techniques, many of which involve replacing standard convolutional blocks in a large network with cheap alternative blocks. However, not all blocks are created equally; for a required compute budget there may exist a potent combination of many different cheap blocks, though exhaustively searching for such a combination is prohibitively expensive. In this work, we develop BlockSwap: a fast algorithm for choosing networks with interleaved block types by passing a single minibatch of training data through randomly initialised networks and gauging their Fisher potential. These networks can then be used as students and distilled with the original large network as a teacher. We demonstrate the effectiveness of the chosen networks across CIFAR-10 and ImageNet for classification, and COCO for detection, and provide a comprehensive ablation study of our approach. BlockSwap quickly explores possible block configurations using a simple architecture ranking system, yielding highly competitive networks in orders of magnitude less time than most architecture search techniques (e.g. under 5 minutes on a single GPU for CIFAR-10). Code is available at https://github.com/BayesWatch/pytorch-blockswap.},
  journaltitle = {arXiv:1906.04113 [cs, stat]},
  author = {Turner, Jack and Crowley, Elliot J. and O'Boyle, Michael and Storkey, Amos and Gray, Gavin},
  urldate = {2020-02-17},
  date = {2020-01-23},
  eprinttype = {arxiv},
  eprint = {1906.04113},
  keywords = {Computer Science - Machine Learning, Statistics - Machine Learning},
}