Skip to content

Commit

Permalink
Merge pull request #290 from harvard-edge/289-updated-betterbib-since…
Browse files Browse the repository at this point in the history
…-latest-updates-to-files

Update all the references
  • Loading branch information
profvjreddi authored Jun 20, 2024
2 parents 64d18a9 + 1eb895f commit baf7336
Show file tree
Hide file tree
Showing 9 changed files with 64 additions and 99 deletions.
1 change: 1 addition & 0 deletions contents/ai_for_good/ai_for_good.bib
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ @misc{rao2021
author = {Rao, Ravi},
journal = {www.wevolver.com},
month = dec,
title = {{TinyML} unlocks new possibilities for sustainable development technologies},
url = {https://www.wevolver.com/article/tinyml-unlocks-new-possibilities-for-sustainable-development-technologies},
year = {2021},
}
Expand Down
21 changes: 4 additions & 17 deletions contents/data_engineering/data_engineering.bib
Original file line number Diff line number Diff line change
Expand Up @@ -107,19 +107,6 @@ @article{gebru2021datasheets
month = nov,
}

@inproceedings{Data_Cascades_2021,
author = {Sambasivan, Nithya and Kapania, Shivani and Highfill, Hannah and Akrong, Diana and Paritosh, Praveen and Aroyo, Lora M},
title = {{{\textquotedblleft}Everyone} wants to do the model work, not the data work{\textquotedblright}: {Data} Cascades in High-Stakes {AI}},
booktitle = {Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems},
pages = {1--15},
year = {2021},
doi = {10.1145/3411764.3445518},
source = {Crossref},
url = {https://doi.org/10.1145/3411764.3445518},
publisher = {ACM},
month = may,
}

@misc{googleinformation,
author = {Google},
bdsk-url-1 = {https://blog.google/documents/83/},
Expand Down Expand Up @@ -174,10 +161,10 @@ @article{krishnan2022selfsupervised
}

@inproceedings{mazumder2021multilingual,
title={Multilingual spoken words corpus},
author={Mazumder, Mark and Chitlangia, Sharad and Banbury, Colby and Kang, Yiping and Ciro, Juan Manuel and Achorn, Keith and Galvez, Daniel and Sabini, Mark and Mattson, Peter and Kanter, David and others},
booktitle={Thirty-fifth Conference on Neural Information Processing Systems Datasets and Benchmarks Track (Round 2)},
year={2021}
author = {Mazumder, Mark and Chitlangia, Sharad and Banbury, Colby and Kang, Yiping and Ciro, Juan Manuel and Achorn, Keith and Galvez, Daniel and Sabini, Mark and Mattson, Peter and Kanter, David and others},
title = {Multilingual spoken words corpus},
booktitle = {Thirty-fifth Conference on Neural Information Processing Systems Datasets and Benchmarks Track (Round 2)},
year = {2021},
}

@article{northcutt2021pervasive,
Expand Down
10 changes: 5 additions & 5 deletions contents/dl_primer/dl_primer.bib
Original file line number Diff line number Diff line change
Expand Up @@ -84,9 +84,9 @@ @article{rumelhart1986learning
}

@article{vaswani2017attention,
title={Attention is all you need},
author={Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N and Kaiser, {\L}ukasz and Polosukhin, Illia},
journal={Advances in neural information processing systems},
volume={30},
year={2017}
author = {Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N and Kaiser, {\L}ukasz and Polosukhin, Illia},
title = {Attention is all you need},
journal = {Adv Neural Inf Process Syst},
volume = {30},
year = {2017},
}
12 changes: 6 additions & 6 deletions contents/hw_acceleration/hw_acceleration.bib
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ @article{burr2016recent
pages = {146--162},
publisher = {Institute of Electrical and Electronics Engineers (IEEE)},
source = {Crossref},
title = {Recent Progress in Phase-{Change\ensuremath{<}?Pub} \_newline {?\ensuremath{>}Memory} Technology},
title = {Recent Progress in {Phase-Change} Memory Technology},
url = {https://doi.org/10.1109/jetcas.2016.2547718},
volume = {6},
year = {2016},
Expand Down Expand Up @@ -1374,8 +1374,8 @@ @inproceedings{zhu2018benchmarking
}

@article{rayis2014,
author = {El-Rayis, A.O.},
title = {Reconfigurable architectures for the next generation of mobile device telecommunications systems},
year = {2014},
url = {: https://www.researchgate.net/publication/292608967}
}
author = {El-Rayis, A.O.},
title = {Reconfigurable architectures for the next generation of mobile device telecommunications systems},
year = {2014},
url = {https://www.researchgate.net/publication/292608967},
}
22 changes: 13 additions & 9 deletions contents/ml_systems/ml_systems.bib
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,16 @@ @misc{armcomfuture
}

@article{lin2023tiny,
title={Tiny Machine Learning: Progress and Futures [Feature]},
author={Lin, Ji and Zhu, Ligeng and Chen, Wei-Ming and Wang, Wei-Chen and Han, Song},
journal={IEEE Circuits and Systems Magazine},
volume={23},
number={3},
pages={8--34},
year={2023},
publisher={IEEE}
}
author = {Lin, Ji and Zhu, Ligeng and Chen, Wei-Ming and Wang, Wei-Chen and Han, Song},
title = {Tiny Machine Learning: {Progress} and Futures {[Feature]}},
journal = {IEEE Circuits Syst. Mag.},
volume = {23},
number = {3},
pages = {8--34},
year = {2023},
publisher = {Institute of Electrical and Electronics Engineers (IEEE)},
doi = {10.1109/mcas.2023.3302182},
source = {Crossref},
url = {https://doi.org/10.1109/mcas.2023.3302182},
issn = {1531-636X, 1558-0830},
}
80 changes: 31 additions & 49 deletions contents/optimizations/optimizations.bib
Original file line number Diff line number Diff line change
@@ -1,20 +1,21 @@
%comment{This file was created with betterbib v5.0.11.}
@inproceedings{yao2021hawq,
title={Hawq-v3: Dyadic neural network quantization},
author={Yao, Zhewei and Dong, Zhen and Zheng, Zhangcheng and Gholami, Amir and Yu, Jiali and Tan, Eric and Wang, Leyuan and Huang, Qijing and Wang, Yida and Mahoney, Michael and others},
booktitle={International Conference on Machine Learning},
pages={11875--11886},
year={2021},
organization={PMLR}
author = {Yao, Zhewei and Dong, Zhen and Zheng, Zhangcheng and Gholami, Amir and Yu, Jiali and Tan, Eric and Wang, Leyuan and Huang, Qijing and Wang, Yida and Mahoney, Michael and others},
title = {{HAWQ-V3}: {Dyadic} neural network quantization},
booktitle = {International Conference on Machine Learning},
pages = {11875--11886},
year = {2021},
organization = {PMLR},
}

@inproceedings{jacob2018quantization,
title={Quantization and training of neural networks for efficient integer-arithmetic-only inference},
author={Jacob, Benoit and Kligys, Skirmantas and Chen, Bo and Zhu, Menglong and Tang, Matthew and Howard, Andrew and Adam, Hartwig and Kalenichenko, Dmitry},
booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition},
pages={2704--2713},
year={2018}
author = {Jacob, Benoit and Kligys, Skirmantas and Chen, Bo and Zhu, Menglong and Tang, Matthew and Howard, Andrew and Adam, Hartwig and Kalenichenko, Dmitry},
title = {Quantization and training of neural networks for efficient integer-arithmetic-only inference},
booktitle = {Proceedings of the IEEE conference on computer vision and pattern recognition},
pages = {2704--2713},
year = {2018},
}

@inproceedings{benmeziane2021hardwareaware,
Expand Down Expand Up @@ -45,7 +46,6 @@ @inproceedings{cai2018proxylessnas
year = {2019},
}


@article{qi2021efficient,
author = {Qi, Chen and Shen, Shibo and Li, Rongpeng and Zhao, Zhifeng and Liu, Qing and Liang, Jing and Zhang, Honggang},
title = {An efficient pruning scheme of deep neural networks for Internet of Things applications},
Expand Down Expand Up @@ -144,10 +144,10 @@ @misc{gu2023deep
}

@article{han2015deep,
title={Deep compression: Compressing deep neural networks with pruning, trained quantization and huffman coding},
author={Han, Song and Mao, Huizi and Dally, William J},
journal={arXiv preprint arXiv:1510.00149},
year={2015}
author = {Han, Song and Mao, Huizi and Dally, William J},
title = {Deep compression: {Compressing} deep neural networks with pruning, trained quantization and huffman coding},
journal = {arXiv preprint arXiv:1510.00149},
year = {2015},
}

@article{hawks2021psandqs,
Expand Down Expand Up @@ -367,24 +367,6 @@ @inproceedings{prakash2022cfu
month = apr,
}

@article{qi2021efficient,
author = {Qi, Chen and Shen, Shibo and Li, Rongpeng and Zhao, Zhifeng and Liu, Qing and Liang, Jing and Zhang, Honggang},
abstract = {Nowadays, deep neural networks (DNNs) have been rapidly deployed to realize a number of functionalities like sensing, imaging, classification, recognition, etc. However, the computational-intensive requirement of DNNs makes it difficult to be applicable for resource-limited Internet of Things (IoT) devices. In this paper, we propose a novel pruning-based paradigm that aims to reduce the computational cost of DNNs, by uncovering a more compact structure and learning the effective weights therein, on the basis of not compromising the expressive capability of DNNs. In particular, our algorithm can achieve efficient end-to-end training that transfers a redundant neural network to a compact one with a specifically targeted compression rate directly. We comprehensively evaluate our approach on various representative benchmark datasets and compared with typical advanced convolutional neural network (CNN) architectures. The experimental results verify the superior performance and robust effectiveness of our scheme. For example, when pruning VGG on CIFAR-10, our proposed scheme is able to significantly reduce its FLOPs (floating-point operations) and number of parameters with a proportion of 76.2\% and 94.1\%, respectively, while still maintaining a satisfactory accuracy. To sum up, our scheme could facilitate the integration of DNNs into the common machine-learning-based IoT framework and establish distributed training of neural networks in both cloud and edge.},
bdsk-url-1 = {https://doi.org/10.1186/s13634-021-00744-4},
doi = {10.1186/s13634-021-00744-4},
file = {Full Text PDF:/Users/jeffreyma/Zotero/storage/AGWCC5VS/Qi et al. - 2021 - An efficient pruning scheme of deep neural network.pdf:application/pdf},
issn = {1687-6180},
journal = {EURASIP Journal on Advances in Signal Processing},
number = {1},
publisher = {Springer Science and Business Media LLC},
source = {Crossref},
title = {An efficient pruning scheme of deep neural networks for Internet of Things applications},
url = {https://doi.org/10.1186/s13634-021-00744-4},
volume = {2021},
year = {2021},
month = jun,
}

@article{sheng2019qbert,
author = {Shen, Sheng and Dong, Zhen and Ye, Jiayu and Ma, Linjian and Yao, Zhewei and Gholami, Amir and Mahoney, Michael W. and Keutzer, Kurt},
bibsource = {dblp computer science bibliography, https://dblp.org},
Expand Down Expand Up @@ -445,11 +427,11 @@ @misc{ultimate
}

@article{vaswani2017attention,
title={Attention is all you need},
author={Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N and Kaiser, {\L}ukasz and Polosukhin, Illia},
journal={Advances in neural information processing systems},
volume={30},
year={2017}
author = {Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N and Kaiser, {\L}ukasz and Polosukhin, Illia},
title = {Attention is all you need},
journal = {Adv Neural Inf Process Syst},
volume = {30},
year = {2017},
}

@inproceedings{wu2019fbnet,
Expand Down Expand Up @@ -555,21 +537,21 @@ @misc{zhou2021analognets
}

@article{annette2020,
title={ANNETTE: Accurate Neural Network Execution Time Estimation with Stacked Models},
author={Wess, Matthias and Ivanov, Matvey and Unger, Christoph and Nookala, Anvesh},
journal={IEEE},
doi={10.1109/ACCESS.2020.3047259},
year={2020},
publisher={IEEE}
author = {Wess, Matthias and Ivanov, Matvey and Unger, Christoph and Nookala, Anvesh},
title = {{ANNETTE:} {Accurate} Neural Network Execution Time Estimation with Stacked Models},
journal = {IEEE Access},
doi = {10.1109/ACCESS.2020.3047259},
year = {2020},
publisher = {IEEE},
}

@article{alexnet2012,
author = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E},
editor = {Pereira, F. and Burges, C.J. and Bottou, L. and Weinberger, K.Q.},
booktitle = {Advances in Neural Information Processing Systems},
editor = {F. Pereira and C.J. Burges and L. Bottou and K.Q. Weinberger},
publisher = {Curran Associates, Inc.},
title = {ImageNet Classification with Deep Convolutional Neural Networks},
title = {{ImageNet} Classification with Deep Convolutional Neural Networks},
url = {https://proceedings.neurips.cc/paper_files/paper/2012/file/c399862d3b9d6b76c8436e924a68c45b-Paper.pdf},
volume = {25},
year = {2012}
}
year = {2012},
}
2 changes: 1 addition & 1 deletion contents/responsible_ai/responsible_ai.bib
Original file line number Diff line number Diff line change
Expand Up @@ -351,7 +351,7 @@ @inproceedings{lakkaraju2020fool
publisher = {ACM},
source = {Crossref},
subtitle = {Manipulating User Trust via Misleading Black Box Explanations},
title = {{''How} do I fool you?''},
title = {{{\textquotedblleft}How} do I fool you?{\textquotedblright}},
url = {https://doi.org/10.1145/3375627.3375833},
year = {2020},
month = feb,
Expand Down
13 changes: 2 additions & 11 deletions contents/robust_ai/robust_ai.bib
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ @inproceedings{ahmed2020headless

@inproceedings{chen2019sc,
author = {Chen, Zitao and Li, Guanpeng and Pattabiraman, Karthik and DeBardeleben, Nathan},
title = {{\ensuremath{<}i\ensuremath{>}BinFI\ensuremath{<}/i\ensuremath{>}}},
title = {{BinFI}},
year = {2019},
isbn = {9781450362290},
publisher = {ACM},
Expand Down Expand Up @@ -441,16 +441,7 @@ @inproceedings{cheng2016clear
source = {Crossref},
url = {https://doi.org/10.1145/2897937.2897996},
publisher = {ACM},
subtitle = {\ensuremath{<}u\ensuremath{>}C\ensuremath{<}/u\ensuremath{>}
ross
\ensuremath{<}u\ensuremath{>}-L\ensuremath{<}/u\ensuremath{>}
ayer
\ensuremath{<}u\ensuremath{>}E\ensuremath{<}/u\ensuremath{>}
xploration for
\ensuremath{<}u\ensuremath{>}A\ensuremath{<}/u\ensuremath{>}
rchitecting
\ensuremath{<}u\ensuremath{>}R\ensuremath{<}/u\ensuremath{>}
esilience - Combining hardware and software techniques to tolerate soft errors in processor cores},
subtitle = {Cross-Layer Exploration for Architecting Resilience - Combining hardware and software techniques to tolerate soft errors in processor cores},
month = jun,
}

Expand Down
2 changes: 1 addition & 1 deletion contents/sustainable_ai/sustainable_ai.bib
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ @article{cenci2021ecofriendly
pages = {2001263},
publisher = {Wiley},
source = {Crossref},
title = {{Eco-Friendly} {Electronics{\textemdash}A} Comprehensive Review},
title = {Eco-Friendly {Electronics{\textemdash}A} Comprehensive Review},
url = {https://doi.org/10.1002/admt.202001263},
volume = {7},
year = {2021},
Expand Down

0 comments on commit baf7336

Please sign in to comment.