Update README

D-X-Y
2021-04-07 17:14:49 +08:00
parent dd0722c1b7
commit 5c7ef93471
3 changed files with 126 additions and 8 deletions


@@ -99,10 +99,10 @@ Some methods use knowledge distillation (KD), which require pre-trained models.
If you find this project helpful for your research or engineering work, please consider citing some of the following papers:
```
@inproceedings{dong2021autohas,
-title={{AutoHAS}: Efficient Hyperparameter and Architecture Search},
-author={Dong, Xuanyi and Tan, Mingxing and Yu, Adams Wei and Peng, Daiyi and Gabrys, Bogdan and Le, Quoc V},
-booktitle = {International Conference on Learning Representations (ICLR) Workshop on Neural Architecture Search},
-year={2021}
+title = {{AutoHAS}: Efficient Hyperparameter and Architecture Search},
+author = {Dong, Xuanyi and Tan, Mingxing and Yu, Adams Wei and Peng, Daiyi and Gabrys, Bogdan and Le, Quoc V},
+booktitle = {2nd Workshop on Neural Architecture Search at International Conference on Learning Representations (ICLR)},
+year = {2021}
}
@article{dong2021nats,
title = {{NATS-Bench}: Benchmarking NAS Algorithms for Architecture Topology and Size},