Update README
@@ -99,10 +99,10 @@ Some methods use knowledge distillation (KD), which requires pre-trained models.
 If you find this project helpful for your research or engineering work, please consider citing some of the following papers:
 ```
 @inproceedings{dong2021autohas,
-  title={{AutoHAS}: Efficient Hyperparameter and Architecture Search},
-  author={Dong, Xuanyi and Tan, Mingxing and Yu, Adams Wei and Peng, Daiyi and Gabrys, Bogdan and Le, Quoc V},
-  booktitle = {International Conference on Learning Representations (ICLR) Workshop on Neural Architecture Search},
-  year={2021}
+  title = {{AutoHAS}: Efficient Hyperparameter and Architecture Search},
+  author = {Dong, Xuanyi and Tan, Mingxing and Yu, Adams Wei and Peng, Daiyi and Gabrys, Bogdan and Le, Quoc V},
+  booktitle = {2nd Workshop on Neural Architecture Search at International Conference on Learning Representations (ICLR)},
+  year = {2021}
 }
 @article{dong2021nats,
   title = {{NATS-Bench}: Benchmarking NAS Algorithms for Architecture Topology and Size},