ESM Collection
Models and spaces related to Meta's Evolutionary Scale Modeling (ESM) Project (https://github.com/facebookresearch/esm)
Checkpoint of the ESM Inverse Folding (ESM-IF1) model.
Please see the ESM team's GitHub repo for more information.
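As a rough sketch of how a checkpoint like this is typically used, the snippet below loads ESM-IF1 through the fair-esm package and samples a sequence conditioned on the backbone of one chain from a PDB file. The file path "my_structure.pdb" and chain "A" are placeholders, and the exact calls may vary between fair-esm releases; the ESM repo linked above is the authoritative reference.

```python
# Minimal sketch, assuming the fair-esm package is installed (pip install fair-esm);
# the inverse-folding module also needs extra dependencies (e.g. torch-geometric),
# as described in the ESM repo.
import esm
import esm.inverse_folding

# Load the pretrained ESM-IF1 checkpoint and its alphabet.
model, alphabet = esm.pretrained.esm_if1_gvp4_t16_142M_UR50()
model = model.eval()

# Extract backbone coordinates for one chain of a structure.
# "my_structure.pdb" and chain "A" are placeholders for your own input.
structure = esm.inverse_folding.util.load_structure("my_structure.pdb", "A")
coords, native_seq = esm.inverse_folding.util.extract_coords_from_structure(structure)

# Sample a candidate sequence conditioned on the backbone coordinates.
sampled_seq = model.sample(coords, temperature=1.0)
print(sampled_seq)
```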
If you find the models useful in your research, we ask that you cite the relevant papers:
@article{rives2019biological,
  author={Rives, Alexander and Meier, Joshua and Sercu, Tom and Goyal, Siddharth and Lin, Zeming and Liu, Jason and Guo, Demi and Ott, Myle and Zitnick, C. Lawrence and Ma, Jerry and Fergus, Rob},
  title={Biological Structure and Function Emerge from Scaling Unsupervised Learning to 250 Million Protein Sequences},
  year={2019},
  doi={10.1101/622803},
  url={https://www.biorxiv.org/content/10.1101/622803v4},
  journal={PNAS}
}
For the self-attention contact prediction:
@article{rao2020transformer,
  author={Rao, Roshan M and Meier, Joshua and Sercu, Tom and Ovchinnikov, Sergey and Rives, Alexander},
  title={Transformer protein language models are unsupervised structure learners},
  year={2020},
  doi={10.1101/2020.12.15.422761},
  url={https://www.biorxiv.org/content/10.1101/2020.12.15.422761v1},
  journal={bioRxiv}
}
For inverse folding using ESM-IF1:
@article{hsu2022learning,
  author={Hsu, Chloe and Verkuil, Robert and Liu, Jason and Lin, Zeming and Hie, Brian and Sercu, Tom and Lerer, Adam and Rives, Alexander},
  title={Learning inverse folding from millions of predicted structures},
  year={2022},
  doi={10.1101/2022.04.10.487779},
  url={https://www.biorxiv.org/content/early/2022/04/10/2022.04.10.487779},
  journal={ICML}
}