"""Entry point for launching LLaVA training with FlashAttention-2.

Thin wrapper: delegates all configuration parsing and the training loop
to ``llava.train.train.train``, selecting the FlashAttention-2 attention
backend via Transformers' ``attn_implementation`` keyword.

NOTE(review): the original file began with HuggingFace file-viewer scrape
artifacts ("raw", "history blame contribute delete", "115 Bytes", ...)
that are not Python and made the script unrunnable; they are removed here.
"""
from llava.train.train import train

if __name__ == "__main__":
    # Request the FlashAttention-2 kernels for all attention layers; the
    # string is passed through to the model-loading code inside train().
    train(attn_implementation="flash_attention_2")