download_model.py

#!/usr/bin/env python
import os
import sys

import requests
from tqdm import tqdm

if len(sys.argv) != 2:
    print('You must enter the model name as a parameter, e.g.: download_model.py 117M')
    sys.exit(1)

model = sys.argv[1]

# Create the target directory if it does not exist already
if not os.path.exists('models/' + model):
    os.makedirs('models/' + model)

# Download all of the model files
for filename in ['checkpoint', 'encoder.json', 'hparams.json',
                 'model.ckpt.data-00000-of-00001', 'model.ckpt.index',
                 'model.ckpt.meta', 'vocab.bpe']:
    r = requests.get("https://storage.googleapis.com/gpt-2/models/" + model + "/" + filename, stream=True)
    # The 'wb' flag is required on Windows
    with open('models/' + model + '/' + filename, 'wb') as current_file:
        file_size = int(r.headers["content-length"])
        with tqdm(ncols=100, desc="Fetching " + filename, total=file_size, unit_scale=True) as pbar:
            # chunk_size of 1k, since the Ethernet packet size is around 1500 bytes
            for chunk in r.iter_content(chunk_size=1000):
                current_file.write(chunk)
                pbar.update(1000)
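As a quick sanity check after the script finishes, a small sketch like the following can confirm that every expected file landed in the model directory. The verify_download helper and the '117M' model name are illustrative only (the model name comes from the script's own usage hint), not part of the original script.

import os

# Same file list the download script fetches
EXPECTED_FILES = [
    'checkpoint', 'encoder.json', 'hparams.json',
    'model.ckpt.data-00000-of-00001', 'model.ckpt.index',
    'model.ckpt.meta', 'vocab.bpe',
]

def verify_download(model):
    """Return the list of expected files missing from models/<model>."""
    model_dir = os.path.join('models', model)
    return [f for f in EXPECTED_FILES
            if not os.path.isfile(os.path.join(model_dir, f))]

if __name__ == '__main__':
    missing = verify_download('117M')  # hypothetical example model name
    print('All files present' if not missing else 'Missing: ' + ', '.join(missing))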