Clean up the model download script: build local paths with os.path.join, name the chunk size, and deduplicate the directory-creation logic

This commit is contained in:
Jeff Wu
2019-03-04 10:36:44 -08:00
parent 8eb67930d7
commit ed0dedcd55
5 changed files with 29 additions and 51 deletions

View File

@ -1,24 +1,27 @@
#!/usr/bin/env python
"""Download a GPT-2 model's checkpoint files from Google Cloud Storage.

Usage: download_model.py <model-name>   (e.g.: download_model.py 117M)
Files are saved under models/<model-name>/.
"""
import os
import sys
import requests
from tqdm import tqdm

if len(sys.argv) != 2:
    print('You must enter the model name as a parameter, e.g.: download_model.py 117M')
    sys.exit(1)

model = sys.argv[1]

# Create the destination directory if it does not exist already.
subdir = os.path.join('models', model)
if not os.path.exists(subdir):
    os.makedirs(subdir)
# os.path.join uses '\' on Windows, which is not a valid URL path separator;
# keep a forward-slash copy for building the download URL.
url_subdir = subdir.replace('\\', '/')

for filename in ['checkpoint', 'encoder.json', 'hparams.json',
                 'model.ckpt.data-00000-of-00001', 'model.ckpt.index',
                 'model.ckpt.meta', 'vocab.bpe']:
    r = requests.get("https://storage.googleapis.com/gpt-2/" + url_subdir + "/" + filename,
                     stream=True)
    # Fail loudly on HTTP errors instead of writing an error page to disk.
    r.raise_for_status()
    # 'wb' flag required for Windows (binary mode, no newline translation).
    with open(os.path.join(subdir, filename), 'wb') as f:
        file_size = int(r.headers["content-length"])
        # 1k chunk_size, since Ethernet packet size is around 1500 bytes.
        chunk_size = 1000
        with tqdm(ncols=100, desc="Fetching " + filename,
                  total=file_size, unit_scale=True) as pbar:
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
                # Advance by the bytes actually received, not the nominal
                # chunk size, so the bar ends exactly at file_size.
                pbar.update(len(chunk))