Add mirror-all-snapshots.py script for extra crowdsourced backup power.

Graydon Hoare 2011-05-17 08:26:38 -07:00
parent aed235e348
commit 49b90d37ed
3 changed files with 43 additions and 5 deletions


@@ -3,11 +3,6 @@
import os, tarfile, hashlib, re, shutil
from snapshot import *

def snap_filename_hash_part(snap):
    match = re.match(r".*([a-fA-F\d]{40}).tar.bz2$", snap)
    if not match:
        raise Exception("unable to find hash in filename: " + snap)
    return match.group(1)

def unpack_snapshot(snap):
    dl_path = os.path.join(download_dir_base, snap)


@@ -0,0 +1,37 @@
#!/usr/bin/env python

import os, tarfile, hashlib, re, shutil
from snapshot import *

f = open(snapshotfile)
date = None
rev = None
platform = None
snap = None
i = 0

for line in f.readlines():
    i += 1
    parsed = parse_line(i, line)
    if (not parsed): continue

    if parsed["type"] == "snapshot":
        date = parsed["date"]
        rev = parsed["rev"]

    elif rev != None and parsed["type"] == "file":
        platform = parsed["platform"]
        hsh = parsed["hash"]
        snap = full_snapshot_name(date, rev, platform, hsh)
        dl = os.path.join(download_dir_base, snap)
        url = download_url_base + "/" + snap
        if (not os.path.exists(dl)):
            print("downloading " + url)
            get_url_to_file(url, dl)
        if (snap_filename_hash_part(snap) == hash_file(dl)):
            print("got download with ok hash")
        else:
            raise Exception("bad hash on download")
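For readers skimming the diff, the per-snapshot work above boils down to: build the snapshot filename, fetch it with curl if it is not already cached, and check that the SHA-1 of the downloaded file matches the 40-character hash embedded in the filename. Below is a minimal, self-contained sketch of that single step; the cache directory, mirror URL, and snapshot filename are hypothetical placeholders, not values from this repository.

#!/usr/bin/env python
# Sketch of one download-and-verify step; the directory, URL, and filename
# below are made-up placeholders.
import hashlib, os, subprocess

download_dir_base = "dl"                            # placeholder cache dir
download_url_base = "http://example.com/snapshots"  # placeholder mirror URL

def hash_file(path):
    # SHA-1 over the whole file, as in snapshot.py's hash_file().
    h = hashlib.sha1()
    h.update(open(path, "rb").read())
    return h.hexdigest()

snap = "example-snapshot-" + ("a1" * 20) + ".tar.bz2"  # made-up filename
dl = os.path.join(download_dir_base, snap)
url = download_url_base + "/" + snap

if not os.path.isdir(download_dir_base):
    os.makedirs(download_dir_base)
if not os.path.exists(dl):
    print("downloading " + url)
    # get_url_to_file() in snapshot.py shells out to curl the same way.
    subprocess.check_call(["curl", "-o", dl, url])

if hash_file(dl) == "a1" * 20:   # compare against the hash in the filename
    print("got download with ok hash")
else:
    raise Exception("bad hash on download")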


@@ -95,6 +95,12 @@ def local_rev_committer_date():
def get_url_to_file(u,f):
    subprocess.check_call(["curl", "-o", f, u])

def snap_filename_hash_part(snap):
    match = re.match(r".*([a-fA-F\d]{40}).tar.bz2$", snap)
    if not match:
        raise Exception("unable to find hash in filename: " + snap)
    return match.group(1)

def hash_file(x):
    h = hashlib.sha1()
    h.update(open(x, "rb").read())
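As a quick illustration of what the snap_filename_hash_part helper moved into this file returns, the snippet below applies the same regex to a made-up snapshot filename and pulls out the trailing 40-hex-character SHA-1; only the filename is hypothetical.

import re

def snap_filename_hash_part(snap):
    # Same regex as above: capture the 40 hex chars right before ".tar.bz2".
    match = re.match(r".*([a-fA-F\d]{40}).tar.bz2$", snap)
    if not match:
        raise Exception("unable to find hash in filename: " + snap)
    return match.group(1)

name = "rust-snapshot-" + ("a1" * 20) + ".tar.bz2"   # hypothetical filename
assert snap_filename_hash_part(name) == "a1" * 20
print("extracted hash: " + snap_filename_hash_part(name))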