#!/usr/bin/env python

import argparse
from contextlib import closing
from filecmp import dircmp
import hashlib
import os
import sys
import shutil
import tarfile
import tempfile
import urllib2

_tmp_dir = tempfile.mkdtemp()


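# Fetch a single remote file (such as $HASH.tar.gz or $HASH.md5) into the
# temporary directory and return its local path; a missing remote file
# aborts the whole run.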
def download(url):
    file_name = url.split('/')[-1]
    abs_file_name = os.path.join(_tmp_dir, file_name)
    print 'Downloading: ' + file_name
    try:
        # urllib2.urlopen raises HTTPError on a 404, whereas
        # urllib.urlretrieve would silently save the error page instead.
        with closing(urllib2.urlopen(url)) as response:
            with open(abs_file_name, 'wb') as out_file:
                shutil.copyfileobj(response, out_file)
        return abs_file_name
    except urllib2.HTTPError as e:
        if e.code == 404:
            sys.exit('File not found on remote source')
        sys.exit(e)
    except Exception as e:
        sys.exit(e)


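# The .md5 companion file is expected to contain the archive's hex digest on
# its first line; it is compared against a digest computed locally.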
def calculate_md5(file_name):
    # Read in binary mode so the digest of the .tar.gz is computed correctly.
    with open(file_name, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()


def read_md5(file_name):
    with open(file_name) as f:
        md5_result = f.readline()
    return md5_result.strip()


def untar(tar_file):
    if tarfile.is_tarfile(tar_file):
        with tarfile.open(tar_file, 'r:gz') as tar:
            tar.extractall(_tmp_dir)


def remove_tree(to_remove):
    if os.path.exists(to_remove):
        if os.path.isdir(to_remove):
            shutil.rmtree(to_remove)
        else:
            os.remove(to_remove)


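# Deployment is a directory sync driven by a filecmp.dircmp report: changed
# files are overwritten, entries only present in the live tree are removed,
# and entries only present in the extracted archive are copied in.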
def deploy_files(dcmp):
    for name in dcmp.diff_files:
        # Overwrite files whose content differs between the two trees.
        shutil.copy2(os.path.join(dcmp.right, name),
                     os.path.join(dcmp.left, name))
    for name in dcmp.left_only:
        remove_tree(os.path.join(dcmp.left, name))
    for name in dcmp.right_only:
        src = os.path.join(dcmp.right, name)
        dst = os.path.join(dcmp.left, name)
        if os.path.isdir(src):
            copytree(src, dst)
        else:
            shutil.copy2(src, dst)
    for sub_dcmp in dcmp.subdirs.values():
        deploy_files(sub_dcmp)


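# shutil.copytree refuses to copy into an existing destination directory, so a
# small recursive helper is used for copying instead.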
def copytree(src, dst):
    if not os.path.exists(dst):
        os.makedirs(dst)
        shutil.copystat(src, dst)
    lst = os.listdir(src)
    for item in lst:
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if os.path.isdir(s):
            copytree(s, d)
        else:
            shutil.copy2(s, d)


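# Example invocation (the hash value is a placeholder):
#   python <this script> --hash 0123abc --source https://helpcenter.eyeofiles.com \
#       --website help.eyeo.com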
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fetch a compressed archive in the form of $HASH.tar.gz
        and deploy it to the /var/www/$WEBSITE folder""",
        epilog="""--hash must be provided in order to fetch the files; the
        files expected to be fetched are $HASH.tar.gz and $HASH.md5, so that
        the hashes can be compared.
        --source must be a URL, e.g.
        https://helpcenter.eyeofiles.com""",
    )
    parser.add_argument('--hash', action='store', type=str,
                        required=True,
                        help='Hash of the commit to deploy')
    parser.add_argument('--source', action='store', type=str,
                        required=True,
                        help='The source where files will be downloaded')
    parser.add_argument('--website', action='store', type=str,
                        required=True,
                        help='The name of the website [e.g. help.eyeo.com]')
    args = parser.parse_args()
    hash = args.hash
    source = args.source
    url_file = '{0}/{1}.tar.gz'.format(source, hash)
    url_md5 = '{0}/{1}.md5'.format(source, hash)
    down_file = download(url_file)
    down_md5 = download(url_md5)
    try:
        # Only deploy if the downloaded archive matches the published digest.
        if calculate_md5(down_file) == read_md5(down_md5):
            untar(down_file)
            hash_directory = os.path.join(_tmp_dir, hash)
            destination = '/var/www/' + args.website
            dcmp = dircmp(destination, hash_directory)
            print 'Deploying files'
            deploy_files(dcmp)
        else:
            sys.exit("Hashes don't match")
    except Exception as e:
        sys.exit(e)
    finally:
        # Always clean up the temporary download/extraction directory.
        shutil.rmtree(_tmp_dir)