To: bismark-commits@lists.bufferbloat.net
From: nick@svn.comics.unina.it
Date: Thu, 26 May 2011 10:22:52 +0200
Subject: [Bismark-commits] rev 348 - trunk/server/scripts
List-Id: Commit log for the bismark source code

Author: nick
Date: 2011-05-26 10:22:51 +0200 (Thu, 26 May 2011)
New Revision: 348

Modified:
   trunk/server/scripts/organize-archive.py
Log:
functions for publishing files from the archive

Modified: trunk/server/scripts/organize-archive.py
===================================================================
--- trunk/server/scripts/organize-archive.py	2011-05-24 22:36:26 UTC (rev 347)
+++ trunk/server/scripts/organize-archive.py	2011-05-26 08:22:51 UTC (rev 348)
@@ -6,8 +6,9 @@
 import time
 import datetime
 import shutil
+import tarfile
 
-def clean_data(dir):
+def clean_data(dir,targetdir):
 
     UKY_DIR = os.environ['HOME'] + '/var/archive/UKY-old'
     devices = []
@@ -32,6 +33,7 @@
         match = re.search(r'[Uu]ky',file)
         if match:
             shutil.move(dir+file, UKY_DIR)
+            continue
 
         # get list of devices
         match = re.search(r'[0-9A-Za-z]+',file)
@@ -55,25 +57,54 @@
 
         # make directories and move files
-        datadir = dir+ dev_name + '/' + typedir + '/'+ datedir
+        datadir = targetdir + dev_name + '/' + typedir + '/'+ datedir
         if not os.path.exists(datadir):
             os.makedirs(datadir)
-        #print dir+file + '->' + datadir
+        print dir+file + '->' + datadir
         shutil.move(dir+file,datadir)
 
     for dev in devices:
         print dev
+
+def new_device_files(members,device,dir):
+    for tarinfo in members:
+        match = re.search(device,tarinfo.name)
+        if match:
+            yield tarinfo
 
-
+def unpack_backup(device,dir,outdir):
+    files = os.listdir(dir)
+    for file in files:
+        match = re.search(r'xml.tgz',file)
+        if match:
+            # unpack into the unpack dir
+            print file
+            try:
+                tar = tarfile.open(dir+file,'r:gz')
+                tar.extractall(outdir,members=new_device_files(tar,device))
+                tar.close()
+            except tarfile.ReadError:
+                print "Warning Read Error"
+
+
 if __name__ == '__main__':
     HOME = os.environ['HOME'] + '/'
     MEASURE_FILE_DIR = 'var/data/'
+    ARCHIVE_DIR = HOME + MEASURE_FILE_DIR + 'old/'
+    BACKUP_DIR = HOME + 'var/backup/'
+    UNPACK_DIR = ARCHIVE_DIR + 'unpack/'
 
-    clean_data(ARCHIVE_DIR)
+    PUBLISH_DIR = ARCHIVE_DIR
+
+    clean_data(ARCHIVE_DIR,PUBLISH_DIR)
+
+    # restore directory structure from backup
+    #unpack_backup('NB105',BACKUP_DIR,UNPACK_DIR)
+    #clean_data(UNPACK_DIR,PUBLISH_DIR)
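
Usage note (not part of the commit): the new unpack_backup() pulls selected entries out of a gzipped tarball by handing tarfile's extractall() a generator that yields only the members whose names match the device id. A minimal standalone sketch of that same pattern follows; the archive path and output directory are placeholders, and 'NB105' is simply the device id from the commented-out call above.

    import re
    import tarfile

    def device_members(tar, device):
        # yield only the tar entries whose path mentions the device id
        for tarinfo in tar:
            if re.search(device, tarinfo.name):
                yield tarinfo

    # placeholder paths, purely for illustration
    tar = tarfile.open('/tmp/backup/sample.xml.tgz', 'r:gz')
    tar.extractall('/tmp/unpack', members=device_members(tar, 'NB105'))
    tar.close()

Because extractall() only touches the members the generator yields, everything else in the backup is skipped and just that device's directory tree is recreated under the output directory.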