Revision 54594

added skip-store functionality: obsIds already listed in 'store_done' are skipped on re-run, and each completed obsId is written to that file

View differences:

s3_migration.py
@@ -46,16 +46,18 @@
 				log_file.writeline('Missing file for objectStoreid: %s ObjectId:%s path: %s'%(obsId, objectId, fs_path))
 		
 
-def start_import(metadataCollection, bucket, log_file):
+def start_import(metadataCollection, bucket, log_file, done_file, skip_store):
 	client = MongoClient()
 	db = client['objectStore']
 	metadataCollection = db[metadataCollection]
 	for item in metadataCollection.find(no_cursor_timeout=True):
 		obsId = item['obsId']
-		exportItemForMongoCollection(obsId, db, bucket, log_file)
-		destination_collection =db[ 's3_'+obsId[:36]]
-		print "creating Index on ID"
-		destination_collection.create_index([('id',pymongo.ASCENDING)])
+		if obsId not in skip_store:
+			exportItemForMongoCollection(obsId, db, bucket, log_file)
+			destination_collection =db[ 's3_'+obsId[:36]]
+			print "creating Index on ID"
+			destination_collection.create_index([('id',pymongo.ASCENDING)])
+			done_file.write('{}\n'.format(obsId))
 
 
 
@@ -71,10 +73,15 @@
 		d =line.split('=')
 		if len(d) == 2:
 			props[d[0].strip()] = d[1].strip()
+	skip_store =[]
+	if os.path.isfile('store_done'):
+		f = open('store_done')
+		skip_store =[line.strip() for line in f if len(line) >0]
 
 	bname = args[2]
 	conn = boto.connect_s3(aws_access_key_id = props['access_key'], aws_secret_access_key = props['secret_key'],  host = props['host_base'], calling_format = boto.s3.connection.OrdinaryCallingFormat())
 	bucket = conn.get_bucket(bname, validate=True)
 	log_file = open('s3_migration.log', 'w')
-	start_import('metadataObjectStore',bucket, log_file)
+	done_file = open('store_done', 'wb')
+	start_import('metadataObjectStore',bucket, log_file, done_file, skip_store)
 	log_file.close()
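
In effect, this revision makes the migration resumable: obsIds already recorded in 'store_done' are skipped, and each store is written to that file once its export completes. The snippet below is a minimal, self-contained sketch of that resume pattern, not the committed code; the function names, the placeholder export callable, and the choice to open the done file in append mode are illustrative assumptions.

import os

DONE_FILE = 'store_done'  # same file name the script uses; assumed relative to the working directory

def load_skip_store(path=DONE_FILE):
	# obsIds completed by a previous run, one per line
	if not os.path.isfile(path):
		return set()
	with open(path) as f:
		return set(line.strip() for line in f if line.strip())

def migrate(store_ids, export):
	# 'export' stands in for the real per-store work (exportItemForMongoCollection, index creation, ...)
	skip_store = load_skip_store()
	done_file = open(DONE_FILE, 'a')  # append, so progress from earlier runs is preserved
	try:
		for obs_id in store_ids:
			if obs_id in skip_store:
				continue  # already migrated in an earlier run
			export(obs_id)
			done_file.write('{}\n'.format(obs_id))
			done_file.flush()  # record each store as soon as it finishes
	finally:
		done_file.close()

The committed script differs in one respect: it opens 'store_done' in write mode after loading the skip list, so each run rewrites the file with only the stores it completed itself; the append mode above is a deliberate simplification.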
