Nik*_*mar 5 python amazon-s3 amazon-ec2 mongodb amazon-web-services
我按照这个链接中的步骤，把我的 mongodump 备份上传到 S3。
bash脚本
#!/bin/sh
# Backup configuration: tool paths, target database, and timestamped
# names for the dump directory and its bzip2 archive.
MONGODB_SHELL='/usr/bin/mongo'
DUMP_UTILITY='/usr/bin/mongodump'
DB_NAME='amicus'
# $(...) instead of deprecated backticks; quoted interpolation instead of
# unquoted string concatenation.
date_now=$(date +%Y_%m_%d_%H_%M_%S)
dir_name="db_backup_${date_now}"
file_name="db_backup_${date_now}.bz2"
# Print a single message line.
# The original 'echo $1' word-split and glob-expanded the message,
# collapsing whitespace and expanding '*'; printf with a quoted arg is safe.
log() {
  printf '%s\n' "$1"
}
# Remove local backup artifacts (dump directories and archives).
# The original hard-coded 'db_backup_2010*', so nothing created after
# 2010 was ever cleaned up; match the whole prefix instead.
do_cleanup(){
  rm -rf -- db_backup_*
  log 'cleaning up....'
}
# Lock the db with fsync, dump it, archive the dump, then always unlock.
# The original dropped the '\' continuation after the tar line, so
# unlock.js and the success log ran unconditionally — even when the dump
# or the archive step had failed.
do_backup(){
  log 'snapshotting the db and creating archive' || return 1
  "${MONGODB_SHELL}" admin fsync_lock.js || return 1
  "${DUMP_UTILITY}" -d "${DB_NAME}" -o "${dir_name}" && \
  tar -jcf "${file_name}" "${dir_name}"
  dump_status=$?
  # Release the fsync lock even when the dump or tar failed.
  "${MONGODB_SHELL}" admin unlock.js
  [ "${dump_status}" -eq 0 ] || return "${dump_status}"
  log 'data backd up and created snapshot'
}
# Upload the compressed archive to S3 via the helper script; the success
# message only prints when the upload exits 0.  ${file_name} is quoted so
# an empty or unusual value cannot word-split the command line.
save_in_s3(){
  log 'saving the backup archive in amazon S3' && \
  python aws_s3.py set "${file_name}" && \
  log 'data backup saved in amazon s3'
}
# Pipeline entry point: backup, then upload, then cleanup; each stage
# runs only if the previous one succeeded.
do_backup && save_in_s3 && do_cleanup
Run Code Online (Sandbox Code Playgroud)
aws_s3.py
ACCESS_KEY = ''
SECRET = ''
# boto's get_bucket() expects the bare bucket name, not an s3:// URL.
# The original value 's3:///s3.amazonaws.com/database-backup' makes boto
# build a bogus request target, which is consistent with the reported
# "Connection reset by peer" failure.
BUCKET_NAME = 'database-backup'  # note that you need to create this bucket first

from boto.s3.connection import S3Connection
from boto.s3.key import Key
def save_file_in_s3(filename):
    """Upload the local file ``filename`` to the backup bucket, keyed by its name."""
    connection = S3Connection(ACCESS_KEY, SECRET)
    backup_bucket = connection.get_bucket(BUCKET_NAME)
    entry = Key(backup_bucket)
    entry.key = filename
    entry.set_contents_from_filename(filename)
def get_file_from_s3(filename):
    """Download the object named ``filename`` from the backup bucket to a local file."""
    connection = S3Connection(ACCESS_KEY, SECRET)
    backup_bucket = connection.get_bucket(BUCKET_NAME)
    entry = Key(backup_bucket)
    entry.key = filename
    entry.get_contents_to_filename(filename)
def list_backup_in_s3():
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print "[%s] %s" % (i, key.name)
def delete_all_backups():
#FIXME: validate filename exists
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print "deleting %s" % (key.name)
key.delete()
if __name__ == '__main__':
import sys
if len(sys.argv) < 3:
print 'Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0])
else:
if sys.argv[1] == 'set':
save_file_in_s3(sys.argv[2])
elif sys.argv[1] == 'get':
get_file_from_s3(sys.argv[2])
elif sys.argv[1] == 'list':
list_backup_in_s3()
elif sys.argv[1] == 'delete':
delete_all_backups()
else:
print 'Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0])
Run Code Online (Sandbox Code Playgroud)
但不断收到此错误:
Traceback (most recent call last):
File "aws_s3.py", line 42, in <module>
save_file_in_s3(sys.argv[2])
File "aws_s3.py", line 13, in save_file_in_s3
k.set_contents_from_filename(filename)
File "/usr/local/lib/python2.7/dist-packages/boto/s3/key.py", line 1362, in set_contents_from_filename
encrypt_key=encrypt_key)
File "/usr/local/lib/python2.7/dist-packages/boto/s3/key.py", line 1293, in set_contents_from_file
chunked_transfer=chunked_transfer, size=size)
File "/usr/local/lib/python2.7/dist-packages/boto/s3/key.py", line 750, in send_file
chunked_transfer=chunked_transfer, size=size)
File "/usr/local/lib/python2.7/dist-packages/boto/s3/key.py", line 951, in _send_file_internal
query_args=query_args
File "/usr/local/lib/python2.7/dist-packages/boto/s3/connection.py", line 664, in make_request
retry_handler=retry_handler
File "/usr/local/lib/python2.7/dist-packages/boto/connection.py", line 1071, in make_request
retry_handler=retry_handler)
File "/usr/local/lib/python2.7/dist-packages/boto/connection.py", line 1030, in _mexe
raise ex
socket.error: [Errno 104] Connection reset by peer
Run Code Online (Sandbox Code Playgroud)
我做了一点研究，发现这似乎是 boto 的某个 bug。请问该如何继续解决这个问题？
由于一直没有找到让它正常工作的办法，我改在 bash 脚本中使用 s3cmd。不过对于大于 1GB 的文件，我仍需进行测试。
这是更新后的代码 -
#!/bin/sh
# Backup configuration: tool paths, target database, and timestamped
# names for the dump directory and its bzip2 archive.
MONGODB_SHELL='/usr/bin/mongo'
DUMP_UTILITY='/usr/bin/mongodump'
DB_NAME='amicus'
# $(...) instead of deprecated backticks; quoted interpolation instead of
# unquoted string concatenation.
date_now=$(date +%Y_%m_%d_%H_%M_%S)
dir_name="db_backup_${date_now}"
file_name="db_backup_${date_now}.bz2"
# Print a single message line.
# The original 'echo $1' word-split and glob-expanded the message,
# collapsing whitespace and expanding '*'; printf with a quoted arg is safe.
log() {
  printf '%s\n' "$1"
}
# Remove local backup artifacts (dump directories and archives).
# The original hard-coded 'db_backup_2010*', so nothing created after
# 2010 was ever cleaned up; match the whole prefix instead.
do_cleanup(){
  rm -rf -- db_backup_*
  log 'cleaning up....'
}
# Dump the db and compress the dump directory into a .bz2 archive.
# The original left the log line at the end outside the '&&' chain, so
# 'data backd up...' printed even when mongodump or tar had failed.
do_backup(){
  log 'snapshotting the db and creating archive' || return 1
  "${DUMP_UTILITY}" -d "${DB_NAME}" -o "${dir_name}" && \
  tar -jcf "${file_name}" "${dir_name}" || return 1
  # Only report success when both the dump and the archive succeeded.
  log 'data backd up and created snapshot'
}
# Upload the archive with s3cmd.  The original still invoked the broken
# 'python aws_s3.py set' path it was meant to replace, so its failure
# blocked the s3cmd upload; it also left the s3cmd line outside the '&&'
# chain, logging success unconditionally.
save_in_s3(){
  log 'saving the backup archive in amazon S3' && \
  s3cmd put "${file_name}" s3://YOURBUCKETNAME && \
  log 'data backup saved in amazon s3'
}
# Pipeline entry point: backup, then upload, then cleanup; each stage
# runs only if the previous one succeeded.
do_backup && save_in_s3 && do_cleanup
Run Code Online (Sandbox Code Playgroud)
| 归档时间: |
|
| 查看次数: |
832 次 |
| 最近记录: |