Skip to content
This repository was archived by the owner on Jun 17, 2023. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ python:
install:
- mkdir -p docs/_static
- pip install -r requirements/dev.txt
- pip install . --use-mirrors
script:
- make lint
- make lint_docs
Expand Down
3 changes: 2 additions & 1 deletion requirements/dev.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
boto==2.9.4
boto==2.48.0
coverage>=3.7.1,<3.8
flake8>=2.4.1,<2.5
Sphinx>=1.3,<1.4
tox>=2.1.1,<2.2
python-dateutil==2.1
2 changes: 1 addition & 1 deletion s3_backups/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '0.1.0'
__version__ = '0.2.0'
39 changes: 39 additions & 0 deletions s3_backups/connection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import boto


class Conns:
    """
    Lazily-created, cached S3-compatible connections.

    Connections are made
    a) only when they are first requested, and then cached
    b) not while loading the module
    """
    # Class-level defaults kept for backward compatibility; real caches
    # are per-instance (set in __init__) so two Conns objects with
    # different credentials never share a connection.
    aws = None
    do = None

    def __init__(self, ACCESS_KEY, SECRET, REGION):
        self.ACCESS_KEY = ACCESS_KEY
        self.SECRET = SECRET
        self.REGION = REGION
        # Per-instance connection caches.
        self.aws = None
        self.do = None

    def make_aws_conn(self):
        """Create a new connection to Amazon S3."""
        # boto's ``host`` parameter expects a bare hostname, NOT a URL:
        # passing 'https://s3.amazonaws.com' makes DNS resolution fail.
        return boto.connect_s3(aws_access_key_id=self.ACCESS_KEY,
                               aws_secret_access_key=self.SECRET,
                               host='s3.amazonaws.com')

    def make_do_conn(self):
        """Create a new connection to DigitalOcean Spaces in self.REGION."""
        # Same rule as above: hostname only, no scheme prefix.
        return boto.connect_s3(aws_access_key_id=self.ACCESS_KEY,
                               aws_secret_access_key=self.SECRET,
                               host='%s.digitaloceanspaces.com' % self.REGION)

    def __getitem__(self, name):
        """
        Return the (cached) connection for ``name``.

        ``name`` is 'amazon' or 'digitalocean'; any other value
        returns None, matching the original best-effort contract.
        """
        if name == 'amazon':
            if self.aws is None:
                self.aws = self.make_aws_conn()
            return self.aws
        if name == 'digitalocean':
            if self.do is None:
                self.do = self.make_do_conn()
            return self.do
        return None
19 changes: 12 additions & 7 deletions s3_backups/postgres_to_s3.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
#!/usr/bin/env python

from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
from datetime import datetime
from s3_backups.utils import ColoredFormatter, timeit
from s3_backups.connection import Conns

from dateutil import tz

import importlib
Expand Down Expand Up @@ -54,20 +55,20 @@ def backup():
tar.add(t1.name, ARCHIVE_NAME + ".sql")
tar.close()

log.info("Uploading the " + FILENAME + " file to Amazon S3 ...")

# get bucket
conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
conn = Conns(ACCESS_KEY=AWS_ACCESS_KEY_ID, SECRET=AWS_SECRET_ACCESS_KEY, REGION=S3_REGION)

try:
bucket = conn.get_bucket(S3_BUCKET_NAME)
log.info("Getting the bucket")
bucket = conn[SERVICE_NAME].get_bucket(S3_BUCKET_NAME)
except S3ResponseError:
sys.stderr.write("There is no bucket with the name \"" + S3_BUCKET_NAME + "\" in your Amazon S3 account\n")
sys.stderr.write("Error: Please enter an appropriate bucket name and re-run the script\n")
t2.close()
return

# upload file to Amazon S3
log.info("Uploading the " + FILENAME + " file to Cloud ...")
k = Key(bucket)
k.key = key_name + FILENAME
k.set_contents_from_filename(t2.name)
Expand All @@ -88,8 +89,8 @@ class archive(object):
def __init__(self, schedule_module='schedules.default'):

schedule = importlib.import_module(schedule_module)
conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket = conn.get_bucket(S3_BUCKET_NAME)
conn = Conns(ACCESS_KEY=AWS_ACCESS_KEY_ID, SECRET=AWS_SECRET_ACCESS_KEY, REGION=S3_REGION)
bucket = conn[SERVICE_NAME].get_bucket(S3_BUCKET_NAME)

key_name = S3_KEY_NAME
if not key_name.endswith("/") and key_name != "":
Expand Down Expand Up @@ -155,6 +156,8 @@ def add_datetimes_to_key(self, key):
parser.add_argument('--AWS_SECRET_ACCESS_KEY', required=AWS_SECRET_ACCESS_KEY is None, help='S3 secret access key (required if not defined in AWS_SECRET_ACCESS_KEY environment variable)', default=AWS_SECRET_ACCESS_KEY)

# optional arguments
parser.add_argument('--SERVICE_NAME', default='amazon', help='S3 service name Eg. amazon / digitalocean')
parser.add_argument('--S3_REGION', default='', help="s3 service region")
parser.add_argument('-v', '--verbose', action='store_true', help='Verbose output')
parser.add_argument('--POSTGRES_DUMP_PATH', default='/usr/bin/pg_dumpall', help="Path to pg_dumpall (default: /usr/bin/pg_dumpall)")
parser.add_argument('--ARCHIVE_NAME', default='all_databases', help='The base name for the archive')
Expand All @@ -163,12 +166,14 @@ def add_datetimes_to_key(self, key):
parser.add_argument('--archive', action='store_true', help='Archive backups on S3')
args = parser.parse_args()

SERVICE_NAME = args.SERVICE_NAME
AWS_ACCESS_KEY_ID = args.AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY = args.AWS_SECRET_ACCESS_KEY
S3_BUCKET_NAME = args.S3_BUCKET_NAME
S3_KEY_NAME = args.S3_KEY_NAME
POSTGRES_DUMP_PATH = args.POSTGRES_DUMP_PATH
ARCHIVE_NAME = args.ARCHIVE_NAME
S3_REGION = args.S3_REGION

if args.verbose:
log.setLevel(logging.INFO)
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
package_data={'': ['LICENSE']},
package_dir={'s3-backups': 's3-backups'},
include_package_data=True,
install_requires=['boto==2.9.4', 'python-dateutil==2.1'],
install_requires=['boto==2.48.0', 'python-dateutil==2.1'],
license=open('LICENSE').read(),
zip_safe=False,
scripts=['s3_backups/postgres_to_s3.py', 's3_backups/redis_to_s3.py', 's3_backups/mysql_to_s3.py'],
Expand Down