-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcloud_storage.py
More file actions
97 lines (80 loc) · 2.95 KB
/
cloud_storage.py
File metadata and controls
97 lines (80 loc) · 2.95 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import boto3
from botocore.exceptions import ClientError
import os
import csv
# Global kill switch: when True, upload/download methods become no-ops that
# report success without touching AWS (useful for offline development).
CLOUD_DISABLE = False
class AWS_S3_CloudStorage:
    """Thin wrapper around boto3 for uploading/downloading files to one S3 bucket.

    Credentials (access key, secret key, bucket name) are loaded from a CSV
    file at construction time; see read_credentials() for the expected layout.
    """

    def __init__(self, credentials_filename='credentials/credentials.csv'):
        """
        @param credentials_filename : Path to the CSV file holding the AWS
                                      credentials and target bucket name.
        """
        self.aws_credentials_filename = credentials_filename
        self.aws_access_key_id = None
        self.aws_secret_access_key = None
        self.bucket = None
        self.read_credentials()

    def read_credentials(self):
        '''
        Credentials should be in the 2nd row of a csv file, where the first row
        would ideally contain the column names in the following order:
        [access_key_id, secret_access_key, bucket] and the second row would contain
        the values.
        '''
        with open(self.aws_credentials_filename, newline='') as csvfile:
            cred_reader = csv.reader(csvfile)
            for row in cred_reader:
                # Skip blank lines (csv yields []) and the optional header row;
                # the original code would raise IndexError on an empty row.
                if not row or row[0] == 'access_key_id':
                    continue
                self.aws_access_key_id = row[0]
                self.aws_secret_access_key = row[1]
                self.bucket = row[2]

    def create_session(self):
        """Return a new boto3 Session built from the loaded credentials."""
        session = boto3.Session(
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key)
        return session

    def file_exists_in_s3(self, s3_client, filename):
        """Return True if the object key `filename` exists in the bucket.

        Any ClientError other than 404 (e.g. AccessDenied) is treated as
        "may exist" and returns True, matching the original behavior.
        """
        try:
            s3_client.head_object(Bucket=self.bucket, Key=filename)
        except ClientError as e:
            # Compare the code as a string: head_object reports '404', but
            # many S3 error codes ('NoSuchKey', 'AccessDenied', ...) are not
            # numeric, so the original int(...) conversion could raise
            # ValueError instead of answering the existence question.
            return e.response['Error']['Code'] != '404'
        return True

    def file_exists_locally(self, filename):
        """Return True if `filename` exists on the local filesystem."""
        return os.path.isfile(filename)

    def upload_file_to_cloud(self, filename):
        '''
        @param filename : Filename include local filepath

        Returns True on success (or when the cloud is disabled), False if the
        local file does not exist. The object key is the file's basename.
        '''
        global CLOUD_DISABLE
        if CLOUD_DISABLE:
            return True
        cur_session = self.create_session()
        s3_client = cur_session.client('s3')
        if self.file_exists_locally(filename):
            s3_client.upload_file(
                Bucket=self.bucket,
                Key=os.path.basename(filename),
                Filename=filename
            )
        else:
            print('File does not exist. Could not upload.')
            return False
        return True

    def download_file_from_cloud(self, filename):
        '''
        @param filename : Filename including local filepath where the file will be stored

        Returns True on success (or when the cloud is disabled), False if the
        object is not present in the bucket. The object key is the basename.
        '''
        global CLOUD_DISABLE
        if CLOUD_DISABLE:
            return True
        cur_session = self.create_session()
        s3_client = cur_session.client('s3')
        if self.file_exists_in_s3(s3_client, os.path.basename(filename)):
            s3_client.download_file(
                Bucket=self.bucket,
                Key=os.path.basename(filename),
                Filename=filename
            )
        else:
            print('File does not exist in the S3 bucket. Could not download.')
            return False
        return True
# Backwards-compatible alias so callers can use the provider-agnostic name.
CloudStorage = AWS_S3_CloudStorage