-
Notifications
You must be signed in to change notification settings - Fork 21
Expand file tree
/
Copy pathlambdaHasher.py
More file actions
45 lines (36 loc) · 1.23 KB
/
lambdaHasher.py
File metadata and controls
45 lines (36 loc) · 1.23 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import boto3
from hashlib import sha256
from datetime import datetime
import logging
# Module-level logger: configured once when the Lambda container is created
# and reused across warm invocations.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# AWS clients are created at import time (outside the handler) so they are
# shared across warm invocations instead of being rebuilt per request.
s3_cli = boto3.client('s3')
ddb_res = boto3.resource('dynamodb', region_name='us-east-1')
def init(event,context):
    """Lambda entry point for an S3 object-created trigger.

    Reads the first S3 record from the event, hashes the uploaded object's
    contents, and writes the digest plus filename/timestamp to DynamoDB.
    Returns None; results are observable only via logs and the table write.
    """
    logger.info(event)
    s3_record = event['Records'][0]['s3']
    bucket = s3_record['bucket']['name']
    key = s3_record['object']['key']
    digest = hashfile(key, bucket, s3_cli)
    result = putItem(ddb_res, digest, key)
    logger.info(result)
def hashfile(key,bucket,clientS3):
    """Fetch the S3 object at bucket/key and return its SHA-256 hex digest.

    clientS3 is any object exposing the boto3 ``get_object(Bucket=, Key=)``
    interface whose response carries a readable ``Body`` stream.
    """
    obj = clientS3.get_object(Bucket=bucket, Key=key)
    payload = obj['Body'].read()
    return sha256(payload).hexdigest()
def genDateString():
    """Return the current UTC time formatted like 'Jul-13-2019 14:05:33'.

    Fix: the original used naive local time (``datetime.now()``), which is
    ambiguous and depends on the runtime's TZ setting. An explicit UTC
    timezone makes the timestamp unambiguous; since the Lambda runtime's
    default TZ is already UTC, the produced value is unchanged there.
    The format string is unchanged, so stored timestamps stay consistent.
    """
    return datetime.now(timezone.utc).strftime("%b-%d-%Y %H:%M:%S")
def putItem(resourceDDB,hashVal,filename,table='Metadata'):
    """Persist a file-hash record to DynamoDB and return the put_item response.

    resourceDDB is a boto3 DynamoDB service resource; table defaults to the
    'Metadata' table. The stored item carries the digest, the S3 object key,
    and a human-readable timestamp.
    """
    record = {
        # NOTE(review): the attribute is named 'md5' but the value the caller
        # supplies is a SHA-256 digest (see hashfile) — the key is kept as-is
        # because it is likely the table's partition key; confirm the schema
        # before renaming.
        'md5': hashVal,
        'filename': filename,
        'timestamp': genDateString(),
    }
    return resourceDDB.Table(table).put_item(Item=record)
#AWS Lambda Practice Exercises: S3 & DynamoDB - lambda function for hashing uploaded files and writing data to DDB
#Elliott Arnold 7-13-19
#si3mshady