forked from HariSekhon/DevOps-Python-tools
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
executable file
·105 lines (85 loc) · 3.1 KB
/
main.py
File metadata and controls
executable file
·105 lines (85 loc) · 3.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
#!/usr/bin/env python
# coding=utf-8
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2020-10-14 15:29:38 +0100 (Wed, 14 Oct 2020)
#
# https://github.com/HariSekhon/pytools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/HariSekhon
#
"""
GCP Cloud Function to export a given Cloud SQL database to GCS via PubSub notifications from Cloud Scheduler
Solution Documentation:
https://cloud.google.com/solutions/scheduling-cloud-sql-database-exports-using-cloud-scheduler
GCP Cloud PubSub should be sent payloads like this by Cloud Scheduler (replacing the env vars with your literals):
{
"project": "${GOOGLE_PROJECT_ID}",
"instance": "${SQL_INSTANCE_HOST}",
"database": "${DATABASE}",
"bucket": "${BUCKET_NAME}"
}
Tested on GCP Cloud Functions with Python 3.7
"""
# https://cloud.google.com/functions/docs/writing/specifying-dependencies-python
# Code below is based on solution sample code from the link above
#import os
import base64
import logging
import json
from datetime import datetime
from httplib2 import Http
from googleapiclient import discovery
from googleapiclient.errors import HttpError
from oauth2client.client import GoogleCredentials
# pylint: disable=unused-argument
def main(event, context):
    """GCP Cloud Function entry point: export a Cloud SQL database to GCS.

    Triggered by a PubSub message (published by Cloud Scheduler) whose
    base64-encoded 'data' field is a JSON object with the keys:
    'project', 'instance', 'database', 'bucket'.

    Args:
        event: PubSub event dict; event['data'] holds the base64 JSON payload.
        context: Cloud Functions event metadata (unused).

    Raises:
        KeyError: if the payload is missing one of the required keys.
    """
    # if os.getenv("DEBUG"):
    #     # debug level logs don't appear in function details logs tab even with
    #     # DEBUG=1 runtime env var set and logs severity filter set to >= DEBUG
    #     #logging.debug('event: %s', event)
    #     #logging.debug('context: %s', context)
    #     logging.info('event: %s', event)
    #     logging.info('context: %s', context)
    # decode the base64-wrapped JSON payload sent via PubSub
    data = json.loads(base64.b64decode(event['data']).decode('utf-8'))
    credentials = GoogleCredentials.get_application_default()
    # cache_discovery=False suppresses discovery file-cache errors in the
    # Cloud Functions runtime
    service = discovery.build('sqladmin', 'v1beta4', http=credentials.authorize(Http()), cache_discovery=False)
    project = data['project']
    instance = data['instance']
    database = data['database']
    bucket = data['bucket']
    # Strip an optional 'gs://' scheme prefix from the bucket name.
    # BUG FIX: the original used bucket.lstrip('gs://'), but str.lstrip strips
    # any leading run of the characters {g, s, :, /} — corrupting bucket names
    # such as 'ssg-backups' -> '-backups'. Strip the exact prefix instead
    # (str.removeprefix needs Python 3.9; this runtime is documented as 3.7).
    if bucket.startswith('gs://'):
        bucket = bucket[len('gs://'):]
    # NOTE(review): datetime.now() is the server's local time (UTC on Cloud
    # Functions) — timestamps in object names are only as consistent as the
    # runtime's timezone
    timestamp = datetime.now().strftime("%Y-%m-%d_%H%M")
    # .gz extension so it is auto-compressed, saving storage space + billing
    backup_uri = "gs://{bucket}/backups/sql/{instance}--{database}--{timestamp}.sql.gz".format(
        bucket=bucket,
        instance=instance,
        database=database,
        timestamp=timestamp)
    instances_export_request_body = {
        "exportContext": {
            "kind": "sql#exportContext",
            "fileType": "SQL",
            "uri": backup_uri,
            "databases": [
                database
            ]
        }
    }
    try:
        request = service.instances().export(
            project=project,
            instance=instance,
            body=instances_export_request_body
        )
        response = request.execute()
    except HttpError as err:
        logging.error("Backup FAILED. Reason: %s", err)
    else:
        # only reached when the export request succeeded
        logging.info("Backup task status: %s", response)