Read binary images from MSSQL Server, store them on the filesystem using Pillow, and upload them to S3.
__author__ = 'M. Taqi'
__copyright__ = 'Copyright 2015-16, Cogilent'
__license__ = 'MIT'
__email__ = 'taqi.official@gmail.com'
import os
import logging
import cStringIO

import psycopg2
import pymssql
import PIL.Image
import boto
from boto.s3.key import Key
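
# NOTE: the class below reads its settings from a module-level `config`
# mapping that the gist itself does not define. A minimal sketch of the
# expected keys, inferred from the config.get() calls in __init__
# (values are placeholders, not real credentials):
config = {
    "database_host": "your-mssql-host.database.windows.net",
    "database_port": 1433,
    "database_user": "your-mssql-user",
    "database_pass": "your-mssql-password",
    "database_name": "your-mssql-database",
    # "database_script_encoding" and "database_encoding" default to "utf8"
    "aws_key": "YOUR_AWS_ACCESS_KEY_ID",
    "aws_secret_key": "YOUR_AWS_SECRET_ACCESS_KEY",
    "aws_bucket_name": "your-bucket-name",
}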

class DbMigration:

    def __init__(self):
        # MSSQL (source) connection settings
        self.__mssql_script_encoding = config.get("database_script_encoding", "utf8")
        self.__mssql_encoding = config.get("database_encoding", "utf8")
        self.__mssql_host = config.get("database_host")
        self.__mssql_port = config.get("database_port", 1433)
        self.__mssql_user = config.get("database_user")
        self.__mssql_passwd = config.get("database_pass")
        self.__mssql_db = config.get("database_name")
        # PostgreSQL (target) connection settings
        self.pgsql_name = 'pncrms'
        self.pgsql_user = 'mtaqi'
        self.pgsql_password = 'nopass'
        self.pgsql_host = '127.0.0.1'
        self.pgsql_port = 5432
        self.pgsql_conn = None
        # AWS S3 settings
        self.__AWS_ACCESS_KEY = config.get("aws_key")
        self.__AWS_ACCESS_SECRET_KEY = config.get("aws_secret_key")
        self.__bucket = config.get("aws_bucket_name")
        self.__mssql_connect()
        self.__pgsql_connect()
    def __mssql_connect(self):
        """Connect to the MSSQL server (hosted on MS Azure)."""
        try:
            self.mssql_conn = pymssql.connect(server=self.__mssql_host,
                                              port=self.__mssql_port,
                                              user=self.__mssql_user,
                                              password=self.__mssql_passwd,
                                              database=self.__mssql_db)
        except Exception as e:
            raise Exception("could not connect to MSSQL database: %s" % e)
        else:
            print 'Established connection with MSSQL database'
    def __pgsql_connect(self):
        """Connect to the PostgreSQL database."""
        try:
            self.pgsql_conn = psycopg2.connect(database=self.pgsql_name,
                                               user=self.pgsql_user,
                                               password=self.pgsql_password,
                                               host=self.pgsql_host,
                                               port=self.pgsql_port)
        except Exception as e:
            raise Exception("could not connect to PGSQL database: %s" % e)
        else:
            print 'Established connection with PGSQL database'
    def shift_applicant_images(self):
        """Read image blobs from MSSQL, write them to disk as PNGs and upload them to S3."""
        rows = None
        try:
            cursor = self.mssql_conn.cursor()
            cursor.execute('SELECT [ID], [Full_Name], '
                           '[Picture], [Signature] FROM '
                           'Personal_Information ORDER BY ID ASC;')
            rows = cursor.fetchall()
        except Exception as e:
            print e
        if rows is None:
            return
        path = "/home/muhammadtaqi/Pictures/"
        for row in rows:
            if row[3] is None:
                continue
            try:
                # wrap the binary column in a file-like object and decode it with Pillow
                image = cStringIO.StringIO(row[3])
                img = PIL.Image.open(image)
                filename = path + str(row[0]) + '.png'
                img.save(filename, format='PNG')
                try:
                    # set boto lib debug to critical
                    logging.getLogger('boto').setLevel(logging.CRITICAL)
                    # connect to the bucket
                    conn = boto.connect_s3(self.__AWS_ACCESS_KEY, self.__AWS_ACCESS_SECRET_KEY)
                    bucket = conn.get_bucket(self.__bucket, validate=False)
                    # create a key to keep track of our file in the storage
                    k = Key(bucket)
                    k.key = '%s.png' % row[0]
                    k.set_contents_from_filename(filename)
                    # make it public so it can be accessed via a URL like
                    # http://s3.amazonaws.com/bucket_name/key
                    k.make_public()
                    # remove the local copy from the web server once uploaded
                    os.remove(filename)
                except Exception as e:
                    print e
            except IOError as e:
                print "cannot create image for record %s" % row[0]
                print e
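
# Optional verification sketch, not part of the original gist: once
# k.make_public() has run, each image should be reachable without credentials.
# Assumes the placeholder `config` mapping defined above and uses only boto 2
# calls (get_key, generate_url); an unsigned URL is produced with expires_in=0
# and query_auth=False.
def _verify_upload(record_id):
    conn = boto.connect_s3(config["aws_key"], config["aws_secret_key"])
    bucket = conn.get_bucket(config["aws_bucket_name"], validate=False)
    key = bucket.get_key('%s.png' % record_id)
    if key is not None:
        print key.generate_url(expires_in=0, query_auth=False)
    else:
        print 'no object found for record %s' % record_id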

if __name__ == "__main__":
    obj_db_migration = DbMigration()
    obj_db_migration.shift_applicant_images()