Skip to content

Instantly share code, notes, and snippets.

@vbe0201
Last active June 2, 2019 20:07
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save vbe0201/a862664f04fda014758146e0f230df0c to your computer and use it in GitHub Desktop.
Save vbe0201/a862664f04fda014758146e0f230df0c to your computer and use it in GitHub Desktop.
A proof of concept for dumping the contents of mails sent from Gmail's new Confidential Mode
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is a proof of concept for dumping the contents of mails
sent from Gmail's new Confidential Mode.
The entire "security" can be defeated by obtaining the cookies
for such a confidential mail. Use the network tab of your browser.
Intercept network calls while opening such a mail in confidential
mode. See the email and refresh the page. Find the call you made
for refresh (make sure to have preserve logs enabled) and get the
string that starts with "Cookie:". Strip that away and paste the contents
into the value for the Cookie header.
That step is only required once. Once it is set up, this works
for any such email.
And that is everything you need to dump the contents of such
mails from a corresponding URL. The rest is just some parsing
by RegEx.
Google sucks and they obviously don't know how to implement good DRMs.
Don't use such emails to send sensitive data.
- Valentin B.
"""
import argparse
import logging
import os
import re
import sys
import unicodedata
import requests
# Module-level logger; callers are expected to configure logging themselves.
logger = logging.getLogger(__name__)
# Request headers sent with every GET. The Cookie value is a placeholder:
# the user must paste the cookie string captured from the browser's network
# tab (see the module docstring) before running this script.
headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/68.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Cookie': 'Cookies here',
}
def main(parser, args):
    """Dump a Gmail Confidential Mode email and its attachments to disk.

    Args:
        parser: The argparse parser (unused here; kept for a uniform
            ``func(parser, args)`` dispatch signature shared with
            ``_print_help``).
        args: Parsed CLI arguments providing ``url`` (the confidential
            mail URL) and ``output`` (path of the file to write the
            mail body to). Attachments are saved next to ``output``.

    Exits with status 1 on any download or parsing failure.
    """
    # Download mail contents.
    logger.info('Performing request to URL %s with headers %s', args.url, headers)
    response = requests.get(args.url, headers=headers)
    if response.status_code != 200:
        logger.error('Failed to download the contents of the URL.')
        sys.exit(1)
    # Attachments are written into the same directory as the output file.
    path = os.path.dirname(args.output)
    # Some RegEx parsing to extract important parts of the mail.
    # Guard the [0] lookups: stale cookies yield a page without these
    # markers, which previously crashed with a bare IndexError.
    subjects = re.findall(r'aria-label=\"Email\ssubject:\s([^\"]+)\">', response.text)
    if not subjects:
        logger.error('Could not find the email subject. Are your cookies still valid?')
        sys.exit(1)
    subject = subjects[0]
    logger.info('Gotcha! Subject of the mail is "%s"', subject)
    bodies = re.findall(r'data:function\(\){return\s\[\[\[[^\"]*\"([^\]]+)', response.text)
    if not bodies:
        logger.error('Could not find the email body in the response.')
        sys.exit(1)
    # NFKD-normalize, then fold to ASCII; unmappable characters become '?'.
    content = unicodedata.normalize('NFKD', bodies[0]).encode('ascii', 'replace')
    attachments = re.findall(
        r'\[[^,]*,\"([^\"]*)\",[^,]*,[^,]*,[^,]*,[^,]*,\"(https://lh3\.googleusercontent\.com/\w*)',
        response.text
    )
    # BUG FIX: content is bytes, so the file must be opened in binary mode
    # ('w' made f.write(content) raise TypeError).
    with open(args.output, 'wb') as f:
        f.write(content)
    logger.info('Contents of the email were written to file %s', args.output)
    # If there are attachments, download and save them to the
    # directory of the output file for the mail content.
    # BUG FIX: this is an informational message, not an error.
    logger.info('%d attachments to download.', len(attachments))
    for attachment in attachments:
        file_name = os.path.join(path, attachment[0])
        result = requests.get(attachment[1], allow_redirects=True)
        if result.status_code != 200:
            logger.error('Failed to download attachment.')
            sys.exit(1)
        with open(file_name, 'wb') as f:
            f.write(result.content)
        logger.info('Attachment %s was saved to %s', attachment[0], file_name)
def _print_help(parser, _):
parser.print_help(sys.stderr)
def _add_dump_args(subparser):
    """Register the ``dump`` subcommand on the given subparsers object.

    Args:
        subparser: The result of ``ArgumentParser.add_subparsers()``.

    The subcommand dispatches to ``main`` and takes two positional
    arguments: the mail URL and the output file path.
    """
    parser = subparser.add_parser(
        'dump',
        # BUG FIX: the implicitly-concatenated fragments were missing a
        # separating space, rendering as "sentfrom" in --help output.
        help='Utility for dumping emails sent '
             'from Gmail Confidential Mode.'
    )
    parser.set_defaults(func=main)
    parser.add_argument(
        'url',
        type=str,
        help='The URL to the mail.'
    )
    parser.add_argument(
        'output',
        type=str,
        help='The output file for the contents.'
    )
def _parse_args():
    """Build the CLI parser and parse ``sys.argv``.

    Returns:
        A ``(parser, args)`` tuple. When no subcommand is given,
        ``args.func`` defaults to ``_print_help`` so the caller's
        dispatch still works.
    """
    parser = argparse.ArgumentParser(
        prog='gcm-dumper',
        # BUG FIX: the implicitly-concatenated fragments were missing a
        # separating space, rendering as "GmailConfidential".
        description='Dump emails from Gmail '
                    'Confidential Mode.'
    )
    parser.set_defaults(func=_print_help)
    command_parser = parser.add_subparsers(
        dest='subcommands',
        title='Subcommands'
    )
    _add_dump_args(command_parser)
    return parser, parser.parse_args()
if __name__ == '__main__':
    # Script entry point: build the CLI, then dispatch to the handler the
    # chosen subcommand installed (or the help fallback).
    cli_parser, cli_args = _parse_args()
    cli_args.func(cli_parser, cli_args)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment