@weaming
Last active August 7, 2023 09:51
Convert boostnote cson format data to markdown
#!/usr/bin/env python3
# coding: utf-8
"""
Author : weaming
Created Time : 2018-05-26 21:32:59

Prerequisite:
    python3 -m pip install cson arrow
"""
import json
import os
import sys
import datetime

import cson

try:
    import arrow
    time_aware = True
except ImportError:
    print(
        'warning: datetime information will be discarded unless you install arrow'
    )
    time_aware = False


def read_file(fp):
    with open(fp) as f:
        return f.read()


def text_to_dict(text):
    """convert json or cson to dict"""
    try:
        return cson.loads(text)
    except Exception:
        pass

    try:
        return json.loads(text)
    except Exception:
        pass

    raise Exception("text should be in cson or json format")


def read_folder_names(fp):
    data = text_to_dict(read_file(fp))
    return {x['key']: x['name'] for x in data['folders']}


def write_boostnote_markdown(data, output, folder_map):
    """write boostnote dict to markdown"""
    target_dir = os.path.join(output, folder_map[data['folder']])
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    target_file = os.path.join(target_dir, '{}.md'.format(data['title'].replace('/', '-')))
    with open(target_file, 'w') as f:
        f.write(data.get('content', ''))
        print(target_file)

    if time_aware:
        update_at = arrow.get(data['updatedAt'])
        update_at_epoch = int(update_at.timestamp())
        os.utime(target_file, (update_at_epoch, update_at_epoch))
        stat = os.stat(target_file)


def process_file(source, output, folder_map):
    data = text_to_dict(read_file(source))
    write_boostnote_markdown(data, output, folder_map)


def main(boostnote_dir, output):
    """
    :input: input folder path
    :output: output folder path
    """
    folder_map = read_folder_names(os.path.join(boostnote_dir, 'boostnote.json'))
    notes_dir = os.path.join(boostnote_dir, 'notes')
    for name in os.listdir(notes_dir):
        if not name.endswith('.cson'):
            continue
        source = os.path.join(notes_dir, name)
        process_file(source, output, folder_map)


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description="convert boostnote cson format data to markdown")
    parser.add_argument(
        '-s',
        '--source',
        type=str,
        help="directory storing the cson files",
        default=".")
    parser.add_argument(
        '-o', '--output', type=str, help="output directory", default="output")
    args = parser.parse_args()
    main(args.source, args.output)
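
For anyone inspecting the data layout first: a minimal sketch of what read_folder_names() consumes and produces, assuming a typical Boostnote storage directory where boostnote.json sits next to the notes/ folder. The folder key is copied from the example note quoted in the comments below; the folder name "Inbox" is a hypothetical placeholder.

    # Hypothetical excerpt of boostnote.json, reduced to the fields the script uses.
    boostnote_json = {
        "folders": [
            {"key": "7487fac9e197583f5a35", "name": "Inbox"},
        ],
    }

    # Same dict comprehension as read_folder_names():
    folder_map = {x["key"]: x["name"] for x in boostnote_json["folders"]}
    # -> {'7487fac9e197583f5a35': 'Inbox'}
    # write_boostnote_markdown() looks up each note's "folder" key here to pick
    # the output subdirectory.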
@weaming
Author

weaming commented Nov 28, 2019

Line 56, f.write(data['content']), would be better changed to f.write(data.get('content', '')) to avoid an error when there are empty notes.

@noklam Can "content" actually be missing?

I created a new empty note, and its content is:

createdAt: "2018-12-25T12:26:26.948Z"
updatedAt: "2018-12-25T12:26:26.948Z"
type: "MARKDOWN_NOTE"
folder: "7487fac9e197583f5a35"
title: ""
tags: []
content: ""
isStarred: false
isTrashed: false

@noklam

noklam commented Nov 28, 2019

@weaming That's exactly the problem: if content does not exist, it raises an error. Using get returns an empty string instead.
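
A minimal sketch of the difference (the note dict here is hypothetical): indexing a missing key raises KeyError, while .get() falls back to the default.

    note = {"title": "empty note"}  # hypothetical note dict with no "content" key

    note.get("content", "")         # -> "" (safe fallback)
    note["content"]                 # -> raises KeyError: 'content'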

@mbrausen

@weaming THANK YOU! That saved me a lot of time :-D

I noticed the tags are not preserved, so I added a line above line 56:
f.write(f"Tags: {', '.join(data.get('tags', ''))}\n")
This inserts the tags, comma-separated, at the top of each note/file.
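
For reference, a minimal sketch of what that added line writes, with hypothetical tag values:

    tags = ["python", "boostnote"]          # hypothetical entries from a note's "tags" list
    header = f"Tags: {', '.join(tags)}\n"   # -> "Tags: python, boostnote\n"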

@mbrausen

One more thing: when I discovered that all my snippet notes were empty after converting, I added code to export not only markdown notes but also snippet notes, placing the snippets one after another.

    with open(target_file, 'w', encoding='utf-8') as f:
        if data["type"] == 'MARKDOWN_NOTE':
            f.write(f"Tags: {', '.join(data.get('tags', ''))}\n")
            f.write(data.get('content', ''))
        elif data["type"] == 'SNIPPET_NOTE':
            f.write(f"Tags: {', '.join(data.get('tags', ''))}\n")
            snippets = data.get('snippets', '')
            if snippets != '':
                for snippet in snippets:
                    if not snippet['name']:
                        f.write(data['title'])
                    else:
                        f.write(f"File/Description: {snippet['name']}\n")
                        try:
                            f.write(f'Language: {snippet["mode"]}\n')
                        except KeyError:
                            pass
                        f.write('Code:\n```')
                        try:
                            f.write(f'{str(snippet["mode"]).lower()}\n')
                        except KeyError:
                            f.write('\n')
                        f.write(snippet['content'])
                        f.write('\n```\n')
                        if len(snippets) > 1:
                            f.write('\n***\n\n')
            else:
                f.write('')
        else:
            sys.exit('Unknown note type - do not know how to handle, exiting!')
        print(target_file)
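
The shape of the SNIPPET_NOTE data this branch consumes is implied by the field accesses above; a minimal sketch of such a note as a Python dict, with hypothetical values:

    # Hypothetical SNIPPET_NOTE, shaped after the fields read above:
    # each entry in "snippets" has a name, an optional mode, and content.
    snippet_note = {
        "type": "SNIPPET_NOTE",
        "title": "shell helpers",
        "tags": ["cli"],
        "snippets": [
            {"name": "list-ports.sh", "mode": "Shell", "content": "lsof -i -P -n"},
        ],
    }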

@weaming
Author

weaming commented Jan 13, 2023

@mbrausen Thank you for sharing! But I will not update this gist, because I have stopped using Boostnote and have no way to test your code.

@rordaz

rordaz commented Jan 16, 2023

Thank you for making this! It was an excellent resource for converting and organizing Boostnote notes into Obsidian.

@boonyasukd

The time_aware logic from L59-L63 doesn't work correctly for me. Unless your local time is Zulu time (GMT+0), all timestamps of the exported files will be off.

To export files with the correct lastModified stat, I changed the logic as follows:

    if time_aware:
        update_at = arrow.get(data['updatedAt'])
        timestamp = update_at.timestamp()
        os.utime(target_file, (timestamp, timestamp))
        stat = os.stat(target_file)
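
The reason this works: arrow.get() parses the trailing "Z" in Boostnote's updatedAt as UTC, so .timestamp() yields the same epoch value regardless of the machine's local timezone. A minimal sketch using the timestamp from the example note earlier in the thread (assumes arrow >= 1.0, where timestamp() is a method):

    import arrow

    update_at = arrow.get("2018-12-25T12:26:26.948Z")   # parsed as UTC
    epoch = update_at.timestamp()                        # -> 1545740786.948
    # The value is identical in any local timezone, so os.utime() sets the
    # intended modification time everywhere.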

@weaming
Author

weaming commented Aug 7, 2023

@boonyasukd Updated per your suggestion. Thanks!
