$ pip install click requests
$ python query_export.py --redash-url "https://app.redash.io/<slug>" --api-key "<api-key>"
import click
import requests
# Header template prepended to every exported .sql file; the /* ... */ comment
# preserves query metadata, and {query} carries the SQL body itself.
template = u"""/*
Name: {name}
Data source: {data_source}
Created By: {created_by}
Last Update At: {last_updated_at}
*/
{query}"""
def get_queries(url, api_key):
    """Fetch every query object from a Redash instance, following pagination.

    url: base Redash URL, e.g. "https://app.redash.io/<slug>" (no trailing /api).
    api_key: a Redash user API key, sent as an Authorization header.
    Returns a list of query dicts as returned by GET /api/queries.
    """
    queries = []
    headers = {'Authorization': 'Key {}'.format(api_key)}
    path = "{}/api/queries".format(url)
    has_more = True
    page = 1
    while has_more:
        response = requests.get(path, headers=headers, params={'page': page}).json()
        queries.extend(response['results'])
        # More pages remain while the number of items fetched so far
        # (page * page_size) is still below the reported total count.
        has_more = page * response['page_size'] + 1 <= response['count']
        page += 1
    return queries
def save_queries(queries):
    """Write each query dict to query_<id>.sql in the current directory.

    Each file gets the metadata header from `template` followed by the SQL.
    """
    for query in queries:
        filename = 'query_{}.sql'.format(query['id'])
        # Open in text mode with an explicit encoding. The original wrote
        # content.encode('utf-8') (bytes) to a text-mode handle, which raises
        # TypeError on Python 3; writing the str with encoding='utf-8' is the
        # correct equivalent.
        with open(filename, 'w', encoding='utf-8') as f:
            content = template.format(name=query['name'],
                                      data_source=query['data_source_id'],
                                      created_by=query['user']['name'],
                                      last_updated_at=query['updated_at'],
                                      query=query['query'])
            f.write(content)
@click.command()
@click.option('--redash-url')
@click.option('--api-key', help="API Key")
def main(redash_url, api_key):
    # Export every query from the Redash instance into query_<id>.sql files.
    # (Deliberately no docstring: click would surface it as --help text.)
    queries = get_queries(redash_url, api_key)
    save_queries(queries)


if __name__ == '__main__':
    main()
Awesome!
👍
Thanks for this gist.
Is there a way to make an import to Redash?
@ekruten something like that would do the trick. Disclaimer: I have not validated whether this code cleans up/resets other query parameters such as schedules.
import click
import requests
import json
import os
def save_queries(url, api_key):
    """Import query_<id>.sql files from the current directory back into Redash.

    Each file is POSTed to /api/queries/<id>, i.e. it updates the existing
    query object with that id on the server.
    """
    headers = {'Authorization': 'Key {}'.format(api_key),
               'Content-Type': 'application/json'}
    files = [f for f in os.listdir('.') if os.path.isfile(f)]
    for f in files:
        if not (f.startswith('query_') and f.endswith('.sql')):
            continue
        # Filename is query_<id>.sql; slice out the id between '_' and '.'.
        query_id = f[f.index('_') + 1:f.index('.')]
        path = "{}/api/queries/{}".format(url, query_id)
        query_str = get_query_str(f)
        payload = {'query': query_str, 'id': int(query_id)}
        # TODO validate query is correct ?
        response = requests.post(path, headers=headers, data=json.dumps(payload))
        # Surface the result instead of silently dropping the response
        # (the original left this as a TODO and never used `response`).
        print(f, response.status_code)
def get_query_str(filename):
    """Return the SQL body of an exported query file.

    Skips the first 7 lines (the /* ... */ metadata header) and returns the
    rest of the file unchanged.
    """
    with open(filename, 'r') as handle:
        all_lines = handle.readlines()
    # TODO validate with redash query_hash?
    return ''.join(all_lines[7:])
@click.command()
@click.option('--redash-url')
@click.option('--api-key', help="API Key")
def main(redash_url, api_key):
    # CLI entry point: push query_*.sql files from the current directory
    # back to the Redash instance at --redash-url.
    save_queries(redash_url, api_key)


if __name__ == '__main__':
    main()
These are great, thank you @arikfr and @chemipot! The above import script did not reset any of my query parameters like draft status or schedule.
A very minor warning - the export script adds a new line to the end of each query, so if you use it in conjunction with the import script (which grabs everything) you'll end up with an additional extra line each time you run them.
One simple solution might be to change line 11 from:
{query}
"""
to {query}"""
@glunn thanks, updated!
By the way, how do I import my queries? @arikfr
The import only seems to work when another query exists with the same number.
I.e. if I'm importing query #1 to a clean redash, it will fail with 404.
If I create a query first, it now being query #1, the import will overwrite it.
Is there a workaround? e.g. an API to create empty queries prior to import?
Thanks for this gist.
another approach: https://github.com/mozilla/stmocli
Is there a way to export dashboards?
Also, Please let me know how do I import queries to a Redash server? Is there a way to do it?
I had to export redash queries in Ruby, so I made a gem like this tool.
https://github.com/oieioi/redash_exporter
Hi, I'm getting an error while using a url like "https://redash.{company-name}.com/queries/4274".
The error is ValueError: No JSON object could be decoded.
@arikfr, Could you explain in what format the url should be?
Two things from my side: changing the write call to
f.write('utf-8')
was necessary for me. I also added an f.close()
after it.

import click
import requests
# Header template prepended to every exported .sql file; the /* ... */ comment
# preserves query metadata, and {query} carries the SQL body itself.
template = u"""/*
Name: {name}
Data source: {data_source}
Created By: {created_by}
Last Update At: {last_updated_at}
*/
{query}"""
def get_queries(url, api_key):
    """Collect all query objects from a Redash server, page by page.

    Issues GET {url}/api/queries with an increasing ?page= parameter until
    the reported total count has been retrieved.
    """
    auth = {'Authorization': 'Key {}'.format(api_key)}
    endpoint = "{}/api/queries".format(url)
    collected = []
    page = 1
    while True:
        payload = requests.get(endpoint, headers=auth, params={'page': page}).json()
        collected.extend(payload['results'])
        # Stop once page * page_size has reached the server's total count.
        if page * payload['page_size'] + 1 > payload['count']:
            break
        page += 1
    return collected
def save_queries(queries):
    """Write each query dict to query_<id>.sql in the current directory."""
    for query in queries:
        filename = 'query_{}.sql'.format(query['id'])
        with open(filename, 'w', encoding='utf-8') as f:
            content = template.format(name=query['name'],
                                      data_source=query['data_source_id'],
                                      created_by=query['user']['name'],
                                      last_updated_at=query['updated_at'],
                                      query=query['query'])
            # BUG fix: the original called f.write('utf-8'), which wrote the
            # literal string "utf-8" into every file and discarded the rendered
            # content. Write the content itself; the encoding belongs on open().
            f.write(content)
            # No explicit f.close(): the with-block already closes the file.
    # Printed once after all files are written (placement in the original
    # paste was ambiguous due to lost indentation — assumed post-loop).
    print("Successfully downloaded all your Redash Queries")
@click.command()
@click.option('--redash-url')
@click.option('--api-key', help="API Key")
def main(redash_url, api_key):
    # Fetch every query via the Redash API and write each to query_<id>.sql.
    queries = get_queries(redash_url, api_key)
    save_queries(queries)


if __name__ == '__main__':
    main()
I added a nice little bash-script (i.e. Redash_Backup.command), which can be called from outside your local git-repo to execute and upload your redash queries to either gitlab / github in one-go. Enjoy!
#!/bin/bash
# Back up all Redash queries into a dated folder inside a local git repo,
# commit, and push to the remote (gitlab/github).
pwd
echo
# NOTE(review): hard-coded local repo path — adjust for your machine.
cd /Users/Code/gitlab/redash_backup
git fetch --prune
git status
pwd
# Timestamped folder name, e.g. 050724_1530 (ddmmyy_HHMM).
d=$(date +%d%m%y_%H%M)
mkdir "$d"
echo
chmod +x query_export.py
# Run the exporter with the repo's virtualenv; fill in your own slug/API key.
./venv/bin/python query_export.py --redash-url "https://app.redash.io/<value>" --api-key "<value>"
# Collect the exported files into the dated folder.
mv query_*.sql "$d"
echo
git add .
git commit -m "$d"
git push origin master
git status
@arikfr is it possible to update the query (the query itself, not the result) using the API? If not, is it "safe" to update the database directly?
The code in this repo didn't work for me, but @dirkbosman's edit did thank you!
@dotanrs I was tweaking the import script and discovered that you can provide 'data_source_id': 1
as an additional attribute inside POST /api/queries
request body to create new query objects. Also added existing query name to make identifying import queries easier via regex.
import click
import requests
import json
import os
import re
def save_queries(url, api_key, data_source_id=1):
    """Create new Redash queries from query_*.sql files in the current directory.

    Unlike the update-by-id variant, this POSTs to /api/queries (no id), which
    creates new query objects on the server. The original query name is
    recovered from the exported header comment.

    data_source_id: id of the data source to attach new queries to
    (defaulted to 1, matching the previous hard-coded value).
    """
    headers = {'Authorization': 'Key {}'.format(api_key),
               'Content-Type': 'application/json'}
    files = [f for f in os.listdir('.') if os.path.isfile(f)]
    for f in files:
        if f.startswith('query_') and f.endswith('.sql'):
            # Removed unused start/end/query_id locals: the create endpoint
            # takes no id, so parsing one out of the filename was dead code.
            path = "{}/api/queries".format(url)
            query_headers = get_headers(f)
            # The exported header contains a "Name: <name>" line.
            query_name = re.search("Name: (.+)", query_headers).group(1)
            print(query_name)
            query_str = get_query_str(f)
            payload = {'query': query_str,
                       'data_source_id': data_source_id,
                       'name': query_name}
            print(payload)
            response = requests.post(path, headers=headers, data=json.dumps(payload))
            print(response.content)
def get_query_str(filename):
    """Return the file's contents minus its first 7 lines (the metadata header)."""
    with open(filename, 'r') as src:
        body_lines = src.readlines()[7:]
    return ''.join(body_lines)
def get_headers(filename):
    """Return lines 1-6 of an exported query file: the metadata fields of the
    /* ... */ header, excluding the opening comment delimiter on line 0."""
    with open(filename, 'r') as src:
        all_lines = src.readlines()
    return ''.join(all_lines[1:7])
@click.command()
@click.option('--redash-url')
@click.option('--api-key')
def main(redash_url, api_key):
    # CLI entry point: create new queries on the Redash server from the
    # query_*.sql files in the current working directory.
    save_queries(redash_url, api_key)


if __name__ == '__main__':
    main()
❤️
Thanks for sharing!
really cool and very useful!