Skip to content

Instantly share code, notes, and snippets.

@nijel
Last active November 1, 2023 09:10
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save nijel/976ec8c56bae9ebeee7a to your computer and use it in GitHub Desktop.
Weblate snippets
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Simple script to import Gettext PO file content into Android resource strings
Usage: android-import-po /path/to/android/resources *.po
It generates Android resource strings containing all translated units in the PO
files. The output path has to contain values/strings.xml which defines IDs for
the Android strings. For each PO file, corresponding values-*/strings.xml is
generated.
"""
from __future__ import print_function
import os
import sys
from argparse import ArgumentParser
from translate.storage.aresource import AndroidResourceFile, AndroidResourceUnit
from translate.storage.po import pofile
def create_mapping(filename):
    """Build a source-string -> unit mapping from an Android template.

    Parses the Android ``strings.xml`` given by ``filename`` and indexes
    every translatable, translated unit by its target text so PO units can
    later be matched back to Android resource IDs.
    """
    template = AndroidResourceFile.parsefile(filename)
    return {
        unit.target: unit
        for unit in template.units
        if unit.istranslatable() and unit.istranslated()
    }
def process_file(args, mapping, filename):
    """Process single PO file.

    Loads ``filename``, determines its target language and writes every
    translated unit whose source string is known in ``mapping`` into the
    corresponding ``values-<lang>/strings.xml`` under ``args.target``.
    """
    # Load PO file
    storage = pofile.parsefile(filename)
    # Extract language; fall back to a placeholder when the PO header
    # does not declare one.
    lang = storage.gettargetlanguage()
    if lang is None:
        lang = 'XX'
    # Target path; Android separates region with -r ("pt_BR" -> "pt-rBR")
    lang_path = os.path.join(
        args.target,
        'values-{0}'.format(lang.replace('_', '-r')),
        'strings.xml'
    )
    # Create target directory if it does not exist (isdir rather than
    # exists, so a stray plain file at this path fails loudly in mkdir
    # instead of being silently skipped and breaking the open() below)
    dirname = os.path.dirname(lang_path)
    if not os.path.isdir(dirname):
        os.mkdir(dirname)
    # Open existing output storage so previous translations are kept,
    # or start a fresh one
    if os.path.exists(lang_path):
        outstorage = AndroidResourceFile.parsefile(lang_path)
    else:
        outstorage = AndroidResourceFile()
    # Iterate over translated units
    for unit in storage.units:
        if unit.istranslatable() and unit.istranslated():
            # Skip units whose source is not in the English template
            if unit.source not in mapping:
                continue
            templateunit = mapping[unit.source]
            # Find unit in output
            outunit = outstorage.findid(templateunit.getid())
            # Create output unit if it does not exist
            if outunit is None:
                outunit = AndroidResourceUnit(templateunit.getid())
                outstorage.addunit(outunit, new=True)
            # Set translation
            outunit.settarget(unit.target)
    # Store the file
    with open(lang_path, 'w') as handle:
        outstorage.savefile(handle)
def main():
    """Parse command line arguments and drive the import."""
    parser = ArgumentParser(
        description='Gettext to Android translations migration',
        epilog='This utility is developed by Weblate <http://weblate.org/> developers'
    )
    parser.add_argument(
        'target',
        help='target directory for Android resources (must contain values/strings.xml)'
    )
    parser.add_argument(
        'pofile',
        nargs='+',
        help='Gettext PO file to import'
    )
    args = parser.parse_args(sys.argv[1:])

    # The English strings.xml acts as the template defining resource IDs.
    template_path = os.path.join(args.target, 'values', 'strings.xml')
    if not os.path.isfile(template_path):
        print('Target path does not exist or does not contain values/strings.xml!')
        sys.exit(1)

    mapping = create_mapping(template_path)
    for po_name in args.pofile:
        process_file(args, mapping, po_name)


if __name__ == "__main__":
    main()
# Import the Debian Handbook PO files (jessie/master branch) into the existing
# debian-handbook project; the weblate:// URL reuses the repository already
# cloned for the 12_advanced-administration component.
./manage.py import_project debian-handbook weblate://debian-handbook/12_advanced-administration jessie/master '*/**.po' --license='GPL-2.0+ or CC-BY-SA-3.0' --license-url='https://spdx.org/licenses/GPL-2.0+' --file-format=po
#!/bin/sh
# Steps needed to install Weblate systemwide on Debian server
# Server
apt install nginx uwsgi uwsgi-plugin-python3 mariadb-server redis-server redis-tools
# Certbot
# Needs backports repo!
apt install -t stretch-backports certbot python-certbot-nginx
# Build dependencies for the pip-installed packages below
apt install libxml2-dev libxslt-dev libfreetype6-dev libjpeg-dev libz-dev libyaml-dev python3-dev tesseract-ocr libtesseract-dev libleptonica-dev cython libmariadb-dev-compat libmariadb-dev python3-hiredis
# Weblate
pip3 install Weblate
# Optional deps
pip3 install tesserocr PyYAML python-bidi phply pyuca mysqlclient django-redis
# Obtain SSL cert
certbot --nginx -d weblate.example.com
# Data dir for Weblate
mkdir /var/lib/weblate
chown www-data:www-data /var/lib/weblate
# Change shell for www-data user so the "su - www-data" commands below work.
# FIX: the user argument was missing; without it chsh changes the shell of
# the invoking (root) user instead of www-data's.
chsh --shell /bin/sh www-data
# Manual steps follow — stop here; the rest is documentation to run by hand
exit
# Create mysql db and user (GRANT with IDENTIFIED BY also creates the user)
mysql <<EOT
GRANT ALL PRIVILEGES ON weblate.* TO 'weblate'@'localhost' IDENTIFIED BY 'password';
CREATE DATABASE weblate CHARACTER SET utf8mb4;
EOT
# Edit nginx settings
vi /etc/nginx/sites-available/default
# Create and enable uwsgi settings
vi /etc/uwsgi/apps-available/weblate.ini
ln -s /etc/uwsgi/apps-available/weblate.ini /etc/uwsgi/apps-enabled
# Create and edit weblate settings (symlinked into the installed package)
mkdir /etc/weblate
cp /usr/local/lib/python3.5/dist-packages/weblate/settings_example.py /etc/weblate/settings.py
ln -s /etc/weblate/settings.py /usr/local/lib/python3.5/dist-packages/weblate/settings.py
vi /etc/weblate/settings.py
# Run Weblate migrations
su - www-data -c '/usr/local/bin/weblate migrate'
# Collect static files
su - www-data -c '/usr/local/bin/weblate collectstatic'
# Change site domain
su - www-data -c '/usr/local/bin/weblate changesite --set-name weblate.example.com'
# Create admin user
su - www-data -c '/usr/local/bin/weblate createadmin --username nijel --email michal@cihar.com --name "Michal Čihař"'
# Start services
systemctl restart nginx uwsgi
# Celery settings
vi /etc/default/celery-weblate /etc/systemd/system/celery-weblate.service
# Check status
su - www-data -c '/usr/local/bin/weblate check --deploy'
# Sentry integration
pip3 install raven
vi /etc/weblate/settings.py
#!/usr/bin/env python
"""Collect the set of country codes audited requests originated from."""
GEOCOUNTRY = '/home/nijel/GeoLite2-Country_20190409/GeoLite2-Country.mmdb'

import geoip2.database

from weblate.accounts.models import AuditLog

reader = geoip2.database.Reader(GEOCOUNTRY)
try:
    # Distinct client addresses recorded in the audit log
    addresses = AuditLog.objects.values_list('address', flat=True).distinct()
    # Set comprehension instead of set((...)); duplicates collapse naturally.
    countries = {reader.country(address).iso_code for address in addresses}
finally:
    # Reader holds the mmdb file open; close it explicitly.
    reader.close()
# This GitHub action updates the translation (msgmerge) based on the changed
# source language file.
#
# It requires GitHub secret WEBLATE_TOKEN to be set with an API token to
# access Weblate.
name: Update Weblate translations

on:
  push:
    branches:
      - master
    paths:
      - "addons/resource.language.en_gb/resources/strings.po"

jobs:
  weblate:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2.3.2
        with:
          # Full history, not a shallow clone
          fetch-depth: 0
      - name: Update Weblate
        env:
          TOKEN: "${{ secrets.WEBLATE_TOKEN }}"
        run: |
          curl -H "Authorization: Token $TOKEN" \
            --form file=@addons/resource.language.en_gb/resources/strings.po \
            --form method=source \
            http://example.com/api/translations/hello/android/en/file/
#!/usr/bin/env python
import sys
from dateutil import parser
def load_data(filename: str):
    """Yield ``(timestamp, message)`` pairs from a log file.

    Each line is expected to begin with a timestamp token followed by
    whitespace and message text; only lines whose message contains "..."
    are yielded, with the message stripped of surrounding whitespace.

    NOTE(review): a line without any whitespace after the first token will
    raise ValueError from the unpacking — presumably the logs never contain
    such lines; confirm against real input.
    """
    with open(filename, "r") as handle:
        # Iterate the file lazily instead of materializing every line
        # with readlines().
        for line in handle:
            timestamp, data = line.split(None, 1)
            if "..." not in data:
                continue
            yield parser.parse(timestamp), data.strip()
first = list(load_data(sys.argv[1]))
second = list(load_data(sys.argv[2]))

# Compare step durations between the two logs: for each consecutive pair of
# entries, print how much longer the second run took than the first.
prev_a = prev_b = None
for index, (stamp_a, text_a) in enumerate(first):
    # Index into second (rather than zip) so a shorter second log still
    # raises IndexError instead of silently truncating the comparison.
    stamp_b, _text_b = second[index]
    if prev_a is not None:
        delta_a = stamp_a - prev_a
        delta_b = stamp_b - prev_b
        print("{:0.1f}".format(delta_b.total_seconds() - delta_a.total_seconds()), text_a)
    prev_a = stamp_a
    prev_b = stamp_b
# Import mumble-ruby-pluginbot translations (Ruby YAML files from the devel
# branch) as a Weblate project, with pluginbot as the main component.
./manage.py import_project --main-component pluginbot --license MIT --license-url https://spdx.org/licenses/MIT --file-format ruby-yaml mumble-ruby-pluginbot https://github.com/MusicGenerator/mumble-ruby-pluginbot.git devel i18n/*/**.yml
from weblate.trans.models import Project, SubProject

project = Project.objects.get(slug='noosfero')

# Plugin name list generated by:
# find plugins/ -name *.po | sed 's@.*/@@; s@\.po@@' | sort -u > /tmp/plugins
# Use a context manager so the file handle is not leaked.
with open('/tmp/plugins') as handle:
    plugins = [x.strip() for x in handle]

for p in plugins:
    name = 'Plugin: {0}'.format(p.replace('_', ' ').capitalize())
    slug = 'plugin-{0}'.format(p.replace('_', '-'))
    # Skip plugins which already have a component.
    if project.subproject_set.filter(slug=slug).exists():
        continue
    SubProject.objects.create(
        name=name,
        slug=slug,
        project=project,
        repo='weblate://noosfero/noosfero',
        filemask='plugins/{0}/po/*/{0}.po'.format(p),
        file_format='po',
        license='AGPL-3.0',
        license_url='https://spdx.org/licenses/AGPL-3.0',
    )
-- Fixup for PostgreSQL indexes done during 4.0 development
-- Drop the existing fulltext indexes so they can be recreated as
-- trigram GIN indexes below.
DROP INDEX unit_source_fulltext;
DROP INDEX unit_target_fulltext;
DROP INDEX unit_context_fulltext;
DROP INDEX unit_note_fulltext;
DROP INDEX unit_location_fulltext;
DROP INDEX suggestion_target_fulltext;
DROP INDEX comment_comment_fulltext;
DROP INDEX dictionary_source_fulltext;
-- Recreate using the trigram operator class (gin_trgm_ops comes from the
-- pg_trgm extension, which must be installed in this database).
CREATE INDEX unit_source_fulltext ON trans_unit USING GIN (source gin_trgm_ops);
CREATE INDEX unit_target_fulltext ON trans_unit USING GIN (target gin_trgm_ops);
CREATE INDEX unit_context_fulltext ON trans_unit USING GIN (context gin_trgm_ops);
CREATE INDEX unit_note_fulltext ON trans_unit USING GIN (note gin_trgm_ops);
CREATE INDEX unit_location_fulltext ON trans_unit USING GIN (location gin_trgm_ops);
CREATE INDEX suggestion_target_fulltext ON trans_suggestion USING GIN (target gin_trgm_ops);
CREATE INDEX comment_comment_fulltext ON trans_comment USING GIN (comment gin_trgm_ops);
CREATE INDEX dictionary_source_fulltext ON trans_dictionary USING GIN (source gin_trgm_ops);
-- Plain indexes superseded by the fulltext ones above
DROP INDEX unit_source_index;
DROP INDEX unit_context_index;
"""
Script to remove duplicate translations (with duplicate checksum) in Weblate
"""
from weblate.trans.models import Translation, Unit
from django.db.models import Count
def duplicates(translation):
    """Return checksum groups occurring on more than one unit of a translation."""
    units = Unit.objects.filter(translation=translation)
    counted = units.values('checksum').annotate(Count('id')).order_by()
    return counted.filter(id__count__gt=1)
def remove_duplicates(translation):
    """Delete one history-less unit for each duplicated checksum.

    For every checksum that appears on more than one unit, delete the first
    unit without any change history and stop.

    NOTE(review): indentation was lost in extraction; placing the ``print``
    inside the inner loop is the most literal reading of the line order,
    but it then reports every unit that still has change history before a
    deletable one is found — confirm against the original gist.
    """
    for dupe in duplicates(translation):
        units = Unit.objects.filter(translation=translation, checksum=dupe['checksum'])
        for unit in units:
            # Only drop units without any recorded change history.
            if unit.change_set.count() == 0:
                unit.delete()
                break
            print 'FAILED', unit
# Walk every translation in the database and deduplicate each one.
for translation in Translation.objects.all():
    remove_duplicates(translation)
#!/usr/bin/env python
import mechanize
import copy
# Target Weblate instance and the demo credentials used for the session test.
URL = 'https://demo.weblate.org/'
USER = 'demo'
PASS = 'demo'
def print_session_id(data=None):
    """Report authentication state and the current sessionid cookie.

    ``data`` is the HTML of the last response; the presence of the login
    URL in it is used as a heuristic for "not authenticated".  Reads the
    cookie jar of the global ``br`` mechanize browser.
    """
    if data is not None:
        if '/accounts/login/' in data:
            print '- not authenticated'
        else:
            print '- authenticated'
    # NOTE(review): reaches the cookie jar through mechanize internals
    # (_ua_handlers) — not a public API.
    for cookie in br._ua_handlers["_cookies"].cookiejar:
        if cookie.name == 'sessionid':
            print '- sessionid = {0}'.format(cookie.value)
            return
    print '- sessionid not set'
def home():
    """Fetch the homepage with the global browser and report session state."""
    print 'HOMEPAGE', URL
    response = br.open(URL)
    data = response.read()
    print_session_id(data)
def login(restore=False):
    """Log in via the first form on the login page.

    When ``restore`` is set, the previously backed-up cookie jar (global
    ``backup``) is swapped in right before submitting, to test how the
    server treats a pre-logout session cookie.
    """
    url = URL + "accounts/login/"
    print 'LOGIN', url
    br.open(URL + "accounts/login/")
    br.select_form(nr=0)
    br['username'] = USER
    br['password'] = PASS
    if restore:
        # Deliberately placed after the form is filled and before submit.
        br._ua_handlers["_cookies"].cookiejar = backup
    response = br.submit()
    data = response.read()
    print_session_id(data)
def logout():
    """Submit the logout form (second form on the current page)."""
    br.select_form(nr=1)
    # The form action is relative; append the logout path explicitly.
    br.form.action += 'accounts/logout/'
    print 'LOGOUT', br.form.action
    response = br.submit()
    data = response.read()
    print_session_id(data)
# Shared browser instance used by all the helpers above
br = mechanize.Browser()
br.set_handle_robots(False)

# Anonymous visit, then a regular login
home()
login()

# Backup cookies before logging out
jar = br._ua_handlers["_cookies"].cookiejar
backup = copy.deepcopy(jar)
logout()
home()

# Restore cookies
#print 'COOKIE RESTORE'
#br._ua_handlers["_cookies"].cookiejar = backup
#print_session_id()
# Open main page
#home()

# Log in again, restoring the pre-logout cookie jar just before submit
login(True)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment