Merge branch 'i18n' into singularity
scripts/__init__.py (new file, empty)

scripts/compile_lang.py (new file, 13 lines)
@@ -0,0 +1,13 @@
import os, glob
import msgfmt

import sys

script_dir = os.path.dirname(os.path.abspath(__file__))
locale_path = os.path.abspath(os.path.join(script_dir, '..', 'locale'))

for name in glob.iglob(locale_path + '/**'):
    if not os.path.isfile(name):
        path = os.path.join(locale_path, name, 'LC_MESSAGES', 'lang')
        sys.argv[1:] = [path + '.po']
        msgfmt.main()
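
Note: compile_lang.py drives the bundled msgfmt module through sys.argv, and msgfmt.make() derives each output path from its input, so every lang.po gains a sibling lang.mo. A minimal sketch of checking the result after a run (paths follow the layout above; how the script is wired into the build is an assumption):

    # Hypothetical check after running `python scripts/compile_lang.py`.
    import glob
    import os

    for po in glob.glob('locale/*/LC_MESSAGES/lang.po'):
        mo = os.path.splitext(po)[0] + '.mo'
        print(po, '->', 'compiled' if os.path.isfile(mo) else 'missing')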
scripts/dump_crowdin_progress.py (new file, 35 lines)
@@ -0,0 +1,35 @@
import requests
import os
import json
import wx
import sys

key = os.environ.get("CROWDIN_API_KEY", None)

if key is None:
    # If building from a forked PR, this is normal: secret variables are generally unavailable in those circumstances
    print("CROWDIN_API_KEY env variable not found, cannot fetch translation status.")
    sys.exit()

params = {
    'json': '',
    'key': key
}

resp = requests.get('https://api.crowdin.com/api/project/pyfa/status', params=params)
data = resp.json()

for x in data:
    x['code'] = x['code'].replace('-', '_')
    lang = wx.Locale.FindLanguageInfo(x['code'])
    if lang is None:
        print('Cannot find a match for ' + x['code'])
        continue
    x['canonical_name'] = lang.CanonicalName

data = {x['canonical_name']: x for x in data if 'canonical_name' in x}

with open("locale/progress.json", 'w') as file:
    file.seek(0)
    file.truncate()
    json.dump(data, file)
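
Note: the script keys the dump by wx canonical names (e.g. de_DE), with each value being Crowdin's per-language status entry. A minimal consumer sketch, assuming that layout; the translated_progress field name is an assumption about Crowdin's status payload:

    import json

    with open('locale/progress.json') as f:
        progress = json.load(f)

    for canonical_name, status in sorted(progress.items()):
        print(canonical_name, status.get('translated_progress', 'n/a'))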
@@ -9,74 +9,141 @@ diff which can then be used to assist in the updating.
 
 import sys
 import os
+import json
+import re
+import natsort
+from collections import OrderedDict
+from itertools import zip_longest
 
-# Phobos location
-phb_path = os.path.expanduser("path/to/phobos")
-
-try:
-    major = sys.version_info.major
-    minor = sys.version_info.minor
-except AttributeError:
-    major = sys.version_info[0]
-    minor = sys.version_info[1]
-if major != 2 or minor < 7:
-    sys.stderr.write('This application requires Python 2.7 to run, but {0}.{1} was used\n'.format(major, minor))
-    sys.exit()
-
 import argparse
-import os.path
 
-parser = argparse.ArgumentParser()
-parser.add_argument("-e", "--eve", dest="eve_path", help="Location of EVE directory", required=True)
-parser.add_argument("-c", "--cache", dest="cache_path", help="Location of EVE cache directory. If not specified, an attempt will be made to automatically determine the path.")
-parser.add_argument("-r", "--res", dest="res_path", help="Location of EVE shared resource cache. If not specified, an attempt will be made to automatically determine the path.")
-parser.add_argument("-d", "--dump", dest="dump_path", help="Location of Phobos JSON dump directory", required=True)
-parser.add_argument("-p", "--phobos", dest="phb_path", help="Location of Phobos, defaults to path noted in script", default=phb_path)
-parser.add_argument("-s", "--singularity", action="store_true", help="Singularity build")
+parser = argparse.ArgumentParser(description='This script extracts data from EVE client and writes it into JSON files')
+parser.add_argument(
+    '-e', '--eve', required=True, help='Path to EVE client\'s folder')
+parser.add_argument(
+    '-p', '--phobos', required=True, help="Location of Phobos")
+parser.add_argument(
+    '-s', '--server', default='tq', help='Server to pull data from. Default is "tq"',
+    choices=('tq', 'sisi', 'duality', 'thunderdome', 'serenity'))
+parser.add_argument(
+    '-j', '--json', required=True, help='Output folder for the JSON files')
+parser.add_argument(
+    '-t', '--translate', default='multi',
+    help='Attempt to translate strings into specified language. Default is "multi"',
+    choices=('de', 'en-us', 'es', 'fr', 'it', 'ja', 'ru', 'zh', 'multi'))
 
 args = parser.parse_args()
-eve_path = os.path.expanduser(args.eve_path)
-cache_path = os.path.expanduser(args.cache_path) if args.cache_path else None
-res_path = os.path.expanduser(args.res_path) if args.res_path else None
-dump_path = os.path.expanduser(args.dump_path)
-script_path = os.path.dirname(__file__)
-
-### Append Phobos to path
-sys.path.append(os.path.expanduser(args.phb_path))
-
-def header(text, subtext=None):
-    print()
-    print("* "*30)
-    print(text.center(60))
-    if subtext:
-        print(subtext.center(60))
-    print("* "*30)
-    print()
-
-header("Dumping Phobos Data", dump_path)
+# Expand home directory
+path_eve = os.path.expanduser(args.eve)
+path_json = os.path.expanduser(args.json)
+server_alias = args.server
+language = args.translate
 
-import reverence
+sys.path.append(os.path.expanduser(args.phobos))
+
 from flow import FlowManager
 from miner import *
-from translator import Translator
-from writer import *
+from util import ResourceBrowser, Translator
+from writer.base import BaseWriter
+from writer.json_writer import CustomEncoder
 
-rvr = reverence.blue.EVE(eve_path, cachepath=args.cache_path, sharedcachepath=res_path, server="singularity" if args.singularity else "tranquility")
-print("EVE Directory: {}".format(rvr.paths.root))
-print("Cache Directory: {}".format(rvr.paths.cache))
-print("Shared Resource Directory: {}".format(rvr.paths.sharedcache))
-
-pickle_miner = ResourcePickleMiner(rvr)
-trans = Translator(pickle_miner)
-bulkdata_miner = BulkdataMiner(rvr, trans)
-staticcache_miner = ResourceStaticCacheMiner(rvr, trans)
-miners = (
-    MetadataMiner(eve_path),
-    bulkdata_miner,
-    staticcache_miner,
-    TraitMiner(staticcache_miner, bulkdata_miner, trans),
-    SqliteMiner(rvr.paths.root, trans),
-    CachedCallsMiner(rvr, trans),
-    DynamicAttributesMiner(rvr),
-    pickle_miner
-)
-
-writers = (
-    JsonWriter(dump_path, indent=2),
-)
-
-list = "dgmexpressions,dgmattribs,dgmeffects,dgmtypeattribs,dgmtypeeffects,"\
-       "dgmunits,invcategories,invgroups,invmetagroups,invmetatypes,"\
-       "invtypes,mapbulk_marketGroups,phbmetadata,phbtraits,fsdTypeOverrides,"\
-       "evegroups,evetypes,evecategories,mapbulk_marketGroups,clonegrades,dynamicattributes"
-
-FlowManager(miners, writers).run(list, "en-us")
+
+class PyfaJsonWriter(BaseWriter):
+    """
+    Class which stores fetched data on a storage device
+    as JSON files.
+    """
+
+    def __init__(self, folder, indent=None, group=None):
+        self.base_folder = folder
+        self.indent = indent
+        self.group = group
+
+    @staticmethod
+    def __grouper(iterable, n, fillvalue=None):
+        args = [iter(iterable)] * n
+        return zip_longest(fillvalue=fillvalue, *args)
+
+    def write(self, miner_name, container_name, container_data):
+        # Create folder structure up to the path, if not created yet
+        folder = os.path.join(self.base_folder, self.__secure_name(miner_name))
+        if not os.path.exists(folder):
+            os.makedirs(folder, mode=0o755)
+
+        if type(container_data) == dict:
+            container_data = OrderedDict(natsort.natsorted(container_data.items()))
+
+        if self.group is None:
+            filepath = os.path.join(folder, u'{}.json'.format(self.__secure_name(container_name)))
+            self.__write_file(container_data, filepath)
+        else:
+            for i, group in enumerate(PyfaJsonWriter.__grouper(container_data, self.group)):
+                filepath = os.path.join(folder, u'{}.{}.json'.format(self.__secure_name(container_name), i))
+                if type(container_data) in (dict, OrderedDict):
+                    data = dict((k, container_data[k]) for k in group if k is not None)
+                else:
+                    data = [k for k in group if k is not None]
+                self.__write_file(data, filepath)
+
+    def __write_file(self, data, filepath):
+        data_str = json.dumps(
+            data,
+            ensure_ascii=False,
+            cls=CustomEncoder,
+            indent=self.indent,
+            # We're handling sorting in the customized encoder
+            sort_keys=False)
+        data_bytes = data_str.encode('utf8')
+        with open(filepath, 'wb') as f:
+            f.write(data_bytes)
+
+    def __secure_name(self, name):
+        """
+        As we're writing to disk, we should get rid of all
+        filesystem-specific symbols.
+        """
+        # Prefer the safe way: replace any character besides
+        # alphanumerics and a few special characters with
+        # an underscore
+        writer_safe_name = re.sub(r'[^\w\-.,() ]', '_', name, flags=re.UNICODE)
+        return writer_safe_name
+
+
+resource_browser = ResourceBrowser(eve_path=path_eve, server_alias=server_alias)
+
+pickle_miner = PickleMiner(resbrowser=resource_browser)
+trans = Translator(pickle_miner=pickle_miner)
+fsdlite_miner = FsdLiteMiner(resbrowser=resource_browser, translator=trans)
+fsdbinary_miner = FsdBinaryMiner(resbrowser=resource_browser, translator=trans)
+miners = [
+    MetadataMiner(resbrowser=resource_browser),
+    fsdlite_miner,
+    fsdbinary_miner,
+    TraitMiner(fsdlite_miner=fsdlite_miner, fsdbinary_miner=fsdbinary_miner, translator=trans),
+    pickle_miner]
+
+writers = [
+    PyfaJsonWriter(path_json, indent=2, group=5000)]
+
+filters = 'dogmaattributes,dogmaeffects,dogmaunits,dynamicitemattributes,marketgroups,metagroups,' \
+          'typedogma,requiredskillsfortypes,clonegrades,dbuffcollections,evecategories,evegroups,' \
+          'evetypes,traits,metadata'
+
+FlowManager(miners, writers).run(filter_string=filters, language=language)
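
Note: group=5000 makes PyfaJsonWriter split each container across numbered files; __grouper pads the final chunk with None, and write() filters the padding back out. A standalone sketch of that chunking with illustrative numbers (the container name and entry count are made up):

    from itertools import zip_longest

    def grouper(iterable, n, fillvalue=None):
        args = [iter(iterable)] * n
        return zip_longest(*args, fillvalue=fillvalue)

    entries = range(12000)  # pretend container with 12000 keys
    for i, group in enumerate(grouper(entries, 5000)):
        kept = [k for k in group if k is not None]
        print('evetypes.{}.json: {} entries'.format(i, len(kept)))
    # evetypes.0.json: 5000 / evetypes.1.json: 5000 / evetypes.2.json: 2000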
@@ -8,6 +8,10 @@ import yaml
 import subprocess
 import os
 
+def rreplace(s, old, new, occurrence):
+    li = s.rsplit(old, occurrence)
+    return new.join(li)
+
 
 with open("version.yml", 'r+') as file:
     data = yaml.load(file, Loader=yaml.SafeLoader)
@@ -17,6 +21,7 @@ with open("version.yml", 'r+') as file:
     # python's versioning spec doesn't handle the same format git describe outputs, so convert it.
     label = os.environ["PYFA_VERSION"].split('-') if "PYFA_VERSION" in os.environ else subprocess.check_output(["git", "describe", "--tags"]).strip().decode().split('-')
     label = '-'.join(label[:-2])+'+'+'-'.join(label[-2:]) if len(label) > 1 else label[0]
+    label = rreplace(label, '+', '-', label.count('+') - 1)
     print(label)
     data['version'] = label
     yaml.dump(data, file, default_flow_style=False)
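
Note: the new rreplace call guards against tags that themselves contain '+'; after the join, only the first '+' may remain for the version to stay PEP 440-parsable. A worked example with an illustrative `git describe --tags` output:

    def rreplace(s, old, new, occurrence):
        li = s.rsplit(old, occurrence)
        return new.join(li)

    label = 'v2.19.0-123-gabcdef0'.split('-')  # ['v2.19.0', '123', 'gabcdef0']
    label = '-'.join(label[:-2]) + '+' + '-'.join(label[-2:]) if len(label) > 1 else label[0]
    print(label)  # v2.19.0+123-gabcdef0
    # occurrence is 0 here, so this is a no-op; with two '+' signs it would
    # rewrite all but the first one to '-'
    label = rreplace(label, '+', '-', label.count('+') - 1)
    print(label)  # v2.19.0+123-gabcdef0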
scripts/msgfmt.py (new file, 231 lines)
@@ -0,0 +1,231 @@
#! /usr/bin/env python3
# Written by Martin v. Löwis <loewis@informatik.hu-berlin.de>

"""Generate binary message catalog from textual translation description.

This program converts a textual Uniforum-style message catalog (.po file) into
a binary GNU catalog (.mo file). This is essentially the same function as the
GNU msgfmt program, however, it is a simpler implementation.

Usage: msgfmt.py [OPTIONS] filename.po

Options:
    -o file
    --output-file=file
        Specify the output file to write to. If omitted, output will go to a
        file named filename.mo (based off the input file name).

    -h
    --help
        Print this message and exit.

    -V
    --version
        Display version information and exit.
"""

import os
import sys
import ast
import getopt
import struct
import array
from email.parser import HeaderParser

__version__ = "1.1"

MESSAGES = {}


def usage(code, msg=''):
    print(__doc__, file=sys.stderr)
    if msg:
        print(msg, file=sys.stderr)
    sys.exit(code)


def add(id, str, fuzzy):
    "Add a non-fuzzy translation to the dictionary."
    global MESSAGES
    if not fuzzy and str:
        MESSAGES[id] = str


def generate():
    "Return the generated output."
    global MESSAGES
    # the keys are sorted in the .mo file
    keys = sorted(MESSAGES.keys())
    offsets = []
    ids = strs = b''
    for id in keys:
        # For each string, we need size and file offset. Each string is NUL
        # terminated; the NUL does not count into the size.
        offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id])))
        ids += id + b'\0'
        strs += MESSAGES[id] + b'\0'
    output = ''
    # The header is 7 32-bit unsigned integers. We don't use hash tables, so
    # the keys start right after the index tables.
    # translated string.
    keystart = 7*4+16*len(keys)
    # and the values start after the keys
    valuestart = keystart + len(ids)
    koffsets = []
    voffsets = []
    # The string table first has the list of keys, then the list of values.
    # Each entry has first the size of the string, then the file offset.
    for o1, l1, o2, l2 in offsets:
        koffsets += [l1, o1+keystart]
        voffsets += [l2, o2+valuestart]
    offsets = koffsets + voffsets
    output = struct.pack("Iiiiiii",
                         0x950412de,       # Magic
                         0,                 # Version
                         len(keys),        # # of entries
                         7*4,              # start of key index
                         7*4+len(keys)*8,  # start of value index
                         0, 0)             # size and offset of hash table
    output += array.array("i", offsets).tobytes()
    output += ids
    output += strs
    return output


def make(filename, outfile):
    ID = 1
    STR = 2

    # Compute .mo name from .po name and arguments
    if filename.endswith('.po'):
        infile = filename
    else:
        infile = filename + '.po'
    if outfile is None:
        outfile = os.path.splitext(infile)[0] + '.mo'

    try:
        lines = open(infile, 'rb').readlines()
    except IOError as msg:
        print(msg, file=sys.stderr)
        sys.exit(1)

    section = None
    fuzzy = 0

    # Start off assuming Latin-1, so everything decodes without failure,
    # until we know the exact encoding
    encoding = 'latin-1'

    # Parse the catalog
    lno = 0
    for l in lines:
        l = l.decode(encoding)
        lno += 1
        # If we get a comment line after a msgstr, this is a new entry
        if l[0] == '#' and section == STR:
            add(msgid, msgstr, fuzzy)
            section = None
            fuzzy = 0
        # Record a fuzzy mark
        if l[:2] == '#,' and 'fuzzy' in l:
            fuzzy = 1
        # Skip comments
        if l[0] == '#':
            continue
        # Now we are in a msgid section, output previous section
        if l.startswith('msgid') and not l.startswith('msgid_plural'):
            if section == STR:
                add(msgid, msgstr, fuzzy)
                if not msgid:
                    # See whether there is an encoding declaration
                    p = HeaderParser()
                    charset = p.parsestr(msgstr.decode(encoding)).get_content_charset()
                    if charset:
                        encoding = charset
            section = ID
            l = l[5:]
            msgid = msgstr = b''
            is_plural = False
        # This is a message with plural forms
        elif l.startswith('msgid_plural'):
            if section != ID:
                print('msgid_plural not preceded by msgid on %s:%d' % (infile, lno),
                      file=sys.stderr)
                sys.exit(1)
            l = l[12:]
            msgid += b'\0'  # separator of singular and plural
            is_plural = True
        # Now we are in a msgstr section
        elif l.startswith('msgstr'):
            section = STR
            if l.startswith('msgstr['):
                if not is_plural:
                    print('plural without msgid_plural on %s:%d' % (infile, lno),
                          file=sys.stderr)
                    sys.exit(1)
                l = l.split(']', 1)[1]
                if msgstr:
                    msgstr += b'\0'  # Separator of the various plural forms
            else:
                if is_plural:
                    print('indexed msgstr required for plural on %s:%d' % (infile, lno),
                          file=sys.stderr)
                    sys.exit(1)
                l = l[6:]
        # Skip empty lines
        l = l.strip()
        if not l:
            continue
        l = ast.literal_eval(l)
        if section == ID:
            msgid += l.encode(encoding)
        elif section == STR:
            msgstr += l.encode(encoding)
        else:
            print('Syntax error on %s:%d' % (infile, lno), \
                  'before:', file=sys.stderr)
            print(l, file=sys.stderr)
            sys.exit(1)
    # Add last entry
    if section == STR:
        add(msgid, msgstr, fuzzy)

    # Compute output
    output = generate()

    try:
        open(outfile, "wb").write(output)
    except IOError as msg:
        print(msg, file=sys.stderr)


def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hVo:',
                                   ['help', 'version', 'output-file='])
    except getopt.error as msg:
        usage(1, msg)

    outfile = None
    # parse options
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage(0)
        elif opt in ('-V', '--version'):
            print("msgfmt.py", __version__)
            sys.exit(0)
        elif opt in ('-o', '--output-file'):
            outfile = arg
    # do it
    if not args:
        print('No input file given', file=sys.stderr)
        print("Try `msgfmt --help' for more information.", file=sys.stderr)
        return

    for filename in args:
        make(filename, outfile)


if __name__ == '__main__':
    main()
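
Note: with the catalogs compiled, the standard library can load them directly. A minimal round-trip sketch, assuming compile_lang.py has produced locale/de/LC_MESSAGES/lang.mo (the language code and msgid are illustrative):

    import gettext

    trans = gettext.translation('lang', localedir='locale', languages=['de'])
    print(trans.gettext('Fitting'))  # the German msgstr, or 'Fitting' if untranslated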
scripts/osx-translations.sh (new file, 3 lines)
@@ -0,0 +1,3 @@
#!/usr/bin/env bash
brew link --force gettext
find locale/ -type f -name "*.po" -exec msgen "{}" -o "{}" \;
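
Note: msgen fills every empty msgstr with a copy of its msgid, producing an English fallback catalog, so each .po compiles cleanly even before translators have touched it; the forced link is needed because Homebrew keeps gettext keg-only (unlinked) by default.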