diff options
 help-to-wiki.py         | 18 +++++----
 to-wiki/getalltitles.py | 26 +++++++-------
 to-wiki/wikiconv2.py    | 15 +++++---
 3 files changed, 28 insertions(+), 31 deletions(-)
diff --git a/help-to-wiki.py b/help-to-wiki.py index 19d58ed56e..448a1dac06 100755 --- a/help-to-wiki.py +++ b/help-to-wiki.py @@ -11,9 +11,7 @@ import sys, os, getopt, signal sys.path.append(sys.path[0]+"/to-wiki") import wikiconv2 - -# FIXME do proper modules from getalltitles & wikiconv2 -# [so far this is in fact just a shell thing] +import getalltitles def usage(): print ''' @@ -24,6 +22,7 @@ Converts .xhp files into a wiki -h, --help - this help -n, --no-translations - generate only English pages -r, --redirects - generate also redirect pages +-t, --title-savefile - save the title file Most probably, you want to generate the redirects only once when you initially populate the wiki, and then only update the ones that broke.\n''' @@ -68,12 +67,13 @@ langs = ['', 'ast', 'bg', 'bn', 'bn-IN', 'ca', 'cs', 'da', 'de', \ # Argument handling try: - opts, args = getopt.getopt(sys.argv[1:], 'hnr', ['help', 'no-translations', 'redirects']) + opts, args = getopt.getopt(sys.argv[1:], 'hnrt', ['help', 'no-translations', 'redirects', 'title-savefile']) except getopt.GetoptError: usage() sys.exit(1) generate_redirects = False +title_savefile = False for opt, arg in opts: if opt in ('-h', '--help'): usage() @@ -82,6 +82,8 @@ for opt, arg in opts: langs = [''] elif opt in ('-r', '--redirects'): generate_redirects = True + elif opt in ('-t', '--title-savefile'): + title_savefile = True def signal_handler(signal, frame): sys.stderr.write( 'Exiting...\n' ) @@ -93,7 +95,11 @@ signal.signal(signal.SIGINT, signal_handler) create_wiki_dirs() print "Generating the titles..." 
-os.system( "python to-wiki/getalltitles.py source/text > alltitles.csv" ) +title_data = getalltitles.gettitles("source/text") +if title_savefile: + with open ('alltitles.csv', 'w') as f: + for d in title_data: + f.write('%s;%s;%s\n' % (d[0], d[1], d[2])) try: po_path = args[0] @@ -103,6 +109,6 @@ except: # do the work for lang in langs: - wikiconv2.convert(generate_redirects, lang, '%s/%s/helpcontent2/source'% (po_path, lang)) + wikiconv2.convert(title_data, generate_redirects, lang, '%s/%s/helpcontent2/source'% (po_path, lang)) # vim:set shiftwidth=4 softtabstop=4 expandtab: diff --git a/to-wiki/getalltitles.py b/to-wiki/getalltitles.py index 8db9bcb457..71f5aed325 100755 --- a/to-wiki/getalltitles.py +++ b/to-wiki/getalltitles.py @@ -137,18 +137,18 @@ def parsexhp(filename): title = title.strip('_') title = make_unique(title) alltitles.append(title) - print filename + ';' + title + ';' + readable_title - -if len(sys.argv) < 2: - print "getalltitles.py <directory>" - print "e.g. getalltitles.py source/text/scalc" - sys.exit(1) - -pattern = "xhp" - -for root, dirs, files in os.walk(sys.argv[1]): - for i in files: - if i.find(pattern) >= 0: - parsexhp(root+"/"+i) + return((filename, title, readable_title)) + +# Main Function +def gettitles(path): + pattern = "xhp" + alltitles = [] + for root, dirs, files in os.walk(path): + for i in files: + if i.find(pattern) >= 0: + t = parsexhp(root+"/"+i) + if t is not None: + alltitles.append(t) + return alltitles # vim:set shiftwidth=4 softtabstop=4 expandtab: diff --git a/to-wiki/wikiconv2.py b/to-wiki/wikiconv2.py index f6569b85e2..b239419293 100755 --- a/to-wiki/wikiconv2.py +++ b/to-wiki/wikiconv2.py @@ -1371,15 +1371,6 @@ class XhpParser(ParserBase): ParserBase.__init__(self, filename, follow_embed, embedding_app, current_app, wiki_page_name, lang, XhpFile(), buf.encode('utf-8')) -def loadallfiles(filename): - global titles - titles = [] - file = codecs.open(filename, "r", "utf-8") - for line in file: - title = 
line.split(";", 2) - titles.append(title) - file.close() - class WikiConverter(Thread): def __init__(self, inputfile, wiki_page_name, lang, outputfile): Thread.__init__(self) @@ -1441,19 +1432,19 @@ def write_redirects(): write_link(r, target) # Main Function -def convert(generate_redirects, lang, po_root): +def convert(title_data, generate_redirects, lang, po_root): if lang == '': print 'Generating the main wiki pages...' else: print 'Generating the wiki pages for language %s...'% lang + global titles + titles = [t for t in title_data] global redirects redirects = [] global images images = set() - loadallfiles("alltitles.csv") - if lang != '': sys.stderr.write('Using localizations from "%s"\n'% po_root) if not load_localization_data(po_root): |