summary refs log tree commit diff
path: root/to-wiki
diff options
context:
space:
mode:
Diffstat (limited to 'to-wiki')
-rwxr-xr-x  to-wiki/getalltitles.py  26
-rwxr-xr-x  to-wiki/wikiconv2.py     15
2 files changed, 16 insertions, 25 deletions
diff --git a/to-wiki/getalltitles.py b/to-wiki/getalltitles.py
index 8db9bcb457..71f5aed325 100755
--- a/to-wiki/getalltitles.py
+++ b/to-wiki/getalltitles.py
@@ -137,18 +137,18 @@ def parsexhp(filename):
title = title.strip('_')
title = make_unique(title)
alltitles.append(title)
- print filename + ';' + title + ';' + readable_title
-
-if len(sys.argv) < 2:
- print "getalltitles.py <directory>"
- print "e.g. getalltitles.py source/text/scalc"
- sys.exit(1)
-
-pattern = "xhp"
-
-for root, dirs, files in os.walk(sys.argv[1]):
- for i in files:
- if i.find(pattern) >= 0:
- parsexhp(root+"/"+i)
+ return((filename, title, readable_title))
+
+# Main Function
+def gettitles(path):
+ pattern = "xhp"
+ alltitles = []
+ for root, dirs, files in os.walk(path):
+ for i in files:
+ if i.find(pattern) >= 0:
+ t = parsexhp(root+"/"+i)
+ if t is not None:
+ alltitles.append(t)
+ return alltitles
# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/to-wiki/wikiconv2.py b/to-wiki/wikiconv2.py
index f6569b85e2..b239419293 100755
--- a/to-wiki/wikiconv2.py
+++ b/to-wiki/wikiconv2.py
@@ -1371,15 +1371,6 @@ class XhpParser(ParserBase):
ParserBase.__init__(self, filename, follow_embed, embedding_app,
current_app, wiki_page_name, lang, XhpFile(), buf.encode('utf-8'))
-def loadallfiles(filename):
- global titles
- titles = []
- file = codecs.open(filename, "r", "utf-8")
- for line in file:
- title = line.split(";", 2)
- titles.append(title)
- file.close()
-
class WikiConverter(Thread):
def __init__(self, inputfile, wiki_page_name, lang, outputfile):
Thread.__init__(self)
@@ -1441,19 +1432,19 @@ def write_redirects():
write_link(r, target)
# Main Function
-def convert(generate_redirects, lang, po_root):
+def convert(title_data, generate_redirects, lang, po_root):
if lang == '':
print 'Generating the main wiki pages...'
else:
print 'Generating the wiki pages for language %s...'% lang
+ global titles
+ titles = [t for t in title_data]
global redirects
redirects = []
global images
images = set()
- loadallfiles("alltitles.csv")
-
if lang != '':
sys.stderr.write('Using localizations from "%s"\n'% po_root)
if not load_localization_data(po_root):