| author | Norbert Thiebaud <nthiebaud@gmail.com> | 2012-09-01 09:50:26 -0500 |
| --- | --- | --- |
| committer | Norbert Thiebaud <nthiebaud@gmail.com> | 2012-10-16 11:09:27 -0500 |
| commit | a4473e06b56bfe35187e302754f6baaa8d75e54f (patch) | |
| tree | fd17c2dc5dbf56469de2eaa851eda4087f385313 /en/pythonpath | |
| parent | 0493c1b142b0c498931e8ff5d6460ef852026d20 (diff) | |
move dictionaries structure one directory up
Change-Id: I70388bf6b95d8692cc6f25fc5a9c7baf3a675710
Diffstat (limited to 'en/pythonpath')
| -rw-r--r-- | en/pythonpath/lightproof_en.py | 3 |
| -rw-r--r-- | en/pythonpath/lightproof_handler_en.py | 119 |
| -rw-r--r-- | en/pythonpath/lightproof_impl_en.py | 336 |
| -rw-r--r-- | en/pythonpath/lightproof_opts_en.py | 4 |
4 files changed, 462 insertions, 0 deletions
diff --git a/en/pythonpath/lightproof_en.py b/en/pythonpath/lightproof_en.py
new file mode 100644
index 0000000..3108489
--- /dev/null
+++ b/en/pythonpath/lightproof_en.py
@@ -0,0 +1,3 @@
+# -*- encoding: UTF-8 -*-
+dic = [[u'(?u)(?<![-\\w\u2013.,\xad])and and(?![-\\w\u2013\xad])', u'and', u'Did you mean:', False, 0], [u'(?u)(?<![-\\w\u2013.,\xad])or or(?![-\\w\u2013\xad])', u'or', u'Did you mean:', False, 0], [u'(?u)(?<![-\\w\u2013.,\xad])for for(?![-\\w\u2013\xad])', u'for', u'Did you mean:', False, 0], [u'(?u)(?<![-\\w\u2013.,\xad])the the(?![-\\w\u2013\xad])', u'the', u'Did you mean:', False, 0], [u'(?iu)(?<![-\\w\u2013.,\xad])[Yy][Ii][Nn][Gg] [Aa][Nn][Dd] [Yy][Aa][Nn][Gg](?![-\\w\u2013\xad])', u'yin and yang', u'Did you mean:', False, 0], [u'(?iu)(?<![-\\w\u2013.,\xad])[Ss][Cc][Oo][Tt] [Ff][Rr][Ee][Ee](?![-\\w\u2013\xad])', u'scot-free\\nscotfree', u'Did you mean:', False, 0], [u"(?iu)(?<![-\\w\u2013.,\xad])([Yy][Oo][Uu][Rr]|[Hh][Ee][Rr]|[Oo][Uu][Rr]|[Tt][Hh][Ee][Ii][Rr])['\u2019][Ss](?![-\\w\u2013\xad])", u'\\1s', u'Possessive pronoun: \\n http://en.wikipedia.org/wiki/Possessive_pronoun', False, 0], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<a_1>[Aa])n(?P<_>[ ][\'\u2018"\u201c]?)(?P<vow_1>[aeiouAEIOU]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'\\g<a_1>\\g<_>\\g<vow_1>\\g<etc_1>', u'Did you mean: \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'm.group("vow_1") in aA or m.group("vow_1").lower() in aA', 0], [u'(?u)(?<![-\\w\u2013.,\xad])a(?P<_>[ ][\'\u2018"\u201c]?)(?P<vow_1>[aeiouAEIOU]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'an\\g<_>\\g<vow_1>\\g<etc_1>', u'Bad article? \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'(m.group("vow_1") <> m.group("vow_1").upper()) and not (m.group("vow_1") in aA or m.group("vow_1").lower() in aA) and spell(LOCALE,m.group("vow_1"))', 0], [u'(?u)(?<![-\\w\u2013.,\xad])a(?P<_>[ ][\'\u2018"\u201c]?)(?P<con_1>[bcdfghj-np-tv-zBCDFGHJ-NP-TV-Z]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'an\\g<_>\\g<con_1>\\g<etc_1>', u'Did you mean: \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'm.group("con_1") in aAN or m.group("con_1").lower() in aAN', 0], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<a_1>[Aa])n(?P<_>[ ][\'\u2018"\u201c]?)(?P<con_1>[bcdfghj-np-tv-zBCDFGHJ-NP-TV-Z]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'\\g<a_1>\\g<_>\\g<con_1>\\g<etc_1>', u'Bad article? \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'(m.group("con_1") <> m.group("con_1").upper()) and not (m.group("con_1") in aA or m.group("con_1").lower() in aAN) and not m.group("con_1") in aB and spell(LOCALE,m.group("con_1"))', 0], [u'(?u)((?<=[!?.] )|^)A(?P<_>[ ][\'\u2018"\u201c]?)(?P<vow_1>[aeiouAEIOU]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'An\\g<_>\\g<vow_1>\\g<etc_1>', u'Bad article? \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'(m.group("vow_1") <> m.group("vow_1").upper()) and not (m.group("vow_1") in aA or m.group("vow_1").lower() in aA) and spell(LOCALE,m.group("vow_1"))', 0], [u'(?u)((?<=[!?.] )|^)A(?P<_>[ ][\'\u2018"\u201c]?)(?P<con_1>[bcdfghj-np-tv-zBCDFGHJ-NP-TV-Z]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'An\\g<_>\\g<con_1>\\g<etc_1>', u'Did you mean: \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'm.group("con_1") in aAN or m.group("con_1").lower() in aAN', 0], [u'(?u)(?<![-\\w\u2013.,\xad])a(?P<_>[ ][\'\u2018"\u201c]?)(?P<nvow_1>(8[0-9]*|1[18](000)*)(th)?)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'an\\g<_>\\g<nvow_1>\\g<etc_1>', u'Did you mean: \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', False, 0], [u'(?u)((?<=[!?.] )|^)A(?P<_>[ ][\'\u2018"\u201c]?)(?P<nvow_1>(8[0-9]*|1[18](000)*)(th)?)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'An\\g<_>\\g<nvow_1>\\g<etc_1>', u'Did you mean: \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', False, 0], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<a_1>[Aa])n(?P<_>[ ][\'\u2018"\u201c]?)(?P<ncon_1>[0-79][0-9]*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'\\g<a_1>\\g<_>\\g<ncon_1>\\g<etc_1>', u'Did you mean: \\n http://en.wikipedia.org/wiki/English_articles#Distinction_between_a_and_an', u'not m.group("ncon_1")[:2] in ["11", "18"]', 0], [u'(?u)(?<![-\\w\u2013.,\xad])(^)(?P<low_1>[a-z]+)(?![-\\w\u2013\xad])', u'= m.group("low_1").capitalize()', u'Missing capitalization?', u'paralcap.search(TEXT) and not abbrev.search(TEXT)', 0], [u'(?u)((?<=[!?.] )|^)(?P<low_1>[a-z]+)(?![-\\w\u2013\xad])', u'= m.group("low_1").capitalize()', u'Missing capitalization?', u'option(LOCALE,"cap") and not abbrev.search(TEXT)', 0], [u'(?u) ([.?!,:;)\u201d\\]])\\b', u'\\1 ', u'Reversed space and punctuation?', False, 0], [u'(?u) +[.]', u'.', u'Extra space before the period?', u'LOCALE.Country == "US"', 0], [u'(?u) +[.]', u'.', u'Extra space before the full stop?', u'LOCALE.Country != "US"', 0], [u'(?u) +([?!,:;)\u201d\\]])', u'\\1', u'= "Extra space before the " + punct[m.group(1)] + "?"', False, 0], [u'(?u)([([\u201c]) ', u'\\1', u'= "Extra space after the " + punct[m.group(1)] + "?"', False, 0], [u'(?u)\\b(---?| --? )\\b', u' \u2013 \\n\u2014', u'En dash or em dash:', u'not option(LOCALE,"ndash") and not option(LOCALE,"mdash")', 0], [u'(?u)\\b(---?| --? |\u2014)\\b', u' \u2013 ', u'En dash:', u'option(LOCALE,"ndash") and not option(LOCALE,"mdash")', 0], [u'(?u)\\b(---?| --? | \u2013 )\\b', u'\u2014', u'Em dash:', u'option(LOCALE,"mdash")', 0], [u'(?u)(?P<number_1>\\d+([.]\\d+)?)(x| x )(?P<number_2>\\d+([.]\\d+)?)', u'\\g<number_1>\xd7\\g<number_2>', u'Multiplication sign. \\n http://en.wikipedia.org/wiki/Multiplication_sign', u'option(LOCALE,"times")', 0], [u'(?u)(?P<Abc_1>[a-zA-Z]+)(?P<pun_1>[?!,:;%\u2030\u2031\u02da\u201c\u201d\u2018])(?P<Abc_2>[a-zA-Z]+)', u'\\g<Abc_1>\\g<pun_1> \\g<Abc_2>', u'Missing space?', False, 0], [u'(?u)(?P<abc_1>[a-z]+)[.](?P<ABC_1>[A-Z]+)', u'\\g<abc_1>. \\g<ABC_1>', u'Missing space?', False, 0], [u'(?u)[)]', u'', u'Extra closing parenthesis?', u'option(LOCALE,"pair") and not "(" in TEXT', 0], [u'(?u)[(]', u'', u'Extra opening parenthesis?', u'option(LOCALE,"pair") and TEXT[-1] in u"?!;:\u201d\u2019" and not ")" in TEXT', 0], [u'(?u)(?<![0-9])\u201d', u'', u'Extra quotation mark?', u'option(LOCALE,"pair") and not u"\u201c" in TEXT', 0], [u'(?u)(?<=[0-9])\u201d', u'\u2033\\n', u'Bad double prime or extra quotation mark?', u'option(LOCALE,"apostrophe") and not u"\u201c" in TEXT', 0], [u'(?u)\u201c', u'', u'Extra quotation mark?', u'option(LOCALE,"pair") and TEXT[-1] in u"?!;:\u201d\u2019" and not u"\u201d" in TEXT', 0], [u'(?u)[.]{3}', u'\u2026', u'Ellipsis.', u'option(LOCALE,"ellipsis")', 0], [u'(?u)\\b {2,3}(\\b|$)', u'\\1 ', u'Extra space.', u'option(LOCALE,"spaces")', 0], [u'(?u)(^|\\b|(?P<pun_1>[?!,:;%\u2030\u2031\u02da\u201c\u201d\u2018])|[.]) {2,3}(\\b|$)', u'\\1 ', u'Extra space.', u'option(LOCALE,"spaces2")', 0], [u'(?u)(^|\\b|(?P<pun_1>[?!,:;%\u2030\u2031\u02da\u201c\u201d\u2018])|[.]) {4,}(\\b|$)', u'\\1 \\n\t', u'Change multiple spaces to a single space or a tabulator:', u'option(LOCALE,"spaces3")', 0], [u'(?iu)[\\"\u201c\u201d\u201f\u201e]((?P<abc_1>[a-zA-Z]+)[^\\"\u201c\u201d\u201f\u201e]*)[\\"\u201c\u201f]', u'\u201c\\1\u201d', u'Quotation marks.', u'option(LOCALE,"quotation")', 0], [u'(?iu)[\\"\u201d\u201f\u201e]((?P<abc_1>[a-zA-Z]+)[^\\"\u201c\u201d\u201f\u201e]*)[\\"\u201c\u201d\u201f]', u'\u201c\\1\u201d', u'Quotation marks.', u'option(LOCALE,"quotation")', 0], [u"(?iu)'(?P<abc_1>[a-zA-Z]+)'", u'\u2018\\g<abc_1>\u2019', u'Quotation marks.', u'option(LOCALE,"apostrophe")', 0], [u'(?iu)[\\"\u201d\u201f\u201e]((?P<abc_1>[a-zA-Z]+)[^\\"\u201c\u201d\u201f\u201e]*)[\\"\u201c\u201d\u201f]', u'\u201c\\1\u201d', u'Quotation marks.', u'option(LOCALE,"apostrophe")', 0], [u"(?iu)(?P<Abc_1>[a-zA-ZA-Z]+)'(?P<w_1>\\w*)", u'\\g<Abc_1>\u2019\\g<w_1>', u'Replace typewriter apostrophe or quotation mark:', u'option(LOCALE,"apostrophe")', 0], [u"(?u)(?<= )'(?P<Abc_1>[a-zA-Z]+)", u'\u2018\\g<Abc_1>\\n\u2019\\g<Abc_1>', u'Replace typewriter quotation mark or apostrophe:', u'option(LOCALE,"apostrophe")', 0], [u"(?u)^'(?P<Abc_1>[a-zA-Z]+)", u'\u2018\\g<Abc_1>\\n\u2019\\g<Abc_1>', u'Replace typewriter quotation mark or apostrophe:', u'option(LOCALE,"apostrophe")', 0], [u'(?u)\\b(?P<d2_1>\\d\\d)(?P<d_1>\\d\\d\\d)\\b', u'\\g<d2_1>,\\g<d_1>\\n\\g<d2_1>\u202f\\g<d_1>', u'Use thousand separator (common or ISO).', u'option(LOCALE,"numsep")', 0], [u'(?u)\\b(?P<D_1>\\d{1,3})(?P<d_1>\\d\\d\\d)(?P<d_2>\\d\\d\\d)\\b', u'\\g<D_1>,\\g<d_1>,\\g<d_2>\\n\\g<D_1>\u202f\\g<d_1>\u202f\\g<d_2>', u'Use thousand separators (common or ISO).', u'option(LOCALE,"numsep")', 0], [u'(?u)\\b(?P<D_1>\\d{1,3})(?P<d_1>\\d\\d\\d)(?P<d_2>\\d\\d\\d)(?P<d_3>\\d\\d\\d)\\b', u'\\g<D_1>,\\g<d_1>,\\g<d_2>,\\g<d_3>\\n\\g<D_1>\u202f\\g<d_1>\u202f\\g<d_2>\u202f\\g<d_3>', u'Use thousand separators (common or ISO).', u'option(LOCALE,"numsep")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<Abc_1>[a-zA-Z]+) \\1(?![-\\w\u2013\xad])', u'\\g<Abc_1>', u'Word duplication?', u'option(LOCALE,"dup")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([Tt])his (?P<abc_1>[a-z]+)(?![-\\w\u2013\xad])', u'\\1hese \\g<abc_1>\\n\\1his, \\g<abc_1>', u'Did you mean:', u'option(LOCALE,"grammar") and morph(LOCALE,m.group("abc_1"), "Ns")', 0], [u"(?u)(?<![-\\w\u2013.,\xad])with it['\u2019]s(?![-\\w\u2013\xad])", u'with its\\nwith, it\u2019s', u'Did you mean:', u'option(LOCALE,"grammar")', 0], [u"(?iu)(?<![-\\w\u2013.,\xad])([Ii][Tt]|[Ss]?[Hh][Ee]) [Dd][Oo][Nn]['\u2019][Tt](?![-\\w\u2013\xad])", u'\\1 doesn\u2019t', u'Did you mean:', u'option(LOCALE,"grammar")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) (\xb0F|Fahrenheit)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "F", "C", u" \xb0C", ".", ",")', u'Convert to Celsius:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) (\xb0C|Celsius)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "C", "F", u" \xb0F", ".", ",")', u'Convert to Fahrenheit:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) (ft|foot|feet)(?! [1-9])(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "ft", "cm", " cm", ".", ",") + "\\n" + measurement(m.group(1), "ft", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) ft[.]? ([0-9]+(?: 1/2| ?\xbd)?) in(?![-\\w\u2013\xad])', u'= measurement(m.group(1) + "*12+" + m.group(2), "in", "cm", " cm", ".", ",") + "\\n" + measurement(m.group(1) + "*12+" + m.group(2), "in", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) in(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "in", "mm", " mm", ".", ",") + "\\n" + measurement(m.group(1), "in", "cm", " cm", ".", ",") + "\\n" + measurement(m.group(1), "in", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) mm(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "mm", "in", " in", ".", ",")', u'Convert from metric:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) cm(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "cm", "in", " in", ".", ",") + "\\n" + measurement(m.group(1), "cm", "ft", " ft", ".", ",")', u'Convert from metric:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) (m|meter|metre)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "m", "in", " in", ".", ",") + "\\n" + measurement(m.group(1), "m", "ft", " ft", ".", ",") + "\\n" + measurement(m.group(1), "m", "mi", " mi", ".", ",")', u'Convert from metric:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) miles?(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "mi", "m", " m", ".", ",") + "\\n" + measurement(m.group(1), "mi", "km", " km", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) km(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "km", "mi", " mi", ".", ",")', u'Convert to miles:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (yd|yards?)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "yd", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (gal(lons?)?)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "gal", "l", " l", ".", ",") + "\\n" + measurement(m.group(1), "uk_gal", "l", " l (in UK)", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (pint)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "pt", "dl", " dl", ".", ",") + "\\n" + measurement(m.group(1), "uk_pt", "dl", " dl (in UK)", ".", ",") + "\\n" + measurement(m.group(1), "pt", "l", " l", ".", ",") + "\\n" + measurement(m.group(1), "uk_pt", "l", " l (in UK)", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (l|L|litres?|liters?)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "l", "gal", " gal", ".", ",") + "\\n" + measurement(m.group(1), "l", "gal", " gal (in UK)", ".", ",")', u'Convert to gallons:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) lbs?[.]?(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "lbm", "kg", " kg", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) kg[.]?(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "kg", "lbm", " lb", ".", ",")', u'Convert to pounds:', u'option(LOCALE,"nonmetric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) mph(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "mph", "km/h", " km/h", ".", ",")', u'Convert to km/hour:', u'option(LOCALE,"metric")', 0], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) km/h(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "km/h", "mph", " mph", ".", ",")', u'Convert to miles/hour:', u'option(LOCALE,"nonmetric")', 0]]
+
diff --git a/en/pythonpath/lightproof_handler_en.py b/en/pythonpath/lightproof_handler_en.py
new file mode 100644
index 0000000..f69ccf1
--- /dev/null
+++ b/en/pythonpath/lightproof_handler_en.py
@@ -0,0 +1,119 @@
+import uno
+import unohelper
+import lightproof_opts_en
+from lightproof_impl_en import pkg
+
+from com.sun.star.lang import XServiceInfo
+from com.sun.star.awt import XContainerWindowEventHandler
+
+# options
+options = {}
+
+def load(context):
+    try:
+        l = LightproofOptionsEventHandler(context)
+        for i in lightproof_opts_en.lopts:
+            l.load(i)
+    except:
+        pass
+
+def get_option(page, option):
+    try:
+        return options[page + "," + option]
+    except:
+        try:
+            return options[page[:2] + "," + option]
+        except:
+            return 0
+
+def set_option(page, option, value):
+    options[page + "," + option] = int(value)
+
+class LightproofOptionsEventHandler( unohelper.Base, XServiceInfo, XContainerWindowEventHandler ):
+    def __init__( self, ctx ):
+        p = uno.createUnoStruct( "com.sun.star.beans.PropertyValue" )
+        p.Name = "nodepath"
+        p.Value = "/org.openoffice.Lightproof_%s/Leaves"%pkg
+        self.xConfig = ctx.ServiceManager.createInstance( 'com.sun.star.configuration.ConfigurationProvider' )
+        self.node = self.xConfig.createInstanceWithArguments( 'com.sun.star.configuration.ConfigurationUpdateAccess', (p, ) )
+        self.service = "org.openoffice.comp.pyuno.LightproofOptionsEventHandler." + pkg
+        self.ImplementationName = self.service
+        self.services = (self.service, )
+
+    # XContainerWindowEventHandler
+    def callHandlerMethod(self, aWindow, aEventObject, sMethod):
+        if sMethod == "external_event":
+            return self.handleExternalEvent(aWindow, aEventObject)
+
+    def getSupportedMethodNames(self):
+        return ("external_event", )
+
+    def handleExternalEvent(self, aWindow, aEventObject):
+        sMethod = aEventObject
+        if sMethod == "ok":
+            self.saveData(aWindow)
+        elif sMethod == "back" or sMethod == "initialize":
+            self.loadData(aWindow)
+        return True
+
+    def load(self, sWindowName):
+        child = self.getChild(sWindowName)
+        for i in lightproof_opts_en.lopts[sWindowName]:
+            sValue = child.getPropertyValue(i)
+            if sValue == '':
+                if i in lightproof_opts_en.lopts_default[sWindowName]:
+                    sValue = 1
+                else:
+                    sValue = 0
+            set_option(sWindowName, i, sValue)
+
+    def loadData(self, aWindow):
+        sWindowName = self.getWindowName(aWindow)
+        if (sWindowName == None):
+            return
+        child = self.getChild(sWindowName)
+        for i in lightproof_opts_en.lopts[sWindowName]:
+            sValue = child.getPropertyValue(i)
+            if sValue == '':
+                if i in lightproof_opts_en.lopts_default[sWindowName]:
+                    sValue = 1
+                else:
+                    sValue = 0
+            xControl = aWindow.getControl(i)
+            xControl.State = sValue
+            set_option(sWindowName, i, sValue)
+
+    def saveData(self, aWindow):
+        sWindowName = self.getWindowName(aWindow)
+        if (sWindowName == None):
+            return
+        child = self.getChild(sWindowName)
+        for i in lightproof_opts_en.lopts[sWindowName]:
+            xControl = aWindow.getControl(i)
+            sValue = xControl.State
+            child.setPropertyValue(i, str(sValue))
+            set_option(sWindowName, i, sValue)
+        self.commitChanges()
+
+    def getWindowName(self, aWindow):
+        sName = aWindow.getModel().Name
+        if sName in lightproof_opts_en.lopts:
+            return sName
+        return None
+
+    # XServiceInfo method implementations
+    def getImplementationName (self):
+        return self.ImplementationName
+
+    def supportsService(self, ServiceName):
+        return (ServiceName in self.services)
+
+    def getSupportedServiceNames (self):
+        return self.services
+
+    def getChild(self, name):
+        return self.node.getByName(name)
+
+    def commitChanges(self):
+        self.node.commitChanges()
+        return True
diff --git a/en/pythonpath/lightproof_impl_en.py b/en/pythonpath/lightproof_impl_en.py
new file mode 100644
index 0000000..dd1adb0
--- /dev/null
+++ b/en/pythonpath/lightproof_impl_en.py
@@ -0,0 +1,336 @@
+# -*- encoding: UTF-8 -*-
+import uno, re, sys, os, traceback
+from string import join
+from com.sun.star.text.TextMarkupType import PROOFREADING
+from com.sun.star.beans import PropertyValue
+
+pkg = "en"
+lang = "en"
+locales = {'en-GB': ['en', 'GB', ''], 'en-ZW': ['en', 'ZW', ''], 'en-PH': ['en', 'PH', ''], 'en-TT': ['en', 'TT', ''], 'en-BZ': ['en', 'BZ', ''], 'en-NA': ['en', 'NA', ''], 'en-IE': ['en', 'IE', ''], 'en-GH': ['en', 'GH', ''], 'en-US': ['en', 'US', ''], 'en-IN': ['en', 'IN', ''], 'en-BS': ['en', 'BS', ''], 'en-JM': ['en', 'JM', ''], 'en-AU': ['en', 'AU', ''], 'en-NZ': ['en', 'NZ', ''], 'en-ZA': ['en', 'ZA', ''], 'en-CA': ['en', 'CA', '']}
+version = "0.4.3"
+author = "László Németh"
+name = "Lightproof grammar checker (English)"
+
+import lightproof_handler_en
+
+# loaded rules (check for Update mechanism of the editor)
+try:
+    langrule
+except NameError:
+    langrule = {}
+
+# ignored rules
+ignore = {}
+
+# cache for morphogical analyses
+analyses = {}
+stems = {}
+suggestions = {}
+
+# assign Calc functions
+calcfunc = None
+
+# check settings
+def option(lang, opt):
+    return lightproof_handler_en.get_option(lang.Language + "_" + lang.Country, opt)
+
+# filtering affix fields (ds, is, ts etc.)
+def onlymorph(st):
+    if st != None:
+        st = re.sub(r"^.*(st:|po:)", r"\\1", st) # keep last word part
+        st = re.sub(r"\\b(?=[dit][sp]:)","@", st) # and its affixes
+        st = re.sub(r"(?<!@)\\b\w\w:\w+","", st).replace('@','').strip()
+    return st
+
+# if the pattern matches all analyses of the input word,
+# return the last matched substring
+def _morph(rLoc, word, pattern, all, onlyaffix):
+    global analyses
+    if not word:
+        return None
+    if word not in analyses:
+        x = spellchecker.spell(u"<?xml?><query type='analyze'><word>" + word + "</word></query>", rLoc, ())
+        if not x:
+            return None
+        t = x.getAlternatives()
+        if not t:
+            t = [""]
+        analyses[word] = t[0].split("</a>")[:-1]
+    a = analyses[word]
+    result = None
+    p = re.compile(pattern)
+    for i in a:
+        if onlyaffix:
+            i = onlymorph(i)
+        result = p.search(i)
+        if result:
+            result = result.group(0)
+            if not all:
+                return result
+        elif all:
+            return None
+    return result
+
+def morph(rLoc, word, pattern, all=True):
+    return _morph(rLoc, word, pattern, all, False)
+
+def affix(rLoc, word, pattern, all=True):
+    return _morph(rLoc, word, pattern, all, True)
+
+def spell(rLoc, word):
+    if not word:
+        return None
+    return spellchecker.isValid(word, rLoc, ())
+
+# get the tuple of the stem of the word or an empty array
+def stem(rLoc, word):
+    global stems
+    if not word:
+        return []
+    if not word in stems:
+        x = spellchecker.spell(u"<?xml?><query type='stem'><word>" + word + "</word></query>", rLoc, ())
+        if not x:
+            return []
+        t = x.getAlternatives()
+        if not t:
+            t = []
+        stems[word] = list(t)
+    return stems[word]
+
+# get the tuple of the morphological generation of a word or an empty array
+def generate(rLoc, word, example):
+    if not word:
+        return []
+    x = spellchecker.spell(u"<?xml?><query type='generate'><word>" + word + "</word><word>" + example + "</word></query>", rLoc, ())
+    if not x:
+        return []
+    t = x.getAlternatives()
+    if not t:
+        t = []
+    return list(t)
+
+# get suggestions
+def suggest(rLoc, word):
+    global suggestions
+    if not word:
+        return word
+    if word not in suggestions:
+        x = spellchecker.spell("_" + word, rLoc, ())
+        if not x:
+            return word
+        t = x.getAlternatives()
+        suggestions[word] = join(t, "\\n")
+    return suggestions[word]
+
+# get the nth word of the input string or None
+def word(s, n):
+    a = re.match("(?u)( [-.\w%%]+){" + str(n-1) + "}( [-.\w%%]+)", s)
+    if not a:
+        return ''
+    return a.group(2)[1:]
+
+# get the (-)nth word of the input string or None
+def wordmin(s, n):
+    a = re.search("(?u)([-.\w%%]+ )([-.\w%%]+ ){" + str(n-1) + "}$", s)
+    if not a:
+        return ''
+    return a.group(1)[:-1]
+
+def calc(funcname, par):
+    global calcfunc
+    global SMGR
+    if calcfunc == None:
+        calcfunc = SMGR.createInstance( "com.sun.star.sheet.FunctionAccess")
+        if calcfunc == None:
+            return None
+    return calcfunc.callFunction(funcname, par)
+
+def proofread( nDocId, TEXT, LOCALE, nStartOfSentencePos, nSuggestedSentenceEndPos, rProperties ):
+    global ignore
+    aErrs = []
+    s = TEXT[nStartOfSentencePos:nSuggestedSentenceEndPos]
+    for i in get_rule(LOCALE).dic:
+        # 0: regex, 1: replacement, 2: message, 3: condition, 4: ngroup, (5: oldline), 6: case sensitive ?
+        if i[0] and not str(i[0]) in ignore:
+            for m in i[0].finditer(s):
+                try:
+                    if not i[3] or eval(i[3]):
+                        aErr = uno.createUnoStruct( "com.sun.star.linguistic2.SingleProofreadingError" )
+                        aErr.nErrorStart = nStartOfSentencePos + m.start(i[4]) # nStartOfSentencePos
+                        aErr.nErrorLength = m.end(i[4]) - m.start(i[4])
+                        aErr.nErrorType = PROOFREADING
+                        aErr.aRuleIdentifier = str(i[0])
+                        iscap = (i[-1] and m.group(i[4])[0:1].isupper())
+                        if i[1][0:1] == "=":
+                            aErr.aSuggestions = tuple(cap(eval(i[1][1:]).replace('|', "\n").split("\n"), iscap, LOCALE))
+                        elif i[1] == "_":
+                            aErr.aSuggestions = ()
+                        else:
+                            aErr.aSuggestions = tuple(cap(m.expand(i[1]).replace('|', "\n").split("\n"), iscap, LOCALE))
+                        comment = i[2]
+                        if comment[0:1] == "=":
+                            comment = eval(comment[1:])
+                        else:
+                            comment = m.expand(comment)
+                        aErr.aShortComment = comment.replace('|', '\n').replace('\\n', '\n').split("\n")[0].strip()
+                        aErr.aFullComment = comment.replace('|', '\n').replace('\\n', '\n').split("\n")[-1].strip()
+                        if "://" in aErr.aFullComment:
+                            p = PropertyValue()
+                            p.Name = "FullCommentURL"
+                            p.Value = aErr.aFullComment
+                            aErr.aFullComment = aErr.aShortComment
+                            aErr.aProperties = (p,)
+                        else:
+                            aErr.aProperties = ()
+                        aErrs = aErrs + [aErr]
+                except Exception as e:
+                    if len(i) == 7:
+                        raise Exception(str(e), i[5])
+                    raise
+
+    return tuple(aErrs)
+
+def cap(a, iscap, rLoc):
+    if iscap:
+        for i in range(0, len(a)):
+            if a[i][0:1] == "i":
+                if rLoc.Language == "tr" or rLoc.Language == "az":
+                    a[i] = u"\u0130" + a[i][1:]
+                elif a[i][1:2] == "j" and rLoc.Language == "nl":
+                    a[i] = "IJ" + a[i][2:]
+                else:
+                    a[i] = "I" + a[i][1:]
+            else:
+                a[i] = a[i].capitalize()
+    return a
+
+def compile_rules(dic):
+    # compile regular expressions
+    for i in dic:
+        try:
+            if re.compile("[(][?]iu[)]").match(i[0]):
+                i += [True]
+                i[0] = re.sub("[(][?]iu[)]", "(?u)", i[0])
+            else:
+                i += [False]
+            i[0] = re.compile(i[0])
+        except:
+            if 'PYUNO_LOGLEVEL' in os.environ:
+                print("Lightproof: bad regular expression: ", traceback.format_exc())
+            i[0] = None
+
+def get_rule(loc):
+    try:
+        return langrule[pkg]
+    except:
+        langrule[pkg] = __import__("lightproof_" + pkg)
+        compile_rules(langrule[pkg].dic)
+    return langrule[pkg]
+
+def get_path():
+    return os.path.join(os.path.dirname(sys.modules[__name__].__file__), __name__ + ".py")
+
+# [code]
+
+# pattern matching for common English abbreviations
+abbrev = re.compile("(?i)\\b([a-z]|acct|approx|appt|apr|apt|assoc|asst|aug|ave|avg|co(nt|rp)?|ct|dec|defn|dept|dr|eg|equip|esp|est|etc|excl|ext|feb|fri|ft|govt?|hrs?|ib(id)?|ie|in(c|t)?|jan|jr|jul|lit|ln|mar|max|mi(n|sc)?|mon|Mrs?|mun|natl?|neg?|no(rm|s|v)?|nw|obj|oct|org|orig|pl|pos|prev|proj|psi|qty|rd|rec|rel|reqd?|resp|rev|sat|sci|se(p|pt)?|spec(if)?|sq|sr|st|subj|sun|sw|temp|thurs|tot|tues|univ|var|vs)\\.")
+
+# pattern for paragraph checking
+paralcap = re.compile(u"(?u)^[a-z].*[.?!] [A-Z].*[.?!][)\u201d]?$")
+
+
+punct = { "?": "question mark", "!": "exclamation mark",
+    ",": "comma", ":": "colon", ";": "semicolon",
+    "(": "opening parenthesis", ")": "closing parenthesis",
+    "[": "opening square bracket", "]": "closing square bracket",
+    u"\u201c": "opening quotation mark", u"\u201d": "closing quotation mark"}
+
+
+aA = set(["eucalypti", "eucalyptus", "Eucharist", "Eucharistic",
+"euchre", "euchred", "euchring", "Euclid", "euclidean", "Eudora",
+"eugene", "Eugenia", "eugenic", "eugenically", "eugenicist",
+"eugenicists", "eugenics", "Eugenio", "eukaryote", "Eula", "eulogies",
+"eulogist", "eulogists", "eulogistic", "eulogized", "eulogizer",
+"eulogizers", "eulogizing", "eulogy", "eulogies", "Eunice", "eunuch",
+"eunuchs", "Euphemia", "euphemism", "euphemisms", "euphemist",
+"euphemists", "euphemistic", "euphemistically", "euphonious",
+"euphoniously", "euphonium", "euphony", "euphoria", "euphoric",
+"Euphrates", "euphuism", "Eurasia", "Eurasian", "Eurasians", "eureka",
+"eurekas", "eurhythmic", "eurhythmy", "Euridyce", "Euripides", "euripus",
+"Euro", "Eurocentric", "Euroclydon", "Eurocommunism", "Eurocrat",
+"eurodollar", "Eurodollar", "Eurodollars", "Euromarket", "Europa",
+"Europe", "European", "Europeanisation", "Europeanise", "Europeanised",
+"Europeanization", "Europeanize", "Europeanized", "Europeans", "europium",
+"Eurovision", "Eustace", "Eustachian", "Eustacia", "euthanasia",
+"Ewart", "ewe", "Ewell", "ewer", "ewers", "Ewing", "once", "one",
+"oneness", "ones", "oneself", "onetime", "oneway", "oneyear", "u",
+"U", "UART", "ubiquitous", "ubiquity", "Udale", "Udall", "UEFA",
+"Uganda", "Ugandan", "ugric", "UK", "ukase", "Ukraine", "Ukrainian",
+"Ukrainians", "ukulele", "Ula", "ululated", "ululation", "Ulysses",
+"UN", "unanimity", "unanimous", "unanimously", "unary", "Unesco",
+"UNESCO", "UNHCR", "uni", "unicameral", "unicameralism", "Unicef",
+"UNICEF", "unicellular", "Unicode", "unicorn", "unicorns", "unicycle",
+"unicyclist", "unicyclists", "unidimensional", "unidirectional",
+"unidirectionality", "unifiable", "unification", "unified", "unifier",
+"unifilar", "uniform", "uniformally", "uniformed", "uniformer",
+"uniforming", "uniformisation", "uniformise", "uniformitarian",
+"uniformitarianism", "uniformity", "uniformly", "uniformness", "uniforms",
+"unify", "unifying", "unijugate", "unilateral", "unilateralisation",
+"unilateralise", "unilateralism", "unilateralist", "unilaterally",
+"unilinear", "unilingual", "uniliteral", "uniliteralism", "uniliteralist",
+"unimodal", "union", "unionism", "unionist", "unionists", "unionisation",
+"unionise", "unionised", "unionising", "unionization", "unionize",
+"unionized", "unionizing", "unions", "unipolar", "uniprocessor",
+"unique", "uniquely", "uniqueness", "uniquer", "Uniroyal", "unisex",
+"unison", "Unisys", "unit", "Unitarian", "Unitarianism", "Unitarians",
+"unitary", "unite", "united", "unitedly", "uniter", "unites", "uniting",
+"unitize", "unitizing", "unitless", "units", "unity", "univ", "Univac",
+"univalent", "univalve", "univariate", "universal", "universalisation",
+"universalise", "universalised", "universaliser", "universalisers",
+"universalising", "universalism", "universalist", "universalistic",
+"universality", "universalisation", "universalization", "universalize",
+"universalized", "universalizer", "universalizers", "universalizing",
+"universally", "universalness", "universe", "universes", "universities",
+"university", "univocal", "Unix", "uracil", "Urals", "uranium", "Uranus",
+"uranyl", "urate", "urea", "uremia", "uremic", "ureter", "urethane",
+"urethra", "urethral", "urethritis", "Urey", "Uri", "uric", "urinal",
+"urinalysis", "urinary", "urinated", "urinating", "urination", "urine",
+"urogenital", "urokinase", "urologist", "urologists", "urology",
+"Uruguay", "Uruguayan", "Uruguayans", "US", "USA", "usability",
+"usable", "usably", "usage",
+"usages", "use", "used", "useful", "usefulness", "usefully", "useless",
+"uselessly", "uselessness", "Usenet", "user", "users", "uses", "using",
+"usual", "usually", "usurer", "usurers", "usuress", "usurial", "usurious",
+"usurp", "usurpation", "usurped", "usurper", "usurping", "usurps",
+"usury", "Utah", "utensil", "utensils", "uterine", "uterus", "Utica",
+"utilitarian", "utilitarianism", "utilities", "utility", "utilizable",
+"utilization", "utilize", "utilized", "utilizes", "utilizing", "utopia",
+"utopian", "utopians", "utopias", "Utrecht", "Uttoxeter", "uvula",
+"uvular"])
+
+aAN = set(["f", "F", "FBI", "FDA", "heir", "heirdom", "heired",
+"heirer", "heiress", "heiring", "heirloom", "heirship", "honest",
+"honester", "honestly", "honesty", "honor", "honorable", "honorableness",
+"honorably", "honorarium", "honorary", "honored", "honorer", "honorific",
+"honoring", "honors", "honour", "honourable", "honourableness",
+"honourably", "honourarium", "honourary", "honoured", "honourer",
+"honourific", "honouring", "Honours", "hors", "hour", "hourglass", "hourlong",
+"hourly", "hours", "l", "L", "LCD", "m", "M", "MBA", "MP", "mpg", "mph",
+"MRI", "MSc", "MTV", "n", "N", "NBA", "NBC", "NFL", "NGO", "NHL", "r",
+"R", "s", "S", "SMS", "sos", "SOS", "SPF", "std", "STD", "SUV", "x",
+"X", "XML"])
+
+aB = set(["H", "habitual", "hallucination", "haute", "hauteur", "herb", "herbaceous", "herbal",
+"herbalist", "herbalism", "heroic", "hilarious", "historian", "historic", "historical",
+"homage", "homophone", "horrendous", "hospitable", "horrific", "hotel", "hypothesis", "Xmas"])
+
+def measurement(mnum, min, mout, mstr, decimal, remove):
+    if min == "ft" or min == "in" or min == "mi":
+        mnum = mnum.replace(" 1/2", ".5").replace(u" \xbd", ".5").replace(u"\xbd",".5")
+    m = calc("CONVERT_ADD", (float(eval(mnum.replace(remove, "").replace(decimal, ".").replace(u"\u2212", "-"))), min, mout))
+    a = list(set([str(calc("ROUND", (m, 0)))[:-2], str(calc("ROUND", (m, 1))), str(calc("ROUND", (m, 2))), str(m)])) # remove duplicated rounded items
+    a.sort(lambda x, y: len(x) - len(y)) # sort by string length
+    return join(a, mstr + "\n").replace(".", decimal).replace("-", u"\u2212") + mstr
+
+
diff --git a/en/pythonpath/lightproof_opts_en.py b/en/pythonpath/lightproof_opts_en.py
new file mode 100644
index 0000000..aa43156
--- /dev/null
+++ b/en/pythonpath/lightproof_opts_en.py
@@ -0,0 +1,4 @@
+lopts = {}
+lopts_default = {}
+lopts['en'] = [u'grammar', u'cap', u'dup', u'pair', u'spaces', u'mdash', u'quotation', u'times', u'spaces2', u'ndash', u'apostrophe', u'ellipsis', u'spaces3', u'minus', u'metric', u'numsep', u'nonmetric']
+lopts_default['en'] = [u'spaces', u'times']