libkkc/SOURCES/libkkc-HEAD.patch

diff --git a/Makefile.am b/Makefile.am
index d4253d6..e95006a 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-SUBDIRS = marisa-glib libkkc data tools tests po
+SUBDIRS = marisa-glib libkkc tools data tests po
DISTCHECK_CONFIGURE_FLAGS = --enable-docs
ACLOCAL_AMFLAGS = -I m4
@@ -22,20 +22,22 @@ if HAVE_VALADOC
SUBDIRS += docs
endif
-GITIGNOREFILES = \
- INSTALL \
- aclocal.m4 \
- compile \
- config.guess \
- config.h.in \
- config.sub \
- depcomp \
- install-sh \
- ltmain.sh \
- missing \
- mkinstalldirs \
- `find "m4" -type f -name "*.m4" ! -name "vala.m4" -print` \
- $(top_builddir)/lcov.html/* \
+GITIGNOREFILES = \
+ INSTALL \
+ aclocal.m4 \
+ compile \
+ config.guess \
+ config.h.in \
+ config.sub \
+ depcomp \
+ install-sh \
+ ltmain.sh \
+ missing \
+ mkinstalldirs \
+ `find "m4" -type f -name "*.m4" ! -name "vala.m4" -print` \
+ $(top_builddir)/lcov.html/* \
+ data/rules/*/*.pot \
+ test-driver \
$(NULL)
distclean-local:
diff --git a/README b/README
deleted file mode 100644
index bec5c53..0000000
--- a/README
+++ /dev/null
@@ -1,58 +0,0 @@
-libkkc - Japanese Kana Kanji conversion library -*- coding: utf-8 -*-
-
-* What's this?
-
-libkkc provides a converter from Kana-string to
-Kana-Kanji-mixed-string. It was named after kkc.el in GNU Emacs, a
-simple Kana Kanji converter, while libkkc tries to convert sentences
-in a bit more complex way using N-gram language models.
-
-* Install
-
-1. compile and install marisa-trie 0.2.1
-
- https://code.google.com/p/marisa-trie/
-
-2. compile and install
-
- $ ./autogen.sh
- $ make
- $ make install
-
-3. run kkc program
-
- $ kkc
- Type kana sentence in the following form:
- SENTENCE [N-BEST [SEGMENT-BOUNDARY...]]
-
- >> わたしのなまえはなかのです
- 0: <わたし/わたし><の/の><名前/なまえ><は/は><中野/なかの><で/で><す/す>
-
- # get 3 matches instead of 1
- >> わたしのなまえはなかのです 3
- 0: <わたし/わたし><の/の><名前/なまえ><は/は><中野/なかの><で/で><す/す>
- 1: <私/わたし><の/の><名前/なまえ><は/は><中野/なかの><で/で><す/す>
- 2: <わたし/わたし><の/の><名前/なまえ><は/は><中野/なかの><デス/です>
-
- # enlarge the second segment (の -> のな)
- >> わたしのなまえはなかのです 1 3 5
- 0: <わたし/わたし><のな/のな><前/まえ><は/は><中野/なかの><で/で><す/す>
-
- # shrink the fourth segment (なかの -> なか)
- >> わたしのなまえはなかのです 1 3 4 7 8 10
- 0: <わたし/わたし><の/の><名前/なまえ><は/は><中/なか><の/の><で/で><す/す>
-
-License:
-
-GPLv3+
-
-Copyright (C) 2011-2014 Daiki Ueno <ueno@gnu.org>
-Copyright (C) 2011-2014 Red Hat, Inc.
-
-This file is free software; as a special exception the author gives
-unlimited permission to copy and/or distribute it, with or without
-modifications, as long as this notice is preserved.
-
-This file is distributed in the hope that it will be useful, but
-WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
-implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
diff --git a/README b/README
new file mode 120000
index 0000000..42061c0
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+README.md
\ No newline at end of file
diff --git a/configure.ac b/configure.ac
index e044965..a36d98c 100644
--- a/configure.ac
+++ b/configure.ac
@@ -176,6 +176,8 @@ if test "x$found_introspection" = "xyes" -a "x$enable_vapigen" != "xno"; then
fi
AM_CONDITIONAL(ENABLE_VAPIGEN, [test "x$enable_vapigen" = "xyes"])
+AC_CHECK_PROGS(JSON_VALIDATE, json-validate, true)
+
AC_CONFIG_HEADERS([config.h])
AC_CONFIG_FILES([Makefile
marisa-glib/Makefile
@@ -185,9 +187,11 @@ tools/Makefile
tests/Makefile
tests/lib/Makefile
data/Makefile
+data/dbus/Makefile
data/rules/Makefile
data/rules/default/Makefile
data/rules/act/Makefile
+data/rules/atok/Makefile
data/rules/azik/Makefile
data/rules/azik-jp106/Makefile
data/rules/kzik/Makefile
diff --git a/data/Makefile.am b/data/Makefile.am
index 794738f..9c03b32 100644
--- a/data/Makefile.am
+++ b/data/Makefile.am
@@ -14,6 +14,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-SUBDIRS = rules templates
+SUBDIRS = dbus rules templates
-include $(top_srcdir)/git.mk
diff --git a/data/dbus/Makefile.am b/data/dbus/Makefile.am
new file mode 100644
index 0000000..a68fa73
--- /dev/null
+++ b/data/dbus/Makefile.am
@@ -0,0 +1,24 @@
+# Copyright (C) 2011-2015 Daiki Ueno <ueno@gnu.org>
+# Copyright (C) 2011-2015 Red Hat, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+EXTRA_DIST = \
+ org.du_a.Kkc.CandidateList.xml \
+ org.du_a.Kkc.Context.xml \
+ org.du_a.Kkc.SegmentList.xml \
+ org.du_a.Kkc.Server.xml \
+ $(NULL)
+
+-include $(top_srcdir)/git.mk
diff --git a/data/dbus/org.du_a.Kkc.CandidateList.xml b/data/dbus/org.du_a.Kkc.CandidateList.xml
new file mode 100644
index 0000000..684d6fd
--- /dev/null
+++ b/data/dbus/org.du_a.Kkc.CandidateList.xml
@@ -0,0 +1,54 @@
+<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
+ "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
+<node>
+ <interface name="org.du_a.Kkc.CandidateList">
+ <method name="SelectAt">
+ <arg type="u" name="index_in_page" direction="in"/>
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="Select">
+ </method>
+ <method name="First">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="Next">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="Previous">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="CursorUp">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="CursorDown">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="PageUp">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="PageDown">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="Get">
+ <arg type="i" name="index" direction="in"/>
+ <arg type="s" name="midasi" direction="out"/>
+ <arg type="b" name="okuri" direction="out"/>
+ <arg type="s" name="text" direction="out"/>
+ <arg type="s" name="annotation" direction="out"/>
+ </method>
+ <signal name="Populated">
+ </signal>
+ <signal name="Selected">
+ <arg type="s" name="midasi"/>
+ <arg type="b" name="okuri"/>
+ <arg type="s" name="text"/>
+ <arg type="s" name="annotation"/>
+ </signal>
+ <property type="i" name="CursorPos" access="read"/>
+ <property type="i" name="Size" access="read"/>
+ <property type="u" name="PageStart" access="read"/>
+ <property type="u" name="PageSize" access="read"/>
+ <property type="b" name="Round" access="read"/>
+ <property type="b" name="PageVisible" access="read"/>
+ </interface>
+</node>
diff --git a/data/dbus/org.du_a.Kkc.Context.xml b/data/dbus/org.du_a.Kkc.Context.xml
new file mode 100644
index 0000000..f989227
--- /dev/null
+++ b/data/dbus/org.du_a.Kkc.Context.xml
@@ -0,0 +1,34 @@
+<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
+ "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
+<node>
+ <interface name="org.du_a.Kkc.Context">
+ <method name="ProcessKeyEvent">
+ <arg type="u" name="keyval" direction="in"/>
+ <arg type="u" name="keycode" direction="in"/>
+ <arg type="u" name="modifiers" direction="in"/>
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="ProcessCommandEvent">
+ <arg type="s" name="command" direction="in"/>
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="Reset">
+ </method>
+ <method name="HasOutput">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="PeekOutput">
+ <arg type="s" name="result" direction="out"/>
+ </method>
+ <method name="PollOutput">
+ <arg type="s" name="result" direction="out"/>
+ </method>
+ <method name="ClearOutput">
+ </method>
+ <property type="s" name="Input" access="read"/>
+ <property type="i" name="InputCursorPos" access="read"/>
+ <property type="u" name="InputMode" access="readwrite"/>
+ <property type="u" name="PunctuationStyle" access="readwrite"/>
+ <property type="b" name="AutoCorrect" access="readwrite"/>
+ </interface>
+</node>
diff --git a/data/dbus/org.du_a.Kkc.SegmentList.xml b/data/dbus/org.du_a.Kkc.SegmentList.xml
new file mode 100644
index 0000000..43d578b
--- /dev/null
+++ b/data/dbus/org.du_a.Kkc.SegmentList.xml
@@ -0,0 +1,29 @@
+<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
+ "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
+<node>
+ <interface name="org.du_a.Kkc.SegmentList">
+ <method name="Get">
+ <arg type="i" name="index" direction="in"/>
+ <arg type="s" name="input" direction="out"/>
+ <arg type="s" name="output" direction="out"/>
+ </method>
+ <method name="FirstSegment">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="LastSegment">
+ <arg type="b" name="result" direction="out"/>
+ </method>
+ <method name="NextSegment">
+ </method>
+ <method name="PreviousSegment">
+ </method>
+ <method name="GetOutput">
+ <arg type="s" name="result" direction="out"/>
+ </method>
+ <method name="GetInput">
+ <arg type="s" name="result" direction="out"/>
+ </method>
+ <property type="i" name="CursorPos" access="read"/>
+ <property type="i" name="Size" access="read"/>
+ </interface>
+</node>
diff --git a/data/dbus/org.du_a.Kkc.Server.xml b/data/dbus/org.du_a.Kkc.Server.xml
new file mode 100644
index 0000000..749abb4
--- /dev/null
+++ b/data/dbus/org.du_a.Kkc.Server.xml
@@ -0,0 +1,12 @@
+<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
+ "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
+<node>
+ <interface name="org.du_a.Kkc.Server">
+ <method name="CreateContext">
+ <arg type="s" name="result" direction="out"/>
+ </method>
+ <method name="DestroyContext">
+ <arg type="s" name="object_path" direction="in"/>
+ </method>
+ </interface>
+</node>
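
The four org.du_a.Kkc.* files above only declare D-Bus interfaces; the well-known bus name and the object paths are not part of them. A minimal client sketch with dbus-python, treating the bus name "org.du_a.Kkc" and the server path "/org/du_a/Kkc/Server" as assumptions rather than facts from this patch:

    # Hypothetical client for the interfaces declared above; the bus name and
    # server object path are guesses, only the interface and method names
    # come from the XML files.
    import dbus

    bus = dbus.SessionBus()
    server_obj = bus.get_object('org.du_a.Kkc', '/org/du_a/Kkc/Server')
    server = dbus.Interface(server_obj, 'org.du_a.Kkc.Server')

    # CreateContext returns the object path of a per-client context.
    context_path = server.CreateContext()
    context = dbus.Interface(bus.get_object('org.du_a.Kkc', context_path),
                             'org.du_a.Kkc.Context')

    # Feed a command (see ProcessCommandEvent above) and drain the output.
    context.ProcessCommandEvent('commit')
    if context.HasOutput():
        print(context.PollOutput())

    server.DestroyContext(context_path)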
diff --git a/data/rules/Makefile.am b/data/rules/Makefile.am
index 0e88d21..d1a1293 100644
--- a/data/rules/Makefile.am
+++ b/data/rules/Makefile.am
@@ -17,6 +17,7 @@
SUBDIRS = \
default \
act \
+ atok \
azik \
azik-jp106 \
kzik \
@@ -31,6 +32,9 @@ SUBDIRS = \
EXTRA_DIST = \
README.rules \
rule.mk \
+ metadata-schema.json \
+ keymap-schema.json \
+ rom-kana-schema.json \
$(NULL)
-include $(top_srcdir)/git.mk
diff --git a/data/rules/act/Makefile.am b/data/rules/act/Makefile.am
index 2524bd6..16ad93c 100644
--- a/data/rules/act/Makefile.am
+++ b/data/rules/act/Makefile.am
@@ -8,24 +8,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/act/metadata.json b/data/rules/act/metadata.json
new file mode 100644
index 0000000..0136cc4
--- /dev/null
+++ b/data/rules/act/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "ACT",
+ "description": "Extended romaji input method based on AZIK for Dvorak keyboard layout, developed by Kiyoshi Kimura <http://hp.vector.co.jp/authors/VA002116/azik/azikindx.htm#act>",
+ "priority": 10
+}
diff --git a/data/rules/act/metadata.json.in b/data/rules/act/metadata.json.in
deleted file mode 100644
index f4b7721..0000000
--- a/data/rules/act/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("ACT"),
- "description": _("Extended romaji input method based on AZIK for Dvorak keyboard layout, developed by Kiyoshi Kimura <http://hp.vector.co.jp/authors/VA002116/azik/azikindx.htm#act>"),
- "priority": 10
-}
diff --git a/data/rules/atok/Makefile.am b/data/rules/atok/Makefile.am
new file mode 100644
index 0000000..d6fe7eb
--- /dev/null
+++ b/data/rules/atok/Makefile.am
@@ -0,0 +1,24 @@
+rulesdir = $(pkgdatadir)/rules/atok
+
+files = \
+ keymap/default.json \
+ keymap/hankaku-katakana.json \
+ keymap/hiragana.json \
+ keymap/katakana.json \
+ keymap/latin.json \
+ keymap/wide-latin.json \
+ keymap/direct.json \
+ rom-kana/default.json \
+ metadata.json \
+ $(NULL)
+
+nobase_rules_DATA = \
+ $(files) \
+ $(NULL)
+
+EXTRA_DIST = \
+ $(files) \
+ $(NULL)
+
+include $(top_srcdir)/data/rules/rule.mk
+-include $(top_srcdir)/git.mk
diff --git a/data/rules/atok/keymap/default.json b/data/rules/atok/keymap/default.json
new file mode 100644
index 0000000..a2e091b
--- /dev/null
+++ b/data/rules/atok/keymap/default.json
@@ -0,0 +1,31 @@
+{
+ "include": [
+ "default/default"
+ ],
+ "define": {
+ "keymap": {
+ "C-[": "abort",
+ "C-g": "delete-forward",
+ "C-q": null,
+ "C-F7": "register",
+ "C-Down": "next-candidate",
+ "S-space": "next-candidate",
+ "S-Henkan_Mode": "next-candidate",
+ "C-Up": "previous-candidate",
+ "Muhenkan": null,
+ "C-l": "expand-segment",
+ "C-k": "shrink-segment",
+ "Right": "expand-segment",
+ "Left": "shrink-segment",
+ "S-Left": "previous-segment",
+ "S-Right": "next-segment",
+ "C-Left": "first-segment",
+ "C-Right": "last-segment",
+ "C-u": "convert-hiragana",
+ "C-i": "convert-katakana",
+ "C-o": "convert-hankaku-katakana",
+ "C-p": "convert-wide-latin",
+ "C-@": "convert-latin"
+ }
+ }
+}
diff --git a/data/rules/atok/keymap/direct.json b/data/rules/atok/keymap/direct.json
new file mode 100644
index 0000000..b11a387
--- /dev/null
+++ b/data/rules/atok/keymap/direct.json
@@ -0,0 +1,9 @@
+{
+ "define": {
+ "keymap": {
+ "Hiragana_Katakana": "set-input-mode-hiragana",
+ "Zenkaku_Hankaku": "set-input-mode-hiragana",
+ "Henkan_Mode": "set-input-mode-hiragana"
+ }
+ }
+}
diff --git a/data/rules/atok/keymap/hankaku-katakana.json b/data/rules/atok/keymap/hankaku-katakana.json
new file mode 100644
index 0000000..84e46f5
--- /dev/null
+++ b/data/rules/atok/keymap/hankaku-katakana.json
@@ -0,0 +1,10 @@
+{
+ "include": [
+ "default"
+ ],
+ "define": {
+ "keymap": {
+ "Hiragana_Katakana": "set-input-mode-hiragana"
+ }
+ }
+}
diff --git a/data/rules/atok/keymap/hiragana.json b/data/rules/atok/keymap/hiragana.json
new file mode 100644
index 0000000..be63908
--- /dev/null
+++ b/data/rules/atok/keymap/hiragana.json
@@ -0,0 +1,12 @@
+{
+ "include": [
+ "default"
+ ],
+ "define": {
+ "keymap": {
+ "Hiragana_Katakana": "set-input-mode-katakana",
+ "Henkan_Mode": "set-input-mode-direct",
+ "Muhenkan": "set-input-mode-latin"
+ }
+ }
+}
diff --git a/data/rules/atok/keymap/katakana.json b/data/rules/atok/keymap/katakana.json
new file mode 100644
index 0000000..84e46f5
--- /dev/null
+++ b/data/rules/atok/keymap/katakana.json
@@ -0,0 +1,10 @@
+{
+ "include": [
+ "default"
+ ],
+ "define": {
+ "keymap": {
+ "Hiragana_Katakana": "set-input-mode-hiragana"
+ }
+ }
+}
diff --git a/data/rules/atok/keymap/latin.json b/data/rules/atok/keymap/latin.json
new file mode 100644
index 0000000..c3f7a78
--- /dev/null
+++ b/data/rules/atok/keymap/latin.json
@@ -0,0 +1,11 @@
+{
+ "include": [
+ "default"
+ ],
+ "define": {
+ "keymap": {
+ "Hiragana_Katakana": "set-input-mode-hiragana",
+ "Muhenkan": "set-input-mode-hiragana"
+ }
+ }
+}
diff --git a/data/rules/atok/keymap/wide-latin.json b/data/rules/atok/keymap/wide-latin.json
new file mode 100644
index 0000000..84e46f5
--- /dev/null
+++ b/data/rules/atok/keymap/wide-latin.json
@@ -0,0 +1,10 @@
+{
+ "include": [
+ "default"
+ ],
+ "define": {
+ "keymap": {
+ "Hiragana_Katakana": "set-input-mode-hiragana"
+ }
+ }
+}
diff --git a/data/rules/atok/metadata.json b/data/rules/atok/metadata.json
new file mode 100644
index 0000000..8c0d82b
--- /dev/null
+++ b/data/rules/atok/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "ATOK",
+ "description": "The commercial Input Method ATOK like style",
+ "priority": 90
+}
diff --git a/data/rules/atok/rom-kana/default.json b/data/rules/atok/rom-kana/default.json
new file mode 100644
index 0000000..c179331
--- /dev/null
+++ b/data/rules/atok/rom-kana/default.json
@@ -0,0 +1,28 @@
+{
+ "include": [
+ "default/default"
+ ],
+ "define": {
+ "rom-kana": {
+ "dwu": ["", "どぅ" ],
+ "kwa": ["", "くぁ" ],
+ "lka": ["", "ヵ" ],
+ "lke": ["", "ヶ" ],
+ "ltu": ["", "っ" ],
+ "ltsu": ["", "っ" ],
+ "lwa": ["", "ゎ" ],
+ "tha": ["", "てゃ" ],
+ "tsa": ["", "つぁ" ],
+ "tsi": ["", "つぃ" ],
+ "tse": ["", "つぇ" ],
+ "tso": ["", "つぉ" ],
+ "twu": ["", "とぅ" ],
+ "wye": ["", "ゑ" ],
+ "wyi": ["", "ゐ" ],
+ "xye": ["", "ぇ" ],
+ "yi": ["", "い" ],
+ "zya": null,
+ "/": ["", "・", ""]
+ }
+ }
+}
diff --git a/data/rules/azik-jp106/Makefile.am b/data/rules/azik-jp106/Makefile.am
index 7311917..8d5e739 100644
--- a/data/rules/azik-jp106/Makefile.am
+++ b/data/rules/azik-jp106/Makefile.am
@@ -8,24 +8,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/azik-jp106/keymap/hiragana.json b/data/rules/azik-jp106/keymap/hiragana.json
index 60bffc8..e94115d 100644
--- a/data/rules/azik-jp106/keymap/hiragana.json
+++ b/data/rules/azik-jp106/keymap/hiragana.json
@@ -1,7 +1,7 @@
{
"include": [
"default/default"
- ]
+ ],
"define": {
"keymap": {
"[": null,
diff --git a/data/rules/azik-jp106/metadata.json b/data/rules/azik-jp106/metadata.json
new file mode 100644
index 0000000..75efc94
--- /dev/null
+++ b/data/rules/azik-jp106/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "AZIK (Japanese 106 keyboard)",
+ "description": "Extended romaji input method developed by Kiyoshi Kimura <http://hp.vector.co.jp/authors/VA002116/azik/azikindx.htm>",
+ "priority": 10
+}
diff --git a/data/rules/azik-jp106/metadata.json.in b/data/rules/azik-jp106/metadata.json.in
deleted file mode 100644
index a7fe226..0000000
--- a/data/rules/azik-jp106/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("AZIK (Japanese 106 keyboard)"),
- "description": _("Extended romaji input method developed by Kiyoshi Kimura <http://hp.vector.co.jp/authors/VA002116/azik/azikindx.htm>"),
- "priority": 10
-}
diff --git a/data/rules/azik/Makefile.am b/data/rules/azik/Makefile.am
index acc54bc..e4ada1c 100644
--- a/data/rules/azik/Makefile.am
+++ b/data/rules/azik/Makefile.am
@@ -8,24 +8,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/azik/keymap/default.json b/data/rules/azik/keymap/default.json
index 307b53a..828a178 100644
--- a/data/rules/azik/keymap/default.json
+++ b/data/rules/azik/keymap/default.json
@@ -1,10 +1,5 @@
{
"include": [
"default/default"
- ],
- "define": {
- "keymap": {
- ":": "upper-;"
- }
- }
+ ]
}
diff --git a/data/rules/azik/metadata.json b/data/rules/azik/metadata.json
new file mode 100644
index 0000000..a7421dd
--- /dev/null
+++ b/data/rules/azik/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "AZIK",
+ "description": "Extended romaji input method developed by Kiyoshi Kimura <http://hp.vector.co.jp/authors/VA002116/azik/azikindx.htm>",
+ "priority": 10
+}
diff --git a/data/rules/azik/metadata.json.in b/data/rules/azik/metadata.json.in
deleted file mode 100644
index 6c4fa9d..0000000
--- a/data/rules/azik/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("AZIK"),
- "description": _("Extended romaji input method developed by Kiyoshi Kimura <http://hp.vector.co.jp/authors/VA002116/azik/azikindx.htm>"),
- "priority": 10
-}
diff --git a/data/rules/default/Makefile.am b/data/rules/default/Makefile.am
index e2e0a56..77d220a 100644
--- a/data/rules/default/Makefile.am
+++ b/data/rules/default/Makefile.am
@@ -9,24 +9,16 @@ files = \
keymap/wide-latin.json \
keymap/direct.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/default/metadata.json b/data/rules/default/metadata.json
new file mode 100644
index 0000000..78f492f
--- /dev/null
+++ b/data/rules/default/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "Default",
+ "description": "Default typing rule",
+ "priority": 99
+}
diff --git a/data/rules/default/metadata.json.in b/data/rules/default/metadata.json.in
deleted file mode 100644
index f9aa8dc..0000000
--- a/data/rules/default/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("Default"),
- "description": _("Default typing rule"),
- "priority": 99
-}
diff --git a/data/rules/kana/Makefile.am b/data/rules/kana/Makefile.am
index 559ef16..b00df0b 100644
--- a/data/rules/kana/Makefile.am
+++ b/data/rules/kana/Makefile.am
@@ -9,24 +9,16 @@ files = \
keymap/wide-latin.json \
keymap/direct.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/kana/metadata.json b/data/rules/kana/metadata.json
new file mode 100644
index 0000000..e2dbe8f
--- /dev/null
+++ b/data/rules/kana/metadata.json
@@ -0,0 +1,6 @@
+{
+ "name": "Kana",
+ "description": "Direct Kana typing",
+ "filter": "kana",
+ "priority": 99
+}
diff --git a/data/rules/kana/metadata.json.in b/data/rules/kana/metadata.json.in
deleted file mode 100644
index 47efecc..0000000
--- a/data/rules/kana/metadata.json.in
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": _("Kana"),
- "description": _("Direct Kana typing"),
- "filter": "kana",
- "priority": 99
-}
diff --git a/data/rules/kana/rom-kana/default.json b/data/rules/kana/rom-kana/default.json
index e5c573e..d68ca78 100644
--- a/data/rules/kana/rom-kana/default.json
+++ b/data/rules/kana/rom-kana/default.json
@@ -29,40 +29,70 @@
"^@": ["", "べ"],
"^[": ["", "ぺ"],
"q": ["", "た", "タ", "た", "タ"],
+ "Q": ["", "た", "タ", "た", "タ"],
"q@": ["", "だ"],
+ "Q@": ["", "だ"],
"w": ["", "て", "テ", "て", "テ"],
+ "W": ["", "て", "テ", "て", "テ"],
"w@": ["", "で"],
+ "W@": ["", "で"],
"e": ["", "い"],
"E": ["", "ぃ"],
"r": ["", "す", "ス", "す", "ス"],
+ "R": ["", "す", "ス", "す", "ス"],
"r@": ["", "ず"],
+ "R@": ["", "ず"],
"t": ["", "か", "カ", "か", "カ"],
+ "T": ["", "か", "カ", "か", "カ"],
"t@": ["", "が"],
+ "T@": ["", "が"],
"y": ["", "ん"],
+ "Y": ["", "ん"],
"u": ["", "な"],
+ "U": ["", "な"],
"i": ["", "に"],
+ "I": ["", "に"],
"o": ["", "ら"],
+ "O": ["", "ら"],
"p": ["", "せ", "セ", "せ", "セ"],
+ "P": ["", "せ", "セ", "せ", "セ"],
"p@": ["", "ぜ"],
+ "P@": ["", "ぜ"],
"@": ["", "゛"],
"[": ["", "゜"],
"{": ["", "「"],
"a": ["", "ち", "チ", "ち", "チ"],
+ "A": ["", "ち", "チ", "ち", "チ"],
"a@": ["", "ぢ"],
+ "A@": ["", "ぢ"],
"s": ["", "と", "ト", "と", "ト"],
+ "S": ["", "と", "ト", "と", "ト"],
"s@": ["", "ど"],
+ "S@": ["", "ど"],
"d": ["", "し", "シ", "し", "シ"],
+ "D": ["", "し", "シ", "し", "シ"],
"d@": ["", "じ"],
+ "D@": ["", "じ"],
"f": ["", "は", "ハ", "は", "ハ"],
+ "F": ["", "は", "ハ", "は", "ハ"],
"f@": ["", "ば"],
+ "F@": ["", "ば"],
"f[": ["", "ぱ"],
+ "F[": ["", "ぱ"],
"g": ["", "き", "キ", "き", "キ"],
+ "G": ["", "き", "キ", "き", "キ"],
"g@": ["", "ぎ"],
+ "G@": ["", "ぎ"],
"h": ["", "く", "ク", "く", "ク"],
+ "H": ["", "く", "ク", "く", "ク"],
"h@": ["", "ぐ"],
+ "H@": ["", "ぐ"],
"j": ["", "ま"],
+ "J": ["", "ま"],
"k": ["", "の"],
+ "K": ["", "の"],
"l": ["", "り"],
+ "L": ["", "り"],
";": ["", "れ"],
":": ["", "け", "ケ", "け", "ケ"],
":@": ["", "げ"],
@@ -70,18 +100,30 @@
"}": ["", "」"],
"z": ["", "つ", "ツ", "つ", "ツ"],
"z@": ["", "づ"],
+ "Z@": ["", "づ"],
"Z": ["", "っ"],
"x": ["", "さ", "サ", "さ", "サ"],
+ "X": ["", "さ", "サ", "さ", "サ"],
"x@": ["", "ざ"],
+ "X@": ["", "ざ"],
"c": ["", "そ", "ソ", "そ", "ソ"],
+ "C": ["", "そ", "ソ", "そ", "ソ"],
"c@": ["", "ぞ"],
+ "C@": ["", "ぞ"],
"v": ["", "ひ", "ヒ", "ひ", "ヒ"],
+ "V": ["", "ひ", "ヒ", "ひ", "ヒ"],
"v@": ["", "び"],
+ "V@": ["", "び"],
"v[": ["", "ぴ"],
+ "V[": ["", "ぴ"],
"b": ["", "こ", "コ", "こ", "コ"],
+ "B": ["", "こ", "コ", "こ", "コ"],
"b@": ["", "ご"],
+ "B@": ["", "ご"],
"n": ["", "み"],
+ "N": ["", "み"],
"m": ["", "も"],
+ "M": ["", "も"],
",": ["", "ね"],
"<": ["", "、"],
".": ["", "る"],
diff --git a/data/rules/keymap-schema.json b/data/rules/keymap-schema.json
new file mode 100644
index 0000000..34c945e
--- /dev/null
+++ b/data/rules/keymap-schema.json
@@ -0,0 +1,54 @@
+{
+ "type": "object",
+ "properties": {
+ "include": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "define" : {
+ "type": "object",
+ "properties": {
+ "keymap": {
+ "type": "object",
+ "patternProperties": {
+ ".*": {
+ "enum": [
+ null,
+ "abort",
+ "first-segment",
+ "last-segment",
+ "commit",
+ "complete",
+ "delete",
+ "delete-forward",
+ "quote",
+ "register",
+ "next-candidate"
+ "previous-candidate",
+ "purge-candidate",
+ "next-segment",
+ "previous-segment",
+ "expand-segment",
+ "shrink-segment",
+ "set-input-mode-hiragana",
+ "set-input-mode-katakana",
+ "set-input-mode-hankaku-katakana",
+ "set-input-mode-latin",
+ "set-input-mode-wide-latin",
+ "set-input-mode-direct",
+ "convert-hiragana",
+ "convert-katakana",
+ "convert-hankaku-katakana",
+ "convert-latin",
+ "convert-wide-latin",
+ "original-candidate"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/data/rules/kzik/Makefile.am b/data/rules/kzik/Makefile.am
index fb38c42..fa900b5 100644
--- a/data/rules/kzik/Makefile.am
+++ b/data/rules/kzik/Makefile.am
@@ -8,24 +8,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/kzik/keymap/default.json b/data/rules/kzik/keymap/default.json
index 307b53a..828a178 100644
--- a/data/rules/kzik/keymap/default.json
+++ b/data/rules/kzik/keymap/default.json
@@ -1,10 +1,5 @@
{
"include": [
"default/default"
- ],
- "define": {
- "keymap": {
- ":": "upper-;"
- }
- }
+ ]
}
diff --git a/data/rules/kzik/metadata.json b/data/rules/kzik/metadata.json
new file mode 100644
index 0000000..5f79ac1
--- /dev/null
+++ b/data/rules/kzik/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "KZIK",
+ "description": "Extended romaji input method based on AZIK, developed by OHASHI Hideya <http://ohac.sytes.net/pukiwiki.php?kzik>",
+ "priority": 10
+}
diff --git a/data/rules/kzik/metadata.json.in b/data/rules/kzik/metadata.json.in
deleted file mode 100644
index 6472671..0000000
--- a/data/rules/kzik/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("KZIK"),
- "description": _("Extended romaji input method based on AZIK, developed by OHASHI Hideya <http://ohac.sytes.net/pukiwiki.php?kzik>"),
- "priority": 10
-}
diff --git a/data/rules/metadata-schema.json b/data/rules/metadata-schema.json
new file mode 100644
index 0000000..584b3c2
--- /dev/null
+++ b/data/rules/metadata-schema.json
@@ -0,0 +1,16 @@
+{
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "priority": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 100
+ }
+ }
+}
diff --git a/data/rules/nicola/Makefile.am b/data/rules/nicola/Makefile.am
index 045ced1..ae7dbe9 100644
--- a/data/rules/nicola/Makefile.am
+++ b/data/rules/nicola/Makefile.am
@@ -9,24 +9,16 @@ files = \
keymap/wide-latin.json \
keymap/direct.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/nicola/metadata.json b/data/rules/nicola/metadata.json
new file mode 100644
index 0000000..85b1398
--- /dev/null
+++ b/data/rules/nicola/metadata.json
@@ -0,0 +1,6 @@
+{
+ "name": "NICOLA",
+ "description": "Input method using thumb shift keyboard developed by the NICOLA (NIhongo-nyuuryoku COnsortium LAyout) project <http://nicola.sunicom.co.jp/index.html>",
+ "filter": "nicola",
+ "priority": 10
+}
diff --git a/data/rules/nicola/metadata.json.in b/data/rules/nicola/metadata.json.in
deleted file mode 100644
index 2458e40..0000000
--- a/data/rules/nicola/metadata.json.in
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": _("NICOLA"),
- "description": _("Input method using thumb shift keyboard developed by the NICOLA (NIhongo-nyuuryoku COnsortium LAyout) project <http://nicola.sunicom.co.jp/index.html>"),
- "filter": "nicola",
- "priority": 10
-}
diff --git a/data/rules/rom-kana-schema.json b/data/rules/rom-kana-schema.json
new file mode 100644
index 0000000..84b0fde
--- /dev/null
+++ b/data/rules/rom-kana-schema.json
@@ -0,0 +1,36 @@
+{
+ "type": "object",
+ "properties": {
+ "include": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "define" : {
+ "type": "object",
+ "properties": {
+ "rom-kana": {
+ "type": "object",
+ "patternProperties": {
+ ".*": {
+ "anyOf": [
+ {
+ "type": "array",
+ "minItems": 2,
+ "maxItems": 5,
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "type": "null"
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/data/rules/rule.mk b/data/rules/rule.mk
index 1da19d1..f125d80 100644
--- a/data/rules/rule.mk
+++ b/data/rules/rule.mk
@@ -1,9 +1,26 @@
-.SUFFIXES: .json .json.in
+SUFFIXES = .json .pot
-edit = sed -e 's!\(^ *"[^"]*": *\)_(\("[^"]*"\))!\1\2!g'
-.json.in.json:
+.json.pot:
$(AM_V_GEN) rm -f $@ $@.tmp; \
srcdir=''; \
test -f ./$< || srcdir=$(srcdir)/; \
- $(edit) $${srcdir}$< >$@.tmp; \
- mv $@.tmp $@
+ $(top_builddir)/tools/gen-metadata-pot $${srcdir}$< \
+ '$$.name' '$$.description' >$@.tmp && mv $@.tmp $@
+
+# 'make check' in po/ requires metadata.pot
+all-local: metadata.pot
+
+check-local:
+ $(AM_V_at)$(JSON_VALIDATE) \
+ --schema $(top_srcdir)/data/rules/metadata-schema.json \
+ metadata.json
+ $(AM_V_at)$(JSON_VALIDATE) \
+ --schema $(top_srcdir)/data/rules/keymap-schema.json \
+ keymap/*.json
+ $(AM_V_at)$(JSON_VALIDATE) \
+ --schema $(top_srcdir)/data/rules/rom-kana-schema.json \
+ rom-kana/*.json
+
+metadata.pot: metadata.json $(top_srcdir)/tools/gen-metadata-pot.c
+
+EXTRA_DIST += metadata.pot
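
The check-local rule above relies on json-validate, and configure.ac (earlier in this patch) falls back to "true" when the tool is missing, so the schema checks are silently skipped in that case. Outside the build, roughly the same validation can be done with the Python jsonschema package; this is a sketch under that assumption, not something the patch ships:

    # Approximate stand-in for the check-local rule, using python-jsonschema
    # instead of json-validate.  Paths mirror the layout under data/rules/.
    import glob
    import json
    import jsonschema

    checks = [
        ('data/rules/metadata-schema.json', 'data/rules/*/metadata.json'),
        ('data/rules/keymap-schema.json',   'data/rules/*/keymap/*.json'),
        ('data/rules/rom-kana-schema.json', 'data/rules/*/rom-kana/*.json'),
    ]

    for schema_path, pattern in checks:
        with open(schema_path) as f:
            schema = json.load(f)
        for path in glob.glob(pattern):
            with open(path) as f:
                jsonschema.validate(json.load(f), schema)  # raises on mismatch
            print('OK', path)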
diff --git a/data/rules/tcode/Makefile.am b/data/rules/tcode/Makefile.am
index 02e8fce..9e0f9ef 100644
--- a/data/rules/tcode/Makefile.am
+++ b/data/rules/tcode/Makefile.am
@@ -7,24 +7,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/tcode/metadata.json b/data/rules/tcode/metadata.json
new file mode 100644
index 0000000..1ea7221
--- /dev/null
+++ b/data/rules/tcode/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "T-Code",
+ "description": "Japanese direct input method developed by the T-Code project <http://openlab.jp/tcode/>",
+ "priority": 10
+}
diff --git a/data/rules/tcode/metadata.json.in b/data/rules/tcode/metadata.json.in
deleted file mode 100644
index e127f3b..0000000
--- a/data/rules/tcode/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("T-Code"),
- "description": _("Japanese direct input method developed by the T-Code project <http://openlab.jp/tcode/>"),
- "priority": 10
-}
diff --git a/data/rules/trycode/Makefile.am b/data/rules/trycode/Makefile.am
index f17c9e9..23eef01 100644
--- a/data/rules/trycode/Makefile.am
+++ b/data/rules/trycode/Makefile.am
@@ -7,24 +7,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/trycode/metadata.json b/data/rules/trycode/metadata.json
new file mode 100644
index 0000000..7b9ad7a
--- /dev/null
+++ b/data/rules/trycode/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "TRY-CODE",
+ "description": "Japanese direct input method based on T-Code, developed by Naoto Takahashi <http://www.m17n.org/ntakahas/npx/aggressive/aggressive4.en.html>",
+ "priority": 10
+}
diff --git a/data/rules/trycode/metadata.json.in b/data/rules/trycode/metadata.json.in
deleted file mode 100644
index c95340f..0000000
--- a/data/rules/trycode/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("TRY-CODE"),
- "description": _("Japanese direct input method based on T-Code, developed by Naoto Takahashi <http://www.m17n.org/ntakahas/npx/aggressive/aggressive4.en.html>"),
- "priority": 10
-}
diff --git a/data/rules/tutcode-touch16x/Makefile.am b/data/rules/tutcode-touch16x/Makefile.am
index d432306..af14949 100644
--- a/data/rules/tutcode-touch16x/Makefile.am
+++ b/data/rules/tutcode-touch16x/Makefile.am
@@ -7,24 +7,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/tutcode-touch16x/metadata.json b/data/rules/tutcode-touch16x/metadata.json
new file mode 100644
index 0000000..d8a41b1
--- /dev/null
+++ b/data/rules/tutcode-touch16x/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "TUT-Code (Touch16+)",
+ "description": "TUT-Code with Touch16+ extension",
+ "priority": 10
+}
diff --git a/data/rules/tutcode-touch16x/metadata.json.in b/data/rules/tutcode-touch16x/metadata.json.in
deleted file mode 100644
index 1112c7b..0000000
--- a/data/rules/tutcode-touch16x/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("TUT-Code (Touch16+)"),
- "description": _("TUT-Code with Touch16+ extension"),
- "priority": 10
-}
diff --git a/data/rules/tutcode/Makefile.am b/data/rules/tutcode/Makefile.am
index 486f003..ce4b99f 100644
--- a/data/rules/tutcode/Makefile.am
+++ b/data/rules/tutcode/Makefile.am
@@ -7,24 +7,16 @@ files = \
keymap/latin.json \
keymap/wide-latin.json \
rom-kana/default.json \
- $(NULL)
-metadata_in = \
- metadata.json.in \
+ metadata.json \
$(NULL)
nobase_rules_DATA = \
$(files) \
- $(metadata_in:.in=) \
$(NULL)
EXTRA_DIST = \
$(files) \
- $(metadata_in) \
- $(NULL)
-
-CLEANFILES = \
- metadata.json \
$(NULL)
--include $(top_srcdir)/data/rules/rule.mk
+include $(top_srcdir)/data/rules/rule.mk
-include $(top_srcdir)/git.mk
diff --git a/data/rules/tutcode/metadata.json b/data/rules/tutcode/metadata.json
new file mode 100644
index 0000000..1d1685f
--- /dev/null
+++ b/data/rules/tutcode/metadata.json
@@ -0,0 +1,5 @@
+{
+ "name": "TUT-Code",
+ "description": "Japanese direct input method developed by Hajime Ohiwa and Takaaki Takashima <http://www.crew.sfc.keio.ac.jp/~chk/intro.html>",
+ "priority": 10
+}
diff --git a/data/rules/tutcode/metadata.json.in b/data/rules/tutcode/metadata.json.in
deleted file mode 100644
index 8bc11ff..0000000
--- a/data/rules/tutcode/metadata.json.in
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "name": _("TUT-Code"),
- "description": _("Japanese direct input method developed by Hajime Ohiwa and Takaaki Takashima <http://www.crew.sfc.keio.ac.jp/~chk/intro.html>"),
- "priority": 10
-}
diff --git a/data/templates/libkkc-data/tools/genfilter.py b/data/templates/libkkc-data/tools/genfilter.py
index 5ffab32..0c5f75a 100644
--- a/data/templates/libkkc-data/tools/genfilter.py
+++ b/data/templates/libkkc-data/tools/genfilter.py
@@ -84,24 +84,24 @@ class FilterGenerator(object):
def generate(self):
size = os.fstat(self.infile.fileno()).st_size
- n = size / self.record_size
+ n = size // self.record_size
m = int(math.ceil(-n*math.log10(ERROR_RATE) /
math.pow(math.log10(2), 2)))
- m = (m/8 + 1)*8
+ m = (m//8 + 1)*8
inmem = mmap.mmap(self.infile.fileno(),
size,
access=mmap.ACCESS_READ)
- outmem = bytearray(m/8)
- for i in xrange(0, n):
+ outmem = bytearray(m//8)
+ for i in range(0, n):
offset = i*self.record_size
b0, b1 = struct.unpack("=LL", inmem[offset:offset+8])
- for k in xrange(0, 4):
+ for k in range(0, 4):
h = murmur_hash3_32(b0, b1, k)
h = int(h * (m / float(0xFFFFFFFF)))
- outmem[h/8] |= (1 << (h%8))
+ outmem[h//8] |= (1 << (h%8))
inmem.close()
- # Convert bytearray to str, for Python 2.6 compatibility.
- self.outfile.write(str(outmem))
+ # Convert bytearray to bytes, for Python 3 compatibility.
+ self.outfile.write(bytes(outmem))
if __name__ == '__main__':
import sys
@@ -110,7 +110,7 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(description='filter')
parser.add_argument('infile', type=argparse.FileType('r'),
help='input file')
- parser.add_argument('outfile', type=argparse.FileType('w'),
+ parser.add_argument('outfile', type=argparse.FileType('wb'),
help='output file')
parser.add_argument('record_size', type=int,
help='record size')
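
The genfilter.py hunks are a Python 2 to 3 port: integer "/" now yields a float, xrange() is gone, and the output file must be opened in binary mode to accept bytes. A short illustration of the division change that the byte arithmetic depends on:

    # Why "/" becomes "//" in FilterGenerator.generate: in Python 3, "/" on
    # two ints is true division (float), "//" is floor division (int).
    size, record_size = 110000, 16

    n = size / record_size       # 6875.0, a float; breaks range() and indexing
    n = size // record_size      # 6875, an int, as the Python 2 code assumed

    m = 1024
    buf = bytearray(m // 8)      # bytearray() needs an integer length
    h = 4099
    buf[h // 8] |= 1 << (h % 8)  # set bit h, as in the bloom-filter loop above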
diff --git a/data/templates/libkkc-data/tools/sortlm.py b/data/templates/libkkc-data/tools/sortlm.py
index a0dd8fe..40f0837 100644
--- a/data/templates/libkkc-data/tools/sortlm.py
+++ b/data/templates/libkkc-data/tools/sortlm.py
@@ -40,10 +40,10 @@ class SortedGenerator(object):
self.__min_cost = 0.0
def read(self):
- print "reading N-grams"
+ print("reading N-grams")
self.__read_tries()
self.__read_ngrams()
- print "min cost = %lf" % self.__min_cost
+ print("min cost = %lf" % self.__min_cost)
def __read_tries(self):
while True:
@@ -58,7 +58,7 @@ class SortedGenerator(object):
line = self.__infile.readline()
if line == "":
break
- line = line.strip()
+ line = line.strip('\n')
if line == "":
break
match = self.__ngram_line_regex.match(line)
@@ -89,7 +89,7 @@ class SortedGenerator(object):
line = self.__infile.readline()
if line == "":
break
- line = line.strip()
+ line = line.strip('\n')
if line == "":
break
match = self.__ngram_line_regex.match(line)
@@ -125,14 +125,11 @@ class SortedGenerator(object):
def quantize(cost, min_cost):
return max(0, min(65535, int(cost * 65535 / min_cost)))
- def cmp_header(a, b):
- return cmp(a[0], b[0])
-
- print "writing 1-gram file"
+ print("writing 1-gram file")
unigram_offsets = {}
unigram_file = open("%s.1gram" % self.__output_prefix, "wb")
offset = 0
- for ids, value in sorted(self.__ngram_entries[0].iteritems()):
+ for ids, value in sorted(self.__ngram_entries[0].items()):
unigram_offsets[ids[0]] = offset
s = struct.pack("=HHH",
quantize(value[0], self.__min_cost),
@@ -143,13 +140,13 @@ class SortedGenerator(object):
offset += 1
unigram_file.close()
- print "writing 2-gram file"
+ print("writing 2-gram file")
bigram_offsets = {}
bigram_file = open("%s.2gram" % self.__output_prefix, "wb")
keys = self.__ngram_entries[1].keys()
items = [(struct.pack("=LL", ids[1], unigram_offsets[ids[0]]), ids) for ids in keys]
offset = 0
- for header, ids in sorted(items, cmp=cmp_header):
+ for header, ids in sorted(items, key=lambda x: x[0]):
value = self.__ngram_entries[1][ids]
bigram_offsets[ids] = offset
s = struct.pack("=HH",
@@ -160,11 +157,11 @@ class SortedGenerator(object):
bigram_file.close()
if len(self.__ngram_entries[2]) > 0:
- print "writing 3-gram file"
+ print("writing 3-gram file")
trigram_file = open("%s.3gram" % self.__output_prefix, "wb")
keys = self.__ngram_entries[2].keys()
items = [(struct.pack("=LL", ids[2], bigram_offsets[(ids[0], ids[1])]), ids) for ids in keys]
- for header, ids in sorted(items, cmp=cmp_header):
+ for header, ids in sorted(items, key=lambda x: x[0]):
value = self.__ngram_entries[2][ids]
s = struct.pack("=H",
quantize(value[0], self.__min_cost))
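
sorted() in Python 3 no longer takes a cmp= comparator, which is why cmp_header is dropped in favour of a key function; sorting by the first tuple element gives the same order, since the packed headers are plain byte strings. A minimal illustration with made-up headers:

    # Equivalent orderings: Python 2's cmp_header vs the key function used above.
    import struct

    items = [(struct.pack("=LL", word_id, offset), ids)
             for word_id, offset, ids in [(2, 0, (20,)), (1, 5, (10,)), (2, 1, (21,))]]

    # Python 2 (removed):  sorted(items, cmp=lambda a, b: cmp(a[0], b[0]))
    # Python 3 (added):
    for header, ids in sorted(items, key=lambda x: x[0]):
        print(ids)   # (10,), (20,), (21,)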
diff --git a/libkkc/Makefile.am b/libkkc/Makefile.am
index 02ca2ab..28a9f68 100644
--- a/libkkc/Makefile.am
+++ b/libkkc/Makefile.am
@@ -108,6 +108,7 @@ libkkc_shell_sources = \
template.vala \
numeric-template.vala \
expression.vala \
+ server.vala \
$(NULL)
libkkc_la_SOURCES = \
diff --git a/libkkc/candidate-list.vala b/libkkc/candidate-list.vala
index 92ddb79..e6e6bfe 100644
--- a/libkkc/candidate-list.vala
+++ b/libkkc/candidate-list.vala
@@ -122,7 +122,8 @@ namespace Kkc {
}
uint get_page_start_cursor_pos (uint pos) {
- return (pos / page_size) * page_size;
+ var page_index = (pos - page_start) / page_size;
+ return page_index * page_size + page_start;
}
/**
@@ -210,23 +211,33 @@ namespace Kkc {
}
}
+ bool update_cursor_pos (uint pos) {
+ if (0 <= pos && pos < _candidates.size && pos != _cursor_pos) {
+ _cursor_pos = (int) pos;
+ notify_property ("cursor-pos");
+ return true;
+ }
+ return false;
+ }
+
bool cursor_move (int step) {
if (_candidates.is_empty || step == 0)
return false;
+ int start = _cursor_pos - (int) page_start;
+ int total = (int) _candidates.size - (int) page_start;
+
if (round) {
- var pos = (_cursor_pos + step) % _candidates.size;
+ int pos = (start + step) % total;
if (pos < 0)
- pos += _candidates.size;
- _cursor_pos = pos;
- notify_property ("cursor-pos");
- return true;
- } else {
- var pos = _cursor_pos + step;
- if (0 <= pos && pos < _candidates.size) {
- _cursor_pos = pos;
- notify_property ("cursor-pos");
+ pos += total;
+ if (update_cursor_pos (pos + page_start))
return true;
+ } else {
+ var pos = start + step;
+ if (0 <= pos && pos < total) {
+ if (update_cursor_pos (pos + page_start))
+ return true;
}
}
@@ -239,7 +250,11 @@ namespace Kkc {
* @return `true` if cursor position has changed, `false` otherwise.
*/
public bool cursor_up () {
- return cursor_move (-1);
+ if (_cursor_pos >= page_start)
+ return cursor_move (-1);
+ else if (update_cursor_pos (_cursor_pos - 1))
+ return true;
+ return false;
}
/**
@@ -248,32 +263,35 @@ namespace Kkc {
* @return `true` if cursor position has changed, `false` otherwise
*/
public bool cursor_down () {
- return cursor_move (1);
+ if (_cursor_pos >= page_start)
+ return cursor_move (1);
+ else if (update_cursor_pos (_cursor_pos + 1))
+ return true;
+ return false;
}
bool page_move (int step) {
if (_candidates.is_empty || step == 0)
return false;
+ int start = _cursor_pos - (int) page_start;
+ int total = (int) _candidates.size - (int) page_start;
+
if (round) {
- var pos = (_cursor_pos + page_size * step) % _candidates.size;
+ int pos = (start + (int) page_size * step) % total;
if (pos < 0)
- pos += _candidates.size;
- pos = get_page_start_cursor_pos (pos);
- if (pos != _cursor_pos) {
- _cursor_pos = (int) pos;
- notify_property ("cursor-pos");
- return true;
+ pos += total;
+ if (pos + (int) page_start < _candidates.size) {
+ var new_pos = get_page_start_cursor_pos (pos + page_start);
+ if (update_cursor_pos (new_pos))
+ return true;
}
} else {
- var pos = _cursor_pos + page_size * step;
- if (0 <= pos && pos < _candidates.size) {
- pos = get_page_start_cursor_pos (pos);
- if (pos != _cursor_pos) {
- _cursor_pos = (int) pos;
- notify_property ("cursor-pos");
+ var pos = start + (int) page_size * step;
+ if (0 <= pos && pos < total) {
+ var new_pos = get_page_start_cursor_pos (pos + page_start);
+ if (update_cursor_pos (new_pos))
return true;
- }
}
}
return false;
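
The candidate-list.vala rework makes cursor and page movement wrap inside the region that begins at page_start instead of across the whole candidate list. A small Python model of that arithmetic, offered as an interpretation of the Vala code above rather than code from the patch (it omits the update_cursor_pos bounds checks):

    # Model of the reworked paging arithmetic in CandidateList.
    def page_start_cursor_pos(pos, page_start, page_size):
        page_index = (pos - page_start) // page_size
        return page_index * page_size + page_start

    def cursor_move(cursor_pos, step, size, page_start, round=True):
        start = cursor_pos - page_start      # position relative to page_start
        total = size - page_start            # number of paged candidates
        if round:
            pos = (start + step) % total
            return pos + page_start
        pos = start + step
        return pos + page_start if 0 <= pos < total else cursor_pos

    # 10 candidates, paging starts at index 4, pages of 3:
    assert page_start_cursor_pos(8, 4, 3) == 7   # index 8 sits in page 7..9
    assert cursor_move(9, 1, 10, 4) == 4         # wraps back to page_start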
diff --git a/libkkc/context.vala b/libkkc/context.vala
index e328c34..d94a248 100644
--- a/libkkc/context.vala
+++ b/libkkc/context.vala
@@ -326,6 +326,35 @@ namespace Kkc {
}
}
+ /**
+ * Process an explicit command in the context.
+ *
+ * This function is rarely used in applications, but is called by
+ * the D-Bus service.
+ *
+ * @param command a command
+ *
+ * @return `true` if the command is handled, `false` otherwise
+ */
+ internal bool process_command_event (string command) {
+ var key = new KeyEvent (Keysyms.VoidSymbol, 0, 0);
+ while (true) {
+ var handler_type = state.handler_type;
+ var handler = handlers.get (handler_type);
+ state.this_command_key = key;
+ if (handler.dispatch_command (command, state, key)) {
+ notify_property ("input");
+ return true;
+ }
+ // state.handler_type may change if handler cannot
+ // handle the event. In that case retry with the new
+ // handler. Otherwise exit the loop.
+ if (handler_type == state.handler_type) {
+ return false;
+ }
+ }
+ }
+
/**
* Reset the context.
*/
diff --git a/libkkc/convert-segment-state-handler.vala b/libkkc/convert-segment-state-handler.vala
index 3a4e05d..10a48b2 100644
--- a/libkkc/convert-segment-state-handler.vala
+++ b/libkkc/convert-segment-state-handler.vala
@@ -36,6 +36,8 @@ namespace Kkc {
do_select_unhandled);
register_command_callback ("last-segment",
do_select_unhandled);
+ register_command_callback ("commit",
+ do_select_unhandled);
register_command_callback ("delete",
do_clear_unhandled);
register_command_callback ("original-candidate",
@@ -49,21 +51,19 @@ namespace Kkc {
"convert-" + enum_value.value_nick,
do_clear_unhandled);
}
-
- register_command_callback (null, do_select_unhandled);
}
- bool do_next_candidate (string? command, State state, KeyEvent key) {
+ bool do_next_candidate (string command, State state, KeyEvent key) {
state.candidates.cursor_down ();
return true;
}
- bool do_previous_candidate (string? command, State state, KeyEvent key) {
+ bool do_previous_candidate (string command, State state, KeyEvent key) {
state.candidates.cursor_up ();
return true;
}
- bool do_purge_candidate (string? command, State state, KeyEvent key) {
+ bool do_purge_candidate (string command, State state, KeyEvent key) {
if (state.candidates.cursor_pos >= 0) {
var candidate = state.candidates.get ();
state.purge_candidate (candidate);
@@ -72,21 +72,29 @@ namespace Kkc {
return true;
}
- bool do_select_unhandled (string? command, State state, KeyEvent key) {
+ bool do_select_unhandled (string command, State state, KeyEvent key) {
if (state.candidates.cursor_pos >= 0)
state.candidates.select ();
state.handler_type = typeof (ConvertSentenceStateHandler);
return false;
}
- bool do_clear_unhandled (string? command, State state, KeyEvent key) {
+ bool do_clear_unhandled (string command, State state, KeyEvent key) {
state.candidates.clear ();
state.handler_type = typeof (ConvertSentenceStateHandler);
return false;
}
-
+
+ public override bool default_command_callback (string? command,
+ State state,
+ KeyEvent key)
+ {
+ return do_select_unhandled (command ?? "", state, key);
+ }
+
public override bool process_key_event (State state, KeyEvent key) {
- return dispatch_command (state, key);
+ var command = state.lookup_key (key);
+ return dispatch_command (command, state, key);
}
}
}
diff --git a/libkkc/convert-sentence-state-handler.vala b/libkkc/convert-sentence-state-handler.vala
index 476c8ae..ae97e68 100644
--- a/libkkc/convert-sentence-state-handler.vala
+++ b/libkkc/convert-sentence-state-handler.vala
@@ -25,7 +25,7 @@ namespace Kkc {
this.mode = mode;
}
- public bool call (string? command, State state, KeyEvent key) {
+ public bool call (string command, State state, KeyEvent key) {
state.convert_segment_by_kana_mode (mode);
return true;
}
@@ -57,6 +57,7 @@ namespace Kkc {
register_command_callback ("abort", do_clear_unhandled);
register_command_callback ("delete", do_clear_unhandled);
+ register_command_callback ("commit", do_commit);
var enum_class = (EnumClass) typeof (KanaMode).class_ref ();
for (int i = enum_class.minimum; i <= enum_class.maximum; i++) {
@@ -67,62 +68,70 @@ namespace Kkc {
new ConvertCommandHandler (
(KanaMode) enum_value.value));
}
-
- register_command_callback (null, do_);
}
- bool do_original_candidate (string? command, State state, KeyEvent key) {
+ bool do_original_candidate (string command, State state, KeyEvent key) {
var segment = state.segments[state.segments.cursor_pos];
segment.output = segment.input;
return true;
}
- bool do_expand_segment (string? command, State state, KeyEvent key) {
+ bool do_expand_segment (string command, State state, KeyEvent key) {
if (state.segments.cursor_pos < state.segments.size - 1)
state.resize_segment (1);
return true;
}
- bool do_shrink_segment (string? command, State state, KeyEvent key) {
+ bool do_shrink_segment (string command, State state, KeyEvent key) {
if (state.segments[state.segments.cursor_pos].input.char_count () > 1)
state.resize_segment (-1);
return true;
}
- bool do_next_segment (string? command, State state, KeyEvent key) {
+ bool do_next_segment (string command, State state, KeyEvent key) {
state.segments.next_segment ();
return true;
}
- bool do_previous_segment (string? command, State state, KeyEvent key) {
+ bool do_previous_segment (string command, State state, KeyEvent key) {
state.segments.previous_segment ();
return true;
}
- bool do_first_segment (string? command, State state, KeyEvent key) {
+ bool do_first_segment (string command, State state, KeyEvent key) {
state.segments.first_segment ();
return true;
}
- bool do_last_segment (string? command, State state, KeyEvent key) {
+ bool do_last_segment (string command, State state, KeyEvent key) {
state.segments.last_segment ();
return true;
}
- bool do_start_segment_conversion (string? command, State state, KeyEvent key) {
+ bool do_start_segment_conversion (string command, State state, KeyEvent key) {
state.lookup (state.segments[state.segments.cursor_pos]);
state.candidates.first ();
state.handler_type = typeof (ConvertSegmentStateHandler);
return false;
}
- bool do_clear_unhandled (string? command, State state, KeyEvent key) {
+ bool do_clear_unhandled (string command, State state, KeyEvent key) {
state.segments.clear ();
state.handler_type = typeof (InitialStateHandler);
return true;
}
- bool do_ (string? command, State state, KeyEvent key) {
+ bool do_commit (string command, State state, KeyEvent key) {
+ state.output.append (state.segments.get_output ());
+ state.select_sentence ();
+ state.reset ();
+ return true;
+ }
+
+ public override bool default_command_callback (string? command,
+ State state,
+ KeyEvent key)
+ {
state.output.append (state.segments.get_output ());
state.select_sentence ();
state.reset ();
@@ -142,7 +151,8 @@ namespace Kkc {
}
public override bool process_key_event (State state, KeyEvent key) {
- return dispatch_command (state, key);
+ var command = state.lookup_key (key);
+ return dispatch_command (command, state, key);
}
}
}
diff --git a/libkkc/encoding.vala b/libkkc/encoding.vala
index fe9ced1..af64ef7 100644
--- a/libkkc/encoding.vala
+++ b/libkkc/encoding.vala
@@ -19,15 +19,15 @@ namespace Kkc {
// XXX: we use Vala string to represent byte array, assuming that
// it does not contain null element
class EncodingConverter : Object, Initable {
- static const int BUFSIZ = 4096;
- static const string INTERNAL_ENCODING = "UTF-8";
+ const int BUFSIZ = 4096;
+ const string INTERNAL_ENCODING = "UTF-8";
struct EncodingCodingSystemEntry {
string key;
string value;
}
- static const EncodingCodingSystemEntry ENCODING_TO_CODING_SYSTEM_RULE[] = {
+ const EncodingCodingSystemEntry ENCODING_TO_CODING_SYSTEM_RULE[] = {
{ "UTF-8", "utf-8" },
{ "EUC-JP", "euc-jp" },
{ "Shift_JIS", "shift_jis" },
diff --git a/libkkc/initial-state-handler.vala b/libkkc/initial-state-handler.vala
index 3679b60..927560f 100644
--- a/libkkc/initial-state-handler.vala
+++ b/libkkc/initial-state-handler.vala
@@ -25,7 +25,7 @@ namespace Kkc {
this.mode = mode;
}
- public bool call (string? command, State state, KeyEvent key) {
+ public bool call (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size > 0) {
state.selection.erase ();
@@ -61,21 +61,20 @@ namespace Kkc {
register_command_callback ("last-segment", do_last_character);
register_command_callback ("quote", do_quote);
register_command_callback ("register", do_register);
-
- register_command_callback (null, do_);
+ register_command_callback ("commit", do_commit);
}
- bool do_quote (string? command, State state, KeyEvent key) {
+ bool do_quote (string command, State state, KeyEvent key) {
state.quoted = true;
return true;
}
- bool do_register (string? command, State state, KeyEvent key) {
+ bool do_register (string command, State state, KeyEvent key) {
state.request_selection_text ();
return true;
}
- bool do_abort (string? command, State state, KeyEvent key) {
+ bool do_abort (string command, State state, KeyEvent key) {
if (state.overriding_input != null) {
state.overriding_input = null;
return true;
@@ -90,7 +89,7 @@ namespace Kkc {
return false;
}
- bool do_delete (string? command, State state, KeyEvent key) {
+ bool do_delete (string command, State state, KeyEvent key) {
if (state.overriding_input != null) {
state.overriding_input = null;
return true;
@@ -115,7 +114,7 @@ namespace Kkc {
return false;
}
- bool do_delete_forward (string? command, State state, KeyEvent key) {
+ bool do_delete_forward (string command, State state, KeyEvent key) {
if (state.input_characters_cursor_pos >= 0 &&
state.input_characters_cursor_pos < state.input_characters.size) {
state.input_characters.remove_at (
@@ -128,7 +127,7 @@ namespace Kkc {
return false;
}
- bool do_complete (string? command, State state, KeyEvent key) {
+ bool do_complete (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size > 0) {
if (state.completion_iterator == null)
@@ -144,7 +143,7 @@ namespace Kkc {
return false;
}
- bool do_next_candidate (string? command, State state, KeyEvent key) {
+ bool do_next_candidate (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size == 0)
return false;
@@ -180,7 +179,7 @@ namespace Kkc {
return true;
}
- bool do_next_character (string? command, State state, KeyEvent key) {
+ bool do_next_character (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size == 0)
return false;
@@ -194,7 +193,7 @@ namespace Kkc {
return true;
}
- bool do_previous_character (string? command, State state, KeyEvent key) {
+ bool do_previous_character (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size == 0)
return false;
@@ -210,7 +209,7 @@ namespace Kkc {
return true;
}
- bool do_first_character (string? command, State state, KeyEvent key) {
+ bool do_first_character (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size == 0)
return false;
@@ -219,7 +218,7 @@ namespace Kkc {
return true;
}
- bool do_last_character (string? command, State state, KeyEvent key) {
+ bool do_last_character (string command, State state, KeyEvent key) {
state.finish_input_editing ();
if (state.input_characters.size == 0)
return false;
@@ -228,7 +227,28 @@ namespace Kkc {
return true;
}
- bool do_ (string? command, State state, KeyEvent key) {
+ bool do_commit (string command, State state, KeyEvent key) {
+ bool retval = false;
+
+ if (state.overriding_input != null) {
+ state.output.append (state.get_input ());
+ state.overriding_input = null;
+ state.reset ();
+ retval = true;
+ }
+
+ var last_input = state.get_input ();
+ state.finish_input_editing ();
+ var input = state.get_input ();
+ state.output.append (input);
+ state.reset ();
+ return retval || input.length > 0 || last_input != input;
+ }
+
+ public override bool default_command_callback (string? command,
+ State state,
+ KeyEvent key)
+ {
bool retval = false;
if (state.overriding_input != null) {
@@ -277,7 +297,6 @@ namespace Kkc {
return true;
}
}
-
var last_input = state.get_input ();
state.finish_input_editing ();
var input = state.get_input ();
@@ -347,7 +366,7 @@ namespace Kkc {
return true;
}
- return dispatch_command (state, key);
+ return dispatch_command (command, state, key);
}
}
}
diff --git a/libkkc/key-event-filter.vala b/libkkc/key-event-filter.vala
index 9d9a089..3ceb16e 100644
--- a/libkkc/key-event-filter.vala
+++ b/libkkc/key-event-filter.vala
@@ -53,7 +53,7 @@ namespace Kkc {
* @see Rule
*/
class SimpleKeyEventFilter : KeyEventFilter {
- static const uint[] modifier_keyvals = {
+ const uint[] modifier_keyvals = {
Keysyms.Shift_L,
Keysyms.Shift_R,
Keysyms.Control_L,
diff --git a/libkkc/key-event.vala b/libkkc/key-event.vala
index 0baa85c..6e28aa6 100644
--- a/libkkc/key-event.vala
+++ b/libkkc/key-event.vala
@@ -148,7 +148,7 @@ namespace Kkc {
throw new KeyEventFormatError.PARSE_FAILED (
"unknown keyval %s", _name);
}
- from_x_event (_keyval, 0, _modifiers);
+ this.from_x_event (_keyval, 0, _modifiers);
}
/**
diff --git a/libkkc/keymap.vala b/libkkc/keymap.vala
index f89c2a6..42af3b7 100644
--- a/libkkc/keymap.vala
+++ b/libkkc/keymap.vala
@@ -32,7 +32,7 @@ namespace Kkc {
* Object representing a keymap.
*/
public class Keymap : Object {
- static const KeymapCommandEntry Commands[] = {
+ const KeymapCommandEntry Commands[] = {
{ "abort", N_("Abort") },
{ "first-segment", N_("First Segment") },
{ "last-segment", N_("Last Segment") },
diff --git a/libkkc/rom-kana-utils.vala b/libkkc/rom-kana-utils.vala
index 32cffcb..fe16960 100644
--- a/libkkc/rom-kana-utils.vala
+++ b/libkkc/rom-kana-utils.vala
@@ -38,7 +38,7 @@ namespace Kkc {
string? hankaku_katakana;
}
- static const KanaTableEntry[] KanaTable = {
+ const KanaTableEntry[] KanaTable = {
{'ア', "あ", "ｱ"}, {'イ', "い", "ｲ"}, {'ウ', "う", "ｳ"},
{'エ', "え", "ｴ"}, {'オ', "お", "ｵ"}, {'カ', "か", "ｶ"},
{'キ', "き", "ｷ"}, {'ク', "く", "ｸ"}, {'ケ', "け", "ｹ"},
@@ -73,13 +73,13 @@ namespace Kkc {
{'、', "、", "､"}, {'・', "・", "･"}, {'ー', "ー", "ｰ"}
};
- static const KanaTableEntry[] HankakuKatakanaSubstitute = {
+ const KanaTableEntry[] HankakuKatakanaSubstitute = {
{'ヮ', null, "ﾜ"},
{'ヵ', null, "ｶ"},
{'ヶ', null, "ｹ"}
};
- static const string[] WideLatinTable = {
+ const string[] WideLatinTable = {
"　", "！", "”", "＃", "＄", "％", "＆", "’",
"（", "）", "＊", "＋", "，", "−", "．", "／",
"０", "１", "２", "３", "４", "５", "６", "７",
@@ -94,7 +94,7 @@ namespace Kkc {
"ｘ", "ｙ", "ｚ", "｛", "｜", "｝", "〜"
};
- static const string[] KanaRomTable = {
+ const string[] KanaRomTable = {
"x", "a", "x", "i", "x", "u", "x", "e", "x", "o", "k",
"g", "k", "g", "k", "g", "k", "g", "k", "g", "s", "z",
"s", "z", "s", "z", "s", "z", "s", "z", "t", "d", "t",
@@ -126,20 +126,20 @@ namespace Kkc {
return get_okurigana_prefix_for_char (head);
}
- static const string[] KanjiNumericTable = {
+ const string[] KanjiNumericTable = {
"〇", "一", "二", "三", "四", "五", "六", "七", "八", "九"
};
- static const string[] DaijiNumericTable = {
+ const string[] DaijiNumericTable = {
"零", "壱", "弐", "参", "四", "伍", "六", "七", "八", "九"
};
- static const string?[] KanjiNumericalPositionTable = {
+ const string?[] KanjiNumericalPositionTable = {
null, "十", "百", "千", "万", null, null, null, "億",
null, null, null, "兆", null, null, null, null, "京"
};
- static const string?[] DaijiNumericalPositionTable = {
+ const string?[] DaijiNumericalPositionTable = {
null, "拾", "百", "阡", "萬", null, null, null, "億",
null, null, null, "兆", null, null, null, null, "京"
};
diff --git a/libkkc/rom-kana.vala b/libkkc/rom-kana.vala
index 529a1cc..96ecaa6 100644
--- a/libkkc/rom-kana.vala
+++ b/libkkc/rom-kana.vala
@@ -41,7 +41,7 @@ namespace Kkc {
}
}
- static const string[] PUNCTUATION_RULE = {"。、", "．，", "。，", "．、"};
+ const string[] PUNCTUATION_RULE = {"。、", "．，", "。，", "．、"};
class RomKanaNode : Object {
internal RomKanaEntry? entry;
@@ -410,7 +410,7 @@ namespace Kkc {
* @return `true` if uc is in a valid range, `false` otherwise
*/
public bool is_valid (unichar uc) {
- if (uc > 256)
+ if (uc >= 256)
return false;
uint8 mask = (uint8) (1 << (uc % 8));
return (current_node.valid[uc / 8] & mask) != 0 ||
diff --git a/libkkc/rule.vala b/libkkc/rule.vala
index 61aa8ee..aa364a3 100644
--- a/libkkc/rule.vala
+++ b/libkkc/rule.vala
@@ -98,7 +98,7 @@ namespace Kkc {
}
else {
throw new RuleParseError.FAILED (
- "\"rom-kana\" must have two to four elements");
+ "\"rom-kana\" must have two to five elements");
}
} else {
throw new RuleParseError.FAILED (
@@ -122,6 +122,18 @@ namespace Kkc {
else
parent_map.set (key, value);
}
+
+ // Remove null entries added to parent_map, while
+ // traversing 'include'. We don't need to do that
+ // recursively, since those entries override the
+ // corresponding entries in ancestor maps.
+ var parent_iter = parent_map.map_iterator ();
+ while (parent_iter.next ()) {
+ var value = parent_iter.get_value ();
+ if (value.get_node_type () == Json.NodeType.NULL)
+ parent_iter.unset ();
+ }
+
load_rom_kana (root_node, parent_map);
}
}
diff --git a/libkkc/server.vala b/libkkc/server.vala
new file mode 100644
index 0000000..f4c25c0
--- /dev/null
+++ b/libkkc/server.vala
@@ -0,0 +1,492 @@
+/*
+ * Copyright (C) 2011-2015 Daiki Ueno <ueno@gnu.org>
+ * Copyright (C) 2011-2015 Red Hat, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+using Gee;
+
+namespace Kkc
+{
+ namespace DBusUtils {
+ internal static void send_property_change (DBusConnection connection,
+ string object_path,
+ string interface_name,
+ string name,
+ Variant value)
+ {
+ var builder = new VariantBuilder (VariantType.ARRAY);
+ var invalid_builder = new VariantBuilder (new VariantType ("as"));
+
+ builder.add ("{sv}", name, value);
+
+ try {
+ connection.emit_signal (null,
+ object_path,
+ "org.freedesktop.DBus.Properties",
+ "PropertiesChanged",
+ new Variant ("(sa{sv}as)",
+ interface_name,
+ builder,
+ invalid_builder)
+ );
+ } catch (Error e) {
+ stderr.printf ("%s\n", e.message);
+ }
+ }
+ }
+
+ [DBus (name = "org.du_a.Kkc.CandidateList")]
+ public class DBusCandidateList : Object
+ {
+ DBusConnection connection;
+ string object_path;
+ Kkc.CandidateList candidates;
+
+ public DBusCandidateList (DBusConnection connection,
+ string object_path,
+ Kkc.CandidateList candidates)
+ {
+ this.connection = connection;
+ this.object_path = object_path;
+ this.candidates = candidates;
+ this.candidates.populated.connect (() => {
+ this.populated ();
+ });
+ this.candidates.selected.connect ((candidate) => {
+ this.selected (candidate.midasi,
+ candidate.okuri,
+ candidate.text,
+ candidate.annotation ?? "");
+ });
+ this.candidates.notify["cursor-pos"].connect ((p) => {
+ DBusUtils.send_property_change (
+ connection,
+ object_path,
+ "org.du_a.Kkc.CandidateList",
+ "CursorPos",
+ new Variant.int32 (cursor_pos));
+ });
+ register ();
+ }
+
+ ~DBusCandidateList () {
+ unregister ();
+ }
+
+ public int cursor_pos {
+ get {
+ return this.candidates.cursor_pos;
+ }
+ }
+
+ public int size {
+ get {
+ return this.candidates.size;
+ }
+ }
+
+ public bool select_at (uint index_in_page) {
+ return this.candidates.select_at (index_in_page);
+ }
+
+ public void select () {
+ this.candidates.select ();
+ }
+
+ public bool first () {
+ return this.candidates.first ();
+ }
+
+ public bool next () {
+ return this.candidates.next ();
+ }
+
+ public bool previous () {
+ return this.candidates.previous ();
+ }
+
+ public bool cursor_up () {
+ return this.candidates.cursor_up ();
+ }
+
+ public bool cursor_down () {
+ return this.candidates.cursor_down ();
+ }
+
+ public bool page_up () {
+ return this.candidates.page_up ();
+ }
+
+ public bool page_down () {
+ return this.candidates.page_down ();
+ }
+
+ public uint page_start {
+ get {
+ return this.candidates.page_start;
+ }
+ }
+
+ public uint page_size {
+ get {
+ return this.candidates.page_size;
+ }
+ }
+
+ public bool round {
+ get {
+ return this.candidates.round;
+ }
+ }
+
+ public bool page_visible {
+ get {
+ return this.candidates.page_visible;
+ }
+ }
+
+ public signal void populated ();
+
+ public signal void selected (string midasi, bool okuri,
+ string text, string annotation);
+
+ public new void @get (int index, out string midasi, out bool okuri,
+ out string text, out string annotation)
+ {
+ var candidate = this.candidates.get (index);
+ midasi = candidate.midasi;
+ okuri = candidate.okuri;
+ text = candidate.text;
+ annotation = candidate.annotation ?? "";
+ }
+
+ uint register_id = 0;
+
+ void register () {
+ try {
+ register_id = connection.register_object (object_path, this);
+ } catch (IOError e) {
+ error ("Could not register D-Bus object at %s: %s",
+ object_path, e.message);
+ }
+ }
+
+ internal void unregister () {
+ if (register_id > 0) {
+ connection.unregister_object (register_id);
+ register_id = 0;
+ }
+ }
+ }
+
+ [DBus (name = "org.du_a.Kkc.SegmentList")]
+ public class DBusSegmentList : Object
+ {
+ DBusConnection connection;
+ string object_path;
+ Kkc.SegmentList segments;
+
+ public DBusSegmentList (DBusConnection connection,
+ string object_path,
+ Kkc.SegmentList segments)
+ {
+ this.connection = connection;
+ this.object_path = object_path;
+ this.segments = segments;
+ this.segments.notify["cursor-pos"].connect ((p) => {
+ DBusUtils.send_property_change (
+ connection,
+ object_path,
+ "org.du_a.Kkc.SegmentList",
+ "CursorPos",
+ new Variant.int32 (cursor_pos));
+ });
+ register ();
+ }
+
+ ~DBusSegmentList () {
+ unregister ();
+ }
+
+ public int cursor_pos {
+ get {
+ return this.segments.cursor_pos;
+ }
+ }
+
+ public int size {
+ get {
+ return this.segments.size;
+ }
+ }
+
+ public new void @get (int index, out string input, out string output) {
+ var segment = this.segments.get (index);
+ input = segment.input;
+ output = segment.output;
+ }
+
+ public bool first_segment () {
+ return this.segments.first_segment ();
+ }
+
+ public bool last_segment () {
+ return this.segments.last_segment ();
+ }
+
+ public void next_segment () {
+ this.segments.next_segment ();
+ }
+
+ public void previous_segment () {
+ this.segments.previous_segment ();
+ }
+
+ public string get_output () {
+ return this.segments.get_output ();
+ }
+
+ public string get_input () {
+ return this.segments.get_input ();
+ }
+
+ uint register_id = 0;
+
+ void register () {
+ try {
+ register_id = connection.register_object (object_path, this);
+ } catch (IOError e) {
+ error ("Could not register D-Bus object at %s: %s",
+ object_path, e.message);
+ }
+ }
+
+ internal void unregister () {
+ if (register_id > 0) {
+ connection.unregister_object (register_id);
+ register_id = 0;
+ }
+ }
+ }
+
+ [DBus (name = "org.du_a.Kkc.Context")]
+ public class DBusContext : Object
+ {
+ DBusConnection connection;
+ string object_path;
+ Kkc.Context context;
+ DBusCandidateList candidates;
+ DBusSegmentList segments;
+
+ public DBusContext (DBusConnection connection,
+ string object_path,
+ Kkc.Context context)
+ {
+ this.connection = connection;
+ this.object_path = object_path;
+ this.context = context;
+ this.candidates = new DBusCandidateList (
+ connection,
+ "%s/CandidateList".printf (object_path),
+ context.candidates);
+ this.segments = new DBusSegmentList (
+ connection,
+ "%s/SegmentList".printf (object_path),
+ context.segments);
+ context.notify["input"].connect ((p) => {
+ DBusUtils.send_property_change (
+ connection,
+ object_path,
+ "org.du_a.Kkc.Context",
+ "Input",
+ new Variant.string (input));
+ });
+ context.notify["input_cursor_pos"].connect ((p) => {
+ DBusUtils.send_property_change (
+ connection,
+ object_path,
+ "org.du_a.Kkc.Context",
+ "InputCursorPos",
+ new Variant.int32 ((int32) input_cursor_pos));
+ });
+ register ();
+ }
+
+ ~DBusContext () {
+ unregister ();
+ }
+
+ public string input {
+ owned get {
+ return this.context.input;
+ }
+ }
+
+ public int input_cursor_pos {
+ get {
+ return this.context.input_cursor_pos;
+ }
+ }
+
+ public uint input_mode {
+ get {
+ return (uint) this.context.input_mode;
+ }
+ set {
+ this.context.input_mode = (InputMode) value;
+ }
+ }
+
+ public uint punctuation_style {
+ get {
+ return (uint) this.context.punctuation_style;
+ }
+ set {
+ this.context.punctuation_style = (PunctuationStyle) value;
+ }
+ }
+
+ public bool auto_correct {
+ get {
+ return this.context.auto_correct;
+ }
+ set {
+ this.context.auto_correct = value;
+ }
+ }
+
+ public bool process_key_event (uint keyval, uint keycode,
+ uint modifiers)
+ {
+ var event = new Kkc.KeyEvent (keyval, keycode,
+ (ModifierType) modifiers);
+ return this.context.process_key_event (event);
+ }
+
+ public bool process_command_event (string command) {
+ return this.context.process_command_event (command);
+ }
+
+ public void reset () {
+ this.context.reset ();
+ }
+
+ public bool has_output () {
+ return this.context.has_output ();
+ }
+
+ public string peek_output () {
+ return this.context.peek_output ();
+ }
+
+ public string poll_output () {
+ return this.context.poll_output ();
+ }
+
+ public void clear_output () {
+ this.context.clear_output ();
+ }
+
+ uint register_id = 0;
+
+ void register () {
+ try {
+ register_id = connection.register_object (object_path, this);
+ } catch (IOError e) {
+ error ("Could not register D-Bus object at %s: %s",
+ object_path, e.message);
+ }
+ }
+
+ internal void unregister () {
+ if (register_id > 0) {
+ connection.unregister_object (register_id);
+ candidates.unregister ();
+ segments.unregister ();
+ register_id = 0;
+ }
+ }
+ }
+
+ [DBus (name = "org.du_a.Kkc.Server")]
+ public class DBusServer : Object {
+ DBusConnection connection;
+ Kkc.LanguageModel model;
+ Kkc.DictionaryList dictionaries;
+ Kkc.Rule? typing_rule;
+ uint own_name_id;
+ uint context_id = 0;
+
+ public DBusServer (DBusConnection connection,
+ Kkc.LanguageModel model,
+ Kkc.DictionaryList dictionaries,
+ Kkc.Rule? typing_rule) {
+ this.connection = connection;
+ this.model = model;
+ this.dictionaries = dictionaries;
+ this.typing_rule = typing_rule;
+ own_name_id = Bus.own_name_on_connection (
+ connection,
+ "org.du_a.Kkc.Server",
+ BusNameOwnerFlags.NONE,
+ on_name_acquired, on_name_lost);
+ }
+
+ ~DBusServer () {
+ Bus.unown_name (own_name_id);
+ }
+
+ void on_name_acquired (DBusConnection connection, string name) {
+ try {
+ connection.register_object ("/org/du_a/Kkc/Server", this);
+ } catch (IOError e) {
+ error ("Could not register D-Bus service %s: %s",
+ name, e.message);
+ }
+ }
+
+ void on_name_lost (DBusConnection connection, string name) {
+ }
+
+ public string create_context (BusName sender) {
+ var context = new Kkc.Context (this.model);
+ context.dictionaries = dictionaries;
+ if (typing_rule != null)
+ context.typing_rule = typing_rule;
+ var object_path = "/org/du_a/Kkc/Context_%u".printf (context_id++);
+ var dbus_context = new DBusContext (connection,
+ object_path,
+ context);
+ contexts.set (object_path, dbus_context);
+ Bus.watch_name_on_connection (
+ connection,
+ sender,
+ BusNameWatcherFlags.NONE,
+ null,
+ (c, n) => {
+ destroy_context (object_path);
+ });
+ return object_path;
+ }
+
+ Map<string,DBusContext> contexts = new HashMap<string,DBusContext> ();
+
+ public void destroy_context (string object_path) {
+ DBusContext context;
+ if (contexts.unset (object_path, out context))
+ context.unregister ();
+ }
+ }
+}
diff --git a/libkkc/state.vala b/libkkc/state.vala
index 4ba4c50..c4b3ba5 100644
--- a/libkkc/state.vala
+++ b/libkkc/state.vala
@@ -324,11 +324,14 @@ namespace Kkc {
out _candidates)) {
return template.expand (_candidates[0].text);
}
- template = new OkuriganaTemplate (input);
- if (segment_dict.lookup_candidates (template.source,
- template.okuri,
- out _candidates)) {
- return template.expand (_candidates[0].text);
+ var count = input.char_count ();
+ if (count > 1) {
+ template = new OkuriganaTemplate (input, count - 1);
+ if (segment_dict.lookup_candidates (template.source,
+ template.okuri,
+ out _candidates)) {
+ return template.expand (_candidates[0].text);
+ }
}
return null;
}
@@ -385,7 +388,10 @@ namespace Kkc {
// 1. Look up candidates from user segment dictionaries.
lookup_template (new NumericTemplate (normalized_input), true);
lookup_template (new SimpleTemplate (normalized_input), true);
- lookup_template (new OkuriganaTemplate (normalized_input), true);
+ for (var i = normalized_input.char_count (); i > 1; i--) {
+ lookup_template (
+ new OkuriganaTemplate (normalized_input, i - 1), true);
+ }
// 2. Look up the most frequently used unigram from language model.
if (normalized_input.char_count () > 1) {
@@ -405,7 +411,6 @@ namespace Kkc {
// 3. Look up candidates from system segment dictionaries.
lookup_template (new NumericTemplate (normalized_input), false);
lookup_template (new SimpleTemplate (normalized_input), false);
- lookup_template (new OkuriganaTemplate (normalized_input), false);
// 4. Do sentence conversion with N-best search.
@@ -445,9 +450,17 @@ namespace Kkc {
builder.str);
if (!kana_candidates.contains (sentence))
candidates.add (sentence);
+
+ }
+
+ // 4.3. Look up okuri-ari candidates from system segment
+ // dictionaries, for each possible okurigana combination.
+ for (var i = normalized_input.char_count (); i > 1; i--) {
+ lookup_template (
+ new OkuriganaTemplate (normalized_input, i - 1), false);
}
- // 4.3. Add Kana candidates at the end.
+ // 4.4. Add Kana candidates at the end.
candidates.add_all (kana_candidates);
candidates.populated ();
@@ -731,10 +744,10 @@ namespace Kkc {
}
interface CommandHandler : Object {
- public abstract bool call (string? command, State state, KeyEvent key);
+ public abstract bool call (string command, State state, KeyEvent key);
}
- delegate bool CommandCallback (string? command, State state, KeyEvent key);
+ delegate bool CommandCallback (string command, State state, KeyEvent key);
class CallbackCommandHandler : CommandHandler, Object {
unowned CommandCallback cb;
@@ -743,7 +756,7 @@ namespace Kkc {
this.cb = cb;
}
- public bool call (string? command,
+ public bool call (string command,
State state,
KeyEvent key)
{
@@ -771,13 +784,19 @@ namespace Kkc {
register_command_handler (command, new CallbackCommandHandler (cb));
}
- public bool dispatch_command (State state, KeyEvent key) {
- var command = state.lookup_key (key);
+ public abstract bool default_command_callback (string? command,
+ State state,
+ KeyEvent key);
+
+ public bool dispatch_command (string? command,
+ State state,
+ KeyEvent key)
+ {
if (command != null && command_handlers.has_key (command))
return command_handlers.get (command).call (command,
state,
key);
- return default_command_handler.call (command, state, key);
+ return default_command_callback (command, state, key);
}
public abstract bool process_key_event (State state, KeyEvent key);
diff --git a/libkkc/template.vala b/libkkc/template.vala
index 7768f80..92c9995 100644
--- a/libkkc/template.vala
+++ b/libkkc/template.vala
@@ -42,19 +42,15 @@ namespace Kkc {
string? okurigana = null;
- public OkuriganaTemplate (string source) {
- var count = source.char_count ();
- if (count > 1) {
- var last_char_index = source.index_of_nth_char (count - 1);
- this.okurigana = source[last_char_index:source.length];
- string? prefix = RomKanaUtils.get_okurigana_prefix (
- this.okurigana);
- this.source = source[0:last_char_index] + prefix;
- this.okuri = true;
- } else {
- this.source = source;
- this.okuri = false;
- }
+ public OkuriganaTemplate (string source, int pos) {
+ assert (source.char_count () > 1);
+ assert (0 < pos && pos < source.char_count ());
+
+ var last_char_index = source.index_of_nth_char (pos);
+ this.okurigana = source[last_char_index:source.length];
+ string? prefix = RomKanaUtils.get_okurigana_prefix (this.okurigana);
+ this.source = source[0:last_char_index] + prefix;
+ this.okuri = true;
}
public string expand (string text) {
diff --git a/po/POTFILES.in b/po/POTFILES.in
index cd2e1b8..aee91a8 100644
--- a/po/POTFILES.in
+++ b/po/POTFILES.in
@@ -1,13 +1,14 @@
libkkc/keymap.vala
tools/kkc.vala
-data/rules/tutcode/metadata.json.in
-data/rules/azik/metadata.json.in
-data/rules/trycode/metadata.json.in
-data/rules/default/metadata.json.in
-data/rules/azik-jp106/metadata.json.in
-data/rules/tcode/metadata.json.in
-data/rules/act/metadata.json.in
-data/rules/kana/metadata.json.in
-data/rules/nicola/metadata.json.in
-data/rules/tutcode-touch16x/metadata.json.in
-data/rules/kzik/metadata.json.in
+data/rules/tutcode/metadata.pot
+data/rules/azik/metadata.pot
+data/rules/atok/metadata.pot
+data/rules/trycode/metadata.pot
+data/rules/default/metadata.pot
+data/rules/azik-jp106/metadata.pot
+data/rules/tcode/metadata.pot
+data/rules/act/metadata.pot
+data/rules/kana/metadata.pot
+data/rules/nicola/metadata.pot
+data/rules/tutcode-touch16x/metadata.pot
+data/rules/kzik/metadata.pot
diff --git a/tests/Makefile.am b/tests/Makefile.am
index 2ac9145..ec6b547 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
@@ -248,4 +248,11 @@ clean-local:
-rm -rf test-user-segment-dictionary
-rm -rf test-user-sentence-dictionary
+GITIGNOREFILES = \
+ test-driver \
+ data/rules \
+ data/models \
+ $(TESTS:=.c) \
+ $(NULL)
+
-include $(top_srcdir)/git.mk
diff --git a/tests/candidate-list.vala b/tests/candidate-list.vala
index 54d9d38..f089cd0 100644
--- a/tests/candidate-list.vala
+++ b/tests/candidate-list.vala
@@ -35,9 +35,9 @@ class CandidateListTests : Kkc.TestCase {
candidates.page_start = 2;
candidates.page_size = 3;
+ candidates.add (new Kkc.Candidate ("a", false, "0"));
candidates.add (new Kkc.Candidate ("a", false, "1"));
candidates.add (new Kkc.Candidate ("a", false, "2"));
- candidates.add (new Kkc.Candidate ("a", false, "3"));
assert (!candidates.page_visible);
candidates.cursor_down ();
@@ -45,21 +45,25 @@ class CandidateListTests : Kkc.TestCase {
candidates.cursor_down ();
assert (candidates.page_visible);
+ candidates.add (new Kkc.Candidate ("a", false, "3"));
candidates.add (new Kkc.Candidate ("a", false, "4"));
- candidates.add (new Kkc.Candidate ("a", false, "5"));
candidates.round = false;
assert (!candidates.page_down ());
assert (!candidates.page_up ());
+ candidates.add (new Kkc.Candidate ("a", false, "5"));
+ candidates.add (new Kkc.Candidate ("a", false, "6"));
+ candidates.add (new Kkc.Candidate ("a", false, "7"));
+
candidates.round = true;
assert (candidates.page_down ());
- assert (candidates.cursor_pos == 0);
+ assert (candidates.cursor_pos == 5);
assert (candidates.page_up ());
- assert (candidates.cursor_pos == 3);
+ assert (candidates.cursor_pos == 2);
assert (candidates.select_at (1));
- assert (candidates.cursor_pos == 4);
+ assert (candidates.cursor_pos == 3);
candidates.first ();
assert (candidates.next ());
@@ -68,8 +72,9 @@ class CandidateListTests : Kkc.TestCase {
assert (candidates.cursor_pos == 0);
assert (candidates.next ());
assert (candidates.next ());
+ assert (candidates.cursor_pos == 2);
assert (candidates.next ());
- assert (candidates.cursor_pos == 0);
+ assert (candidates.cursor_pos == 5);
}
}
diff --git a/tests/conversions-segment.json b/tests/conversions-segment.json
index 63d0b9b..33baadf 100644
--- a/tests/conversions-segment.json
+++ b/tests/conversions-segment.json
@@ -122,11 +122,11 @@
{
"keys": "w a t a s h i n o n a m a e h a n a k a n o d e s u SPC SPC",
"input": "わたしのなまえはなかのです",
- "segments": "わたしの名前は中野です",
+ "segments": "渡しの名前は中野です",
"segments_size": 3,
"segments_cursor_pos": 0,
"output": "",
- "candidates_size": 4,
+ "candidates_size": 5,
"input_cursor_pos": -1
},
{
@@ -136,7 +136,7 @@
"segments_size": 3,
"segments_cursor_pos": 0,
"output": "",
- "candidates_size": 4,
+ "candidates_size": 5,
"input_cursor_pos": -1
},
{
@@ -152,17 +152,17 @@
{
"keys": "w a t a s h i n o n a m a e h a n a k a n o d e s u SPC SPC Right",
"input": "わたしのなまえはなかのです",
- "segments": "わたしの名前は中野です",
+ "segments": "渡しの名前は中野です",
"segments_size": 3,
"segments_cursor_pos": 1,
"output": "",
- "candidates_size": 4,
+ "candidates_size": 5,
"input_cursor_pos": -1
},
{
"keys": "w a t a s h i n o n a m a e h a n a k a n o d e s u SPC SPC Right SPC",
"input": "わたしのなまえはなかのです",
- "segments": "わたしのなまえは中野です",
+ "segments": "渡しのなまえは中野です",
"segments_size": 3,
"segments_cursor_pos": 1,
"output": "",
@@ -172,7 +172,7 @@
{
"keys": "w a t a s h i n o n a m a e h a n a k a n o d e s u SPC SPC Right SPC SPC",
"input": "わたしのなまえはなかのです",
- "segments": "わたしのナマエハ中野です",
+ "segments": "渡しのナマエハ中野です",
"segments_size": 3,
"segments_cursor_pos": 1,
"output": "",
diff --git a/tests/conversions-user-dictionary.json b/tests/conversions-user-dictionary.json
index 6c52df5..c5ddace 100644
--- a/tests/conversions-user-dictionary.json
+++ b/tests/conversions-user-dictionary.json
@@ -29,12 +29,12 @@
"segments": "",
"segments_size": 0,
"segments_cursor_pos": -1,
- "output": "わたしの名前はなかのです"
+ "output": "渡しの名前はなかのです"
},
{
"keys": "w a t a s h i n o n a m a e h a n a k a n o d e s u SPC",
"input": "わたしのなまえはなかのです",
- "segments": "わたしの名前はなかのです",
+ "segments": "渡しの名前はなかのです",
"segments_size": 2,
"segments_cursor_pos": 0,
"output": ""
@@ -42,7 +42,7 @@
{
"keys": "w a t a s h i n o n a m a e h a n a k a n o d e s u SPC Right SPC Right Right SPC",
"input": "わたしのなまえはなかのです",
- "segments": "わたしのなまえはなかのです",
+ "segments": "渡しのなまえはなかのです",
"segments_size": 2,
"segments_cursor_pos": 1,
"output": ""
diff --git a/tests/template.vala b/tests/template.vala
index 1f8fb5e..5900cd1 100644
--- a/tests/template.vala
+++ b/tests/template.vala
@@ -16,7 +16,7 @@ class TemplateTests : Kkc.TestCase {
assert (source == "source");
assert (!okuri);
- template = new Kkc.OkuriganaTemplate ("かう");
+ template = new Kkc.OkuriganaTemplate ("かう", 1);
template.get ("source", out source,
"okuri", out okuri);
diff --git a/tools/Makefile.am b/tools/Makefile.am
index e65c513..7d05834 100644
--- a/tools/Makefile.am
+++ b/tools/Makefile.am
@@ -18,6 +18,7 @@ AM_CPPFLAGS = -include $(CONFIG_HEADER)
bin_PROGRAMS = kkc
bin_SCRIPTS = kkc-package-data
+noinst_PROGRAMS = gen-metadata-pot
kkc_VALAFLAGS = \
--vapidir=$(top_srcdir)/libkkc \
@@ -36,7 +37,17 @@ kkc_CFLAGS = \
kkc_LDADD = $(top_builddir)/libkkc/libkkc.la $(LIBKKC_LIBS)
kkc_SOURCES = kkc.vala
+gen_metadata_pot_VALAFLAGS = --pkg json-glib-1.0 --pkg posix $(VALAFLAGS)
+gen_metadata_pot_SOURCES = gen-metadata-pot.vala
+gen_metadata_pot_CFLAGS = $(JSON_GLIB_CFLAGS)
+gen_metadata_pot_LDADD = $(JSON_GLIB_LIBS)
+
DISTCLEANFILES = kkc-package-data
EXTRA_DIST = kkc-package-data.in
+GITIGNOREFILES = \
+ kkc.c \
+ gen-metadata-pot.c \
+ *_vala.stamp \
+ $(NULL)
-include $(top_srcdir)/git.mk
diff --git a/tools/gen-metadata-pot.vala b/tools/gen-metadata-pot.vala
new file mode 100644
index 0000000..4bcfdca
--- /dev/null
+++ b/tools/gen-metadata-pot.vala
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2015 Daiki Ueno <ueno@gnu.org>
+ * Copyright (C) 2015 Red Hat, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+static int main (string[] args) {
+ if (args.length < 3) {
+ stderr.printf ("Usage: gen-metadata-pot FILE EXPR...\n");
+ return Posix.EXIT_FAILURE;
+ }
+
+ var parser = new Json.Parser ();
+ try {
+ parser.load_from_file (args[1]);
+ } catch (Error e) {
+ stderr.printf ("Can't load json file %s: %s\n",
+ args[1], e.message);
+ return Posix.EXIT_FAILURE;
+ }
+
+ var root = parser.get_root ();
+ for (var i = 2; i < args.length; i++) {
+ Json.Node result;
+ try {
+ result = Json.Path.query (args[i], root);
+ } catch (Error e) {
+ stderr.printf ("can't parse json expression \"%s\": %s\n",
+ args[i], e.message);
+ return Posix.EXIT_FAILURE;
+ }
+ var array = result.get_array ();
+ array.foreach_element ((a, index_, node) => {
+ stdout.printf ("msgid \"%s\"\nmsgstr \"\"\n\n",
+ node.get_string ());
+ });
+ }
+
+ return Posix.EXIT_SUCCESS;
+}
\ No newline at end of file
diff --git a/tools/kkc.vala b/tools/kkc.vala
index c383584..e9186d8 100644
--- a/tools/kkc.vala
+++ b/tools/kkc.vala
@@ -58,6 +58,7 @@ static void usage (string[] args, FileStream output) {
help Shows this information
decoder Run decoder
context Run context
+ server Run server
Use "%s COMMAND --help" to get help on each command.
""").printf (
@@ -85,11 +86,13 @@ static int main (string[] args) {
Environment.set_prgname ("%s %s".printf (args[0], new_args[0]));
- Repl repl;
+ Tool tool;
if (new_args[0] == "decoder")
- repl = new DecoderRepl ();
+ tool = new DecoderTool ();
else if (new_args[0] == "context")
- repl = new ContextRepl ();
+ tool = new ContextTool ();
+ else if (new_args[0] == "server")
+ tool = new ServerTool ();
else if (new_args[0] == "help") {
usage (args, stdout);
return 0;
@@ -100,14 +103,14 @@ static int main (string[] args) {
}
try {
- repl.parse_arguments (new_args);
+ tool.parse_arguments (new_args);
} catch (Error e) {
usage (args, stderr);
return 1;
}
try {
- repl.run ();
+ tool.run ();
} catch (Error e) {
return 1;
}
@@ -115,12 +118,12 @@ static int main (string[] args) {
return 0;
}
-interface Repl : Object {
+interface Tool : Object {
public abstract bool parse_arguments (string[] args) throws Error;
public abstract bool run () throws Error;
}
-class DecoderRepl : Object, Repl {
+class DecoderTool : Object, Tool {
public bool parse_arguments (string[] args) throws Error {
var o = new OptionContext (
_("- run decoder on the command line"));
@@ -175,7 +178,7 @@ class DecoderRepl : Object, Repl {
}
}
-class ContextRepl : Object, Repl {
+class ContextTool : Object, Tool {
public bool parse_arguments (string[] args) throws Error {
var o = new OptionContext (
_("- run context on the command line"));
@@ -268,3 +271,81 @@ class ContextRepl : Object, Repl {
return true;
}
}
+
+class ServerTool : Object, Tool {
+ public bool parse_arguments (string[] args) throws Error {
+ var o = new OptionContext (
+ _("- run server on the command line"));
+ o.add_main_entries (context_entries, "libkkc");
+ o.add_group ((owned) model_group);
+
+ return o.parse (ref args);
+ }
+
+ public bool run () throws Error {
+ if (opt_typing_rule == "?") {
+ var rules = Kkc.Rule.list ();
+ foreach (var rule in rules) {
+ stdout.printf ("%s - %s: %s\n",
+ rule.name,
+ rule.label,
+ rule.description);
+ }
+ return true;
+ }
+
+ Kkc.LanguageModel model;
+ try {
+ var name = opt_model == null ? "sorted3" : opt_model;
+ model = Kkc.LanguageModel.load (name);
+ } catch (Kkc.LanguageModelError e) {
+ stderr.printf ("%s\n", e.message);
+ return false;
+ }
+
+ var dictionaries = new Kkc.DictionaryList ();
+ if (opt_user_dictionary != null) {
+ try {
+ dictionaries.add (
+ new Kkc.UserDictionary (opt_user_dictionary));
+ } catch (GLib.Error e) {
+ stderr.printf ("can't open user dictionary %s: %s",
+ opt_user_dictionary, e.message);
+ return false;
+ }
+ }
+
+ if (opt_system_dictionary == null)
+ opt_system_dictionary = Path.build_filename (Config.DATADIR,
+ "skk", "SKK-JISYO.L");
+
+ try {
+ dictionaries.add (
+ new Kkc.SystemSegmentDictionary (opt_system_dictionary));
+ } catch (GLib.Error e) {
+ stderr.printf ("can't open system dictionary %s: %s",
+ opt_system_dictionary, e.message);
+ return false;
+ }
+
+ Kkc.Rule? typing_rule = null;
+ if (opt_typing_rule != null) {
+ try {
+ var metadata = Kkc.RuleMetadata.find (opt_typing_rule);
+ typing_rule = new Kkc.Rule (metadata);
+ } catch (Kkc.RuleParseError e) {
+ stderr.printf ("can't load rule \"%s\": %s\n",
+ opt_typing_rule,
+ e.message);
+ return false;
+ }
+ }
+
+ var connection = Bus.get_sync (BusType.SESSION);
+ var server = new Kkc.DBusServer (connection,
+ model, dictionaries, typing_rule);
+ var loop = new MainLoop (null, true);
+ loop.run ();
+ return true;
+ }
+}