From 62470d8a3353519eb1baa03dbf7f0c9988827553 Mon Sep 17 00:00:00 2001
From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:31:59 -0400
Subject: [PATCH 01/43] added sample data file to test logistic regression program

---
 samples/cpp/data01.xml | 58 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 58 insertions(+)
 create mode 100644 samples/cpp/data01.xml

diff --git a/samples/cpp/data01.xml b/samples/cpp/data01.xml
new file mode 100644
index 0000000000..d17167ccae
--- /dev/null
+++ b/samples/cpp/data01.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?>
+<opencv_storage>
+<datamat type_id="opencv-matrix">
+  <rows>40</rows>
+  <cols>784</cols>
+  <dt>f</dt>
+  <data>
+    [40 rows x 784 columns of pixel values (0 or 149), each row a flattened
+     28x28 image; bulk numeric data elided]
+  </data>
+</datamat>
+</opencv_storage>
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 
0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 0 0 0 0 0 0 0 149 149 149 149 149 149 149 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 149 149 149 149 149 149 149 149 149 149 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 149 149 149 149 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + +
+      </data></datamat>
+  <labelsmat type_id="opencv-matrix">
+    <rows>40</rows>
+    <cols>1</cols>
+    <dt>d</dt>
+    <data>
+      0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1</data></labelsmat>
+</opencv_storage>
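Before the sample program that consumes this file, it may help to see how the two matrices stored above can be read back. This is a minimal sketch (not itself part of the patch series) using OpenCV's ``FileStorage`` API and the key names ``datamat`` and ``labelsmat`` that appear in the file:

    #include <iostream>
    #include <opencv2/core/core.hpp>

    using namespace cv;

    int main()
    {
        // read both matrices back from the sample data file;
        // "datamat" holds one sample per row, "labelsmat" one label per row
        Mat data, labels;
        FileStorage fs("data01.xml", FileStorage::READ);
        fs["datamat"] >> data;
        fs["labelsmat"] >> labels;
        fs.release();

        std::cout << data.rows << " samples with " << data.cols
                  << " features each; " << labels.rows << " labels" << std::endl;
        return 0;
    }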
From 835529330644a94d9b5dfdafd2dba9f85984cc8e Mon Sep 17 00:00:00 2001
From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:32:39 -0400
Subject: [PATCH 02/43] added program to demonstrate use of logistic regression classifier

---
 samples/cpp/sample_logistic_regression.cpp | 97 ++++++++++++++++++++++
 1 file changed, 97 insertions(+)
 create mode 100644 samples/cpp/sample_logistic_regression.cpp

diff --git a/samples/cpp/sample_logistic_regression.cpp b/samples/cpp/sample_logistic_regression.cpp
new file mode 100644
index 0000000000..95649e3911
--- /dev/null
+++ b/samples/cpp/sample_logistic_regression.cpp
@@ -0,0 +1,97 @@
+///////////////////////////////////////////////////////////////////////////////////////
+// sample_logistic_regression.cpp
+// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+
+// By downloading, copying, installing or using the software you agree to this license.
+// If you do not agree to this license, do not download, install,
+// copy or use the software.
+
+// This is a sample program demonstrating classification of digits 0 and 1 using Logistic Regression
+
+// AUTHOR:
+// Rahul Kavi rahulkavi[at]live[at]com
+//
+
+#include <iostream>
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/ml/ml.hpp>
+
+using namespace std;
+using namespace cv;
+
+
+int main()
+{
+    Mat data_temp, labels_temp;
+    Mat data, labels;
+    Mat responses, result;
+
+    FileStorage f;
+
+    cout<<"*****************************************************************************************"<<endl;
+    cout<<"loading the dataset from \"data01.xml\""<<endl;
+    cout<<"*****************************************************************************************"<<endl;
+
+    f.open("data01.xml", FileStorage::READ);
+
+    f["datamat"] >> data_temp;
+    f["labelsmat"] >> labels_temp;
+
+    data_temp.convertTo(data, CV_32F);
+    labels_temp.convertTo(labels, CV_32F);
+
+    cout<<"initializing Logistic Regression Parameters\n"<<endl;
+
+    CvLR_TrainParams params = CvLR_TrainParams();
+    params.alpha = 0.5;
+    params.num_iters = 10000;
+    params.norm = CvLR::REG_L2;
+    params.regularized = 1;
+    params.train_method = CvLR::MINI_BATCH;
+    params.minibatchsize = 10;
+
+    cout<<"training Logistic Regression classifier\n"<<endl;
+
+    CvLR lr(data, labels, params);
+
+    cout<<"predicting on the training data\n"<<endl;
+
+    lr.predict(data, responses);
+    labels.convertTo(labels, CV_32S);
+
+    cout<<"original label :: predicted label"<<endl;
+    for(int i = 0; i < labels.rows; i++)
+    {
+        cout<<labels.at<int>(i,0)<<" :: "<< responses.at<int>(i,0)<<endl;
+    }
+
+    // calculate accuracy on the training data
+    result = (labels == responses)/255;
+    cout<<"accuracy: "<<((double)cv::sum(result)[0]/result.rows)*100<<"%"<<endl;
+
+    return 0;
+}

From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:33:33 -0400
Subject: [PATCH 03/43] added reference to logistic regression document

---
 modules/ml/doc/ml.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/modules/ml/doc/ml.rst b/modules/ml/doc/ml.rst
index 86da3ac4ff..5c7cc145c1 100644
--- a/modules/ml/doc/ml.rst
+++ b/modules/ml/doc/ml.rst
@@ -19,3 +19,4 @@ Most of the classification and regression algorithms are implemented as C++ classes
     expectation_maximization
     neural_networks
     mldata
+    logistic_regression

From 3bf6c3c2aa34748fdf11b34485bcb38ad57ede7e Mon Sep 17 00:00:00 2001
From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:34:04 -0400
Subject: [PATCH 04/43] added documentation for logistic regression classifier

---
 modules/ml/doc/logistic_regression.rst | 170 +++++++++++++++++++++
 1 file changed, 170 insertions(+)
 create mode 100644 modules/ml/doc/logistic_regression.rst

diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst
new file mode 100644
index 0000000000..06f695641d
--- /dev/null
+++ b/modules/ml/doc/logistic_regression.rst
@@ -0,0 +1,170 @@
+Logistic Regression
+===================
+
+.. highlight:: cpp
+
+ML implements logistic regression, which is a probabilistic classification technique. Logistic Regression is a binary classification algorithm closely related to Support Vector Machines (SVM).
+Like SVM, Logistic Regression can be extended to work on multi-class classification problems such as digit recognition (i.e. recognizing digits like 0, 1, 2, 3, ... from the given images).
+This version of Logistic Regression supports both binary and multi-class classification (for multi-class it creates multiple 2-class classifiers).
+In order to train the logistic regression classifier, Batch Gradient Descent and Mini-Batch Gradient Descent algorithms are used (see [BatchDesWiki]_).
+Logistic Regression is a discriminative classifier (see [LogRegTomMitch]_ for more details). Logistic Regression is implemented as a C++ class in ``CvLR``.
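For reference, the parameter update that both optimizers apply can be written compactly. The following is the standard batch gradient descent step for the logistic cost (regularization terms omitted), stated here as a summary rather than quoted from the implementation; the mini-batch variant applies the same step to successive subsets of the training rows:

.. math::

    \theta \leftarrow \theta - \frac{\alpha}{m} X^T \left( g(X\theta) - y \right)

where :math:`X` is the :math:`m \times n` training matrix (with a leading column of ones added internally), :math:`y` is the column of labels, :math:`\alpha` is the learning rate, and :math:`g` is the sigmoid function defined below.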
+In Logistic Regression, we try to optimize the training parameter
+:math:`\theta`
+such that the hypothesis
+:math:`0 \leq h_\theta(x) \leq 1` is achieved.
+We have
+:math:`h_\theta(x) = g(\theta^T x)`
+and
+:math:`g(z) = \frac{1}{1+e^{-z}}`
+as the logistic or sigmoid function.
+The term "Logistic" in Logistic Regression refers to this function.
+For given data of a binary classification problem of classes 0 and 1,
+one can determine that the given data instance belongs to class 1 if
+:math:`h_\theta(x) \geq 0.5`
+or class 0 if
+:math:`h_\theta(x) < 0.5`.
+
+In Logistic Regression, choosing the right parameters is of utmost importance for reducing the training error and ensuring high training accuracy.
+``CvLR_TrainParams`` is the structure that defines the parameters required to train a Logistic Regression classifier.
+The learning rate is determined by ``CvLR_TrainParams.alpha``. It determines how fast we approach the solution and must be a positive real number.
+Optimization algorithms like Batch Gradient Descent and Mini-Batch Gradient Descent are supported in ``CvLR``.
+It is important to specify the number of iterations these optimization algorithms have to run.
+The number of iterations is set by ``CvLR_TrainParams.num_iters``.
+The number of iterations can be thought of as the number of steps taken, while the learning rate specifies whether each step is a long one or a short one. These two parameters together define how fast we arrive at a possible solution.
+In order to compensate for overfitting, regularization is performed, which can be enabled by setting ``CvLR_TrainParams.regularized`` to a positive integer (greater than zero).
+One can specify the kind of regularization to be performed by setting ``CvLR_TrainParams.norm`` to ``CvLR::REG_L1`` or ``CvLR::REG_L2``.
+``CvLR`` provides a choice of two training methods: Batch Gradient Descent or Mini-Batch Gradient Descent. To specify this, set ``CvLR_TrainParams.train_method`` to either ``CvLR::BATCH`` or ``CvLR::MINI_BATCH``.
+If ``CvLR_TrainParams.train_method`` is set to ``CvLR::MINI_BATCH``, the size of the mini batch has to be set to a positive integer using ``CvLR_TrainParams.minibatchsize``.
+
+A sample set of training parameters for the Logistic Regression classifier can be initialized as follows:
+
+::
+
+    CvLR_TrainParams params;
+    params.alpha = 0.5;
+    params.num_iters = 10000;
+    params.norm = CvLR::REG_L2;
+    params.regularized = 1;
+    params.train_method = CvLR::MINI_BATCH;
+    params.minibatchsize = 10;
+
+.. [LogRegWiki] http://en.wikipedia.org/wiki/Logistic_regression. Wikipedia article about the Logistic Regression algorithm.
+
+.. [RenMalik2003] Learning a Classification Model for Segmentation. Proc. CVPR, Nice, France (2003).
+
+.. [LogRegTomMitch] http://www.cs.cmu.edu/~tom/NewChapters.html. "Generative and Discriminative Classifiers: Naive Bayes and Logistic Regression" in Machine Learning, Tom Mitchell.
+
+.. [BatchDesWiki] http://en.wikipedia.org/wiki/Gradient_descent_optimization. Wikipedia article about Gradient Descent based optimization.
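Assuming ``data`` and ``labels`` are ``CV_32F`` matrices with one sample and one label per row, and ``params`` has been initialized as above, a typical train-and-predict sequence with the class documented below looks like this (a sketch, not a literal excerpt from the patch):

::

    CvLR lr(data, labels, params);       // this constructor trains the classifier immediately
    cv::Mat predicted_labels;
    lr.predict(data, predicted_labels);  // predicted labels are returned as CV_32S, one per row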
+CvLR_TrainParams
+----------------
+.. ocv:struct:: CvLR_TrainParams
+
+    Parameters of the Logistic Regression training algorithm. You can initialize the structure using a constructor, or by declaring the variable and initializing the individual parameters.
+
+    The training parameters for Logistic Regression:
+
+    .. ocv:member:: double alpha
+
+        The learning rate of the optimization algorithm. The higher the value, the faster the rate, and vice versa. If the value is too high, the learning algorithm may overshoot the optimal parameters and result in lower training accuracy. If the value is too low, the learning algorithm converges towards the optimal parameters very slowly. The value must be a positive real number. You can experiment with different values with small increments, as in 0.0001, 0.0003, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, ..., and select the learning rate with the least training error.
+
+    .. ocv:member:: int num_iters
+
+        The number of iterations required for the learning algorithm (Gradient Descent or Mini-Batch Gradient Descent). It has to be a positive integer. You can try different numbers of iterations, such as 100, 1000, 2000, 3000, 5000, 10000, and so on.
+
+    .. ocv:member:: int norm
+
+        The type of regularization norm applied. It takes the value ``CvLR::REG_L1`` or ``CvLR::REG_L2``.
+
+    .. ocv:member:: int regularized
+
+        It should be set to a positive integer (greater than zero) in order to enable regularization.
+
+    .. ocv:member:: int train_method
+
+        The kind of training method used to train the classifier. It should be set to either ``CvLR::BATCH`` or ``CvLR::MINI_BATCH``.
+
+    .. ocv:member:: int minibatchsize
+
+        If the training method is set to ``CvLR::MINI_BATCH``, it has to be set to a positive integer. It can range from 1 to the number of training samples.
+
+
+CvLR_TrainParams::CvLR_TrainParams
+----------------------------------
+The constructors.
+
+.. ocv:function:: CvLR_TrainParams::CvLR_TrainParams()
+
+.. ocv:function:: CvLR_TrainParams::CvLR_TrainParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize)
+
+    :param alpha: Specifies the learning rate.
+
+    :param num_iters: Specifies the number of iterations.
+
+    :param norm: Specifies the kind of regularization to be applied: ``CvLR::REG_L1`` or ``CvLR::REG_L2``. To use this, set ``CvLR_TrainParams.regularized`` to an integer greater than zero.
+
+    :param regularized: Enables or disables regularization. Set to a positive integer (greater than zero) to enable, and to 0 to disable.
+
+    :param train_method: Specifies the kind of training method used. It should be set to either ``CvLR::BATCH`` or ``CvLR::MINI_BATCH``. If using ``CvLR::MINI_BATCH``, set ``CvLR_TrainParams.minibatchsize`` to a positive integer.
+
+    :param minibatchsize: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent.
+
+By initializing this structure, one can set all the parameters required for the Logistic Regression classifier.
+
+CvLR
+----
+.. ocv:class:: CvLR : public CvStatModel
+
+Implements the Logistic Regression classifier.
+
+CvLR::CvLR
+----------
+The constructors.
+
+.. ocv:function:: CvLR::CvLR()
+
+.. ocv:function:: CvLR::CvLR(const cv::Mat& data, const cv::Mat& labels, const CvLR_TrainParams& params)
+
+    :param data: The data matrix of type ``CV_32F``, arranged with one data instance per row.
+
+    :param labels: The label matrix of type ``CV_32F``, arranged with one label per row.
+
+    :param params: The training parameters for the classifier, of type ``CvLR_TrainParams``.
+
+The constructor with parameters allows one to create a Logistic Regression object initialized with the given data, and trains it.
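Since ``CvLR`` derives from ``CvStatModel``, a trained classifier can also be written to and restored from disk through the inherited ``save``/``load`` interface. A minimal sketch, assuming a trained object ``lr`` (the file name here is arbitrary):

::

    lr.save("lr_model.xml");           // stores learnt_thetas and the label maps
    CvLR lr_restored;
    lr_restored.load("lr_model.xml");  // ready to call predict()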
+CvLR::train
+-----------
+Trains the Logistic Regression classifier and returns true if successful.
+
+.. ocv:function:: bool CvLR::train(const cv::Mat& data, const cv::Mat& labels)
+
+    :param data: The data matrix of type ``CV_32F``, arranged with one data instance per row.
+
+    :param labels: The label matrix of type ``CV_32F``, arranged with one label per row.
+
+
+CvLR::predict
+-------------
+Predicts responses for input samples; the return value is of type float.
+
+.. ocv:function:: float CvLR::predict(const Mat& data)
+
+    :param data: The data variable must be a single-row matrix of type ``CV_32F``.
+
+.. ocv:function:: float CvLR::predict( const Mat& data, Mat& predicted_labels )
+
+    :param data: The input data for the prediction algorithm. The ``data`` variable should be of type ``CV_32F``.
+
+    :param predicted_labels: Predicted labels as a column matrix of type ``CV_32S``.
+
+The function ``CvLR::predict(const Mat& data)`` returns the label of a single data instance. It should be used only if ``data`` contains a single row.
+
+
+CvLR::get_learnt_mat()
+----------------------
+This function returns the trained parameters arranged across rows. For a two-class classification problem, it returns a row matrix.
+
+.. ocv:function:: cv::Mat CvLR::get_learnt_mat()
+
+It returns the learnt parameters of the Logistic Regression classifier as a matrix of type ``CV_32F``.

From 3039ed7682baf3ded94a159088ad5b51436a5082 Mon Sep 17 00:00:00 2001
From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:34:53 -0400
Subject: [PATCH 05/43] added test for logistic regression

---
 modules/ml/test/test_lr.cpp | 345 ++++++++++++++++++++++++++++++++++++
 1 file changed, 345 insertions(+)
 create mode 100644 modules/ml/test/test_lr.cpp

diff --git a/modules/ml/test/test_lr.cpp b/modules/ml/test/test_lr.cpp
new file mode 100644
index 0000000000..c82d46c137
--- /dev/null
+++ b/modules/ml/test/test_lr.cpp
@@ -0,0 +1,345 @@
+///////////////////////////////////////////////////////////////////////////////////////
+// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+
+// By downloading, copying, installing or using the software you agree to this license.
+// If you do not agree to this license, do not download, install,
+// copy or use the software.
+
+// This is an implementation of the Logistic Regression algorithm in C++ in OpenCV.
+
+// AUTHOR:
+// Rahul Kavi rahulkavi[at]live[at]com
+//
+
+// contains a subset of data from the popular Iris Dataset (taken from "http://archive.ics.uci.edu/ml/datasets/Iris")
+
+// # You are free to use, change, or redistribute the code in any way you wish for
+// # non-commercial purposes, but please maintain the name of the original author.
+// # This code comes with no warranty of any kind.
+
+// # Logistic Regression ALGORITHM
+
+
+// License Agreement
+// For Open Source Computer Vision Library
+
+// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
+// Copyright (C) 2008-2011, Willow Garage Inc., all rights reserved.
+// Third party copyrights are property of their respective owners.
+
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+
+// * Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+ +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. + +// * The name of the copyright holders may not be used to endorse or promote products +// derived from this software without specific prior written permission. + +// This software is provided by the copyright holders and contributors "as is" and +// any express or implied warranties, including, but not limited to, the implied +// warranties of merchantability and fitness for a particular purpose are disclaimed. +// In no event shall the Intel Corporation or contributors be liable for any direct, +// indirect, incidental, special, exemplary, or consequential damages +// (including, but not limited to, procurement of substitute goods or services; +// loss of use, data, or profits; or business interruption) however caused +// and on any theory of liability, whether in contract, strict liability, +// or tort (including negligence or otherwise) arising in any way out of +// the use of this software, even if advised of the possibility of such damage. + +#include "test_precomp.hpp" + +using namespace std; +using namespace cv; + + +static bool calculateError( const Mat& _p_labels, const Mat& _o_labels, float& error) +{ + error = 0.0f; + float accuracy = 0.0f; + Mat _p_labels_temp; + Mat _o_labels_temp; + _p_labels.convertTo(_p_labels_temp, CV_32S); + _o_labels.convertTo(_o_labels_temp, CV_32S); + + CV_Assert(_p_labels_temp.total() == _o_labels_temp.total()); + CV_Assert(_p_labels_temp.rows == _o_labels_temp.rows); + Mat result = (_p_labels_temp == _o_labels_temp)/255; + + accuracy = (float)cv::sum(result)[0]/result.rows; + error = 1 - accuracy; + return true; +} + +//-------------------------------------------------------------------------------------------- + +class CV_LRTest : public cvtest::BaseTest +{ +public: + CV_LRTest() {} +protected: + virtual void run( int start_from ); +}; + +void CV_LRTest::run( int /*start_from*/ ) +{ + // initialize varibles from the popular Iris Dataset + Mat data = (Mat_(150, 4)<< + 5.1,3.5,1.4,0.2, 4.9,3.0,1.4,0.2, 4.7,3.2,1.3,0.2, 4.6,3.1,1.5,0.2, + 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, + 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, + 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, + 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, + 5.2,3.5,1.5,0.2, 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, + 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, 5.5,3.5,1.3,0.2, + 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, + 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, + 4.6,3.2,1.4,0.2, 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, + 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, 6.3,3.3,4.7,1.6, + 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, + 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, + 5.8,2.7,4.1,1.0, 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, + 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, 6.8,2.8,4.8,1.4, + 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, 
5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, + 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, + 6.3,2.3,4.4,1.3, 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, + 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, 5.7,2.9,4.2,1.3, + 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, + 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, + 7.3,2.9,6.3,1.8, 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, + 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, 6.5,3.0,5.5,1.8, + 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, + 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, + 6.1,3.0,4.9,1.8, 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, + 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, 6.3,3.4,5.6,2.4, + 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, + 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, + 6.5,3.0,5.2,2.0, 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); + + Mat labels = (Mat_(150, 1)<< 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3); + + CvLR_TrainParams params = CvLR_TrainParams(); + Mat responses1, responses2; + float error = 0.0f; + + CvLR_TrainParams params1 = CvLR_TrainParams(); + CvLR_TrainParams params2 = CvLR_TrainParams(); + + params1.alpha = 1.0; + params1.num_iters = 10001; + params1.norm = CvLR::REG_L2; + // params1.debug = 1; + params1.regularized = 1; + params1.train_method = CvLR::BATCH; + params1.minibatchsize = 10; + + // run LR classifier train classifier + data.convertTo(data, CV_32FC1); + labels.convertTo(labels, CV_32FC1); + CvLR lr1(data, labels, params1); + + // predict using the same data + lr1.predict(data, responses1); + + int test_code = cvtest::TS::OK; + + // calculate error + if(!calculateError(responses1, labels, error)) + { + ts->printf(cvtest::TS::LOG, "Bad prediction labels\n" ); + test_code = cvtest::TS::FAIL_INVALID_OUTPUT; + } + + else if(error > 0.05f) + { + ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error); + test_code = cvtest::TS::FAIL_BAD_ACCURACY; + } + + params2.alpha = 1.0; + params2.num_iters = 9000; + params2.norm = CvLR::REG_L2; + // params2.debug = 1; + params2.regularized = 1; + params2.train_method = CvLR::MINI_BATCH; + params2.minibatchsize = 10; + + // now train using mini batch gradient descent + CvLR lr2(data, labels, params2); + lr2.predict(data, responses2); + responses2.convertTo(responses2, CV_32S); + + //calculate error + + if(!calculateError(responses2, labels, error)) + { + ts->printf(cvtest::TS::LOG, "Bad prediction labels\n" ); + test_code = cvtest::TS::FAIL_INVALID_OUTPUT; + } + + else if(error > 0.06f) + { + ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error); + test_code = cvtest::TS::FAIL_BAD_ACCURACY; + } + + ts->set_failed_test_info(test_code); +} + +//-------------------------------------------------------------------------------------------- +class 
CV_LRTest_SaveLoad : public cvtest::BaseTest +{ +public: + CV_LRTest_SaveLoad(){} +protected: + virtual void run(int start_from); +}; + + +void CV_LRTest_SaveLoad::run( int /*start_from*/ ) +{ + + int code = cvtest::TS::OK; + + // initialize varibles from the popular Iris Dataset + Mat data = (Mat_(150, 4)<< + 5.1,3.5,1.4,0.2, 4.9,3.0,1.4,0.2, 4.7,3.2,1.3,0.2, 4.6,3.1,1.5,0.2, + 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, + 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, + 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, + 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, + 5.2,3.5,1.5,0.2, 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, + 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, 5.5,3.5,1.3,0.2, + 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, + 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, + 4.6,3.2,1.4,0.2, 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, + 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, 6.3,3.3,4.7,1.6, + 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, + 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, + 5.8,2.7,4.1,1.0, 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, + 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, 6.8,2.8,4.8,1.4, + 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, + 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, + 6.3,2.3,4.4,1.3, 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, + 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, 5.7,2.9,4.2,1.3, + 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, + 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, + 7.3,2.9,6.3,1.8, 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, + 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, 6.5,3.0,5.5,1.8, + 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, + 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, + 6.1,3.0,4.9,1.8, 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, + 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, 6.3,3.4,5.6,2.4, + 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, + 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, + 6.5,3.0,5.2,2.0, 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); + + Mat labels = (Mat_(150, 1)<< 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3); + + CvLR_TrainParams params = CvLR_TrainParams(); + + Mat responses1, responses2; + Mat learnt_mat1, learnt_mat2; + Mat pred_result1, 
comp_learnt_mats;
+
+    float errorCount = 0.0;
+
+    CvLR_TrainParams params1 = CvLR_TrainParams();
+    CvLR_TrainParams params2 = CvLR_TrainParams();
+
+    params1.alpha = 1.0;
+    params1.num_iters = 10001;
+    params1.norm = CvLR::REG_L2;
+    // params1.debug = 1;
+    params1.regularized = 1;
+    params1.train_method = CvLR::BATCH;
+    params1.minibatchsize = 10;
+
+    data.convertTo(data, CV_32FC1);
+    labels.convertTo(labels, CV_32FC1);
+
+    // train the LR classifier
+    CvLR lr1(data, labels, params1);
+    CvLR lr2;
+    learnt_mat1 = lr1.get_learnt_mat();
+    lr1.predict(data, responses1);
+    // now save the classifier
+
+    // Write out
+    string filename = cv::tempfile(".xml");
+    try
+    {
+        lr1.save(filename.c_str());
+    }
+
+    catch(...)
+    {
+        ts->printf(cvtest::TS::LOG, "Crash in write method.\n" );
+        ts->set_failed_test_info(cvtest::TS::FAIL_EXCEPTION);
+    }
+
+    try
+    {
+        lr2.load(filename.c_str());
+    }
+
+    catch(...)
+    {
+        ts->printf(cvtest::TS::LOG, "Crash in read method.\n");
+        ts->set_failed_test_info(cvtest::TS::FAIL_EXCEPTION);
+    }
+
+    lr2.predict(data, responses2);
+
+    learnt_mat2 = lr2.get_learnt_mat();
+
+    // compare difference in prediction outputs before and after loading from disk
+    pred_result1 = (responses1 == responses2)/255;
+
+    // compare difference in learnt matrices before and after loading from disk
+    comp_learnt_mats = (learnt_mat1 == learnt_mat2);
+    comp_learnt_mats = comp_learnt_mats.reshape(1, comp_learnt_mats.rows*comp_learnt_mats.cols);
+    comp_learnt_mats.convertTo(comp_learnt_mats, CV_32S);
+    comp_learnt_mats = comp_learnt_mats/255;
+
+    // compare difference in prediction outputs and stored inputs
+    // check if there is any difference between the computed learnt mat and the retrieved mat
+
+    errorCount += 1 - (float)cv::sum(pred_result1)[0]/pred_result1.rows;
+    errorCount += 1 - (float)cv::sum(comp_learnt_mats)[0]/comp_learnt_mats.rows;
+
+
+    if(errorCount>0)
+    {
+        ts->printf( cvtest::TS::LOG, "Different prediction results before writing and after reading (errorCount=%f).\n", errorCount );
+        code = cvtest::TS::FAIL_BAD_ACCURACY;
+    }
+
+    remove( filename.c_str() );
+
+    ts->set_failed_test_info( code );
+}
+
+TEST(ML_LR, accuracy) { CV_LRTest test; test.safe_run(); }
+TEST(ML_LR, save_load) { CV_LRTest_SaveLoad test; test.safe_run(); }

From fcfeb2451b32b7ea65e55104297529fb83cfed10 Mon Sep 17 00:00:00 2001
From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:42:07 -0400
Subject: [PATCH 06/43] added logistic regression prototype

---
 modules/ml/include/opencv2/ml.hpp | 84 +++++++++++++++++++++
 1 file changed, 84 insertions(+)

diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp
index a5ce3010bf..ea90538a92 100644
--- a/modules/ml/include/opencv2/ml.hpp
+++ b/modules/ml/include/opencv2/ml.hpp
@@ -89,6 +89,8 @@ public:
     CV_PROP_RW double maxVal;
     CV_PROP_RW double logStep;
 };
+#define CV_TYPE_NAME_ML_LR "opencv-ml-lr"
+

 class CV_EXPORTS TrainData
@@ -566,6 +568,85 @@ public:
     static Ptr<ANN_MLP> create(const Params& params=Params());
 };

+/****************************************************************************************\
+*                                   Logistic Regression                                 *
+\****************************************************************************************/
+
+struct CV_EXPORTS_W_MAP CvLR_TrainParams
+{
+    CV_PROP_RW double alpha;
+    CV_PROP_RW int num_iters;
+    CV_PROP_RW int norm;
+    ///////////////////////////////////////////////////
+    // CV_PROP_RW int debug;
+    ///////////////////////////////////////////////////
+    CV_PROP_RW int regularized;
+    CV_PROP_RW int train_method;
+    CV_PROP_RW int minibatchsize;
+
+    CV_PROP_RW CvTermCriteria term_crit;
+
+    CvLR_TrainParams();
+    ///////////////////////////////////////////////////
+    // CvLR_TrainParams(double alpha, int num_iters, int norm, int debug, int regularized, int train_method, int minbatchsize);
+    ///////////////////////////////////////////////////
+    CvLR_TrainParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize);
+    ~CvLR_TrainParams();
+};
+
+class CV_EXPORTS_W CvLR : public CvStatModel
+{
+public:
+    CvLR();
+    // CvLR(const CvLR_TrainParams& Params);
+
+    CvLR(const cv::Mat& data, const cv::Mat& labels, const CvLR_TrainParams& params);
+
+    virtual ~CvLR();
+
+    enum { REG_L1=0, REG_L2 = 1};
+    enum { BATCH, MINI_BATCH};
+
+
+    virtual bool train(const cv::Mat& data, const cv::Mat& labels);//, const CvLR_TrainParams& params);
+
+    virtual float predict(const cv::Mat& data, cv::Mat& predicted_labels);
+    virtual float predict(const cv::Mat& data);
+
+    virtual void write( CvFileStorage* storage, const char* name ) const;
+    virtual void read( CvFileStorage* storage, CvFileNode* node );
+
+    virtual void clear();
+
+    virtual cv::Mat get_learnt_mat();
+
+protected:
+
+    cv::Mat learnt_thetas;
+    CvLR_TrainParams params;
+
+    std::map<int, int> forward_mapper;
+    std::map<int, int> reverse_mapper;
+
+    virtual bool set_default_params();
+    virtual cv::Mat calc_sigmoid(const cv::Mat& data);
+
+    virtual double compute_cost(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta);
+    virtual cv::Mat compute_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta);
+    virtual cv::Mat compute_mini_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta);
+
+    virtual std::map<int, int> get_label_map(const cv::Mat& labels);
+
+    virtual bool set_label_map(const cv::Mat& labels);
+    virtual cv::Mat remap_labels(const cv::Mat& labels, const std::map<int, int> lmap);
+
+    //cv::Mat Mapper;
+
+    cv::Mat labels_o;
+    cv::Mat labels_n;
+
+};
+
 /****************************************************************************************\
 *                           Auxiliary functions declarations                            *
 \****************************************************************************************/
@@ -581,6 +662,9 @@ CV_EXPORTS void randGaussMixture( InputArray means, InputArray covs, InputArray
 /* creates test set */
 CV_EXPORTS void createConcentricSpheresTestSet( int nsamples, int nfeatures, int nclasses, OutputArray samples, OutputArray responses);

+typedef CvLR_TrainParams LogisticRegression_TrainParams;
+typedef CvLR LogisticRegression;
+
 }
 }

From 6ae43a2243fdb90088507e882640500ca958d074 Mon Sep 17 00:00:00 2001
From: Rahul Kavi
Date: Mon, 5 Aug 2013 09:42:59 -0400
Subject: [PATCH 07/43] added logistic regression classifier

---
 modules/ml/src/lr.cpp | 754 ++++++++++++++++++++++++++++++++++++
 1 file changed, 754 insertions(+)
 create mode 100644 modules/ml/src/lr.cpp

diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp
new file mode 100644
index 0000000000..73e4c00c9e
--- /dev/null
+++ b/modules/ml/src/lr.cpp
@@ -0,0 +1,754 @@
+///////////////////////////////////////////////////////////////////////////////////////
+// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+
+// By downloading, copying, installing or using the software you agree to this license.
+// If you do not agree to this license, do not download, install,
+// copy or use the software.
+
+// This is an implementation of the Logistic Regression algorithm in C++ in OpenCV.
+ +// AUTHOR: +// Rahul Kavi rahulkavi[at]live[at]com + +// # You are free to use, change, or redistribute the code in any way you wish for +// # non-commercial purposes, but please maintain the name of the original author. +// # This code comes with no warranty of any kind. + +// # +// # You are free to use, change, or redistribute the code in any way you wish for +// # non-commercial purposes, but please maintain the name of the original author. +// # This code comes with no warranty of any kind. + +// # Logistic Regression ALGORITHM + + +// License Agreement +// For Open Source Computer Vision Library + +// Copyright (C) 2000-2008, Intel Corporation, all rights reserved. +// Copyright (C) 2008-2011, Willow Garage Inc., all rights reserved. +// Third party copyrights are property of their respective owners. + +// Redistribution and use in source and binary forms, with or without modification, +// are permitted provided that the following conditions are met: + +// * Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. + +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. + +// * The name of the copyright holders may not be used to endorse or promote products +// derived from this software without specific prior written permission. + +// This software is provided by the copyright holders and contributors "as is" and +// any express or implied warranties, including, but not limited to, the implied +// warranties of merchantability and fitness for a particular purpose are disclaimed. +// In no event shall the Intel Corporation or contributors be liable for any direct, +// indirect, incidental, special, exemplary, or consequential damages +// (including, but not limited to, procurement of substitute goods or services; +// loss of use, data, or profits; or business interruption) however caused +// and on any theory of liability, whether in contract, strict liability, +// or tort (including negligence or otherwise) arising in any way out of +// the use of this software, even if advised of the possibility of such damage. 
+ +#include "precomp.hpp" + +using namespace cv; +using namespace std; + +CvLR_TrainParams::CvLR_TrainParams() +{ + term_crit = CvTermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 10000, 0.001); +} + +CvLR_TrainParams::CvLR_TrainParams(double _alpha, int _num_iters, int _norm, int _regularized, int _train_method, int _minibatchsize): + alpha(_alpha), num_iters(_num_iters), norm(_norm), regularized(_regularized), train_method(_train_method), minibatchsize(_minibatchsize) +/////////////////////////////////////////////////// +// CvLR_TrainParams::CvLR_TrainParams(double _alpha, int _num_iters, int _norm, int _debug, int _regularized, int _train_method, int _minibatchsize): +// alpha(_alpha), num_iters(_num_iters), norm(_norm), debug(_debug), regularized(_regularized), train_method(_train_method), minibatchsize(_minibatchsize) +/////////////////////////////////////////////////// +{ + term_crit = CvTermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, 0.001); +} + +CvLR_TrainParams::~CvLR_TrainParams() +{ + +} + +CvLR::CvLR() +{ + default_model_name = "my_lr"; + // set_default_params(); +} + + +CvLR::CvLR(const cv::Mat& _data, const cv::Mat& _labels, const CvLR_TrainParams& _params) +{ + this->params = _params; + default_model_name = "my_lr"; + train(_data, _labels); +} + +CvLR::~CvLR() +{ + clear(); +} + + +bool CvLR::train(const cv::Mat& _data_i, const cv::Mat& _labels_i) +{ + CV_Assert( !_labels_i.empty() && !_data_i.empty()); + + // check the number of colums + CV_Assert( _labels_i.cols == 1); + + if(_labels_i.cols != 1) + { + cv::error(Error::StsBadArg, "_labels_i should be a column matrix", "cv::ml::CvLR::train", __FILE__, __LINE__); + } + // check data type. + // data should be of floating type CV_32FC1 + + if((_data_i.type() != CV_32FC1) || (_labels_i.type() != CV_32FC1)) + { + cv::error(Error::StsBadArg, "train: data and labels must be a floating point matrix", "cv::ml::CvLR::train", __FILE__, __LINE__); + } + + bool ok = false; + + cv::Mat labels; + + //CvLR::set_label_map(_labels_i); + set_label_map(_labels_i); + int num_classes = this->forward_mapper.size(); + + // add a column of ones + cv::Mat data_t = cv::Mat::zeros(_data_i.rows, _data_i.cols+1, CV_32F); + vconcat(cv::Mat(_data_i.rows, 1, _data_i.type(), Scalar::all(1.0)), data_t.col(0)); + for (int i=1;iforward_mapper); + cv::Mat new_local_labels; + + int ii=0; + + if(num_classes == 2) + { + //data_t.convertTo(data, CV_32F); + labels_l.convertTo(labels, CV_32F); + + //cv::Mat new_theta = CvLR::compute_batch_gradient(data, labels, init_theta); + cv::Mat new_theta = compute_batch_gradient(data_t, labels, init_theta); + + thetas = new_theta.t(); + } + + else + { + /* take each class and rename classes you will get a theta per class + as in multi class class scenario, we will have n thetas for n classes */ + ii = 0; + + for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) + { + new_local_labels = (labels_l == it->second)/255; + // cout<<"processing class "<second<learnt_thetas = thetas.clone(); + if( cvIsNaN( (double)cv::sum(this->learnt_thetas)[0] ) ) + { + cv::error(Error::StsBadArg, "train: check training parameters. 
Invalid training classifier","cv::ml::CvLR::train", __FILE__, __LINE__); + } + + ok = true; + + return ok; +} + +float CvLR::predict(const Mat& _data) +{ + cv::Mat pred_labs; + pred_labs = cv::Mat::zeros(1,1, _data.type()); + + if(_data.rows >1) + { + cv::error(Error::StsBadArg, "predict: _data should have only 1 row", "cv::ml::CvLR::predict", __FILE__, __LINE__); + } + + + predict(_data, pred_labs); + + return static_cast(pred_labs.at(0,0)); +} + +float CvLR::predict(const cv::Mat& _data, cv::Mat& _pred_labs) +{ + /* returns a class of the predicted class + class names can be 1,2,3,4, .... etc */ + cv::Mat thetas; + + // check if learnt_mats array is populated + if(this->learnt_thetas.total()<=0) + { + cv::error(Error::StsBadArg, "predict: classifier should be trained first", "cv::ml::CvLR::predict", __FILE__, __LINE__); + } + if(_data.type() != CV_32F) + { + cv::error(Error::StsBadArg, "predict: _data must be of floating type","cv::ml::CvLR::predict",__FILE__, __LINE__); + } + + // add a column of ones + cv::Mat data_t = cv::Mat::zeros(_data.rows, _data.cols+1, CV_32F); + for (int i=0;ilearnt_thetas.convertTo(thetas, CV_32F); + + CV_Assert(thetas.rows > 0); + + double min_val; + double max_val; + + Point min_loc; + Point max_loc; + + cv::Mat labels; + cv::Mat labels_c; + cv::Mat temp_pred; + + cv::Mat pred_m = cv::Mat::zeros(data_t.rows, thetas.rows, _data.type()); + + if(thetas.rows == 1) + { + temp_pred = calc_sigmoid(data_t*thetas.t()); + CV_Assert(temp_pred.cols==1); + // if greater than 0.5, predict class 0 or predict class 1 + temp_pred = (temp_pred>0.5)/255; + temp_pred.convertTo(labels_c, CV_32S); + } + + else + { + for(int i = 0;ireverse_mapper); + + // convert _pred_labs to integer type + + _pred_labs.convertTo(_pred_labs, CV_32S); + + return 0.0; +} + +cv::Mat CvLR::calc_sigmoid(const Mat& data) +{ + cv::Mat dest; + cv::exp(-data, dest); + return 1.0/(1.0+dest); +} + +double CvLR::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +{ + + int llambda = 0; + int m; + int n; + + double cost = 0; + double rparameter = 0; + + cv::Mat gradient; + cv::Mat theta_b; + cv::Mat theta_c; + + m = _data.rows; + n = _data.cols; + + gradient = cv::Mat::zeros( _init_theta.rows, _init_theta.cols, _init_theta.type()); + + theta_b = _init_theta(Range(1, n), Range::all()); + + cv::multiply(theta_b, theta_b, theta_c, 1); + + if(this->params.regularized > 0) + { + llambda = 1; + } + + if(this->params.norm == CvLR::REG_L1) + { + rparameter = (llambda/(2*m)) * cv::sum(theta_b)[0]; + } + else + { + // assuming it to be L2 by default + rparameter = (llambda/(2*m)) * cv::sum(theta_c)[0]; + } + + + // cv::Mat d_a = LogisticRegression::CvLR::calc_sigmoid(_data* _init_theta); + cv::Mat d_a = calc_sigmoid(_data* _init_theta); + + + cv::log(d_a, d_a); + cv::multiply(d_a, _labels, d_a); + + // cv::Mat d_b = 1 - LogisticRegression::CvLR::calc_sigmoid(_data * _init_theta); + cv::Mat d_b = 1 - calc_sigmoid(_data * _init_theta); + cv::log(d_b, d_b); + cv::multiply(d_b, 1-_labels, d_b); + + cost = (-1.0/m) * (cv::sum(d_a)[0] + cv::sum(d_b)[0]); + cost = cost + rparameter; + + return cost; +} + +cv::Mat CvLR::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +{ + // implements batch gradient descent + + if(this->params.alpha<=0) + { + cv::error(Error::StsBadArg, "compute_batch_gradient: check training parameters for the classifier","cv::ml::CvLR::compute_batch_gradient", __FILE__, __LINE__); + } + + if(this->params.num_iters <= 0) + { 
+ cv::error(Error::StsBadArg,"compute_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::CvLR::compute_batch_gradient",__FILE__,__LINE__); + } + + int llambda = 0; + /////////////////////////////////////////////////// + double ccost; + /////////////////////////////////////////////////// + int m, n; + + cv::Mat pcal_a; + cv::Mat pcal_b; + cv::Mat pcal_ab; + cv::Mat gradient; + cv::Mat theta_p = _init_theta.clone(); + + // cout<<"_data size "<<_data.rows<<", "<<_data.cols<params.regularized > 0) + { + llambda = 1; + } + + for(int i = 0;iparams.num_iters;i++) + { + ccost = compute_cost(_data, _labels, theta_p); + + if( cvIsNaN( ccost ) ) + { + cv::error(Error::StsBadArg, "compute_batch_gradient: check training parameters. Invalid training classifier","cv::ml::CvLR::compute_batch_gradient", __FILE__, __LINE__); + } + + /////////////////////////////////////////////////// + // cout<<"calculated cost: "<params.debug == 1 && i%(this->params.num_iters/2)==0) // + // { + // cout<<"iter: "<params.alpha<params.num_iters<params.norm<params.debug<params.regularized<params.train_method<(1/m)) * _data.t(); + + gradient = pcal_a * pcal_b; + + pcal_a = calc_sigmoid(_data*theta_p) - _labels; + + pcal_b = _data(Range::all(), Range(0,1)); + + cv::multiply(pcal_a, pcal_b, pcal_ab, 1); + + gradient.row(0) = ((float)1/m) * sum(pcal_ab)[0]; + + + pcal_b = _data(Range::all(), Range(1,n)); + + //cout<<"for each training data entry"<(this->params.alpha)/m)*gradient; + //cout<<"updated theta_p"<params.minibatchsize; + + // if(this->minibatchsize == 0) + // { + // cv::error(Error::StsDivByZero, "compute_mini_batch_gradient: set CvLR::MINI_BATCH value to a non-zero number (and less than number of samples in a given class) ", "cv::ml::CvLR::compute_mini_batch_gradient", __FILE__, __LINE__); + // } + + if(this->params.minibatchsize <= 0 || this->params.alpha == 0) + { + cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters for the classifier","cv::ml::CvLR::compute_mini_batch_gradient", __FILE__, __LINE__); + } + + if(this->params.num_iters <= 0) + { + cv::error(Error::StsBadArg,"compute_mini_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::CvLR::compute_mini_batch_gradient",__FILE__,__LINE__); + } + + cv::Mat pcal_a; + cv::Mat pcal_b; + cv::Mat pcal_ab; + cv::Mat gradient; + cv::Mat theta_p = _init_theta.clone(); + cv::Mat data_d; + cv::Mat labels_l; + + if(this->params.regularized > 0) + { + lambda_l = 1; + } + + for(int i = 0;this->params.term_crit.max_iter;i++) + { + if(j+size_b<=_data.rows) + { + data_d = _data(Range(j,j+size_b), Range::all()); + labels_l = _labels(Range(j,j+size_b),Range::all()); + } + else + { + data_d = _data(Range(j, _data.rows), Range::all()); + labels_l = _labels(Range(j, _labels.rows),Range::all()); + } + + m = data_d.rows; + n = data_d.cols; + + ccost = compute_cost(data_d, labels_l, theta_p); + + + if( cvIsNaN( ccost ) == 1) + { + cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters. 
Invalid training classifier","cv::ml::CvLR::compute_mini_batch_gradient", __FILE__, __LINE__); + } + + /////////////////////////////////////////////////// + // if(this->params.debug == 1 && i%(this->params.term_crit.max_iter/2)==0) + // { + // cout<<"iter: "<params.alpha<params.num_iters<params.norm<params.debug<params.regularized<params.train_method<params.minibatchsize<(1/m)) * data_d.t(); + + gradient = pcal_a * pcal_b; + + pcal_a = calc_sigmoid(data_d*theta_p) - labels_l; + + pcal_b = data_d(Range::all(), Range(0,1)); + + cv::multiply(pcal_a, pcal_b, pcal_ab, 1); + + gradient.row(0) = ((float)1/m) * sum(pcal_ab)[0]; + + pcal_b = data_d(Range::all(), Range(1,n)); + + for(int k = 1;k(this->params.alpha)/m)*gradient; + + j+=this->params.minibatchsize; + + if(j+size_b>_data.rows) + { + // if parsed through all data variables + break; + } + } + + return theta_p; +} + + +std::map CvLR::get_label_map(const cv::Mat& _labels_i) +{ + // this function creates two maps to map user defined labels to program friendsly labels + // two ways. + + cv::Mat labels; + int ii = 0; + + _labels_i.convertTo(labels, CV_32S); + + for(int i = 0;iforward_mapper[labels.at(i)] += 1; + } + + for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) + { + this->forward_mapper[it->first] = ii; + ii += 1; + } + + for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) + { + this->reverse_mapper[it->second] = it->first; + } + + return this->forward_mapper; +} + +bool CvLR::set_label_map(const cv::Mat& _labels_i) +{ + // this function creates two maps to map user defined labels to program friendsly labels + // two ways. + + int ii = 0; + cv::Mat labels; + bool ok = false; + + this->labels_o = cv::Mat(0,1, CV_8U); + this->labels_n = cv::Mat(0,1, CV_8U); + + _labels_i.convertTo(labels, CV_32S); + + for(int i = 0;iforward_mapper[labels.at(i)] += 1; + } + + for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) + { + this->forward_mapper[it->first] = ii; + this->labels_o.push_back(it->first); + this->labels_n.push_back(ii); + ii += 1; + } + + for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) + { + this->reverse_mapper[it->second] = it->first; + } + ok = true; + + return ok; +} + +cv::Mat CvLR::remap_labels(const Mat& _labels_i, std::map lmap) +{ + cv::Mat labels; + _labels_i.convertTo(labels, CV_32S); + + cv::Mat new_labels = cv::Mat::zeros(labels.rows, labels.cols, labels.type()); + + CV_Assert( lmap.size() > 0 ); + + for(int i =0;i(i,0) = lmap[labels.at(i,0)]; + } + + return new_labels; +} + + +bool CvLR::set_default_params() +{ + // set default parameters for the Logisitic Regression classifier + this->params.alpha = 1.0; + this->params.term_crit.max_iter = 10000; + this->params.norm = CvLR::REG_L2; + /////////////////////////////////////////////////// + // this->params.debug = 1; + /////////////////////////////////////////////////// + this->params.regularized = 1; + this->params.train_method = CvLR::MINI_BATCH; + this->params.minibatchsize = 10; + + return true; +} + +void CvLR::clear() +{ + this->learnt_thetas.release(); + this->labels_o.release(); + this->labels_n.release(); +} + +void CvLR::read( CvFileStorage* fs, CvFileNode* node ) +{ + CvMat *newData; + CvMat *o_labels; + CvMat *n_labels; + + + this->params.alpha = cvReadRealByName(fs, node,"alpha", 1.0); + this->params.num_iters = cvReadIntByName(fs, node,"iterations", 1000); + this->params.norm = 
cvReadIntByName(fs, node,"norm", 1); + // this->params.debug = cvReadIntByName(fs, node,"debug", 1); + this->params.regularized = cvReadIntByName(fs, node,"regularized", 1); + this->params.train_method = cvReadIntByName(fs, node,"train_method", 0); + + if(this->params.train_method == CvLR::MINI_BATCH) + { + this->params.minibatchsize = cvReadIntByName(fs, node,"mini_batch_size", 1); + } + + newData = (CvMat*)cvReadByName( fs, node, "learnt_thetas" ); + o_labels = (CvMat*)cvReadByName( fs, node, "o_labels" ); + n_labels = (CvMat*)cvReadByName( fs, node, "n_labels" ); + + this->learnt_thetas = cv::Mat(newData->rows, newData->cols, CV_32F, newData->data.db); + this->labels_o = cv::Mat(o_labels->rows, o_labels->cols, CV_32S, o_labels->data.ptr); + this->labels_n = cv::Mat(n_labels->rows, n_labels->cols, CV_32S, n_labels->data.ptr); + + for(int ii =0;iiforward_mapper[labels_o.at(ii,0)] = labels_n.at(ii,0); + this->reverse_mapper[labels_n.at(ii,0)] = labels_o.at(ii,0); + } + +} + +void CvLR::write( CvFileStorage* fs, const char* name ) const +{ + string desc = "Logisitic Regression Classifier"; + + cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_ML_LR ); + + cvWriteString( fs, "classifier", desc.c_str()); + cvWriteReal(fs,"alpha",this->params.alpha); + cvWriteInt(fs,"iterations",this->params.num_iters); + cvWriteInt(fs,"norm",this->params.norm); + // cvWriteInt(fs,"debug",this->params.debug); + cvWriteInt(fs,"regularized",this->params.regularized); + cvWriteInt(fs,"train_method",this->params.train_method); + + if(this->params.train_method == CvLR::MINI_BATCH) + { + cvWriteInt(fs,"mini_batch_size",this->params.minibatchsize); + } + + CvMat mat_learnt_thetas = this->learnt_thetas; + CvMat o_labels = this->labels_o; + CvMat n_labels = this->labels_n; + + cvWrite(fs, "learnt_thetas", &mat_learnt_thetas ); + cvWrite(fs, "n_labels", &n_labels); + cvWrite(fs, "o_labels", &o_labels); + + cvEndWriteStruct(fs); +} + + +cv::Mat CvLR::get_learnt_mat() +{ + return this->learnt_thetas; +} + +/* End of file. 
*/ From 3a6466d2e196b98a412f8c8dd98562a3532a0ae8 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Wed, 7 Aug 2013 20:39:46 -0400 Subject: [PATCH 08/43] updated logistic regression sample program --- samples/cpp/sample_logistic_regression.cpp | 58 +++++++++++++++++----- 1 file changed, 45 insertions(+), 13 deletions(-) diff --git a/samples/cpp/sample_logistic_regression.cpp b/samples/cpp/sample_logistic_regression.cpp index 95649e3911..165ead3b52 100644 --- a/samples/cpp/sample_logistic_regression.cpp +++ b/samples/cpp/sample_logistic_regression.cpp @@ -16,6 +16,8 @@ #include #include +#include + using namespace std; using namespace cv; @@ -25,6 +27,10 @@ int main() { Mat data_temp, labels_temp; Mat data, labels; + + Mat data_train, data_test; + Mat labels_train, labels_test; + Mat responses, result; FileStorage f; @@ -44,6 +50,32 @@ int main() data_temp.convertTo(data, CV_32F); labels_temp.convertTo(labels, CV_32F); + for(int i =0;i(i,0)<<" :: "<< responses.at(i,0)<(i,0)<<" :: "<< responses.at(i,0)< Date: Sat, 10 Aug 2013 08:49:17 -0400 Subject: [PATCH 09/43] fixed trailing white-space in sample program for logistic regression --- samples/cpp/sample_logistic_regression.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/samples/cpp/sample_logistic_regression.cpp b/samples/cpp/sample_logistic_regression.cpp index 165ead3b52..36fd88253e 100644 --- a/samples/cpp/sample_logistic_regression.cpp +++ b/samples/cpp/sample_logistic_regression.cpp @@ -27,7 +27,7 @@ int main() { Mat data_temp, labels_temp; Mat data, labels; - + Mat data_train, data_test; Mat labels_train, labels_test; @@ -66,7 +66,7 @@ int main() cout<<"training samples per class: "<(i,0)<<" :: "<< responses.at(i,0)< Date: Sat, 10 Aug 2013 08:50:35 -0400 Subject: [PATCH 10/43] fixed trailing white-space in logistic regression class --- modules/ml/src/lr.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 73e4c00c9e..b7e2a1e9d3 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -685,7 +685,7 @@ void CvLR::read( CvFileStorage* fs, CvFileNode* node ) CvMat *newData; CvMat *o_labels; CvMat *n_labels; - + this->params.alpha = cvReadRealByName(fs, node,"alpha", 1.0); this->params.num_iters = cvReadIntByName(fs, node,"iterations", 1000); From b8ea21b2e5f401200d41734d611085e625bf9604 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 08:30:10 -0400 Subject: [PATCH 11/43] updated logistic regression sample program --- ...regression.cpp => logistic_regression.cpp} | 63 +++++++++++++++---- 1 file changed, 50 insertions(+), 13 deletions(-) rename samples/cpp/{sample_logistic_regression.cpp => logistic_regression.cpp} (52%) diff --git a/samples/cpp/sample_logistic_regression.cpp b/samples/cpp/logistic_regression.cpp similarity index 52% rename from samples/cpp/sample_logistic_regression.cpp rename to samples/cpp/logistic_regression.cpp index 36fd88253e..71b71af687 100644 --- a/samples/cpp/sample_logistic_regression.cpp +++ b/samples/cpp/logistic_regression.cpp @@ -1,17 +1,61 @@ /////////////////////////////////////////////////////////////////////////////////////// -// sample_logistic_regression.cpp // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. // By downloading, copying, installing or using the software you agree to this license. // If you do not agree to this license, do not download, install, // copy or use the software. 
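A side note on the accuracy bookkeeping in the sample above: the per-row comparison loop can be collapsed into a single matrix comparison. A sketch, not part of the patch, assuming `labels_test` and `responses` are integer column matrices of equal size:

    // matches holds 255 where predicted and true labels agree, 0 elsewhere
    cv::Mat matches = (labels_test == responses);
    float accuracy = 100.0f * (float)cv::countNonZero(matches) / matches.rows;
    cout << "accuracy: " << accuracy << "%" << endl;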
-// This is a sample program demostrating classification of digits 0 and 1 using Logistic Regression +// This is a implementation of the Logistic Regression algorithm in C++ in OpenCV. // AUTHOR: // Rahul Kavi rahulkavi[at]live[at]com // +// contains a subset of data from the popular Iris Dataset (taken from "http://archive.ics.uci.edu/ml/datasets/Iris") + +// # You are free to use, change, or redistribute the code in any way you wish for +// # non-commercial purposes, but please maintain the name of the original author. +// # This code comes with no warranty of any kind. + +// # +// # You are free to use, change, or redistribute the code in any way you wish for +// # non-commercial purposes, but please maintain the name of the original author. +// # This code comes with no warranty of any kind. + +// # Logistic Regression ALGORITHM + + +// License Agreement +// For Open Source Computer Vision Library + +// Copyright (C) 2000-2008, Intel Corporation, all rights reserved. +// Copyright (C) 2008-2011, Willow Garage Inc., all rights reserved. +// Third party copyrights are property of their respective owners. + +// Redistribution and use in source and binary forms, with or without modification, +// are permitted provided that the following conditions are met: + +// * Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. + +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. + +// * The name of the copyright holders may not be used to endorse or promote products +// derived from this software without specific prior written permission. + +// This software is provided by the copyright holders and contributors "as is" and +// any express or implied warranties, including, but not limited to, the implied +// warranties of merchantability and fitness for a particular purpose are disclaimed. +// In no event shall the Intel Corporation or contributors be liable for any direct, +// indirect, incidental, special, exemplary, or consequential damages +// (including, but not limited to, procurement of substitute goods or services; +// loss of use, data, or profits; or business interruption) however caused +// and on any theory of liability, whether in contract, strict liability, +// or tort (including negligence or otherwise) arising in any way out of +// the use of this software, even if advised of the possibility of such damage. + #include #include @@ -76,17 +120,11 @@ int main() cout<<"initializing Logisitc Regression Parameters\n"< Date: Fri, 4 Oct 2013 08:32:12 -0400 Subject: [PATCH 12/43] rewrote the code to update API, removed inheritance from CvStatModel. 
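The interface this rewrite introduces reduces training and prediction to a few calls. A minimal usage sketch against the declarations in the diff below (assumes `data` is a CV_32FC1 matrix with one sample per row and `labels` a CV_32FC1 column, as train() requires):

    LogisticRegressionParams params;                 // defaults from the new constructor
    params.alpha = 0.001;                            // learning rate
    params.num_iters = 1000;
    params.norm = LogisticRegression::REG_L2;
    params.regularized = 1;
    params.train_method = LogisticRegression::BATCH;

    LogisticRegression lr(data, labels, params);     // this constructor trains immediately
    cv::Mat predicted;
    lr.predict(data, predicted);                     // CV_32S column of predicted labels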
--- modules/ml/src/lr.cpp | 376 +++++++++++++----------------------------- 1 file changed, 115 insertions(+), 261 deletions(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index b7e2a1e9d3..5d53d0f868 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -58,76 +58,73 @@ using namespace cv; using namespace std; -CvLR_TrainParams::CvLR_TrainParams() +LogisticRegressionParams::LogisticRegressionParams() { term_crit = CvTermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 10000, 0.001); + alpha = 0.001; + num_iters = 10; + norm = LogisticRegression::REG_L2; + regularized = 1; + train_method = LogisticRegression::BATCH; + mini_batch_size = 1; } -CvLR_TrainParams::CvLR_TrainParams(double _alpha, int _num_iters, int _norm, int _regularized, int _train_method, int _minibatchsize): - alpha(_alpha), num_iters(_num_iters), norm(_norm), regularized(_regularized), train_method(_train_method), minibatchsize(_minibatchsize) -/////////////////////////////////////////////////// -// CvLR_TrainParams::CvLR_TrainParams(double _alpha, int _num_iters, int _norm, int _debug, int _regularized, int _train_method, int _minibatchsize): -// alpha(_alpha), num_iters(_num_iters), norm(_norm), debug(_debug), regularized(_regularized), train_method(_train_method), minibatchsize(_minibatchsize) -/////////////////////////////////////////////////// +LogisticRegressionParams::LogisticRegressionParams(double _alpha, int _num_iters, int _norm, int _regularized, int _train_method, int _mini_batch_size): + alpha(_alpha), num_iters(_num_iters), norm(_norm), regularized(_regularized), train_method(_train_method), mini_batch_size(_mini_batch_size) { term_crit = CvTermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, 0.001); } -CvLR_TrainParams::~CvLR_TrainParams() -{ - -} - -CvLR::CvLR() +LogisticRegression::LogisticRegression() { default_model_name = "my_lr"; - // set_default_params(); } - -CvLR::CvLR(const cv::Mat& _data, const cv::Mat& _labels, const CvLR_TrainParams& _params) +LogisticRegression::LogisticRegression(cv::InputArray data, cv::InputArray labels, const LogisticRegressionParams& pms) { - this->params = _params; + this->params = pms; default_model_name = "my_lr"; - train(_data, _labels); + train(data, labels); } -CvLR::~CvLR() +LogisticRegression::~LogisticRegression() { clear(); } - -bool CvLR::train(const cv::Mat& _data_i, const cv::Mat& _labels_i) +bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) { - CV_Assert( !_labels_i.empty() && !_data_i.empty()); + clear(); + cv::Mat _data_i = data_ip.getMat(); + cv::Mat _labels_i = labels_ip.getMat(); - // check the number of colums - CV_Assert( _labels_i.cols == 1); + CV_Assert( !_labels_i.empty() && !_data_i.empty()); + // check the number of columns if(_labels_i.cols != 1) { - cv::error(Error::StsBadArg, "_labels_i should be a column matrix", "cv::ml::CvLR::train", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "_labels_i should be a column matrix", "cv::ml::LogisticRegression::train", __FILE__, __LINE__); } + // check data type. 
// data should be of floating type CV_32FC1 if((_data_i.type() != CV_32FC1) || (_labels_i.type() != CV_32FC1)) { - cv::error(Error::StsBadArg, "train: data and labels must be a floating point matrix", "cv::ml::CvLR::train", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "train: data and labels must be a floating point matrix", "cv::ml::LogisticRegression::train", __FILE__, __LINE__); } bool ok = false; cv::Mat labels; - //CvLR::set_label_map(_labels_i); set_label_map(_labels_i); int num_classes = this->forward_mapper.size(); // add a column of ones cv::Mat data_t = cv::Mat::zeros(_data_i.rows, _data_i.cols+1, CV_32F); vconcat(cv::Mat(_data_i.rows, 1, _data_i.type(), Scalar::all(1.0)), data_t.col(0)); + for (int i=1;i::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) { new_local_labels = (labels_l == it->second)/255; - // cout<<"processing class "<second<learnt_thetas = thetas.clone(); if( cvIsNaN( (double)cv::sum(this->learnt_thetas)[0] ) ) { - cv::error(Error::StsBadArg, "train: check training parameters. Invalid training classifier","cv::ml::CvLR::train", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "train: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::train", __FILE__, __LINE__); } - ok = true; - return ok; } -float CvLR::predict(const Mat& _data) -{ - cv::Mat pred_labs; - pred_labs = cv::Mat::zeros(1,1, _data.type()); - - if(_data.rows >1) - { - cv::error(Error::StsBadArg, "predict: _data should have only 1 row", "cv::ml::CvLR::predict", __FILE__, __LINE__); - } - - - predict(_data, pred_labs); - - return static_cast(pred_labs.at(0,0)); -} -float CvLR::predict(const cv::Mat& _data, cv::Mat& _pred_labs) +void LogisticRegression::predict( cv::InputArray _ip_data, cv::OutputArray _output_predicted_labels ) const { /* returns a class of the predicted class class names can be 1,2,3,4, .... 
etc */ - cv::Mat thetas; + + cv::Mat thetas, data, pred_labs; + data = _ip_data.getMat(); // check if learnt_mats array is populated if(this->learnt_thetas.total()<=0) { - cv::error(Error::StsBadArg, "predict: classifier should be trained first", "cv::ml::CvLR::predict", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "predict: classifier should be trained first", "cv::ml::LogisticRegression::predict", __FILE__, __LINE__); } - if(_data.type() != CV_32F) + if(data.type() != CV_32F) { - cv::error(Error::StsBadArg, "predict: _data must be of floating type","cv::ml::CvLR::predict",__FILE__, __LINE__); + cv::error(Error::StsBadArg, "predict: data must be of floating type","cv::ml::LogisticRegression::predict",__FILE__, __LINE__); } // add a column of ones - cv::Mat data_t = cv::Mat::zeros(_data.rows, _data.cols+1, CV_32F); + cv::Mat data_t = cv::Mat::zeros(data.rows, data.cols+1, CV_32F); for (int i=0;ilearnt_thetas.convertTo(thetas, CV_32F); @@ -254,74 +226,65 @@ float CvLR::predict(const cv::Mat& _data, cv::Mat& _pred_labs) cv::Mat labels; cv::Mat labels_c; cv::Mat temp_pred; - - cv::Mat pred_m = cv::Mat::zeros(data_t.rows, thetas.rows, _data.type()); + + cv::Mat pred_m = cv::Mat::zeros(data_t.rows, thetas.rows, data.type()); if(thetas.rows == 1) { temp_pred = calc_sigmoid(data_t*thetas.t()); CV_Assert(temp_pred.cols==1); + // if greater than 0.5, predict class 0 or predict class 1 temp_pred = (temp_pred>0.5)/255; temp_pred.convertTo(labels_c, CV_32S); } - else { for(int i = 0;ireverse_mapper); - - // convert _pred_labs to integer type - - _pred_labs.convertTo(_pred_labs, CV_32S); - - return 0.0; + pred_labs = remap_labels(labels_c, this->reverse_mapper); + // convert pred_labs to integer type + pred_labs.convertTo(pred_labs, CV_32S); + pred_labs.copyTo(_output_predicted_labels); } -cv::Mat CvLR::calc_sigmoid(const Mat& data) +cv::Mat LogisticRegression::calc_sigmoid(const Mat& data) { cv::Mat dest; cv::exp(-data, dest); return 1.0/(1.0+dest); } -double CvLR::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +double LogisticRegression::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) { int llambda = 0; int m; int n; - double cost = 0; double rparameter = 0; - cv::Mat gradient; cv::Mat theta_b; cv::Mat theta_c; + cv::Mat d_a; + cv::Mat d_b; m = _data.rows; n = _data.cols; gradient = cv::Mat::zeros( _init_theta.rows, _init_theta.cols, _init_theta.type()); - theta_b = _init_theta(Range(1, n), Range::all()); - cv::multiply(theta_b, theta_b, theta_c, 1); if(this->params.regularized > 0) @@ -329,7 +292,7 @@ double CvLR::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv llambda = 1; } - if(this->params.norm == CvLR::REG_L1) + if(this->params.norm == LogisticRegression::REG_L1) { rparameter = (llambda/(2*m)) * cv::sum(theta_b)[0]; } @@ -339,16 +302,13 @@ double CvLR::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv rparameter = (llambda/(2*m)) * cv::sum(theta_c)[0]; } - - // cv::Mat d_a = LogisticRegression::CvLR::calc_sigmoid(_data* _init_theta); - cv::Mat d_a = calc_sigmoid(_data* _init_theta); + d_a = calc_sigmoid(_data* _init_theta); cv::log(d_a, d_a); cv::multiply(d_a, _labels, d_a); - // cv::Mat d_b = 1 - LogisticRegression::CvLR::calc_sigmoid(_data * _init_theta); - cv::Mat d_b = 1 - calc_sigmoid(_data * _init_theta); + d_b = 1 - calc_sigmoid(_data * _init_theta); cv::log(d_b, d_b); cv::multiply(d_b, 1-_labels, d_b); @@ -358,35 +318,27 @@ double CvLR::compute_cost(const cv::Mat& 
_data, const cv::Mat& _labels, const cv return cost; } -cv::Mat CvLR::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) { // implements batch gradient descent - if(this->params.alpha<=0) { - cv::error(Error::StsBadArg, "compute_batch_gradient: check training parameters for the classifier","cv::ml::CvLR::compute_batch_gradient", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "compute_batch_gradient: check training parameters for the classifier","cv::ml::LogisticRegression::compute_batch_gradient", __FILE__, __LINE__); } if(this->params.num_iters <= 0) { - cv::error(Error::StsBadArg,"compute_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::CvLR::compute_batch_gradient",__FILE__,__LINE__); + cv::error(Error::StsBadArg,"compute_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::LogisticRegression::compute_batch_gradient",__FILE__,__LINE__); } int llambda = 0; - /////////////////////////////////////////////////// double ccost; - /////////////////////////////////////////////////// int m, n; - cv::Mat pcal_a; cv::Mat pcal_b; cv::Mat pcal_ab; cv::Mat gradient; cv::Mat theta_p = _init_theta.clone(); - - // cout<<"_data size "<<_data.rows<<", "<<_data.cols<params.debug == 1 && i%(this->params.num_iters/2)==0) // - // { - // cout<<"iter: "<params.alpha<params.num_iters<params.norm<params.debug<params.regularized<params.train_method<(1/m)) * _data.t(); @@ -433,7 +370,6 @@ cv::Mat CvLR::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _label gradient.row(0) = ((float)1/m) * sum(pcal_ab)[0]; - pcal_b = _data(Range::all(), Range(1,n)); //cout<<"for each training data entry"<(this->params.alpha)/m)*gradient; - //cout<<"updated theta_p"<params.minibatchsize; - - // if(this->minibatchsize == 0) - // { - // cv::error(Error::StsDivByZero, "compute_mini_batch_gradient: set CvLR::MINI_BATCH value to a non-zero number (and less than number of samples in a given class) ", "cv::ml::CvLR::compute_mini_batch_gradient", __FILE__, __LINE__); - // } + int size_b = this->params.mini_batch_size; - if(this->params.minibatchsize <= 0 || this->params.alpha == 0) + if(this->params.mini_batch_size <= 0 || this->params.alpha == 0) { - cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters for the classifier","cv::ml::CvLR::compute_mini_batch_gradient", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters for the classifier","cv::ml::LogisticRegression::compute_mini_batch_gradient", __FILE__, __LINE__); } if(this->params.num_iters <= 0) { - cv::error(Error::StsBadArg,"compute_mini_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::CvLR::compute_mini_batch_gradient",__FILE__,__LINE__); + cv::error(Error::StsBadArg,"compute_mini_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::LogisticRegression::compute_mini_batch_gradient",__FILE__,__LINE__); } cv::Mat pcal_a; @@ -512,27 +437,11 @@ cv::Mat CvLR::compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _ ccost = compute_cost(data_d, labels_l, theta_p); - if( cvIsNaN( ccost ) == 1) { - cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters. 
Invalid training classifier","cv::ml::CvLR::compute_mini_batch_gradient", __FILE__, __LINE__); + cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::compute_mini_batch_gradient", __FILE__, __LINE__); } - /////////////////////////////////////////////////// - // if(this->params.debug == 1 && i%(this->params.term_crit.max_iter/2)==0) - // { - // cout<<"iter: "<params.alpha<params.num_iters<params.norm<params.debug<params.regularized<params.train_method<params.minibatchsize<(1/m)) * data_d.t(); @@ -552,16 +461,13 @@ cv::Mat CvLR::compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _ for(int k = 1;k(this->params.alpha)/m)*gradient; - j+=this->params.minibatchsize; + j+=this->params.mini_batch_size; if(j+size_b>_data.rows) { @@ -569,45 +475,12 @@ cv::Mat CvLR::compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _ break; } } - return theta_p; } - -std::map CvLR::get_label_map(const cv::Mat& _labels_i) +bool LogisticRegression::set_label_map(const cv::Mat& _labels_i) { - // this function creates two maps to map user defined labels to program friendsly labels - // two ways. - - cv::Mat labels; - int ii = 0; - - _labels_i.convertTo(labels, CV_32S); - - for(int i = 0;iforward_mapper[labels.at(i)] += 1; - } - - for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) - { - this->forward_mapper[it->first] = ii; - ii += 1; - } - - for(map::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it) - { - this->reverse_mapper[it->second] = it->first; - } - - return this->forward_mapper; -} - -bool CvLR::set_label_map(const cv::Mat& _labels_i) -{ - // this function creates two maps to map user defined labels to program friendsly labels - // two ways. - + // this function creates two maps to map user defined labels to program friendly labels two ways. 
int ii = 0; cv::Mat labels; bool ok = false; @@ -639,7 +512,7 @@ bool CvLR::set_label_map(const cv::Mat& _labels_i) return ok; } -cv::Mat CvLR::remap_labels(const Mat& _labels_i, std::map lmap) +cv::Mat LogisticRegression::remap_labels(const Mat& _labels_i, const std::map& lmap) { cv::Mat labels; _labels_i.convertTo(labels, CV_32S); @@ -650,105 +523,86 @@ cv::Mat CvLR::remap_labels(const Mat& _labels_i, std::map lmap) for(int i =0;i(i,0) = lmap[labels.at(i,0)]; + new_labels.at(i,0) = lmap.find(labels.at(i,0))->second; } - return new_labels; } - -bool CvLR::set_default_params() -{ - // set default parameters for the Logisitic Regression classifier - this->params.alpha = 1.0; - this->params.term_crit.max_iter = 10000; - this->params.norm = CvLR::REG_L2; - /////////////////////////////////////////////////// - // this->params.debug = 1; - /////////////////////////////////////////////////// - this->params.regularized = 1; - this->params.train_method = CvLR::MINI_BATCH; - this->params.minibatchsize = 10; - - return true; -} - -void CvLR::clear() +void LogisticRegression::clear() { this->learnt_thetas.release(); this->labels_o.release(); this->labels_n.release(); } -void CvLR::read( CvFileStorage* fs, CvFileNode* node ) +void LogisticRegression::write(FileStorage& fs) const { - CvMat *newData; - CvMat *o_labels; - CvMat *n_labels; + CV_Assert(fs.isOpened() == 1); + string desc = "Logisitic Regression Classifier"; + fs<<"classifier"<params.alpha; + fs<<"iterations"<params.num_iters; + fs<<"norm"<params.norm; + fs<<"regularized"<params.regularized; + fs<<"train_method"<params.train_method; + if(this->params.train_method == LogisticRegression::MINI_BATCH) + { + fs<<"mini_batch_size"<params.mini_batch_size; + } + fs<<"learnt_thetas"<learnt_thetas; + fs<<"n_labels"<labels_n; + fs<<"o_labels"<labels_o; +} - this->params.alpha = cvReadRealByName(fs, node,"alpha", 1.0); - this->params.num_iters = cvReadIntByName(fs, node,"iterations", 1000); - this->params.norm = cvReadIntByName(fs, node,"norm", 1); - // this->params.debug = cvReadIntByName(fs, node,"debug", 1); - this->params.regularized = cvReadIntByName(fs, node,"regularized", 1); - this->params.train_method = cvReadIntByName(fs, node,"train_method", 0); - - if(this->params.train_method == CvLR::MINI_BATCH) +void LogisticRegression::read(const FileNode& fn ) +{ + // check if empty + if(fn.empty()) { - this->params.minibatchsize = cvReadIntByName(fs, node,"mini_batch_size", 1); + cv::error(Error::StsBadArg, "read: empty FileNode object","cv::ml::LogisticRegression::read", __FILE__, __LINE__); } - newData = (CvMat*)cvReadByName( fs, node, "learnt_thetas" ); - o_labels = (CvMat*)cvReadByName( fs, node, "o_labels" ); - n_labels = (CvMat*)cvReadByName( fs, node, "n_labels" ); + this->params.alpha = (double)fn["alpha"]; + this->params.num_iters = (int)fn["iterations"]; + this->params.norm = (int)fn["norm"]; + this->params.regularized = (int)fn["regularized"]; + this->params.train_method = (int)fn["train_method"]; + + if(this->params.train_method == LogisticRegression::MINI_BATCH) + { + this->params.mini_batch_size = (int)fn["mini_batch_size"]; + } - this->learnt_thetas = cv::Mat(newData->rows, newData->cols, CV_32F, newData->data.db); - this->labels_o = cv::Mat(o_labels->rows, o_labels->cols, CV_32S, o_labels->data.ptr); - this->labels_n = cv::Mat(n_labels->rows, n_labels->cols, CV_32S, n_labels->data.ptr); + fn["learnt_thetas"] >> this->learnt_thetas; + fn["o_labels"] >> this->labels_o; + fn["n_labels"] >> this->labels_n; for(int ii 
=0;iiforward_mapper[labels_o.at(ii,0)] = labels_n.at(ii,0); this->reverse_mapper[labels_n.at(ii,0)] = labels_o.at(ii,0); } - } -void CvLR::write( CvFileStorage* fs, const char* name ) const +void LogisticRegression::save(string filepath) const { - string desc = "Logisitic Regression Classifier"; - - cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_ML_LR ); - - cvWriteString( fs, "classifier", desc.c_str()); - cvWriteReal(fs,"alpha",this->params.alpha); - cvWriteInt(fs,"iterations",this->params.num_iters); - cvWriteInt(fs,"norm",this->params.norm); - // cvWriteInt(fs,"debug",this->params.debug); - cvWriteInt(fs,"regularized",this->params.regularized); - cvWriteInt(fs,"train_method",this->params.train_method); - - if(this->params.train_method == CvLR::MINI_BATCH) - { - cvWriteInt(fs,"mini_batch_size",this->params.minibatchsize); - } + FileStorage fs; + fs.open(filepath.c_str(),FileStorage::WRITE); + write(fs); + fs.release(); - CvMat mat_learnt_thetas = this->learnt_thetas; - CvMat o_labels = this->labels_o; - CvMat n_labels = this->labels_n; - - cvWrite(fs, "learnt_thetas", &mat_learnt_thetas ); - cvWrite(fs, "n_labels", &n_labels); - cvWrite(fs, "o_labels", &o_labels); - - cvEndWriteStruct(fs); +} +void LogisticRegression::load(const string filepath) +{ + FileStorage fs; + fs.open(filepath.c_str(),FileStorage::READ); + FileNode fn = fs.root(); + read(fn); } - -cv::Mat CvLR::get_learnt_mat() +cv::Mat LogisticRegression::get_learnt_thetas() const { return this->learnt_thetas; } - /* End of file. */ From d5ad4f32556dd4e2f12ca99aed3d51d256463b51 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 08:34:01 -0400 Subject: [PATCH 13/43] added updated logistic regression prototype with newer C++ API --- modules/ml/include/opencv2/ml.hpp | 87 +++++++++++++------------------ 1 file changed, 36 insertions(+), 51 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index ea90538a92..e424f2b499 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -571,81 +571,66 @@ public: /****************************************************************************************\ * Logistic Regression * \****************************************************************************************/ - -struct CV_EXPORTS_W_MAP CvLR_TrainParams +namespace cv +{ +struct CV_EXPORTS LogisticRegressionParams { - CV_PROP_RW double alpha; - CV_PROP_RW int num_iters; - CV_PROP_RW int norm; - /////////////////////////////////////////////////// - // CV_PROP_RW int debug; - /////////////////////////////////////////////////// - CV_PROP_RW int regularized; - CV_PROP_RW int train_method; - CV_PROP_RW int minibatchsize; - - CV_PROP_RW CvTermCriteria term_crit; - - CvLR_TrainParams(); - /////////////////////////////////////////////////// - // CvLR_TrainParams(double alpha, int num_iters, int norm, int debug, int regularized, int train_method, int minbatchsize); - /////////////////////////////////////////////////// - CvLR_TrainParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize); - ~CvLR_TrainParams(); + double alpha; + int num_iters; + int norm; + int regularized; + int train_method; + int mini_batch_size; + CvTermCriteria term_crit; + + LogisticRegressionParams(); + LogisticRegressionParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize); }; -class CV_EXPORTS_W CvLR : public CvStatModel +class CV_EXPORTS LogisticRegression { public: - CvLR(); - // 
CvLR(const CvLR_TrainParams& Params); - - CvLR(const cv::Mat& data, const cv::Mat& labels, const CvLR_TrainParams& params); - - virtual ~CvLR(); - enum { REG_L1=0, REG_L2 = 1}; - enum { BATCH, MINI_BATCH}; + LogisticRegression(); + LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params); + virtual ~LogisticRegression(); + enum { REG_L1 = 0, REG_L2 = 1}; + enum { BATCH = 0, MINI_BATCH = 1}; - virtual bool train(const cv::Mat& data, const cv::Mat& labels);//, const CvLR_TrainParams& params); + virtual bool train(cv::InputArray data_ip, cv::InputArray label_ip); + virtual void predict( cv::InputArray data, cv::OutputArray predicted_labels ) const; - virtual float predict(const cv::Mat& data, cv::Mat& predicted_labels); - virtual float predict(const cv::Mat& data); + virtual void save(std::string filepath) const; + virtual void load(const std::string filepath); - virtual void write( CvFileStorage* storage, const char* name ) const; - virtual void read( CvFileStorage* storage, CvFileNode* node ); - - virtual void clear(); - - virtual cv::Mat get_learnt_mat(); + cv::Mat get_learnt_thetas() const; protected: - cv::Mat learnt_thetas; - CvLR_TrainParams params; - + LogisticRegressionParams params; + cv::Mat learnt_thetas; + std::string default_model_name; std::map forward_mapper; std::map reverse_mapper; - virtual bool set_default_params(); - virtual cv::Mat calc_sigmoid(const cv::Mat& data); + cv::Mat labels_o; + cv::Mat labels_n; + + static cv::Mat calc_sigmoid(const cv::Mat& data); virtual double compute_cost(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); virtual cv::Mat compute_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); virtual cv::Mat compute_mini_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); - - virtual std::map get_label_map(const cv::Mat& labels); - virtual bool set_label_map(const cv::Mat& labels); - virtual cv::Mat remap_labels(const cv::Mat& labels, const std::map lmap); + static cv::Mat remap_labels(const cv::Mat& labels, const std::map& lmap); - //cv::Mat Mapper; - - cv::Mat labels_o; - cv::Mat labels_n; + virtual void write(FileStorage& fs) const; + virtual void read(const FileNode& fn); + virtual void clear(); }; +}// namespace cv /****************************************************************************************\ * Auxilary functions declarations * From 64aaa6e1ae7cc8bbe14e3bdd949dc3d60040dad1 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 08:34:55 -0400 Subject: [PATCH 14/43] updated test for logistic regression after changes to LogisticRegression class --- modules/ml/test/test_lr.cpp | 56 +++++++++++++++---------------------- 1 file changed, 23 insertions(+), 33 deletions(-) diff --git a/modules/ml/test/test_lr.cpp b/modules/ml/test/test_lr.cpp index c82d46c137..b0ab00e6c1 100644 --- a/modules/ml/test/test_lr.cpp +++ b/modules/ml/test/test_lr.cpp @@ -73,9 +73,8 @@ static bool calculateError( const Mat& _p_labels, const Mat& _o_labels, float& e CV_Assert(_p_labels_temp.total() == _o_labels_temp.total()); CV_Assert(_p_labels_temp.rows == _o_labels_temp.rows); - Mat result = (_p_labels_temp == _o_labels_temp)/255; - accuracy = (float)cv::sum(result)[0]/result.rows; + accuracy = (float)cv::countNonZero(_p_labels_temp == _o_labels_temp)/_p_labels_temp.rows; error = 1 - accuracy; return true; } @@ -133,25 +132,23 @@ void CV_LRTest::run( int /*start_from*/ ) 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3); - CvLR_TrainParams params = CvLR_TrainParams(); Mat responses1, responses2; float error = 0.0f; - CvLR_TrainParams params1 = CvLR_TrainParams(); - CvLR_TrainParams params2 = CvLR_TrainParams(); + LogisticRegressionParams params1 = LogisticRegressionParams(); + LogisticRegressionParams params2 = LogisticRegressionParams(); params1.alpha = 1.0; params1.num_iters = 10001; - params1.norm = CvLR::REG_L2; - // params1.debug = 1; + params1.norm = LogisticRegression::REG_L2; params1.regularized = 1; - params1.train_method = CvLR::BATCH; - params1.minibatchsize = 10; + params1.train_method = LogisticRegression::BATCH; + params1.mini_batch_size = 10; // run LR classifier train classifier data.convertTo(data, CV_32FC1); labels.convertTo(labels, CV_32FC1); - CvLR lr1(data, labels, params1); + LogisticRegression lr1(data, labels, params1); // predict using the same data lr1.predict(data, responses1); @@ -164,7 +161,6 @@ void CV_LRTest::run( int /*start_from*/ ) ts->printf(cvtest::TS::LOG, "Bad prediction labels\n" ); test_code = cvtest::TS::FAIL_INVALID_OUTPUT; } - else if(error > 0.05f) { ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error); @@ -173,14 +169,13 @@ void CV_LRTest::run( int /*start_from*/ ) params2.alpha = 1.0; params2.num_iters = 9000; - params2.norm = CvLR::REG_L2; - // params2.debug = 1; + params2.norm = LogisticRegression::REG_L2; params2.regularized = 1; - params2.train_method = CvLR::MINI_BATCH; - params2.minibatchsize = 10; + params2.train_method = LogisticRegression::MINI_BATCH; + params2.mini_batch_size = 10; // now train using mini batch gradient descent - CvLR lr2(data, labels, params2); + LogisticRegression lr2(data, labels, params2); lr2.predict(data, responses2); responses2.convertTo(responses2, CV_32S); @@ -191,7 +186,6 @@ void CV_LRTest::run( int /*start_from*/ ) ts->printf(cvtest::TS::LOG, "Bad prediction labels\n" ); test_code = cvtest::TS::FAIL_INVALID_OUTPUT; } - else if(error > 0.06f) { ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error); @@ -257,7 +251,7 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3); - CvLR_TrainParams params = CvLR_TrainParams(); + // LogisticRegressionParams params = LogisticRegressionParams(); Mat responses1, responses2; Mat learnt_mat1, learnt_mat2; @@ -265,28 +259,26 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) float errorCount = 0.0; - CvLR_TrainParams params1 = CvLR_TrainParams(); - CvLR_TrainParams params2 = CvLR_TrainParams(); + LogisticRegressionParams params1 = LogisticRegressionParams(); params1.alpha = 1.0; params1.num_iters = 10001; - params1.norm = CvLR::REG_L2; - // params1.debug = 1; + params1.norm = LogisticRegression::REG_L2; params1.regularized = 1; - params1.train_method = CvLR::BATCH; - params1.minibatchsize = 10; + params1.train_method = LogisticRegression::BATCH; + params1.mini_batch_size = 10; data.convertTo(data, CV_32FC1); labels.convertTo(labels, CV_32FC1); // run LR classifier train classifier - CvLR lr1(data, labels, params1); - CvLR lr2; - learnt_mat1 = lr1.get_learnt_mat(); + LogisticRegression lr1(data, labels, params1); + LogisticRegression lr2; + learnt_mat1 = lr1.get_learnt_thetas(); + lr1.predict(data, responses1); // now save the classifier - // Write out string filename = cv::tempfile(".xml"); try { @@ -312,10 +304,9 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) lr2.predict(data, responses2); - learnt_mat2 = lr2.get_learnt_mat(); + 
learnt_mat2 = lr2.get_learnt_thetas(); - // compare difference in prediction outputs before and after loading from disk - pred_result1 = (responses1 == responses2)/255; + CV_Assert(responses1.rows == responses2.rows); // compare difference in learnt matrices before and after loading from disk comp_learnt_mats = (learnt_mat1 == learnt_mat2); @@ -326,10 +317,9 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) // compare difference in prediction outputs and stored inputs // check if there is any difference between computed learnt mat and retreived mat - errorCount += 1 - (float)cv::sum(pred_result1)[0]/pred_result1.rows; + errorCount += 1 - (float)cv::countNonZero(responses1 == responses2)/responses1.rows; errorCount += 1 - (float)cv::sum(comp_learnt_mats)[0]/comp_learnt_mats.rows; - if(errorCount>0) { ts->printf( cvtest::TS::LOG, "Different prediction results before writing and after reading (errorCount=%d).\n", errorCount ); From e4a90c19cc7187058572d662d27aa53a6f4f996b Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 08:36:00 -0400 Subject: [PATCH 15/43] updated documentation to reflect newer changes to LogisticRegression class --- modules/ml/doc/logistic_regression.rst | 112 +++++++++++++------------ 1 file changed, 59 insertions(+), 53 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 06f695641d..09647e4bcf 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -7,7 +7,7 @@ ML implements logistic regression, which is a probabilistic classification techn Like SVM, Logistic Regression can be extended to work on multi-class classification problems like digit recognition (i.e. recognizing digitis like 0,1 2, 3,... from the given images). This version of Logistic Regression supports both binary and multi-class classifications (for multi-class it creates a multiple 2-class classifiers). In order to train the logistic regression classifier, Batch Gradient Descent and Mini-Batch Gradient Descent algorithms are used (see [BatchDesWiki]_). -Logistic Regression is a discriminative classifier (see [LogRegTomMitch]_ for more details). Logistic Regression is implemented as a C++ class in ``CvLR``. +Logistic Regression is a discriminative classifier (see [LogRegTomMitch]_ for more details). Logistic Regression is implemented as a C++ class in ``LogisticRegression``. In Logistic Regression, we try to optimize the training paramater @@ -28,26 +28,26 @@ or class 0 if . In Logistic Regression, choosing the right parameters is of utmost importance for reducing the training error and ensuring high training accuracy. -``CvLR_TrainParams`` is the structure that defines parameters that are required to train a Logistic Regression classifier. -The learning rate is determined by ``CvLR_TrainParams.alpha``. It determines how faster we approach the solution. -It is a positive real number. Optimization algorithms like Batch Gradient Descent and Mini-Batch Gradient Descent are supported in ``CvLR``. +``LogisticRegressionParams`` is the structure that defines parameters that are required to train a Logistic Regression classifier. +The learning rate is determined by ``LogisticRegressionParams.alpha``. It determines how faster we approach the solution. +It is a positive real number. Optimization algorithms like Batch Gradient Descent and Mini-Batch Gradient Descent are supported in ``LogisticRegression``. 
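For reference, the gradient-descent update both training methods build on (see [BatchDesWiki]_) is, for learning rate :math:`\alpha`, batch size :math:`m`, and sigmoid :math:`\sigma`:

.. math::

    \theta \leftarrow \theta - \frac{\alpha}{m} X^T \left( \sigma(X\theta) - y \right)

The mini-batch variant applies the same step to successive slices of :math:`X` and :math:`y` of ``mini_batch_size`` rows.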
It is important that we mention the number of iterations these optimization algorithms have to run. -The number of iterations are mentioned by ``CvLR_TrainParams.num_iters``. +The number of iterations are mentioned by ``LogisticRegressionParams.num_iters``. The number of iterations can be thought as number of steps taken and learning rate specifies if it is a long step or a short step. These two parameters define how fast we arrive at a possible solution. -In order to compensate for overfitting regularization is performed, which can be enabled by setting ``CvLR_TrainParams.regularized`` to a positive integer (greater than zero). -One can specify what kind of regularization has to be performed by setting ``CvLR_TrainParams.norm`` to ``CvLR::REG_L1`` or ``CvLR::REG_L2`` values. -``CvLR`` provides a choice of 2 training methods with Batch Gradient Descent or the Mini-Batch Gradient Descent. To specify this, set ``CvLR_TrainParams.train_method`` to either ``CvLR::BATCH`` or ``CvLR::MINI_BATCH``. -If ``CvLR_TrainParams`` is set to ``CvLR::MINI_BATCH``, the size of the mini batch has to be to a postive integer using ``CvLR_TrainParams.minibatchsize``. +In order to compensate for overfitting regularization is performed, which can be enabled by setting ``LogisticRegressionParams.regularized`` to a positive integer (greater than zero). +One can specify what kind of regularization has to be performed by setting ``LogisticRegressionParams.norm`` to ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2`` values. +``LogisticRegression`` provides a choice of 2 training methods with Batch Gradient Descent or the Mini-Batch Gradient Descent. To specify this, set ``LogisticRegressionParams.train_method`` to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. +If ``LogisticRegressionParams`` is set to ``LogisticRegression::MINI_BATCH``, the size of the mini batch has to be to a postive integer using ``LogisticRegressionParams.mini_batch_size``. A sample set of training parameters for the Logistic Regression classifier can be initialized as follows: :: - CvLR_TrainParams params; + LogisticRegressionParams params; params.alpha = 0.5; params.num_iters = 10000; - params.norm = CvLR::REG_L2; + params.norm = LogisticRegression::REG_L2; params.regularized = 1; - params.train_method = CvLR::MINI_BATCH; - params.minibatchsize = 10; + params.train_method = LogisticRegression::MINI_BATCH; + params.mini_batch_size = 10; .. [LogRegWiki] http://en.wikipedia.org/wiki/Logistic_regression. Wikipedia article about the Logistic Regression algorithm. @@ -56,9 +56,9 @@ A sample set of training parameters for the Logistic Regression classifier can b .. [LogRegTomMitch] http://www.cs.cmu.edu/~tom/NewChapters.html. "Generative and Discriminative Classifiers: Naive Bayes and Logistic Regression" in Machine Learning, Tom Mitchell. .. [BatchDesWiki] http://en.wikipedia.org/wiki/Gradient_descent_optimization. Wikipedia article about Gradient Descent based optimization. -CvLR_TrainParams ----------------- -.. ocv:struct:: CvLR_TrainParams +LogisticRegressionParams +------------------------ +.. ocv:struct:: LogisticRegressionParams Parameters of the Logistic Regression training algorithm. You can initialize the structure using a constructor or declaring the variable and initializing the the individual parameters. @@ -74,7 +74,7 @@ CvLR_TrainParams .. ocv:member:: int norm - The type of normalization applied. It takes value ``CvLR::L1`` or ``CvLR::L2``. + The type of normalization applied. 
It takes value ``LogisticRegression::L1`` or ``LogisticRegression::L2``. .. ocv:member:: int regularized @@ -82,89 +82,95 @@ CvLR_TrainParams .. ocv:member:: int train_method - The kind of training method used to train the classifier. It should be set to either ``CvLR::BATCH`` or ``CvLR::MINI_BATCH``. + The kind of training method used to train the classifier. It should be set to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. - .. ocv:member:: int minibatchsize + .. ocv:member:: int mini_batch_size - If the training method is set to CvLR::MINI_BATCH, it has to be set to positive integer. It can range from 1 to number of training samples. + If the training method is set to LogisticRegression::MINI_BATCH, it has to be set to positive integer. It can range from 1 to number of training samples. -CvLR_TrainParams::CvLR_TrainParams ----------------------------------- +LogisticRegressionParams::LogisticRegressionParams +-------------------------------------------------- The constructors. -.. ocv:function:: CvLR_TrainParams::CvLR_TrainParams() +.. ocv:function:: LogisticRegressionParams::LogisticRegressionParams() -.. ocv:function:: CvLR_TrainParams::CvLR_TrainParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize) +.. ocv:function:: LogisticRegressionParams::LogisticRegressionParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize) :param alpha: Specifies the learning rate. :param num_iters: Specifies the number of iterations. - :param norm: Specifies the kind of regularization to be applied. ``CvLR::REG_L1`` or ``CvLR::REG_L2``. To use this, set ``CvLR_TrainParams.regularized`` to a integer greater than zero. + :param norm: Specifies the kind of regularization to be applied. ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2``. To use this, set ``LogisticRegressionParams.regularized`` to a integer greater than zero. :param: regularized: To enable or disable regularization. Set to positive integer (greater than zero) to enable and to 0 to disable. - :param: train_method: Specifies the kind of training method used. It should be set to either ``CvLR::BATCH`` or ``CvLR::MINI_BATCH``. If using ``CvLR::MINI_BATCH``, set ``CvLR_TrainParams.minibatchsize`` to a positive integer. + :param: train_method: Specifies the kind of training method used. It should be set to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. If using ``LogisticRegression::MINI_BATCH``, set ``LogisticRegressionParams.mini_batch_size`` to a positive integer. - :param: minibatchsize: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. + :param: mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. By initializing this structure, one can set all the parameters required for Logistic Regression classifier. -CvLR ----- -.. ocv:class:: CvLR : public CvStatModel +LogisticRegression +------------------ +.. ocv:class:: LogisticRegression : public CvStatModel Implements Logistic Regression classifier. -CvLR::CvLR ----------- +LogisticRegression::LogisticRegression +-------------------------------------- The constructors. -.. ocv:function:: CvLR::CvLR() +.. ocv:function:: LogisticRegression::LogisticRegression() -.. ocv:function:: CvLR::CvLR(const cv::Mat& data, const cv::Mat& labels, const CvLR_TrainParams& params) +.. 
ocv:function:: LogisticRegression::LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params); :param data: The data variable of type ``CV_32F``. Each data instance has to be arranged per across different rows. - :param labels: The data variable of type ``CV_32F``. Each label instance has to be arranged across differnet rows. + :param labels_ip: The data variable of type ``CV_32F``. Each label instance has to be arranged across different rows. - :param params: The training parameters for the classifier of type ``CVLR_TrainParams``. + :param params: The training parameters for the classifier of type ``LogisticRegressionParams``. The constructor with parameters allows to create a Logistic Regression object intialized with given data and trains it. -CvLR::train ------------ +LogisticRegression::train +------------------------- Trains the Logistic Regression classifier and returns true if successful. -.. ocv:function:: bool CvLR::train(const cv::Mat& data, const cv::Mat& labels) +.. ocv:function:: bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray label_ip) - :param data: The data variable of type ``CV_32F``. Each data instance has to be arranged per across different rows. + :param data_ip: An InputArray variable of type ``CV_32F``. Each data instance has to be arranged per across different rows. - :param labels: The data variable of type ``CV_32F``. Each label instance has to be arranged across differnet rows. + :param labels_ip: An InputArray variable of type ``CV_32F``. Each label instance has to be arranged across differnet rows. -CvLR::predict -------------- +LogisticRegression::predict +--------------------------- Predicts responses for input samples and returns a float type. -.. ocv:function:: float CvLR::predict(const Mat& data) - - :param data: The data variable should be a row matrix and of type ``CV_32F``. - -.. ocv:function:: float CvLR::predict( const Mat& data, Mat& predicted_labels ) +.. ocv:function:: void LogisticRegression::predict( cv::InputArray data, cv::OutputArray predicted_labels ) const; :param data: The input data for the prediction algorithm. The ``data`` variable should be of type ``CV_32F``. :param predicted_labels: Predicted labels as a column matrix and of type ``CV_32S``. -The function ``CvLR::predict(const Mat& data)`` returns the label of single data variable. It should be used if data contains only 1 row. - -CvLR::get_learnt_mat() ----------------------- +LogisticRegression::get_learnt_thetas +--------------------------------------- This function returns the trained paramters arranged across rows. For a two class classifcation problem, it returns a row matrix. -.. ocv:function:: cv::Mat CvLR::get_learnt_mat() +.. ocv:function:: cv::Mat LogisticRegression::get_learnt_thetas() It returns learnt paramters of the Logistic Regression as a matrix of type ``CV_32F``. + +LogisticRegression::save +------------------------ +This function saves the trained LogisticRegression clasifier to disk. + +.. ocv:function:: void LogisticRegression::save(string filepath) const + +LogisticRegression::load +------------------------ +This function loads the trained LogisticRegression clasifier from disk. + +.. ocv:function:: void LogisticRegression::load(const string filepath) From 56d715b091c05aac5bb106ad004620909f3d31c9 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 08:47:49 -0400 Subject: [PATCH 16/43] fixed typedef for LogisticRegression. 
Updated LogisticRegression prototype with newer C++ API --- modules/ml/include/opencv2/ml.hpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index e424f2b499..3abc890342 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -647,8 +647,8 @@ CV_EXPORTS void randGaussMixture( InputArray means, InputArray covs, InputArray /* creates test set */ CV_EXPORTS void createConcentricSpheresTestSet( int nsamples, int nfeatures, int nclasses, OutputArray samples, OutputArray responses); -typedef CvLR_TrainParams LogisticRegression_TrainParams; -typedef CvLR LogisticRegression; +typedef LogisticRegressionParams LRParams; +typedef LogisticRegression LR; } From 78f3b0ad1fd2f6a362348eb2276adc173ae92994 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 09:17:58 -0400 Subject: [PATCH 17/43] removed unnecessary semicolon in LogisticRegression class documentation --- modules/ml/doc/logistic_regression.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 09647e4bcf..f143645267 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -123,7 +123,7 @@ The constructors. .. ocv:function:: LogisticRegression::LogisticRegression() -.. ocv:function:: LogisticRegression::LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params); +.. ocv:function:: LogisticRegression::LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params) :param data: The data variable of type ``CV_32F``. Each data instance has to be arranged per across different rows. @@ -148,7 +148,7 @@ LogisticRegression::predict --------------------------- Predicts responses for input samples and returns a float type. -.. ocv:function:: void LogisticRegression::predict( cv::InputArray data, cv::OutputArray predicted_labels ) const; +.. ocv:function:: void LogisticRegression::predict( cv::InputArray data, cv::OutputArray predicted_labels ) const :param data: The input data for the prediction algorithm. The ``data`` variable should be of type ``CV_32F``. 
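Since the single-row ``float predict(const Mat&)`` overload was removed in the rewrite, classifying one sample now means passing a 1xN matrix and reading back the lone entry. A sketch, assuming a trained classifier ``lr`` and a CV_32F row vector ``sample``:

    cv::Mat predicted;
    lr.predict(sample, predicted);        // predicted becomes a 1x1 CV_32S matrix
    int label = predicted.at<int>(0, 0);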
From 50031ffb1597e4f9dacd041c34ecbed61383716f Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 09:20:18 -0400 Subject: [PATCH 18/43] removed trailing whitespaces from updated logistic regression definition --- modules/ml/include/opencv2/ml.hpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index 3abc890342..9575901c6b 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -613,12 +613,9 @@ protected: std::string default_model_name; std::map forward_mapper; std::map reverse_mapper; - cv::Mat labels_o; cv::Mat labels_n; - static cv::Mat calc_sigmoid(const cv::Mat& data); - virtual double compute_cost(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); virtual cv::Mat compute_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); virtual cv::Mat compute_mini_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); From 5bb43f69abf288d107b40cb3ddcd4cb23978bf8f Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 09:21:07 -0400 Subject: [PATCH 19/43] removed trailing whitespaces from predict function --- modules/ml/src/lr.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 5d53d0f868..035a08dfa7 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -226,7 +226,6 @@ void LogisticRegression::predict( cv::InputArray _ip_data, cv::OutputArray _outp cv::Mat labels; cv::Mat labels_c; cv::Mat temp_pred; - cv::Mat pred_m = cv::Mat::zeros(data_t.rows, thetas.rows, data.type()); if(thetas.rows == 1) From 1652b2a4432d79d093904d2d8ed47b656bc15b1e Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 4 Oct 2013 14:00:38 -0400 Subject: [PATCH 20/43] fixed trailing whitespaces in LogisticRegression code. --- modules/ml/include/opencv2/ml.hpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index 9575901c6b..aa3df06ac9 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -609,12 +609,14 @@ public: protected: LogisticRegressionParams params; - cv::Mat learnt_thetas; + cv::Mat learnt_thetas; std::string default_model_name; std::map forward_mapper; std::map reverse_mapper; + cv::Mat labels_o; cv::Mat labels_n; + static cv::Mat calc_sigmoid(const cv::Mat& data); virtual double compute_cost(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); virtual cv::Mat compute_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); From af88f0c067c7f1fc8ca962407de5b72b9031b227 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Mon, 7 Oct 2013 16:01:50 -0400 Subject: [PATCH 21/43] updated documentation to do reflect changes to logistic regression class --- modules/ml/doc/logistic_regression.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index f143645267..79518b3f4c 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -113,7 +113,7 @@ By initializing this structure, one can set all the parameters required for Logi LogisticRegression ------------------ -.. ocv:class:: LogisticRegression : public CvStatModel +.. ocv:class:: LogisticRegression Implements Logistic Regression classifier. 
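A detail worth keeping in mind for the sample update that follows: on multi-class data, train() fits one binary classifier per class. User labels are first remapped to 0..k-1, then each class in turn becomes a 0/1 target. A sketch of that one-vs-rest labeling step (``labels_l`` and ``target_class`` are illustrative names; dividing OpenCV's 255/0 comparison mask by 255 folds it to 1/0, exactly as the train() diff earlier does):

    // labels_l: CV_32S column of remapped labels in [0, k)
    cv::Mat binary_labels = (labels_l == target_class) / 255;  // 1 for this class, 0 otherwise
    binary_labels.convertTo(binary_labels, CV_32F);            // cost/gradient code expects floats
    // one row of learnt_thetas is then fit against binary_labels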
From 3cdd2b27619b90e035d0a562b23fe9de254a9a5d Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 05:30:46 -0500 Subject: [PATCH 22/43] updated logistic regression program with new api example --- samples/cpp/logistic_regression.cpp | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/samples/cpp/logistic_regression.cpp b/samples/cpp/logistic_regression.cpp index 71b71af687..1ef261d4a6 100644 --- a/samples/cpp/logistic_regression.cpp +++ b/samples/cpp/logistic_regression.cpp @@ -76,6 +76,7 @@ int main() Mat labels_train, labels_test; Mat responses, result; + FileStorage fs1, fs2; FileStorage f; @@ -120,12 +121,17 @@ int main() cout<<"initializing Logisitc Regression Parameters\n"< Date: Tue, 5 Nov 2013 05:31:49 -0500 Subject: [PATCH 23/43] updated documentation to reflect new api changes for logistic regression --- modules/ml/doc/logistic_regression.rst | 53 +++++++++++++++++--------- 1 file changed, 36 insertions(+), 17 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 79518b3f4c..f527917ec2 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -88,6 +88,9 @@ LogisticRegressionParams If the training method is set to LogisticRegression::MINI_BATCH, it has to be set to positive integer. It can range from 1 to number of training samples. + .. ocv:member:: cv::TermCriteria term_crit + + Sets termination criteria for training algorithm. LogisticRegressionParams::LogisticRegressionParams -------------------------------------------------- @@ -95,19 +98,34 @@ The constructors. .. ocv:function:: LogisticRegressionParams::LogisticRegressionParams() -.. ocv:function:: LogisticRegressionParams::LogisticRegressionParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize) +.. ocv:function:: LogisticRegressionParams::LogisticRegressionParams(double learning_rate, int iters, int train_method, int normlization, int reg, int mini_batch_size) - :param alpha: Specifies the learning rate. + :param learning_rate: Specifies the learning rate. - :param num_iters: Specifies the number of iterations. + :param iters: Specifies the number of iterations. - :param norm: Specifies the kind of regularization to be applied. ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2``. To use this, set ``LogisticRegressionParams.regularized`` to a integer greater than zero. + :param: train_method: Specifies the kind of training method used. It should be set to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. If using ``LogisticRegression::MINI_BATCH``, set ``LogisticRegressionParams.mini_batch_size`` to a positive integer. - :param: regularized: To enable or disable regularization. Set to positive integer (greater than zero) to enable and to 0 to disable. + :param normalization: Specifies the kind of regularization to be applied. ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2`` (L1 norm or L2 norm). To use this, set ``LogisticRegressionParams.regularized`` to a integer greater than zero. - :param: train_method: Specifies the kind of training method used. It should be set to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. If using ``LogisticRegression::MINI_BATCH``, set ``LogisticRegressionParams.mini_batch_size`` to a positive integer. + :param: reg: To enable or disable regularization. 
Set to positive integer (greater than zero) to enable and to 0 to disable. - :param mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. + :param mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. Will only be used if using ``LogisticRegression::MINI_BATCH`` training algorithm. + + +The full constructor initializes corresponding members. The default constructor creates an object with dummy parameters. + +:: + LogisticRegressionParams::LogisticRegressionParams() + { + term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.001); + alpha = 0.001; + num_iters = 1000; + norm = LogisticRegression::REG_L2; + regularized = 1; + train_method = LogisticRegression::BATCH; + mini_batch_size = 1; + } By initializing this structure, one can set all the parameters required for Logistic Regression classifier. @@ -121,7 +139,9 @@ LogisticRegression::LogisticRegression -------------------------------------- The constructors. -.. ocv:function:: LogisticRegression::LogisticRegression() +.. ocv:function:: LogisticRegression( const LogisticRegressionParams& params) + + :param params: The training parameters for the classifier of type ``LogisticRegressionParams``. .. ocv:function:: LogisticRegression::LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params) @@ -154,23 +174,22 @@ Predicts responses for input samples and returns a float type. :param predicted_labels: Predicted labels as a column matrix and of type ``CV_32S``. - LogisticRegression::get_learnt_thetas ---------------------------------------- +------------------------------------- This function returns the trained parameters arranged across rows. For a two class classification problem, it returns a row matrix. .. ocv:function:: cv::Mat LogisticRegression::get_learnt_thetas() It returns learnt parameters of the Logistic Regression as a matrix of type ``CV_32F``. -LogisticRegression::save ------------------------ -This function saves the trained LogisticRegression classifier to disk. +LogisticRegression::read ------------------------ +This function reads the trained LogisticRegression classifier from disk. -.. ocv:function:: void LogisticRegression::save(string filepath) const +.. ocv:function:: void LogisticRegression::read(const FileNode& fn) LogisticRegression::load ------------------------- -This function loads the trained LogisticRegression classifier from disk. +LogisticRegression::write +------------------------- +This function writes the trained LogisticRegression classifier to disk. -.. ocv:function:: void LogisticRegression::load(const string filepath) +.. 
ocv:function:: void LogisticRegression::write(FileStorage& fs) const From bf2ee3c58a0e2b8129c740bc7f0d12cd2ed7f427 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 05:33:14 -0500 Subject: [PATCH 24/43] updated prototype for logistic regression classifier --- modules/ml/include/opencv2/ml.hpp | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index aa3df06ac9..cf7f7cf070 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -581,17 +581,17 @@ struct CV_EXPORTS LogisticRegressionParams int regularized; int train_method; int mini_batch_size; - CvTermCriteria term_crit; + cv::TermCriteria term_crit; LogisticRegressionParams(); - LogisticRegressionParams(double alpha, int num_iters, int norm, int regularized, int train_method, int minbatchsize); + LogisticRegressionParams(double learning_rate, int iters, int train_method, int normlization, int reg, int mini_batch_size); + }; class CV_EXPORTS LogisticRegression { public: - - LogisticRegression(); + LogisticRegression( const LogisticRegressionParams& params); LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params); virtual ~LogisticRegression(); @@ -601,10 +601,11 @@ public: virtual bool train(cv::InputArray data_ip, cv::InputArray label_ip); virtual void predict( cv::InputArray data, cv::OutputArray predicted_labels ) const; - virtual void save(std::string filepath) const; - virtual void load(const std::string filepath); + virtual void write(FileStorage& fs) const; + virtual void read(const FileNode& fn); - cv::Mat get_learnt_thetas() const; + const cv::Mat get_learnt_thetas() const; + virtual void clear(); protected: @@ -623,11 +624,6 @@ protected: virtual cv::Mat compute_mini_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); virtual bool set_label_map(const cv::Mat& labels); static cv::Mat remap_labels(const cv::Mat& labels, const std::map& lmap); - - virtual void write(FileStorage& fs) const; - virtual void read(const FileNode& fn); - virtual void clear(); - }; }// namespace cv From 95ea09c3dc3d389af5c6c61ca1cccb18fe66d056 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 05:33:46 -0500 Subject: [PATCH 25/43] updated prototype for logistic regression classifier --- modules/ml/include/opencv2/ml.hpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index cf7f7cf070..7b03343947 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -642,9 +642,6 @@ CV_EXPORTS void randGaussMixture( InputArray means, InputArray covs, InputArray /* creates test set */ CV_EXPORTS void createConcentricSpheresTestSet( int nsamples, int nfeatures, int nclasses, OutputArray samples, OutputArray responses); -typedef LogisticRegressionParams LRParams; -typedef LogisticRegression LR; - } } From b3b4e83aed7c0d18f767fa90854c382a61f8fa99 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 05:34:45 -0500 Subject: [PATCH 26/43] updated logistic regression definition --- modules/ml/src/lr.cpp | 96 ++++++++++++++++++++++--------------------- 1 file changed, 49 insertions(+), 47 deletions(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 035a08dfa7..6ac863876e 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -55,35 +55,41 @@ #include "precomp.hpp" + using namespace cv; 
using namespace std; LogisticRegressionParams::LogisticRegressionParams() { - term_crit = CvTermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 10000, 0.001); + term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.001); alpha = 0.001; - num_iters = 10; + num_iters = 1000; norm = LogisticRegression::REG_L2; regularized = 1; train_method = LogisticRegression::BATCH; mini_batch_size = 1; } - -LogisticRegressionParams::LogisticRegressionParams(double _alpha, int _num_iters, int _norm, int _regularized, int _train_method, int _mini_batch_size): - alpha(_alpha), num_iters(_num_iters), norm(_norm), regularized(_regularized), train_method(_train_method), mini_batch_size(_mini_batch_size) +LogisticRegressionParams::LogisticRegressionParams( double learning_rate, int iters, int train_algo = LogisticRegression::BATCH, int normlization = LogisticRegression::REG_L2, int reg = 1, int mb_size = 5) { - term_crit = CvTermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, 0.001); + term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, iters, learning_rate); + alpha = learning_rate; + num_iters = iters; + norm = normlization; + regularized = reg; + train_method = train_algo; + mini_batch_size = mb_size; } -LogisticRegression::LogisticRegression() +LogisticRegression::LogisticRegression(const LogisticRegressionParams& pms = LogisticRegressionParams() ) { default_model_name = "my_lr"; + this->params = pms; } LogisticRegression::LogisticRegression(cv::InputArray data, cv::InputArray labels, const LogisticRegressionParams& pms) { - this->params = pms; default_model_name = "my_lr"; + this->params = pms; train(data, labels); } @@ -103,7 +109,7 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) // check the number of columns if(_labels_i.cols != 1) { - cv::error(Error::StsBadArg, "_labels_i should be a column matrix", "cv::ml::LogisticRegression::train", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "_labels_i should be a column matrix" ); } // check data type. 
@@ -111,7 +117,7 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) if((_data_i.type() != CV_32FC1) || (_labels_i.type() != CV_32FC1)) { - cv::error(Error::StsBadArg, "train: data and labels must be a floating point matrix", "cv::ml::LogisticRegression::train", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "data and labels must be a floating point matrix" ); } bool ok = false; @@ -132,12 +138,12 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) if(num_classes < 2) { - cv::error(Error::StsBadArg, "train: data should have atleast 2 classes", "cv::ml::LogisticRegression::train", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "data should have atleast 2 classes" ); } if(_labels_i.rows != _data_i.rows) { - cv::error(Error::StsBadArg, "train: number of rows in data and labels should be the equal", "cv::ml::LogisticRegression::train", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "number of rows in data and labels should be the equal" ); } @@ -148,11 +154,17 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) cv::Mat new_local_labels; int ii=0; + cv::Mat new_theta; if(num_classes == 2) { labels_l.convertTo(labels, CV_32F); - cv::Mat new_theta = compute_batch_gradient(data_t, labels, init_theta); + // new_theta = compute_batch_gradient(data_t, labels, init_theta); + //currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH + if(this->params.train_method == LogisticRegression::BATCH) + new_theta = compute_batch_gradient(data_t, labels, init_theta); + else + new_theta = compute_mini_batch_gradient(data_t, labels, init_theta); thetas = new_theta.t(); } else @@ -165,9 +177,13 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) { new_local_labels = (labels_l == it->second)/255; new_local_labels.convertTo(labels, CV_32F); - - cv::Mat new_theta = compute_batch_gradient(data_t, labels, init_theta); - + + // new_theta = compute_batch_gradient(data_t, labels, init_theta); + // currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH + if(this->params.train_method == LogisticRegression::BATCH) + new_theta = compute_batch_gradient(data_t, labels, init_theta); + else + new_theta = compute_mini_batch_gradient(data_t, labels, init_theta); hconcat(new_theta.t(), thetas.row(ii)); ii += 1; } @@ -176,7 +192,7 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) this->learnt_thetas = thetas.clone(); if( cvIsNaN( (double)cv::sum(this->learnt_thetas)[0] ) ) { - cv::error(Error::StsBadArg, "train: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::train", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "check training parameters. Invalid training classifier" ); } ok = true; return ok; @@ -187,18 +203,17 @@ void LogisticRegression::predict( cv::InputArray _ip_data, cv::OutputArray _outp { /* returns a class of the predicted class class names can be 1,2,3,4, .... 
etc */ - cv::Mat thetas, data, pred_labs; data = _ip_data.getMat(); // check if learnt_mats array is populated if(this->learnt_thetas.total()<=0) { - cv::error(Error::StsBadArg, "predict: classifier should be trained first", "cv::ml::LogisticRegression::predict", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "classifier should be trained first" ); } if(data.type() != CV_32F) { - cv::error(Error::StsBadArg, "predict: data must be of floating type","cv::ml::LogisticRegression::predict",__FILE__, __LINE__); + CV_Error( CV_StsBadArg, "data must be of floating type" ); } // add a column of ones @@ -322,12 +337,12 @@ cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const c // implements batch gradient descent if(this->params.alpha<=0) { - cv::error(Error::StsBadArg, "compute_batch_gradient: check training parameters for the classifier","cv::ml::LogisticRegression::compute_batch_gradient", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "check training parameters for the classifier" ); } if(this->params.num_iters <= 0) { - cv::error(Error::StsBadArg,"compute_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::LogisticRegression::compute_batch_gradient",__FILE__,__LINE__); + CV_Error( CV_StsBadArg, "number of iterations cannot be zero or a negative number" ); } int llambda = 0; @@ -352,7 +367,7 @@ cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const c if( cvIsNaN( ccost ) ) { - cv::error(Error::StsBadArg, "compute_batch_gradient: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::compute_batch_gradient", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "check training parameters. Invalid training classifier" ); } pcal_b = calc_sigmoid((_data*theta_p) - _labels); @@ -397,12 +412,12 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co if(this->params.mini_batch_size <= 0 || this->params.alpha == 0) { - cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters for the classifier","cv::ml::LogisticRegression::compute_mini_batch_gradient", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "check training parameters for the classifier" ); } if(this->params.num_iters <= 0) { - cv::error(Error::StsBadArg,"compute_mini_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::LogisticRegression::compute_mini_batch_gradient",__FILE__,__LINE__); + CV_Error( CV_StsBadArg, "number of iterations cannot be zero or a negative number" ); } cv::Mat pcal_a; @@ -418,7 +433,7 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co lambda_l = 1; } - for(int i = 0;this->params.term_crit.max_iter;i++) + for(int i = 0;this->params.term_crit.maxCount;i++) { if(j+size_b<=_data.rows) { @@ -438,7 +453,7 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co if( cvIsNaN( ccost ) == 1) { - cv::error(Error::StsBadArg, "compute_mini_batch_gradient: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::compute_mini_batch_gradient", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "check training parameters. Invalid training classifier" ); } pcal_b = calc_sigmoid((data_d*theta_p) - labels_l); @@ -536,8 +551,11 @@ void LogisticRegression::clear() void LogisticRegression::write(FileStorage& fs) const { - CV_Assert(fs.isOpened() == 1); - + // check if open + if(fs.isOpened() == 0) + { + CV_Error(CV_StsBadArg,"file can't open. 
Check file path"); + } string desc = "Logisitic Regression Classifier"; fs<<"classifier"<params.alpha; @@ -559,7 +577,7 @@ void LogisticRegression::read(const FileNode& fn ) // check if empty if(fn.empty()) { - cv::error(Error::StsBadArg, "read: empty FileNode object","cv::ml::LogisticRegression::read", __FILE__, __LINE__); + CV_Error( CV_StsBadArg, "empty FileNode object" ); } this->params.alpha = (double)fn["alpha"]; @@ -584,23 +602,7 @@ void LogisticRegression::read(const FileNode& fn ) } } -void LogisticRegression::save(string filepath) const -{ - FileStorage fs; - fs.open(filepath.c_str(),FileStorage::WRITE); - write(fs); - fs.release(); - -} -void LogisticRegression::load(const string filepath) -{ - FileStorage fs; - fs.open(filepath.c_str(),FileStorage::READ); - FileNode fn = fs.root(); - read(fn); -} - -cv::Mat LogisticRegression::get_learnt_thetas() const +const cv::Mat LogisticRegression::get_learnt_thetas() const { return this->learnt_thetas; } From a9df50eefc957bd479b249d80169e25c20322299 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 05:35:21 -0500 Subject: [PATCH 27/43] updated test for logistic regression --- modules/ml/test/test_lr.cpp | 174 ++++++++++++++++++------------------ 1 file changed, 87 insertions(+), 87 deletions(-) diff --git a/modules/ml/test/test_lr.cpp b/modules/ml/test/test_lr.cpp index b0ab00e6c1..3aa4cda002 100644 --- a/modules/ml/test/test_lr.cpp +++ b/modules/ml/test/test_lr.cpp @@ -94,35 +94,43 @@ void CV_LRTest::run( int /*start_from*/ ) // initialize varibles from the popular Iris Dataset Mat data = (Mat_(150, 4)<< 5.1,3.5,1.4,0.2, 4.9,3.0,1.4,0.2, 4.7,3.2,1.3,0.2, 4.6,3.1,1.5,0.2, - 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, - 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, - 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, - 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, - 5.2,3.5,1.5,0.2, 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, - 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, 5.5,3.5,1.3,0.2, - 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, - 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, - 4.6,3.2,1.4,0.2, 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, - 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, 6.3,3.3,4.7,1.6, - 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, - 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, - 5.8,2.7,4.1,1.0, 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, - 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, 6.8,2.8,4.8,1.4, - 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, - 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, - 6.3,2.3,4.4,1.3, 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, - 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, 5.7,2.9,4.2,1.3, - 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, - 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, - 7.3,2.9,6.3,1.8, 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, - 
6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, 6.5,3.0,5.5,1.8, - 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, - 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, - 6.1,3.0,4.9,1.8, 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, - 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, 6.3,3.4,5.6,2.4, - 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, - 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, - 6.5,3.0,5.2,2.0, 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); + 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, + 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, + 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, + 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, + 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, + 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, 5.2,3.5,1.5,0.2, + 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, + 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, + 5.5,3.5,1.3,0.2, 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, + 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, + 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, 4.6,3.2,1.4,0.2, + 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, + 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, + 6.3,3.3,4.7,1.6, 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, + 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, + 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, 5.8,2.7,4.1,1.0, + 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, + 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, + 6.8,2.8,4.8,1.4, 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, + 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, + 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, 6.3,2.3,4.4,1.3, + 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, + 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, + 5.7,2.9,4.2,1.3, 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, + 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, + 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, 7.3,2.9,6.3,1.8, + 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, + 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, + 6.5,3.0,5.5,1.8, 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, + 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, + 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, 6.1,3.0,4.9,1.8, + 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, + 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, + 6.3,3.4,5.6,2.4, 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, + 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, + 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, 6.5,3.0,5.2,2.0, + 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); Mat labels = (Mat_(150, 1)<< 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, @@ -136,7 +144,6 @@ void CV_LRTest::run( int /*start_from*/ ) float error = 0.0f; LogisticRegressionParams params1 = LogisticRegressionParams(); - LogisticRegressionParams params2 = 
LogisticRegressionParams(); params1.alpha = 1.0; params1.num_iters = 10001; @@ -167,31 +174,6 @@ void CV_LRTest::run( int /*start_from*/ ) test_code = cvtest::TS::FAIL_BAD_ACCURACY; } - params2.alpha = 1.0; - params2.num_iters = 9000; - params2.norm = LogisticRegression::REG_L2; - params2.regularized = 1; - params2.train_method = LogisticRegression::MINI_BATCH; - params2.mini_batch_size = 10; - - // now train using mini batch gradient descent - LogisticRegression lr2(data, labels, params2); - lr2.predict(data, responses2); - responses2.convertTo(responses2, CV_32S); - - //calculate error - - if(!calculateError(responses2, labels, error)) - { - ts->printf(cvtest::TS::LOG, "Bad prediction labels\n" ); - test_code = cvtest::TS::FAIL_INVALID_OUTPUT; - } - else if(error > 0.06f) - { - ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error); - test_code = cvtest::TS::FAIL_BAD_ACCURACY; - } - ts->set_failed_test_info(test_code); } @@ -213,35 +195,43 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) // initialize varibles from the popular Iris Dataset Mat data = (Mat_(150, 4)<< 5.1,3.5,1.4,0.2, 4.9,3.0,1.4,0.2, 4.7,3.2,1.3,0.2, 4.6,3.1,1.5,0.2, - 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, - 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, - 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, - 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, - 5.2,3.5,1.5,0.2, 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, - 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, 5.5,3.5,1.3,0.2, - 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, - 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, - 4.6,3.2,1.4,0.2, 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, - 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, 6.3,3.3,4.7,1.6, - 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, - 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, - 5.8,2.7,4.1,1.0, 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, - 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, 6.8,2.8,4.8,1.4, - 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, - 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, - 6.3,2.3,4.4,1.3, 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, - 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, 5.7,2.9,4.2,1.3, - 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, - 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, - 7.3,2.9,6.3,1.8, 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, - 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, 6.5,3.0,5.5,1.8, - 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, - 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, - 6.1,3.0,4.9,1.8, 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, - 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, 6.3,3.4,5.6,2.4, - 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, 6.7,3.1,5.6,2.4, 
6.9,3.1,5.1,2.3, - 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, - 6.5,3.0,5.2,2.0, 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); + 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, + 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, + 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, + 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, + 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, + 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, 5.2,3.5,1.5,0.2, + 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, + 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, + 5.5,3.5,1.3,0.2, 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, + 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, + 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, 4.6,3.2,1.4,0.2, + 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, + 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, + 6.3,3.3,4.7,1.6, 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, + 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, + 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, 5.8,2.7,4.1,1.0, + 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, + 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, + 6.8,2.8,4.8,1.4, 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, + 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, + 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, 6.3,2.3,4.4,1.3, + 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, + 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, + 5.7,2.9,4.2,1.3, 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, + 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, + 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, 7.3,2.9,6.3,1.8, + 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, + 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, + 6.5,3.0,5.5,1.8, 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, + 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, + 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, 6.1,3.0,4.9,1.8, + 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, + 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, + 6.3,3.4,5.6,2.4, 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, + 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, + 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, 6.5,3.0,5.2,2.0, + 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); Mat labels = (Mat_(150, 1)<< 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, @@ -260,6 +250,7 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) float errorCount = 0.0; LogisticRegressionParams params1 = LogisticRegressionParams(); + LogisticRegressionParams params2 = LogisticRegressionParams(); params1.alpha = 1.0; params1.num_iters = 10001; @@ -273,7 +264,7 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) // run LR classifier train classifier LogisticRegression lr1(data, labels, params1); - LogisticRegression lr2; + LogisticRegression lr2(params2); learnt_mat1 = lr1.get_learnt_thetas(); lr1.predict(data, responses1); @@ -282,7 +273,11 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) string filename = cv::tempfile(".xml"); try { - 
lr1.save(filename.c_str()); + //lr1.save(filename.c_str()); + FileStorage fs; + fs.open(filename.c_str(),FileStorage::WRITE); + lr1.write(fs); + fs.release(); } catch(...) @@ -293,7 +288,12 @@ try { - lr2.load(filename.c_str()); + //lr2.load(filename.c_str()); + FileStorage fs; + fs.open(filename.c_str(),FileStorage::READ); + FileNode fn = fs.root(); + lr2.read(fn); + fs.release(); } catch(...) From 8995921cc5597962b4e8a0b2ee5433ee4594aff1 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 06:05:29 -0500 Subject: [PATCH 28/43] fixed white space in logistic regression classifier --- modules/ml/src/lr.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 6ac863876e..cc129f70df 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -177,7 +177,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) { new_local_labels = (labels_l == it->second)/255; new_local_labels.convertTo(labels, CV_32F); - // new_theta = compute_batch_gradient(data_t, labels, init_theta); // currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH if(this->params.train_method == LogisticRegression::BATCH) From aa35835cfc6455f62bdf125eebfec64dc400bd5e Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 06:07:40 -0500 Subject: [PATCH 29/43] fixed white space in logistic regression prototype --- modules/ml/include/opencv2/ml.hpp | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index 7b03343947..32b2eaa7d8 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -585,7 +585,6 @@ struct CV_EXPORTS LogisticRegressionParams LogisticRegressionParams(); LogisticRegressionParams(double learning_rate, int iters, int train_method, int normlization, int reg, int mini_batch_size); - }; class CV_EXPORTS LogisticRegression From 7c97dbc196d3561414e531237047e3a85ce10ded Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 18:11:41 -0500 Subject: [PATCH 30/43] fixed indentation in logistic regression documentation --- modules/ml/doc/logistic_regression.rst | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index f527917ec2..3fd71d377c 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -116,6 +116,7 @@ The full constructor initializes corresponding members. The default constructor creates an object with dummy parameters. :: + LogisticRegressionParams::LogisticRegressionParams() { term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.001); alpha = 0.001; @@ -174,11 +175,12 @@ Predicts responses for input samples and returns a float type. :param predicted_labels: Predicted labels as a column matrix and of type ``CV_32S``. + + LogisticRegression::get_learnt_thetas ------------------------------------- This function returns the trained parameters arranged across rows. For a two class classification problem, it returns a row matrix. -.. ocv:function:: cv::Mat LogisticRegression::get_learnt_thetas() +.. ocv:function:: cv::Mat LogisticRegression::get_learnt_thetas() const; It returns learnt parameters of the Logistic Regression as a matrix of type ``CV_32F``. @@ -189,7 +191,7 @@ This function reads the trained LogisticRegression classifier from disk. .. 
ocv:function:: void LogisticRegression::read(const FileNode& fn) LogisticRegression::write ------------------------- +------------------------- This function writes the trained LogisticRegression classifier to disk. -.. ocv:function:: void LogisticRegression::write(FileStorage& fs) const +.. ocv:function:: void LogisticRegression::write(FileStorage& fs) const; From e4ef0e0ca8dfa51b5221a0d66c6931e8f71e22f0 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Tue, 5 Nov 2013 18:13:05 -0500 Subject: [PATCH 31/43] fixed indentation in logistic regression documentation --- modules/ml/doc/logistic_regression.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 3fd71d377c..c3236c7dec 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -140,7 +140,7 @@ LogisticRegression::LogisticRegression -------------------------------------- The constructors. -.. ocv:function:: LogisticRegression( const LogisticRegressionParams& params) +.. ocv:function:: LogisticRegression::LogisticRegression( const LogisticRegressionParams& params) From 3622de26213acf6c6c2d82c1ccbc67134735f36b Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Thu, 7 Nov 2013 05:15:56 -0500 Subject: [PATCH 32/43] updated mini_batch_size description for LogisticRegressionParams in documentation --- modules/ml/doc/logistic_regression.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index c3236c7dec..33dc5d8b3a 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -110,8 +110,7 @@ The constructors. :param reg: To enable or disable regularization. Set to positive integer (greater than zero) to enable and to 0 to disable. - :param mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. Will only be used if using ``LogisticRegression::MINI_BATCH`` training algorithm. - + :param mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. Will only be used if using ``LogisticRegression::MINI_BATCH`` training algorithm. It has to take values less than the total number of training samples. From e7f14f3d0e443227c324b7abe211b6fd46995a6b Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 8 Nov 2013 19:56:42 -0500 Subject: [PATCH 33/43] fixed missing semicolon in logistic regression documentation --- modules/ml/doc/logistic_regression.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 33dc5d8b3a..100036284a 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -179,7 +179,7 @@ LogisticRegression::get_learnt_thetas ------------------------------------- This function returns the trained parameters arranged across rows. For a two class classification problem, it returns a row matrix. -.. 
ocv:function:: cv::Mat LogisticRegression::get_learnt_thetas() const It returns learnt parameters of the Logistic Regression as a matrix of type ``CV_32F``. @@ -190,7 +190,7 @@ This function reads the trained LogisticRegression classifier from disk. .. ocv:function:: void LogisticRegression::read(const FileNode& fn) LogisticRegression::write ------------------------- +------------------------- This function writes the trained LogisticRegression classifier to disk. -.. ocv:function:: void LogisticRegression::write(FileStorage& fs) const; +.. ocv:function:: void LogisticRegression::write(FileStorage& fs) const From 65eb52a247e5cdabf0623e404fe1c7f17ee54746 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 8 Nov 2013 20:03:57 -0500 Subject: [PATCH 34/43] removed default value from LogisticRegression constructor function definition --- modules/ml/src/lr.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index cc129f70df..09acf9de67 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -80,7 +80,7 @@ LogisticRegressionParams::LogisticRegressionParams( double learning_rate, int it mini_batch_size = mb_size; } -LogisticRegression::LogisticRegression(const LogisticRegressionParams& pms = LogisticRegressionParams() ) +LogisticRegression::LogisticRegression(const LogisticRegressionParams& pms) { default_model_name = "my_lr"; this->params = pms; } From 21de04b4a2c8cb359984ce63b1cf396cd281ea28 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 8 Nov 2013 20:23:34 -0500 Subject: [PATCH 35/43] fixed default constructor for LogisticRegression class declaration --- modules/ml/include/opencv2/ml.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index 32b2eaa7d8..7f442b9f3b 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -590,7 +590,7 @@ struct CV_EXPORTS LogisticRegressionParams class CV_EXPORTS LogisticRegression { public: - LogisticRegression( const LogisticRegressionParams& params); + LogisticRegression( const LogisticRegressionParams& params = LogisticRegressionParams()); LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params); virtual ~LogisticRegression(); From f20db35b78113e3d05c76a2d55561a5085e1dae8 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Fri, 8 Nov 2013 21:32:53 -0500 Subject: [PATCH 36/43] fixed logistic regression documentation warnings --- modules/ml/doc/logistic_regression.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 100036284a..62336b18bc 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -139,7 +139,7 @@ LogisticRegression::LogisticRegression -------------------------------------- The constructors. -.. ocv:function:: LogisticRegression::LogisticRegression( const LogisticRegressionParams& params) +.. ocv:function:: LogisticRegression::LogisticRegression( const LogisticRegressionParams& params = LogisticRegressionParams()) :param params: The training parameters for the classifier of type ``LogisticRegressionParams``. .. ocv:function:: LogisticRegression::LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params) @@ -179,7 +179,7 @@ LogisticRegression::get_learnt_thetas ------------------------------------- This function returns the trained parameters arranged across rows. For a two class classification problem, it returns a row matrix. -.. 
ocv:function:: cv::Mat LogisticRegression::get_learnt_thetas() const +.. ocv:function:: const cv::Mat LogisticRegression::get_learnt_thetas() const It returns learnt parameters of the Logistic Regression as a matrix of type ``CV_32F``. From ae02ecec6003af181abdee97e76dfdaf9d902248 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Mon, 11 Nov 2013 15:29:19 -0500 Subject: [PATCH 37/43] removed a couple of unnecessary comments in Logistic Regression training method --- modules/ml/src/lr.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 09acf9de67..2411ea3d9b 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -159,7 +159,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) if(num_classes == 2) { labels_l.convertTo(labels, CV_32F); - // new_theta = compute_batch_gradient(data_t, labels, init_theta); //currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH if(this->params.train_method == LogisticRegression::BATCH) new_theta = compute_batch_gradient(data_t, labels, init_theta); else @@ -177,7 +176,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) { new_local_labels = (labels_l == it->second)/255; new_local_labels.convertTo(labels, CV_32F); - // new_theta = compute_batch_gradient(data_t, labels, init_theta); // currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH if(this->params.train_method == LogisticRegression::BATCH) new_theta = compute_batch_gradient(data_t, labels, init_theta); else From d20b2a5a97e6531b048280d498f62d6d6df5cb48 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Mon, 11 Nov 2013 16:43:14 -0500 Subject: [PATCH 38/43] removed extra comments in train method --- modules/ml/src/lr.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 2411ea3d9b..2089bb816a 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -159,7 +159,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) if(num_classes == 2) { labels_l.convertTo(labels, CV_32F); - //currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH if(this->params.train_method == LogisticRegression::BATCH) new_theta = compute_batch_gradient(data_t, labels, init_theta); else @@ -176,7 +175,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) { new_local_labels = (labels_l == it->second)/255; new_local_labels.convertTo(labels, CV_32F); - // currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH if(this->params.train_method == LogisticRegression::BATCH) new_theta = compute_batch_gradient(data_t, labels, init_theta); else From a23836231fb71390f38df6505b261b45655ed918 Mon Sep 17 00:00:00 2001 From: Rahul Kavi Date: Wed, 12 Feb 2014 07:24:40 -0500 Subject: [PATCH 39/43] fixed warnings in type conversions fixed warnings in type conversions from size_t to int (in getting size of number of unique classes in a training problem). 
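For context, a minimal sketch of the conversion this commit silences; the map type here is assumed for illustration and is not taken from the diff:

    #include <map>

    // std::map::size() returns std::size_t, which is wider than int on
    // 64-bit targets, so assigning it to int without a cast draws a
    // conversion warning on stricter compilers.
    int count_classes(const std::map<int, int>& forward_mapper)
    {
        return (int)forward_mapper.size(); // explicit cast, as in the diff below
    }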
--- modules/ml/src/lr.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 2089bb816a..aa93866ccc 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -125,7 +125,7 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) cv::Mat labels; set_label_map(_labels_i); - int num_classes = this->forward_mapper.size(); + int num_classes = (int) this->forward_mapper.size(); // add a column of ones cv::Mat data_t = cv::Mat::zeros(_data_i.rows, _data_i.cols+1, CV_32F); From 71770eb790a1f0c066526bd8f84df4c5491797c0 Mon Sep 17 00:00:00 2001 From: Maksim Shabunin Date: Thu, 14 Aug 2014 17:19:21 +0400 Subject: [PATCH 40/43] Fixed ML module build after merge --- modules/ml/include/opencv2/ml.hpp | 3 --- modules/ml/src/lr.cpp | 1 + modules/ml/test/test_lr.cpp | 2 +- samples/cpp/logistic_regression.cpp | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index 7f442b9f3b..f6e3bf2de5 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -571,8 +571,6 @@ public: /****************************************************************************************\ * Logistic Regression * \****************************************************************************************/ -namespace cv -{ struct CV_EXPORTS LogisticRegressionParams { double alpha; @@ -624,7 +622,6 @@ protected: virtual bool set_label_map(const cv::Mat& labels); static cv::Mat remap_labels(const cv::Mat& labels, const std::map& lmap); }; -}// namespace cv /****************************************************************************************\ * Auxilary functions declarations * diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index aa93866ccc..e09a505931 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -57,6 +57,7 @@ using namespace cv; +using namespace cv::ml; using namespace std; LogisticRegressionParams::LogisticRegressionParams() diff --git a/modules/ml/test/test_lr.cpp b/modules/ml/test/test_lr.cpp index 3aa4cda002..90ee7b808d 100644 --- a/modules/ml/test/test_lr.cpp +++ b/modules/ml/test/test_lr.cpp @@ -60,7 +60,7 @@ using namespace std; using namespace cv; - +using namespace cv::ml; static bool calculateError( const Mat& _p_labels, const Mat& _o_labels, float& error) { diff --git a/samples/cpp/logistic_regression.cpp b/samples/cpp/logistic_regression.cpp index 1ef261d4a6..2ef41c0e01 100644 --- a/samples/cpp/logistic_regression.cpp +++ b/samples/cpp/logistic_regression.cpp @@ -65,7 +65,7 @@ using namespace std; using namespace cv; - +using namespace cv::ml; int main() { From 3e26086f820990c2f561bb4856ca90343abeeb40 Mon Sep 17 00:00:00 2001 From: Maksim Shabunin Date: Thu, 14 Aug 2014 19:01:45 +0400 Subject: [PATCH 41/43] Reworked ML logistic regression implementation, initial version --- modules/ml/include/opencv2/ml.hpp | 67 ++++------ modules/ml/src/lr.cpp | 119 ++++++++++------- modules/ml/test/test_lr.cpp | 193 ++++++---------------------- samples/cpp/logistic_regression.cpp | 157 +++++++++++----------- 4 files changed, 219 insertions(+), 317 deletions(-) diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index f6e3bf2de5..145eedba60 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -571,56 +571,43 @@ public: /****************************************************************************************\ * Logistic 
Regression * \****************************************************************************************/ -struct CV_EXPORTS LogisticRegressionParams -{ - double alpha; - int num_iters; - int norm; - int regularized; - int train_method; - int mini_batch_size; - cv::TermCriteria term_crit; - - LogisticRegressionParams(); - LogisticRegressionParams(double learning_rate, int iters, int train_method, int normlization, int reg, int mini_batch_size); -}; -class CV_EXPORTS LogisticRegression +class CV_EXPORTS LogisticRegression : public StatModel { public: - LogisticRegression( const LogisticRegressionParams& params = LogisticRegressionParams()); - LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params); - virtual ~LogisticRegression(); + class CV_EXPORTS Params + { + public: + Params(double learning_rate = 0.001, + int iters = 1000, + int method = LogisticRegression::BATCH, + int normlization = LogisticRegression::REG_L2, + int reg = 1, + int batch_size = 1); + double alpha; + int num_iters; + int norm; + int regularized; + int train_method; + int mini_batch_size; + cv::TermCriteria term_crit; + }; enum { REG_L1 = 0, REG_L2 = 1}; enum { BATCH = 0, MINI_BATCH = 1}; - virtual bool train(cv::InputArray data_ip, cv::InputArray label_ip); - virtual void predict( cv::InputArray data, cv::OutputArray predicted_labels ) const; - - virtual void write(FileStorage& fs) const; - virtual void read(const FileNode& fn); + // Algorithm interface + virtual void write( FileStorage &fs ) const = 0; + virtual void read( const FileNode &fn ) = 0; - const cv::Mat get_learnt_thetas() const; - virtual void clear(); - -protected: - - LogisticRegressionParams params; - cv::Mat learnt_thetas; - std::string default_model_name; - std::map forward_mapper; - std::map reverse_mapper; + // StatModel interface + virtual bool train( const Ptr& trainData, int flags=0 ) = 0; + virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const = 0; + virtual void clear() = 0; - cv::Mat labels_o; - cv::Mat labels_n; + virtual Mat get_learnt_thetas() const = 0; - static cv::Mat calc_sigmoid(const cv::Mat& data); - virtual double compute_cost(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); - virtual cv::Mat compute_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); - virtual cv::Mat compute_mini_batch_gradient(const cv::Mat& data, const cv::Mat& labels, const cv::Mat& init_theta); - virtual bool set_label_map(const cv::Mat& labels); - static cv::Mat remap_labels(const cv::Mat& labels, const std::map& lmap); + static Ptr create( const Params& params = Params() ); }; /****************************************************************************************\ diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index e09a505931..2a08e04b6c 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -55,55 +55,72 @@ #include "precomp.hpp" - -using namespace cv; -using namespace cv::ml; using namespace std; -LogisticRegressionParams::LogisticRegressionParams() -{ - term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.001); - alpha = 0.001; - num_iters = 1000; - norm = LogisticRegression::REG_L2; - regularized = 1; - train_method = LogisticRegression::BATCH; - mini_batch_size = 1; -} -LogisticRegressionParams::LogisticRegressionParams( double learning_rate, int iters, int train_algo = LogisticRegression::BATCH, int normlization = LogisticRegression::REG_L2, int reg = 1, 
int mb_size = 5) +namespace cv { +namespace ml { + +LogisticRegression::Params::Params(double learning_rate, + int iters, + int method, + int normlization, + int reg, + int batch_size) { - term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, iters, learning_rate); alpha = learning_rate; num_iters = iters; norm = normlization; regularized = reg; - train_method = train_algo; - mini_batch_size = mb_size; + train_method = method; + mini_batch_size = batch_size; + term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, alpha); } -LogisticRegression::LogisticRegression(const LogisticRegressionParams& pms) +class LogisticRegressionImpl : public LogisticRegression { - default_model_name = "my_lr"; - this->params = pms; -} - -LogisticRegression::LogisticRegression(cv::InputArray data, cv::InputArray labels, const LogisticRegressionParams& pms) -{ - default_model_name = "my_lr"; - this->params = pms; - train(data, labels); -} - -LogisticRegression::~LogisticRegression() +public: + LogisticRegressionImpl(const Params& pms) + : params(pms) + { + } + virtual ~LogisticRegressionImpl() {} + + virtual bool train( const Ptr& trainData, int=0 ); + virtual float predict(InputArray samples, OutputArray results, int) const; + virtual void clear(); + virtual void write(FileStorage& fs) const; + virtual void read(const FileNode& fn); + virtual cv::Mat get_learnt_thetas() const; + virtual int getVarCount() const { return learnt_thetas.cols; } + virtual bool isTrained() const { return !learnt_thetas.empty(); } + virtual bool isClassifier() const { return true; } + virtual String getDefaultModelName() const { return "opencv_ml_lr"; } +protected: + cv::Mat calc_sigmoid(const cv::Mat& data) const; + double compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta); + cv::Mat compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta); + cv::Mat compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta); + bool set_label_map(const cv::Mat& _labels_i); + cv::Mat remap_labels(const cv::Mat& _labels_i, const map& lmap) const; +protected: + Params params; + cv::Mat learnt_thetas; + map forward_mapper; + map reverse_mapper; + cv::Mat labels_o; + cv::Mat labels_n; +}; + +Ptr LogisticRegression::create(const Params& params) { - clear(); + return makePtr(params); } -bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) +bool LogisticRegressionImpl::train(const Ptr& trainData, int) { clear(); - cv::Mat _data_i = data_ip.getMat(); - cv::Mat _labels_i = labels_ip.getMat(); + cv::Mat _data_i = trainData->getSamples(); + cv::Mat _labels_i = trainData->getResponses(); CV_Assert( !_labels_i.empty() && !_data_i.empty()); @@ -194,13 +211,12 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip) return ok; } - -void LogisticRegression::predict( cv::InputArray _ip_data, cv::OutputArray _output_predicted_labels ) const +float LogisticRegressionImpl::predict(InputArray samples, OutputArray results, int) const { /* returns a class of the predicted class class names can be 1,2,3,4, .... 
etc */ cv::Mat thetas, data, pred_labs; - data = _ip_data.getMat(); + data = samples.getMat(); // check if learnt_mats array is populated if(this->learnt_thetas.total()<=0) @@ -266,19 +282,20 @@ void LogisticRegression::predict( cv::InputArray _ip_data, cv::OutputArray _outp pred_labs = remap_labels(labels_c, this->reverse_mapper); // convert pred_labs to integer type pred_labs.convertTo(pred_labs, CV_32S); - pred_labs.copyTo(_output_predicted_labels); + pred_labs.copyTo(results); + // TODO: determine + return 0; } -cv::Mat LogisticRegression::calc_sigmoid(const Mat& data) +cv::Mat LogisticRegressionImpl::calc_sigmoid(const cv::Mat& data) const { cv::Mat dest; cv::exp(-data, dest); return 1.0/(1.0+dest); } -double LogisticRegression::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +double LogisticRegressionImpl::compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) { - int llambda = 0; int m; int n; @@ -328,7 +345,7 @@ double LogisticRegression::compute_cost(const cv::Mat& _data, const cv::Mat& _la return cost; } -cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +cv::Mat LogisticRegressionImpl::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) { // implements batch gradient descent if(this->params.alpha<=0) @@ -397,7 +414,7 @@ cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const c return theta_p; } -cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) { // implements batch gradient descent int lambda_l = 0; @@ -488,7 +505,7 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co return theta_p; } -bool LogisticRegression::set_label_map(const cv::Mat& _labels_i) +bool LogisticRegressionImpl::set_label_map(const cv::Mat &_labels_i) { // this function creates two maps to map user defined labels to program friendly labels two ways. int ii = 0; @@ -522,7 +539,7 @@ bool LogisticRegression::set_label_map(const cv::Mat& _labels_i) return ok; } -cv::Mat LogisticRegression::remap_labels(const Mat& _labels_i, const std::map& lmap) +cv::Mat LogisticRegressionImpl::remap_labels(const cv::Mat& _labels_i, const map& lmap) const { cv::Mat labels; _labels_i.convertTo(labels, CV_32S); @@ -538,14 +555,14 @@ cv::Mat LogisticRegression::remap_labels(const Mat& _labels_i, const std::maplearnt_thetas.release(); this->labels_o.release(); this->labels_n.release(); } -void LogisticRegression::write(FileStorage& fs) const +void LogisticRegressionImpl::write(FileStorage& fs) const { // check if open if(fs.isOpened() == 0) @@ -568,7 +585,7 @@ void LogisticRegression::write(FileStorage& fs) const fs<<"o_labels"<labels_o; } -void LogisticRegression::read(const FileNode& fn ) +void LogisticRegressionImpl::read(const FileNode& fn) { // check if empty if(fn.empty()) @@ -598,8 +615,12 @@ void LogisticRegression::read(const FileNode& fn ) } } -const cv::Mat LogisticRegression::get_learnt_thetas() const +cv::Mat LogisticRegressionImpl::get_learnt_thetas() const { return this->learnt_thetas; } + +} +} + /* End of file. 
*/ diff --git a/modules/ml/test/test_lr.cpp b/modules/ml/test/test_lr.cpp index 90ee7b808d..a5f1306c3d 100644 --- a/modules/ml/test/test_lr.cpp +++ b/modules/ml/test/test_lr.cpp @@ -92,78 +92,29 @@ protected: void CV_LRTest::run( int /*start_from*/ ) { // initialize varibles from the popular Iris Dataset - Mat data = (Mat_(150, 4)<< - 5.1,3.5,1.4,0.2, 4.9,3.0,1.4,0.2, 4.7,3.2,1.3,0.2, 4.6,3.1,1.5,0.2, - 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, - 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, - 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, - 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, - 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, - 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, 5.2,3.5,1.5,0.2, - 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, - 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, - 5.5,3.5,1.3,0.2, 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, - 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, - 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, 4.6,3.2,1.4,0.2, - 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, - 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, - 6.3,3.3,4.7,1.6, 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, - 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, - 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, 5.8,2.7,4.1,1.0, - 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, - 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, - 6.8,2.8,4.8,1.4, 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, - 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, - 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, 6.3,2.3,4.4,1.3, - 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, - 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 5.7,3.0,4.2,1.2, - 5.7,2.9,4.2,1.3, 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, - 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, - 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, 7.3,2.9,6.3,1.8, - 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, - 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, - 6.5,3.0,5.5,1.8, 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, - 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, - 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, 6.1,3.0,4.9,1.8, - 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, - 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, - 6.3,3.4,5.6,2.4, 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, - 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, - 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, 6.5,3.0,5.2,2.0, - 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); - - Mat labels = (Mat_(150, 1)<< 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3); + string dataFileName = ts->get_data_path() + "iris.data"; + Ptr tdata = 
TrainData::loadFromCSV(dataFileName, 0); - Mat responses1, responses2; - float error = 0.0f; - - LogisticRegressionParams params1 = LogisticRegressionParams(); - - params1.alpha = 1.0; - params1.num_iters = 10001; - params1.norm = LogisticRegression::REG_L2; - params1.regularized = 1; - params1.train_method = LogisticRegression::BATCH; - params1.mini_batch_size = 10; + LogisticRegression::Params params = LogisticRegression::Params(); + params.alpha = 1.0; + params.num_iters = 10001; + params.norm = LogisticRegression::REG_L2; + params.regularized = 1; + params.train_method = LogisticRegression::BATCH; + params.mini_batch_size = 10; // run LR classifier train classifier - data.convertTo(data, CV_32FC1); - labels.convertTo(labels, CV_32FC1); - LogisticRegression lr1(data, labels, params1); + Ptr p = LogisticRegression::create(params); + p->train(tdata); // predict using the same data - lr1.predict(data, responses1); - - int test_code = cvtest::TS::OK; + Mat responses; + p->predict(tdata->getSamples(), responses); // calculate error - if(!calculateError(responses1, labels, error)) + int test_code = cvtest::TS::OK; + float error = 0.0f; + if(!calculateError(responses, tdata->getResponses(), error)) { ts->printf(cvtest::TS::LOG, "Bad prediction labels\n" ); test_code = cvtest::TS::FAIL_INVALID_OUTPUT; @@ -174,6 +125,14 @@ void CV_LRTest::run( int /*start_from*/ ) test_code = cvtest::TS::FAIL_BAD_ACCURACY; } + { + FileStorage s("debug.xml", FileStorage::WRITE); + s << "original" << tdata->getResponses(); + s << "predicted1" << responses; + s << "learnt" << p->get_learnt_thetas(); + s << "error" << error; + s.release(); + } ts->set_failed_test_info(test_code); } @@ -189,69 +148,16 @@ protected: void CV_LRTest_SaveLoad::run( int /*start_from*/ ) { - int code = cvtest::TS::OK; // initialize varibles from the popular Iris Dataset - Mat data = (Mat_(150, 4)<< - 5.1,3.5,1.4,0.2, 4.9,3.0,1.4,0.2, 4.7,3.2,1.3,0.2, 4.6,3.1,1.5,0.2, - 5.0,3.6,1.4,0.2, 5.4,3.9,1.7,0.4, 4.6,3.4,1.4,0.3, 5.0,3.4,1.5,0.2, - 4.4,2.9,1.4,0.2, 4.9,3.1,1.5,0.1, 5.4,3.7,1.5,0.2, 4.8,3.4,1.6,0.2, - 4.8,3.0,1.4,0.1, 4.3,3.0,1.1,0.1, 5.8,4.0,1.2,0.2, 5.7,4.4,1.5,0.4, - 5.4,3.9,1.3,0.4, 5.1,3.5,1.4,0.3, 5.7,3.8,1.7,0.3, 5.1,3.8,1.5,0.3, - 5.4,3.4,1.7,0.2, 5.1,3.7,1.5,0.4, 4.6,3.6,1.0,0.2, 5.1,3.3,1.7,0.5, - 4.8,3.4,1.9,0.2, 5.0,3.0,1.6,0.2, 5.0,3.4,1.6,0.4, 5.2,3.5,1.5,0.2, - 5.2,3.4,1.4,0.2, 4.7,3.2,1.6,0.2, 4.8,3.1,1.6,0.2, 5.4,3.4,1.5,0.4, - 5.2,4.1,1.5,0.1, 5.5,4.2,1.4,0.2, 4.9,3.1,1.5,0.1, 5.0,3.2,1.2,0.2, - 5.5,3.5,1.3,0.2, 4.9,3.1,1.5,0.1, 4.4,3.0,1.3,0.2, 5.1,3.4,1.5,0.2, - 5.0,3.5,1.3,0.3, 4.5,2.3,1.3,0.3, 4.4,3.2,1.3,0.2, 5.0,3.5,1.6,0.6, - 5.1,3.8,1.9,0.4, 4.8,3.0,1.4,0.3, 5.1,3.8,1.6,0.2, 4.6,3.2,1.4,0.2, - 5.3,3.7,1.5,0.2, 5.0,3.3,1.4,0.2, 7.0,3.2,4.7,1.4, 6.4,3.2,4.5,1.5, - 6.9,3.1,4.9,1.5, 5.5,2.3,4.0,1.3, 6.5,2.8,4.6,1.5, 5.7,2.8,4.5,1.3, - 6.3,3.3,4.7,1.6, 4.9,2.4,3.3,1.0, 6.6,2.9,4.6,1.3, 5.2,2.7,3.9,1.4, - 5.0,2.0,3.5,1.0, 5.9,3.0,4.2,1.5, 6.0,2.2,4.0,1.0, 6.1,2.9,4.7,1.4, - 5.6,2.9,3.6,1.3, 6.7,3.1,4.4,1.4, 5.6,3.0,4.5,1.5, 5.8,2.7,4.1,1.0, - 6.2,2.2,4.5,1.5, 5.6,2.5,3.9,1.1, 5.9,3.2,4.8,1.8, 6.1,2.8,4.0,1.3, - 6.3,2.5,4.9,1.5, 6.1,2.8,4.7,1.2, 6.4,2.9,4.3,1.3, 6.6,3.0,4.4,1.4, - 6.8,2.8,4.8,1.4, 6.7,3.0,5.0,1.7, 6.0,2.9,4.5,1.5, 5.7,2.6,3.5,1.0, - 5.5,2.4,3.8,1.1, 5.5,2.4,3.7,1.0, 5.8,2.7,3.9,1.2, 6.0,2.7,5.1,1.6, - 5.4,3.0,4.5,1.5, 6.0,3.4,4.5,1.6, 6.7,3.1,4.7,1.5, 6.3,2.3,4.4,1.3, - 5.6,3.0,4.1,1.3, 5.5,2.5,4.0,1.3, 5.5,2.6,4.4,1.2, 6.1,3.0,4.6,1.4, - 5.8,2.6,4.0,1.2, 5.0,2.3,3.3,1.0, 5.6,2.7,4.2,1.3, 
5.7,3.0,4.2,1.2, - 5.7,2.9,4.2,1.3, 6.2,2.9,4.3,1.3, 5.1,2.5,3.0,1.1, 5.7,2.8,4.1,1.3, - 6.3,3.3,6.0,2.5, 5.8,2.7,5.1,1.9, 7.1,3.0,5.9,2.1, 6.3,2.9,5.6,1.8, - 6.5,3.0,5.8,2.2, 7.6,3.0,6.6,2.1, 4.9,2.5,4.5,1.7, 7.3,2.9,6.3,1.8, - 6.7,2.5,5.8,1.8, 7.2,3.6,6.1,2.5, 6.5,3.2,5.1,2.0, 6.4,2.7,5.3,1.9, - 6.8,3.0,5.5,2.1, 5.7,2.5,5.0,2.0, 5.8,2.8,5.1,2.4, 6.4,3.2,5.3,2.3, - 6.5,3.0,5.5,1.8, 7.7,3.8,6.7,2.2, 7.7,2.6,6.9,2.3, 6.0,2.2,5.0,1.5, - 6.9,3.2,5.7,2.3, 5.6,2.8,4.9,2.0, 7.7,2.8,6.7,2.0, 6.3,2.7,4.9,1.8, - 6.7,3.3,5.7,2.1, 7.2,3.2,6.0,1.8, 6.2,2.8,4.8,1.8, 6.1,3.0,4.9,1.8, - 6.4,2.8,5.6,2.1, 7.2,3.0,5.8,1.6, 7.4,2.8,6.1,1.9, 7.9,3.8,6.4,2.0, - 6.4,2.8,5.6,2.2, 6.3,2.8,5.1,1.5, 6.1,2.6,5.6,1.4, 7.7,3.0,6.1,2.3, - 6.3,3.4,5.6,2.4, 6.4,3.1,5.5,1.8, 6.0,3.0,4.8,1.8, 6.9,3.1,5.4,2.1, - 6.7,3.1,5.6,2.4, 6.9,3.1,5.1,2.3, 5.8,2.7,5.1,1.9, 6.8,3.2,5.9,2.3, - 6.7,3.3,5.7,2.5, 6.7,3.0,5.2,2.3, 6.3,2.5,5.0,1.9, 6.5,3.0,5.2,2.0, - 6.2,3.4,5.4,2.3, 5.9,3.0,5.1,1.8); - - Mat labels = (Mat_(150, 1)<< 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3); - - // LogisticRegressionParams params = LogisticRegressionParams(); + string dataFileName = ts->get_data_path() + "iris.data"; + Ptr tdata = TrainData::loadFromCSV(dataFileName, 0); Mat responses1, responses2; Mat learnt_mat1, learnt_mat2; - Mat pred_result1, comp_learnt_mats; - - float errorCount = 0.0; - - LogisticRegressionParams params1 = LogisticRegressionParams(); - LogisticRegressionParams params2 = LogisticRegressionParams(); + LogisticRegression::Params params1 = LogisticRegression::Params(); params1.alpha = 1.0; params1.num_iters = 10001; params1.norm = LogisticRegression::REG_L2; @@ -259,56 +165,40 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) params1.train_method = LogisticRegression::BATCH; params1.mini_batch_size = 10; - data.convertTo(data, CV_32FC1); - labels.convertTo(labels, CV_32FC1); - - // run LR classifier train classifier - LogisticRegression lr1(data, labels, params1); - LogisticRegression lr2(params2); - learnt_mat1 = lr1.get_learnt_thetas(); - - lr1.predict(data, responses1); - // now save the classifier - - string filename = cv::tempfile(".xml"); + // train and save the classifier + String filename = cv::tempfile(".xml"); try { - //lr1.save(filename.c_str()); - FileStorage fs; - fs.open(filename.c_str(),FileStorage::WRITE); - lr1.write(fs); - fs.release(); + // run LR classifier train classifier + Ptr lr1 = LogisticRegression::create(params1); + lr1->train(tdata); + lr1->predict(tdata->getSamples(), responses1); + learnt_mat1 = lr1->get_learnt_thetas(); + lr1->save(filename); } - catch(...) { ts->printf(cvtest::TS::LOG, "Crash in write method.\n" ); ts->set_failed_test_info(cvtest::TS::FAIL_EXCEPTION); } + // and load to another try { - //lr2.load(filename.c_str()); - FileStorage fs; - fs.open(filename.c_str(),FileStorage::READ); - FileNode fn = fs.root(); - lr2.read(fn); - fs.release(); + Ptr lr2 = StatModel::load(filename); + lr2->predict(tdata->getSamples(), responses2); + learnt_mat2 = lr2->get_learnt_thetas(); } - catch(...) 
{ - ts->printf(cvtest::TS::LOG, "Crash in read method.\n"); + ts->printf(cvtest::TS::LOG, "Crash in write method.\n" ); ts->set_failed_test_info(cvtest::TS::FAIL_EXCEPTION); } - lr2.predict(data, responses2); - - learnt_mat2 = lr2.get_learnt_thetas(); - CV_Assert(responses1.rows == responses2.rows); // compare difference in learnt matrices before and after loading from disk + Mat comp_learnt_mats; comp_learnt_mats = (learnt_mat1 == learnt_mat2); comp_learnt_mats = comp_learnt_mats.reshape(1, comp_learnt_mats.rows*comp_learnt_mats.cols); comp_learnt_mats.convertTo(comp_learnt_mats, CV_32S); @@ -317,6 +207,7 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) // compare difference in prediction outputs and stored inputs // check if there is any difference between computed learnt mat and retreived mat + float errorCount = 0.0; errorCount += 1 - (float)cv::countNonZero(responses1 == responses2)/responses1.rows; errorCount += 1 - (float)cv::sum(comp_learnt_mats)[0]/comp_learnt_mats.rows; diff --git a/samples/cpp/logistic_regression.cpp b/samples/cpp/logistic_regression.cpp index 2ef41c0e01..e97f602dfe 100644 --- a/samples/cpp/logistic_regression.cpp +++ b/samples/cpp/logistic_regression.cpp @@ -1,4 +1,4 @@ -/////////////////////////////////////////////////////////////////////////////////////// +/*////////////////////////////////////////////////////////////////////////////////////// // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. // By downloading, copying, installing or using the software you agree to this license. @@ -11,7 +11,8 @@ // Rahul Kavi rahulkavi[at]live[at]com // -// contains a subset of data from the popular Iris Dataset (taken from "http://archive.ics.uci.edu/ml/datasets/Iris") +// contains a subset of data from the popular Iris Dataset (taken from +// "http://archive.ics.uci.edu/ml/datasets/Iris") // # You are free to use, change, or redistribute the code in any way you wish for // # non-commercial purposes, but please maintain the name of the original author. @@ -24,7 +25,6 @@ // # Logistic Regression ALGORITHM - // License Agreement // For Open Source Computer Vision Library @@ -54,7 +54,7 @@ // loss of use, data, or profits; or business interruption) however caused // and on any theory of liability, whether in contract, strict liability, // or tort (including negligence or otherwise) arising in any way out of -// the use of this software, even if advised of the possibility of such damage. 
+// the use of this software, even if advised of the possibility of such damage.*/ #include @@ -62,42 +62,45 @@ #include #include - using namespace std; using namespace cv; using namespace cv::ml; int main() { - Mat data_temp, labels_temp; + const String filename = "data01.xml"; + cout << "**********************************************************************" << endl; + cout << filename + << " contains digits 0 and 1 of 20 samples each, collected on an Android device" << endl; + cout << "Each of the collected images are of size 28 x 28 re-arranged to 1 x 784 matrix" + << endl; + cout << "**********************************************************************" << endl; + Mat data, labels; + { + cout << "loading the dataset" << endl; + FileStorage f; + if(f.open(filename, FileStorage::READ)) + { + f["datamat"] >> data; + f["labelsmat"] >> labels; + f.release(); + } + else + { + cerr << "File can not be opened: " << filename << endl; + return 1; + } + data.convertTo(data, CV_32F); + labels.convertTo(labels, CV_32F); + cout << "read " << data.rows << " rows of data" << endl; + } Mat data_train, data_test; Mat labels_train, labels_test; - - Mat responses, result; - FileStorage fs1, fs2; - - FileStorage f; - - cout<<"*****************************************************************************************"<> data_temp; - f["labelsmat"] >> labels_temp; - - data_temp.convertTo(data, CV_32F); - labels_temp.convertTo(labels, CV_32F); - - for(int i =0;i lr1 = LogisticRegression::create(params); + lr1->train(data_train, ROW_SAMPLE, labels_train); + cout << "done!" << endl; + + cout << "predicting..."; + lr1->predict(data_test, responses); + cout << "done!" << endl; + + // show prediction report + cout << "original vs predicted:" << endl; labels_test.convertTo(labels_test, CV_32S); - - cout<<"Original Label :: Predicted Label"<(i,0)<<" :: "<< responses.at(i,0)<save(saveFilename); // load the classifier onto new object - LogisticRegressionParams params2 = LogisticRegressionParams(); - LogisticRegression lr2(params2); - cout<<"loading a new classifier"< lr2 = StatModel::load(saveFilename); // predict using loaded classifier - cout<<"predicting the dataset using the loaded classfier\n"<predict(data_test, responses2); // calculate accuracy - cout<<"accuracy using loaded classifier: "<<100 * (float)cv::countNonZero(labels_test == responses2)/responses2.rows<<"%"< Date: Mon, 18 Aug 2014 13:11:02 +0400 Subject: [PATCH 42/43] Updated logistic regression example - Extracted common operations to separate functions. - Activated first parameters set. - Some output formatting. - Fixed loop break condition in mini_batch_gradient function. 
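The "Fixed loop break condition" item deserves a note: the old test in compute_mini_batch_gradient evaluated the constant this->params.term_crit.maxCount instead of comparing the counter against it, so the loop never terminated on the iteration count. A minimal standalone sketch of the difference, with a plain maxCount standing in for the termination-criteria field:

#include <iostream>

int main()
{
    const int maxCount = 100;
    int iterations = 0;

    // Broken form: 'for(int i = 0; maxCount; i++)' tests a non-zero
    // constant, so it would loop forever on the iteration count alone.

    // Fixed form: the counter is actually compared against the bound,
    // so the loop body runs exactly maxCount times.
    for (int i = 0; i < maxCount; i++)
        iterations++;

    std::cout << "iterations: " << iterations << std::endl; // prints 100
    return 0;
}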
--- modules/ml/src/lr.cpp | 2 +- samples/cpp/logistic_regression.cpp | 68 +++++++++++++++++------------ 2 files changed, 40 insertions(+), 30 deletions(-) diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 2a08e04b6c..9ca83311d9 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -446,7 +446,7 @@ cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data lambda_l = 1; } - for(int i = 0;this->params.term_crit.maxCount;i++) + for(int i = 0;i<this->params.term_crit.maxCount;i++) { if(j+size_b<=_data.rows) { diff --git a/samples/cpp/logistic_regression.cpp b/samples/cpp/logistic_regression.cpp index e97f602dfe..74a4c214ad 100644 --- a/samples/cpp/logistic_regression.cpp +++ b/samples/cpp/logistic_regression.cpp @@ -66,6 +66,21 @@ using namespace std; using namespace cv; using namespace cv::ml; +static void showImage(const Mat &data, int columns, const String &name) +{ + Mat bigImage; + for(int i = 0; i < data.rows; ++i) + { + bigImage.push_back(data.row(i).reshape(0, columns)); + } + imshow(name, bigImage.t()); +} + +static float calculateAccuracyPercent(const Mat &original, const Mat &predicted) +{ + return 100 * (float)cv::countNonZero(original == predicted) / predicted.rows; +} + int main() { const String filename = "data01.xml"; @@ -78,7 +93,7 @@ int main() Mat data, labels; { - cout << "loading the dataset" << endl; + cout << "loading the dataset..."; FileStorage f; if(f.open(filename, FileStorage::READ)) { @@ -88,7 +103,7 @@ int main() } else { - cerr << "File can not be opened: " << filename << endl; + cerr << "file can not be opened: " << filename << endl; return 1; } data.convertTo(data, CV_32F); @@ -114,27 +129,20 @@ int main() cout << "training/testing samples count: " << data_train.rows << "/" << data_test.rows << endl; // display sample image -// Mat bigImage; -// for(int i = 0; i < data_train.rows; ++i) -// { -// bigImage.push_back(data_train.row(i).reshape(0, 28)); -// } -// imshow("digits", bigImage.t()); - - Mat responses, result; - -// LogisticRegression::Params params = LogisticRegression::Params( -// 0.001, 10, LogisticRegression::BATCH, LogisticRegression::REG_L2, 1, 1); - // params1 (above) with batch gradient performs better than mini batch - // gradient below with same parameters + showImage(data_train, 28, "train data"); + showImage(data_test, 28, "test data"); + + + // simple case with batch gradient LogisticRegression::Params params = LogisticRegression::Params( - 0.001, 10, LogisticRegression::MINI_BATCH, LogisticRegression::REG_L2, 1, 1); + 0.001, 10, LogisticRegression::BATCH, LogisticRegression::REG_L2, 1, 1); + // simple case with mini-batch gradient + // LogisticRegression::Params params = LogisticRegression::Params( + // 0.001, 10, LogisticRegression::MINI_BATCH, LogisticRegression::REG_L2, 1, 1); - // however mini batch gradient descent parameters with slower learning - // rate(below) can be used to get higher accuracy than with parameters - // mentioned above -// LogisticRegression::Params params = LogisticRegression::Params( -// 0.000001, 10, LogisticRegression::MINI_BATCH, LogisticRegression::REG_L2, 1, 1); + // mini-batch gradient with higher accuracy + // LogisticRegression::Params params = LogisticRegression::Params( + // 0.000001, 10, LogisticRegression::MINI_BATCH, LogisticRegression::REG_L2, 1, 1); cout << "training..."; Ptr<LogisticRegression> lr1 = LogisticRegression::create(params); @@ -142,6 +150,7 @@ int main() cout << "done!"
<< endl; cout << "predicting..."; + Mat responses; lr1->predict(data_test, responses); cout << "done!" << endl; @@ -150,26 +159,27 @@ int main() labels_test.convertTo(labels_test, CV_32S); cout << labels_test.t() << endl; cout << responses.t() << endl; - result = (labels_test == responses) / 255; - cout << "accuracy: " << ((double)cv::sum(result)[0] / result.rows) * 100 << "%\n"; + cout << "accuracy: " << calculateAccuracyPercent(labels_test, responses) << "%" << endl; // save the classfier - cout << "saving the classifier" << endl; const String saveFilename = "NewLR_Trained.xml"; + cout << "saving the classifier to " << saveFilename << endl; lr1->save(saveFilename); // load the classifier onto new object - cout << "loading a new classifier" << endl; + cout << "loading a new classifier from " << saveFilename << endl; Ptr lr2 = StatModel::load(saveFilename); // predict using loaded classifier - cout << "predicting the dataset using the loaded classfier" << endl; + cout << "predicting the dataset using the loaded classfier..."; Mat responses2; lr2->predict(data_test, responses2); + cout << "done!" << endl; + // calculate accuracy - cout << "accuracy using loaded classifier: " - << 100 * (float)cv::countNonZero(labels_test == responses2) / responses2.rows << "%" - << endl; + cout << labels_test.t() << endl; + cout << responses2.t() << endl; + cout << "accuracy: " << calculateAccuracyPercent(labels_test, responses2) << "%" << endl; waitKey(0); return 0; From 108caae2164c37ed94bf0518d955f15bcd36dd00 Mon Sep 17 00:00:00 2001 From: Maksim Shabunin Date: Mon, 18 Aug 2014 18:15:10 +0400 Subject: [PATCH 43/43] Modified logistic regression module according to comments - Reworked documentation to reflect actual code - Removed some unused variables - Removed unnecessary 'cv::' modifiers --- modules/ml/doc/logistic_regression.rst | 100 +++++++--------- modules/ml/doc/ml.rst | 2 +- modules/ml/include/opencv2/ml.hpp | 5 +- modules/ml/src/lr.cpp | 154 ++++++++++++------------- modules/ml/test/test_lr.cpp | 8 +- samples/cpp/logistic_regression.cpp | 8 +- 6 files changed, 126 insertions(+), 151 deletions(-) diff --git a/modules/ml/doc/logistic_regression.rst b/modules/ml/doc/logistic_regression.rst index 62336b18bc..74e0321ea6 100644 --- a/modules/ml/doc/logistic_regression.rst +++ b/modules/ml/doc/logistic_regression.rst @@ -28,20 +28,22 @@ or class 0 if . In Logistic Regression, choosing the right parameters is of utmost importance for reducing the training error and ensuring high training accuracy. -``LogisticRegressionParams`` is the structure that defines parameters that are required to train a Logistic Regression classifier. -The learning rate is determined by ``LogisticRegressionParams.alpha``. It determines how faster we approach the solution. +``LogisticRegression::Params`` is the structure that defines parameters that are required to train a Logistic Regression classifier. +The learning rate is determined by ``LogisticRegression::Params.alpha``. It determines how faster we approach the solution. It is a positive real number. Optimization algorithms like Batch Gradient Descent and Mini-Batch Gradient Descent are supported in ``LogisticRegression``. It is important that we mention the number of iterations these optimization algorithms have to run. -The number of iterations are mentioned by ``LogisticRegressionParams.num_iters``. +The number of iterations are mentioned by ``LogisticRegression::Params.num_iters``. 
The number of iterations can be thought of as the number of steps taken, while the learning rate specifies whether each is a long step or a short step. These two parameters define how fast we arrive at a possible solution. -In order to compensate for overfitting regularization is performed, which can be enabled by setting ``LogisticRegressionParams.regularized`` to a positive integer (greater than zero). -One can specify what kind of regularization has to be performed by setting ``LogisticRegressionParams.norm`` to ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2`` values. -``LogisticRegression`` provides a choice of 2 training methods with Batch Gradient Descent or the Mini-Batch Gradient Descent. To specify this, set ``LogisticRegressionParams.train_method`` to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. -If ``LogisticRegressionParams`` is set to ``LogisticRegression::MINI_BATCH``, the size of the mini batch has to be to a postive integer using ``LogisticRegressionParams.mini_batch_size``. +In order to compensate for overfitting, regularization is performed, which can be enabled by setting ``LogisticRegression::Params.regularized`` to a positive integer (greater than zero). +One can specify what kind of regularization has to be performed by setting ``LogisticRegression::Params.norm`` to ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2`` values. +``LogisticRegression`` provides a choice of two training methods: Batch Gradient Descent or Mini-Batch Gradient Descent. To specify this, set ``LogisticRegression::Params.train_method`` to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. +If ``LogisticRegression::Params.train_method`` is set to ``LogisticRegression::MINI_BATCH``, the size of the mini batch has to be set to a positive integer using ``LogisticRegression::Params.mini_batch_size``. A sample set of training parameters for the Logistic Regression classifier can be initialized as follows: + :: - LogisticRegressionParams params; + + LogisticRegression::Params params; params.alpha = 0.5; params.num_iters = 10000; params.norm = LogisticRegression::REG_L2; @@ -49,16 +51,19 @@ A sample set of training parameters for the Logistic Regression classifier can b params.train_method = LogisticRegression::MINI_BATCH; params.mini_batch_size = 10; +**References:** + .. [LogRegWiki] http://en.wikipedia.org/wiki/Logistic_regression. Wikipedia article about the Logistic Regression algorithm. .. [RenMalik2003] Learning a Classification Model for Segmentation. Proc. CVPR, Nice, France (2003). .. [LogRegTomMitch] http://www.cs.cmu.edu/~tom/NewChapters.html. "Generative and Discriminative Classifiers: Naive Bayes and Logistic Regression" in Machine Learning, Tom Mitchell. + .. [BatchDesWiki] http://en.wikipedia.org/wiki/Gradient_descent_optimization. Wikipedia article about Gradient Descent based optimization. -LogisticRegressionParams ------------------------ -.. ocv:struct:: LogisticRegressionParams +LogisticRegression::Params +-------------------------- +.. ocv:struct:: LogisticRegression::Params Parameters of the Logistic Regression training algorithm. You can initialize the structure using a constructor or by declaring the variable and initializing the individual parameters. @@ -92,94 +97,71 @@ LogisticRegressionParams Sets termination criteria for training algorithm. -LogisticRegressionParams::LogisticRegressionParams -------------------------------------------------- -The constructors.
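For instance, a complete parameter set can be passed straight to ``LogisticRegression::create`` and trained from a CSV file; a minimal usage sketch, assuming an ``iris.data`` file like the one loaded by the module's tests: ::

    LogisticRegression::Params params(0.001, 10000, LogisticRegression::BATCH,
                                      LogisticRegression::REG_L2, 1, 1);
    Ptr<LogisticRegression> lr = LogisticRegression::create(params);
    Ptr<TrainData> tdata = TrainData::loadFromCSV("iris.data", 0);
    lr->train(tdata);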
+LogisticRegression::Params::Params +---------------------------------- +The constructors. -.. ocv:function:: LogisticRegressionParams::LogisticRegressionParams() - -.. ocv:function:: LogisticRegressionParams::LogisticRegressionParams(double learning_rate, int iters, int train_method, int normlization, int reg, int mini_batch_size) +.. ocv:function:: LogisticRegression::Params::Params(double learning_rate = 0.001, int iters = 1000, int method = LogisticRegression::BATCH, int normlization = LogisticRegression::REG_L2, int reg = 1, int batch_size = 1) :param learning_rate: Specifies the learning rate. :param iters: Specifies the number of iterations. - :param: train_method: Specifies the kind of training method used. It should be set to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. If using ``LogisticRegression::MINI_BATCH``, set ``LogisticRegressionParams.mini_batch_size`` to a positive integer. - - :param normalization: Specifies the kind of regularization to be applied. ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2`` (L1 norm or L2 norm). To use this, set ``LogisticRegressionParams.regularized`` to a integer greater than zero. + :param train_method: Specifies the kind of training method used. It should be set to either ``LogisticRegression::BATCH`` or ``LogisticRegression::MINI_BATCH``. If using ``LogisticRegression::MINI_BATCH``, set ``LogisticRegression::Params.mini_batch_size`` to a positive integer. - :param: reg: To enable or disable regularization. Set to positive integer (greater than zero) to enable and to 0 to disable. + :param normalization: Specifies the kind of regularization to be applied. ``LogisticRegression::REG_L1`` or ``LogisticRegression::REG_L2`` (L1 norm or L2 norm). To use this, set ``LogisticRegression::Params.regularized`` to an integer greater than zero. - :param: mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. Will only be used if using ``LogisticRegression::MINI_BATCH`` training algorithm. It has to take values less than the total number of training samples. + :param reg: To enable or disable regularization. Set to a positive integer (greater than zero) to enable and to 0 to disable. -The full constructor initializes corresponding members. The default constructor creates an object with dummy parameters. - -:: - - LogisticRegressionParams::LogisticRegressionParams() - { - term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.001); - alpha = 0.001; - num_iters = 1000; - norm = LogisticRegression::REG_L2; - regularized = 1; - train_method = LogisticRegression::BATCH; - mini_batch_size = 1; - } + :param mini_batch_size: Specifies the number of training samples taken in each step of Mini-Batch Gradient Descent. It will only be used with the ``LogisticRegression::MINI_BATCH`` training algorithm. It has to take values less than the total number of training samples. By initializing this structure, one can set all the parameters required for the Logistic Regression classifier. LogisticRegression ------------------ -.. ocv:class:: LogisticRegression - -Implements Logistic Regression classifier. -LogisticRegression::LogisticRegression -------------------------------------- -The constructors. -.. ocv:function:: LogisticRegression::LogisticRegression( const LogisticRegressionParams& params = LogisticRegressionParams()) +..
ocv:class:: LogisticRegression : public StatModel - :param params: The training parameters for the classifier of type ``LogisticRegressionParams``. - -.. ocv:function:: LogisticRegression::LogisticRegression(cv::InputArray data_ip, cv::InputArray labels_ip, const LogisticRegressionParams& params) +Implements Logistic Regression classifier. - :param data: The data variable of type ``CV_32F``. Each data instance has to be arranged per across different rows. +LogisticRegression::create +-------------------------- +Creates empty model. - :param labels_ip: The data variable of type ``CV_32F``. Each label instance has to be arranged across different rows. +.. ocv:function:: Ptr<LogisticRegression> LogisticRegression::create( const Params& params = Params() ) - :param params: The training parameters for the classifier of type ``LogisticRegressionParams``. + :param params: The training parameters for the classifier of type ``LogisticRegression::Params``. -The constructor with parameters allows to create a Logistic Regression object intialized with given data and trains it. +Creates a Logistic Regression model with the given parameters. LogisticRegression::train ------------------------- Trains the Logistic Regression classifier and returns true if successful. -.. ocv:function:: bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray label_ip) +.. ocv:function:: bool LogisticRegression::train( const Ptr<TrainData>& trainData, int flags=0 ) - :param data_ip: An InputArray variable of type ``CV_32F``. Each data instance has to be arranged per across different rows. - - :param labels_ip: An InputArray variable of type ``CV_32F``. Each label instance has to be arranged across differnet rows. + :param trainData: Instance of ml::TrainData class holding learning data. + :param flags: Not used. LogisticRegression::predict --------------------------- Predicts responses for input samples and returns a float type. -.. ocv:function:: void LogisticRegression::predict( cv::InputArray data, cv::OutputArray predicted_labels ) const +.. ocv:function:: void LogisticRegression::predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const + + :param samples: The input data for the prediction algorithm. Matrix [m x n], where each row contains variables (features) of one object being classified. Should have data type ``CV_32F``. - :param data: The input data for the prediction algorithm. The ``data`` variable should be of type ``CV_32F``. + :param results: Predicted labels as a column matrix of type ``CV_32S``. - :param predicted_labels: Predicted labels as a column matrix and of type ``CV_32S``. + :param flags: Not used. LogisticRegression::get_learnt_thetas ------------------------------------- This function returns the trained parameters arranged across rows. For a two-class classification problem, it returns a row matrix. -.. ocv:function:: const cv::Mat LogisticRegression::get_learnt_thetas() const +.. ocv:function:: Mat LogisticRegression::get_learnt_thetas() const It returns the learnt parameters of the Logistic Regression as a matrix of type ``CV_32F``.
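Taken together, the calls documented above form a short round trip; a self-contained sketch mirroring the updated sample, assuming the data01.xml sample data file is available:

#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    // load samples (one row per image) and their responses
    Mat data, labels;
    FileStorage f("data01.xml", FileStorage::READ);
    f["datamat"] >> data;
    f["labelsmat"] >> labels;
    f.release();
    data.convertTo(data, CV_32F);
    labels.convertTo(labels, CV_32F);

    // train with batch gradient descent
    LogisticRegression::Params params(0.001, 10, LogisticRegression::BATCH,
                                      LogisticRegression::REG_L2, 1, 1);
    Ptr<LogisticRegression> lr = LogisticRegression::create(params);
    lr->train(data, ROW_SAMPLE, labels);

    // predict on the training samples; results come back as CV_32S labels
    Mat responses;
    lr->predict(data, responses);
    labels.convertTo(labels, CV_32S);
    std::cout << "accuracy: "
              << 100.0 * countNonZero(labels == responses) / responses.rows
              << "%" << std::endl;

    // learnt parameters: one CV_32F row per trained (sub-)classifier
    std::cout << lr->get_learnt_thetas() << std::endl;
    return 0;
}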
diff --git a/modules/ml/doc/ml.rst b/modules/ml/doc/ml.rst index 5c7cc145c1..7f6c9d98f0 100644 --- a/modules/ml/doc/ml.rst +++ b/modules/ml/doc/ml.rst @@ -18,5 +18,5 @@ Most of the classification and regression algorithms are implemented as C++ clas random_trees expectation_maximization neural_networks - mldata logistic_regression + mldata diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp index 145eedba60..b223aa80c7 100644 --- a/modules/ml/include/opencv2/ml.hpp +++ b/modules/ml/include/opencv2/ml.hpp @@ -89,9 +89,6 @@ public: CV_PROP_RW double maxVal; CV_PROP_RW double logStep; }; -#define CV_TYPE_NAME_ML_LR "opencv-ml-lr" - - class CV_EXPORTS TrainData { @@ -590,7 +587,7 @@ public: int regularized; int train_method; int mini_batch_size; - cv::TermCriteria term_crit; + TermCriteria term_crit; }; enum { REG_L1 = 0, REG_L2 = 1}; diff --git a/modules/ml/src/lr.cpp b/modules/ml/src/lr.cpp index 9ca83311d9..ade60d7357 100644 --- a/modules/ml/src/lr.cpp +++ b/modules/ml/src/lr.cpp @@ -73,7 +73,7 @@ LogisticRegression::Params::Params(double learning_rate, regularized = reg; train_method = method; mini_batch_size = batch_size; - term_crit = cv::TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, alpha); + term_crit = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, alpha); } class LogisticRegressionImpl : public LogisticRegression @@ -90,25 +90,25 @@ public: virtual void clear(); virtual void write(FileStorage& fs) const; virtual void read(const FileNode& fn); - virtual cv::Mat get_learnt_thetas() const; + virtual Mat get_learnt_thetas() const; virtual int getVarCount() const { return learnt_thetas.cols; } virtual bool isTrained() const { return !learnt_thetas.empty(); } virtual bool isClassifier() const { return true; } virtual String getDefaultModelName() const { return "opencv_ml_lr"; } protected: - cv::Mat calc_sigmoid(const cv::Mat& data) const; - double compute_cost(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta); - cv::Mat compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta); - cv::Mat compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta); - bool set_label_map(const cv::Mat& _labels_i); - cv::Mat remap_labels(const cv::Mat& _labels_i, const map& lmap) const; + Mat calc_sigmoid(const Mat& data) const; + double compute_cost(const Mat& _data, const Mat& _labels, const Mat& _init_theta); + Mat compute_batch_gradient(const Mat& _data, const Mat& _labels, const Mat& _init_theta); + Mat compute_mini_batch_gradient(const Mat& _data, const Mat& _labels, const Mat& _init_theta); + bool set_label_map(const Mat& _labels_i); + Mat remap_labels(const Mat& _labels_i, const map& lmap) const; protected: Params params; - cv::Mat learnt_thetas; + Mat learnt_thetas; map forward_mapper; map reverse_mapper; - cv::Mat labels_o; - cv::Mat labels_n; + Mat labels_o; + Mat labels_n; }; Ptr LogisticRegression::create(const Params& params) @@ -119,8 +119,8 @@ Ptr LogisticRegression::create(const Params& params) bool LogisticRegressionImpl::train(const Ptr& trainData, int) { clear(); - cv::Mat _data_i = trainData->getSamples(); - cv::Mat _labels_i = trainData->getResponses(); + Mat _data_i = trainData->getSamples(); + Mat _labels_i = trainData->getResponses(); CV_Assert( !_labels_i.empty() && !_data_i.empty()); @@ -140,14 +140,14 @@ bool LogisticRegressionImpl::train(const Ptr& trainData, int) bool ok = false; - cv::Mat labels; + Mat labels; 
set_label_map(_labels_i); int num_classes = (int) this->forward_mapper.size(); // add a column of ones - cv::Mat data_t = cv::Mat::zeros(_data_i.rows, _data_i.cols+1, CV_32F); - vconcat(cv::Mat(_data_i.rows, 1, _data_i.type(), Scalar::all(1.0)), data_t.col(0)); + Mat data_t = Mat::zeros(_data_i.rows, _data_i.cols+1, CV_32F); + vconcat(Mat(_data_i.rows, 1, _data_i.type(), Scalar::all(1.0)), data_t.col(0)); for (int i=1;i& trainData, int) } - cv::Mat thetas = cv::Mat::zeros(num_classes, data_t.cols, CV_32F); - cv::Mat init_theta = cv::Mat::zeros(data_t.cols, 1, CV_32F); + Mat thetas = Mat::zeros(num_classes, data_t.cols, CV_32F); + Mat init_theta = Mat::zeros(data_t.cols, 1, CV_32F); - cv::Mat labels_l = remap_labels(_labels_i, this->forward_mapper); - cv::Mat new_local_labels; + Mat labels_l = remap_labels(_labels_i, this->forward_mapper); + Mat new_local_labels; int ii=0; - cv::Mat new_theta; + Mat new_theta; if(num_classes == 2) { @@ -203,7 +203,7 @@ bool LogisticRegressionImpl::train(const Ptr& trainData, int) } this->learnt_thetas = thetas.clone(); - if( cvIsNaN( (double)cv::sum(this->learnt_thetas)[0] ) ) + if( cvIsNaN( (double)sum(this->learnt_thetas)[0] ) ) { CV_Error( CV_StsBadArg, "check training parameters. Invalid training classifier" ); } @@ -215,7 +215,7 @@ float LogisticRegressionImpl::predict(InputArray samples, OutputArray results, i { /* returns a class of the predicted class class names can be 1,2,3,4, .... etc */ - cv::Mat thetas, data, pred_labs; + Mat thetas, data, pred_labs; data = samples.getMat(); // check if learnt_mats array is populated @@ -229,12 +229,12 @@ float LogisticRegressionImpl::predict(InputArray samples, OutputArray results, i } // add a column of ones - cv::Mat data_t = cv::Mat::zeros(data.rows, data.cols+1, CV_32F); + Mat data_t = Mat::zeros(data.rows, data.cols+1, CV_32F); for (int i=0;iparams.regularized > 0) { @@ -321,31 +319,31 @@ double LogisticRegressionImpl::compute_cost(const cv::Mat& _data, const cv::Mat& if(this->params.norm == LogisticRegression::REG_L1) { - rparameter = (llambda/(2*m)) * cv::sum(theta_b)[0]; + rparameter = (llambda/(2*m)) * sum(theta_b)[0]; } else { // assuming it to be L2 by default - rparameter = (llambda/(2*m)) * cv::sum(theta_c)[0]; + rparameter = (llambda/(2*m)) * sum(theta_c)[0]; } d_a = calc_sigmoid(_data* _init_theta); - cv::log(d_a, d_a); - cv::multiply(d_a, _labels, d_a); + log(d_a, d_a); + multiply(d_a, _labels, d_a); d_b = 1 - calc_sigmoid(_data * _init_theta); - cv::log(d_b, d_b); - cv::multiply(d_b, 1-_labels, d_b); + log(d_b, d_b); + multiply(d_b, 1-_labels, d_b); - cost = (-1.0/m) * (cv::sum(d_a)[0] + cv::sum(d_b)[0]); + cost = (-1.0/m) * (sum(d_a)[0] + sum(d_b)[0]); cost = cost + rparameter; return cost; } -cv::Mat LogisticRegressionImpl::compute_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +Mat LogisticRegressionImpl::compute_batch_gradient(const Mat& _data, const Mat& _labels, const Mat& _init_theta) { // implements batch gradient descent if(this->params.alpha<=0) @@ -361,11 +359,11 @@ cv::Mat LogisticRegressionImpl::compute_batch_gradient(const cv::Mat& _data, con int llambda = 0; double ccost; int m, n; - cv::Mat pcal_a; - cv::Mat pcal_b; - cv::Mat pcal_ab; - cv::Mat gradient; - cv::Mat theta_p = _init_theta.clone(); + Mat pcal_a; + Mat pcal_b; + Mat pcal_ab; + Mat gradient; + Mat theta_p = _init_theta.clone(); m = _data.rows; n = _data.cols; @@ -393,7 +391,7 @@ cv::Mat LogisticRegressionImpl::compute_batch_gradient(const cv::Mat& _data, con pcal_b = 
_data(Range::all(), Range(0,1)); - cv::multiply(pcal_a, pcal_b, pcal_ab, 1); + multiply(pcal_a, pcal_b, pcal_ab, 1); gradient.row(0) = ((float)1/m) * sum(pcal_ab)[0]; @@ -404,9 +402,9 @@ cv::Mat LogisticRegressionImpl::compute_batch_gradient(const cv::Mat& _data, con { pcal_b = _data(Range::all(), Range(ii,ii+1)); - cv::multiply(pcal_a, pcal_b, pcal_ab, 1); + multiply(pcal_a, pcal_b, pcal_ab, 1); - gradient.row(ii) = (1.0/m)*cv::sum(pcal_ab)[0] + (llambda/m) * theta_p.row(ii); + gradient.row(ii) = (1.0/m)*sum(pcal_ab)[0] + (llambda/m) * theta_p.row(ii); } theta_p = theta_p - ( static_cast(this->params.alpha)/m)*gradient; @@ -414,7 +412,7 @@ cv::Mat LogisticRegressionImpl::compute_batch_gradient(const cv::Mat& _data, con return theta_p; } -cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data, const cv::Mat& _labels, const cv::Mat& _init_theta) +Mat LogisticRegressionImpl::compute_mini_batch_gradient(const Mat& _data, const Mat& _labels, const Mat& _init_theta) { // implements batch gradient descent int lambda_l = 0; @@ -433,13 +431,13 @@ cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data CV_Error( CV_StsBadArg, "number of iterations cannot be zero or a negative number" ); } - cv::Mat pcal_a; - cv::Mat pcal_b; - cv::Mat pcal_ab; - cv::Mat gradient; - cv::Mat theta_p = _init_theta.clone(); - cv::Mat data_d; - cv::Mat labels_l; + Mat pcal_a; + Mat pcal_b; + Mat pcal_ab; + Mat gradient; + Mat theta_p = _init_theta.clone(); + Mat data_d; + Mat labels_l; if(this->params.regularized > 0) { @@ -479,7 +477,7 @@ cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data pcal_b = data_d(Range::all(), Range(0,1)); - cv::multiply(pcal_a, pcal_b, pcal_ab, 1); + multiply(pcal_a, pcal_b, pcal_ab, 1); gradient.row(0) = ((float)1/m) * sum(pcal_ab)[0]; @@ -488,8 +486,8 @@ cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data for(int k = 1;k(this->params.alpha)/m)*gradient; @@ -505,15 +503,14 @@ cv::Mat LogisticRegressionImpl::compute_mini_batch_gradient(const cv::Mat& _data return theta_p; } -bool LogisticRegressionImpl::set_label_map(const cv::Mat &_labels_i) +bool LogisticRegressionImpl::set_label_map(const Mat &_labels_i) { // this function creates two maps to map user defined labels to program friendly labels two ways. 
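    // illustration (not in the original source): user labels such as
    // {2, 7, 9} become forward_mapper = {2:0, 7:1, 9:2}, and reverse_mapper
    // holds the inverse so predictions can be reported in the user's labels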
int ii = 0; - cv::Mat labels; - bool ok = false; + Mat labels; - this->labels_o = cv::Mat(0,1, CV_8U); - this->labels_n = cv::Mat(0,1, CV_8U); + this->labels_o = Mat(0,1, CV_8U); + this->labels_n = Mat(0,1, CV_8U); _labels_i.convertTo(labels, CV_32S); @@ -534,17 +531,16 @@ bool LogisticRegressionImpl::set_label_map(const cv::Mat &_labels_i) { this->reverse_mapper[it->second] = it->first; } - ok = true; - return ok; + return true; } -cv::Mat LogisticRegressionImpl::remap_labels(const cv::Mat& _labels_i, const map<int, int>& lmap) const +Mat LogisticRegressionImpl::remap_labels(const Mat& _labels_i, const map<int, int>& lmap) const { - cv::Mat labels; + Mat labels; _labels_i.convertTo(labels, CV_32S); - cv::Mat new_labels = cv::Mat::zeros(labels.rows, labels.cols, labels.type()); + Mat new_labels = Mat::zeros(labels.rows, labels.cols, labels.type()); CV_Assert( lmap.size() > 0 ); @@ -615,7 +611,7 @@ void LogisticRegressionImpl::read(const FileNode& fn) } } -cv::Mat LogisticRegressionImpl::get_learnt_thetas() const +Mat LogisticRegressionImpl::get_learnt_thetas() const { return this->learnt_thetas; } diff --git a/modules/ml/test/test_lr.cpp b/modules/ml/test/test_lr.cpp index a5f1306c3d..18de0825dc 100644 --- a/modules/ml/test/test_lr.cpp +++ b/modules/ml/test/test_lr.cpp @@ -74,7 +74,7 @@ static bool calculateError( const Mat& _p_labels, const Mat& _o_labels, float& e CV_Assert(_p_labels_temp.total() == _o_labels_temp.total()); CV_Assert(_p_labels_temp.rows == _o_labels_temp.rows); - accuracy = (float)cv::countNonZero(_p_labels_temp == _o_labels_temp)/_p_labels_temp.rows; + accuracy = (float)countNonZero(_p_labels_temp == _o_labels_temp)/_p_labels_temp.rows; error = 1 - accuracy; return true; } @@ -166,7 +166,7 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) params1.mini_batch_size = 10; // train and save the classifier - String filename = cv::tempfile(".xml"); + String filename = tempfile(".xml"); try { // run LR classifier train classifier @@ -208,8 +208,8 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ ) // check if there is any difference between computed learnt mat and retrieved mat float errorCount = 0.0; - errorCount += 1 - (float)cv::countNonZero(responses1 == responses2)/responses1.rows; - errorCount += 1 - (float)cv::sum(comp_learnt_mats)[0]/comp_learnt_mats.rows; + errorCount += 1 - (float)countNonZero(responses1 == responses2)/responses1.rows; + errorCount += 1 - (float)sum(comp_learnt_mats)[0]/comp_learnt_mats.rows; if(errorCount>0) { diff --git a/samples/cpp/logistic_regression.cpp b/samples/cpp/logistic_regression.cpp index 74a4c214ad..a30853a819 100644 --- a/samples/cpp/logistic_regression.cpp +++ b/samples/cpp/logistic_regression.cpp @@ -58,9 +58,9 @@ #include <iostream> -#include <opencv2/core/core.hpp> -#include <opencv2/ml/ml.hpp> -#include <opencv2/highgui/highgui.hpp> +#include <opencv2/core.hpp> +#include <opencv2/ml.hpp> +#include <opencv2/highgui.hpp> using namespace std; using namespace cv; using namespace cv::ml; @@ -78,7 +78,7 @@ static void showImage(const Mat &data, int columns, const String &name) static float calculateAccuracyPercent(const Mat &original, const Mat &predicted) { - return 100 * (float)cv::countNonZero(original == predicted) / predicted.rows; + return 100 * (float)countNonZero(original == predicted) / predicted.rows; } int main()
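For reference, the shared calculateAccuracyPercent helper above reduces to a small self-contained check; a sketch with hypothetical label vectors:

#include <iostream>
#include <opencv2/core.hpp>

using namespace cv;

static float calculateAccuracyPercent(const Mat &original, const Mat &predicted)
{
    // percentage of rows where the two label columns agree
    return 100 * (float)countNonZero(original == predicted) / predicted.rows;
}

int main()
{
    Mat original  = (Mat_<int>(4, 1) << 0, 1, 1, 0);
    Mat predicted = (Mat_<int>(4, 1) << 0, 1, 0, 0);
    // three of four labels match, so this prints 75
    std::cout << calculateAccuracyPercent(original, predicted) << "%" << std::endl;
    return 0;
}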