author    Iliyan Malchev <malchev@google.com>  2008-09-16 16:12:11 -0700
committer Iliyan Malchev <malchev@google.com>  2008-09-16 16:12:11 -0700
commit    b84fb5f91f9b90864e44d248cfe0c2f22bf54048 (patch)
tree      324db0a2e9265a49ca26691631e7f7e1a9e00545
parent    b04539f2fc1c8912b28680309b78dbe748156027 (diff)
download  tesseract-b84fb5f91f9b90864e44d248cfe0c2f22bf54048.tar.gz
integrate CL 8288284
Signed-off-by: Iliyan Malchev <malchev@google.com>
-rw-r--r--  ccmain/baseapi.cpp                             |  17
-rw-r--r--  ccmain/baseapi.h                               |   1
-rw-r--r--  ccmain/control.cpp                             |  81
-rw-r--r--  ccmain/tessedit.cpp                            |   6
-rw-r--r--  classify/adaptmatch.cpp                        | 455
-rw-r--r--  classify/classify.cpp                          |   8
-rw-r--r--  classify/classify.h                            |  36
-rw-r--r--  classify/float2int.cpp                         |  11
-rw-r--r--  classify/float2int.h                           |  31
-rw-r--r--  classify/normmatch.cpp                         |  11
-rw-r--r--  classify/normmatch.h                           |  24
-rw-r--r--  image/imgs.cpp                                 |   5
-rw-r--r--  image/imgtiff.cpp                              |  17
-rwxr-xr-x  neural_networks/runtime/BUILD                  |  16
-rw-r--r--  neural_networks/runtime/input_file_buffer.cpp  |  36
-rw-r--r--  neural_networks/runtime/input_file_buffer.h    |  28
-rw-r--r--  neural_networks/runtime/neural_net.cpp         | 220
-rw-r--r--  neural_networks/runtime/neural_net.h           | 229
-rw-r--r--  neural_networks/runtime/neuron.cpp             |  94
-rw-r--r--  neural_networks/runtime/neuron.h               | 141
-rw-r--r--  neural_networks/runtime/sigmoid_table.cpp      | 514
21 files changed, 1519 insertions, 462 deletions
diff --git a/ccmain/baseapi.cpp b/ccmain/baseapi.cpp
index 6340bd9..a0f26ff 100644
--- a/ccmain/baseapi.cpp
+++ b/ccmain/baseapi.cpp
@@ -77,6 +77,7 @@ TessBaseAPI::TessBaseAPI()
// A constructor of a derived API, SetThresholder(), or
// created implicitly when used in InternalSetImage.
thresholder_(NULL),
+ threshold_done_(false),
block_list_(NULL),
page_res_(NULL),
input_file_(NULL),
@@ -277,7 +278,7 @@ void TessBaseAPI::SetRectangle(int left, int top, int width, int height) {
Pix* TessBaseAPI::GetThresholdedImage() {
if (tesseract_ == NULL)
return NULL;
- if (page_res_ == NULL)
+ if (!threshold_done_)
Threshold();
return page_image.ToPix();
}
@@ -311,8 +312,10 @@ int TessBaseAPI::Recognize(struct ETEXT_STRUCT* monitor) {
tprintf("Please call SetImage before attempting recognition.");
return -1;
}
- ClearResults();
- Threshold();
+ if (page_res_ != NULL)
+ ClearResults();
+ if (!threshold_done_)
+ Threshold();
if (FindLines() != 0)
return -1;
if (tesseract_->tessedit_resegment_from_boxes)
@@ -660,7 +663,7 @@ int TessBaseAPI::IsValidWord(const char *word) {
bool TessBaseAPI::GetTextDirection(int* out_offset, float* out_slope) {
- if (thresholder_ != NULL)
+ if (thresholder_ != NULL && !threshold_done_)
Threshold();
if (page_res_ == NULL)
FindLines();
@@ -713,6 +716,7 @@ void TessBaseAPI::Threshold() {
thresholder_->GetImageSizes(&rect_left_, &rect_top_,
&rect_width_, &rect_height_,
&image_width_, &image_height_);
+ threshold_done_ = true;
}
// Find lines from the image making the BLOCK_LIST.
@@ -721,14 +725,17 @@ int TessBaseAPI::FindLines() {
// component analysis and text line creation.
if (input_file_ == NULL)
input_file_ = new STRING(kInputFile);
- if (tesseract_ == NULL)
+ if (tesseract_ == NULL) {
tesseract_ = new Tesseract;
+ tesseract_->InitAdaptiveClassifier();
+ }
tesseract_->pgeditor_read_file(*input_file_, block_list_);
return 0;
}
// Delete the pageres and clear the block list ready for a new page.
void TessBaseAPI::ClearResults() {
+ threshold_done_ = false;
if (page_res_ != NULL) {
delete page_res_;
page_res_ = NULL;
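
Note on the baseapi.cpp change above: GetThresholdedImage(), Recognize() and ClearResults() now share a dirty-flag cache around Threshold() — the expensive thresholding step runs only when threshold_done_ is false, and ClearResults() invalidates the cache. A minimal sketch of the same pattern, using hypothetical names (ThresholdCache, ComputeThreshold) rather than the real TessBaseAPI members:

struct Image {};  // placeholder for the thresholded page image

class ThresholdCache {
 public:
  ThresholdCache() : threshold_done_(false) {}
  // Invalidate the cache, as ClearResults() does above.
  void Invalidate() { threshold_done_ = false; }
  // Return the cached image, recomputing it only when stale,
  // as GetThresholdedImage() and Recognize() now do.
  const Image& Get() {
    if (!threshold_done_) {
      image_ = ComputeThreshold();  // the expensive step, run at most once
      threshold_done_ = true;
    }
    return image_;
  }
 private:
  Image ComputeThreshold() { return Image(); }  // stand-in for Threshold()
  Image image_;
  bool threshold_done_;
};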
diff --git a/ccmain/baseapi.h b/ccmain/baseapi.h
index 8a7c34c..b36b951 100644
--- a/ccmain/baseapi.h
+++ b/ccmain/baseapi.h
@@ -297,6 +297,7 @@ class TessBaseAPI {
protected:
Tesseract* tesseract_; // The underlying data object.
ImageThresholder* thresholder_; // The image thresholding module.
+ bool threshold_done_; // The image has been passed to page_image.
BLOCK_LIST* block_list_; // The page layout.
PAGE_RES* page_res_; // The page-level data.
STRING* input_file_; // Name used by training code.
diff --git a/ccmain/control.cpp b/ccmain/control.cpp
index 93e2125..0b1cb61 100644
--- a/ccmain/control.cpp
+++ b/ccmain/control.cpp
@@ -1551,8 +1551,11 @@ void Tesseract::set_word_fonts(
// character iterator
BLOB_CHOICE_LIST_C_IT char_it = blob_choices;
BLOB_CHOICE_IT choice_it; // choice iterator
- STATS fonts(0, get_fontinfo_table().size() ?
- get_fontinfo_table().size() : 32); // font counters
+ int fontinfo_size = get_fontinfo_table().size();
+ int fontset_size = get_fontset_table().size();
+ if (fontinfo_size == 0 || fontset_size == 0)
+ return;
+ STATS fonts(0, fontinfo_size); // font counters
word->italic = 0;
word->bold = 0;
@@ -1566,51 +1569,31 @@ void Tesseract::set_word_fonts(
config = choice_it.data()->config();
int class_id = choice_it.data()->unichar_id();
int font_set_id = PreTrainedTemplates->Class[class_id]->font_set_id;
- if (tessedit_debug_fonts)
- tprintf("%s(%d=%d%c%c)", unicharset.id_to_unichar(choice_char_id),
- config, (config & 31) >> 2,
- config & 2 ? 'N' : 'B', config & 1 ? 'N' : 'I');
- if (tessedit_debug_fonts && config >= 0 && font_set_id >= 0) {
- const char* fontname;
- if (get_fontset_table().get(font_set_id).size < config) {
- fontname = "Unknown";
- } else {
- fontname = get_fontinfo_table().get(
- get_fontset_table().get(font_set_id).configs[config]).name;
+ if (font_set_id >= 0 && config >= 0 && font_set_id < fontset_size) {
+ FontSet font_set = get_fontset_table().get(font_set_id);
+ if (tessedit_debug_fonts) {
+ tprintf("%s(%d=%d%c%c)", unicharset.id_to_unichar(choice_char_id),
+ config, (config & 31) >> 2,
+ config & 2 ? 'N' : 'B', config & 1 ? 'N' : 'I');
+ const char* fontname;
+ if (config >= font_set.size) {
+ fontname = "Unknown";
+ } else {
+ fontname = get_fontinfo_table().get(
+ font_set.configs[config]).name;
+ }
+ tprintf("%s(%d,%d=%s)\n",
+ unicharset.id_to_unichar(choice_it.data()->unichar_id()),
+ font_set_id, config, fontname);
}
- tprintf("%s(%d,%d=%s)\n",
- unicharset.id_to_unichar(choice_it.data()->unichar_id()),
- font_set_id, config, fontname);
- }
-
- if (font_set_id >= 0 && config >= 0) {
- int fontinfo_id = get_fontset_table().get(
- font_set_id).configs[config];
- FontInfo fi =
- get_fontinfo_table().get(fontinfo_id);
- word->italic += fi.is_italic();
- word->bold += fi.is_bold();
- fonts.add(fontinfo_id, 1);
- } else if (config >= 0) {
- config &= 31;
- word->italic += config & 1 ? -1 : 1;
- word->bold += config & 2 ? -1 : 1;
- if (config < 4 || config > 7) {
- int val = -1;
- if (config < 4)
- val = 2;
- else if (config < 12)
- val = 0;
- else if (config < 16)
- val = 3;
- else if (config < 20)
- val = 4;
- else if (config < 24)
- val = 5;
- else if (config < 32)
- val = 2;
-
- fonts.add (val, 1);
+ if (config < font_set.size) {
+ int fontinfo_id = font_set.configs[config];
+ if (fontinfo_id < fontinfo_size) {
+ FontInfo fi = get_fontinfo_table().get(fontinfo_id);
+ word->italic += fi.is_italic();
+ word->bold += fi.is_bold();
+ fonts.add(fontinfo_id, 1);
+ }
}
}
break;
@@ -1633,11 +1616,11 @@ void Tesseract::set_word_fonts(
config = choice_it.data()->config();
int class_id = choice_it.data()->unichar_id();
int font_set_id = PreTrainedTemplates->Class[class_id]->font_set_id;
- if (font_set_id >= 0 && config >= 0) {
+ if (font_set_id >= 0 && config >= 0 && font_set_id < fontset_size) {
int fontinfo_id = get_fontset_table().get(font_set_id).
configs[config];
- FontInfo fi = fontinfo_table_.get(fontinfo_id);
- if (fontinfo_id == word->font1) {
+ if (fontinfo_id == word->font1 && fontinfo_id < fontinfo_size) {
+ FontInfo fi = fontinfo_table_.get(fontinfo_id);
word->italic += fi.is_italic();
word->bold += fi.is_bold();
}
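
Note on the control.cpp change above: every index is now validated against the size of the table it indexes (font_set_id against the fontset table, config against the font set, fontinfo_id against the fontinfo table) before any lookup. A minimal sketch of that guard order, with hypothetical stand-in types rather than the real UnicityTable-based tables:

#include <cstddef>
#include <vector>

struct FontSet  { std::vector<int> configs; };   // hypothetical stand-in
struct FontInfo { bool italic; bool bold; };     // hypothetical stand-in

// Returns NULL unless every index is in range for its table, mirroring the
// guards added to set_word_fonts().
const FontInfo* LookupFontInfo(const std::vector<FontSet>& fontset_table,
                               const std::vector<FontInfo>& fontinfo_table,
                               int font_set_id, int config) {
  if (font_set_id < 0 || font_set_id >= static_cast<int>(fontset_table.size()))
    return NULL;                        // font set id out of range
  const FontSet& font_set = fontset_table[font_set_id];
  if (config < 0 || config >= static_cast<int>(font_set.configs.size()))
    return NULL;                        // config not present in this font set
  int fontinfo_id = font_set.configs[config];
  if (fontinfo_id < 0 || fontinfo_id >= static_cast<int>(fontinfo_table.size()))
    return NULL;                        // stale or corrupt fontinfo id
  return &fontinfo_table[fontinfo_id];
}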
diff --git a/ccmain/tessedit.cpp b/ccmain/tessedit.cpp
index 05d920e..022c1ff 100644
--- a/ccmain/tessedit.cpp
+++ b/ccmain/tessedit.cpp
@@ -241,11 +241,15 @@ void read_tiff_image(TIFF* tif, IMAGE* image) {
tdata_t buf;
uint32 image_width, image_height;
uint16 photometric;
- short bpp;
+ inT16 bpp;
+ inT16 samples_per_pixel = 0;
TIFFGetField(tif, TIFFTAG_IMAGEWIDTH, &image_width);
TIFFGetField(tif, TIFFTAG_IMAGELENGTH, &image_height);
TIFFGetField(tif, TIFFTAG_BITSPERSAMPLE, &bpp);
+ TIFFGetField(tif, TIFFTAG_SAMPLESPERPIXEL, &samples_per_pixel);
TIFFGetField(tif, TIFFTAG_PHOTOMETRIC, &photometric);
+ if (samples_per_pixel > 1)
+ bpp *= samples_per_pixel;
// Tesseract's internal representation is 0-is-black,
// so if the photometric is 1 (min is black) then high-valued pixels
// are 1 (white), otherwise they are 0 (black).
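
Note on the tessedit.cpp change above (and the matching imgtiff.cpp change below): TIFFTAG_BITSPERSAMPLE is a per-sample depth, so multi-sample images must be scaled by TIFFTAG_SAMPLESPERPIXEL to get the per-pixel depth, e.g. 8 bits/sample x 3 samples (RGB) = 24 bpp. A hedged sketch of the same computation, assuming libtiff:

#include <tiffio.h>

// Returns the effective per-pixel bit depth, as computed in read_tiff_image().
static int EffectiveBitsPerPixel(TIFF* tif) {
  uint16 bits_per_sample = 1;     // TIFF defaults apply when a tag is absent
  uint16 samples_per_pixel = 1;
  TIFFGetField(tif, TIFFTAG_BITSPERSAMPLE, &bits_per_sample);
  TIFFGetField(tif, TIFFTAG_SAMPLESPERPIXEL, &samples_per_pixel);
  // e.g. 8 * 3 (RGB) -> 24 bpp; 1 * 1 -> 1 bpp bilevel.
  return bits_per_sample * samples_per_pixel;
}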
diff --git a/classify/adaptmatch.cpp b/classify/adaptmatch.cpp
index 7ab26a6..b852802 100644
--- a/classify/adaptmatch.cpp
+++ b/classify/adaptmatch.cpp
@@ -176,6 +176,7 @@ UNICHAR_ID *GetAmbiguities(TBLOB *Blob,
LINE_STATS *LineStats,
CLASS_ID CorrectClass);
+namespace tesseract {
int GetBaselineFeatures(TBLOB *Blob,
LINE_STATS *LineStats,
INT_TEMPLATES Templates,
@@ -183,14 +184,6 @@ int GetBaselineFeatures(TBLOB *Blob,
CLASS_NORMALIZATION_ARRAY CharNormArray,
inT32 *BlobLength);
-FLOAT32 GetBestRatingFor(TBLOB *Blob, LINE_STATS *LineStats, CLASS_ID ClassId);
-
-int GetCharNormFeatures(TBLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- inT32 *BlobLength);
int GetIntBaselineFeatures(TBLOB *Blob,
LINE_STATS *LineStats,
@@ -199,21 +192,10 @@ int GetIntBaselineFeatures(TBLOB *Blob,
CLASS_NORMALIZATION_ARRAY CharNormArray,
inT32 *BlobLength);
-int GetIntCharNormFeatures(TBLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- inT32 *BlobLength);
+} // namespace tesseract.
void InitMatcherRatings(register FLOAT32 *Rating);
-int MakeNewTemporaryConfig(ADAPT_TEMPLATES Templates,
- CLASS_ID ClassId,
- int NumFeatures,
- INT_FEATURE_ARRAY Features,
- FEATURE_SET FloatFeatures);
-
PROTO_ID MakeNewTempProtos(FEATURE_SET Features,
int NumBadFeat,
FEATURE_ID BadFeat[],
@@ -245,196 +227,6 @@ void ShowBestMatchFor(TBLOB *Blob,
BOOL8 AdaptiveOn,
BOOL8 PreTrainedOn);
-/*
-#if defined(__STDC__) || defined(__cplusplus)
-# define _ARGS(s) s
-#else
-# define _ARGS(s) ()
-#endif*/
-
-/* /users/danj/wiseowl/src/danj/microfeatures/adaptmatch.c
-int AdaptableWord
- _ARGS((TWERD *Word,
- char *BestChoice,
- char *BestRawChoice));
-
- void AdaptToChar
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- CLASS_ID ClassId,
- FLOAT32 Threshold));
-
- void AdaptToPunc
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- CLASS_ID ClassId,
- FLOAT32 Threshold));
-
- void AddNewResult
- _ARGS((ADAPT_RESULTS *Results,
- CLASS_ID ClassId,
- FLOAT32 Rating,
- int ConfigId));
-
- void AmbigClassifier
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- char *Ambiguities,
- ADAPT_RESULTS *Results));
-
- char *BaselineClassifier
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- ADAPT_TEMPLATES Templates,
- ADAPT_RESULTS *Results));
-
- void CharNormClassifier
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- ADAPT_RESULTS *Results));
-
- void ClassifyAsNoise
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- ADAPT_RESULTS *Results));
-
- int CompareCurrentRatings
- _ARGS((CLASS_ID *Class1,
- CLASS_ID *Class2));
-
- LIST ConvertMatchesToChoices
- _ARGS((ADAPT_RESULTS *Results));
-
- void DebugAdaptiveClassifier
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- ADAPT_RESULTS *Results));
-
- void DoAdaptiveMatch
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- ADAPT_RESULTS *Results));
-
- void GetAdaptThresholds
- _ARGS((TWERD *Word,
- LINE_STATS *LineStats,
- char *BestChoice,
- char *BestRawChoice,
- FLOAT32 Thresholds []));
-
-int GetAdaptiveFeatures
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_FEATURE_ARRAY IntFeatures,
- CHAR_DESC *FloatFeatures));
-
- char *GetAmbiguities
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- CLASS_ID CorrectClass));
-
- int GetBaselineFeatures
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- FLOAT32 *BlobLength));
-
- FLOAT32 GetBestRatingFor
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- CLASS_ID ClassId));
-
- int GetCharNormFeatures
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- FLOAT32 *BlobLength));
-
- int GetIntBaselineFeatures
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- FLOAT32 *BlobLength));
-
- int GetIntCharNormFeatures
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- FLOAT32 *BlobLength));
-
- void InitMatcherRatings
- _ARGS((FLOAT32 *Rating));
-
-void InitAdaptedClass
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- CLASS_ID ClassId,
- ADAPT_CLASS Class,
- ADAPT_TEMPLATES Templates));
-
- void MakeNewTemporaryConfig
- _ARGS((ADAPT_TEMPLATES Templates,
- CLASS_ID ClassId,
- int NumFeatures,
- INT_FEATURE_ARRAY Features,
- FEATURE_SET FloatFeatures));
-
- PROTO_ID MakeNewTempProtos
- _ARGS((FEATURE_SET Features,
- int NumBadFeat,
- FEATURE_ID BadFeat [],
- INT_CLASS IClass,
- ADAPT_CLASS Class,
- BIT_VECTOR TempProtoMask));
-
- void MakePermanent
- _ARGS((ADAPT_TEMPLATES Templates,
- CLASS_ID ClassId,
- int ConfigId,
- BLOB *Blob,
- LINE_STATS *LineStats));
-
- int MakeTempProtoPerm
- _ARGS((TEMP_PROTO TempProto,
- PROTO_KEY *ProtoKey));
-
- int NumBlobsIn
- _ARGS((TWERD *Word));
-
- int NumOutlinesInBlob
- _ARGS((BLOB *Blob));
-
- void PrintAdaptiveMatchResults
- _ARGS((FILE *File,
- ADAPT_RESULTS *Results));
-
- void RemoveBadMatches
- _ARGS((ADAPT_RESULTS *Results));
- void RemoveExtraPuncs
- _ARGS((ADAPT_RESULTS *Results));
-
- void SetAdaptiveThreshold
- _ARGS((FLOAT32 Threshold));
-
- void ShowBestMatchFor
- _ARGS((BLOB *Blob,
- LINE_STATS *LineStats,
- CLASS_ID ClassId,
- BOOL8 AdaptiveOn,
- BOOL8 PreTrainedOn));
-
-#undef _ARGS
-*/
/**----------------------------------------------------------------------------
Global Data Definitions and Declarations
@@ -472,14 +264,6 @@ static const char *BuiltInCutoffsFile = BUILT_IN_CUTOFFS_FILE;
static CLASS_CUTOFF_ARRAY CharNormCutoffs;
static CLASS_CUTOFF_ARRAY BaselineCutoffs;
-/* create dummy proto and config masks for use with the built-in templates */
-static BIT_VECTOR AllProtosOn;
-static BIT_VECTOR PrunedProtos;
-static BIT_VECTOR AllConfigsOn;
-static BIT_VECTOR AllProtosOff;
-static BIT_VECTOR AllConfigsOff;
-static BIT_VECTOR TempProtoMask;
-
/* define control knobs for adaptive matcher */
make_toggle_const(EnableAdaptiveMatcher, 1, MakeEnableAdaptiveMatcher);
/* PREV DEFAULT 0 */
@@ -799,24 +583,26 @@ void Classify::EndAdaptiveClassifier() {
AdaptedTemplates = NULL;
}
- if (PreTrainedTemplates == NULL)
- return; // This function isn't safe to run twice.
+ if (PreTrainedTemplates != NULL) {
+ free_int_templates(PreTrainedTemplates);
+ PreTrainedTemplates = NULL;
+ }
getDict().EndDangerousAmbigs();
FreeNormProtos();
- free_int_templates(PreTrainedTemplates);
- PreTrainedTemplates = NULL;
- FreeBitVector(AllProtosOn);
- FreeBitVector(PrunedProtos);
- FreeBitVector(AllConfigsOn);
- FreeBitVector(AllProtosOff);
- FreeBitVector(AllConfigsOff);
- FreeBitVector(TempProtoMask);
- AllProtosOn = NULL;
- PrunedProtos = NULL;
- AllConfigsOn = NULL;
- AllProtosOff = NULL;
- AllConfigsOff = NULL;
- TempProtoMask = NULL;
+ if (AllProtosOn != NULL) {
+ FreeBitVector(AllProtosOn);
+ FreeBitVector(PrunedProtos);
+ FreeBitVector(AllConfigsOn);
+ FreeBitVector(AllProtosOff);
+ FreeBitVector(AllConfigsOff);
+ FreeBitVector(TempProtoMask);
+ AllProtosOn = NULL;
+ PrunedProtos = NULL;
+ AllConfigsOn = NULL;
+ AllProtosOff = NULL;
+ AllConfigsOff = NULL;
+ TempProtoMask = NULL;
+ }
} /* EndAdaptiveClassifier */
@@ -848,82 +634,78 @@ void Classify::InitAdaptiveClassifier() {
** Exceptions: none
** History: Mon Mar 11 12:49:34 1991, DSJ, Created.
*/
- int i;
- FILE *File;
- STRING Filename;
-
if (!EnableAdaptiveMatcher)
return;
- if (PreTrainedTemplates != NULL)
+ if (AllProtosOn != NULL)
EndAdaptiveClassifier(); // Don't leak with multiple inits.
- Filename = language_data_path_prefix;
- Filename += BuiltInTemplatesFile;
- #ifndef SECURE_NAMES
- // cprintf( "\nReading built-in templates from %s ...\n",
- // Filename.string());
- fflush(stdout);
- #endif
+ // If there is no language_data_path_prefix, the classifier will be
+ // adaptive only.
+ if (language_data_path_prefix.length() > 0) {
+ FILE *File;
+ STRING Filename;
- #ifdef __UNIX__
- File = Efopen (Filename.string(), "r");
- #else
- File = Efopen (Filename.string(), "rb");
- #endif
- PreTrainedTemplates = ReadIntTemplates (File);
- fclose(File);
+ Filename = language_data_path_prefix;
+ Filename += BuiltInTemplatesFile;
- Filename = language_data_path_prefix;
- Filename += BuiltInCutoffsFile;
- #ifndef SECURE_NAMES
- // cprintf( "\nReading built-in pico-feature cutoffs from %s ...\n",
- // Filename);
- // fflush(stdout);
- #endif
- ReadNewCutoffs (Filename.string(), CharNormCutoffs);
+#ifdef __UNIX__
+ File = Efopen(Filename.string(), "r");
+#else
+ File = Efopen(Filename.string(), "rb");
+#endif
+ PreTrainedTemplates = ReadIntTemplates(File);
+ fclose(File);
- GetNormProtos();
+ Filename = language_data_path_prefix;
+ Filename += BuiltInCutoffsFile;
+ ReadNewCutoffs(Filename.string(), CharNormCutoffs);
+
+ GetNormProtos();
+ }
InitIntegerMatcher();
InitIntegerFX();
- AllProtosOn = NewBitVector (MAX_NUM_PROTOS);
- PrunedProtos = NewBitVector (MAX_NUM_PROTOS);
- AllConfigsOn = NewBitVector (MAX_NUM_CONFIGS);
- AllProtosOff = NewBitVector (MAX_NUM_PROTOS);
- AllConfigsOff = NewBitVector (MAX_NUM_CONFIGS);
- TempProtoMask = NewBitVector (MAX_NUM_PROTOS);
- set_all_bits (AllProtosOn, WordsInVectorOfSize (MAX_NUM_PROTOS));
- set_all_bits (PrunedProtos, WordsInVectorOfSize (MAX_NUM_PROTOS));
- set_all_bits (AllConfigsOn, WordsInVectorOfSize (MAX_NUM_CONFIGS));
- zero_all_bits (AllProtosOff, WordsInVectorOfSize (MAX_NUM_PROTOS));
- zero_all_bits (AllConfigsOff, WordsInVectorOfSize (MAX_NUM_CONFIGS));
+ AllProtosOn = NewBitVector(MAX_NUM_PROTOS);
+ PrunedProtos = NewBitVector(MAX_NUM_PROTOS);
+ AllConfigsOn = NewBitVector(MAX_NUM_CONFIGS);
+ AllProtosOff = NewBitVector(MAX_NUM_PROTOS);
+ AllConfigsOff = NewBitVector(MAX_NUM_CONFIGS);
+ TempProtoMask = NewBitVector(MAX_NUM_PROTOS);
+ set_all_bits(AllProtosOn, WordsInVectorOfSize(MAX_NUM_PROTOS));
+ set_all_bits(PrunedProtos, WordsInVectorOfSize(MAX_NUM_PROTOS));
+ set_all_bits(AllConfigsOn, WordsInVectorOfSize(MAX_NUM_CONFIGS));
+ zero_all_bits(AllProtosOff, WordsInVectorOfSize(MAX_NUM_PROTOS));
+ zero_all_bits(AllConfigsOff, WordsInVectorOfSize(MAX_NUM_CONFIGS));
if (UsePreAdaptedTemplates) {
+ FILE *File;
+ STRING Filename;
+
Filename = imagefile;
Filename += ADAPT_TEMPLATE_SUFFIX;
- File = fopen (Filename.string(), "rb");
- if (File == NULL)
- AdaptedTemplates = NewAdaptedTemplates (true);
- else {
+ File = fopen(Filename.string(), "rb");
+ if (File == NULL) {
+ AdaptedTemplates = NewAdaptedTemplates(true);
+ } else {
#ifndef SECURE_NAMES
- cprintf ("\nReading pre-adapted templates from %s ...\n",
- Filename.string());
+ cprintf("\nReading pre-adapted templates from %s ...\n",
+ Filename.string());
fflush(stdout);
#endif
- AdaptedTemplates = ReadAdaptedTemplates (File);
- cprintf ("\n");
+ AdaptedTemplates = ReadAdaptedTemplates(File);
+ cprintf("\n");
fclose(File);
PrintAdaptedTemplates(stdout, AdaptedTemplates);
- for (i = 0; i < NumClassesIn (AdaptedTemplates->Templates); i++) {
+ for (int i = 0; i < NumClassesIn(AdaptedTemplates->Templates); i++) {
BaselineCutoffs[i] = CharNormCutoffs[i];
}
}
} else {
if (AdaptedTemplates != NULL)
free_adapted_templates(AdaptedTemplates);
- AdaptedTemplates = NewAdaptedTemplates (true);
+ AdaptedTemplates = NewAdaptedTemplates(true);
}
old_enable_learning = EnableLearning;
@@ -2130,7 +1912,6 @@ UNICHAR_ID *Classify::GetAmbiguities(TBLOB *Blob,
return (Ambiguities);
} /* GetAmbiguities */
-} // namespace tesseract
/*---------------------------------------------------------------------------*/
int GetBaselineFeatures(TBLOB *Blob,
@@ -2191,7 +1972,6 @@ int GetBaselineFeatures(TBLOB *Blob,
} /* GetBaselineFeatures */
-namespace tesseract {
FLOAT32 Classify::GetBestRatingFor(TBLOB *Blob,
LINE_STATS *LineStats,
CLASS_ID ClassId) {
@@ -2261,15 +2041,14 @@ FLOAT32 Classify::GetBestRatingFor(TBLOB *Blob,
return (MIN (BLResult.Rating, CNResult.Rating));
} /* GetBestRatingFor */
-} // namespace tesseract
/*---------------------------------------------------------------------------*/
-int GetCharNormFeatures(TBLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- inT32 *BlobLength) {
+int Classify::GetCharNormFeatures(TBLOB *Blob,
+ LINE_STATS *LineStats,
+ INT_TEMPLATES Templates,
+ INT_FEATURE_ARRAY IntFeatures,
+ CLASS_NORMALIZATION_ARRAY CharNormArray,
+ inT32 *BlobLength) {
/*
** Parameters:
** Blob
@@ -2365,12 +2144,12 @@ int GetIntBaselineFeatures(TBLOB *Blob,
} /* GetIntBaselineFeatures */
/*---------------------------------------------------------------------------*/
-int GetIntCharNormFeatures(TBLOB *Blob,
- LINE_STATS *LineStats,
- INT_TEMPLATES Templates,
- INT_FEATURE_ARRAY IntFeatures,
- CLASS_NORMALIZATION_ARRAY CharNormArray,
- inT32 *BlobLength) {
+int Classify::GetIntCharNormFeatures(TBLOB *Blob,
+ LINE_STATS *LineStats,
+ INT_TEMPLATES Templates,
+ INT_FEATURE_ARRAY IntFeatures,
+ CLASS_NORMALIZATION_ARRAY CharNormArray,
+ inT32 *BlobLength) {
/*
** Parameters:
** Blob
@@ -2440,7 +2219,7 @@ int GetIntCharNormFeatures(TBLOB *Blob,
} /* GetIntCharNormFeatures */
/*---------------------------------------------------------------------------*/
-int MakeNewTemporaryConfig(ADAPT_TEMPLATES Templates,
+int Classify::MakeNewTemporaryConfig(ADAPT_TEMPLATES Templates,
CLASS_ID ClassId,
int NumFeatures,
INT_FEATURE_ARRAY Features,
@@ -2536,14 +2315,15 @@ int MakeNewTemporaryConfig(ADAPT_TEMPLATES Templates,
return ConfigId;
} /* MakeNewTemporaryConfig */
+} // namespace tesseract
/*---------------------------------------------------------------------------*/
PROTO_ID
-MakeNewTempProtos (FEATURE_SET Features,
- int NumBadFeat,
- FEATURE_ID BadFeat[],
- INT_CLASS IClass,
- ADAPT_CLASS Class, BIT_VECTOR TempProtoMask) {
+MakeNewTempProtos(FEATURE_SET Features,
+ int NumBadFeat,
+ FEATURE_ID BadFeat[],
+ INT_CLASS IClass,
+ ADAPT_CLASS Class, BIT_VECTOR TempProtoMask) {
/*
** Parameters:
** Features
@@ -2581,49 +2361,49 @@ MakeNewTempProtos (FEATURE_SET Features,
for (ProtoStart = BadFeat, LastBad = ProtoStart + NumBadFeat;
ProtoStart < LastBad; ProtoStart = ProtoEnd) {
- F1 = FeatureIn (Features, *ProtoStart);
- X1 = ParamOf (F1, PicoFeatX);
- Y1 = ParamOf (F1, PicoFeatY);
- A1 = ParamOf (F1, PicoFeatDir);
+ F1 = FeatureIn(Features, *ProtoStart);
+ X1 = ParamOf(F1, PicoFeatX);
+ Y1 = ParamOf(F1, PicoFeatY);
+ A1 = ParamOf(F1, PicoFeatDir);
for (ProtoEnd = ProtoStart + 1,
- SegmentLength = GetPicoFeatureLength ();
+ SegmentLength = GetPicoFeatureLength();
ProtoEnd < LastBad;
- ProtoEnd++, SegmentLength += GetPicoFeatureLength ()) {
- F2 = FeatureIn (Features, *ProtoEnd);
- X2 = ParamOf (F2, PicoFeatX);
- Y2 = ParamOf (F2, PicoFeatY);
- A2 = ParamOf (F2, PicoFeatDir);
+ ProtoEnd++, SegmentLength += GetPicoFeatureLength()) {
+ F2 = FeatureIn(Features, *ProtoEnd);
+ X2 = ParamOf(F2, PicoFeatX);
+ Y2 = ParamOf(F2, PicoFeatY);
+ A2 = ParamOf(F2, PicoFeatDir);
- AngleDelta = fabs (A1 - A2);
+ AngleDelta = fabs(A1 - A2);
if (AngleDelta > 0.5)
AngleDelta = 1.0 - AngleDelta;
if (AngleDelta > matcher_clustering_max_angle_delta ||
- fabs (X1 - X2) > SegmentLength ||
- fabs (Y1 - Y2) > SegmentLength)
+ fabs(X1 - X2) > SegmentLength ||
+ fabs(Y1 - Y2) > SegmentLength)
break;
}
- F2 = FeatureIn (Features, *(ProtoEnd - 1));
- X2 = ParamOf (F2, PicoFeatX);
- Y2 = ParamOf (F2, PicoFeatY);
- A2 = ParamOf (F2, PicoFeatDir);
+ F2 = FeatureIn(Features, *(ProtoEnd - 1));
+ X2 = ParamOf(F2, PicoFeatX);
+ Y2 = ParamOf(F2, PicoFeatY);
+ A2 = ParamOf(F2, PicoFeatDir);
- Pid = AddIntProto (IClass);
+ Pid = AddIntProto(IClass);
if (Pid == NO_PROTO)
return (NO_PROTO);
- TempProto = NewTempProto ();
+ TempProto = NewTempProto();
Proto = &(TempProto->Proto);
/* compute proto params - NOTE that Y_DIM_OFFSET must be used because
ConvertProto assumes that the Y dimension varies from -0.5 to 0.5
instead of the -0.25 to 0.75 used in baseline normalization */
- ProtoLength (Proto) = SegmentLength;
- ProtoAngle (Proto) = A1;
- ProtoX (Proto) = (X1 + X2) / 2.0;
- ProtoY (Proto) = (Y1 + Y2) / 2.0 - Y_DIM_OFFSET;
+ ProtoLength(Proto) = SegmentLength;
+ ProtoAngle(Proto) = A1;
+ ProtoX(Proto) = (X1 + X2) / 2.0;
+ ProtoY(Proto) = (Y1 + Y2) / 2.0 - Y_DIM_OFFSET;
FillABC(Proto);
TempProto->ProtoId = Pid;
@@ -2632,9 +2412,9 @@ MakeNewTempProtos (FEATURE_SET Features,
ConvertProto(Proto, Pid, IClass);
AddProtoToProtoPruner(Proto, Pid, IClass);
- Class->TempProtos = push (Class->TempProtos, TempProto);
+ Class->TempProtos = push(Class->TempProtos, TempProto);
}
- return (NumIntProtosIn (IClass) - 1);
+ return (NumIntProtosIn(IClass) - 1);
} /* MakeNewTempProtos */
/*---------------------------------------------------------------------------*/
@@ -2668,7 +2448,7 @@ void Classify::MakePermanent(ADAPT_TEMPLATES Templates,
PROTO_KEY ProtoKey;
Class = Templates->Class[ClassId];
- Config = TempConfigFor (Class, ConfigId);
+ Config = TempConfigFor(Class, ConfigId);
MakeConfigPermanent(Class, ConfigId);
if (Class->NumPermConfigs == 0)
@@ -2678,24 +2458,23 @@ void Classify::MakePermanent(ADAPT_TEMPLATES Templates,
ProtoKey.Templates = Templates;
ProtoKey.ClassId = ClassId;
ProtoKey.ConfigId = ConfigId;
- Class->TempProtos = delete_d (Class->TempProtos, &ProtoKey,
- MakeTempProtoPerm);
+ Class->TempProtos = delete_d(Class->TempProtos, &ProtoKey,
+ MakeTempProtoPerm);
FreeTempConfig(Config);
- Ambigs = GetAmbiguities (Blob, LineStats, ClassId);
- PermConfigFor (Class, ConfigId) = Ambigs;
+ Ambigs = GetAmbiguities(Blob, LineStats, ClassId);
+ PermConfigFor(Class, ConfigId) = Ambigs;
if (LearningDebugLevel >= 1) {
- cprintf ("Making config %d permanent with ambiguities '",
- ConfigId, Ambigs);
+ cprintf("Making config %d permanent with ambiguities '",
+ ConfigId, Ambigs);
for (UNICHAR_ID *AmbigsPointer = Ambigs;
*AmbigsPointer >= 0; ++AmbigsPointer)
cprintf("%s", unicharset.id_to_unichar(*AmbigsPointer));
cprintf("'.\n");
}
-
} /* MakePermanent */
-} // namespace tesseract
+} // namespace tesseract
/*---------------------------------------------------------------------------*/
int MakeTempProtoPerm(void *item1, //TEMP_PROTO TempProto,
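
Note on the adaptmatch.cpp change above: InitAdaptiveClassifier() and EndAdaptiveClassifier() are now idempotent. The bit vectors and PreTrainedTemplates start out NULL, EndAdaptiveClassifier() frees only what was actually allocated, and a repeated init tears down the previous state first, so the Classify destructor (see classify.cpp below) can call EndAdaptiveClassifier() unconditionally. A minimal sketch of this NULL-sentinel pattern, with a single hypothetical buffer standing in for the AllProtosOn/PrunedProtos/... vectors:

#include <cstdlib>

class AdaptiveState {
 public:
  AdaptiveState() : bit_vector_(NULL) {}
  ~AdaptiveState() { End(); }            // mirrors Classify::~Classify()
  void Init() {
    if (bit_vector_ != NULL)
      End();                             // don't leak on a second Init()
    bit_vector_ = static_cast<int*>(malloc(16 * sizeof(int)));
  }
  void End() {
    if (bit_vector_ != NULL) {           // safe to call twice or before Init()
      free(bit_vector_);
      bit_vector_ = NULL;
    }
  }
 private:
  int* bit_vector_;
};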
diff --git a/classify/classify.cpp b/classify/classify.cpp
index 9cec281..09dfa5c 100644
--- a/classify/classify.cpp
+++ b/classify/classify.cpp
@@ -65,9 +65,17 @@ Classify::Classify()
NewPermanentCallback(delete_callback_fs));
AdaptedTemplates = NULL;
PreTrainedTemplates = NULL;
+ AllProtosOn = NULL;
+ PrunedProtos = NULL;
+ AllConfigsOn = NULL;
+ AllProtosOff = NULL;
+ AllConfigsOff = NULL;
+ TempProtoMask = NULL;
+ NormProtos = NULL;
}
Classify::~Classify() {
+ EndAdaptiveClassifier();
}
} // namespace tesseract
diff --git a/classify/classify.h b/classify/classify.h
index 03b7afe..3c3d67c 100644
--- a/classify/classify.h
+++ b/classify/classify.h
@@ -26,6 +26,7 @@
#include "fxdefs.h"
#include "intmatcher.h"
#include "ratngs.h"
+#include "ocrfeatures.h"
#include "unicity_table.h"
class WERD_CHOICE;
@@ -55,7 +56,9 @@ class Classify : public CCStruct {
void WriteAdaptedTemplates(FILE *File, ADAPT_TEMPLATES Templates);
ADAPT_TEMPLATES ReadAdaptedTemplates(FILE *File);
/* normmatch.cpp ************************************************************/
+ FLOAT32 ComputeNormMatch(CLASS_ID ClassId, FEATURE Feature, BOOL8 DebugMatch);
void GetNormProtos();
+ void FreeNormProtos();
NORM_PROTOS *ReadNormProtos(FILE *File);
/* protos.cpp ***************************************************************/
void ReadClassFile();
@@ -108,6 +111,12 @@ class Classify : public CCStruct {
LINE_STATS * LineStats,
const WERD_CHOICE& BestChoice,
const WERD_CHOICE& BestRawChoice, FLOAT32 Thresholds[]);
+
+int MakeNewTemporaryConfig(ADAPT_TEMPLATES Templates,
+ CLASS_ID ClassId,
+ int NumFeatures,
+ INT_FEATURE_ARRAY Features,
+ FEATURE_SET FloatFeatures);
void MakePermanent(ADAPT_TEMPLATES Templates,
CLASS_ID ClassId,
int ConfigId,
@@ -154,6 +163,23 @@ class Classify : public CCStruct {
FLOAT32 GetBestRatingFor(TBLOB *Blob,
LINE_STATS *LineStats,
CLASS_ID ClassId);
+ int GetCharNormFeatures(TBLOB *Blob,
+ LINE_STATS *LineStats,
+ INT_TEMPLATES Templates,
+ INT_FEATURE_ARRAY IntFeatures,
+ CLASS_NORMALIZATION_ARRAY CharNormArray,
+ inT32 *BlobLength);
+ int GetIntCharNormFeatures(TBLOB *Blob,
+ LINE_STATS *LineStats,
+ INT_TEMPLATES Templates,
+ INT_FEATURE_ARRAY IntFeatures,
+ CLASS_NORMALIZATION_ARRAY CharNormArray,
+ inT32 *BlobLength);
+
+ /* float2int.cpp ************************************************************/
+ void ComputeIntCharNormArray(FEATURE NormFeature,
+ INT_TEMPLATES Templates,
+ CLASS_NORMALIZATION_ARRAY CharNormArray);
/* intproto.cpp *************************************************************/
INT_TEMPLATES ReadIntTemplates(FILE *File);
void WriteIntTemplates(FILE *File, INT_TEMPLATES Templates,
@@ -173,6 +199,16 @@ class Classify : public CCStruct {
templates */
INT_TEMPLATES PreTrainedTemplates;
ADAPT_TEMPLATES AdaptedTemplates;
+
+ /* create dummy proto and config masks for use with the built-in templates */
+ BIT_VECTOR AllProtosOn;
+ BIT_VECTOR PrunedProtos;
+ BIT_VECTOR AllConfigsOn;
+ BIT_VECTOR AllProtosOff;
+ BIT_VECTOR AllConfigsOff;
+ BIT_VECTOR TempProtoMask;
+ /* normmatch.cpp */
+ NORM_PROTOS *NormProtos;
/* font detection ***********************************************************/
UnicityTable<FontInfo> fontinfo_table_;
UnicityTable<FontSet> fontset_table_;
diff --git a/classify/float2int.cpp b/classify/float2int.cpp
index b52237c..21e7dc5 100644
--- a/classify/float2int.cpp
+++ b/classify/float2int.cpp
@@ -21,6 +21,7 @@
#include "float2int.h"
#include "normmatch.h"
#include "mfoutline.h"
+#include "classify.h"
#include "picofeat.h"
#define MAX_INT_CHAR_NORM (INT_CHAR_NORM_RANGE - 1)
@@ -29,6 +30,8 @@
Public Code
----------------------------------------------------------------------------**/
/*---------------------------------------------------------------------------*/
+namespace tesseract {
+
void ClearCharNormArray(INT_TEMPLATES Templates,
CLASS_NORMALIZATION_ARRAY CharNormArray) {
/*
@@ -53,9 +56,9 @@ void ClearCharNormArray(INT_TEMPLATES Templates,
/*---------------------------------------------------------------------------*/
-void ComputeIntCharNormArray(FEATURE NormFeature,
- INT_TEMPLATES Templates,
- CLASS_NORMALIZATION_ARRAY CharNormArray) {
+void Classify::ComputeIntCharNormArray(
+ FEATURE NormFeature, INT_TEMPLATES Templates,
+ CLASS_NORMALIZATION_ARRAY CharNormArray) {
/*
** Parameters:
** NormFeature character normalization feature
@@ -84,9 +87,9 @@ void ComputeIntCharNormArray(FEATURE NormFeature,
CharNormArray[i] = NormAdjust;
}
-
} /* ComputeIntCharNormArray */
+} // namespace tesseract
/*---------------------------------------------------------------------------*/
void ComputeIntFeatures(FEATURE_SET Features, INT_FEATURE_ARRAY IntFeatures) {
diff --git a/classify/float2int.h b/classify/float2int.h
index 60d1dea..0c6e42c 100644
--- a/classify/float2int.h
+++ b/classify/float2int.h
@@ -30,36 +30,11 @@
/**----------------------------------------------------------------------------
Public Function Prototypes
----------------------------------------------------------------------------**/
+namespace tesseract {
void ClearCharNormArray(INT_TEMPLATES Templates,
CLASS_NORMALIZATION_ARRAY CharNormArray);
+} // namespace tesseract.
-void ComputeIntCharNormArray(FEATURE NormFeature,
- INT_TEMPLATES Templates,
- CLASS_NORMALIZATION_ARRAY CharNormArray);
+void ComputeIntFeatures(FEATURE_SET Features, INT_FEATURE_ARRAY IntFeatures);
-void ComputeIntFeatures(FEATURE_SET Features, INT_FEATURE_ARRAY IntFeatures);
-
-/*
-#if defined(__STDC__) || defined(__cplusplus)
-# define _ARGS(s) s
-#else
-# define _ARGS(s) ()
-#endif*/
-
-/* float2int.c
-void ClearCharNormArray
- _ARGS((INT_TEMPLATES Templates,
- CLASS_NORMALIZATION_ARRAY CharNormArray));
-
-void ComputeIntCharNormArray
- _ARGS((FEATURE NormFeature,
- INT_TEMPLATES Templates,
- CLASS_NORMALIZATION_ARRAY CharNormArray));
-
-void ComputeIntFeatures
- _ARGS((FEATURE_SET Features,
- INT_FEATURE_ARRAY IntFeatures));
-
-#undef _ARGS
-*/
#endif
diff --git a/classify/normmatch.cpp b/classify/normmatch.cpp
index 2893631..1378271 100644
--- a/classify/normmatch.cpp
+++ b/classify/normmatch.cpp
@@ -58,8 +58,6 @@ NORM_PROTOS *ReadNormProtos(FILE *File);
/**----------------------------------------------------------------------------
Global Data Definitions and Declarations
----------------------------------------------------------------------------**/
-/* global data structure to hold char normalization protos */
-static NORM_PROTOS *NormProtos;
/* name of file containing char normalization protos */
static const char *NormProtoFile = NORM_PROTO_FILE;
@@ -73,7 +71,9 @@ make_float_var (NormAdjCurl, 2.0, MakeNormAdjCurl,
Public Code
----------------------------------------------------------------------------**/
/*---------------------------------------------------------------------------*/
-FLOAT32 ComputeNormMatch(CLASS_ID ClassId, FEATURE Feature, BOOL8 DebugMatch) {
+namespace tesseract {
+FLOAT32 Classify::ComputeNormMatch(CLASS_ID ClassId, FEATURE Feature,
+ BOOL8 DebugMatch) {
/*
** Parameters:
** ClassId id of class to match against
@@ -142,7 +142,6 @@ FLOAT32 ComputeNormMatch(CLASS_ID ClassId, FEATURE Feature, BOOL8 DebugMatch) {
/*---------------------------------------------------------------------------*/
-namespace tesseract {
void Classify::GetNormProtos() {
/*
** Parameters: none
@@ -165,9 +164,8 @@ void Classify::GetNormProtos() {
fclose(File);
} /* GetNormProtos */
-} // namespace tesseract
-void FreeNormProtos() {
+void Classify::FreeNormProtos() {
if (NormProtos != NULL) {
for (int i = 0; i < NormProtos->NumProtos; i++)
FreeProtoList(&NormProtos->Protos[i]);
@@ -177,6 +175,7 @@ void FreeNormProtos() {
NormProtos = NULL;
}
}
+} // namespace tesseract
/*---------------------------------------------------------------------------*/
void InitNormProtoVars() {
diff --git a/classify/normmatch.h b/classify/normmatch.h
index 4d73894..722c797 100644
--- a/classify/normmatch.h
+++ b/classify/normmatch.h
@@ -28,31 +28,7 @@
/**----------------------------------------------------------------------------
Public Function Prototypes
----------------------------------------------------------------------------**/
-FLOAT32 ComputeNormMatch(CLASS_ID ClassId, FEATURE Feature, BOOL8 DebugMatch);
-
-void FreeNormProtos();
void InitNormProtoVars();
-/*
-#if defined(__STDC__) || defined(__cplusplus)
-# define _ARGS(s) s
-#else
-# define _ARGS(s) ()
-#endif*/
-
-/* normmatch.c *
-FLOAT32 ComputeNormMatch
- _ARGS((CLASS_ID ClassId,
- FEATURE Feature,
- BOOL8 DebugMatch));
-
-void GetNormProtos
- _ARGS((void));
-
-void InitNormProtoVars
- _ARGS((void));
-
-#undef _ARGS
-*/
#endif
diff --git a/image/imgs.cpp b/image/imgs.cpp
index 1984d7c..957811d 100644
--- a/image/imgs.cpp
+++ b/image/imgs.cpp
@@ -249,8 +249,9 @@ inT32 check_legal_image_size( //get rest of image
return -1; //failed
}
if (bits_per_pixel != 1 && bits_per_pixel != 2
- && bits_per_pixel != 4 && bits_per_pixel != 5
- && bits_per_pixel != 6 && bits_per_pixel != 8 && bits_per_pixel != 24) {
+ && bits_per_pixel != 4 && bits_per_pixel != 5
+ && bits_per_pixel != 6 && bits_per_pixel != 8 && bits_per_pixel != 24
+ && bits_per_pixel != 32) {
BADBPP.error ("check_legal_image_size", TESSLOG, "%d", bits_per_pixel);
return -1;
}
diff --git a/image/imgtiff.cpp b/image/imgtiff.cpp
index 83b01ae..1bde37a 100644
--- a/image/imgtiff.cpp
+++ b/image/imgtiff.cpp
@@ -229,6 +229,8 @@ inT8 open_tif_image( //read header
// printf("No of tiff directory entries=%d\n",entries);
imagestart = 0;
compressed = FALSE;
+ int samples_per_pixel = 1;
+ int bits_per_sample = 1;
for (; entries-- > 0;) {
if (read (fd, (char *) &tiffentry, sizeof tiffentry) !=
sizeof tiffentry) {
@@ -272,9 +274,12 @@ inT8 open_tif_image( //read header
break;
case 0x102:
if (tiffentry.length == 1)
- *bpp = (inT8) tiffentry.value;
+ bits_per_sample = (inT8) tiffentry.value;
else
- *bpp = 24;
+ bits_per_sample = 8;
+ break;
+ case 0x115:
+ samples_per_pixel = (inT8) tiffentry.value;
break;
case 0x111:
imagestart = tiffentry.value;
@@ -299,12 +304,14 @@ inT8 open_tif_image( //read header
break;
} //endswitch
}
- if (*xsize <= 0 || *ysize <= 0 || *bpp > 24 || imagestart <= 0) {
+ if (*xsize <= 0 || *ysize <= 0 || imagestart <= 0) {
BADIMAGEFORMAT.error ("read_tif_image", TESSLOG, "Vital tag");
return -1;
}
- tprintf ("Image has %d bit%c per pixel and size (%d,%d)\n",
- *bpp, *bpp == 1 ? ' ' : 's', *xsize, *ysize);
+ tprintf("Image has %d * %d bit%c per pixel, and size (%d,%d)\n",
+ bits_per_sample, samples_per_pixel, bits_per_sample == 1 ? ' ' : 's',
+ *xsize, *ysize);
+ *bpp = bits_per_sample * samples_per_pixel;
if (resoffset >= 0) {
lseek (fd, resoffset, 0);
if (read (fd, (char *) &resinfo, sizeof (resinfo)) != sizeof (resinfo)) {
diff --git a/neural_networks/runtime/BUILD b/neural_networks/runtime/BUILD
new file mode 100755
index 0000000..4ce5364
--- /dev/null
+++ b/neural_networks/runtime/BUILD
@@ -0,0 +1,16 @@
+# -*- mode: python; -*-
+
+licenses(['notice']) # Apache 2.0
+
+#-----------------------------------------------------------------------------
+# LIBRARIES
+
+cc_library(name = "nn_runtime",
+ srcs = [
+ "neural_net.cpp",
+ "neuron.cpp",
+ "sigmoid_table.cpp",
+ "input_file_buffer.cpp"
+ ],
+ deps = [ "//third_party/stl"] )
+
diff --git a/neural_networks/runtime/input_file_buffer.cpp b/neural_networks/runtime/input_file_buffer.cpp
new file mode 100644
index 0000000..c3ca67b
--- /dev/null
+++ b/neural_networks/runtime/input_file_buffer.cpp
@@ -0,0 +1,36 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// input_file_buffer.cpp: Implementation of a class for an object that
+// represents an input file buffer.
+
+#include <string>
+#include "input_file_buffer.h"
+
+namespace tesseract {
+// default and only constructor
+InputFileBuffer::InputFileBuffer(const string &file_name)
+ : file_name_(file_name) {
+ fp_ = NULL;
+}
+
+// virtual destructor
+InputFileBuffer::~InputFileBuffer() {
+ if (fp_ != NULL) {
+ fclose(fp_);
+ }
+}
+
+// Read the specified number of bytes to the specified input buffer
+int InputFileBuffer::Read(void *buffer, int bytes_to_read) {
+ // open the file if necessary
+ if (fp_ == NULL) {
+ fp_ = fopen(file_name_.c_str(), "rb");
+ if (fp_ == NULL) {
+ return 0;
+ }
+ }
+ return fread(buffer, 1, bytes_to_read, fp_);
+}
+}
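
Usage note for InputFileBuffer: Read() opens the file lazily on first use and returns the number of bytes actually read, so callers compare the result against the requested size; the destructor closes the file. A short hedged example (the file name is illustrative):

#include "input_file_buffer.h"

// Reads a 4-byte signature from a hypothetical net file.
static bool ReadSignature(unsigned int* signature) {
  tesseract::InputFileBuffer buffer("net.bin");   // example path, assumed
  return buffer.Read(signature, sizeof(*signature)) ==
         static_cast<int>(sizeof(*signature));
}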
diff --git a/neural_networks/runtime/input_file_buffer.h b/neural_networks/runtime/input_file_buffer.h
new file mode 100644
index 0000000..b512aee
--- /dev/null
+++ b/neural_networks/runtime/input_file_buffer.h
@@ -0,0 +1,28 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// input_file_buffer.h: Declarations of a class for an object that
+// represents an input file buffer.
+//
+
+#ifndef INPUT_FILE_BUFFER_H
+#define INPUT_FILE_BUFFER_H
+
+#include <stdio.h>
+#include <string>
+
+namespace tesseract {
+class InputFileBuffer {
+ public:
+ explicit InputFileBuffer(const string &file_name);
+ virtual ~InputFileBuffer();
+ int Read(void *buffer, int bytes_to_read);
+
+ protected:
+ string file_name_;
+ FILE *fp_;
+};
+}
+
+#endif // INPUT_FILE_BUFFER_H__
diff --git a/neural_networks/runtime/neural_net.cpp b/neural_networks/runtime/neural_net.cpp
new file mode 100644
index 0000000..d0b8a17
--- /dev/null
+++ b/neural_networks/runtime/neural_net.cpp
@@ -0,0 +1,220 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// neural_net.cpp: Implementation of a class for an object that
+// represents an arbitrary network of neurons
+//
+#include <vector>
+#include <string>
+#include "neural_net.h"
+#include "input_file_buffer.h"
+
+namespace tesseract {
+
+// Instantiate all supported templates
+template bool NeuralNet::FeedForward(const float *inputs, float *outputs);
+template bool NeuralNet::FeedForward(const double *inputs, double *outputs);
+template bool NeuralNet::FastFeedForward(const float *inputs, float *outputs);
+template bool NeuralNet::FastFeedForward(const double *inputs,
+ double *outputs);
+template bool NeuralNet::ReadBinary(InputFileBuffer *input_buffer);
+
+NeuralNet::NeuralNet() {
+ Init();
+}
+
+NeuralNet::~NeuralNet() {
+ // clean up the wts chunks vector
+ for(int vec = 0; vec < wts_vec_.size(); vec++) {
+ delete wts_vec_[vec];
+ }
+ // clean up neurons
+ delete []neurons_;
+ // clean up nodes
+ for (int node_idx = 0; node_idx < neuron_cnt_; node_idx++) {
+ delete []fast_nodes_[node_idx].inputs;
+ }
+
+}
+
+// Initialization function
+void NeuralNet::Init() {
+ read_only_ = true;
+ auto_encoder_ = false;
+ alloc_wgt_cnt_ = 0;
+ wts_cnt_ = 0;
+ neuron_cnt_ = 0;
+ in_cnt_ = 0;
+ out_cnt_ = 0;
+ wts_vec_.clear();
+ neurons_ = NULL;
+ inputs_mean_.clear();
+ inputs_std_dev_.clear();
+ inputs_min_.clear();
+ inputs_max_.clear();
+}
+
+// Does a fast feedforward for read_only nets
+// Templatized for float and double Types
+template <typename Type> bool NeuralNet::FastFeedForward(const Type *inputs,
+ Type *outputs) {
+ int node_idx = 0;
+ Node *node = &fast_nodes_[0];
+ // feed inputs in and offset them by the pre-computed bias
+ for (node_idx = 0; node_idx < in_cnt_; node_idx++, node++) {
+ node->out = inputs[node_idx] - node->bias;
+ }
+ // compute nodes activations and outputs
+ for (;node_idx < neuron_cnt_; node_idx++, node++) {
+ double activation = -node->bias;
+ for (int fan_in_idx = 0; fan_in_idx < node->fan_in_cnt; fan_in_idx++) {
+ activation += (node->inputs[fan_in_idx].input_weight *
+ node->inputs[fan_in_idx].input_node->out);
+ }
+ node->out = Neuron::Sigmoid(activation);
+ }
+ // copy the outputs to the output buffers
+ node = &fast_nodes_[neuron_cnt_ - out_cnt_];
+ for (node_idx = 0; node_idx < out_cnt_; node_idx++, node++) {
+ outputs[node_idx] = node->out;
+ }
+ return true;
+}
+
+// Performs a feedforward for general nets. Used mainly in training mode
+// Templatized for float and double Types
+template <typename Type> bool NeuralNet::FeedForward(const Type *inputs,
+ Type *outputs) {
+ // call the fast version in case of readonly nets
+ if (read_only_) {
+ return FastFeedForward(inputs, outputs);
+ }
+ // clear all neurons
+ Clear();
+ // for auto encoders, apply no input normalization
+ if (auto_encoder_) {
+ for (int in = 0; in < in_cnt_; in++) {
+ neurons_[in].set_output(inputs[in]);
+ }
+ } else {
+ // Input normalization : subtract mean and divide by stddev
+ for (int in = 0; in < in_cnt_; in++) {
+ neurons_[in].set_output((inputs[in] - inputs_min_[in]) /
+ (inputs_max_[in] - inputs_min_[in]));
+ neurons_[in].set_output((neurons_[in].output() - inputs_mean_[in]) /
+ inputs_std_dev_[in]);
+ }
+ }
+ // compute the net outputs: follow a pull model each output pulls the
+ // outputs of its input nodes and so on
+ for (int out = neuron_cnt_ - out_cnt_; out < neuron_cnt_; out++) {
+ neurons_[out].FeedForward();
+ // copy the values to the output buffer
+ outputs[out] = neurons_[out].output();
+ }
+ return true;
+}
+
+// Sets a connection between two neurons
+bool NeuralNet::SetConnection(int from, int to) {
+ // allocate the wgt
+ float *wts = AllocWgt(1);
+ if (wts == NULL) {
+ return false;
+ }
+ // register the connection
+ neurons_[to].AddFromConnection(neurons_ + from, wts, 1);
+ return true;
+}
+
+// Create a fast readonly version of the net
+bool NeuralNet::CreateFastNet() {
+ fast_nodes_.resize(neuron_cnt_);
+ // build the node structures
+ int wts_cnt = 0;
+ for (int node_idx = 0; node_idx < neuron_cnt_; node_idx++) {
+ Node *node = &fast_nodes_[node_idx];
+ if (neurons_[node_idx].node_type() == Neuron::Input) {
+ // Input neurons have no fan-in
+ node->fan_in_cnt = 0;
+ node->inputs = NULL;
+ // Input bias is the normalization offset computed from
+ // training input stats
+ node->bias = inputs_min_[node_idx] +
+ (inputs_mean_[node_idx] *
+ (inputs_max_[node_idx] - inputs_min_[node_idx]));
+ } else {
+ node->bias = neurons_[node_idx].bias();
+ node->fan_in_cnt = neurons_[node_idx].fan_in_cnt();
+ // allocate memory for fan-in nodes
+ node->inputs = new WeightedNode[node->fan_in_cnt];
+ if (node->inputs == NULL) {
+ return false;
+ }
+ for (int fan_in = 0; fan_in < node->fan_in_cnt; fan_in++) {
+ // identify fan-in neuron
+ const int id = neurons_[node_idx].fan_in(fan_in)->id();
+ // Feedback connections are not allowed and should never happen
+ if (id >= node_idx) {
+ return false;
+ }
+ // add the fan-in neuron and its wgt
+ node->inputs[fan_in].input_node = &fast_nodes_[id];
+ float wgt_val = neurons_[node_idx].fan_in_wts(fan_in);
+ // for input neurons normalize the wgt by the input scaling
+ // values to save time during feedforward
+ if (neurons_[node_idx].fan_in(fan_in)->node_type() == Neuron::Input) {
+ wgt_val /= ((inputs_max_[id] - inputs_min_[id]) *
+ inputs_std_dev_[id]);
+ }
+ node->inputs[fan_in].input_weight = wgt_val;
+ }
+ // incr wgt count to validate against at the end
+ wts_cnt += node->fan_in_cnt;
+ }
+ }
+ // sanity check
+ return wts_cnt_ == wts_cnt;
+}
+
+// returns a pointer to the requested set of weights
+// Allocates in chunks
+float * NeuralNet::AllocWgt(int wgt_cnt) {
+ // see if we need to allocate a new chunk of wts
+ if (wts_vec_.size() == 0 || (alloc_wgt_cnt_ + wgt_cnt) > kWgtChunkSize) {
+ // add the new chunk to the wts_vec_ vector
+ wts_vec_.push_back(new vector<float> (kWgtChunkSize));
+ alloc_wgt_cnt_ = 0;
+ }
+ float *ret_ptr = &((*wts_vec_.back())[alloc_wgt_cnt_]);
+ // incr usage counts
+ alloc_wgt_cnt_ += wgt_cnt;
+ wts_cnt_ += wgt_cnt;
+ return ret_ptr;
+}
+
+// create a new net object using an input file as a source
+NeuralNet *NeuralNet::FromFile(const string file_name) {
+ // open the file
+ InputFileBuffer input_buff(file_name);
+ // create a new net object using input buffer
+ NeuralNet *net_obj = FromInputBuffer(&input_buff);
+ return net_obj;
+}
+
+// create a net object from an input buffer
+NeuralNet *NeuralNet::FromInputBuffer(InputFileBuffer *ib) {
+ // create a new net object
+ NeuralNet *net_obj = new NeuralNet();
+ if (net_obj == NULL) {
+ return NULL;
+ }
+ // load the net
+ if (!net_obj->ReadBinary(ib)) {
+ delete net_obj;
+ net_obj = NULL;
+ }
+ return net_obj;
+}
+}
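
Usage note for NeuralNet: a typical read-only client loads the net once with FromFile() (which delegates to FromInputBuffer() and ReadBinary()) and then calls FeedForward() per sample; for read-only nets FeedForward() dispatches to the faster FastFeedForward() internally. A hedged sketch (the file name and caller are illustrative):

#include <vector>
#include "neural_net.h"

// Loads a net from a hypothetical file and classifies one feature vector.
// 'features' must point at net->in_cnt() values.
static bool RunNet(const float* features, std::vector<float>* outputs) {
  tesseract::NeuralNet* net = tesseract::NeuralNet::FromFile("char_net.bin");
  if (net == NULL)
    return false;                       // bad path or malformed binary
  outputs->resize(net->out_cnt());
  bool ok = net->FeedForward(features, &(*outputs)[0]);
  delete net;
  return ok;
}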
diff --git a/neural_networks/runtime/neural_net.h b/neural_networks/runtime/neural_net.h
new file mode 100644
index 0000000..959910a
--- /dev/null
+++ b/neural_networks/runtime/neural_net.h
@@ -0,0 +1,229 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// neural_net.h: Declarations of a class for an object that
+// represents an arbitrary network of neurons
+//
+
+#ifndef NEURAL_NET_H
+#define NEURAL_NET_H
+
+#include <string>
+#include <vector>
+#include "neuron.h"
+#include "input_file_buffer.h"
+
+namespace tesseract {
+class NeuralNet {
+ public:
+ NeuralNet();
+ virtual ~NeuralNet();
+ // create a net object from a file. Uses stdio
+ static NeuralNet *FromFile(const string file_name);
+ // create a net object from an input buffer
+ static NeuralNet *FromInputBuffer(InputFileBuffer *ib);
+ // Different flavors of feed forward function
+ template <typename Type> bool FeedForward(const Type *inputs,
+ Type *outputs);
+ // Accessor functions
+ int in_cnt() const { return in_cnt_; }
+ int out_cnt() const { return out_cnt_; }
+
+ protected:
+ struct Node;
+ // A node-weight pair
+ struct WeightedNode {
+ Node *input_node;
+ float input_weight;
+ };
+ // node struct used for fast feedforward in
+ // Read only nets
+ struct Node {
+ float out;
+ float bias;
+ int fan_in_cnt;
+ WeightedNode *inputs;
+ };
+ // Read-Only flag (no training: On by default)
+ // will presumably be set to false by
+ // the inheriting TrainableNeuralNet class
+ bool read_only_;
+ // input count
+ int in_cnt_;
+ // output count
+ int out_cnt_;
+ // Total neuron count (including inputs)
+ int neuron_cnt_;
+ // count of unique weights
+ int wts_cnt_;
+ // Neuron vector
+ Neuron *neurons_;
+ // size of allocated weight chunk (in weights)
+ // This is basically the size of the biggest network
+ // that I have trained. However, the class will allow
+ // a bigger sized net if desired
+ static const int kWgtChunkSize = 0x10000;
+ // Magic number expected at the beginning of the NN
+ // binary file
+ static const unsigned int kNetSignature = 0xFEFEABD0;
+ // count of allocated wgts in the last chunk
+ int alloc_wgt_cnt_;
+ // vector of weights buffers
+ vector<vector<float> *>wts_vec_;
+ // Is the net an auto-encoder type
+ bool auto_encoder_;
+ // vector of input max values
+ vector<float> inputs_max_;
+ // vector of input min values
+ vector<float> inputs_min_;
+ // vector of input mean values
+ vector<float> inputs_mean_;
+ // vector of input standard deviation values
+ vector<float> inputs_std_dev_;
+ // vector of node structs used by the fast read-only
+ // feedforward function
+ vector<Node> fast_nodes_;
+ // Network Initialization function
+ void Init();
+ // Clears all neurons
+ void Clear() {
+ for (int node = 0; node < neuron_cnt_; node++) {
+ neurons_[node].Clear();
+ }
+ }
+ // Reads the net from an input buffer
+ template<class ReadBuffType> bool ReadBinary(ReadBuffType *input_buff) {
+ // Init vars
+ Init();
+ // is this an autoencoder
+ unsigned int read_val;
+ unsigned int auto_encode;
+ // read and verify signature
+ if (input_buff->Read(&read_val, sizeof(read_val)) != sizeof(read_val)) {
+ return false;
+ }
+ if (read_val != kNetSignature) {
+ return false;
+ }
+ if (input_buff->Read(&auto_encode, sizeof(auto_encode)) !=
+ sizeof(auto_encode)) {
+ return false;
+ }
+ auto_encoder_ = auto_encode;
+ // read and validate total # of nodes
+ if (input_buff->Read(&read_val, sizeof(read_val)) != sizeof(read_val)) {
+ return false;
+ }
+ neuron_cnt_ = read_val;
+ if (neuron_cnt_ <= 0) {
+ return false;
+ }
+ // set the size of the neurons vector
+ neurons_ = new Neuron[neuron_cnt_];
+ if (neurons_ == NULL) {
+ return false;
+ }
+ // read & validate inputs
+ if (input_buff->Read(&read_val, sizeof(read_val)) != sizeof(read_val)) {
+ return false;
+ }
+ in_cnt_ = read_val;
+ if (in_cnt_ <= 0) {
+ return false;
+ }
+ // read outputs
+ if (input_buff->Read(&read_val, sizeof(read_val)) != sizeof(read_val)) {
+ return false;
+ }
+ out_cnt_ = read_val;
+ if (out_cnt_ <= 0) {
+ return false;
+ }
+ // set neuron ids and types
+ for (int idx = 0; idx < neuron_cnt_; idx++) {
+ neurons_[idx].set_id(idx);
+ // input type
+ if (idx < in_cnt_) {
+ neurons_[idx].set_node_type(Neuron::Input);
+ } else if (idx >= (neuron_cnt_ - out_cnt_)) {
+ neurons_[idx].set_node_type(Neuron::Output);
+ } else {
+ neurons_[idx].set_node_type(Neuron::Hidden);
+ }
+ }
+ // read the connections
+ for (int node_idx = 0; node_idx < neuron_cnt_; node_idx++) {
+ // read fanout
+ if (input_buff->Read(&read_val, sizeof(read_val)) != sizeof(read_val)) {
+ return false;
+ }
+ // read the neuron's info
+ int fan_out_cnt = read_val;
+ for (int fan_out_idx = 0; fan_out_idx < fan_out_cnt; fan_out_idx++) {
+ // read the neuron id
+ if (input_buff->Read(&read_val, sizeof(read_val)) != sizeof(read_val)) {
+ return false;
+ }
+ // create the connection
+ if (!SetConnection(node_idx, read_val)) {
+ return false;
+ }
+ }
+ }
+ // read all the neurons' fan-in connections
+ for (int node_idx = 0; node_idx < neuron_cnt_; node_idx++) {
+ // read
+ if (!neurons_[node_idx].ReadBinary(input_buff)) {
+ return false;
+ }
+ }
+ // size input stats vector to expected input size
+ inputs_mean_.resize(in_cnt_);
+ inputs_std_dev_.resize(in_cnt_);
+ inputs_min_.resize(in_cnt_);
+ inputs_max_.resize(in_cnt_);
+ // read stats
+ if (input_buff->Read(&(inputs_mean_.front()),
+ sizeof(inputs_mean_[0]) * in_cnt_) !=
+ sizeof(inputs_mean_[0]) * in_cnt_) {
+ return false;
+ }
+ if (input_buff->Read(&(inputs_std_dev_.front()),
+ sizeof(inputs_std_dev_[0]) * in_cnt_) !=
+ sizeof(inputs_std_dev_[0]) * in_cnt_) {
+ return false;
+ }
+ if (input_buff->Read(&(inputs_min_.front()),
+ sizeof(inputs_min_[0]) * in_cnt_) !=
+ sizeof(inputs_min_[0]) * in_cnt_) {
+ return false;
+ }
+ if (input_buff->Read(&(inputs_max_.front()),
+ sizeof(inputs_max_[0]) * in_cnt_) !=
+ sizeof(inputs_max_[0]) * in_cnt_) {
+ return false;
+ }
+ // create a readonly version for fast feedforward
+ if (read_only_) {
+ return CreateFastNet();
+ }
+ return true;
+ }
+
+ // creates a connection between two nodes
+ bool SetConnection(int from, int to);
+ // Create a read only version of the net that
+ // has faster feedforward performance
+ bool CreateFastNet();
+ // internal function to allocate a new set of weights
+ // Centralized weight allocation attempts to increase
+ // weights locality of reference making it more cache friendly
+ float *AllocWgt(int wgt_cnt);
+ // different flavors read-only feedforward function
+ template <typename Type> bool FastFeedForward(const Type *inputs,
+ Type *outputs);
+};
+}
+
+#endif // NEURAL_NET_H__
diff --git a/neural_networks/runtime/neuron.cpp b/neural_networks/runtime/neuron.cpp
new file mode 100644
index 0000000..3630908
--- /dev/null
+++ b/neural_networks/runtime/neuron.cpp
@@ -0,0 +1,94 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// neuron.cpp: The implementation of a class for an object
+// that represents a single neuron in a neural network
+
+#include "neuron.h"
+#include "input_file_buffer.h"
+
+namespace tesseract {
+
+// Instantiate all supported templates
+template bool Neuron::ReadBinary(InputFileBuffer *input_buffer);
+
+// default and only constructor
+Neuron::Neuron() {
+ Init();
+}
+
+// virtual destructor
+Neuron::~Neuron() {
+}
+
+// Initializer
+void Neuron::Init() {
+ id_ = -1;
+ frwd_dirty_ = false;
+ fan_in_.clear();
+ fan_in_weights_.clear();
+ activation_ = 0.0f;
+ output_ = 0.0f;
+ bias_ = 0.0f;
+ node_type_ = Unknown;
+}
+
+// Computes the activation and output of the neuron if not fresh
+// by pulling the outputs of all fan-in neurons
+void Neuron::FeedForward() {
+ if (!frwd_dirty_ ) {
+ return;
+ }
+ // nothing to do for input nodes: just pass the input to the o/p
+ // otherwise, pull the output of all fan-in neurons
+ if (node_type_ != Input) {
+ int fan_in_cnt = fan_in_.size();
+ // sum out the activation
+ activation_ = -bias_;
+ for (int in = 0; in < fan_in_cnt; in++) {
+ if (fan_in_[in]->frwd_dirty_) {
+ fan_in_[in]->FeedForward();
+ }
+ activation_ += ((*(fan_in_weights_[in])) * fan_in_[in]->output_);
+ }
+ // sigmoid it
+ output_ = Sigmoid(activation_);
+ }
+ frwd_dirty_ = false;
+}
+
+// set the type of the neuron
+void Neuron::set_node_type(NeuronTypes Type) {
+ node_type_ = Type;
+}
+
+// Adds new connections *to* this neuron *from*
+// a target neuron using the specified params.
+// Note that what is actually copied in this function are pointers to the
+// specified Neurons and weights and not the actual values. This is by
+// design to centralize the allocation of neurons and weights and so
+// increase the locality of reference and improve cache-hits resulting
+// in a faster net. This technique resulted in a 2X-10X speedup
+// (depending on network size and processor)
+void Neuron::AddFromConnection(Neuron *neurons,
+ float *wts_offset,
+ int from_cnt) {
+ for (int in = 0; in < from_cnt; in++) {
+ fan_in_.push_back(neurons + in);
+ fan_in_weights_.push_back(wts_offset + in);
+ }
+}
+
+// fast computation of sigmoid function using a lookup table
+// defined in sigmoid_table.cpp
+float Neuron::Sigmoid(float activation) {
+ if (activation <= -10.0f) {
+ return 0.0f;
+ } else if (activation >= 10.0f) {
+ return 1.0f;
+ } else {
+ return kSigmoidTable[static_cast<int>(100 * (activation + 10.0))];
+ }
+}
+}
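
Note on Neuron::Sigmoid() above: it indexes kSigmoidTable with static_cast<int>(100 * (activation + 10.0)), so the table presumably holds the logistic function 1/(1+e^-x) sampled every 0.01 over [-10, 10] (roughly 2001 entries). A small generator sketch under that assumption; pasting its output into an initializer list would reproduce a compatible table:

#include <cmath>
#include <cstdio>

int main() {
  for (int i = 0; i <= 2000; ++i) {
    float x = -10.0f + 0.01f * i;                       // activation for entry i
    std::printf("%.8ff,\n", 1.0f / (1.0f + std::exp(-x)));  // logistic sample
  }
  return 0;
}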
diff --git a/neural_networks/runtime/neuron.h b/neural_networks/runtime/neuron.h
new file mode 100644
index 0000000..dd9f759
--- /dev/null
+++ b/neural_networks/runtime/neuron.h
@@ -0,0 +1,141 @@
+// Copyright 2008 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// neuron.h: Declarations of a class for an object that
+// represents a single neuron in a neural network
+//
+
+#ifndef NEURON_H
+#define NEURON_H
+
+#include <math.h>
+#include <vector>
+
+namespace tesseract {
+class Neuron {
+ public:
+ // Types of nodes
+ enum NeuronTypes {
+ Unknown = 0,
+ Input,
+ Hidden,
+ Output
+ };
+ Neuron();
+ ~Neuron();
+ // set the forward dirty flag indicating that the
+ // activation of the net is not fresh
+ void Clear() {
+ frwd_dirty_ = true;
+ }
+ // Read a binary representation of the neuron info from
+ // an input buffer.
+ template <class BuffType> bool ReadBinary(BuffType *input_buff) {
+ float val;
+ if (input_buff->Read(&val, sizeof(val)) != sizeof(val)) {
+ return false;
+ }
+ // input nodes should have no biases
+ if (node_type_ == Input) {
+ bias_ = kInputNodeBias;
+ } else {
+ bias_ = val;
+ }
+ // read fanin count
+ int fan_in_cnt;
+ if (input_buff->Read(&fan_in_cnt, sizeof(fan_in_cnt)) !=
+ sizeof(fan_in_cnt)) {
+ return false;
+ }
+ // validate fan-in cnt
+ if (fan_in_cnt != fan_in_.size()) {
+ return false;
+ }
+ // read the weights
+ for (int in = 0; in < fan_in_cnt; in++) {
+ if (input_buff->Read(&val, sizeof(val)) != sizeof(val)) {
+ return false;
+ }
+ *(fan_in_weights_[in]) = val;
+ }
+ return true;
+ }
+
+  // Adds new connections *to* this neuron *from* the specified
+  // source neurons, using the specified weights.
+  // Note that what is actually copied by this function are pointers to the
+  // specified Neurons and weights, not the actual values. This is by
+  // design, to centralize the allocation of neurons and weights and so
+  // increase locality of reference and improve cache hits, resulting
+  // in a faster net. This technique resulted in a 2X-10X speedup
+  // (depending on network size and processor).
+ void AddFromConnection(Neuron *neuron_vec,
+ float *wts_offset,
+ int from_cnt);
+ // Set the type of a neuron
+ void set_node_type(NeuronTypes type);
+ // Computes the output of the node by
+ // "pulling" the output of the fan-in nodes
+ void FeedForward();
+ // fast computation of sigmoid function using a lookup table
+ // defined in sigmoid_table.cpp
+ static float Sigmoid(float activation);
+ // Accessor functions
+ float output() const {
+ return output_;
+ }
+ void set_output(float out_val) {
+ output_ = out_val;
+ }
+ int id() const {
+ return id_;
+ }
+ int fan_in_cnt() const {
+ return fan_in_.size();
+ }
+ Neuron * fan_in(int idx) const {
+ return fan_in_[idx];
+ }
+ float fan_in_wts(int idx) const {
+ return *(fan_in_weights_[idx]);
+ }
+ void set_id(int id) {
+ id_ = id;
+ }
+ float bias() const {
+ return bias_;
+ }
+ Neuron::NeuronTypes node_type() const {
+ return node_type_;
+ }
+
+ protected:
+ // Type of Neuron
+ NeuronTypes node_type_;
+  // unique id of the neuron
+ int id_;
+ // node bias
+ float bias_;
+ // node net activation
+ float activation_;
+ // node output
+ float output_;
+  // pointers to fan-in nodes
+  std::vector<Neuron *> fan_in_;
+  // pointers to fan-in weights
+  std::vector<float *> fan_in_weights_;
+ // Sigmoid function lookup table used for fast computation
+ // of sigmoid function
+ static const float kSigmoidTable[];
+ // flag determining if the activation of the node
+ // is fresh or not (dirty)
+ bool frwd_dirty_;
+  // Input node bias value
+ static const float kInputNodeBias = 0.0f;
+ // Initializer
+ void Init();
+};
+}
+
+#endif  // NEURON_H
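ReadBinary() is templated on the buffer type and only requires a Read(dest, size) member that returns the number of bytes copied; the reader added by this change is InputFileBuffer. A minimal in-memory stand-in satisfying that interface, useful for exercising ReadBinary() in isolation (sketch only, illustrative names):

    // In-memory buffer with the Read(dest, size) interface that
    // Neuron::ReadBinary() expects from its BuffType parameter.
    #include <string.h>

    class MemBufferSketch {
     public:
      MemBufferSketch(const char *data, int size)
          : data_(data), size_(size), pos_(0) {}
      // Copies up to size bytes into dest and returns the count copied.
      int Read(void *dest, int size) {
        if (pos_ + size > size_) {
          size = size_ - pos_;
        }
        memcpy(dest, data_ + pos_, size);
        pos_ += size;
        return size;
      }
     private:
      const char *data_;
      int size_;
      int pos_;
    };

Note that ReadBinary() validates the stored fan-in count against fan_in_.size() and writes through the shared weight pointers, so the connections must already have been added with AddFromConnection() before a neuron is deserialized.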
diff --git a/neural_networks/runtime/sigmoid_table.cpp b/neural_networks/runtime/sigmoid_table.cpp
new file mode 100644
index 0000000..f170a10
--- /dev/null
+++ b/neural_networks/runtime/sigmoid_table.cpp
@@ -0,0 +1,514 @@
+// Copyright 2007 Google Inc.
+// All Rights Reserved.
+// Author: ahmadab@google.com (Ahmad Abdulkader)
+//
+// sigmoid_table.cpp: Sigmoid function lookup table
+
+#include "neuron.h"
+
+namespace tesseract {
+
+const float Neuron::kSigmoidTable[] = {
+ 4.53979E-05f, 4.58541E-05f, 4.63149E-05f, 4.67804E-05f,
+ 4.72505E-05f, 4.77254E-05f, 4.8205E-05f, 4.86894E-05f,
+ 4.91787E-05f, 4.9673E-05f, 5.01722E-05f, 5.06764E-05f,
+ 5.11857E-05f, 5.17001E-05f, 5.22196E-05f, 5.27444E-05f,
+ 5.32745E-05f, 5.38099E-05f, 5.43506E-05f, 5.48968E-05f,
+ 5.54485E-05f, 5.60058E-05f, 5.65686E-05f, 5.71371E-05f,
+ 5.77113E-05f, 5.82913E-05f, 5.88771E-05f, 5.94688E-05f,
+ 6.00664E-05f, 6.067E-05f, 6.12797E-05f, 6.18956E-05f,
+ 6.25176E-05f, 6.31459E-05f, 6.37805E-05f, 6.44214E-05f,
+ 6.50688E-05f, 6.57227E-05f, 6.63832E-05f, 6.70503E-05f,
+ 6.77241E-05f, 6.84047E-05f, 6.90922E-05f, 6.97865E-05f,
+ 7.04878E-05f, 7.11962E-05f, 7.19117E-05f, 7.26343E-05f,
+ 7.33643E-05f, 7.41016E-05f, 7.48462E-05f, 7.55984E-05f,
+ 7.63581E-05f, 7.71255E-05f, 7.79005E-05f, 7.86834E-05f,
+ 7.94741E-05f, 8.02728E-05f, 8.10794E-05f, 8.18942E-05f,
+ 8.27172E-05f, 8.35485E-05f, 8.43881E-05f, 8.52361E-05f,
+ 8.60927E-05f, 8.69579E-05f, 8.78317E-05f, 8.87144E-05f,
+ 8.96059E-05f, 9.05064E-05f, 9.14159E-05f, 9.23345E-05f,
+ 9.32624E-05f, 9.41996E-05f, 9.51463E-05f, 9.61024E-05f,
+ 9.70682E-05f, 9.80436E-05f, 9.90289E-05f, 0.000100024f,
+ 0.000101029f, 0.000102044f, 0.00010307f, 0.000104106f,
+ 0.000105152f, 0.000106209f, 0.000107276f, 0.000108354f,
+ 0.000109443f, 0.000110542f, 0.000111653f, 0.000112775f,
+ 0.000113909f, 0.000115053f, 0.000116209f, 0.000117377f,
+ 0.000118557f, 0.000119748f, 0.000120951f, 0.000122167f,
+ 0.000123395f, 0.000124635f, 0.000125887f, 0.000127152f,
+ 0.00012843f, 0.00012972f, 0.000131024f, 0.000132341f,
+ 0.00013367f, 0.000135014f, 0.00013637f, 0.000137741f,
+ 0.000139125f, 0.000140523f, 0.000141935f, 0.000143361f,
+ 0.000144802f, 0.000146257f, 0.000147727f, 0.000149211f,
+ 0.00015071f, 0.000152225f, 0.000153754f, 0.000155299f,
+ 0.00015686f, 0.000158436f, 0.000160028f, 0.000161636f,
+ 0.000163261f, 0.000164901f, 0.000166558f, 0.000168232f,
+ 0.000169922f, 0.00017163f, 0.000173354f, 0.000175096f,
+ 0.000176856f, 0.000178633f, 0.000180428f, 0.000182241f,
+ 0.000184072f, 0.000185922f, 0.00018779f, 0.000189677f,
+ 0.000191583f, 0.000193508f, 0.000195452f, 0.000197416f,
+ 0.0001994f, 0.000201403f, 0.000203427f, 0.000205471f,
+ 0.000207536f, 0.000209621f, 0.000211727f, 0.000213855f,
+ 0.000216003f, 0.000218174f, 0.000220366f, 0.00022258f,
+ 0.000224817f, 0.000227076f, 0.000229357f, 0.000231662f,
+ 0.00023399f, 0.000236341f, 0.000238715f, 0.000241114f,
+ 0.000243537f, 0.000245984f, 0.000248455f, 0.000250951f,
+ 0.000253473f, 0.00025602f, 0.000258592f, 0.00026119f,
+ 0.000263815f, 0.000266465f, 0.000269143f, 0.000271847f,
+ 0.000274578f, 0.000277337f, 0.000280123f, 0.000282938f,
+ 0.000285781f, 0.000288652f, 0.000291552f, 0.000294481f,
+ 0.00029744f, 0.000300429f, 0.000303447f, 0.000306496f,
+ 0.000309575f, 0.000312685f, 0.000315827f, 0.000319f,
+ 0.000322205f, 0.000325442f, 0.000328712f, 0.000332014f,
+ 0.00033535f, 0.000338719f, 0.000342122f, 0.00034556f,
+ 0.000349031f, 0.000352538f, 0.00035608f, 0.000359657f,
+ 0.00036327f, 0.00036692f, 0.000370606f, 0.000374329f,
+ 0.00037809f, 0.000381888f, 0.000385725f, 0.0003896f,
+ 0.000393514f, 0.000397467f, 0.00040146f, 0.000405494f,
+ 0.000409567f, 0.000413682f, 0.000417838f, 0.000422035f,
+ 0.000426275f, 0.000430557f, 0.000434882f, 0.000439251f,
+ 0.000443664f, 0.000448121f, 0.000452622f, 0.000457169f,
+ 0.000461762f, 0.0004664f, 0.000471085f, 0.000475818f,
+ 0.000480597f, 0.000485425f, 0.000490301f, 0.000495226f,
+ 0.000500201f, 0.000505226f, 0.000510301f, 0.000515427f,
+ 0.000520604f, 0.000525833f, 0.000531115f, 0.00053645f,
+ 0.000541839f, 0.000547281f, 0.000552779f, 0.000558331f,
+ 0.000563939f, 0.000569604f, 0.000575325f, 0.000581104f,
+ 0.00058694f, 0.000592836f, 0.00059879f, 0.000604805f,
+ 0.000610879f, 0.000617015f, 0.000623212f, 0.000629472f,
+ 0.000635794f, 0.00064218f, 0.00064863f, 0.000655144f,
+ 0.000661724f, 0.00066837f, 0.000675083f, 0.000681863f,
+ 0.000688711f, 0.000695628f, 0.000702614f, 0.00070967f,
+ 0.000716798f, 0.000723996f, 0.000731267f, 0.000738611f,
+ 0.000746029f, 0.000753521f, 0.000761088f, 0.000768731f,
+ 0.000776451f, 0.000784249f, 0.000792124f, 0.000800079f,
+ 0.000808113f, 0.000816228f, 0.000824425f, 0.000832703f,
+ 0.000841065f, 0.000849511f, 0.000858041f, 0.000866657f,
+ 0.00087536f, 0.000884149f, 0.000893027f, 0.000901994f,
+ 0.000911051f, 0.000920199f, 0.000929439f, 0.000938771f,
+ 0.000948197f, 0.000957717f, 0.000967333f, 0.000977045f,
+ 0.000986855f, 0.000996763f, 0.001006771f, 0.001016879f,
+ 0.001027088f, 0.0010374f, 0.001047815f, 0.001058334f,
+ 0.00106896f, 0.001079691f, 0.00109053f, 0.001101478f,
+ 0.001112536f, 0.001123705f, 0.001134985f, 0.001146379f,
+ 0.001157887f, 0.00116951f, 0.00118125f, 0.001193108f,
+ 0.001205084f, 0.001217181f, 0.001229399f, 0.001241739f,
+ 0.001254203f, 0.001266792f, 0.001279507f, 0.00129235f,
+ 0.001305321f, 0.001318423f, 0.001331655f, 0.001345021f,
+ 0.00135852f, 0.001372155f, 0.001385926f, 0.001399835f,
+ 0.001413884f, 0.001428073f, 0.001442405f, 0.00145688f,
+ 0.001471501f, 0.001486267f, 0.001501182f, 0.001516247f,
+ 0.001531462f, 0.001546829f, 0.001562351f, 0.001578028f,
+ 0.001593862f, 0.001609855f, 0.001626008f, 0.001642323f,
+ 0.001658801f, 0.001675444f, 0.001692254f, 0.001709233f,
+ 0.001726381f, 0.001743701f, 0.001761195f, 0.001778864f,
+ 0.00179671f, 0.001814734f, 0.001832939f, 0.001851326f,
+ 0.001869898f, 0.001888655f, 0.0019076f, 0.001926735f,
+ 0.001946061f, 0.001965581f, 0.001985296f, 0.002005209f,
+ 0.00202532f, 0.002045634f, 0.00206615f, 0.002086872f,
+ 0.002107801f, 0.00212894f, 0.00215029f, 0.002171854f,
+ 0.002193633f, 0.002215631f, 0.002237849f, 0.002260288f,
+ 0.002282953f, 0.002305844f, 0.002328964f, 0.002352316f,
+ 0.002375901f, 0.002399721f, 0.002423781f, 0.00244808f,
+ 0.002472623f, 0.002497411f, 0.002522447f, 0.002547734f,
+ 0.002573273f, 0.002599068f, 0.00262512f, 0.002651433f,
+ 0.002678009f, 0.002704851f, 0.002731961f, 0.002759342f,
+ 0.002786996f, 0.002814927f, 0.002843137f, 0.002871629f,
+ 0.002900406f, 0.00292947f, 0.002958825f, 0.002988472f,
+ 0.003018416f, 0.003048659f, 0.003079205f, 0.003110055f,
+ 0.003141213f, 0.003172683f, 0.003204467f, 0.003236568f,
+ 0.00326899f, 0.003301735f, 0.003334807f, 0.00336821f,
+ 0.003401946f, 0.003436018f, 0.003470431f, 0.003505187f,
+ 0.00354029f, 0.003575744f, 0.003611551f, 0.003647715f,
+ 0.00368424f, 0.003721129f, 0.003758387f, 0.003796016f,
+ 0.00383402f, 0.003872403f, 0.00391117f, 0.003950322f,
+ 0.003989865f, 0.004029802f, 0.004070138f, 0.004110875f,
+ 0.004152019f, 0.004193572f, 0.00423554f, 0.004277925f,
+ 0.004320734f, 0.004363968f, 0.004407633f, 0.004451734f,
+ 0.004496273f, 0.004541256f, 0.004586687f, 0.004632571f,
+ 0.004678911f, 0.004725713f, 0.00477298f, 0.004820718f,
+ 0.004868931f, 0.004917624f, 0.004966802f, 0.005016468f,
+ 0.005066629f, 0.005117289f, 0.005168453f, 0.005220126f,
+ 0.005272312f, 0.005325018f, 0.005378247f, 0.005432006f,
+ 0.005486299f, 0.005541132f, 0.005596509f, 0.005652437f,
+ 0.005708921f, 0.005765966f, 0.005823577f, 0.005881761f,
+ 0.005940522f, 0.005999867f, 0.006059801f, 0.006120331f,
+ 0.006181461f, 0.006243198f, 0.006305547f, 0.006368516f,
+ 0.006432108f, 0.006496332f, 0.006561193f, 0.006626697f,
+ 0.006692851f, 0.006759661f, 0.006827132f, 0.006895273f,
+ 0.006964089f, 0.007033587f, 0.007103774f, 0.007174656f,
+ 0.00724624f, 0.007318533f, 0.007391541f, 0.007465273f,
+ 0.007539735f, 0.007614933f, 0.007690876f, 0.00776757f,
+ 0.007845023f, 0.007923242f, 0.008002235f, 0.008082009f,
+ 0.008162571f, 0.00824393f, 0.008326093f, 0.008409068f,
+ 0.008492863f, 0.008577485f, 0.008662944f, 0.008749246f,
+ 0.0088364f, 0.008924415f, 0.009013299f, 0.009103059f,
+ 0.009193705f, 0.009285246f, 0.009377689f, 0.009471044f,
+ 0.009565319f, 0.009660523f, 0.009756666f, 0.009853756f,
+ 0.009951802f, 0.010050814f, 0.010150801f, 0.010251772f,
+ 0.010353738f, 0.010456706f, 0.010560688f, 0.010665693f,
+ 0.01077173f, 0.01087881f, 0.010986943f, 0.011096138f,
+ 0.011206406f, 0.011317758f, 0.011430203f, 0.011543752f,
+ 0.011658417f, 0.011774206f, 0.011891132f, 0.012009204f,
+ 0.012128435f, 0.012248835f, 0.012370415f, 0.012493186f,
+ 0.012617161f, 0.012742349f, 0.012868764f, 0.012996417f,
+ 0.013125318f, 0.013255481f, 0.013386918f, 0.01351964f,
+ 0.013653659f, 0.013788989f, 0.01392564f, 0.014063627f,
+ 0.014202961f, 0.014343656f, 0.014485724f, 0.014629178f,
+ 0.014774032f, 0.014920298f, 0.01506799f, 0.015217121f,
+ 0.015367706f, 0.015519757f, 0.015673288f, 0.015828314f,
+ 0.015984848f, 0.016142905f, 0.016302499f, 0.016463645f,
+ 0.016626356f, 0.016790648f, 0.016956536f, 0.017124033f,
+ 0.017293157f, 0.01746392f, 0.01763634f, 0.017810432f,
+ 0.01798621f, 0.018163691f, 0.018342891f, 0.018523825f,
+ 0.01870651f, 0.018890962f, 0.019077197f, 0.019265233f,
+ 0.019455085f, 0.01964677f, 0.019840306f, 0.020035709f,
+ 0.020232997f, 0.020432187f, 0.020633297f, 0.020836345f,
+ 0.021041347f, 0.021248323f, 0.02145729f, 0.021668266f,
+ 0.021881271f, 0.022096322f, 0.022313439f, 0.022532639f,
+ 0.022753943f, 0.02297737f, 0.023202938f, 0.023430668f,
+ 0.023660578f, 0.023892689f, 0.024127021f, 0.024363594f,
+ 0.024602428f, 0.024843544f, 0.025086962f, 0.025332703f,
+ 0.025580788f, 0.025831239f, 0.026084075f, 0.02633932f,
+ 0.026596994f, 0.026857119f, 0.027119717f, 0.027384811f,
+ 0.027652422f, 0.027922574f, 0.028195288f, 0.028470588f,
+ 0.028748496f, 0.029029036f, 0.029312231f, 0.029598104f,
+ 0.02988668f, 0.030177981f, 0.030472033f, 0.030768859f,
+ 0.031068484f, 0.031370932f, 0.031676228f, 0.031984397f,
+ 0.032295465f, 0.032609455f, 0.032926395f, 0.033246309f,
+ 0.033569223f, 0.033895164f, 0.034224158f, 0.03455623f,
+ 0.034891409f, 0.035229719f, 0.035571189f, 0.035915846f,
+ 0.036263716f, 0.036614828f, 0.036969209f, 0.037326887f,
+ 0.037687891f, 0.038052247f, 0.038419986f, 0.038791134f,
+ 0.039165723f, 0.03954378f, 0.039925334f, 0.040310415f,
+ 0.040699054f, 0.041091278f, 0.041487119f, 0.041886607f,
+ 0.042289772f, 0.042696644f, 0.043107255f, 0.043521635f,
+ 0.043939815f, 0.044361828f, 0.044787703f, 0.045217473f,
+ 0.045651171f, 0.046088827f, 0.046530475f, 0.046976146f,
+ 0.047425873f, 0.04787969f, 0.048337629f, 0.048799723f,
+ 0.049266006f, 0.049736512f, 0.050211273f, 0.050690325f,
+ 0.051173701f, 0.051661435f, 0.052153563f, 0.052650118f,
+ 0.053151136f, 0.053656652f, 0.0541667f, 0.054681317f,
+ 0.055200538f, 0.055724398f, 0.056252934f, 0.056786181f,
+ 0.057324176f, 0.057866955f, 0.058414556f, 0.058967013f,
+ 0.059524366f, 0.06008665f, 0.060653903f, 0.061226163f,
+ 0.061803466f, 0.062385851f, 0.062973356f, 0.063566018f,
+ 0.064163876f, 0.064766969f, 0.065375333f, 0.065989009f,
+ 0.066608036f, 0.067232451f, 0.067862294f, 0.068497604f,
+ 0.06913842f, 0.069784783f, 0.070436731f, 0.071094304f,
+ 0.071757542f, 0.072426485f, 0.073101173f, 0.073781647f,
+ 0.074467945f, 0.075160109f, 0.07585818f, 0.076562197f,
+ 0.077272202f, 0.077988235f, 0.078710337f, 0.079438549f,
+ 0.080172912f, 0.080913467f, 0.081660255f, 0.082413318f,
+ 0.083172696f, 0.083938432f, 0.084710566f, 0.085489139f,
+ 0.086274194f, 0.087065772f, 0.087863915f, 0.088668663f,
+ 0.089480059f, 0.090298145f, 0.091122961f, 0.09195455f,
+ 0.092792953f, 0.093638212f, 0.094490369f, 0.095349465f,
+ 0.096215542f, 0.097088641f, 0.097968804f, 0.098856073f,
+ 0.099750489f, 0.100652094f, 0.101560928f, 0.102477033f,
+ 0.103400451f, 0.104331223f, 0.10526939f, 0.106214992f,
+ 0.10716807f, 0.108128667f, 0.109096821f, 0.110072574f,
+ 0.111055967f, 0.112047039f, 0.11304583f, 0.114052381f,
+ 0.115066732f, 0.116088922f, 0.117118991f, 0.118156978f,
+ 0.119202922f, 0.120256862f, 0.121318838f, 0.122388887f,
+ 0.123467048f, 0.124553358f, 0.125647857f, 0.12675058f,
+ 0.127861566f, 0.128980852f, 0.130108474f, 0.131244469f,
+ 0.132388874f, 0.133541723f, 0.134703052f, 0.135872897f,
+ 0.137051293f, 0.138238273f, 0.139433873f, 0.140638126f,
+ 0.141851065f, 0.143072723f, 0.144303134f, 0.145542329f,
+ 0.14679034f, 0.148047198f, 0.149312935f, 0.15058758f,
+ 0.151871164f, 0.153163716f, 0.154465265f, 0.15577584f,
+ 0.157095469f, 0.158424179f, 0.159761997f, 0.16110895f,
+ 0.162465063f, 0.163830361f, 0.16520487f, 0.166588614f,
+ 0.167981615f, 0.169383897f, 0.170795482f, 0.172216392f,
+ 0.173646647f, 0.175086268f, 0.176535275f, 0.177993686f,
+ 0.179461519f, 0.180938793f, 0.182425524f, 0.183921727f,
+ 0.185427419f, 0.186942614f, 0.188467325f, 0.190001566f,
+ 0.191545349f, 0.193098684f, 0.194661584f, 0.196234056f,
+ 0.197816111f, 0.199407757f, 0.201009f, 0.202619846f,
+ 0.204240302f, 0.205870372f, 0.207510059f, 0.209159365f,
+ 0.210818293f, 0.212486844f, 0.214165017f, 0.215852811f,
+ 0.217550224f, 0.219257252f, 0.220973892f, 0.222700139f,
+ 0.224435986f, 0.226181426f, 0.227936451f, 0.229701051f,
+ 0.231475217f, 0.233258936f, 0.235052196f, 0.236854984f,
+ 0.238667285f, 0.240489083f, 0.242320361f, 0.244161101f,
+ 0.246011284f, 0.247870889f, 0.249739894f, 0.251618278f,
+ 0.253506017f, 0.255403084f, 0.257309455f, 0.259225101f,
+ 0.261149994f, 0.263084104f, 0.265027401f, 0.266979851f,
+ 0.268941421f, 0.270912078f, 0.272891784f, 0.274880502f,
+ 0.276878195f, 0.278884822f, 0.280900343f, 0.282924715f,
+ 0.284957894f, 0.286999837f, 0.289050497f, 0.291109827f,
+ 0.293177779f, 0.295254302f, 0.297339346f, 0.299432858f,
+ 0.301534784f, 0.30364507f, 0.30576366f, 0.307890496f,
+ 0.310025519f, 0.312168669f, 0.314319886f, 0.316479106f,
+ 0.318646266f, 0.320821301f, 0.323004144f, 0.325194727f,
+ 0.327392983f, 0.32959884f, 0.331812228f, 0.334033073f,
+ 0.336261303f, 0.338496841f, 0.340739612f, 0.342989537f,
+ 0.345246539f, 0.347510538f, 0.349781451f, 0.352059198f,
+ 0.354343694f, 0.356634854f, 0.358932594f, 0.361236825f,
+ 0.36354746f, 0.365864409f, 0.368187582f, 0.370516888f,
+ 0.372852234f, 0.375193526f, 0.377540669f, 0.379893568f,
+ 0.382252125f, 0.384616244f, 0.386985824f, 0.389360766f,
+ 0.391740969f, 0.394126332f, 0.39651675f, 0.398912121f,
+ 0.40131234f, 0.403717301f, 0.406126897f, 0.408541022f,
+ 0.410959566f, 0.413382421f, 0.415809477f, 0.418240623f,
+ 0.420675748f, 0.423114739f, 0.425557483f, 0.428003867f,
+ 0.430453776f, 0.432907095f, 0.435363708f, 0.437823499f,
+ 0.440286351f, 0.442752145f, 0.445220765f, 0.44769209f,
+ 0.450166003f, 0.452642382f, 0.455121108f, 0.457602059f,
+ 0.460085115f, 0.462570155f, 0.465057055f, 0.467545694f,
+ 0.470035948f, 0.472527696f, 0.475020813f, 0.477515175f,
+ 0.48001066f, 0.482507142f, 0.485004498f, 0.487502604f,
+ 0.490001333f, 0.492500562f, 0.495000167f, 0.497500021f,
+ 0.5f, 0.502499979f, 0.504999833f, 0.507499438f,
+ 0.509998667f, 0.512497396f, 0.514995502f, 0.517492858f,
+ 0.51998934f, 0.522484825f, 0.524979187f, 0.527472304f,
+ 0.529964052f, 0.532454306f, 0.534942945f, 0.537429845f,
+ 0.539914885f, 0.542397941f, 0.544878892f, 0.547357618f,
+ 0.549833997f, 0.55230791f, 0.554779235f, 0.557247855f,
+ 0.559713649f, 0.562176501f, 0.564636292f, 0.567092905f,
+ 0.569546224f, 0.571996133f, 0.574442517f, 0.576885261f,
+ 0.579324252f, 0.581759377f, 0.584190523f, 0.586617579f,
+ 0.589040434f, 0.591458978f, 0.593873103f, 0.596282699f,
+ 0.59868766f, 0.601087879f, 0.60348325f, 0.605873668f,
+ 0.608259031f, 0.610639234f, 0.613014176f, 0.615383756f,
+ 0.617747875f, 0.620106432f, 0.622459331f, 0.624806474f,
+ 0.627147766f, 0.629483112f, 0.631812418f, 0.634135591f,
+ 0.63645254f, 0.638763175f, 0.641067406f, 0.643365146f,
+ 0.645656306f, 0.647940802f, 0.650218549f, 0.652489462f,
+ 0.654753461f, 0.657010463f, 0.659260388f, 0.661503159f,
+ 0.663738697f, 0.665966927f, 0.668187772f, 0.67040116f,
+ 0.672607017f, 0.674805273f, 0.676995856f, 0.679178699f,
+ 0.681353734f, 0.683520894f, 0.685680114f, 0.687831331f,
+ 0.689974481f, 0.692109504f, 0.69423634f, 0.69635493f,
+ 0.698465216f, 0.700567142f, 0.702660654f, 0.704745698f,
+ 0.706822221f, 0.708890173f, 0.710949503f, 0.713000163f,
+ 0.715042106f, 0.717075285f, 0.719099657f, 0.721115178f,
+ 0.723121805f, 0.725119498f, 0.727108216f, 0.729087922f,
+ 0.731058579f, 0.733020149f, 0.734972599f, 0.736915896f,
+ 0.738850006f, 0.740774899f, 0.742690545f, 0.744596916f,
+ 0.746493983f, 0.748381722f, 0.750260106f, 0.752129111f,
+ 0.753988716f, 0.755838899f, 0.757679639f, 0.759510917f,
+ 0.761332715f, 0.763145016f, 0.764947804f, 0.766741064f,
+ 0.768524783f, 0.770298949f, 0.772063549f, 0.773818574f,
+ 0.775564014f, 0.777299861f, 0.779026108f, 0.780742748f,
+ 0.782449776f, 0.784147189f, 0.785834983f, 0.787513156f,
+ 0.789181707f, 0.790840635f, 0.792489941f, 0.794129628f,
+ 0.795759698f, 0.797380154f, 0.798991f, 0.800592243f,
+ 0.802183889f, 0.803765944f, 0.805338416f, 0.806901316f,
+ 0.808454651f, 0.809998434f, 0.811532675f, 0.813057386f,
+ 0.814572581f, 0.816078273f, 0.817574476f, 0.819061207f,
+ 0.820538481f, 0.822006314f, 0.823464725f, 0.824913732f,
+ 0.826353353f, 0.827783608f, 0.829204518f, 0.830616103f,
+ 0.832018385f, 0.833411386f, 0.83479513f, 0.836169639f,
+ 0.837534937f, 0.83889105f, 0.840238003f, 0.841575821f,
+ 0.842904531f, 0.84422416f, 0.845534735f, 0.846836284f,
+ 0.848128836f, 0.84941242f, 0.850687065f, 0.851952802f,
+ 0.85320966f, 0.854457671f, 0.855696866f, 0.856927277f,
+ 0.858148935f, 0.859361874f, 0.860566127f, 0.861761727f,
+ 0.862948707f, 0.864127103f, 0.865296948f, 0.866458277f,
+ 0.867611126f, 0.868755531f, 0.869891526f, 0.871019148f,
+ 0.872138434f, 0.87324942f, 0.874352143f, 0.875446642f,
+ 0.876532952f, 0.877611113f, 0.878681162f, 0.879743138f,
+ 0.880797078f, 0.881843022f, 0.882881009f, 0.883911078f,
+ 0.884933268f, 0.885947619f, 0.88695417f, 0.887952961f,
+ 0.888944033f, 0.889927426f, 0.890903179f, 0.891871333f,
+ 0.89283193f, 0.893785008f, 0.89473061f, 0.895668777f,
+ 0.896599549f, 0.897522967f, 0.898439072f, 0.899347906f,
+ 0.900249511f, 0.901143927f, 0.902031196f, 0.902911359f,
+ 0.903784458f, 0.904650535f, 0.905509631f, 0.906361788f,
+ 0.907207047f, 0.90804545f, 0.908877039f, 0.909701855f,
+ 0.910519941f, 0.911331337f, 0.912136085f, 0.912934228f,
+ 0.913725806f, 0.914510861f, 0.915289434f, 0.916061568f,
+ 0.916827304f, 0.917586682f, 0.918339745f, 0.919086533f,
+ 0.919827088f, 0.920561451f, 0.921289663f, 0.922011765f,
+ 0.922727798f, 0.923437803f, 0.92414182f, 0.924839891f,
+ 0.925532055f, 0.926218353f, 0.926898827f, 0.927573515f,
+ 0.928242458f, 0.928905696f, 0.929563269f, 0.930215217f,
+ 0.93086158f, 0.931502396f, 0.932137706f, 0.932767549f,
+ 0.933391964f, 0.934010991f, 0.934624667f, 0.935233031f,
+ 0.935836124f, 0.936433982f, 0.937026644f, 0.937614149f,
+ 0.938196534f, 0.938773837f, 0.939346097f, 0.93991335f,
+ 0.940475634f, 0.941032987f, 0.941585444f, 0.942133045f,
+ 0.942675824f, 0.943213819f, 0.943747066f, 0.944275602f,
+ 0.944799462f, 0.945318683f, 0.9458333f, 0.946343348f,
+ 0.946848864f, 0.947349882f, 0.947846437f, 0.948338565f,
+ 0.948826299f, 0.949309675f, 0.949788727f, 0.950263488f,
+ 0.950733994f, 0.951200277f, 0.951662371f, 0.95212031f,
+ 0.952574127f, 0.953023854f, 0.953469525f, 0.953911173f,
+ 0.954348829f, 0.954782527f, 0.955212297f, 0.955638172f,
+ 0.956060185f, 0.956478365f, 0.956892745f, 0.957303356f,
+ 0.957710228f, 0.958113393f, 0.958512881f, 0.958908722f,
+ 0.959300946f, 0.959689585f, 0.960074666f, 0.96045622f,
+ 0.960834277f, 0.961208866f, 0.961580014f, 0.961947753f,
+ 0.962312109f, 0.962673113f, 0.963030791f, 0.963385172f,
+ 0.963736284f, 0.964084154f, 0.964428811f, 0.964770281f,
+ 0.965108591f, 0.96544377f, 0.965775842f, 0.966104836f,
+ 0.966430777f, 0.966753691f, 0.967073605f, 0.967390545f,
+ 0.967704535f, 0.968015603f, 0.968323772f, 0.968629068f,
+ 0.968931516f, 0.969231141f, 0.969527967f, 0.969822019f,
+ 0.97011332f, 0.970401896f, 0.970687769f, 0.970970964f,
+ 0.971251504f, 0.971529412f, 0.971804712f, 0.972077426f,
+ 0.972347578f, 0.972615189f, 0.972880283f, 0.973142881f,
+ 0.973403006f, 0.97366068f, 0.973915925f, 0.974168761f,
+ 0.974419212f, 0.974667297f, 0.974913038f, 0.975156456f,
+ 0.975397572f, 0.975636406f, 0.975872979f, 0.976107311f,
+ 0.976339422f, 0.976569332f, 0.976797062f, 0.97702263f,
+ 0.977246057f, 0.977467361f, 0.977686561f, 0.977903678f,
+ 0.978118729f, 0.978331734f, 0.97854271f, 0.978751677f,
+ 0.978958653f, 0.979163655f, 0.979366703f, 0.979567813f,
+ 0.979767003f, 0.979964291f, 0.980159694f, 0.98035323f,
+ 0.980544915f, 0.980734767f, 0.980922803f, 0.981109038f,
+ 0.98129349f, 0.981476175f, 0.981657109f, 0.981836309f,
+ 0.98201379f, 0.982189568f, 0.98236366f, 0.98253608f,
+ 0.982706843f, 0.982875967f, 0.983043464f, 0.983209352f,
+ 0.983373644f, 0.983536355f, 0.983697501f, 0.983857095f,
+ 0.984015152f, 0.984171686f, 0.984326712f, 0.984480243f,
+ 0.984632294f, 0.984782879f, 0.98493201f, 0.985079702f,
+ 0.985225968f, 0.985370822f, 0.985514276f, 0.985656344f,
+ 0.985797039f, 0.985936373f, 0.98607436f, 0.986211011f,
+ 0.986346341f, 0.98648036f, 0.986613082f, 0.986744519f,
+ 0.986874682f, 0.987003583f, 0.987131236f, 0.987257651f,
+ 0.987382839f, 0.987506814f, 0.987629585f, 0.987751165f,
+ 0.987871565f, 0.987990796f, 0.988108868f, 0.988225794f,
+ 0.988341583f, 0.988456248f, 0.988569797f, 0.988682242f,
+ 0.988793594f, 0.988903862f, 0.989013057f, 0.98912119f,
+ 0.98922827f, 0.989334307f, 0.989439312f, 0.989543294f,
+ 0.989646262f, 0.989748228f, 0.989849199f, 0.989949186f,
+ 0.990048198f, 0.990146244f, 0.990243334f, 0.990339477f,
+ 0.990434681f, 0.990528956f, 0.990622311f, 0.990714754f,
+ 0.990806295f, 0.990896941f, 0.990986701f, 0.991075585f,
+ 0.9911636f, 0.991250754f, 0.991337056f, 0.991422515f,
+ 0.991507137f, 0.991590932f, 0.991673907f, 0.99175607f,
+ 0.991837429f, 0.991917991f, 0.991997765f, 0.992076758f,
+ 0.992154977f, 0.99223243f, 0.992309124f, 0.992385067f,
+ 0.992460265f, 0.992534727f, 0.992608459f, 0.992681467f,
+ 0.99275376f, 0.992825344f, 0.992896226f, 0.992966413f,
+ 0.993035911f, 0.993104727f, 0.993172868f, 0.993240339f,
+ 0.993307149f, 0.993373303f, 0.993438807f, 0.993503668f,
+ 0.993567892f, 0.993631484f, 0.993694453f, 0.993756802f,
+ 0.993818539f, 0.993879669f, 0.993940199f, 0.994000133f,
+ 0.994059478f, 0.994118239f, 0.994176423f, 0.994234034f,
+ 0.994291079f, 0.994347563f, 0.994403491f, 0.994458868f,
+ 0.994513701f, 0.994567994f, 0.994621753f, 0.994674982f,
+ 0.994727688f, 0.994779874f, 0.994831547f, 0.994882711f,
+ 0.994933371f, 0.994983532f, 0.995033198f, 0.995082376f,
+ 0.995131069f, 0.995179282f, 0.99522702f, 0.995274287f,
+ 0.995321089f, 0.995367429f, 0.995413313f, 0.995458744f,
+ 0.995503727f, 0.995548266f, 0.995592367f, 0.995636032f,
+ 0.995679266f, 0.995722075f, 0.99576446f, 0.995806428f,
+ 0.995847981f, 0.995889125f, 0.995929862f, 0.995970198f,
+ 0.996010135f, 0.996049678f, 0.99608883f, 0.996127597f,
+ 0.99616598f, 0.996203984f, 0.996241613f, 0.996278871f,
+ 0.99631576f, 0.996352285f, 0.996388449f, 0.996424256f,
+ 0.99645971f, 0.996494813f, 0.996529569f, 0.996563982f,
+ 0.996598054f, 0.99663179f, 0.996665193f, 0.996698265f,
+ 0.99673101f, 0.996763432f, 0.996795533f, 0.996827317f,
+ 0.996858787f, 0.996889945f, 0.996920795f, 0.996951341f,
+ 0.996981584f, 0.997011528f, 0.997041175f, 0.99707053f,
+ 0.997099594f, 0.997128371f, 0.997156863f, 0.997185073f,
+ 0.997213004f, 0.997240658f, 0.997268039f, 0.997295149f,
+ 0.997321991f, 0.997348567f, 0.99737488f, 0.997400932f,
+ 0.997426727f, 0.997452266f, 0.997477553f, 0.997502589f,
+ 0.997527377f, 0.99755192f, 0.997576219f, 0.997600279f,
+ 0.997624099f, 0.997647684f, 0.997671036f, 0.997694156f,
+ 0.997717047f, 0.997739712f, 0.997762151f, 0.997784369f,
+ 0.997806367f, 0.997828146f, 0.99784971f, 0.99787106f,
+ 0.997892199f, 0.997913128f, 0.99793385f, 0.997954366f,
+ 0.99797468f, 0.997994791f, 0.998014704f, 0.998034419f,
+ 0.998053939f, 0.998073265f, 0.9980924f, 0.998111345f,
+ 0.998130102f, 0.998148674f, 0.998167061f, 0.998185266f,
+ 0.99820329f, 0.998221136f, 0.998238805f, 0.998256299f,
+ 0.998273619f, 0.998290767f, 0.998307746f, 0.998324556f,
+ 0.998341199f, 0.998357677f, 0.998373992f, 0.998390145f,
+ 0.998406138f, 0.998421972f, 0.998437649f, 0.998453171f,
+ 0.998468538f, 0.998483753f, 0.998498818f, 0.998513733f,
+ 0.998528499f, 0.99854312f, 0.998557595f, 0.998571927f,
+ 0.998586116f, 0.998600165f, 0.998614074f, 0.998627845f,
+ 0.99864148f, 0.998654979f, 0.998668345f, 0.998681577f,
+ 0.998694679f, 0.99870765f, 0.998720493f, 0.998733208f,
+ 0.998745797f, 0.998758261f, 0.998770601f, 0.998782819f,
+ 0.998794916f, 0.998806892f, 0.99881875f, 0.99883049f,
+ 0.998842113f, 0.998853621f, 0.998865015f, 0.998876295f,
+ 0.998887464f, 0.998898522f, 0.99890947f, 0.998920309f,
+ 0.99893104f, 0.998941666f, 0.998952185f, 0.9989626f,
+ 0.998972912f, 0.998983121f, 0.998993229f, 0.999003237f,
+ 0.999013145f, 0.999022955f, 0.999032667f, 0.999042283f,
+ 0.999051803f, 0.999061229f, 0.999070561f, 0.999079801f,
+ 0.999088949f, 0.999098006f, 0.999106973f, 0.999115851f,
+ 0.99912464f, 0.999133343f, 0.999141959f, 0.999150489f,
+ 0.999158935f, 0.999167297f, 0.999175575f, 0.999183772f,
+ 0.999191887f, 0.999199921f, 0.999207876f, 0.999215751f,
+ 0.999223549f, 0.999231269f, 0.999238912f, 0.999246479f,
+ 0.999253971f, 0.999261389f, 0.999268733f, 0.999276004f,
+ 0.999283202f, 0.99929033f, 0.999297386f, 0.999304372f,
+ 0.999311289f, 0.999318137f, 0.999324917f, 0.99933163f,
+ 0.999338276f, 0.999344856f, 0.99935137f, 0.99935782f,
+ 0.999364206f, 0.999370528f, 0.999376788f, 0.999382985f,
+ 0.999389121f, 0.999395195f, 0.99940121f, 0.999407164f,
+ 0.99941306f, 0.999418896f, 0.999424675f, 0.999430396f,
+ 0.999436061f, 0.999441669f, 0.999447221f, 0.999452719f,
+ 0.999458161f, 0.99946355f, 0.999468885f, 0.999474167f,
+ 0.999479396f, 0.999484573f, 0.999489699f, 0.999494774f,
+ 0.999499799f, 0.999504774f, 0.999509699f, 0.999514575f,
+ 0.999519403f, 0.999524182f, 0.999528915f, 0.9995336f,
+ 0.999538238f, 0.999542831f, 0.999547378f, 0.999551879f,
+ 0.999556336f, 0.999560749f, 0.999565118f, 0.999569443f,
+ 0.999573725f, 0.999577965f, 0.999582162f, 0.999586318f,
+ 0.999590433f, 0.999594506f, 0.99959854f, 0.999602533f,
+ 0.999606486f, 0.9996104f, 0.999614275f, 0.999618112f,
+ 0.99962191f, 0.999625671f, 0.999629394f, 0.99963308f,
+ 0.99963673f, 0.999640343f, 0.99964392f, 0.999647462f,
+ 0.999650969f, 0.99965444f, 0.999657878f, 0.999661281f,
+ 0.99966465f, 0.999667986f, 0.999671288f, 0.999674558f,
+ 0.999677795f, 0.999681f, 0.999684173f, 0.999687315f,
+ 0.999690425f, 0.999693504f, 0.999696553f, 0.999699571f,
+ 0.99970256f, 0.999705519f, 0.999708448f, 0.999711348f,
+ 0.999714219f, 0.999717062f, 0.999719877f, 0.999722663f,
+ 0.999725422f, 0.999728153f, 0.999730857f, 0.999733535f,
+ 0.999736185f, 0.99973881f, 0.999741408f, 0.99974398f,
+ 0.999746527f, 0.999749049f, 0.999751545f, 0.999754016f,
+ 0.999756463f, 0.999758886f, 0.999761285f, 0.999763659f,
+ 0.99976601f, 0.999768338f, 0.999770643f, 0.999772924f,
+ 0.999775183f, 0.99977742f, 0.999779634f, 0.999781826f,
+ 0.999783997f, 0.999786145f, 0.999788273f, 0.999790379f,
+ 0.999792464f, 0.999794529f, 0.999796573f, 0.999798597f,
+ 0.9998006f, 0.999802584f, 0.999804548f, 0.999806492f,
+ 0.999808417f, 0.999810323f, 0.99981221f, 0.999814078f,
+ 0.999815928f, 0.999817759f, 0.999819572f, 0.999821367f,
+ 0.999823144f, 0.999824904f, 0.999826646f, 0.99982837f,
+ 0.999830078f, 0.999831768f, 0.999833442f, 0.999835099f,
+ 0.999836739f, 0.999838364f, 0.999839972f, 0.999841564f,
+ 0.99984314f, 0.999844701f, 0.999846246f, 0.999847775f,
+ 0.99984929f, 0.999850789f, 0.999852273f, 0.999853743f,
+ 0.999855198f, 0.999856639f, 0.999858065f, 0.999859477f,
+ 0.999860875f, 0.999862259f, 0.99986363f, 0.999864986f,
+ 0.99986633f, 0.999867659f, 0.999868976f, 0.99987028f,
+ 0.99987157f, 0.999872848f, 0.999874113f, 0.999875365f,
+ 0.999876605f, 0.999877833f, 0.999879049f, 0.999880252f,
+ 0.999881443f, 0.999882623f, 0.999883791f, 0.999884947f,
+ 0.999886091f, 0.999887225f, 0.999888347f, 0.999889458f,
+ 0.999890557f, 0.999891646f, 0.999892724f, 0.999893791f,
+ 0.999894848f, 0.999895894f, 0.99989693f, 0.999897956f,
+ 0.999898971f, 0.999899976f, 0.999900971f, 0.999901956f,
+ 0.999902932f, 0.999903898f, 0.999904854f, 0.9999058f,
+ 0.999906738f, 0.999907665f, 0.999908584f, 0.999909494f,
+ 0.999910394f, 0.999911286f, 0.999912168f, 0.999913042f,
+ 0.999913907f, 0.999914764f, 0.999915612f, 0.999916452f,
+ 0.999917283f, 0.999918106f, 0.999918921f, 0.999919727f,
+ 0.999920526f, 0.999921317f, 0.999922099f, 0.999922875f,
+ 0.999923642f, 0.999924402f, 0.999925154f, 0.999925898f,
+ 0.999926636f, 0.999927366f, 0.999928088f, 0.999928804f,
+ 0.999929512f, 0.999930213f, 0.999930908f, 0.999931595f,
+ 0.999932276f, 0.99993295f, 0.999933617f, 0.999934277f,
+ 0.999934931f, 0.999935579f, 0.99993622f, 0.999936854f,
+ 0.999937482f, 0.999938104f, 0.99993872f, 0.99993933f,
+ 0.999939934f, 0.999940531f, 0.999941123f, 0.999941709f,
+ 0.999942289f, 0.999942863f, 0.999943431f, 0.999943994f,
+ 0.999944551f, 0.999945103f, 0.999945649f, 0.99994619f,
+ 0.999946726f, 0.999947256f, 0.99994778f, 0.9999483f,
+ 0.999948814f, 0.999949324f, 0.999949828f, 0.999950327f,
+ 0.999950821f, 0.999951311f, 0.999951795f, 0.999952275f,
+ 0.999952749f, 0.99995322f, 0.999953685f, 0.999954146f,
+ 0.999954602f
+};
+} // namespace tesseract
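The constants above are the logistic function 1 / (1 + exp(-x)) sampled at x = -10.00, -9.99, ..., +10.00 (2001 entries, step 0.01), matching the index math in Neuron::Sigmoid(): the first entry is sigmoid(-10), about 4.53979e-05, and the last is sigmoid(+10), about 0.999955. A sketch of how an equivalent table could be regenerated, assuming that sampling (the shipped table remains the constant array above):

    // Prints a table equivalent to Neuron::kSigmoidTable, four entries per line.
    #include <math.h>
    #include <stdio.h>

    int main() {
      const int kEntries = 2001;  // x = -10.00 .. +10.00 in steps of 0.01
      for (int i = 0; i < kEntries; ++i) {
        double x = -10.0 + 0.01 * i;
        double s = 1.0 / (1.0 + exp(-x));
        printf("  %.9gf,%s", s, (i % 4 == 3) ? "\n" : "");
      }
      printf("\n");
      return 0;
    }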