PLearn 0.1
Public Member Functions | Public Attributes | Protected Attributes
PLearn::WordNetOntology Class Reference

#include <WordNetOntology.h>

Collaboration diagram for PLearn::WordNetOntology (diagram not shown).

List of all members.

Public Member Functions

 WordNetOntology ()
 WordNetOntology (string voc_file, bool differentiate_unknown_words, bool pre_compute_ancestors, bool pre_compute_descendants, int wn_pos_type=ALL_WN_TYPE, int word_coverage_threshold=-1)
 WordNetOntology (string voc_file, string synset_file, string ontology_file, bool pre_compute_ancestors, bool pre_compute_descendants, int word_coverage_threshold=-1)
 WordNetOntology (string voc_file, string synset_file, string ontology_file, string sense_key_file, bool pre_compute_ancestors, bool pre_compute_descendants, int word_coverage_threshold=-1)
void save (string synset_file, string ontology_file)
void save (string voc_file)
void saveVocInWordnet (string voc_file)
void save (string synset_file, string ontology_file, string sense_key_file)
void load (string voc_file, string synset_file, string ontology_file)
void load (string voc_file, string synset_file, string ontology_file, string sense_key_file)
void savePredominentSyntacticClasses (string file)
void loadPredominentSyntacticClasses (string file)
string getSenseKey (int word_id, int ss_id) const
int getSynsetIDForSenseKey (int word_id, string sense_key) const
int getWordId (string word) const
string getWord (int id) const
int getWordSenseIdForWnsn (string word, int wn_pos_type, int wnsn)
int getWordSenseIdForSenseKey (string lemma, string lexsn, string word)
int getWordSenseUniqueId (int word, int sense)
int getWordSenseUniqueIdSize ()
Set getWordSenses (int id) const
Set getWordHighLevelSenses (int id)
Set getWordNounSenses (int id)
Set getWordVerbSenses (int id)
Set getWordAdjSenses (int id)
Set getWordAdvSenses (int id)
Set getWordsForSense (int id)
Set getSynsetAncestors (int id, int max_level=-1)
Set getSynsetAncestors (int id, int max_level=-1) const
Set getSynsetParents (int id)
Set getWordAncestors (int id, int max_level=-1)
Set getSynsetSenseDescendants (int id)
Set getSynsetWordDescendants (int id)
Node * getSynset (int id)
Node * getRootSynset ()
Set getAllWords () const
Set getAllSenses ()
Set getAllCategories ()
int getVocSize ()
int getSenseSize ()
int getSynsetSize ()
int getMaxSynsetId ()
Set getSyntacticClassesForWord (int word_id)
int getSyntacticClassForSense (int sense_id)
int getPredominentSyntacticClassForWord (int word_id)
void getDescendantCategoriesAtLevel (int ss_id, int cur_level, int target_level, Set categories)
void getDownToUpParentCategoriesAtLevel (int ss_id, int target_level, Set categories, int cur_level=0)
bool isWord (int id) const
bool isWord (string word)
bool isSense (int id)
bool isPureSense (int id)
bool isCategory (int id)
bool isPureCategory (int id)
bool isSynset (int id) const
bool isWordUnknown (string word)
bool isWordUnknown (int id)
bool isSynsetUnknown (int id)
bool isInWordNet (string word, bool trim_word=true, bool stem_word=true, bool remove_undescores=false)
bool isInWordNet (int word_id) const
bool hasSenseInWordNet (string word, int wn_pos_type)
bool isTopLevelCategory (int ss_id)
bool containsWord (string word)
bool containsWordId (int id)
Node * findSynsetFromSynsAndGloss (const vector< string > &syns, const string &gloss, const long offset, const int fnum)
void removeNonReachableSynsets ()
void removeWord (int id)
void print (bool print_ontology=true)
void printSynset (int ss_id, int indent_level=0)
void printSynset (int ss_id, ostream &sout, int indent_level=0)
void printStats ()
void printSynsetAncestors ()
void printWordAncestors ()
void printDescendants ()
void printNodes ()
void printWordOntology (int id)
void printWordOntology (string word)
void printInvertedSynsetOntology (int id, int level=0)
int overlappingSynsets (int ss_id1, int ss_id2)
bool areOverlappingSynsets (int ss_id1, int ss_id2)
void intersectAncestorsAndSenses (Set categories, Set senses)
void reducePolysemy (int level)
void extractPredominentSyntacticClasses ()
void extractWordHighLevelSenses (int noun_depth, int verb_depth, int adj_depth, int adv_depth, int unk_depth)
void extractWordNounAndVerbHighLevelSenses (int noun_depth, int verb_depth)
void detectWordsWithoutOntology ()
void lookForSpecialTags ()
void extract (string voc_file, int wn_pos_type)
void extractWord (string original_word, int wn_pos_type, bool trim_word, bool stem_word, bool remove_underscores)
bool extractSenses (string original_word, string processed_word, int wn_pos_type)
void extractTaggedWordFrequencies (map< int, map< int, int > > &word_senses_to_tagged_frequencies)
Node * extractOntology (SynsetPtr ssp)
void extractAncestors (int threshold, bool cut_with_word_coverage, bool exclude_itself)
void extractAncestors (Node *node, Set ancestors, int level, int level_threshold)
void extractAncestors (Node *node, Set ancestors, int word_coverage_threshold)
void extractDescendants (Node *node, Set sense_descendants, Set word_descendants)
void extractStrictDescendants (Node *node, Set sense_descendants, Set word_descendants)
void extractDescendants ()
void computeWordSenseUniqueIds ()
void init (bool differentiate_unknown_words=true)
void createBaseSynsets ()
void processUnknownWord (int word_id)
void finalize ()
void propagatePOSTypes ()
void propagatePOSTypes (Node *node)
void linkUpperCategories ()
Node * checkForAlreadyExtractedSynset (SynsetPtr ssp)
vector< string > getSynsetWords (SynsetPtr ssp)
bool catchSpecialTags (string word)
void reduceWordPolysemy (int word_id, int level)
void reduceWordPolysemy_preserveSenseOverlapping (int word_id, int level)
void reduceWordPolysemy_preserveSenseOverlapping2 (int word_id, int level)
void getCategoriesAtLevel (int ss_id, int cur_level, int target_level, set< int > &categories)
void getCategoriesUnderLevel (int ss_id, int cur_level, int target_level, Set categories)
void visitUpward (Node *node)
void unvisitDownward (Node *node)
void unvisitAll ()
void printOntology (Node *node, int level=0)
map< int, string > getWords ()
map< string, int > getWordsId ()
void fillTempWordToSensesTVecMap ()
TVec< int > getSensesForWord (int w) const
void fillTempWordToHighLevelSensesTVecMap ()
TVec< int > getHighLevelSensesForWord (int w)
TVec< int > getSecondLevelSensesForWord (int w)
TVec< int > getThirdLevelSensesForWord (int w)

Public Attributes

map< int, TVec< int > > temp_word_to_senses
map< int, TVec< int > > temp_word_to_noun_senses
map< int, TVec< int > > temp_word_to_verb_senses
map< int, TVec< int > > temp_word_to_adj_senses
map< int, TVec< int > > temp_word_to_adv_senses
map< int, TVec< int > > temp_word_to_high_level_senses

Protected Attributes

map< int, Set > word_to_senses
map< int, Set > word_to_noun_senses
map< int, Set > word_to_verb_senses
map< int, Set > word_to_adj_senses
map< int, Set > word_to_adv_senses
map< int, Set > sense_to_words
map< int, Set > synset_to_ancestors
map< int, Set > word_to_ancestors
map< int, Set > synset_to_sense_descendants
map< int, Set > synset_to_word_descendants
map< int, Node * > synsets
map< int, string > words
map< string, int > words_id
map< int, vector< int > > word_to_noun_wnsn
map< int, vector< int > > word_to_verb_wnsn
map< int, vector< int > > word_to_adj_wnsn
map< int, vector< int > > word_to_adv_wnsn
map< int, int > word_to_predominent_pos
map< int, bool > word_is_in_wn
map< int, Set > word_to_high_level_senses
map< pair< int, int >, int > word_sense_to_unique_id
map< int, Set > word_to_under_target_level_high_level_senses
map< pair< int, string >, int > sense_key_to_ss_id
map< pair< int, int >, string > ws_id_to_sense_key
int word_index
int synset_index
int unknown_sense_index
int noun_count
int verb_count
int adj_count
int adv_count
int noun_sense_count
int verb_sense_count
int adj_sense_count
int adv_sense_count
int in_wn_word_count
int out_of_wn_word_count
bool are_ancestors_extracted
bool are_descendants_extracted
bool are_predominent_pos_extracted
bool are_word_high_level_senses_extracted
bool are_word_sense_unique_ids_computed
int n_word_high_level_senses
bool differentiate_unknown_words

Detailed Description

Definition at line 162 of file WordNetOntology.h.


Constructor & Destructor Documentation

PLearn::WordNetOntology::WordNetOntology ( )

Definition at line 51 of file WordNetOntology.cc.

PLearn::WordNetOntology::WordNetOntology ( string  voc_file,
bool  differentiate_unknown_words,
bool  pre_compute_ancestors,
bool  pre_compute_descendants,
int  wn_pos_type = ALL_WN_TYPE,
int  word_coverage_threshold = -1 
)

Definition at line 57 of file WordNetOntology.cc.

References PLearn::extract(), and PLearn::extractAncestors().

{
    init(differentiate_unknown_words);
    createBaseSynsets();
    extract(voc_file, wn_pos_type);
    if (pre_compute_descendants)
        extractDescendants();
    if (pre_compute_ancestors)
        extractAncestors(word_coverage_threshold, true, true);
}

Here is the call graph for this function:
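
A minimal usage sketch for this constructor (not taken from the PLearn sources; the file name and flag values are illustrative): build the ontology directly from a vocabulary file containing one word per line, then print a summary.

    #include <WordNetOntology.h>
    using namespace PLearn;

    int main()
    {
        WordNetOntology ontology("my_vocabulary.txt",
                                 true,   // differentiate_unknown_words
                                 true,   // pre_compute_ancestors
                                 true);  // pre_compute_descendants
        ontology.printStats();          // word/sense counts of the extracted ontology
        return 0;
    }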

PLearn::WordNetOntology::WordNetOntology ( string  voc_file,
string  synset_file,
string  ontology_file,
bool  pre_compute_ancestors,
bool  pre_compute_descendants,
int  word_coverage_threshold = -1 
)

Definition at line 73 of file WordNetOntology.cc.

References PLearn::extractAncestors(), and PLearn::load().

{
    init();
    //createBaseSynsets();
    load(voc_file, synset_file, ontology_file);
    if (pre_compute_descendants)
        extractDescendants();
    if (pre_compute_ancestors)
        extractAncestors(word_coverage_threshold, true, true);
}

Here is the call graph for this function:

PLearn::WordNetOntology::WordNetOntology ( string  voc_file,
string  synset_file,
string  ontology_file,
string  sense_key_file,
bool  pre_compute_ancestors,
bool  pre_compute_descendants,
int  word_coverage_threshold = -1 
)

Definition at line 89 of file WordNetOntology.cc.

References PLearn::extractAncestors(), and PLearn::load().

{
    init();
    //createBaseSynsets();
    load(voc_file, synset_file, ontology_file, sense_key_file);
    if (pre_compute_descendants)
        extractDescendants();
    if (pre_compute_ancestors)
        extractAncestors(word_coverage_threshold, true, true);
}

Here is the call graph for this function:
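
As a hedged sketch (file names are placeholders), this constructor is the counterpart of save(): it reloads a previously written vocabulary, synset and ontology description together with the stored sense keys, instead of re-extracting them from WordNet.

    #include <WordNetOntology.h>
    using namespace PLearn;

    int main()
    {
        WordNetOntology ontology("voc.txt", "synsets.txt", "ontology.txt", "sense_keys.txt",
                                 false,  // pre_compute_ancestors
                                 true);  // pre_compute_descendants
        // Write the same ontology back out under new names.
        ontology.save("synsets_copy.txt", "ontology_copy.txt", "sense_keys_copy.txt");
        return 0;
    }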


Member Function Documentation

bool PLearn::WordNetOntology::areOverlappingSynsets ( int  ss_id1,
int  ss_id2 
) [inline]

Definition at line 334 of file WordNetOntology.h.

References overlappingSynsets().

{ return (overlappingSynsets(ss_id1, ss_id2) > 1); }

Here is the call graph for this function:

bool PLearn::WordNetOntology::catchSpecialTags ( string  word)

Definition at line 763 of file WordNetOntology.cc.

References BOS_SS_ID, BOS_TAG, EOS_SS_ID, EOS_TAG, NUMERIC_SS_ID, NUMERIC_TAG, OOV_SS_ID, OOV_TAG, PROPER_NOUN_SS_ID, PROPER_NOUN_TAG, PUNCTUATION_SS_ID, PUNCTUATION_TAG, STOP_SS_ID, and STOP_TAG.

{
    int word_id = words_id[word];
    if (word == OOV_TAG)
    {
        word_to_senses[word_id].insert(OOV_SS_ID);
        sense_to_words[OOV_SS_ID].insert(word_id);
        return true;
    } else if (word == PROPER_NOUN_TAG)
    {
        word_to_senses[word_id].insert(PROPER_NOUN_SS_ID);
        sense_to_words[PROPER_NOUN_SS_ID].insert(word_id);
        return true;
    } else if (word == NUMERIC_TAG)
    {
        word_to_senses[word_id].insert(NUMERIC_SS_ID);
        sense_to_words[NUMERIC_SS_ID].insert(word_id);
        return true;
    } else if (word == PUNCTUATION_TAG)
    {
        word_to_senses[word_id].insert(PUNCTUATION_SS_ID);
        sense_to_words[PUNCTUATION_SS_ID].insert(word_id);
        return true;
    } else if (word == STOP_TAG)
    {
        word_to_senses[word_id].insert(STOP_SS_ID);
        sense_to_words[STOP_SS_ID].insert(word_id);
        return true;
    } else if (word == BOS_TAG)
    {
        word_to_senses[word_id].insert(BOS_SS_ID);
        sense_to_words[BOS_SS_ID].insert(word_id);
        return true;
    } else if (word == EOS_TAG)
    {
        word_to_senses[word_id].insert(EOS_SS_ID);
        sense_to_words[EOS_SS_ID].insert(word_id);
        return true;
    }
    return false;
}
Node * PLearn::WordNetOntology::checkForAlreadyExtractedSynset ( SynsetPtr  ssp)

Definition at line 1038 of file WordNetOntology.cc.

References PLearn::Node::fnum, PLearn::Node::gloss, PLearn::Node::hereiam, and PLearn::Node::syns.

{
    vector<string> syns = getSynsetWords(ssp);
    string gloss = ssp->defn;
    long offset = ssp->hereiam;
    int fnum = ssp->fnum;
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        Node* node = it->second;
        if (node->syns == syns && node->gloss == gloss && node->hereiam == offset && node->fnum == fnum)
        {
            return node;
        }
    }
    return NULL;

}
void PLearn::WordNetOntology::computeWordSenseUniqueIds ( )

Definition at line 2788 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLERROR, w, and PLearn::ws().

{
    int unique_id = 0;
    for (map<int, Set>::iterator wit = word_to_senses.begin(); wit != word_to_senses.end(); ++wit)
    {
        int w = wit->first;
        Set senses = wit->second;
        for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
        {
            int s = *sit;
            pair<int, int> ws(w, s);
            if (word_sense_to_unique_id.find(ws) != word_sense_to_unique_id.end())
                PLERROR("in computeWordSenseUniqueIds(): dupe word/sense keys (w = %d, s = %d)", w, s);
            word_sense_to_unique_id[ws] = unique_id++;
        }
    }
    are_word_sense_unique_ids_computed = true;
}

Here is the call graph for this function:
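
A short sketch of how the unique ids can be used once computed (assuming 'onto' is an already-built ontology and word_id/sense_id form a valid pair):

    #include <iostream>
    #include <WordNetOntology.h>

    // Assign a dense id to every (word, sense) pair, then query one pair.
    void demoUniqueIds(PLearn::WordNetOntology& onto, int word_id, int sense_id)
    {
        onto.computeWordSenseUniqueIds();
        int uid = onto.getWordSenseUniqueId(word_id, sense_id);
        int n   = onto.getWordSenseUniqueIdSize();
        std::cout << "pair id " << uid << " out of " << n << std::endl;
    }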

bool PLearn::WordNetOntology::containsWord ( string  word) [inline]

Definition at line 314 of file WordNetOntology.h.

References words_id.

Referenced by main().

{ return (words_id.find(word) != words_id.end()); }

Here is the caller graph for this function:

bool PLearn::WordNetOntology::containsWordId ( int  id) [inline]

Definition at line 315 of file WordNetOntology.h.

References words.

{ return (words.find(id) != words.end()); }
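
Together with containsWord(), this predicate allows a safe lookup before querying ids and senses. A small illustrative helper (not part of the class):

    #include <iostream>
    #include <string>
    #include <WordNetOntology.h>

    // Print how many senses a word has, guarding against unknown words.
    void printSenseCount(PLearn::WordNetOntology& onto, const std::string& word)
    {
        if (!onto.containsWord(word))
        {
            std::cout << word << " is not in the vocabulary" << std::endl;
            return;
        }
        int word_id = onto.getWordId(word);
        PLearn::Set senses = onto.getWordSenses(word_id);
        std::cout << word << " has " << senses.size() << " senses" << std::endl;
    }
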
void PLearn::WordNetOntology::createBaseSynsets ( )

Definition at line 140 of file WordNetOntology.cc.

References ADJ_OFFSET, ADJ_SS_ID, ADV_OFFSET, ADV_SS_ID, BOS_OFFSET, BOS_SS_ID, PLearn::Node::children, EOS_OFFSET, EOS_SS_ID, PLearn::Node::gloss, PLearn::Node::hereiam, PLearn::Set::insert(), NOUN_OFFSET, NOUN_SS_ID, NUMERIC_OFFSET, NUMERIC_SS_ID, OOV_OFFSET, OOV_SS_ID, PLearn::Node::parents, PROPER_NOUN_OFFSET, PROPER_NOUN_SS_ID, PUNCTUATION_OFFSET, PUNCTUATION_SS_ID, ROOT_OFFSET, ROOT_SS_ID, STOP_OFFSET, STOP_SS_ID, SUPER_UNKNOWN_OFFSET, SUPER_UNKNOWN_SS_ID, PLearn::Node::syns, PLearn::Node::types, UNDEFINED_TYPE, VERB_OFFSET, and VERB_SS_ID.

{
    // create ROOT synset
    Node* root_node = new Node(ROOT_SS_ID);
    root_node->syns.push_back("ROOT");
    root_node->types.insert(UNDEFINED_TYPE);
    root_node->gloss = "(root concept)";
    root_node->hereiam = ROOT_OFFSET;
    synsets[ROOT_SS_ID] = root_node;
    //root_node->visited = true;

    // create SUPER-UNKNOWN synset
    Node* unk_node = new Node(SUPER_UNKNOWN_SS_ID);
    unk_node->syns.push_back("SUPER_UNKNOWN");
    unk_node->types.insert(UNDEFINED_TYPE);
    unk_node->gloss = "(super-unknown concept)";
    unk_node->hereiam = SUPER_UNKNOWN_OFFSET;
    synsets[SUPER_UNKNOWN_SS_ID] = unk_node;
    //unk_node->visited = true;

    // link it <-> ROOT
    unk_node->parents.insert(ROOT_SS_ID);
    root_node->children.insert(SUPER_UNKNOWN_SS_ID);
  
    // create OOV (out-of-vocabulary) synset
    Node* oov_node = new Node(OOV_SS_ID);
    oov_node->syns.push_back("OOV");
    oov_node->types.insert(UNDEFINED_TYPE);
    oov_node->gloss = "(out-of-vocabulary)";
    oov_node->hereiam = OOV_OFFSET;
    synsets[OOV_SS_ID] = oov_node;
    //oov_node->visited = true;

    // link it <-> SUPER-UNKNOWN
    oov_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(OOV_SS_ID);

    // create PROPER_NOUN, NUMERIC, PUNCTUATION, BOS, EOS and STOP synsets
    Node* proper_node = new Node(PROPER_NOUN_SS_ID);
    proper_node->syns.push_back("PROPER NOUN");
    proper_node->types.insert(UNDEFINED_TYPE);
    proper_node->gloss = "(proper noun)";
    proper_node->hereiam = PROPER_NOUN_OFFSET;
    synsets[PROPER_NOUN_SS_ID] = proper_node;
    //proper_node->visited = true;

    Node* num_node = new Node(NUMERIC_SS_ID);
    num_node->syns.push_back("NUMERIC");
    num_node->types.insert(UNDEFINED_TYPE);
    num_node->gloss = "(numeric)";
    num_node->hereiam = NUMERIC_OFFSET;
    synsets[NUMERIC_SS_ID] = num_node;
    //num_node->visited = true;

    Node* punct_node = new Node(PUNCTUATION_SS_ID);
    punct_node->syns.push_back("PUNCTUATION");
    punct_node->types.insert(UNDEFINED_TYPE);
    punct_node->gloss = "(punctuation)";
    punct_node->hereiam = PUNCTUATION_OFFSET;
    synsets[PUNCTUATION_SS_ID] = punct_node;
    //punct_node->visited = true;

    Node* stop_node = new Node(STOP_SS_ID);
    stop_node->syns.push_back("STOP");
    stop_node->types.insert(UNDEFINED_TYPE);
    stop_node->gloss = "(stop)";
    stop_node->hereiam = STOP_OFFSET;
    synsets[STOP_SS_ID] = stop_node;

    Node* bos_node = new Node(BOS_SS_ID);
    bos_node->syns.push_back("BOS");
    bos_node->types.insert(UNDEFINED_TYPE);
    bos_node->gloss = "(BOS)";
    bos_node->hereiam = BOS_OFFSET;
    synsets[BOS_SS_ID] = bos_node;

    Node* eos_node = new Node(EOS_SS_ID);
    eos_node->syns.push_back("EOS");
    eos_node->types.insert(UNDEFINED_TYPE);
    eos_node->gloss = "(EOS)";
    eos_node->hereiam = EOS_OFFSET;
    synsets[EOS_SS_ID] = eos_node;

    // link them <-> SUPER-UNKNOWN
    proper_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(PROPER_NOUN_SS_ID);
    num_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(NUMERIC_SS_ID);
    punct_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(PUNCTUATION_SS_ID);
    stop_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(STOP_SS_ID);
    bos_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(BOS_SS_ID);
    eos_node->parents.insert(SUPER_UNKNOWN_SS_ID);
    unk_node->children.insert(EOS_SS_ID);

    // create NOUN, VERB, ADJECTIVE and ADVERB synsets
    Node* noun_node = new Node(NOUN_SS_ID);
    noun_node->syns.push_back("NOUN");
    noun_node->types.insert(UNDEFINED_TYPE);
    noun_node->gloss = "(noun concept)";
    noun_node->hereiam = NOUN_OFFSET;
    synsets[NOUN_SS_ID] = noun_node;
    //noun_node->visited = true;

    Node* verb_node = new Node(VERB_SS_ID);
    verb_node->syns.push_back("VERB");
    verb_node->types.insert(UNDEFINED_TYPE);
    verb_node->gloss = "(verb concept)";
    verb_node->hereiam = VERB_OFFSET;
    synsets[VERB_SS_ID] = verb_node;
    //verb_node->visited = true;

    Node* adj_node = new Node(ADJ_SS_ID);
    adj_node->syns.push_back("ADJECTIVE");
    adj_node->types.insert(UNDEFINED_TYPE);
    adj_node->gloss = "(adjective concept)";
    adj_node->hereiam = ADJ_OFFSET;
    synsets[ADJ_SS_ID] = adj_node;
    //adj_node->visited = true;
  
    Node* adv_node = new Node(ADV_SS_ID);
    adv_node->syns.push_back("ADVERB");
    adv_node->types.insert(UNDEFINED_TYPE);
    adv_node->gloss = "(adverb concept)";
    adv_node->hereiam = ADV_OFFSET;
    synsets[ADV_SS_ID] = adv_node;
    //adv_node->visited = true;
  
    // link them <-> ROOT
    noun_node->parents.insert(ROOT_SS_ID);
    root_node->children.insert(NOUN_SS_ID);
    verb_node->parents.insert(ROOT_SS_ID);
    root_node->children.insert(VERB_SS_ID);
    adj_node->parents.insert(ROOT_SS_ID);
    root_node->children.insert(ADJ_SS_ID);
    adv_node->parents.insert(ROOT_SS_ID);
    root_node->children.insert(ADV_SS_ID);

}

Here is the call graph for this function:

void PLearn::WordNetOntology::detectWordsWithoutOntology ( )

Definition at line 2489 of file WordNetOntology.cc.

References PLearn::Set::isEmpty(), and PLWARNING.

Referenced by main().

{
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        int word_id = it->first;
        Set senses = it->second;
        if (senses.isEmpty())
            PLWARNING("word %d (%s) has no attached ontology", word_id, words[word_id].c_str());
    }
}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::extract ( string  voc_file,
int  wn_pos_type 
)

Definition at line 282 of file WordNetOntology.cc.

References PLearn::ShellProgressBar::done(), PLearn::ShellProgressBar::draw(), PLearn::ShellProgressBar::getAsciiFileLineCount(), and PLearn::ShellProgressBar::update().

{
    int n_lines = ShellProgressBar::getAsciiFileLineCount(voc_file);
    ShellProgressBar progress(0, n_lines - 1, "extracting ontology", 50);
    progress.draw();
    ifstream input_if(voc_file.c_str());
    string word;
    while (!input_if.eof())
    {
        getline(input_if, word, '\n');
        if (word == "") continue;
        if (word[0] == '#' && word[1] == '#') continue;
        extractWord(word, wn_pos_type, true, true, false); 
        progress.update(word_index);
    }
    input_if.close();
    progress.done();
    finalize();
    input_if.close();
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractAncestors ( Node *  node,
Set  ancestors,
int  level,
int  level_threshold 
)

Definition at line 1517 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::extractAncestors(), PLearn::Set::insert(), and PLearn::Node::parents.

{
    for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
    {
        ancestors.insert(*it);
        if (level_threshold == -1 || level < level_threshold)
            extractAncestors(synsets[*it], ancestors, level + 1, level_threshold);
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractAncestors ( Node *  node,
Set  ancestors,
int  word_coverage_threshold 
)

Definition at line 1491 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::extractAncestors(), PLearn::Set::insert(), and PLearn::Node::parents.

{
/*
  int ss_id = node->ss_id;
  if (word_coverage_threshold == -1 || synset_to_word_descendants[ss_id].size() < word_coverage_threshold)
  {
  ancestors.insert(ss_id);
  for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
  {
  extractAncestors(synsets[*it], ancestors, word_coverage_threshold);
  }
  }
*/

    for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
    {
        int ss_id = *it;
        if (word_coverage_threshold == -1 || synset_to_word_descendants[ss_id].size() < word_coverage_threshold)
        {
            ancestors.insert(ss_id);
            extractAncestors(synsets[ss_id], ancestors, word_coverage_threshold);
        }
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractAncestors ( int  threshold,
bool  cut_with_word_coverage,
bool  exclude_itself 
)

Definition at line 1435 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::endl(), PLearn::extractAncestors(), PLearn::Set::insert(), PLearn::Set::merge(), and PLearn::Set::size().

Referenced by PLearn::GraphicalBiText::compute_nodemap().

{
#ifdef VERBOSE
    cout << "extracting ancestors... ";
#endif

    if (cut_with_word_coverage && !are_descendants_extracted)
    {
        cout << "*** I need to extract descendants before I can extract ancestors with a word coverage threshold ***" << endl;
        extractDescendants();
    }

    // synsets -> ancestors
    int n_sense_ancestors = 0;
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        int ss = it->first;
        Node* node = it->second;
        Set ancestors;
        if (cut_with_word_coverage)
            extractAncestors(node, ancestors, threshold);
        else
            extractAncestors(node, ancestors, 1, threshold);
        if (!exclude_itself)
            ancestors.insert(ss);
        synset_to_ancestors[ss] = ancestors;
        n_sense_ancestors += ancestors.size();
    }

    are_ancestors_extracted = true;

    // words -> ancestors
    int n_word_ancestors = 0;
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        int word_id = it->first;
        Set senses = it->second;
        Set word_ancestors;
        for (SetIterator it = senses.begin(); it != senses.end(); it++)
        {
            int sense_id = *it;
            Set ancestors = getSynsetAncestors(sense_id);
            word_ancestors.merge(ancestors);
            word_ancestors.insert(sense_id);
        }
        word_to_ancestors[word_id] = word_ancestors;
        n_word_ancestors += word_ancestors.size();
    }

#ifdef VERBOSE
    cout << "(" << n_sense_ancestors << " sense ancestors, " << n_word_ancestors << " word ancestors)" << endl;
#endif

}

Here is the call graph for this function:

Here is the caller graph for this function:
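
The typical call sequence, sketched below with an arbitrary word-coverage threshold (the value is illustrative, not a PLearn recommendation), is to extract descendants first and then ancestors, after which the per-word ancestor sets become available:

    #include <iostream>
    #include <WordNetOntology.h>

    // Pre-compute descendant/ancestor sets, then inspect one word's ancestors.
    void demoAncestors(PLearn::WordNetOntology& onto, int word_id)
    {
        onto.extractDescendants();
        onto.extractAncestors(100,   // word_coverage_threshold (illustrative)
                              true,  // cut_with_word_coverage
                              true); // exclude_itself
        PLearn::Set ancestors = onto.getWordAncestors(word_id);
        std::cout << "word " << word_id << " has " << ancestors.size()
                  << " ancestor synsets" << std::endl;
    }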

void PLearn::WordNetOntology::extractDescendants ( Node *  node,
Set  sense_descendants,
Set  word_descendants 
)

Definition at line 1859 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), PLearn::Set::insert(), and PLearn::Node::ss_id.

{
    int ss_id = node->ss_id;
    if (isSense(ss_id)) // is a sense
    {
        sense_descendants.insert(ss_id);
        for (SetIterator it = sense_to_words[ss_id].begin(); it != sense_to_words[ss_id].end(); ++it)
        {
            int word_id = *it;
            word_descendants.insert(word_id);
        }
    } 
    for (SetIterator it = node->children.begin(); it != node->children.end(); ++it)
    {
        extractDescendants(synsets[*it], sense_descendants, word_descendants);
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractDescendants ( )

Definition at line 1833 of file WordNetOntology.cc.

References PLearn::endl(), and PLearn::Set::size().

{
#ifdef VERBOSE
    cout << "extracting descendants... ";
#endif

    int n_sense_descendants = 0;
    int n_word_descendants = 0;
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        Set sense_descendants;
        Set word_descendants;
        extractDescendants(it->second, sense_descendants, word_descendants);
        synset_to_sense_descendants[it->first] = sense_descendants;
        synset_to_word_descendants[it->first] = word_descendants;
        n_sense_descendants += sense_descendants.size();
        n_word_descendants += word_descendants.size();
    }
    are_descendants_extracted = true;

#ifdef VERBOSE
    cout << "(" << n_sense_descendants << " senses, " << n_word_descendants << " words)" << endl;
#endif

}

Here is the call graph for this function:

Node * PLearn::WordNetOntology::extractOntology ( SynsetPtr  ssp)

Definition at line 729 of file WordNetOntology.cc.

References PLearn::Node::children, PLearn::Set::contains(), PLearn::Node::fnum, PLearn::Node::gloss, PLearn::Node::hereiam, PLearn::Set::insert(), PLearn::Node::is_unknown, PLearn::Node::parents, PLearn::removeDelimiters(), PLearn::Node::ss_id, and PLearn::Node::syns.

{
    Node* node = new Node(synset_index++); // increment synset counter
    node->syns = getSynsetWords(ssp);
    string defn = ssp->defn;
    removeDelimiters(defn, "*", "%");
    removeDelimiters(defn, "|", "/");
    node->gloss = defn;
    node->hereiam = ssp->hereiam;
    node->fnum = ssp->fnum;
    node->is_unknown = false;
    synsets[node->ss_id] = node;

    ssp = ssp->ptrlist;
  
    while (ssp != NULL)
    {
        Node* parent_node = checkForAlreadyExtractedSynset(ssp);
        if (parent_node == NULL) // create new synset Node
        {
            parent_node = extractOntology(ssp);
        }
    
        if (parent_node->ss_id != node->ss_id && !(node->children.contains(parent_node->ss_id))) // avoid cycles (that are in fact due to errors in the WordNet database)
        {                                      
            node->parents.insert(parent_node->ss_id);
            parent_node->children.insert(node->ss_id);
        }

        ssp = ssp->nextss;
    }
    return node;
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractPredominentSyntacticClasses ( )

Definition at line 2579 of file WordNetOntology.cc.

{
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        int word_id = it->first;
        word_to_predominent_pos[word_id] = getPredominentSyntacticClassForWord(word_id);
    }
    are_predominent_pos_extracted = true;
}
bool PLearn::WordNetOntology::extractSenses ( string  original_word,
string  processed_word,
int  wn_pos_type 
)

Definition at line 604 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, PLearn::cstr(), PLearn::Set::insert(), NOUN_TYPE, PLearn::Node::ss_id, PLearn::Node::types, VERB_TYPE, and PLearn::ws().

{

    //char* cword = const_cast<char*>(processed_word.c_str());
    char* cword = cstr(processed_word);
    SynsetPtr ssp = NULL;
    IndexPtr idx = getindex(cword, wn_pos_type);

    switch (wn_pos_type)
    {
    case NOUN_TYPE:
        ssp = findtheinfo_ds(cword, NOUN, -HYPERPTR, ALLSENSES);
        break;
    case VERB_TYPE:
        ssp = findtheinfo_ds(cword, VERB, -HYPERPTR, ALLSENSES);
        break;
    case ADJ_TYPE:
        ssp = findtheinfo_ds(cword, ADJ, -HYPERPTR, ALLSENSES);
        break;
    case ADV_TYPE:
        ssp = findtheinfo_ds(cword, ADV, -HYPERPTR, ALLSENSES);
        break;
    }

    if (ssp == NULL)
    {
        return false;
    } else
    {
        switch (wn_pos_type)
        {
        case NOUN_TYPE:
            noun_count++;
            break;
        case VERB_TYPE:
            verb_count++;
            break;
        case ADJ_TYPE:
            adj_count++;
            break;
        case ADV_TYPE:
            adv_count++;
            break;
        }

        int wnsn = 0;
        // extract all senses for a given word
        while (ssp != NULL)
        {
            wnsn++;
            Node* node = checkForAlreadyExtractedSynset(ssp);
            if (node == NULL) // not found
            {
        
                switch (wn_pos_type)
                {
                case NOUN_TYPE:
                    noun_sense_count++;
                    break;
                case VERB_TYPE:
                    verb_sense_count++;
                    break;
                case ADJ_TYPE:
                    adj_sense_count++;
                    break;
                case ADV_TYPE:
                    adv_sense_count++;
                    break;
                }

                // create a new sense (1rst-level synset Node)
                node = extractOntology(ssp);
            }

            int word_id = words_id[original_word];
            node->types.insert(wn_pos_type);
            word_to_senses[word_id].insert(node->ss_id);
            sense_to_words[node->ss_id].insert(word_id);

            char *charsk = WNSnsToStr(idx, wnsn);
            string sense_key(charsk);

      

            pair<int, string> ss(word_id,sense_key);
            if (sense_key_to_ss_id.find(ss) == sense_key_to_ss_id.end())
                sense_key_to_ss_id[ss] = node->ss_id;
            pair<int, int> ws(word_id, node->ss_id);

            //cout << sense_key << "word_id: " << word_id << "synset " << node->ss_id << endl;

            // e.g. green%1:13:00:: and greens%1:13:00:: 
            // correspond to the same synset
            if (ws_id_to_sense_key.find(ws) == ws_id_to_sense_key.end())
                ws_id_to_sense_key[ws] = sense_key;

            // warning : should check if inserting a given sense twice (vector)
            // (should not happen if vocabulary contains only unique values)
            switch(wn_pos_type)
            {
            case NOUN_TYPE:
                word_to_noun_wnsn[word_id].push_back(node->ss_id);
                word_to_noun_senses[word_id].insert(node->ss_id);
                break;
            case VERB_TYPE:
                word_to_verb_wnsn[word_id].push_back(node->ss_id);
                word_to_verb_senses[word_id].insert(node->ss_id);
                break;
            case ADJ_TYPE:
                word_to_adj_wnsn[word_id].push_back(node->ss_id);
                word_to_adj_senses[word_id].insert(node->ss_id);
                break;
            case ADV_TYPE:
                word_to_adv_wnsn[word_id].push_back(node->ss_id);
                word_to_adv_senses[word_id].insert(node->ss_id);
                break;
            }

            ssp = ssp->nextss;
        }
        free_syns(ssp);
        return true;
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractStrictDescendants ( Node *  node,
Set  sense_descendants,
Set  word_descendants 
)

Definition at line 1878 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), PLearn::Set::insert(), and PLearn::Node::ss_id.

{
    int ss_id = node->ss_id;
    if (isSense(ss_id)){ // is a sense
        for (SetIterator it = sense_to_words[ss_id].begin(); it != sense_to_words[ss_id].end(); ++it){
            int word_id = *it;
            word_descendants.insert(word_id);
        }
    } 
    for (SetIterator it = node->children.begin(); it != node->children.end(); ++it){
        extractDescendants(synsets[*it], sense_descendants, word_descendants);
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::extractTaggedWordFrequencies ( map< int, map< int, int > > &  word_senses_to_tagged_frequencies)

Definition at line 489 of file WordNetOntology.cc.

References PLearn::cstr(), PLearn::ShellProgressBar::draw(), PLearn::endl(), i, and PLearn::Node::ss_id.

{
    // NOTE: The 'word_senses_to_tagged_frequencies' is a map where the key to the
    //       map is a 'word_id' and the value associated with the key is another
    //       map. This other map takes a 'synset_id' as its key and associates
    //       a frequency value. Thus the data structure associates a frequency
    //       to a (word_id, synset_id) couple.

    cout << "in WordNetOntology::extractTaggedWordFrequencies()" << endl;
    vector<int> dbases;
    dbases.reserve(4);
    dbases.push_back(NOUN);
    dbases.push_back(VERB);
    dbases.push_back(ADJ);
    dbases.push_back(ADV);
    int dbases_size = dbases.size();

    word_senses_to_tagged_frequencies.clear();
    vector<string> syns;
    string gloss;
    long offset;
    int fnum;

    int total_senses_found = 0;
    ShellProgressBar progress(0, words.size() * dbases_size, "[Extracting word-sense tagged frequencies]", 50);
    progress.draw();
    int ws2tf_i = 0;

    // Go through all databases
    for (int i = 0; i < dbases_size; ++i) {
        // Go through all words in the ontology
        for (map<int, string>::iterator w_it = words.begin(); w_it != words.end(); ++w_it) {
            progress.update(++ws2tf_i);
            char *cword = cstr(w_it->second);
            wnresults.numforms = wnresults.printcnt = 0; // Useful??
            SynsetPtr ssp = findtheinfo_ds(cword, dbases[i], -HYPERPTR, ALLSENSES);
            if (ssp != NULL) {
                IndexPtr idx;
                SynsetPtr cursyn;
                while ((idx = getindex(cword, dbases[i])) != NULL) {
                    cword = NULL;
                    if (idx->tagged_cnt) {
                        map<int, map<int, int> >::iterator ws2tf_it = word_senses_to_tagged_frequencies.find(w_it->first);
                        if (ws2tf_it == word_senses_to_tagged_frequencies.end()) {
                            word_senses_to_tagged_frequencies[w_it->first] = map<int, int>();
                            ws2tf_it = word_senses_to_tagged_frequencies.find(w_it->first);
                        }
                        //for (int l = 0; l < idx->tagged_cnt; ++l) {
                        for (int l = 0; l < idx->sense_cnt; ++l) {
                            if ((cursyn = read_synset(dbases[i], idx->offset[l], idx->wd)) != NULL) {
                                //int freq = GetTagcnt(idx, l + 1);
                                int freq = -1;
                                wnresults.OutSenseCount[wnresults.numforms]++;
                                // Find if synset is in ontology
                                //if (freq) {
                                // NOTE: We extract zero frequencies even though
                                //       this is not useful...
                                syns = getSynsetWords(cursyn);
                                gloss = string(cursyn->defn);
                                offset = cursyn->hereiam;
                                fnum = cursyn->fnum;
                                
                                Node *node = findSynsetFromSynsAndGloss(syns, gloss, offset, fnum);
                                if (node != NULL) {
                                    (ws2tf_it->second)[node->ss_id] = freq;
                                    ++total_senses_found;
                                }
                                //}
                                free_synset(cursyn);
                            }
                        }
                    }
                    wnresults.numforms++;
                    free_index(idx);
                } // while()
                free_syns(ssp);
            } // ssp != NULL
        }
    }
    progress.done();
    cout << "FOUND A GRAND TOTAL OF " << total_senses_found << " senses" << endl;
}

Here is the call graph for this function:
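
A sketch of collecting and traversing the resulting table (variable names are illustrative):

    #include <iostream>
    #include <map>
    #include <WordNetOntology.h>

    // Dump every (word_id, synset_id) -> tagged-frequency entry.
    void dumpTaggedFrequencies(PLearn::WordNetOntology& onto)
    {
        std::map<int, std::map<int, int> > freqs;
        onto.extractTaggedWordFrequencies(freqs);
        for (std::map<int, std::map<int, int> >::const_iterator w_it = freqs.begin();
             w_it != freqs.end(); ++w_it)
            for (std::map<int, int>::const_iterator s_it = w_it->second.begin();
                 s_it != w_it->second.end(); ++s_it)
                std::cout << "word " << w_it->first << ", synset " << s_it->first
                          << ", frequency " << s_it->second << std::endl;
    }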

void PLearn::WordNetOntology::extractWord ( string  original_word,
int  wn_pos_type,
bool  trim_word,
bool  stem_word,
bool  remove_underscores 
)

Definition at line 383 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, ALL_WN_TYPE, PLearn::extractSenses(), NOUN_TYPE, NULL_TAG, PLWARNING, PLearn::stemWord(), PLearn::trimWord(), PLearn::underscore_to_space(), and VERB_TYPE.

Referenced by main().

{
    bool found_noun = false;
    bool found_verb = false;
    bool found_adj = false;
    bool found_adv = false;
    bool found_stemmed_noun = false;
    bool found_stemmed_verb = false;
    bool found_stemmed_adj = false;
    bool found_stemmed_adv = false;
    bool found = false;
    string processed_word = original_word;
    string stemmed_word;

    words[word_index] = original_word;
    words_id[original_word] = word_index;

    if (!catchSpecialTags(original_word))
    {
        if (trim_word)
            processed_word = trimWord(original_word);

        if (remove_underscores)
            processed_word = underscore_to_space(processed_word);

        if (processed_word == NULL_TAG)
        {
            out_of_wn_word_count++;
            processUnknownWord(word_index);
            word_is_in_wn[word_index] = false;
        } else
        {
            if (wn_pos_type == NOUN_TYPE || wn_pos_type == ALL_WN_TYPE)
                found_noun = extractSenses(original_word, processed_word, NOUN_TYPE);
            if (wn_pos_type == VERB_TYPE || wn_pos_type == ALL_WN_TYPE)
                found_verb = extractSenses(original_word, processed_word, VERB_TYPE);
            if (wn_pos_type == ADJ_TYPE || wn_pos_type == ALL_WN_TYPE)
                found_adj = extractSenses(original_word, processed_word, ADJ_TYPE);
            if (wn_pos_type == ADV_TYPE || wn_pos_type == ALL_WN_TYPE)
                found_adv = extractSenses(original_word, processed_word, ADV_TYPE);
    
            if (stem_word)
            {
                if (wn_pos_type == NOUN_TYPE || wn_pos_type == ALL_WN_TYPE)
                {
                    stemmed_word = stemWord(processed_word, NOUN);
                    if (stemmed_word != processed_word)
                        found_stemmed_noun = extractSenses(original_word, stemmed_word, NOUN_TYPE);
                }
                if (wn_pos_type == VERB_TYPE || wn_pos_type == ALL_WN_TYPE)
                {
                    stemmed_word = stemWord(processed_word, VERB);
                    if (stemmed_word != processed_word)
                        found_stemmed_verb = extractSenses(original_word, stemmed_word, VERB_TYPE);
                }
                if (wn_pos_type == ADJ_TYPE || wn_pos_type == ALL_WN_TYPE)
                {
                    stemmed_word = stemWord(processed_word, ADJ);
                    if (stemmed_word != processed_word)
                        found_stemmed_adj = extractSenses(original_word, stemmed_word, ADJ_TYPE);
                }
                if (wn_pos_type == ADV_TYPE || wn_pos_type == ALL_WN_TYPE)
                {
                    stemmed_word = stemWord(processed_word, ADV);
                    if (stemmed_word != processed_word)
                        found_stemmed_adv = extractSenses(original_word, stemmed_word, ADV_TYPE);
                }
            }
    
            found = (found_noun || found_verb || found_adj || found_adv ||
                     found_stemmed_noun || found_stemmed_verb || found_stemmed_adj || found_stemmed_adv);
            if (found)
            {
                in_wn_word_count++;
                word_is_in_wn[word_index] = true;
            } else
            {
                out_of_wn_word_count++;
                processUnknownWord(word_index);
                word_is_in_wn[word_index] = false;
            }
        }
    } else // word is a "special tag" (<OOV>, etc...)
    {
        out_of_wn_word_count++;
        word_is_in_wn[word_index] = false;
    }
    if (word_to_senses[word_index].isEmpty())
        PLWARNING("word %d (%s) was not processed correctly (found = %d)", word_index, words[word_index].c_str(), found);
    word_index++;

}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::extractWordHighLevelSenses ( int  noun_depth,
int  verb_depth,
int  adj_depth,
int  adv_depth,
int  unk_depth 
)

Definition at line 2652 of file WordNetOntology.cc.

References ADJ_SS_ID, ADV_SS_ID, PLearn::Set::begin(), PLearn::Set::end(), NOUN_SS_ID, SUPER_UNKNOWN_SS_ID, and VERB_SS_ID.

{
    Set noun_categories;
    getDescendantCategoriesAtLevel(NOUN_SS_ID, 0, noun_depth, noun_categories);
    for (SetIterator sit = noun_categories.begin(); sit != noun_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }
    Set verb_categories;
    getDescendantCategoriesAtLevel(VERB_SS_ID, 0, verb_depth, verb_categories);
    for (SetIterator sit = verb_categories.begin(); sit != verb_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }
    Set adj_categories;
    getDescendantCategoriesAtLevel(ADJ_SS_ID, 0, adj_depth, adj_categories);
    for (SetIterator sit = adj_categories.begin(); sit != adj_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }
    Set adv_categories;
    getDescendantCategoriesAtLevel(ADV_SS_ID, 0, adv_depth, adv_categories);
    for (SetIterator sit = adv_categories.begin(); sit != adv_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }
    Set unk_categories;
    getDescendantCategoriesAtLevel(SUPER_UNKNOWN_SS_ID, 0, unk_depth, unk_categories);
    for (SetIterator sit = unk_categories.begin(); sit != unk_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }
  
    // This role is deprecated: integrity verification : to each word should be assigned at least 1 high-level sense
    // The new role is now to assign "normal" senses to word that didn't get high-level senses
    for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
    {
        int word_id = it->first;
        if (word_to_high_level_senses[word_id].size() == 0)
            word_to_high_level_senses[word_id] = word_to_senses[word_id];
        // This is deprecated: PLWARNING("word '%s' (%d) has no high-level sense", words[word_id].c_str(), word_id);
    }

    are_word_high_level_senses_extracted = true;
}

Here is the call graph for this function:
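
A hedged sketch of using the resulting high-level senses (the depth values are arbitrary examples, not recommended settings):

    #include <iostream>
    #include <WordNetOntology.h>

    // Map words to coarse categories a fixed depth below the POS roots,
    // then read back the high-level senses of a single word.
    void demoHighLevelSenses(PLearn::WordNetOntology& onto, int word_id)
    {
        onto.extractWordHighLevelSenses(3, 2, 1, 1, 1); // noun, verb, adj, adv, unk depths
        PLearn::Set hl_senses = onto.getWordHighLevelSenses(word_id);
        std::cout << "word " << word_id << " has " << hl_senses.size()
                  << " high-level senses" << std::endl;
    }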

void PLearn::WordNetOntology::extractWordNounAndVerbHighLevelSenses ( int  noun_depth,
int  verb_depth 
)

Definition at line 2728 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), NOUN_SS_ID, and VERB_SS_ID.

{
    for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
    {
        int word_id = it->first;
        word_to_high_level_senses[word_id] = word_to_adv_senses[word_id];
        word_to_high_level_senses[word_id].merge(word_to_adj_senses[word_id]);
    }

    Set noun_categories;
    getDescendantCategoriesAtLevel(NOUN_SS_ID, 0, noun_depth, noun_categories);
    for (SetIterator sit = noun_categories.begin(); sit != noun_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }
    Set verb_categories;
    getDescendantCategoriesAtLevel(VERB_SS_ID, 0, verb_depth, verb_categories);
    for (SetIterator sit = verb_categories.begin(); sit != verb_categories.end(); ++sit)
    {
        int ss_id = *sit;
        Set word_descendants = getSynsetWordDescendants(ss_id);
        for (SetIterator wit = word_descendants.begin(); wit != word_descendants.end(); ++wit)
        {
            int word_id = *wit;
            word_to_high_level_senses[word_id].insert(ss_id);
        }
    }

    // BIG HACK!!!
    for (map<int, Set>::iterator it = word_to_under_target_level_high_level_senses.begin(); it != word_to_under_target_level_high_level_senses.end(); ++it)
    {
        word_to_high_level_senses[it->first].merge(it->second);
    }
  
    for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
    {
        int word_id = it->first;
        if (word_to_high_level_senses[word_id].size() == 0)
            word_to_high_level_senses[word_id] = word_to_senses[word_id];
    }

    are_word_high_level_senses_extracted = true;
}

Here is the call graph for this function:

void PLearn::WordNetOntology::fillTempWordToHighLevelSensesTVecMap ( ) [inline]

Definition at line 441 of file WordNetOntology.h.

References PLearn::Set::begin(), PLearn::Set::end(), getWordHighLevelSenses(), temp_word_to_high_level_senses, w, and words.

    {
        for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
        {
            int w = it->first;
            Set hl_senses = getWordHighLevelSenses(w);
            for (SetIterator sit = hl_senses.begin(); sit != hl_senses.end(); ++sit)
                temp_word_to_high_level_senses[w].push_back(*sit);
        }
    }

Here is the call graph for this function:

void PLearn::WordNetOntology::fillTempWordToSensesTVecMap ( ) [inline]

Definition at line 396 of file WordNetOntology.h.

References PLearn::Set::begin(), PLearn::Set::end(), temp_word_to_adj_senses, temp_word_to_adv_senses, temp_word_to_noun_senses, temp_word_to_senses, temp_word_to_verb_senses, w, word_to_adj_senses, word_to_adv_senses, word_to_noun_senses, word_to_senses, and word_to_verb_senses.

Referenced by PLearn::GraphicalBiText::build_().

    {
        for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
        {
            int w = it->first;
            Set senses = it->second;
            for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
                temp_word_to_senses[w].push_back(*sit);
        }

        for (map<int, Set>::iterator it = word_to_noun_senses.begin(); it != word_to_noun_senses.end(); ++it)
        {
            int w = it->first;
            Set senses = it->second;
            for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
                temp_word_to_noun_senses[w].push_back(*sit);
        }

        for (map<int, Set>::iterator it = word_to_verb_senses.begin(); it != word_to_verb_senses.end(); ++it)
        {
            int w = it->first;
            Set senses = it->second;
            for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
                temp_word_to_verb_senses[w].push_back(*sit);
        }

        for (map<int, Set>::iterator it = word_to_adj_senses.begin(); it != word_to_adj_senses.end(); ++it)
        {
            int w = it->first;
            Set senses = it->second;
            for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
                temp_word_to_adj_senses[w].push_back(*sit);
        }

        for (map<int, Set>::iterator it = word_to_adv_senses.begin(); it != word_to_adv_senses.end(); ++it)
        {
            int w = it->first;
            Set senses = it->second;
            for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
                temp_word_to_adv_senses[w].push_back(*sit);
        }
    }

Here is the call graph for this function:

Here is the caller graph for this function:
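
As the caller note above suggests, GraphicalBiText fills these cached TVec maps once and then queries them repeatedly. A minimal sketch of that pattern (assuming 'onto' is already built):

    #include <iostream>
    #include <WordNetOntology.h>

    // Cache the word -> senses mapping as TVec<int>, then fetch it for one word.
    void demoSenseVectors(PLearn::WordNetOntology& onto, int word_id)
    {
        onto.fillTempWordToSensesTVecMap();
        PLearn::TVec<int> senses = onto.getSensesForWord(word_id);
        std::cout << "word " << word_id << " has " << senses.size()
                  << " cached senses" << std::endl;
    }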

void PLearn::WordNetOntology::finalize ( )

Node * PLearn::WordNetOntology::findSynsetFromSynsAndGloss ( const vector< string > &  syns,
const string &  gloss,
const long  offset,
const int  fnum 
)

Definition at line 478 of file WordNetOntology.cc.

References PLearn::Node::fnum, PLearn::Node::gloss, PLearn::Node::hereiam, and PLearn::Node::syns.

{
    for (map<int, Node *>::iterator it = synsets.begin(); it != synsets.end(); ++it) {
        Node *node = it->second;
        if ((node->gloss == gloss) && (node->syns == syns) && (node->hereiam == offset) && (node->fnum == fnum))
            return node;
    }
    return NULL;
}
Set PLearn::WordNetOntology::getAllCategories ( )

Definition at line 2035 of file WordNetOntology.cc.

References PLearn::Set::insert().

Referenced by PLearn::GraphicalBiText::check_set_pA(), PLearn::GraphicalBiText::compute_BN_likelihood(), PLearn::GraphicalBiText::compute_efs_likelihood(), PLearn::GraphicalBiText::compute_likelihood(), PLearn::GraphicalBiText::compute_pTC(), and PLearn::GraphicalBiText::print().

{
    Set categories;
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        categories.insert(it->first);
    }
    return categories;
}

Here is the call graph for this function:

Here is the caller graph for this function:

Set PLearn::WordNetOntology::getAllSenses ( )

Definition at line 2025 of file WordNetOntology.cc.

References PLearn::Set::insert().

{
    Set senses;
    for (map<int, Set>::iterator it = sense_to_words.begin(); it != sense_to_words.end(); ++it)
    {
        senses.insert(it->first);
    }
    return senses;
}

Here is the call graph for this function:

Set PLearn::WordNetOntology::getAllWords ( ) const

Definition at line 2016 of file WordNetOntology.cc.

References PLearn::Set::insert().

Referenced by PLearn::GraphicalBiText::build_(), PLearn::GraphicalBiText::compute_likelihood(), PLearn::GraphicalBiText::compute_nodemap(), PLearn::GraphicalBiText::computeKL(), PLearn::GraphicalBiText::init(), main(), PLearn::GraphicalBiText::print(), PLearn::GraphicalBiText::print_sensemap(), and PLearn::GraphicalBiText::test_WSD().

{
    Set all_words;
    for (map<int, string>::const_iterator it = words.begin(); it != words.end(); ++it){
        all_words.insert(it->first);
    }
    return all_words;
}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::getCategoriesAtLevel ( int  ss_id,
int  cur_level,
int  target_level,
set< int > &  categories 
)

Definition at line 2193 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), and PLearn::Node::parents.

{
    Node* node = synsets[ss_id];
    if (cur_level == target_level && !isTopLevelCategory(ss_id))
    {
        categories.insert(ss_id);
    } else
    {
        for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
        {
            getCategoriesAtLevel(*it, cur_level + 1, target_level, categories);
        }
    }  
}

Here is the call graph for this function:

void PLearn::WordNetOntology::getCategoriesUnderLevel ( int  ss_id,
int  cur_level,
int  target_level,
Set  categories 
)

Definition at line 2208 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), and PLearn::Node::parents.

{
    Node* node = synsets[ss_id];
    if (!isTopLevelCategory(ss_id))
        categories.insert(ss_id);
    if (cur_level != target_level)
    {
        for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
            getCategoriesUnderLevel(*it, cur_level + 1, target_level, categories);
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::getDescendantCategoriesAtLevel ( int  ss_id,
int  cur_level,
int  target_level,
Set  categories 
)

Definition at line 2627 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), and PLearn::Set::insert().

{
    if (isSynset(ss_id))
    {
        Node* node = synsets[ss_id];

        // WARNING: HERE IS A HUGE HACK!!!
        if (cur_level < target_level && isSense(ss_id))
        {
            Set words = sense_to_words[ss_id];
            for (SetIterator wit = words.begin(); wit != words.end(); ++wit)
                word_to_under_target_level_high_level_senses[*wit].insert(ss_id);
        }

        if (cur_level == target_level)
            categories.insert(ss_id);
        else
        {
            for (SetIterator it = node->children.begin(); it != node->children.end(); ++it)
                getDescendantCategoriesAtLevel(*it, cur_level + 1, target_level, categories);
        }  
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::getDownToUpParentCategoriesAtLevel ( int  ss_id,
int  target_level,
Set  categories,
int  cur_level = 0 
)

Definition at line 2180 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), and PLearn::Node::parents.

{
    Node* node = synsets[ss_id];
    if (cur_level == target_level && !isTopLevelCategory(ss_id))
    {
        categories.insert(ss_id);
    } else
    {
        for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
            getDownToUpParentCategoriesAtLevel(*it, target_level, categories, cur_level + 1);
    }  
}

Here is the call graph for this function:

TVec<int> PLearn::WordNetOntology::getHighLevelSensesForWord ( int  w) [inline]

Definition at line 451 of file WordNetOntology.h.

References temp_word_to_high_level_senses, and w.

int PLearn::WordNetOntology::getMaxSynsetId ( )

Definition at line 2500 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::build_().

{
    return synsets.rbegin()->first;
}

Here is the caller graph for this function:

int PLearn::WordNetOntology::getPredominentSyntacticClassForWord ( int  word_id)

Definition at line 2535 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, PLearn::Set::begin(), PLearn::Set::end(), NOUN_TYPE, PLWARNING, UNDEFINED_TYPE, and VERB_TYPE.

{
#ifndef NOWARNING
    if (!isWord(word_id))
        PLWARNING("asking for a non-word id (%d)", word_id);
#endif
    if (are_predominent_pos_extracted)
        return word_to_predominent_pos[word_id];
    int n_noun = 0;
    int n_verb = 0;
    int n_adj = 0;
    int n_adv = 0;
    Set senses = word_to_senses[word_id];
    for (SetIterator it = senses.begin(); it != senses.end(); ++it)
    {
        int sense_id = *it;
        int type = getSyntacticClassForSense(sense_id);
        switch (type)
        {
        case NOUN_TYPE:
            n_noun++;
            break;
        case VERB_TYPE:
            n_verb++;
            break;
        case ADJ_TYPE:
            n_adj++;
            break;
        case ADV_TYPE:
            n_adv++;
        }
    }
    if (n_noun == 0 && n_verb == 0 && n_adj == 0 && n_adv == 0)
        return UNDEFINED_TYPE;
    else if (n_noun >= n_verb && n_noun >= n_adj && n_noun >= n_adv)
        return NOUN_TYPE;
    else if (n_verb >= n_noun && n_verb >= n_adj && n_verb >= n_adv)
        return VERB_TYPE;
    else if (n_adj >= n_noun && n_adj >= n_verb && n_adj >= n_adv)
        return ADJ_TYPE;
    else 
        return ADV_TYPE;
}

Here is the call graph for this function:
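
A short usage sketch (assumptions: "onto" is an initialized WordNetOntology and "dog" is in its vocabulary); the return value is one of the *_TYPE constants referenced above.

    // Sketch only: the word is illustrative.
    int word_id = onto.getWordId("dog");
    int pos = onto.getPredominentSyntacticClassForWord(word_id);
    if (pos == NOUN_TYPE)
        cout << "predominantly a noun" << endl;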

Node* PLearn::WordNetOntology::getRootSynset ( ) [inline]

Definition at line 285 of file WordNetOntology.h.

References ROOT_SS_ID, and synsets.

{ return synsets[ROOT_SS_ID]; }
TVec<int> PLearn::WordNetOntology::getSecondLevelSensesForWord ( int  w) [inline]

Definition at line 453 of file WordNetOntology.h.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), PLearn::Node::parents, PLearn::TVec< T >::push_back(), synsets, w, and word_to_senses.

    {
        Set sl_senses;
        Set senses = word_to_senses[w];
        for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
        {
            int s = *sit;
            Node* node = synsets[s];
            for (SetIterator ssit = node->parents.begin(); ssit != node->parents.end(); ++ssit)
            {
                sl_senses.insert(*ssit);
            }
        }
        TVec<int> sl_senses_vec;
        for (SetIterator slit = sl_senses.begin(); slit != sl_senses.end(); ++slit)
            sl_senses_vec.push_back(*slit);
        return sl_senses_vec;
    }

Here is the call graph for this function:

string PLearn::WordNetOntology::getSenseKey ( int  word_id,
int  ss_id 
) const

Definition at line 1630 of file WordNetOntology.cc.

References PLWARNING, and PLearn::ws().

Referenced by PLearn::GraphicalBiText::compute_nodemap(), PLearn::GraphicalBiText::init(), PLearn::GraphicalBiText::init_WSD(), PLearn::GraphicalBiText::print(), PLearn::GraphicalBiText::senseTagBitext(), PLearn::GraphicalBiText::set_nodemap(), PLearn::GraphicalBiText::test_WSD(), and PLearn::GraphicalBiText::update_WSD_model().

{
    pair<int, int> ws(word_id, ss_id);
    if (ws_id_to_sense_key.find(ws) == ws_id_to_sense_key.end()){
        PLWARNING("getSenseKey: can't find sense key word %d ss_id %d",word_id,ss_id);
        return "";
    }
    return ws_id_to_sense_key.find(ws)->second;

}

Here is the call graph for this function:

Here is the caller graph for this function:
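
When a sense-key file has been loaded, getSenseKey() and getSynsetIDForSenseKey() act as inverse mappings; a round-trip sketch ("onto" and the word are illustrative assumptions):

    // Sketch only: assumes load(voc_file, synset_file, ontology_file, sense_key_file).
    int word_id = onto.getWordId("bank");
    Set senses = onto.getWordSenses(word_id);
    for (SetIterator it = senses.begin(); it != senses.end(); ++it)
    {
        string key = onto.getSenseKey(word_id, *it); // "" when no key is known
        if (key != "" && onto.getSynsetIDForSenseKey(word_id, key) != *it)
            PLWARNING("inconsistent sense key for synset %d", *it);
    }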

TVec<int> PLearn::WordNetOntology::getSensesForWord ( int  w) const [inline]
int PLearn::WordNetOntology::getSenseSize ( ) [inline]

Definition at line 290 of file WordNetOntology.h.

References sense_to_words.

Referenced by PLearn::GraphicalBiText::build_(), and PLearn::TextSenseSequenceVMatrix::build_().

{ return sense_to_words.size(); }

Here is the caller graph for this function:

Node * PLearn::WordNetOntology::getSynset ( int  id)

Definition at line 1781 of file WordNetOntology.cc.

References PLWARNING.

Referenced by PLearn::GraphicalBiText::check_set_pA(), PLearn::GraphicalBiText::compute_node_level(), PLearn::GraphicalBiText::compute_nodemap(), PLearn::GraphicalBiText::compute_pMC(), PLearn::GraphicalBiText::getDeepestCommonAncestor(), PLearn::GraphicalBiText::printNode(), and PLearn::GraphicalBiText::set_nodemap().

{
#ifndef NOWARNING
    if (!isSynset(id))
    {
        PLWARNING("asking for a non-synset id (%d)", id);
        return NULL;
    }
#endif
#ifndef NOWARNING
    if (synsets.find(id) == synsets.end()) {
        PLWARNING("Asking for a non-existent synset id (%d)", id);
        return NULL;
    }
#endif
    return synsets[id];
}

Here is the caller graph for this function:

Set PLearn::WordNetOntology::getSynsetAncestors ( int  id,
int  max_level = -1 
) const

Definition at line 1558 of file WordNetOntology.cc.

References PLERROR, and PLWARNING.

{
    if (are_ancestors_extracted)
    {
        if (!isSynset(id))
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-synset id (%d)", id);
#endif
        }
        return synset_to_ancestors.find(id)->second;
    } else
    {
        PLERROR("You must extract ancestors before calling getSynsetAncestors const ");
    }
}
Set PLearn::WordNetOntology::getSynsetAncestors ( int  id,
int  max_level = -1 
)

Definition at line 1527 of file WordNetOntology.cc.

References PLearn::extractAncestors(), and PLWARNING.

Referenced by PLearn::GraphicalBiText::compute_BN_likelihood(), PLearn::GraphicalBiText::compute_efs_likelihood(), and PLearn::GraphicalBiText::getDeepestCommonAncestor().

{
    if (are_ancestors_extracted)
    {
        if (!isSynset(id))
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-synset id (%d)", id);
#endif
        }
        return synset_to_ancestors[id];
    } else
    {
        Set ancestors;
        if (isSynset(id))
        {
#ifndef NOWARNING
            PLWARNING("using non-pre-computed version");
#endif
            extractAncestors(synsets[id], ancestors, 1, max_level);
        } else
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-synset id (%d)", id);
#endif
        }
        return ancestors;
    }
}

Here is the call graph for this function:

Here is the caller graph for this function:
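
A usage sketch ("onto" and the synset id are assumptions): when ancestors were pre-computed the stored set is returned directly; otherwise this non-const overload traverses the graph on the fly, issuing a warning unless NOWARNING is defined.

    // Sketch only: unlimited depth (max_level = -1 is the default).
    Set ancestors = onto.getSynsetAncestors(ss_id);
    for (SetIterator it = ancestors.begin(); it != ancestors.end(); ++it)
        cout << *it << " ";
    cout << endl;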

int PLearn::WordNetOntology::getSynsetIDForSenseKey ( int  word_id,
string  sense_key 
) const

Definition at line 1641 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::init(), PLearn::GraphicalBiText::init_WSD(), and PLearn::GraphicalBiText::test_WSD().

{
    pair<int, string> ss(word_id,sense_key);
    map< pair<int, string>, int>::const_iterator it = sense_key_to_ss_id.find(ss);
    if(it == sense_key_to_ss_id.end())
        return -1;
    else
        return it->second;
}

Here is the caller graph for this function:

Set PLearn::WordNetOntology::getSynsetParents ( int  id)

Definition at line 1576 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::distribute_pS_on_ancestors().

{
    return  synsets[id]->parents;
}

Here is the caller graph for this function:

Set PLearn::WordNetOntology::getSynsetSenseDescendants ( int  id)

Definition at line 1893 of file WordNetOntology.cc.

References PLWARNING.

{
    if (are_descendants_extracted)
    {
        if (!isSynset(id))
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-synset id (%d)", id);
#endif
        }
        return synset_to_sense_descendants[id];
    }

    Set sense_descendants;
    if (isSynset(id))
    {
#ifndef NOWARNING
        PLWARNING("using non-pre-computed version");
#endif
        extractDescendants(synsets[id], sense_descendants, Set());
    } else
    {
#ifndef NOWARNING
        PLWARNING("asking for non-synset id (%d)", id);
#endif
    }
    return sense_descendants;
}
int PLearn::WordNetOntology::getSynsetSize ( ) [inline]

Definition at line 291 of file WordNetOntology.h.

References synsets.

{ return synsets.size(); }
Set PLearn::WordNetOntology::getSynsetWordDescendants ( int  id)

Definition at line 1922 of file WordNetOntology.cc.

References PLWARNING.

Referenced by PLearn::GraphicalBiText::printNode().

{
    if (are_descendants_extracted)
    {
        if (!isSynset(id))
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-synset id (%d)", id);
#endif
        }
        return synset_to_word_descendants[id];
    }

    Set word_descendants;
    if (isSynset(id))
    {
#ifndef NOWARNING
        PLWARNING("using non-pre-computed version");
#endif
        extractDescendants(synsets[id], Set(), word_descendants);
    } else
    {
#ifndef NOWARNING
        PLWARNING("asking for non-synset id (%d)", id);
#endif
    }
    return word_descendants;
}

Here is the caller graph for this function:

vector< string > PLearn::WordNetOntology::getSynsetWords ( SynsetPtr  ssp)

Definition at line 1056 of file WordNetOntology.cc.

References i, and PLearn::removeDelimiters().

{
    vector<string> syns;
    for (int i = 0; i < ssp->wcount; i++)
    {
        strsubst(ssp->words[i], '_', ' ');
        string word_i = ssp->words[i];
        removeDelimiters(word_i, "*", "%");
        removeDelimiters(word_i, "|", "/");
        syns.push_back(word_i);
    }
    return syns;
}

Here is the call graph for this function:

Set PLearn::WordNetOntology::getSyntacticClassesForWord ( int  word_id)

Definition at line 2505 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), PLWARNING, and PLearn::Node::types.

{
#ifndef NOWARNING
    if (!isWord(word_id))
        PLWARNING("asking for a non-word id (%d)", word_id);
#endif
    Set syntactic_classes;
    Set senses = word_to_senses[word_id];
    for (SetIterator it = senses.begin(); it != senses.end(); ++it)
    {
        Node* node = synsets[*it];
        for (SetIterator tit = node->types.begin(); tit != node->types.end(); ++tit)
            syntactic_classes.insert(*tit);
    }
    return syntactic_classes;
}

Here is the call graph for this function:

int PLearn::WordNetOntology::getSyntacticClassForSense ( int  sense_id)

Definition at line 2522 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLWARNING, PLearn::Set::size(), and PLearn::Node::types.

{
#ifndef NOWARNING
    if (!isSense(sense_id))
        PLWARNING("asking for a non-sense id (%d)", sense_id);
#endif
    Node* sense = synsets[sense_id];
    if (sense->types.size() > 1)
        PLWARNING("a sense has more than 1 POS type");
    int type = *(sense->types.begin());
    return type;
}

Here is the call graph for this function:

TVec<int> PLearn::WordNetOntology::getThirdLevelSensesForWord ( int  w) [inline]

Definition at line 472 of file WordNetOntology.h.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), PLearn::Node::parents, PLearn::TVec< T >::push_back(), synsets, w, and word_to_senses.

    {
        Set tl_senses;
        Set senses = word_to_senses[w];
        for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
        {
            int s = *sit;
            Node* node = synsets[s];
            for (SetIterator slit = node->parents.begin(); slit != node->parents.end(); ++slit)
            {
                int sl_sense = *slit;
                Node* node = synsets[sl_sense];
                for (SetIterator tlit = node->parents.begin(); tlit != node->parents.end(); ++tlit)
                {
                    tl_senses.insert(*tlit);
                }
            }
        }
        TVec<int> tl_senses_vec;
        for (SetIterator tlit = tl_senses.begin(); tlit != tl_senses.end(); ++tlit)
            tl_senses_vec.push_back(*tlit);
        return tl_senses_vec;
    }

Here is the call graph for this function:
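
These two inline helpers climb one and two parent links, respectively, above every sense of a word; a comparison sketch ("onto" and the word are assumptions):

    // Sketch only.
    int w = onto.getWordId("car");
    TVec<int> second_level = onto.getSecondLevelSensesForWord(w);
    TVec<int> third_level = onto.getThirdLevelSensesForWord(w);
    cout << second_level.length() << " second-level and "
         << third_level.length() << " third-level senses" << endl;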

int PLearn::WordNetOntology::getVocSize ( ) [inline]

Definition at line 289 of file WordNetOntology.h.

References words.

Referenced by PLearn::GraphicalBiText::build_(), and PLearn::GraphicalBiText::print().

{ return words.size(); }

Here is the caller graph for this function:

string PLearn::WordNetOntology::getWord ( int  id) const

Definition at line 1676 of file WordNetOntology.cc.

References NULL_TAG, and PLWARNING.

Referenced by PLearn::GraphicalBiText::build_(), main(), and PLearn::TextSenseSequenceVMatrix::permute().

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return NULL_TAG;
    }
#endif
    return words.find(id)->second;
}

Here is the caller graph for this function:

Set PLearn::WordNetOntology::getWordAdjSenses ( int  id)

Definition at line 1745 of file WordNetOntology.cc.

References PLWARNING.

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return Set();
    }
#endif
    return word_to_adj_senses[id];
}
Set PLearn::WordNetOntology::getWordAdvSenses ( int  id)

Definition at line 1757 of file WordNetOntology.cc.

References PLWARNING.

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return Set();
    }
#endif
    return word_to_adv_senses[id];
}
Set PLearn::WordNetOntology::getWordAncestors ( int  id,
int  max_level = -1 
)

Definition at line 1581 of file WordNetOntology.cc.

References PLearn::Set::insert(), PLearn::Set::merge(), and PLWARNING.

{
    if (are_ancestors_extracted)
    {
        if (!isWord(id))
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-word id (%d)", id);
#endif
        }
        return word_to_ancestors[id];
    } else
    {
        Set word_ancestors;
        if (isWord(id))
        {
#ifndef NOWARNING
            PLWARNING("using non-pre-computed version");
#endif
            for (SetIterator it = word_to_senses[id].begin(); it != word_to_senses[id].end(); ++it)
            {
                int sense_id = *it;
                word_ancestors.insert(sense_id);
                Set synset_ancestors = getSynsetAncestors(sense_id, max_level);
                word_ancestors.merge(synset_ancestors);
            }
        } else
        {
#ifndef NOWARNING
            PLWARNING("asking for a non-word id");
#endif
        }

        return word_ancestors;
    }
}

Here is the call graph for this function:
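
As the fallback branch above shows, a word's ancestors are the union of its sense ids and each sense's synset ancestors; a usage sketch (word and depth are illustrative):

    // Sketch only: ancestors up to 2 levels above each sense of the word.
    int word_id = onto.getWordId("tree");
    Set ancestors = onto.getWordAncestors(word_id, 2);
    cout << ancestors.size() << " ancestor synsets" << endl;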

Set PLearn::WordNetOntology::getWordHighLevelSenses ( int  id)

Definition at line 1705 of file WordNetOntology.cc.

References PLERROR, and PLWARNING.

Referenced by fillTempWordToHighLevelSensesTVecMap().

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return Set();
    }
#endif

    if (!are_word_high_level_senses_extracted)
        PLERROR("word high-level senses have not been extracted");

    return word_to_high_level_senses[id];
}

Here is the caller graph for this function:

int PLearn::WordNetOntology::getWordId ( string  word) const

Definition at line 1651 of file WordNetOntology.cc.

References OOV_TAG.

Referenced by PLearn::GraphicalBiText::build_(), PLearn::GraphicalBiText::init(), PLearn::GraphicalBiText::init_WSD(), main(), PLearn::TextSenseSequenceVMatrix::permute(), and PLearn::GraphicalBiText::test_WSD().

{
    map<string, int>::const_iterator it = words_id.find(word);
    if (it == words_id.end())
    {
        map<string, int>::const_iterator iit = words_id.find(OOV_TAG);
        if (iit == words_id.end())
            return -1;
        else
            return iit->second;
    } else
    {
        return it->second;
    }

// #ifndef NOWARNING
//   if (words_id.find(word) == words_id.end())
//   {
//     PLWARNING("asking for a non-word (%s)", word.c_str());
//     return -1;
//   }
// #endif
//   return words_id[word];
}

Here is the caller graph for this function:
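
Because of the OOV fallback, an out-of-vocabulary string is mapped to the id of OOV_TAG when that tag is present, and -1 is returned only when OOV_TAG itself is missing; a sketch (the query word is illustrative):

    // Sketch only: "zyzzyva" is assumed not to be in the vocabulary.
    int id = onto.getWordId("zyzzyva");
    if (id == -1)
        cout << "no OOV_TAG entry in the vocabulary" << endl;
    else
        cout << "mapped to: " << onto.getWord(id) << endl; // typically OOV_TAG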

Set PLearn::WordNetOntology::getWordNounSenses ( int  id)

Definition at line 1721 of file WordNetOntology.cc.

References PLWARNING.

Referenced by PLearn::GraphicalBiText::init().

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return Set();
    }
#endif
    return word_to_noun_senses[id];
}

Here is the caller graph for this function:

map<int,string> PLearn::WordNetOntology::getWords ( ) [inline]

Definition at line 393 of file WordNetOntology.h.

References words.

{return words;};
int PLearn::WordNetOntology::getWordSenseIdForSenseKey ( string  lemma,
string  lexsn,
string  word 
)

Definition at line 887 of file WordNetOntology.cc.

References PLearn::cstr(), PLearn::Node::fnum, PLearn::Node::gloss, PLearn::Node::hereiam, PLearn::Node::ss_id, PLearn::Node::syns, and WNO_ERROR.

Referenced by main().

{
    string sense_key = lemma + "%" + lexsn;
    char* csense_key = cstr(sense_key);
    SynsetPtr ssp = GetSynsetForSense(csense_key);
    if (ssp != NULL)
    {
        vector<string> synset_words = getSynsetWords(ssp);
        string gloss = ssp->defn;
        int word_id = words_id[word];
        long offset = ssp->hereiam;
        int fnum = ssp->fnum;
        for (SetIterator it = word_to_senses[word_id].begin(); it != word_to_senses[word_id].end(); ++it)
        {
            Node* node = synsets[*it];
            if (node->syns == synset_words && node->gloss == gloss && node->hereiam == offset && node->fnum == fnum)
                return node->ss_id;
        }
    }
    return WNO_ERROR;
}

Here is the call graph for this function:

Here is the caller graph for this function:

int PLearn::WordNetOntology::getWordSenseIdForWnsn ( string  word,
int  wn_pos_type,
int  wnsn 
)

Definition at line 826 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, NOUN_TYPE, PLWARNING, VERB_TYPE, and WNO_ERROR.

{
    if (!isWord(word))
    {
#ifndef NOWARNING
        PLWARNING("asking for a non-word (%s)", word.c_str());
#endif
        return WNO_ERROR;
    }

    int word_id = words_id[word];
    switch (wn_pos_type)
    {
    case NOUN_TYPE:
        if (wnsn > (int)word_to_noun_wnsn[word_id].size())
        {
#ifndef NOWARNING
            PLWARNING("invalid noun wnsn (%d)", wnsn);
#endif
            return WNO_ERROR;
        } else
            return word_to_noun_wnsn[word_id][wnsn - 1];
        break;
    case VERB_TYPE:
        if (wnsn > (int)word_to_verb_wnsn[word_id].size())
        {
#ifndef NOWARNING
            PLWARNING("invalid verb wnsn (%d)", wnsn);
#endif
            return WNO_ERROR;
        } else
            return word_to_verb_wnsn[word_id][wnsn - 1];
        break;
    case ADJ_TYPE:
        if (wnsn > (int)word_to_adj_wnsn[word_id].size())
        {
#ifndef NOWARNING
            PLWARNING("invalid adj wnsn (%d)", wnsn);
#endif
            return WNO_ERROR;
        } else
            return word_to_adj_wnsn[word_id][wnsn - 1];
        break;
    case ADV_TYPE:
        if (wnsn > (int)word_to_adv_wnsn[word_id].size())
        {
#ifndef NOWARNING
            PLWARNING("invalid adv wnsn (%d)", wnsn);
#endif
            return WNO_ERROR;
        } else
            return word_to_adv_wnsn[word_id][wnsn - 1];
        break;
    default:
#ifndef NOWARNING
        PLWARNING("undefined type");
#endif
        return WNO_ERROR;
    }
}
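
A sketch mapping a WordNet sense number (wnsn) back to an internal sense id; the word and sense number are illustrative, and WNO_ERROR signals an invalid request.

    // Sketch only: third noun sense of "line", if it exists.
    int sense_id = onto.getWordSenseIdForWnsn("line", NOUN_TYPE, 3);
    if (sense_id != WNO_ERROR)
        onto.printSynset(sense_id);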
Set PLearn::WordNetOntology::getWordSenses ( int  id) const

Definition at line 1688 of file WordNetOntology.cc.

References PLWARNING.

Referenced by PLearn::GraphicalBiText::compute_BN_likelihood(), PLearn::GraphicalBiText::compute_nodemap(), PLearn::GraphicalBiText::compute_pTC(), main(), PLearn::GraphicalBiText::print_sensemap(), and PLearn::GraphicalBiText::set_nodemap().

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return Set();
    }
#endif
    map<int, Set>::const_iterator it = word_to_senses.find(id);
    if(it==word_to_senses.end()){
        return Set();
    }else{
        return it->second;
    }
}

Here is the caller graph for this function:

int PLearn::WordNetOntology::getWordSenseUniqueId ( int  word,
int  sense 
)

Definition at line 2778 of file WordNetOntology.cc.

References PLearn::ws().

{
    if (!are_word_sense_unique_ids_computed)
        computeWordSenseUniqueIds();
    pair<int, int> ws(word, sense);
    if (word_sense_to_unique_id.find(ws) == word_sense_to_unique_id.end())
        return -1;
    return word_sense_to_unique_id[ws];
}

Here is the call graph for this function:

int PLearn::WordNetOntology::getWordSenseUniqueIdSize ( )
Set PLearn::WordNetOntology::getWordsForSense ( int  id)

Definition at line 1769 of file WordNetOntology.cc.

References PLWARNING.

Referenced by PLearn::TextSenseSequenceVMatrix::build_().

{
#ifndef NOWARNING
    if (!isSense(id))
    {
        PLWARNING("asking for a non-sense id (%d)", id);
        return Set();
    }
#endif
    return sense_to_words[id];
}

Here is the caller graph for this function:

map<string,int> PLearn::WordNetOntology::getWordsId ( ) [inline]

Definition at line 394 of file WordNetOntology.h.

References words_id.

{return words_id;};
Set PLearn::WordNetOntology::getWordVerbSenses ( int  id)

Definition at line 1733 of file WordNetOntology.cc.

References PLWARNING.

{
#ifndef NOWARNING
    if (!isWord(id))
    {
        PLWARNING("asking for a non-word id (%d)", id);
        return Set();
    }
#endif
    return word_to_verb_senses[id];
}
bool PLearn::WordNetOntology::hasSenseInWordNet ( string  word,
int  wn_pos_type 
)

Definition at line 352 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, PLearn::cstr(), NOUN_TYPE, and VERB_TYPE.

{
    //char* cword = const_cast<char*>(word.c_str());
    char* cword = cstr(word);
    SynsetPtr ssp = NULL;

    switch (wn_pos_type)
    {
    case NOUN_TYPE:
        ssp = findtheinfo_ds(cword, NOUN, -HYPERPTR, ALLSENSES);
        break;
    case VERB_TYPE:
        ssp = findtheinfo_ds(cword, VERB, -HYPERPTR, ALLSENSES);
        break;
    case ADJ_TYPE:
        ssp = findtheinfo_ds(cword, ADJ, -HYPERPTR, ALLSENSES);
        break;
    case ADV_TYPE:
        ssp = findtheinfo_ds(cword, ADV, -HYPERPTR, ALLSENSES);
        break;
    }

    bool ssp_is_null = (ssp == NULL);

    delete [] cword; // cstr() is assumed to return a new[]-allocated buffer
    free_syns(ssp);

    return !ssp_is_null;

}

Here is the call graph for this function:
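
This probes the WordNet C library directly rather than the loaded ontology; a sketch (the word is illustrative):

    // Sketch only: check which parts of speech WordNet knows for a surface form.
    bool as_noun = onto.hasSenseInWordNet("run", NOUN_TYPE);
    bool as_verb = onto.hasSenseInWordNet("run", VERB_TYPE);
    cout << "noun: " << as_noun << ", verb: " << as_verb << endl;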

void PLearn::WordNetOntology::init ( bool  the_differentiate_unknown_words = true)

Definition at line 106 of file WordNetOntology.cc.

References EOS_SS_ID.

{
    if (wninit() != 0) {
//    PLERROR("WordNet init error");
    }

    noun_count = 0;
    verb_count = 0;
    adj_count = 0;
    adv_count = 0;

    synset_index = EOS_SS_ID + 1; // first synset id
    word_index = 0;
    unknown_sense_index = 0;

    noun_sense_count = 0;
    verb_sense_count = 0;
    adj_sense_count = 0;
    adv_sense_count = 0;
  
    in_wn_word_count = 0;
    out_of_wn_word_count = 0;

    are_ancestors_extracted = false;
    are_descendants_extracted = false;
    are_predominent_pos_extracted = false;
    are_word_high_level_senses_extracted = false;
    are_word_sense_unique_ids_computed = false;

    n_word_high_level_senses = 0;

    differentiate_unknown_words = the_differentiate_unknown_words;
}
void PLearn::WordNetOntology::intersectAncestorsAndSenses ( Set  categories,
Set  senses 
)

Definition at line 2078 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::clear(), PLearn::Set::contains(), PLearn::Set::end(), and PLearn::Set::insert().

{
    // pour tous les mappings "mot -> ancetres", fait une intersection de "ancetres"
    // avec "categories"
    for (map<int, Set>::iterator it = word_to_ancestors.begin(); it != word_to_ancestors.end(); ++it)
    {
        it->second.intersection(categories);
    }

    // pour tous les mappings "synset -> ancetres" (ou "synset" = "sense" U "category")
    // enleve le mapping complet, si "synset" (la cle) n'intersecte pas avec "categories"
    Set keys_to_be_removed;
    for (map<int, Set>::iterator it = synset_to_ancestors.begin(); it != synset_to_ancestors.end(); ++it)
    {
        if (!categories.contains(it->first) && !senses.contains(it->first))
            keys_to_be_removed.insert(it->first);
    }
    // purge synset_to_ancestors
    for (SetIterator it = keys_to_be_removed.begin(); it != keys_to_be_removed.end(); ++it)
    {
        synset_to_ancestors.erase(*it);
        synsets.erase(*it);
    }

    // pour tous les mappings "synset -> ancetres" restants (ou "synset" = "sense" U "category")
    // fait une intersection de "ancetres" avec "categories"
    for (map<int, Set>::iterator it = synset_to_ancestors.begin(); it != synset_to_ancestors.end(); ++it)
    {
        it->second.intersection(categories); 
    }

    // pour tous les mappings "mot -> senses", fait une intersection de "senses"
    // avec "senses"
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        it->second.intersection(senses);
    }

    keys_to_be_removed.clear();
    for (map<int, Set>::iterator it = sense_to_words.begin(); it != sense_to_words.end(); ++it)
    {
        if (!senses.contains(it->first))
            keys_to_be_removed.insert(it->first);
    }

    for (SetIterator it = keys_to_be_removed.begin(); it != keys_to_be_removed.end(); ++it)
    {
        sense_to_words.erase(*it);
    }
}

Here is the call graph for this function:

bool PLearn::WordNetOntology::isCategory ( int  id)

Definition at line 1988 of file WordNetOntology.cc.

{
    return isSynset(id);
}
bool PLearn::WordNetOntology::isInWordNet ( string  word,
bool  trim_word = true,
bool  stem_word = true,
bool  remove_undescores = false 
)

Definition at line 303 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, NOUN_TYPE, NULL_TAG, PLearn::stemWord(), PLearn::trimWord(), PLearn::underscore_to_space(), and VERB_TYPE.

Referenced by main().

{
    if (trim_word)
        word = trimWord(word);

    if (remove_undescores)
        word = underscore_to_space(word);

    if (word == NULL_TAG)
    {
        return false;
    } else
    {
        bool found_noun = hasSenseInWordNet(word, NOUN_TYPE);
        bool found_verb = hasSenseInWordNet(word, VERB_TYPE);
        bool found_adj = hasSenseInWordNet(word, ADJ_TYPE);
        bool found_adv = hasSenseInWordNet(word, ADV_TYPE);
        bool found_stemmed_noun = false;
        bool found_stemmed_verb = false;
        bool found_stemmed_adj = false;
        bool found_stemmed_adv = false;
    
        if (stem_word)
        {
            string stemmed_word = stemWord(word, NOUN);
            if (stemmed_word != word)
                found_stemmed_noun = hasSenseInWordNet(stemmed_word, NOUN_TYPE);
            stemmed_word = stemWord(word, VERB);
            if (stemmed_word != word)
                found_stemmed_verb = hasSenseInWordNet(stemmed_word, VERB_TYPE);
            stemmed_word = stemWord(word, ADJ);
            if (stemmed_word != word)
                found_stemmed_adj = hasSenseInWordNet(stemmed_word, ADJ_TYPE);
            stemmed_word = stemWord(word, ADV);
            if (stemmed_word != word)
                found_stemmed_adv = hasSenseInWordNet(stemmed_word, ADV_TYPE);
        }
    
        if (found_noun || found_verb || found_adj || found_adv ||
            found_stemmed_noun || found_stemmed_verb || found_stemmed_adj || found_stemmed_adv)
        {
            return true;
        } else
        {
            return false;
        }
    }
}

Here is the call graph for this function:

Here is the caller graph for this function:
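
Trimming and stemming are enabled by default, so inflected surface forms are usually recognized; a sketch (the word is illustrative):

    // Sketch only: "running" is expected to be found via stemming to "run".
    if (onto.isInWordNet("running"))
        cout << "covered by WordNet" << endl;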

bool PLearn::WordNetOntology::isInWordNet ( int  word_id) const

Definition at line 1618 of file WordNetOntology.cc.

References PLWARNING.

{
#ifndef NOWARNING
    if (!isWord(word_id))
    {
        PLWARNING("asking for a non-word id (%d)", word_id);
        return false;
    }
#endif
    return word_is_in_wn.find(word_id)->second;
}
bool PLearn::WordNetOntology::isPureCategory ( int  id)

Definition at line 1993 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::compute_pTC().

{
    return (isCategory(id) && !isSense(id));
}

Here is the caller graph for this function:

bool PLearn::WordNetOntology::isPureSense ( int  id)

Definition at line 1983 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::compute_pTC().

{
    return (isSense(id) && synsets[id]->children.size() == 0);
}

Here is the caller graph for this function:

bool PLearn::WordNetOntology::isSense ( int  id)

Definition at line 1978 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::init(), and PLearn::GraphicalBiText::init_WSD().

{
    return (sense_to_words.find(id) != sense_to_words.end());
}

Here is the caller graph for this function:

bool PLearn::WordNetOntology::isSynset ( int  id) const

Definition at line 1998 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::compute_node_level(), PLearn::GraphicalBiText::compute_nodemap(), PLearn::GraphicalBiText::compute_pMC(), and PLearn::GraphicalBiText::set_nodemap().

{
    return (synsets.find(id) != synsets.end());
}

Here is the caller graph for this function:

bool PLearn::WordNetOntology::isSynsetUnknown ( int  id)

Definition at line 2145 of file WordNetOntology.cc.

{
    return synsets[id]->is_unknown;
}
bool PLearn::WordNetOntology::isTopLevelCategory ( int  ss_id)

Definition at line 2616 of file WordNetOntology.cc.

References ADJ_SS_ID, ADV_SS_ID, BOS_SS_ID, EOS_SS_ID, NOUN_SS_ID, NUMERIC_SS_ID, OOV_SS_ID, PROPER_NOUN_SS_ID, PUNCTUATION_SS_ID, ROOT_SS_ID, STOP_SS_ID, SUPER_UNKNOWN_SS_ID, UNDEFINED_SS_ID, and VERB_SS_ID.

{
    return (ss_id == ROOT_SS_ID || ss_id == SUPER_UNKNOWN_SS_ID || 
            ss_id == NOUN_SS_ID || ss_id == VERB_SS_ID || 
            ss_id == ADJ_SS_ID || ss_id == ADV_SS_ID ||
            ss_id == OOV_SS_ID || ss_id == PROPER_NOUN_SS_ID ||
            ss_id == NUMERIC_SS_ID || ss_id == PUNCTUATION_SS_ID ||
            ss_id == STOP_SS_ID || ss_id == UNDEFINED_SS_ID ||
            ss_id == BOS_SS_ID || ss_id == EOS_SS_ID);
}
bool PLearn::WordNetOntology::isWord ( string  word)

Definition at line 1973 of file WordNetOntology.cc.

{
    return (words_id.find(word) != words_id.end());
}
bool PLearn::WordNetOntology::isWord ( int  id) const
bool PLearn::WordNetOntology::isWordUnknown ( string  word)

Definition at line 2129 of file WordNetOntology.cc.

{
    return isWordUnknown(words_id[word]);
}
bool PLearn::WordNetOntology::isWordUnknown ( int  id)

Definition at line 2134 of file WordNetOntology.cc.

{
    bool is_unknown = true;
    for (SetIterator it = word_to_senses[id].begin(); it != word_to_senses[id].end(); ++it)
    {
        if (!synsets[*it]->is_unknown)
            is_unknown = false;
    }
    return is_unknown;
}
void PLearn::WordNetOntology::linkUpperCategories ( )

Definition at line 974 of file WordNetOntology.cc.

References ADJ_SS_ID, ADJ_TYPE, ADV_SS_ID, ADV_TYPE, PLearn::Set::contains(), PLearn::Set::insert(), NOUN_SS_ID, NOUN_TYPE, PLearn::Node::parents, ROOT_SS_ID, PLearn::Set::size(), PLearn::Node::types, VERB_SS_ID, and VERB_TYPE.

{
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        int ss_id = it->first;
        Node* node = it->second;
        if (node->parents.size() == 0 && ss_id != ROOT_SS_ID) 
        {
            bool link_directly_to_root = true;
            if (node->types.contains(NOUN_TYPE))
            {
                node->parents.insert(NOUN_SS_ID);
                synsets[NOUN_SS_ID]->children.insert(ss_id);
                link_directly_to_root = false;
            }
            if (node->types.contains(VERB_TYPE))
            {
                node->parents.insert(VERB_SS_ID);
                synsets[VERB_SS_ID]->children.insert(ss_id);
                link_directly_to_root = false;
            }
            if (node->types.contains(ADJ_TYPE))
            {
                node->parents.insert(ADJ_SS_ID);
                synsets[ADJ_SS_ID]->children.insert(ss_id);
                link_directly_to_root = false;
            }
            if (node->types.contains(ADV_TYPE))
            {
                node->parents.insert(ADV_SS_ID);
                synsets[ADV_SS_ID]->children.insert(ss_id);
                link_directly_to_root = false;
            }
            if (link_directly_to_root)
            {
                node->parents.insert(ROOT_SS_ID);
                synsets[ROOT_SS_ID]->children.insert(ss_id);
            }
        }
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::load ( string  voc_file,
string  synset_file,
string  ontology_file 
)

Definition at line 1267 of file WordNetOntology.cc.

References ADJ_TYPE, ADV_TYPE, PLearn::ShellProgressBar::done(), PLearn::ShellProgressBar::draw(), PLearn::Node::fnum, PLearn::ShellProgressBar::getAsciiFileLineCount(), PLearn::Node::gloss, PLearn::Node::hereiam, i, PLearn::Set::insert(), NOUN_TYPE, PLERROR, PLWARNING, PLearn::split(), PLearn::Node::ss_id, PLearn::startsWith(), PLearn::Node::syns, PLearn::tobool(), PLearn::toint(), PLearn::tolong(), PLearn::Node::types, PLearn::ShellProgressBar::update(), and VERB_TYPE.

{
    ifstream if_voc(voc_file.c_str());
    if (!if_voc) PLERROR("can't open %s", voc_file.c_str());
    ifstream if_synsets(synset_file.c_str());
    if (!if_synsets) PLERROR("can't open %s", synset_file.c_str());
    ifstream if_ontology(ontology_file.c_str());
    if (!if_ontology) PLERROR("can't open %s", ontology_file.c_str());

    string line;
    int word_count = 0;
    while (!if_voc.eof()) // voc
    {
        getline(if_voc, line, '\n');
        if (line == "") continue;
        if (line[0] == '#' && line[1] == '#') continue;
        words_id[line] = word_count;
        word_to_senses[word_count] = Set();
        words[word_count++] = line;
    }
    if_voc.close();
    word_index = word_count;
    int line_no = 0;
    int ss_id = -1;
    while (!if_synsets.eof()) // synsets
    {
        ++line_no;
        getline(if_synsets, line, '\n');
        if (line == "") continue;
        if (line[0] == '#') continue;
        vector<string> tokens = split(line, "*");
        if (tokens.size() != 3 && tokens.size() != 4)
            PLERROR("the synset file has not the expected format, line %d = '%s'", line_no, line.c_str());
        if(tokens.size() == 3 && line_no == 1)
            PLWARNING("The synset file doesn't contain enough information for correct representation of the synsets!");
        ss_id = toint(tokens[0]);
        vector<string> type_tokens = split(tokens[1], "|");
        vector<string> ss_tokens = split(tokens[2], "|");
        vector<string> offset_tokens;
        if(tokens.size() == 4) offset_tokens = split(tokens[3],"|");
        Node* node = new Node(ss_id);
        for (unsigned int i = 0; i < type_tokens.size(); i++)
            node->types.insert(toint(type_tokens[i]));
        node->gloss = ss_tokens[0];
        //node->syns.reserve(ss_tokens.size() - 1);
        for (unsigned int i = 1; i < ss_tokens.size(); i++)
        {
            if (i == 1) // extract unknown_sense_index
                if (startsWith(ss_tokens[i], "UNKNOWN_SENSE_"))
                    unknown_sense_index = toint(ss_tokens[i].substr(14, ss_tokens[i].size())) + 1;
            node->syns.push_back(ss_tokens[i]);
        }
        if(tokens.size() == 4)
        {
            node->fnum = toint(offset_tokens[0]);
            node->hereiam = tolong(offset_tokens[1]);
        }
        synsets[node->ss_id] = node;
    }
    synset_index = ss_id + 1;
    if_synsets.close();
    int n_lines = ShellProgressBar::getAsciiFileLineCount(ontology_file);
    ShellProgressBar progress(0, n_lines - 1, "loading ontology", 50);
    progress.draw();
    int counter = 0;
    while (!if_ontology.eof()) // ontology
    {
        getline(if_ontology, line, '\n');
        progress.update(counter++);
        if (line == "") continue;
        if (line[0] == '#') continue;
        vector<string> tokens = split(line);
        if (tokens.size() != 3) 
        {
            PLERROR("the ontology file has not the expected format");
        }
        int id = toint(tokens[1]);
        int child_id;

        if (tokens[0] == "w")
        {
            bool is_in_wn = tobool(tokens[2]);
            word_is_in_wn[id] = is_in_wn;
            if (is_in_wn)
                in_wn_word_count++;
            else
                out_of_wn_word_count++;
        } else if (tokens[0] == "s")
        {
            child_id = toint(tokens[2]);
            word_to_senses[child_id].insert(id);
            sense_to_words[id].insert(child_id);
            for (SetIterator tit = synsets[id]->types.begin(); tit != synsets[id]->types.end(); ++tit)
            {
                int type = *tit;
                switch (type)
                {
                case NOUN_TYPE:
                    word_to_noun_senses[child_id].insert(id);
                    break;
                case VERB_TYPE:
                    word_to_verb_senses[child_id].insert(id);
                    break;
                case ADJ_TYPE:
                    word_to_adj_senses[child_id].insert(id);
                    break;
                case ADV_TYPE:
                    word_to_adv_senses[child_id].insert(id);
                    break;          
                }
            }
        } else if (tokens[0] == "c")
        {
            child_id = toint(tokens[2]);
            synsets[child_id]->parents.insert(id);
            synsets[id]->children.insert(child_id);
        }
    }
    if_ontology.close();
    progress.done();
    if_voc.close();
    if_synsets.close();
    if_ontology.close();
}

Here is the call graph for this function:
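
A loading sketch; the file names are placeholders. As the parsing code above shows, the vocabulary file holds one word per line, the synset file holds '*'-separated records (synset id, '|'-separated POS types, gloss and synonyms, optionally fnum|offset), and the ontology file holds "w", "s" and "c" records linking words, senses and categories.

    // Sketch only: file names are placeholders.
    WordNetOntology onto;
    onto.load("vocabulary.txt", "synsets.txt", "ontology.txt");
    onto.printStats();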

void PLearn::WordNetOntology::load ( string  voc_file,
string  synset_file,
string  ontology_file,
string  sense_key_file 
)

Definition at line 1392 of file WordNetOntology.cc.

References PLearn::load(), PLERROR, PLearn::split(), PLearn::toint(), and PLearn::ws().

{
    load(voc_file, synset_file, ontology_file);
  
    ifstream if_sense_key(sense_key_file.c_str());
    if (!if_sense_key) PLERROR("can't open %s", sense_key_file.c_str());

    string line;
    while (!if_sense_key.eof()) // sense keys
    {
        getline(if_sense_key, line, '\n');
        if (line == "") continue;
        if (line[0] == '#' && line[1] == '#') continue;
        vector<string> tokens = split(line, " ");
        if(tokens.size() != 3)
            PLERROR("sense_key_file %s not compatible", sense_key_file.c_str());
        pair<int, string> ss(toint(tokens[1]), tokens[0]);
        sense_key_to_ss_id[ss] = toint(tokens[2]);
        pair<int, int> ws(toint(tokens[1]), toint(tokens[2]));
        ws_id_to_sense_key[ws] = tokens[0];
    }
    if_sense_key.close();
}

Here is the call graph for this function:

void PLearn::WordNetOntology::loadPredominentSyntacticClasses ( string  file)

Definition at line 2601 of file WordNetOntology.cc.

References PLearn::pgetline(), and PLearn::toint().

{
    ifstream in_pos(file.c_str());
    int line_counter = 0;
    while (!in_pos.eof())
    {
        string line = pgetline(in_pos);
        if (line == "") continue;
        int pos = toint(line);
        word_to_predominent_pos[line_counter++] = pos;
    }
    in_pos.close();
    are_predominent_pos_extracted = true;
}

Here is the call graph for this function:

void PLearn::WordNetOntology::lookForSpecialTags ( )

Definition at line 805 of file WordNetOntology.cc.

References NUMERIC_SS_ID, OOV_SS_ID, PLWARNING, PROPER_NOUN_SS_ID, PUNCTUATION_SS_ID, and STOP_SS_ID.

{
    if (!isSense(OOV_SS_ID))
        PLWARNING("no <oov> tag found");
    if (!isSense(PROPER_NOUN_SS_ID))
        PLWARNING("no <proper_noun> tag found");
    if (!isSense(NUMERIC_SS_ID))
        PLWARNING("no <numeric> tag found");
    if (!isSense(PUNCTUATION_SS_ID))
        PLWARNING("no <punctuation> tag found");
    if (!isSense(STOP_SS_ID))
        PLWARNING("no <stop> tag found");
}
int PLearn::WordNetOntology::overlappingSynsets ( int  ss_id1,
int  ss_id2 
)

Definition at line 2003 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::contains(), PLearn::Set::end(), PLearn::Set::insert(), and PLearn::Set::size().

Referenced by areOverlappingSynsets().

{
    Set words1 = sense_to_words[ss_id1];
    Set words2 = sense_to_words[ss_id2];
    Set overlap;
    for (SetIterator it1=words1.begin();it1!=words1.end();++it1)
        if (words2.contains(*it1))
            overlap.insert(*it1);
    //for (set<int>::iterator it=overlap.begin();it!=overlap.end();++it)
    //  cout << words[*it] << endl;
    return overlap.size();
}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::print ( bool  print_ontology = true)

Definition at line 1070 of file WordNetOntology.cc.

References PLearn::endl().

Referenced by main().

{
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        cout << words[it->first] << endl;
        for (SetIterator iit = it->second.begin(); iit != it->second.end(); ++iit)
        {
            printSynset(*iit, 1);
            if (print_ontology)
            {
                printOntology(synsets[*iit], 2);
            }
        }
    }
}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::printDescendants ( )

Definition at line 1951 of file WordNetOntology.cc.

{
/*
  if (!are_descendants_extracted)
  {
  extractDescendants();
  }
  for (map<int, Set>::iterator it = synset_to_descendants.begin(); it != synset_to_descendants.end(); ++it)
  {
  cout << it->first << " -> ";
  for (SetIterator iit = it->second.begin(); iit != it->second.end(); ++iit)
  cout << *iit << " ";
  cout << endl;
  }
*/
}
void PLearn::WordNetOntology::printInvertedSynsetOntology ( int  id,
int  level = 0 
)

Definition at line 2061 of file WordNetOntology.cc.

References PLWARNING.

{
    if (isSynset(id))
    {
        printSynset(id, level);
        for (SetIterator it = synsets[id]->children.begin(); it != synsets[id]->children.end(); ++it)
        {
            printInvertedSynsetOntology(*it, level + 1);
        }
    } else
    {
#ifndef NOWARNING
        PLWARNING("asking for a non-synset id (%d)", id);
#endif
    }
}
void PLearn::WordNetOntology::printNodes ( )

Definition at line 1416 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), PLearn::endl(), PLearn::Node::parents, and PLearn::Node::ss_id.

{
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        Node* node = it->second;
        cout << "Node id = " << node->ss_id << " | parents = ";
        for (SetIterator pit = node->parents.begin(); pit != node->parents.end(); ++pit)
        {
            cout << *pit << " ";
        }
        cout << " | children = ";
        for (SetIterator cit = node->children.begin(); cit != node->children.end(); ++cit)
        {
            cout << *cit << " ";
        }
        cout << endl;
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::printOntology ( Node node,
int  level = 0 
)

Definition at line 1086 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), and PLearn::Node::parents.

{
    for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
    {
        printSynset(*it, level);
        printOntology(synsets[*it], level + 1);
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::printStats ( )

Definition at line 1151 of file WordNetOntology.cc.

References PLearn::endl().

Referenced by main().

{
/*
  cout << getSenseSize() << " senses (" << noun_sense_count << " nouns, " << verb_sense_count << " verbs, " 
  << adj_sense_count << " adjectives, " << adv_sense_count << " adverbs) for " << getVocSize() << " words" << endl;
  cout << out_of_wn_word_count << " out-of-wordnet words" << endl;
  cout << in_wn_word_count << " in-wordnet words" << endl;
  cout << noun_count << " nouns" << endl;
  cout << verb_count << " verbs" << endl;
  cout << adj_count << " adjectives" << endl;
  cout << adv_count << " adverbs" << endl;
  cout << (double)getSenseSize() / (double)getVocSize() << " senses per word on average" << endl;
  int all_classes = noun_count + verb_count + adj_count + adv_count;
  cout << (double)all_classes / (double)in_wn_count << " classes per word on average" << endl;
*/
    cout << getVocSize() << " words in vocabulary" << endl;
    cout << in_wn_word_count << " in WN words" << endl;
    cout << out_of_wn_word_count << " out of WN words" << endl;
    cout << getSenseSize() << " senses (" << (real)getSenseSize() / (real)getVocSize() << " senses per word on average)" << endl;
    cout << getSynsetSize() << " categories (ontology : sense + category, possible overlap)" << endl;
    if (are_word_high_level_senses_extracted)
    {
        cout << n_word_high_level_senses << " high-level senses (" << (real)n_word_high_level_senses / (real)getVocSize() << " high-level senses per word on average)" << endl;
    }
}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::printSynset ( int  ss_id,
ostream &  sout,
int  indent_level = 0 
)

Definition at line 1134 of file WordNetOntology.cc.

References PLearn::endl(), and i.

{
    for (int i = 0; i < indent_level; i++) sout << "    "; // indent
    sout << "=> ";

    for (vector<string>::iterator it = synsets[ss_id]->syns.begin(); it != synsets[ss_id]->syns.end(); ++it)
    {
        sout << *it << ", ";
    }
    sout << " (" << ss_id << ")" << endl;

    for (int i = 0; i < indent_level; i++) sout << "    "; // indent
    sout << "gloss = " << synsets[ss_id]->gloss << endl;

}

Here is the call graph for this function:
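
A sketch directing the synset description to an arbitrary stream (requires <fstream>; the file name and synset id are illustrative):

    // Sketch only: dump one synset description into a file.
    ofstream out("synset_dump.txt");
    onto.printSynset(ss_id, out, 0);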

void PLearn::WordNetOntology::printSynset ( int  ss_id,
int  indent_level = 0 
)

Definition at line 1095 of file WordNetOntology.cc.

References PLearn::endl(), and i.

Referenced by PLearn::GraphicalBiText::compute_pMC(), and PLearn::GraphicalBiText::init().

{
    for (int i = 0; i < indent_level; i++) cout << "    "; // indent
    cout << "=> ";

    for (vector<string>::iterator it = synsets[ss_id]->syns.begin(); it != synsets[ss_id]->syns.end(); ++it)
    {
        cout << *it << ", ";
    }
    cout << " (" << ss_id << ")" << endl;
    for (int i = 0; i < indent_level; i++) cout << "    "; // indent
    cout << "fnum: " << synsets[ss_id]->fnum << "synset offset: " << synsets[ss_id]->hereiam << " gloss = " << synsets[ss_id]->gloss << endl;
    //cout << "syns = " << synsets[ss_id]->syns << endl;
//   cout << " {";
//   for (SetIterator it = synsets[ss_id]->types.begin(); it != synsets[ss_id]->types.end(); ++it)
//   {
//     int type = *it;
//     switch (type)
//     {
//     case NOUN_TYPE:
//       cout << "noun ";
//       break;
//     case VERB_TYPE:
//       cout << "verb ";
//       break;
//     case ADJ_TYPE:
//       cout << "adjective ";
//       break;
//     case ADV_TYPE:
//       cout << "adverb ";
//       break;
//     case UNDEFINED_TYPE:
//       cout << "undefined ";
//       break;
//     }
//   }
//   cout << "}" << endl;
}

Here is the call graph for this function:

Here is the caller graph for this function:

void PLearn::WordNetOntology::printSynsetAncestors ( )

Definition at line 1799 of file WordNetOntology.cc.

References PLearn::endl(), PLearn::extractAncestors(), and WORD_COVERAGE_THRESHOLD.

{
    if (!are_ancestors_extracted)
    {
        extractAncestors(WORD_COVERAGE_THRESHOLD, true, true);
    }
    for (map<int, Set>::iterator it = synset_to_ancestors.begin(); it != synset_to_ancestors.end(); ++it)
    {
        cout << it->first << " -> ";
        for (SetIterator iit = it->second.begin(); iit != it->second.end(); ++iit)
            cout << *iit << " ";
        cout << endl;
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::printWordAncestors ( )

Definition at line 1814 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::endl(), PLearn::extractAncestors(), and WORD_COVERAGE_THRESHOLD.

{
    if (!are_ancestors_extracted)
    {
        extractAncestors(WORD_COVERAGE_THRESHOLD, true, true);
    }
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        int id = it->first;
        cout << id << " -> ";
        Set ancestors = getWordAncestors(id);
        for (SetIterator iit = ancestors.begin(); iit != ancestors.end(); ++iit)
        {
            cout << *iit << " ";
        }
        cout << endl;
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::printWordOntology ( int  id)

Definition at line 2045 of file WordNetOntology.cc.

References PLearn::endl().

{
    cout << words[id] << endl;
    for (SetIterator sit = word_to_senses[id].begin(); sit != word_to_senses[id].end(); ++sit)
    {
        int sense_id = *sit;
        printSynset(sense_id, 1);
        printOntology(synsets[sense_id], 2);    
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::printWordOntology ( string  word)

Definition at line 2056 of file WordNetOntology.cc.

void PLearn::WordNetOntology::processUnknownWord ( int  word_id)

Definition at line 909 of file WordNetOntology.cc.

References EOS_OFFSET, PLearn::Node::gloss, PLearn::Node::hereiam, PLearn::Set::insert(), PLearn::Node::parents, PLearn::Node::ss_id, SUPER_UNKNOWN_SS_ID, PLearn::Node::syns, PLearn::tostring(), PLearn::Node::types, and UNDEFINED_TYPE.

{
    if (differentiate_unknown_words)
    {
        // create an UNKNOWN synset for a particular word
        Node* unk_node = new Node(synset_index++);
        int unknown_sense_id = unknown_sense_index++;
        unk_node->syns.push_back("UNKNOWN_SENSE_" + tostring(unknown_sense_id));
        unk_node->gloss = "(unknown sense " + tostring(unknown_sense_id) + ")";
        unk_node->types.insert(UNDEFINED_TYPE);
        unk_node->hereiam = EOS_OFFSET - unknown_sense_id - 1;
        synsets[unk_node->ss_id] = unk_node;
   
        // link UNKNOWN <-> SUPER-UNKNOWN
        unk_node->parents.insert(SUPER_UNKNOWN_SS_ID);
        synsets[SUPER_UNKNOWN_SS_ID]->children.insert(unk_node->ss_id);
  
        word_to_senses[word_id].insert(unk_node->ss_id);
        sense_to_words[unk_node->ss_id].insert(word_id);
    } else // all the unknown words are linked to SUPER-UNKNOWN 
    {      // (acting in this context as a sense)
        word_to_senses[word_id].insert(SUPER_UNKNOWN_SS_ID);
        sense_to_words[SUPER_UNKNOWN_SS_ID].insert(word_id);
    }

}

Here is the call graph for this function:

void PLearn::WordNetOntology::propagatePOSTypes ( Node node)

Definition at line 946 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), PLearn::Node::parents, PLWARNING, PLearn::Set::size(), PLearn::Node::types, and PLearn::Node::visited.

{
    node->visited = true;
    for (SetIterator it = node->parents.begin(); it != node->parents.end(); ++it)
    {
        Node* parent_node = synsets[*it];
        for (SetIterator iit = node->types.begin(); iit != node->types.end(); ++iit)
        {
            parent_node->types.insert(*iit);
        }
        if (parent_node->types.size() > 1)
        {
#ifndef NOWARNING
            PLWARNING("a synset has more than 1 type");
#endif
        }
        if (!parent_node->visited)
            propagatePOSTypes(parent_node);
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::propagatePOSTypes ( )

Definition at line 936 of file WordNetOntology.cc.

{
    for (map<int, Set>::iterator it = sense_to_words.begin(); it != sense_to_words.end(); ++it)
    {
        Node* node = synsets[it->first];
        propagatePOSTypes(node);
    }
    unvisitAll();
}
void PLearn::WordNetOntology::reducePolysemy ( int  level)

Definition at line 2220 of file WordNetOntology.cc.

References PLearn::ShellProgressBar::init().

{
    ShellProgressBar progress(0, words.size() - 1, "reducing polysemy", 50);
    progress.init();
    progress.draw();
    int count = 0;
    for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
    {
        int word_id = it->first;
        //reduceWordPolysemy(word_id, level);
        reduceWordPolysemy_preserveSenseOverlapping(word_id, level);
        progress.update(count++);
    }
    progress.done();
    removeNonReachableSynsets();
}

Here is the call graph for this function:
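
A sketch; the level argument controls how far above each sense the categories are compared before redundant senses are dropped (see reduceWordPolysemy_preserveSenseOverlapping below), and non-reachable synsets are pruned internally afterwards.

    // Sketch only: collapse senses sharing the same categories two levels up.
    onto.reducePolysemy(2);
    onto.printStats();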

void PLearn::WordNetOntology::reduceWordPolysemy ( int  word_id,
int  level 
)

Definition at line 2240 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Set::insert(), and PLearn::Set::size().

{
    Set senses = word_to_senses[word_id];
    Set senses_to_be_removed;
    if (senses.size() > 1)
    {
        //SetsValuesSet svs;
        set<set<int> > ss;
        for (SetIterator it = senses.begin(); it != senses.end(); ++it)
        {
            int sense_id = *it;
            set<int> categories_at_level;
            getCategoriesAtLevel(sense_id, 0, level, categories_at_level);

//       cout << "sense_id = " << sense_id << ", categories_at_level[" << level << "] for word '" << words[word_id] << "' : ";
//       for (set<int>::iterator sit = categories_at_level.begin(); sit != categories_at_level.end(); ++sit)
//         cout << *sit << " ";

            if (categories_at_level.size() != 0)
            {
                // if a list of categories, for a given sense, is already extracted 
                // (through a different sense) mark the sense for deletion
                //bool already_there = !svs.insert(categories_at_level, sense_id);
                bool already_there = (ss.find(categories_at_level) != ss.end());
                if (already_there)
                {
                    //cout << "*" << endl;
                    senses_to_be_removed.insert(sense_id);
                    sense_to_words[sense_id].remove(word_id);
                    // if a sense doesn't point to any word anymore, erase it from the sense table
                    if (sense_to_words[sense_id].isEmpty())
                        sense_to_words.erase(sense_id);
                } else
                {
                    ss.insert(categories_at_level);
                    //cout << endl;
                }
            } else
            {
                //cout << endl;
            }
        }
        // erase the marked senses
        for (SetIterator it = senses_to_be_removed.begin(); it != senses_to_be_removed.end(); ++it)
        {
            int sense_id = *it;
            word_to_senses[word_id].remove(sense_id);
            word_to_noun_senses[word_id].remove(sense_id);
            word_to_verb_senses[word_id].remove(sense_id);
            word_to_adj_senses[word_id].remove(sense_id);
            word_to_adv_senses[word_id].remove(sense_id);
        }
    }
}

Here is the call graph for this function:

void PLearn::WordNetOntology::reduceWordPolysemy_preserveSenseOverlapping ( int  word_id,
int  level 
)

Definition at line 2295 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::endl(), PLearn::Set::insert(), PLearn::Set::isEmpty(), and PLearn::Set::size().

{
    Set senses = word_to_senses[word_id];
    Set senses_to_be_removed;
    map<set<int>, Set> categories_to_senses;
    if (senses.size() > 1)
    {
        for (SetIterator it = senses.begin(); it != senses.end(); ++it)
        {
            int sense_id = *it;
            set<int> categories_at_level;
            getCategoriesAtLevel(sense_id, 0, level, categories_at_level);
            if (categories_at_level.size() != 0)
                categories_to_senses[categories_at_level].insert(sense_id);
        }

        for (map<set<int>, Set>::iterator it = categories_to_senses.begin(); it != categories_to_senses.end(); ++it)
        {
            Set sense_cluster = it->second;
            if (sense_cluster.size() > 1)
            {
                int sense_cluster_size = sense_cluster.size();
                int n_sense_removed = 0;
                for (SetIterator sit = sense_cluster.begin(); sit != sense_cluster.end(); ++sit)
                {
                    int sense_id = *sit;
                    if (sense_to_words[sense_id].size() < 2 && n_sense_removed < (sense_cluster_size - 1))
                    {
                        senses_to_be_removed.insert(sense_id);
                        sense_to_words[sense_id].remove(word_id);
                        // if a sense doesn't point to any word anymore, erase it from the sense table
                        if (sense_to_words[sense_id].isEmpty())
                            sense_to_words.erase(sense_id);
                        n_sense_removed++;
                    }
                }
            }
        }

        if (!senses_to_be_removed.isEmpty())
        {
            cout << words[word_id] << endl;
//       cout << "senses = " << senses; 
//       cout << ", senses_to_be_removed = " << senses_to_be_removed << endl;
        }

        // erase the marked senses
        for (SetIterator it = senses_to_be_removed.begin(); it != senses_to_be_removed.end(); ++it)
        {
            int sense_id = *it;

            printSynset(sense_id, 1);

            word_to_senses[word_id].remove(sense_id);
            word_to_noun_senses[word_id].remove(sense_id);
            word_to_verb_senses[word_id].remove(sense_id);
            word_to_adj_senses[word_id].remove(sense_id);
            word_to_adv_senses[word_id].remove(sense_id);
        }
    }
}
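
A minimal usage sketch (not taken from the PLearn sources): assuming `onto` is an already loaded WordNetOntology and that getAllWords() returns the set of vocabulary word ids, the polysemy of every word can be reduced against the ancestor categories found at ontology level 3.

// Hypothetical sketch: for every vocabulary word, collapse senses whose
// ancestor categories coincide at level 3.  The `onto` instance and the
// level value are illustrative assumptions, not part of the sources.
Set word_ids = onto.getAllWords();
for (SetIterator it = word_ids.begin(); it != word_ids.end(); ++it)
    onto.reduceWordPolysemy_preserveSenseOverlapping(*it, 3);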

void PLearn::WordNetOntology::reduceWordPolysemy_preserveSenseOverlapping2 ( int word_id, int level )

Definition at line 2357 of file WordNetOntology.cc.

{
/*
  Set senses = word_to_senses[word_id];
  Set senses_to_be_removed;
  map<int, Set> sense_to_categories_under_level(senses.size());
  if (senses.size() > 1)
  {
  for (SetIterator it = senses.begin(); it != senses.end(); ++it)
  {
  int sense_id = *it;
  Set categories_under_level;
  getCategoriesUnderLevel(sense_id, 0, level, categories_under_level);
  sense_to_categories_under_level[sense_id] = categories_under_level;
  }
    
  if (!senses_to_be_removed.isEmpty())
  {
  //cout << words[word_id] << endl;
  //cout << "senses = " << senses; 
  //cout << ", senses_to_be_removed = " << senses_to_be_removed << endl;
  }
  // erase the marked senses
  for (SetIterator it = senses_to_be_removed.begin(); it != senses_to_be_removed.end(); ++it)
  {
  int sense_id = *it;

  printSynset(sense_id, 1);

  word_to_senses[word_id].remove(sense_id);
  word_to_noun_senses[word_id].remove(sense_id);
  word_to_verb_senses[word_id].remove(sense_id);
  word_to_adj_senses[word_id].remove(sense_id);
  word_to_adv_senses[word_id].remove(sense_id);
  }
  }
*/
}
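
Note that the entire body of this method is commented out in the sources, so calling reduceWordPolysemy_preserveSenseOverlapping2() currently has no effect.
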
void PLearn::WordNetOntology::removeNonReachableSynsets ( )

Definition at line 2398 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), PLearn::Set::insert(), PLearn::Set::remove(), and PLearn::Node::visited.

Referenced by PLearn::GraphicalBiText::build_().

{
    // visit the whole graph, starting from the words and going upward, marking the nodes
    for (map<int, Set>::iterator wit = word_to_senses.begin(); wit != word_to_senses.end(); ++wit)
    {
        Set senses = wit->second;
        for (SetIterator sit = senses.begin(); sit != senses.end(); ++sit)
        {
            int sense_id = *sit;
            visitUpward(synsets[sense_id]);
        }
    }
    // mark synsets that need to be removed
    Set synsets_to_be_removed;
    for (map<int, Node*>::iterator sit = synsets.begin(); sit != synsets.end(); ++sit)
    {
        int ss_id = sit->first;
        Node* node = sit->second;
        if (!node->visited)
        {
            synsets_to_be_removed.insert(ss_id);
        } else
        {
            // for a synset that does not need to be removed, check if there are child pointers
            // to a removed one (mark them for deletion if so)
            Set children_to_be_removed;
            for (SetIterator cit = node->children.begin(); cit != node->children.end(); ++cit)
            {
                int child_id = *cit;
                if (!synsets[child_id]->visited)
                    children_to_be_removed.insert(child_id);
            }
            // remove the marked child pointers
            for (SetIterator rit = children_to_be_removed.begin(); rit != children_to_be_removed.end(); ++rit)
                node->children.remove(*rit);
        }
    }

    // remove the marked synsets
    for (SetIterator rit = synsets_to_be_removed.begin(); rit != synsets_to_be_removed.end(); ++rit)
    {
        int ss_id = *rit;
        delete(synsets[ss_id]);
        synsets.erase(ss_id);
    }

    unvisitAll();
}
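
In effect this is a mark-and-sweep pass over the synset graph: visitUpward() marks every synset reachable upward from some word sense, the sweep then deletes the unmarked synsets together with the child pointers leading to them, and unvisitAll() finally clears the visited flags so later traversals start from a clean state.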

void PLearn::WordNetOntology::removeWord ( int  id)

Definition at line 2447 of file WordNetOntology.cc.

Referenced by PLearn::GraphicalBiText::build_().

{
    string word_string = words[id];
    words.erase(id);
    word_to_senses.erase(id);
    word_to_noun_senses.erase(id);
    word_to_verb_senses.erase(id);
    word_to_adj_senses.erase(id);
    word_to_adv_senses.erase(id);
    words_id.erase(word_string);
    word_to_noun_wnsn.erase(id);
    word_to_verb_wnsn.erase(id);
    word_to_adj_wnsn.erase(id);
    word_to_adv_wnsn.erase(id);
    word_to_predominent_pos.erase(id);
    //  word_is_in_wn[id] = 0; should we do that too?
    word_to_high_level_senses.erase(id);
}
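
A hedged sketch of the removal workflow suggested by the Referenced-by note above (the `obsolete_ids` set and the `onto` instance are illustrative assumptions): removeWord() only erases the word-side tables and leaves the synset graph untouched, so callers typically drop the words first and then prune the synsets that are no longer reachable.

// Hypothetical sketch: discard a set of word ids, then prune the
// synsets that no remaining word can reach.
for (SetIterator it = obsolete_ids.begin(); it != obsolete_ids.end(); ++it)
    onto.removeWord(*it);
onto.removeNonReachableSynsets();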

void PLearn::WordNetOntology::save ( string  voc_file)

Definition at line 1229 of file WordNetOntology.cc.

References PLearn::endl().

{
    ofstream of_voc(voc_file.c_str());
    for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
    {
        of_voc << it->second << endl;
    }
    of_voc.close();
}

void PLearn::WordNetOntology::save ( string synset_file, string ontology_file, string sense_key_file )

Definition at line 1254 of file WordNetOntology.cc.

References PLearn::endl(), and PLearn::save().

{
    save(synset_file, ontology_file);

    ofstream of_voc(sense_key_file.c_str());
    for (map<pair<int, int>, string>::iterator it = ws_id_to_sense_key.begin(); it != ws_id_to_sense_key.end(); ++it)
    {
        of_voc << it->second << " " << (it->first).first << " " << (it->first).second << endl;
    }
    of_voc.close();
}
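
Each line of the sense key file thus holds a sense key followed by the two integers of its key pair, presumably the word id and then the synset id, given the getSenseKey(word_id, ss_id) accessor.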

void PLearn::WordNetOntology::save ( string synset_file, string ontology_file )

Definition at line 1177 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), PLearn::endl(), PLearn::Node::fnum, PLearn::Node::gloss, PLearn::Node::hereiam, PLearn::Node::syns, and PLearn::Node::types.

Referenced by main().

{
    // synset
    ofstream of_synsets(synset_file.c_str());
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        int ss_id = it->first;
        Node* node = it->second;
        of_synsets << ss_id << "*|";
        for (SetIterator it = node->types.begin(); it != node->types.end(); ++it)
        {
            of_synsets << *it << "|";
        }
        of_synsets << "*|";
        of_synsets << node->gloss << "|";
        for (vector<string>::iterator iit = node->syns.begin(); iit != node->syns.end(); ++iit)
        {
            of_synsets << *iit << "|";
        }
        of_synsets << "*|";
        of_synsets << node->fnum << "|";
        of_synsets << node->hereiam << "|";
        of_synsets << endl;
    }
    of_synsets.close();

    // ontology
    ofstream of_ontology(ontology_file.c_str());
    for (map<int, Set>::iterator wit = word_to_senses.begin(); wit != word_to_senses.end(); ++wit)
    {
        int word_id = wit->first;
        of_ontology << "w " << word_id << " " << word_is_in_wn[word_id] << endl;
    }
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
    {
        int id = it->first;
        Node* node = it->second;
        for(SetIterator iit = node->children.begin(); iit != node->children.end(); ++iit)
        {
            int child_id = *iit;
            of_ontology << "c " << id << " " << child_id << endl;
        }
        if (sense_to_words.find(id) != sense_to_words.end())
        {
            for (SetIterator iit = sense_to_words[id].begin(); iit != sense_to_words[id].end(); ++iit)
                of_ontology << "s " << id << " " << (*iit) << endl;
        }
    }

    of_ontology.close();
}
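
The code above implies simple line-oriented formats for the two files; the field names below are descriptive placeholders, not identifiers from the sources:

synset file, one line per synset:
    <ss_id>*|<type>|...|*|<gloss>|<syn>|...|*|<fnum>|<hereiam>|

ontology file, one record per line:
    w <word_id> <word_is_in_wn flag>        (vocabulary entry)
    c <parent_synset_id> <child_synset_id>  (parent-to-child synset link)
    s <synset_id> <word_id>                 (sense-to-word link)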

void PLearn::WordNetOntology::savePredominentSyntacticClasses ( string  file)

Definition at line 2589 of file WordNetOntology.cc.

References PLearn::endl().

{
    ofstream out_pos(file.c_str());
    for (map<int, Set>::iterator it = word_to_senses.begin(); it != word_to_senses.end(); ++it)
    {
        int word_id = it->first;
        out_pos << getPredominentSyntacticClassForWord(word_id) << endl;
    
    }
    out_pos.close();
}
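
The resulting file contains one predominant syntactic class id per line and no word ids; since the map iterates its keys in increasing order, the lines follow increasing word id and are presumably meant to be read alongside the vocabulary written by save(voc_file).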

void PLearn::WordNetOntology::saveVocInWordnet ( string  voc_file)

Definition at line 1240 of file WordNetOntology.cc.

References PLearn::endl().

{
    ofstream of_voc(voc_file.c_str());
    for (map<int, string>::iterator it = words.begin(); it != words.end(); ++it)
    {
        if (word_is_in_wn[it->first] == false) continue;
        of_voc << it->second << endl;
    }
    of_voc.close();
}

void PLearn::WordNetOntology::unvisitAll ( )

Definition at line 967 of file WordNetOntology.cc.

{
    for (map<int, Node*>::iterator it = synsets.begin(); it != synsets.end(); ++it)
        it->second->visited = false;
}
void PLearn::WordNetOntology::unvisitDownward ( Node node)

Definition at line 2479 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Node::children, PLearn::Set::end(), and PLearn::Node::visited.

{
    node->visited = false;
    for (SetIterator s_it = node->children.begin(); s_it != node->children.end(); ++s_it) {
        Node *child = synsets[*s_it];
        if (child->visited)
            unvisitDownward(child);
    }
}

void PLearn::WordNetOntology::visitUpward ( Node node)

Definition at line 2467 of file WordNetOntology.cc.

References PLearn::Set::begin(), PLearn::Set::end(), PLearn::Node::parents, and PLearn::Node::visited.

{
    node->visited = true;
    for (SetIterator pit = node->parents.begin(); pit != node->parents.end(); ++pit)
    {
        int parent_id = *pit;
        if (!synsets[parent_id]->visited)
            visitUpward(synsets[parent_id]);
    }
}

Member Data Documentation

Definition at line 200 of file WordNetOntology.h.

Definition at line 205 of file WordNetOntology.h.

Definition at line 201 of file WordNetOntology.h.

Definition at line 206 of file WordNetOntology.h.

Definition at line 212 of file WordNetOntology.h.

Definition at line 213 of file WordNetOntology.h.

Definition at line 214 of file WordNetOntology.h.

Definition at line 215 of file WordNetOntology.h.

Definition at line 216 of file WordNetOntology.h.

Definition at line 225 of file WordNetOntology.h.

Definition at line 208 of file WordNetOntology.h.

Definition at line 218 of file WordNetOntology.h.

Definition at line 198 of file WordNetOntology.h.

Definition at line 203 of file WordNetOntology.h.

Definition at line 209 of file WordNetOntology.h.

map< pair<int, string>,int> PLearn::WordNetOntology::sense_key_to_ss_id [protected]

Definition at line 190 of file WordNetOntology.h.

Definition at line 173 of file WordNetOntology.h.

Referenced by getSenseSize().

Definition at line 194 of file WordNetOntology.h.

Definition at line 174 of file WordNetOntology.h.

Definition at line 176 of file WordNetOntology.h.

Definition at line 177 of file WordNetOntology.h.

Definition at line 386 of file WordNetOntology.h.

Referenced by fillTempWordToSensesTVecMap(), and getSensesForWord().

Definition at line 195 of file WordNetOntology.h.

Definition at line 199 of file WordNetOntology.h.

Definition at line 204 of file WordNetOntology.h.

Definition at line 193 of file WordNetOntology.h.

Definition at line 186 of file WordNetOntology.h.

Definition at line 188 of file WordNetOntology.h.

Definition at line 171 of file WordNetOntology.h.

Referenced by fillTempWordToSensesTVecMap().

Definition at line 183 of file WordNetOntology.h.

Definition at line 172 of file WordNetOntology.h.

Referenced by fillTempWordToSensesTVecMap().

Definition at line 184 of file WordNetOntology.h.

Definition at line 175 of file WordNetOntology.h.

Definition at line 187 of file WordNetOntology.h.

Definition at line 169 of file WordNetOntology.h.

Referenced by fillTempWordToSensesTVecMap().

Definition at line 181 of file WordNetOntology.h.

Definition at line 185 of file WordNetOntology.h.

Definition at line 189 of file WordNetOntology.h.

Definition at line 170 of file WordNetOntology.h.

Referenced by fillTempWordToSensesTVecMap().

Definition at line 182 of file WordNetOntology.h.

map<int, string> PLearn::WordNetOntology::words [protected]
map<string, int> PLearn::WordNetOntology::words_id [protected]

Definition at line 180 of file WordNetOntology.h.

Referenced by containsWord(), and getWordsId().

map<pair<int,int>, string> PLearn::WordNetOntology::ws_id_to_sense_key [protected]

Definition at line 191 of file WordNetOntology.h.


The documentation for this class was generated from the following files:
 WordNetOntology.h
 WordNetOntology.cc