/* 
 *  call-seq:
 *     RegExpAnalyzer.new(reg_exp, lower = true) -> analyzer
 *
 *  Create a new RegExpAnalyzer. It builds tokenizers that extract tokens
 *  matching the given regular expression and, unless +lower+ is false,
 *  downcases them.
 *
 *  reg_exp:: the token matcher for the tokenizer to use
 *  lower::   set to false if you don't want to downcase the tokens
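 *
 *  A minimal usage sketch (assuming the class is reachable as
 *  Ferret::Analysis::RegExpAnalyzer; the patterns are illustrative only):
 *
 *    include Ferret::Analysis
 *
 *    csv_analyzer  = RegExpAnalyzer.new(/[^,]+/)        # comma-separated fields, downcased
 *    code_analyzer = RegExpAnalyzer.new(/\S+/, false)   # whitespace tokens, case preserved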
 */
static VALUE
frt_re_analyzer_init(int argc, VALUE *argv, VALUE self)
{
    VALUE lower, rets, regex, proc;
    Analyzer *a;
    TokenStream *ts;
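    /* "02&": no required arguments, two optional ones (reg_exp, lower) and an
     * optional block captured as +proc+ and handed to the tokenizer */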
    rb_scan_args(argc, argv, "02&", &regex, &lower, &proc);

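    /* Build the C token stream (no text yet) and wrap it in a Ruby
     * RegExpTokenizer so the GC can mark and free it; object_add records the
     * C-pointer -> Ruby-object mapping in the object table. */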
    ts = rets_new(Qnil, regex, proc);
    rets = Data_Wrap_Struct(cRegExpTokenizer, &frt_rets_mark, &frt_rets_free, ts);
    /* rb_hash_aset(object_space, LONG2NUM((long)rets), rets); */
    object_add(ts, rets);

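    /* An omitted +lower+ arrives as Qnil, which is not Qfalse, so tokens are
     * downcased unless the caller passes false explicitly. */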
    if (lower != Qfalse) {
        rets = frt_lowercase_filter_init(frt_data_alloc(cLowerCaseFilter), rets);
        ts = DATA_PTR(rets);
    }
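    /* Bump the stream's reference count so the analyzer created below can
     * release it independently of the Ruby token-stream wrapper. */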
    REF(ts);

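    /* Build the C analyzer around the stream, attach it to self with the
     * usual mark/free hooks and register it in the object table. */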
    a = analyzer_new(ts, &re_analyzer_destroy_i, NULL);
    Frt_Wrap_Struct(self, &frt_re_analyzer_mark, &frt_analyzer_free, a);
    object_add(a, self);
    return self;
}