-rw-r--r--  include/helper.h             2
-rw-r--r--  source/dialogs/dmenu.c       6
-rw-r--r--  source/dialogs/drun.c        8
-rw-r--r--  source/dialogs/help-keys.c   2
-rw-r--r--  source/dialogs/run.c         2
-rw-r--r--  source/dialogs/script.c      2
-rw-r--r--  source/dialogs/ssh.c         2
-rw-r--r--  source/dialogs/window.c     12
-rw-r--r--  source/helper.c              2
-rw-r--r--  test/helper-tokenize.c     248
10 files changed, 143 insertions, 143 deletions
diff --git a/include/helper.h b/include/helper.h
index 85ff158a..cd46eed8 100644
--- a/include/helper.h
+++ b/include/helper.h
@@ -108,7 +108,7 @@ int find_arg ( const char * const key );
*
* @returns TRUE when matches, FALSE otherwise
*/
-int token_match ( GRegex * const *tokens, const char *input );
+int helper_token_match ( GRegex * const *tokens, const char *input );
/**
* @param cmd The command to execute.
*
diff --git a/source/dialogs/dmenu.c b/source/dialogs/dmenu.c
index 026f065e..5a264fdd 100644
--- a/source/dialogs/dmenu.c
+++ b/source/dialogs/dmenu.c
@@ -469,7 +469,7 @@ static int dmenu_mode_init ( Mode *sw )
static int dmenu_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
{
DmenuModePrivateData *rmpd = (DmenuModePrivateData *) mode_get_private_data ( sw );
- return token_match ( tokens, rmpd->cmd_list[index] );
+ return helper_token_match ( tokens, rmpd->cmd_list[index] );
}
#include "mode-private.h"
@@ -677,7 +677,7 @@ int dmenu_switcher_dialog ( void )
GRegex **tokens = tokenize ( select, config.case_sensitive );
unsigned int i = 0;
for ( i = 0; i < cmd_list_length; i++ ) {
- if ( token_match ( tokens, cmd_list[i] ) ) {
+ if ( helper_token_match ( tokens, cmd_list[i] ) ) {
pd->selected_line = i;
break;
}
@@ -688,7 +688,7 @@ int dmenu_switcher_dialog ( void )
GRegex **tokens = tokenize ( config.filter ? config.filter : "", config.case_sensitive );
unsigned int i = 0;
for ( i = 0; i < cmd_list_length; i++ ) {
- if ( tokens == NULL || token_match ( tokens, cmd_list[i] ) ) {
+ if ( tokens == NULL || helper_token_match ( tokens, cmd_list[i] ) ) {
dmenu_output_formatted_line ( pd->format, cmd_list[i], i, config.filter );
}
}
diff --git a/source/dialogs/drun.c b/source/dialogs/drun.c
index fa10c863..4415c3d3 100644
--- a/source/dialogs/drun.c
+++ b/source/dialogs/drun.c
@@ -534,23 +534,23 @@ static int drun_token_match ( const Mode *data, GRegex **tokens, unsigned int in
GRegex *ftokens[2] = { tokens[j], NULL };
// Match name
if ( !test && rmpd->entry_list[index].name &&
- token_match ( ftokens, rmpd->entry_list[index].name ) ) {
+ helper_token_match ( ftokens, rmpd->entry_list[index].name ) ) {
test = 1;
}
// Match generic name
if ( !test && rmpd->entry_list[index].generic_name &&
- token_match ( ftokens, rmpd->entry_list[index].generic_name ) ) {
+ helper_token_match ( ftokens, rmpd->entry_list[index].generic_name ) ) {
test = 1;
}
// Match executable name.
- if ( !test && token_match ( ftokens, rmpd->entry_list[index].exec ) ) {
+ if ( !test && helper_token_match ( ftokens, rmpd->entry_list[index].exec ) ) {
test = 1;
}
// Match against category.
if ( !test ) {
gchar **list = g_key_file_get_locale_string_list ( rmpd->entry_list[index].key_file, "Desktop Entry", "Categories", NULL, NULL, NULL );
for ( int iter = 0; !test && list && list[iter]; iter++ ) {
- test = token_match ( ftokens, list[iter] );
+ test = helper_token_match ( ftokens, list[iter] );
}
g_strfreev ( list );
}
diff --git a/source/dialogs/help-keys.c b/source/dialogs/help-keys.c
index f7772a93..2e0a3101 100644
--- a/source/dialogs/help-keys.c
+++ b/source/dialogs/help-keys.c
@@ -108,7 +108,7 @@ static int help_keys_token_match ( const Mode *data,
)
{
KeysHelpModePrivateData *rmpd = (KeysHelpModePrivateData *) mode_get_private_data ( data );
- return token_match ( tokens, rmpd->messages[index] );
+ return helper_token_match ( tokens, rmpd->messages[index] );
}
static unsigned int help_keys_mode_get_num_entries ( const Mode *sw )
diff --git a/source/dialogs/run.c b/source/dialogs/run.c
index 6138e2d1..bf55b77b 100644
--- a/source/dialogs/run.c
+++ b/source/dialogs/run.c
@@ -433,7 +433,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, G_
static int run_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
{
const RunModePrivateData *rmpd = (const RunModePrivateData *) sw->private_data;
- return token_match ( tokens, rmpd->cmd_list[index] );
+ return helper_token_match ( tokens, rmpd->cmd_list[index] );
}
#include "mode-private.h"
diff --git a/source/dialogs/script.c b/source/dialogs/script.c
index 2b018ff6..5744f5ac 100644
--- a/source/dialogs/script.c
+++ b/source/dialogs/script.c
@@ -169,7 +169,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, G_
static int script_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
{
ScriptModePrivateData *rmpd = sw->private_data;
- return token_match ( tokens, rmpd->cmd_list[index] );
+ return helper_token_match ( tokens, rmpd->cmd_list[index] );
}
#include "mode-private.h"
diff --git a/source/dialogs/ssh.c b/source/dialogs/ssh.c
index 31a19f7e..9a0909a0 100644
--- a/source/dialogs/ssh.c
+++ b/source/dialogs/ssh.c
@@ -483,7 +483,7 @@ static char *_get_display_value ( const Mode *sw, unsigned int selected_line, G_
static int ssh_token_match ( const Mode *sw, GRegex **tokens, unsigned int index )
{
SSHModePrivateData *rmpd = (SSHModePrivateData *) mode_get_private_data ( sw );
- return token_match ( tokens, rmpd->hosts_list[index] );
+ return helper_token_match ( tokens, rmpd->hosts_list[index] );
}
#include "mode-private.h"
Mode ssh_mode =
diff --git a/source/dialogs/window.c b/source/dialogs/window.c
index 319558e5..4171ad2f 100644
--- a/source/dialogs/window.c
+++ b/source/dialogs/window.c
@@ -333,28 +333,28 @@ static int window_match ( const Mode *sw, GRegex **tokens, unsigned int index )
if ( tokens ) {
for ( int j = 0; match && tokens != NULL && tokens[j] != NULL; j++ ) {
int test = 0;
- // Dirty hack. Normally token_match does _all_ the matching,
+ // Dirty hack. Normally helper_token_match does _all_ the matching,
// Now we want it to match only one item at the time.
// If hack not in place it would not match queries spanning multiple fields.
// e.g. when searching 'title element' and 'class element'
GRegex *ftokens[2] = { tokens[j], NULL };
if ( !test && c->title != NULL && c->title[0] != '\0' ) {
- test = token_match ( ftokens, c->title );
+ test = helper_token_match ( ftokens, c->title );
}
if ( !test && c->class != NULL && c->class[0] != '\0' ) {
- test = token_match ( ftokens, c->class );
+ test = helper_token_match ( ftokens, c->class );
}
if ( !test && c->role != NULL && c->role[0] != '\0' ) {
- test = token_match ( ftokens, c->role );
+ test = helper_token_match ( ftokens, c->role );
}
if ( !test && c->name != NULL && c->name[0] != '\0' ) {
- test = token_match ( ftokens, c->name );
+ test = helper_token_match ( ftokens, c->name );
}
if ( !test && c->wmdesktopstr != NULL && c->wmdesktopstr[0] != '\0' ) {
- test = token_match ( ftokens, c->wmdesktopstr );
+ test = helper_token_match ( ftokens, c->wmdesktopstr );
}
if ( test == 0 ) {
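
The window.c and drun.c hunks above rely on a small trick: each token is wrapped in its own one-element, NULL-terminated array (ftokens) so that helper_token_match() evaluates a single token against a single field, and the outer loop then requires every token to hit at least one field. A condensed sketch of that pattern, with illustrative field names rather than the real client structure:

    /* Every token must match at least one field; any token that matches
     * none fails the whole entry. Mirrors the loop in window.c/drun.c. */
    static int match_fields ( GRegex **tokens, const char *title, const char *class )
    {
        int match = 1;
        for ( int j = 0; match && tokens != NULL && tokens[j] != NULL; j++ ) {
            GRegex *ftokens[2] = { tokens[j], NULL };
            int test = 0;
            if ( !test && title && title[0] != '\0' ) {
                test = helper_token_match ( ftokens, title );
            }
            if ( !test && class && class[0] != '\0' ) {
                test = helper_token_match ( ftokens, class );
            }
            if ( test == 0 ) {
                match = 0;
            }
        }
        return match;
    }
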
diff --git a/source/helper.c b/source/helper.c
index e71a5f3c..b6871c19 100644
--- a/source/helper.c
+++ b/source/helper.c
@@ -442,7 +442,7 @@ PangoAttrList *token_match_get_pango_attr ( ThemeHighlight th, GRegex **tokens,
return retv;
}
-int token_match ( GRegex * const *tokens, const char *input )
+int helper_token_match ( GRegex * const *tokens, const char *input )
{
int match = TRUE;
// Do a tokenized match.
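
Only the start of the renamed function body is visible in this hunk; conceptually it ANDs the tokens: every compiled GRegex must match the input for the call to return TRUE. A rough stand-in using plain GLib calls (a sketch, not rofi's actual implementation, which may handle cases this hunk does not show):

    /* Sketch: TRUE only if every token in the NULL-terminated array matches. */
    static int token_match_sketch ( GRegex * const *tokens, const char *input )
    {
        int match = TRUE;
        if ( tokens ) {
            for ( int j = 0; match && tokens[j] != NULL; j++ ) {
                match = g_regex_match ( tokens[j], input, 0, NULL );
            }
        }
        return match;
    }
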
diff --git a/test/helper-tokenize.c b/test/helper-tokenize.c
index dcfe6985..c5c6bafa 100644
--- a/test/helper-tokenize.c
+++ b/test/helper-tokenize.c
@@ -50,204 +50,204 @@ int main ( int argc, char ** argv )
config.matching_method = MM_NORMAL;
GRegex **tokens = tokenize ( "noot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == FALSE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
}
{
config.matching_method = MM_GLOB;
GRegex **tokens = tokenize ( "noot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == FALSE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "n?ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == FALSE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == FALSE);
tokenize_free ( tokens );
tokens = tokenize ( "n*ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
tokenize_free ( tokens );
tokens = tokenize ( "n* ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE);
- TASSERT ( token_match ( tokens, "ot nap mies") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "ot nap mies") == TRUE);
tokenize_free ( tokens );
}
{
config.matching_method = MM_FUZZY;
GRegex **tokens = tokenize ( "noot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == FALSE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "n ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
tokenize_free ( tokens );
tokens = tokenize ( "ont", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == FALSE);
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap nmiest") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == FALSE);
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap nmiest") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "o n t", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE);
- TASSERT ( token_match ( tokens, "ot nap mies") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "ot nap mies") == TRUE);
tokenize_free ( tokens );
}
{
config.matching_method = MM_REGEX;
GRegex **tokens = tokenize ( "noot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "noot", TRUE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap Noot mies") == FALSE );
- TASSERT ( token_match ( tokens, "Nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noOTap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap Noot mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "Nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noOTap mies") == FALSE );
tokenize_free ( tokens );
tokens = tokenize ( "no ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE );
tokenize_free ( tokens );
tokens = tokenize ( "n.?ot", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noap miesot") == FALSE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == FALSE);
tokenize_free ( tokens );
tokens = tokenize ( "n[oa]{2}t", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == TRUE );
- TASSERT ( token_match ( tokens, "aap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == TRUE );
- TASSERT ( token_match ( tokens, "noat miesot") == TRUE);
- TASSERT ( token_match ( tokens, "noaat miesot") == FALSE);
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == TRUE );
+ TASSERT ( helper_token_match ( tokens, "noat miesot") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "noaat miesot") == FALSE);
tokenize_free ( tokens );
tokens = tokenize ( "^(aap|noap)\\sMie.*", FALSE );
- TASSERT ( token_match ( tokens, "aap noot mies") == FALSE );
- TASSERT ( token_match ( tokens, "aap mies") == TRUE);
- TASSERT ( token_match ( tokens, "nooaap mies") == FALSE );
- TASSERT ( token_match ( tokens, "nootap mies") == FALSE );
- TASSERT ( token_match ( tokens, "noap miesot") == TRUE);
- TASSERT ( token_match ( tokens, "ot nap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "aap noot mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "aap mies") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "nooaap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "nootap mies") == FALSE );
+ TASSERT ( helper_token_match ( tokens, "noap miesot") == TRUE);
+ TASSERT ( helper_token_match ( tokens, "ot nap mies") == FALSE );
tokenize_free ( tokens );
}
}
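
The four blocks above differ only in how tokenize() compiles each whitespace-separated token; for instance the MM_FUZZY assertions (query "ont" matching "nootap nmiest" but not "aap noot mies") behave as if a token's characters may be separated by arbitrary text. A standalone, hypothetical approximation of that behaviour with a hand-built pattern, not rofi's tokenize:

    #include <glib.h>

    /* Hypothetical: approximate the MM_FUZZY "ont" case with "o.*n.*t". */
    static void fuzzy_illustration ( void )
    {
        GError *error = NULL;
        GRegex *fuzzy = g_regex_new ( "o.*n.*t", G_REGEX_CASELESS, 0, &error );
        g_assert_no_error ( error );
        g_assert_true (  g_regex_match ( fuzzy, "nootap nmiest", 0, NULL ) );
        g_assert_true ( !g_regex_match ( fuzzy, "aap noot mies", 0, NULL ) );
        g_regex_unref ( fuzzy );
    }
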