I wrote my own string tokenizer function a while back, and here is what I came up with. The only drawback is that you have to specify the maximum length of the tokens up front. In most cases that works fine, since most tokens are under 255 characters. In other cases, though, where a token may be millions of characters long, preallocating fixed-size buffers would be an unsatisfactory use of memory. At any rate, this simple function has the advantage of treating back-to-back delimiters as an (empty) token, where strtok does not recognize them as a token at all (see the strtok comparison and the std::string sketch after the code).
Code:
#include <iostream>
using namespace std;
// Return value: number of tokens read
// pBuffer     - the string holding the data as a whole
// chDelimiter - the char delimiter that separates the values
// pTokens     - 2-D char array that receives the tokens
// iMaxTokens  - maximum number of tokens expected
int StringTokenizer(char* pBuffer, char chDelimiter, char pTokens[][255], int iMaxTokens);
int main()
{
    char strTokens[10][255];
    char chDelimiter = '|';
    char stringline[] = "JOE|SCHMUCK|1111|ONEWAY||";
    int iTokens;

    iTokens = StringTokenizer(stringline, chDelimiter, strTokens, 10);
    for (int i = 0; i < iTokens; i++)
    {
        cout << "\nToken " << i + 1 << "." << strTokens[i];
    }
    cout << endl;
    return 0;
}
int StringTokenizer(char* pBuffer, char chDelimiter, char pTokens[][255], int iMaxTokens)
{
    int i = 0;  // token index
    int j = 0;  // character index within the current token

    while (*pBuffer != '\0' && i < iMaxTokens)
    {
        j = 0;
        // Copy characters until the next delimiter or the end of the buffer
        while (*pBuffer != chDelimiter && *pBuffer != '\0')
        {
            if (j < 254)  // leave room for the terminator; longer tokens are truncated
            {
                pTokens[i][j] = *pBuffer;
                j++;
            }
            pBuffer++;
        }
        pTokens[i][j] = '\0';   // terminate the token
        if (*pBuffer == '\0')   // end of buffer: count the last token and stop
        {
            i++;
            break;
        }
        pBuffer++;              // skip the delimiter
        i++;
    }
    return i;
}
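For comparison, here is a minimal sketch showing the strtok behaviour I mentioned. It is just my own test run on the same input, not part of the function above. Note how the empty field between the two trailing delimiters disappears:
Code:
#include <cstring>
#include <iostream>
using namespace std;

int main()
{
    char stringline[] = "JOE|SCHMUCK|1111|ONEWAY||";
    int iTokens = 0;
    // strtok collapses runs of delimiters, so the empty field
    // between the two trailing '|' characters is never reported
    for (char* p = strtok(stringline, "|"); p != NULL; p = strtok(NULL, "|"))
    {
        cout << "\nToken " << ++iTokens << "." << p;
    }
    cout << endl;  // prints only 4 tokens; the function above returns 5
    return 0;
}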
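And for the memory drawback: if the fixed 255-character rows are a problem, the same logic could be sketched with std::string and std::vector, so each token only grows as large as it needs to be. This is just one possible rewrite, not a drop-in replacement for the prototype above:
Code:
#include <string>
#include <vector>
using namespace std;

// Same splitting behaviour as the char version (back-to-back
// delimiters still produce an empty token), but with no fixed
// token length and no iMaxTokens cap
vector<string> StringTokenizer(const string& buffer, char chDelimiter)
{
    vector<string> tokens;
    string::size_type start = 0;
    while (start < buffer.size())
    {
        string::size_type end = buffer.find(chDelimiter, start);
        if (end == string::npos)
        {
            tokens.push_back(buffer.substr(start));  // last token, no trailing delimiter
            break;
        }
        tokens.push_back(buffer.substr(start, end - start));  // empty tokens are kept
        start = end + 1;
    }
    return tokens;
}
Calling it on "JOE|SCHMUCK|1111|ONEWAY||" gives the same five tokens as the char version, and tokens.size() replaces the returned count.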