>If you wanted to use C style string functions, you could just use C style strings!
The only way to call strtok with an std::string would be to call c_str(), but that returns a const char * and strtok modifies its argument. Such code wouldn't be conducive to smart programming.
Here is a tokenizer class that I wrote up quickly a while ago. It comes in handy when you don't want to use a more complex object such as the boost tokenizer:
Code:
#include <sstream>
#include <string>
#include <vector>
// Simple one-shot tokenizer: splits the input string into tokens at
// construction time, then hands them out one at a time via next()/done().
// Empty tokens (runs of consecutive delimiters) are never stored.
class Tokenizer {
public:
// Split s on a single delimiter character (default: space).
explicit Tokenizer ( const std::string& s, char delim = ' ' );
// Split s on ANY character contained in delim (delim is a set of
// delimiter characters, not a multi-character separator).
explicit Tokenizer ( const std::string& s, const std::string& delim );
public:
// Return the next token, or an empty string once all tokens are consumed.
std::string next() { return !done() ? *current++ : std::string(); }
// True when every token has been handed out.
bool done() const { return current == tokens.end(); }
private:
std::vector<std::string> tokens;
std::vector<std::string>::iterator current;
};
Tokenizer::Tokenizer ( const std::string& s, char delim )
{
std::istringstream grabber ( s );
std::string token;
while ( getline ( grabber, token, delim ) ) {
if ( !token.empty() )   // skip empty tokens from consecutive delimiters
tokens.push_back ( token );
}
current = tokens.begin();
}
Tokenizer::Tokenizer ( const std::string& s, const std::string& delim )
{
std::string::size_type front = 0;
while ( front != std::string::npos ) {
// Skip any leading run of delimiter characters.
front = s.find_first_not_of ( delim, front );
if ( front == std::string::npos )
break;
// back is one past the end of the token, or npos at end of string
// (substr with npos-front then takes the rest of the string).
std::string::size_type back = s.find_first_of ( delim, front );
tokens.push_back ( s.substr ( front, back - front ) );
// BUG FIX: the original advanced with "front = back + delim.length()",
// but find_first_of matches a SINGLE character from delim, so that
// skipped delim.length()-1 extra characters and could swallow the start
// of the next token (e.g. s = "a,b" with delim = ", " lost "b").
// Advancing to back is sufficient; the find_first_not_of above skips
// the delimiter run on the next iteration.
front = back;
}
current = tokens.begin();
}