tokenize: fix tokenization of equals
parent b4e35c442e
commit c1c9115e5d
1 changed file with 1 addition and 1 deletion
@@ -27,7 +27,7 @@ lif_next_token(char **buf)
 	out++;
 
 	char *end = out;
-	while (*end && !isspace(*end) && *out != '=')
+	while (*end && !isspace(*end) && *end != '=')
 		end++;
 
 	*end++ = '\0';
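The bug: out points at the fixed start of the token, so the old condition *out != '=' never became false unless the token began with '='; a string like "key=value" was therefore scanned as a single token. Testing the advancing cursor *end instead makes the scan stop at the equals sign. The sketch below illustrates the fixed loop in a self-contained tokenizer; only the lines in the hunk above come from the repository, so the surrounding whitespace skipping, buffer advancing, and the next_token name are assumptions made for the example.

#include <ctype.h>
#include <stdio.h>

/*
 * Minimal sketch of a tokenizer using the fixed loop. The full
 * lif_next_token() is not shown in the diff, so everything outside
 * the while loop is an assumed reconstruction for illustration.
 */
static char *
next_token(char **buf)
{
	char *out = *buf;

	/* skip leading whitespace (assumed behavior) */
	while (*out && isspace((unsigned char)*out))
		out++;
	if (*out == '\0')
		return NULL;

	char *end = out;
	/* stop at whitespace or '=': test the advancing cursor *end,
	 * not the token start *out, which is the bug this commit fixes */
	while (*end && !isspace((unsigned char)*end) && *end != '=')
		end++;

	if (*end)
		*end++ = '\0';
	*buf = end;
	return out;
}

int
main(void)
{
	char line[] = "key=value";
	char *p = line;
	char *tok;

	/* with the fix this prints "key" then "value"; before the fix,
	 * the whole string came back as one token */
	while ((tok = next_token(&p)) != NULL)
		printf("token: %s\n", tok);
	return 0;
}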