tokenize: fix tokenization of equals

Ariadne Conill 2020-09-14 17:24:44 -06:00
parent b4e35c442e
commit c1c9115e5d


@@ -27,7 +27,7 @@ lif_next_token(char **buf)
 	out++;
 	char *end = out;
-	while (*end && !isspace(*end) && *out != '=')
+	while (*end && !isspace(*end) && *end != '=')
 		end++;
 	*end++ = '\0';
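
The bug: the loop's delimiter check tested *out, the first character of the token, which never changes while the loop runs, so an embedded '=' (as in key=value input) could not terminate the token; the fix tests *end, the scan cursor. Below is a minimal, self-contained sketch of the corrected scanning behavior; next_token() is a hypothetical stand-in written for illustration, not the project's actual lif_next_token() source.

#include <ctype.h>
#include <stdio.h>

/* Hypothetical stand-in for lif_next_token(): returns the next token from
 * *buf, splitting on whitespace and '=', and advances *buf past it. */
static char *
next_token(char **buf)
{
	char *out = *buf;

	/* skip leading whitespace */
	while (*out && isspace((unsigned char)*out))
		out++;

	char *end = out;

	/* the fix: test the cursor (*end), not the token start (*out) */
	while (*end && !isspace((unsigned char)*end) && *end != '=')
		end++;

	if (*end)
		*end++ = '\0';

	*buf = end;
	return out;
}

int
main(void)
{
	char line[] = "address=203.0.113.2";
	char *p = line;

	/* prints "address" then "203.0.113.2"; with the old *out check,
	 * the first call would return the whole "address=203.0.113.2" string */
	printf("%s\n", next_token(&p));
	printf("%s\n", next_token(&p));
	return 0;
}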