@@ -23,6 +23,7 @@ import string_list;
 import string_pool;
 import token local;
 import utf8;
+import warning_flags;
 
 import string;
 import stdlib;
@@ -264,6 +265,8 @@ public type Tokenizer struct {
 
     string_pool.Pool* pool;     // no ownership
     string_buffer.Buf* buf;     // no ownership, used for strings and character constants
+    const warning_flags.Flags* warnings;
+
     ErrorFn on_error;
     void* on_error_arg;
 
@@ -277,7 +280,7 @@ public type Tokenizer struct {
 
     char[256] error_msg;
 }
-static_assert(408, sizeof(Tokenizer));
+static_assert(416, sizeof(Tokenizer));
 
 public fn void Tokenizer.init(Tokenizer* t,
                               string_pool.Pool* pool,
@@ -286,6 +289,7 @@ public fn void Tokenizer.init(Tokenizer* t,
286289 SrcLoc loc_start,
287290 const keywords.Info* kwinfo,
288291 const string_list.List* features,
292+ const warning_flags.Flags* warnings,
289293 ErrorFn on_error,
290294 void* on_error_arg,
291295 bool raw_mode)
@@ -299,6 +303,7 @@ public fn void Tokenizer.init(Tokenizer* t,
299303 t.line_start = input;
300304 t.pool = pool;
301305 t.buf = buf;
306+ t.warnings = warnings;
302307 t.on_error = on_error;
303308 t.on_error_arg = on_error_arg;
304309
@@ -684,6 +689,15 @@ fn void Tokenizer.error(Tokenizer* t, Token* result, const char* format @(printf
     if (t.on_error) t.on_error(t.on_error_arg, FatalError, result.loc, t.error_msg);
 }
 
+fn void Tokenizer.warning(Tokenizer* t, SrcLoc loc, const char* format @(printf_format), ...) {
+    va_list args;
+    va_start(args, format);
+    vsnprintf(t.error_msg, sizeof(t.error_msg), format, args);
+    va_end(args);
+
+    if (t.on_error) t.on_error(t.on_error_arg, Warning, loc, t.error_msg);
+}
+
 // generate an error but keep parsing
 fn void Tokenizer.num_error(Tokenizer* t, Token* result, const char* p, const char* format @(printf_format), ...) {
     va_list args;
@@ -719,9 +733,8 @@ fn void Tokenizer.lex_identifier(Tokenizer* t, Token* result) {
     while (Identifier_char[(u8)(*end)]) end++;
 
     usize len = (usize)(end - start);
-    if (len > constants.MaxIdentifierLen && !t.raw_mode) {
-        t.error(result, "identifier too long (max %d chars)", constants.MaxIdentifierLen);
-        return;
+    if (len > constants.MaxIdentifierLen && !t.raw_mode && t.warnings && !t.warnings.no_max_identifier_length) {
+        t.warning(result.loc, "identifier too long (max %d chars)", constants.MaxIdentifierLen);
     }
     t.cur += len;
     result.name_idx = t.pool.add(start, len, true);