@@ -380,19 +380,10 @@ export class _Tokenizer<ParserOutput = string, RendererOutput = string> {
           }
         }
 
-        let istask: RegExpExecArray | null = null;
-        // Check for task list items
-        if (this.options.gfm) {
-          istask = this.rules.other.listIsTask.exec(itemContents);
-          if (istask) {
-            itemContents = itemContents.replace(this.rules.other.listReplaceTask, '');
-          }
-        }
-
         list.items.push({
           type: 'list_item',
           raw,
-          task: !!istask,
+          task: !!this.options.gfm && this.rules.other.listIsTask.test(itemContents),
           loose: false,
           text: itemContents,
           tokens: [],
@@ -417,6 +408,19 @@ export class _Tokenizer<ParserOutput = string, RendererOutput = string> {
         this.lexer.state.top = false;
         item.tokens = this.lexer.blockTokens(item.text, []);
         if (item.task) {
+          // Remove checkbox markdown from item tokens
+          item.text = item.text.replace(this.rules.other.listReplaceTask, '');
+          if (item.tokens[0]?.type === 'text' || item.tokens[0]?.type === 'paragraph') {
+            item.tokens[0].raw = item.tokens[0].raw.replace(this.rules.other.listReplaceTask, '');
+            item.tokens[0].text = item.tokens[0].text.replace(this.rules.other.listReplaceTask, '');
+            for (let i = this.lexer.inlineQueue.length - 1; i >= 0; i--) {
+              if (this.rules.other.listIsTask.test(this.lexer.inlineQueue[i].src)) {
+                this.lexer.inlineQueue[i].src = this.lexer.inlineQueue[i].src.replace(this.rules.other.listReplaceTask, '');
+                break;
+              }
+            }
+          }
+
           const taskRaw = this.rules.other.listTaskCheckbox.exec(item.raw);
           if (taskRaw) {
             const checkboxToken: Tokens.Checkbox = {
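
For orientation, a minimal sketch of how this behavior surfaces through marked's public lexer. This assumes the standard `marked.lexer` entry point and the existing `task` / `checked` / `text` fields on list item tokens; the new `Tokens.Checkbox` token introduced above is not inspected here.

```ts
import { marked } from 'marked';

// Lex a GFM task list. With the change above, the "[x] " / "[ ] " checkbox
// markdown is stripped from the item text (and its first text/paragraph
// token) after block tokenization rather than before it.
const tokens = marked.lexer('- [x] done\n- [ ] pending', { gfm: true });

const list = tokens[0];
if (list.type === 'list') {
  for (const item of list.items) {
    // item.task marks a task list item; item.checked reflects "[x]".
    console.log(item.task, item.checked, JSON.stringify(item.text));
  }
}
```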