@@ -2669,10 +2669,10 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
     banned_token_ids.clear();
     if(banned_tokens.size()>0)
     {
-        if(debugmode==1)
-        {
-            printf("\nBanning %zu single character sequences...",banned_tokens.size());
-        }
+        // if(debugmode==1)
+        // {
+        printf("\nBanning %zu single character sequences...",banned_tokens.size());
+        // }
         for(int v=0;v<n_vocab;++v)
         {
             std::string word = FileFormatTokenizeID(v,file_format, true);
@@ -2686,13 +2686,14 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
                 }
             }
         }
-        if(debugmode==1)
-        {
-            printf("\nBanned a total of %zu individual tokens.\n",banned_token_ids.size());
-        }
+        // if(debugmode==1)
+        // {
+        printf("\nBanned a total of %zu individual tokens.\n",banned_token_ids.size());
+        // }
     }

-    if(debugmode==1 && banned_phrases.size()>0)
+    // if(debugmode==1 && banned_phrases.size()>0)
+    if(banned_phrases.size()>0)
     {
         printf("\nBanned a total of %zu phrases, with max token count of %d.\n",banned_phrases.size(),delayed_generated_tokens_limit);
     }
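
For reference, a minimal standalone sketch of the single-token ban pass shown in the hunk above. The middle of the vocab loop is elided in this diff, so the exact-match rule is an assumption, and the helper below (with the detokenizer stubbed via std::function) is illustrative rather than the actual koboldcpp API:

// Illustrative sketch only; FileFormatTokenizeID is stubbed, and the exact-match
// comparison is assumed since the inner loop body is not visible in the hunk.
#include <functional>
#include <string>
#include <vector>

std::vector<int> collect_banned_token_ids(const std::vector<std::string>& banned_tokens,
                                          int n_vocab,
                                          const std::function<std::string(int)>& detokenize)
{
    std::vector<int> banned_token_ids;
    for (int v = 0; v < n_vocab; ++v)
    {
        std::string word = detokenize(v);   // stand-in for FileFormatTokenizeID(v, ...)
        for (const auto& banned : banned_tokens)
        {
            if (word == banned)             // assumed: ban on exact token-text match
            {
                banned_token_ids.push_back(v);
                break;                      // one hit is enough to ban this id
            }
        }
    }
    return banned_token_ids;
}
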
@@ -2793,10 +2794,10 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
             const int MAX_CHAR_LEN = 40;
             const int MAX_SEQ_LEN = 20;

-            if (debugmode == 1)
-            {
-                printf("\nProcessing %zu dry break strings...", kcpp_data->dry_sequence_breakers.size());
-            }
+            // if (debugmode == 1)
+
+            printf("\nProcessing %zu dry break strings...", kcpp_data->dry_sequence_breakers.size());
+
             for (auto sequence_break : kcpp_data->dry_sequence_breakers)
             {
                 if (sequence_break.size() > MAX_CHAR_LEN)
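
A small sketch of the clamping step that continues past this hunk, under the assumption that MAX_CHAR_LEN caps the character length of each DRY sequence breaker (MAX_SEQ_LEN, presumably a cap on the tokenized length, is not exercised here). The function name and truncation-by-resize are placeholders:

// Sketch under stated assumptions: each breaker string is truncated to
// MAX_CHAR_LEN characters before any tokenization happens.
#include <string>
#include <vector>

std::vector<std::string> clamp_dry_breakers(std::vector<std::string> breakers)
{
    const size_t MAX_CHAR_LEN = 40;
    for (auto& sequence_break : breakers)
    {
        if (sequence_break.size() > MAX_CHAR_LEN)
        {
            sequence_break.resize(MAX_CHAR_LEN);   // keep only the first 40 characters
        }
    }
    return breakers;
}
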
@@ -3350,7 +3351,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
             // handle temp bans from antislop
             if (antislop_banned_token_ids.find(n_past) != antislop_banned_token_ids.end()) {
                 std::vector<int>& bans = antislop_banned_token_ids[n_past];
-                // print_tok_vec_str(bans);
+                print_tok_vec_str(bans);
                 for(int t=0;t<bans.size();++t)
                 {
                     logitsPtr[bans[t]]=lowestLogit;
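
The ban application itself is simple enough to sketch standalone. This assumes antislop_banned_token_ids maps a position (n_past) to the token ids temporarily banned at that position, and that lowestLogit is a precomputed floor value; the map type and function name below are illustrative:

// Illustrative only: suppress every token banned at the current position by
// forcing its logit down to a floor value before sampling.
#include <unordered_map>
#include <vector>

void apply_antislop_bans(std::unordered_map<int, std::vector<int>>& banned_by_pos,
                         int n_past, float* logits, float lowest_logit)
{
    auto it = banned_by_pos.find(n_past);
    if (it == banned_by_pos.end()) { return; }   // no temp bans recorded for this position
    for (int tok : it->second)
    {
        logits[tok] = lowest_logit;              // effectively removes tok from sampling
    }
}
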
@@ -3466,7 +3467,8 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
34663467                            std::vector<int >& current_ids = antislop_banned_token_ids[banindex];
34673468                            current_ids.push_back (last_tok);
34683469
3469-                             if  (allow_regular_prints && debugmode == 1 )
3470+                             //  if (allow_regular_prints && debugmode == 1)
3471+                             if  (allow_regular_prints)
34703472                            {
34713473                                auto  match_clean = matched;
34723474                                replace_all (match_clean, " \n "  , " \\ n"  );
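
The replace_all call above escapes newlines before the matched phrase is printed. A self-contained version with the assumed in-place replace-every-occurrence semantics (the real helper lives elsewhere in the codebase and may differ):

// Assumed semantics: replace every occurrence of `from` with `to`, in place.
#include <string>

static void replace_all(std::string& s, const std::string& from, const std::string& to)
{
    if (from.empty()) { return; }
    size_t pos = 0;
    while ((pos = s.find(from, pos)) != std::string::npos)
    {
        s.replace(pos, from.size(), to);
        pos += to.size();   // skip over the inserted text to avoid re-matching it
    }
}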