2 files changed: +3 −2 lines changed
@@ -43,6 +43,7 @@
 ### Performance
 
 <!-- Changes that improve Black's performance. -->
+- Speed up the `is_fstring_start` function in Black's tokenizer (#4541)
 
 ### Output
 
@@ -221,7 +221,7 @@ def _combinations(*l: str) -> set[str]:
     | {f"{prefix}'" for prefix in _strprefixes | _fstring_prefixes}
     | {f'{prefix}"' for prefix in _strprefixes | _fstring_prefixes}
 )
-fstring_prefix: Final = (
+fstring_prefix: Final = tuple(
     {f"{prefix}'" for prefix in _fstring_prefixes}
     | {f'{prefix}"' for prefix in _fstring_prefixes}
     | {f"{prefix}'''" for prefix in _fstring_prefixes}
@@ -459,7 +459,7 @@ def untokenize(iterable: Iterable[TokenInfo]) -> str:
 
 
 def is_fstring_start(token: str) -> bool:
-    return builtins.any(token.startswith(prefix) for prefix in fstring_prefix)
+    return token.startswith(fstring_prefix)
 
 
 def _split_fstring_start_and_middle(token: str) -> tuple[str, str]:
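Why this is faster: `str.startswith` accepts a tuple of prefixes and tries all of them in a single call implemented in C, whereas the old code built a generator and invoked `startswith` once per prefix under `builtins.any`. Since `startswith` only accepts a `str` or a tuple of `str` (passing a set raises `TypeError`), the first hunk wraps the prefix set union in `tuple(...)`. A minimal sketch of the pattern, using an illustrative prefix tuple rather than Black's generated `fstring_prefix`:

```python
import timeit

# Illustrative prefixes only; Black builds its real fstring_prefix tuple from
# every case/quote combination of the f-string prefixes.
PREFIXES = ("f'", 'f"', "rf'", 'rf"', "fr'", 'fr"', "F'", 'F"')


def is_fstring_start_old(token: str) -> bool:
    # One Python-level startswith call per prefix, short-circuited by any().
    return any(token.startswith(prefix) for prefix in PREFIXES)


def is_fstring_start_new(token: str) -> bool:
    # str.startswith accepts a tuple and checks every prefix in a single C call.
    return token.startswith(PREFIXES)


token = "not_an_fstring = 1"  # worst case: no prefix matches
print(timeit.timeit(lambda: is_fstring_start_old(token), number=200_000))
print(timeit.timeit(lambda: is_fstring_start_new(token), number=200_000))
```

The tuple form avoids the generator and per-prefix call overhead entirely; exact timings will vary with the machine, CPython version, and number of prefixes.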