From 7a0732b8c1a138daa411b2c695bf85dd434749d8 Mon Sep 17 00:00:00 2001
From: Alexandre Detiste <alexandre.detiste@gmail.com>
Date: Sun, 6 Oct 2024 17:13:11 +0200
Subject: [PATCH 1/3] remove dependency on six
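
asttokens only supports Python >= 3.8 (per python_requires in setup.cfg),
so the six compatibility layer is dead weight.  Every six helper used in
the code base has a direct standard-library equivalent; a minimal sketch
of the substitutions applied throughout this patch (class and variable
names below are illustrative only):

    import abc

    # six.with_metaclass(abc.ABCMeta, object) -> plain metaclass keyword
    class Base(metaclass=abc.ABCMeta):
      pass

    # six.moves.xrange -> range, six.iteritems(d) -> d.items()
    squares = {n: n * n for n in range(3)}
    items = list(squares.items())

    # six.text_type / six.binary_type / six.string_types -> str / bytes
    assert isinstance("x", (str, bytes))

The six.PY2 / six.PY3 guards are dropped outright, keeping only the
Python 3 branch, and types-six is no longer needed for the mypy step.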

---
 .github/workflows/build-and-test.yml |  2 +-
 asttokens/asttokens.py               | 15 +++--------
 asttokens/mark_tokens.py             | 21 +--------------
 asttokens/util.py                    |  7 +++--
 docs/requirements.in                 |  1 -
 docs/requirements.txt                |  2 --
 setup.cfg                            |  1 -
 tests/test_asttokens.py              |  3 +--
 tests/test_mark_tokens.py            | 40 +++++++++++-----------------
 tests/test_util.py                   |  4 +--
 10 files changed, 26 insertions(+), 70 deletions(-)

diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
index 5fcfd14..79b14b7 100644
--- a/.github/workflows/build-and-test.yml
+++ b/.github/workflows/build-and-test.yml
@@ -62,7 +62,7 @@ jobs:
       - name: Mypy testing
         run: |
           # Not an exact mypy version, as we need 0.942 for pypy-3.8 support, but it's not available on 3.5
-          pip install types-six "mypy>=0.910,<=0.942"
+          pip install "mypy>=0.910,<=0.942"
           python -m mypy asttokens tests/*.py
 
       - name: Fast tests with coverage
diff --git a/asttokens/asttokens.py b/asttokens/asttokens.py
index 77d993f..9f8c5a1 100644
--- a/asttokens/asttokens.py
+++ b/asttokens/asttokens.py
@@ -20,9 +20,6 @@
 from ast import Module
 from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast, TYPE_CHECKING
 
-import six
-from six.moves import xrange  # pylint: disable=redefined-builtin
-
 from .line_numbers import LineNumbers
 from .util import (
   Token, match_token, is_non_coding_token, patched_generate_tokens, last_stmt,
@@ -33,18 +30,15 @@
   from .util import AstNode, TokenInfo
 
 
-class ASTTextBase(six.with_metaclass(abc.ABCMeta, object)):
-  def __init__(self, source_text, filename):
-    # type: (Any, str) -> None
-    # FIXME: Strictly, the type of source_text is one of the six string types, but hard to specify with mypy given
-    # https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
-
+class ASTTextBase(metaclass=abc.ABCMeta):
+  def __init__(self, source_text: Any, filename: str) -> None:
     self._filename = filename
 
     # Decode source after parsing to let Python 2 handle coding declarations.
     # (If the encoding was not utf-8 compatible, then even if it parses correctly,
     # we'll fail with a unicode error here.)
-    source_text = six.ensure_text(source_text)
+    if isinstance(source_text, bytes):
+      source_text = source_text.decode('utf-8')
 
     self._text = source_text
     self._line_numbers = LineNumbers(source_text)
@@ -249,7 +242,7 @@ def token_range(self,
     Yields all tokens in order from first_token through and including last_token. If
     include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT.
     """
-    for i in xrange(first_token.index, last_token.index + 1):
+    for i in range(first_token.index, last_token.index + 1):
       if include_extra or not is_non_coding_token(self._tokens[i].type):
         yield self._tokens[i]
 
diff --git a/asttokens/mark_tokens.py b/asttokens/mark_tokens.py
index a58c78f..c8a7811 100644
--- a/asttokens/mark_tokens.py
+++ b/asttokens/mark_tokens.py
@@ -19,8 +19,6 @@
 from ast import Module
 from typing import Callable, List, Union, cast, Optional, Tuple, TYPE_CHECKING
 
-import six
-
 from . import util
 from .asttokens import ASTTokens
 from .util import AstConstant
@@ -186,16 +184,6 @@ def visit_listcomp(self, node, first_token, last_token):
       # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
       return self.handle_comp('[', node, first_token, last_token)
 
-  if six.PY2:
-    # We shouldn't do this on PY3 because its SetComp/DictComp already have a correct start.
-    def visit_setcomp(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      return self.handle_comp('{', node, first_token, last_token)
-
-    def visit_dictcomp(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      return self.handle_comp('{', node, first_token, last_token)
-
   def visit_comprehension(self,
                           node,  # type: AstNode
                           first_token,  # type: util.Token
@@ -435,7 +423,7 @@ def visit_const(self, node, first_token, last_token):
     assert isinstance(node, AstConstant) or isinstance(node, nc.Const)
     if isinstance(node.value, numbers.Number):
       return self.handle_num(node, node.value, first_token, last_token)
-    elif isinstance(node.value, (six.text_type, six.binary_type)):
+    elif isinstance(node.value, (str, bytes)):
       return self.visit_str(node, first_token, last_token)
     return (first_token, last_token)
 
@@ -473,13 +461,6 @@ def visit_assignname(self, node, first_token, last_token):
       first_token = last_token = self._code.prev_token(colon)
     return (first_token, last_token)
 
-  if six.PY2:
-    # No need for this on Python3, which already handles 'with' nodes correctly.
-    def visit_with(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      first = self._code.find_token(first_token, token.NAME, 'with', reverse=True)
-      return (first, last_token)
-
   # Async nodes should typically start with the word 'async'
   # but Python < 3.7 doesn't put the col_offset there
   # AsyncFunctionDef is slightly different because it might have
diff --git a/asttokens/util.py b/asttokens/util.py
index a360553..58856b4 100644
--- a/asttokens/util.py
+++ b/asttokens/util.py
@@ -23,7 +23,6 @@
 from typing import Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union, cast, Any, TYPE_CHECKING
 
 import astroid
-from six import iteritems
 
 
 if TYPE_CHECKING:  # pragma: no cover
@@ -139,7 +138,7 @@ def iter_children_astroid(node, include_joined_str=False):
   return node.get_children()
 
 
-SINGLETONS = {c for n, c in iteritems(ast.__dict__) if isinstance(c, type) and
+SINGLETONS = {c for n, c in ast.__dict__.items() if isinstance(c, type) and
               issubclass(c, (ast.expr_context, ast.boolop, ast.operator, ast.unaryop, ast.cmpop))}
 
 
@@ -165,9 +164,9 @@ def iter_children_ast(node, include_joined_str=False):
       yield child
 
 
-stmt_class_names = {n for n, c in iteritems(ast.__dict__)
+stmt_class_names = {n for n, c in ast.__dict__.items()
                     if isinstance(c, type) and issubclass(c, ast.stmt)}
-expr_class_names = ({n for n, c in iteritems(ast.__dict__)
+expr_class_names = ({n for n, c in ast.__dict__.items()
                     if isinstance(c, type) and issubclass(c, ast.expr)} |
                     {'AssignName', 'DelName', 'Const', 'AssignAttr', 'DelAttr'})
 
diff --git a/docs/requirements.in b/docs/requirements.in
index 41062f6..5c9cd05 100644
--- a/docs/requirements.in
+++ b/docs/requirements.in
@@ -5,4 +5,3 @@
 sphinx
 sphinx_rtd_theme
 readthedocs-sphinx-search
-six
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 6990a76..14783d7 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -32,8 +32,6 @@ readthedocs-sphinx-search==0.3.2
     # via -r requirements.in
 requests==2.31.0
     # via sphinx
-six==1.16.0
-    # via -r requirements.in
 snowballstemmer==2.2.0
     # via sphinx
 sphinx==6.2.1
diff --git a/setup.cfg b/setup.cfg
index c0b786c..55d0132 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -30,7 +30,6 @@ classifiers =
 [options]
 packages = asttokens
 install_requires =
-    six >= 1.12.0
     typing; python_version < "3.5"
 setup_requires = setuptools>=44; setuptools_scm[toml]>=3.4.3
 python_requires = >=3.8
diff --git a/tests/test_asttokens.py b/tests/test_asttokens.py
index 78d9591..b9489cb 100644
--- a/tests/test_asttokens.py
+++ b/tests/test_asttokens.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 import ast
-import six
 import token
 import tokenize
 import unittest
@@ -103,7 +102,7 @@ def test_unicode_offsets(self):
     # translate correctly.
     source = "foo('фыва',a,b)\n"
     atok = asttokens.ASTTokens(source)
-    self.assertEqual([six.text_type(t) for t in atok.tokens], [
+    self.assertEqual([str(t) for t in atok.tokens], [
       "NAME:'foo'",
       "OP:'('",
       'STRING:"%s"' % repr('фыва').lstrip('u'),
diff --git a/tests/test_mark_tokens.py b/tests/test_mark_tokens.py
index 33b64f7..483c3b6 100644
--- a/tests/test_mark_tokens.py
+++ b/tests/test_mark_tokens.py
@@ -13,7 +13,6 @@
 from time import time
 
 import astroid
-import six
 from asttokens import util, ASTTokens
 
 from . import tools
@@ -139,7 +138,7 @@ def verify_fixture_file(self, path):
     m = self.create_mark_checker(source, verify=False)
     tested_nodes = m.verify_all_nodes(self)
 
-    exp_index = (0 if six.PY2 else 1) + (3 if self.is_astroid_test else 0)
+    exp_index = 1 + (3 if self.is_astroid_test else 0)
     if not self.is_astroid_test:
       # For ast on Python 3.9, slices are expressions, we handle them and test them.
       if issubclass(ast.Slice, ast.expr):
@@ -280,7 +279,7 @@ def test_print_function(self):
     # verify_all_nodes doesn't work on Python 2 because the print() call parsed in isolation
     # is viewed as a Print node since it doesn't see the future import
     source = tools.read_fixture('astroid/nonregr.py')
-    m = self.create_mark_checker(source, verify=six.PY3)
+    m = self.create_mark_checker(source, verify=True)
 
     # Line 16 is: [indent 8] print(v.get('yo'))
     self.assertEqual(m.view_nodes_at(16, 8),
@@ -290,8 +289,7 @@ def test_print_function(self):
   # To make sure we can handle various hard cases, we include tests for issues reported for a
   # similar project here: https://bitbucket.org/plas/thonny
 
-  if not six.PY2:
-    def test_nonascii(self):
+  def test_nonascii(self):
       # Test of https://bitbucket.org/plas/thonny/issues/162/weird-range-marker-crash-with-non-ascii
       # Only on PY3 because Py2 doesn't support unicode identifiers.
       for source in (
@@ -400,8 +398,7 @@ def print_all(a, b, c, d, e):
     m = self.create_mark_checker(source)
     self.assertEqual(m.view_nodes_at(5, 0),
         { "Expr:print_all(*arr)", "Call:print_all(*arr)", "Name:print_all" })
-    if not six.PY2 or self.is_astroid_test:
-      self.assertEqual(m.view_nodes_at(5, 10), { "Starred:*arr" })
+    self.assertEqual(m.view_nodes_at(5, 10), { "Starred:*arr" })
     self.assertEqual(m.view_nodes_at(5, 11), { "Name:arr" })
 
 
@@ -419,16 +416,12 @@ def test_conditional_expr(self):
     m = self.create_mark_checker(source)
     name_a = 'AssignName:a' if self.is_astroid_test else 'Name:a'
     const_true = ('Const:True' if self.is_astroid_test else
-                  'Name:True' if six.PY2 else
                   'Constant:True')
     self.assertEqual(m.view_nodes_at(1, 0),
                      {name_a, "Assign:a = True if True else False", "Module:" + source})
     self.assertEqual(m.view_nodes_at(1, 4),
                      {const_true, 'IfExp:True if True else False'})
-    if six.PY2:
-      self.assertEqual(m.view_nodes_at(2, 0), {"Print:print(a)"})
-    else:
-      self.assertEqual(m.view_nodes_at(2, 0), {"Name:print", "Call:print(a)", "Expr:print(a)"})
+    self.assertEqual(m.view_nodes_at(2, 0), {"Name:print", "Call:print(a)", "Expr:print(a)"})
 
   def test_calling_lambdas(self):
     # See https://bitbucket.org/plas/thonny/issues/96/calling-lambdas-crash-the-debugger
@@ -502,8 +495,7 @@ def test_del_dict(self):
     self.assertEqual(m.view_nodes_at(2, 0), {'Delete:del x[4]'})
     self.assertEqual(m.view_nodes_at(2, 4), {'Name:x', 'Subscript:x[4]'})
 
-  if not six.PY2:
-    def test_bad_tokenless_types(self):
+  def test_bad_tokenless_types(self):
       # Cases where _get_text_positions_tokenless is incorrect in 3.8.
       source = textwrap.dedent("""
         def foo(*, name: str):  # keyword-only argument with type annotation
@@ -513,7 +505,7 @@ def foo(*, name: str):  # keyword-only argument with type annotation
       """)
       self.create_mark_checker(source)
 
-    def test_return_annotation(self):
+  def test_return_annotation(self):
       # See https://bitbucket.org/plas/thonny/issues/9/range-marker-crashes-on-function-return
       source = textwrap.dedent("""
         def liida_arvud(x: int, y: int) -> int:
@@ -602,7 +594,7 @@ def f(x):
         log(x)
       ''')
     # verification fails on Python2 which turns `with X, Y` turns into `with X: with Y`.
-    m = self.create_mark_checker(source, verify=six.PY3)
+    m = self.create_mark_checker(source, verify=True)
     self.assertEqual(m.view_nodes_at(5, 4), {
       'With:with B() as b, C() as c: log(b, c)'
     })
@@ -679,9 +671,8 @@ def test_complex_slice_and_parens(self):
     source = 'f((x)[:, 0])'
     self.create_mark_checker(source)
 
-  if six.PY3:
-    @pytest.mark.slow
-    def test_sys_modules(self):
+  @pytest.mark.slow
+  def test_sys_modules(self):
       """
       Verify all nodes on source files obtained from sys.modules.
 
@@ -735,11 +726,10 @@ def test_sys_modules(self):
           # it's purely an astroid bug that we can safely ignore.
           continue
 
-  if six.PY3:
-    def test_dict_merge(self):
+  def test_dict_merge(self):
       self.create_mark_checker("{**{}}")
 
-    def test_async_def(self):
+  def test_async_def(self):
       self.create_mark_checker("""
 async def foo():
   pass
@@ -749,7 +739,7 @@ async def foo():
   pass
 """)
 
-    def test_async_for_and_with(self):
+  def test_async_for_and_with(self):
       # Can't verify all nodes because in < 3.7
       # async for/with outside of a function is invalid syntax
       m = self.create_mark_checker("""
@@ -760,7 +750,7 @@ async def foo():
       assert m.view_nodes_at(3, 2) == {"AsyncFor:async for x in y: pass"}
       assert m.view_nodes_at(4, 2) == {"AsyncWith:async with x as y: pass"}
 
-    def test_await(self):
+  def test_await(self):
       # Can't verify all nodes because in astroid
       # await outside of an async function is invalid syntax
       m = self.create_mark_checker("""
@@ -923,7 +913,7 @@ def assert_nodes_equal(self, t1, t2):
       )
     else:
       # Weird bug in astroid that collapses spaces in docstrings sometimes maybe
-      if self.is_astroid_test and isinstance(t1, six.string_types):
+      if self.is_astroid_test and isinstance(t1, str):
         t1 = re.sub(r'^ +$', '', t1, flags=re.MULTILINE)
         t2 = re.sub(r'^ +$', '', t2, flags=re.MULTILINE)
 
diff --git a/tests/test_util.py b/tests/test_util.py
index a1e4973..a38fef2 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -7,7 +7,6 @@
 
 import astroid
 import pytest
-import six
 
 from .context import asttokens
 from .tools import get_node_name
@@ -115,8 +114,7 @@ def test_expect_token():
     asttokens.util.expect_token(tok, token.OP)
 
 
-if six.PY3:
-  def test_combine_tokens():
+def test_combine_tokens():
     from tokenize import TokenInfo, generate_tokens, ERRORTOKEN, OP, NUMBER, NAME
     from asttokens.util import combine_tokens, patched_generate_tokens
 

From 631875314519039c45c9635e78810f561378880f Mon Sep 17 00:00:00 2001
From: Alexandre Detiste <alexandre.detiste@gmail.com>
Date: Sun, 6 Oct 2024 17:21:30 +0200
Subject: [PATCH 2/3] fix test_bytes_smoke indentation missed in the six removal

test_bytes_smoke was defined inside the removed `if not six.PY2:` block;
the previous patch dedented the neighbouring defs but not this one,
leaving tests/test_mark_tokens.py failing to import with an
IndentationError.

---
 tests/test_mark_tokens.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_mark_tokens.py b/tests/test_mark_tokens.py
index 483c3b6..cd8ee03 100644
--- a/tests/test_mark_tokens.py
+++ b/tests/test_mark_tokens.py
@@ -303,7 +303,7 @@ def test_nonascii(self):
           "%s:%s" % ("AssignName" if self.is_astroid_test else "Name", source.split("=")[0]),
         })
 
-    def test_bytes_smoke(self):
+  def test_bytes_smoke(self):
       const = 'Const' if self.is_astroid_test else (
           'Constant'
           if sys.version_info >= (3, 8)

From 11d726829ed2a4d7b2d417380ed13b77ce1f55d0 Mon Sep 17 00:00:00 2001
From: Alexandre Detiste <alexandre.detiste@gmail.com>
Date: Sun, 6 Oct 2024 17:36:44 +0200
Subject: [PATCH 3/3] drop obsolete Python-version markers from setup.cfg

With python_requires = >=3.8, the typing backport
(python_version < "3.5") can never be installed and the
python_version >= "3" markers on astroid never apply; removing them
leaves install_requires empty, so drop that key as well.

---
 setup.cfg | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 55d0132..c39bf49 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -29,16 +29,14 @@ classifiers =
 
 [options]
 packages = asttokens
-install_requires =
-    typing; python_version < "3.5"
 setup_requires = setuptools>=44; setuptools_scm[toml]>=3.4.3
 python_requires = >=3.8
 
 [options.extras_require]
 astroid =
-    astroid >=2, <4; python_version >= "3"
+    astroid >=2, <4
 test =
-    astroid >=2, <4; python_version >= "3"
+    astroid >=2, <4
     pytest
     pytest-cov
     pytest-xdist