
Commit

Internal updates
- Pylint fixes
- Move to subprocess run instead of call and Popen
- Switch to f-strings
firecat53 committed Dec 20, 2021
1 parent d0b20bf commit 71afc30
Showing 4 changed files with 53 additions and 51 deletions.
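The two mechanical changes named in the commit message recur through the diffs below: subprocess.call/Popen().communicate() calls become single subprocess.run() calls, and "%"/.format() string building becomes f-strings. A rough standalone sketch of both patterns (the URL value and the xclip command are illustrative stand-ins, not code taken from urlscan):

    import shlex
    import subprocess
    import sys

    url = "https://example.com"  # placeholder for a selected URL

    # Old pattern:
    #   proc = Popen(shlex.split('xdg-open "{}"'.format(url)), stdout=PIPE, stdin=PIPE)
    #   proc.communicate()
    # New pattern: one subprocess.run() call and an f-string
    subprocess.run(shlex.split(f'xdg-open "{url}"'), check=False)

    # When the child reads from stdin, run(input=...) replaces the separate
    # Popen(...).communicate(input=...) step.
    try:
        subprocess.run(["xclip", "-selection", "clipboard"],
                       check=False,
                       input=url.encode(sys.getdefaultencoding()),
                       stdout=subprocess.DEVNULL,
                       stderr=subprocess.DEVNULL)
    except OSError:
        pass  # command not installed; the real code just tries the next candidate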
4 changes: 1 addition & 3 deletions README.md
@@ -14,9 +14,7 @@ Urlscan is a small program that is designed to integrate with the "mutt"
mailreader to allow you to easily launch a Web browser for URLs contained in
email messages. It is a replacement for the "urlview" program.

*NOTE* The last version that is Python 2 compatible is 0.9.3.

Requires: Python 3.6+ and the python-urwid library
Requires: Python 3.7+ and the python-urwid library

## Features

4 changes: 2 additions & 2 deletions setup.py
@@ -6,7 +6,7 @@

def long_description():
"""Generate long description from README"""
with open("README.md") as readme:
with open("README.md", encoding='utf-8') as readme:
return readme.read()


@@ -35,10 +35,10 @@ def long_description():
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Utilities'],
keywords="urlscan, urlview, email, mutt, tmux"
)
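The encoding= argument added to open() here (and to the config and TLD files below) addresses pylint's unspecified-encoding warning, introduced in pylint 2.10, and makes the decoding explicit instead of locale-dependent, presumably part of the "Pylint fixes" bullet. A minimal before/after sketch:

    # Before: decoded with the locale's preferred encoding (pylint warns)
    # with open("README.md") as readme:
    #     text = readme.read()

    # After: the encoding is explicit
    with open("README.md", encoding="utf-8") as readme:
        text = readme.read()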
77 changes: 41 additions & 36 deletions urlscan/urlchoose.py
@@ -23,7 +23,7 @@
from os.path import dirname, exists, expanduser
import re
import shlex
from subprocess import call, Popen, PIPE, DEVNULL
import subprocess
import sys
from threading import Thread
import webbrowser
@@ -82,7 +82,7 @@ def splittext(text, search, attr):
"""
if search:
pat = re.compile("({})".format(re.escape(search)), re.IGNORECASE)
pat = re.compile(f"({re.escape(search)})", re.IGNORECASE)
else:
return text
final = pat.split(text)
@@ -151,20 +151,20 @@ def __init__(self, extractedurls, compact=False, reverse=False, nohelp=False, de
('urlref:url', 'white', 'black', 'standout'),
('url:sel', 'black', 'light gray', 'bold')]
# Boruch's colorized palette
colorized =[('header','brown','black','standout'),
('footer','white','dark red','standout'),
('search','white','dark green','standout'),
('msgtext','light cyan','black'),
('msgtext:ellipses','light gray','black'),
('urlref:number:braces','light gray','black'),
('urlref:number','yellow','black','standout'),
('urlref:url','dark green','black','standout'),
('url:sel','white','black','')]
colorized = [('header', 'brown', 'black', 'standout'),
('footer', 'white', 'dark red', 'standout'),
('search', 'white', 'dark green', 'standout'),
('msgtext', 'light cyan', 'black'),
('msgtext:ellipses', 'light gray', 'black'),
('urlref:number:braces', 'light gray', 'black'),
('urlref:number', 'yellow', 'black', 'standout'),
('urlref:url', 'dark green', 'black', 'standout'),
('url:sel', 'white', 'black', '')]
self.palettes.update([("default", default), ("bw", blw), ("colorized", colorized)])
if genconf is True:
self._config_create()
try:
with open(self.conf, 'r') as conf_file:
with open(self.conf, 'r', encoding=sys.getdefaultencoding()) as conf_file:
data = json.load(conf_file)
try:
for pal_name, pal in data['palettes'].items():
@@ -177,7 +177,7 @@ def __init__(self, extractedurls, compact=False, reverse=False, nohelp=False, de
if value:
if value == "open_url":
urwid.Button._command_map._command[key] = 'activate'
value = getattr(self, "_{}".format(value))
value = getattr(self, f"_{value}")
else:
del self.keys[key]
continue
@@ -187,7 +187,7 @@ def __init__(self, extractedurls, compact=False, reverse=False, nohelp=False, de
except FileNotFoundError:
pass
try:
call(['xdg-open'], stdout=DEVNULL)
subprocess.run(['xdg-open'], check=False, stdout=subprocess.DEVNULL)
self.xdg = True
except OSError:
self.xdg = False
@@ -277,7 +277,7 @@ def handle_keys(self, keys, raw):
"""
for j, k in enumerate(keys):
if self.search is True:
text = "Search: {}".format(self.search_string)
text = f"Search: {self.search_string}"
if k == 'enter':
# Catch 'enter' key to prevent opening URL in mkbrowseto
self.enter = True
@@ -346,7 +346,7 @@ def _quit(self):
def _open_url(self):
"""<Enter> or <space>"""
load_text = "Loading URL..." if self.link_open_modes[0] != (self.run or self.runsafe) \
else "Executing: {}".format(self.run or self.runsafe)
else f"Executing: {self.run or self.runsafe}"
if os.environ.get('BROWSER') not in ['elinks', 'links', 'w3m', 'lynx']:
self._footer_display(load_text, 5)

@@ -362,8 +362,9 @@ def _queue(self, mode=2):
Args: mode - 2 for new tab, 1 for new window
"""
load_text = "Loading URLs in queue..." if self.link_open_modes[0] != (self.run or self.runsafe) \
else "Executing: {}".format(self.run or self.runsafe)
load_text = "Loading URLs in queue..." \
if self.link_open_modes[0] != (self.run or self.runsafe) \
else f"Executing: {self.run or self.runsafe}"
if os.environ.get('BROWSER') in ['elinks', 'links', 'w3m', 'lynx']:
self._footer_display("Opening multiple links not supported in text browsers", 5)
else:
@@ -417,7 +418,7 @@ def _help_menu(self):
"""F1"""
if self.help_menu is False:
self.focus_pos_saved = self.top.base_widget.body.focus_position
help_men = "\n".join(["{} - {}".format(i, j.__name__.strip('_'))
help_men = "\n".join([f"{i} - {j.__name__.strip('_')}"
for i, j in self.keys.items() if j.__name__ !=
'_digits'])
help_men = "KEYBINDINGS\n" + help_men + "\n<0-9> - Jump to item"
@@ -491,7 +492,7 @@ def _digits(self):
pass
self.top.base_widget.keypress(self.size, "") # Trick urwid into redisplaying the cursor
if self.number:
self._footer_display("Selection: {}".format(self.number), 1)
self._footer_display(f"Selection: {self.number}", 1)

def _clear_screen(self):
""" Ctrl-l """
@@ -600,10 +601,13 @@ def _clipboard(self, pri=False):
cmds = COPY_COMMANDS_PRIMARY if pri else COPY_COMMANDS
for cmd in cmds:
try:
proc = Popen(shlex.split(cmd), stdin=PIPE, stdout=DEVNULL, stderr=DEVNULL)
proc.communicate(input=url.encode(sys.getdefaultencoding()))
self._footer_display("Copied url to {} selection".format(
"primary" if pri is True else "clipboard"), 5)
subprocess.run(shlex.split(cmd),
check=False,
input=url.encode(sys.getdefaultencoding()),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
self._footer_display("Copied url to "
f"{'primary' if pri is True else 'clipboard'} selection", 5)
except OSError:
continue
if self.single is True:
@@ -633,7 +637,7 @@ def _config_create(self):
os.makedirs(dirname(expanduser(self.conf)), exist_ok=True)
keys = dict(zip(self.keys.keys(),
[i.__name__.strip('_') for i in self.keys.values()]))
with open(expanduser(self.conf), 'w') as pals:
with open(expanduser(self.conf), 'w', encoding=sys.getdefaultencoding()) as pals:
pals.writelines(json.dumps({"palettes": self.palettes, "keys": keys},
indent=4))
print("Created ~/.config/urlscan/config.json")
@@ -655,7 +659,7 @@ def _footer_callback(self, _loop, _data):
"""
self.number = "" # Clear URL selection number
text = "Search: {}".format(self.search_string)
text = f"Search: {self.search_string}"
if self.search_string:
footer = 'search'
else:
@@ -680,7 +684,7 @@ def _search(self):
""" Search - search URLs and text.
"""
text = "Search: {}".format(self.search_string)
text = f"Search: {self.search_string}"
footerwid = urwid.AttrMap(urwid.Text(text), 'footer')
self.top.base_widget.footer = footerwid
search_items = []
@@ -745,7 +749,7 @@ def mkbrowseto(self, url, thread=False, mode=0):
another function with the URL.
"""
def browse(*args):
def browse(*args): # pylint: disable=unused-argument
# These 3 lines prevent any stderr messages from webbrowser or xdg
savout = os.dup(2)
os.close(2)
@@ -760,20 +764,21 @@ def browse(*args):
elif self.link_open_modes[0] == "Web Browser":
webbrowser.open(url, new=mode)
elif self.link_open_modes[0] == "Xdg-Open":
run = 'xdg-open "{}"'.format(url)
process = Popen(shlex.split(run), stdout=PIPE, stdin=PIPE)
subprocess.run(shlex.split(f'xdg-open "{url}"'), check=False)
elif self.link_open_modes[0] == self.runsafe:
if self.pipe:
process = Popen(shlex.split(self.runsafe), stdout=PIPE, stdin=PIPE)
process.communicate(input=url.encode(sys.getdefaultencoding()))
subprocess.run(shlex.split(self.runsafe),
check=False,
input=url.encode(sys.getdefaultencoding()))
else:
cmd = [i.format(url) for i in shlex.split(self.runsafe)]
Popen(cmd).communicate()
subprocess.run(cmd, check=False)
elif self.link_open_modes[0] == self.run and self.pipe:
process = Popen(shlex.split(self.run), stdout=PIPE, stdin=PIPE)
process.communicate(input=url.encode(sys.getdefaultencoding()))
subprocess.run(shlex.split(self.run),
check=False,
input=url.encode(sys.getdefaultencoding()))
else:
Popen(self.run.format(url), shell=True).communicate()
subprocess.run(self.run.format(url), check=False, shell=True)

if self.single is True:
self._quit()
19 changes: 9 additions & 10 deletions urlscan/urlscan.py
@@ -22,6 +22,7 @@
import locale
import os
import re
from sys import getdefaultencoding


class Chunk:
@@ -40,8 +41,7 @@ def __init__(self, markup, url):
self.url = url

def __str__(self):
return 'Chunk(markup = %s, url= %s)' % (repr(self.markup),
repr(self.url))
return f'Chunk(markup = {repr(self.markup)}, url= {repr(self.url)})'

def __repr__(self):
return self.__str__()
@@ -115,12 +115,11 @@ def end_list_para(self):
if tag == 'ul':
depth = len([t for t in self.list_stack if t[0] == tag])
ul_tags = HTMLChunker.ul_tags
chunk = Chunk('%s ' % (ul_tags[depth % len(ul_tags)]),
self.cur_url())
chunk = Chunk(f"{ul_tags[depth % len(ul_tags)]} ", self.cur_url())
else:
counter = self.list_stack[-1][1]
self.list_stack[-1] = (tag, counter + 1)
chunk = Chunk("%2d." % counter, self.cur_url())
chunk = Chunk(f"{counter:2d}.", self.cur_url())
self.add_chunk(chunk)
else:
self.end_para()
@@ -226,7 +225,7 @@ def handle_charref(self, name):
elif char in HTMLChunker.extrachars:
name = HTMLChunker.extrachars[char]
else:
name = '&#%s;' % name
name = f"&#{name};"
self.handle_data(name)

entities = {'nbsp': ' ',
Expand All @@ -243,7 +242,7 @@ def handle_entityref(self, name):
else:
# If you see a reference, it needs to be
# added above.
self.handle_data('&%s;' % name)
self.handle_data(f"&{name};")


URLINTERNALPATTERN = r'[{}()@\w/\\\-%?!&.=:;+,#~]'
Expand All @@ -260,7 +259,7 @@ def load_tlds():
file = os.path.join(os.path.dirname(__file__),
'assets',
'tlds-alpha-by-domain.txt')
with open(file) as fobj:
with open(file, encoding=getdefaultencoding()) as fobj:
return [elem for elem in fobj.read().lower().splitlines()[1:]
if "--" not in elem]

@@ -316,7 +315,7 @@ def parse_text_urls(mesg, regex=None):
else:
email = match.group("email")
if email and "mailto" not in email:
mailto = "mailto:{}".format(email)
mailto = f"mailto:{email}"
else:
mailto = match.group(1)
rval.append(Chunk(None, mailto))
@@ -412,7 +411,7 @@ def extracturls(mesg, regex=None):
# lines with more than one entry or one entry that's
# a URL are the only lines containing URLs.

linechunks = [parse_text_urls(l, regex=regex) for l in lines]
linechunks = [parse_text_urls(i, regex=regex) for i in lines]

return extract_with_context(linechunks,
lambda chunk: len(chunk) > 1 or
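One subtlety in these f-string conversions: a printf-style spec such as "%2d" goes after the colon inside the braces ("{counter:2d}"), while leaving the literal "%2d." inside the braces is an invalid format specifier and raises ValueError at run time. A quick sanity check, assuming any Python 3.6+ interpreter:

    counter = 7

    old = "%2d." % counter    # printf-style: width 2, then a literal dot -> ' 7.'
    new = f"{counter:2d}."    # equivalent f-string format spec -> ' 7.'

    assert old == new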
