Merge pull request #38 from samapriya/master
updated v0.4.7
lepisma authored Mar 5, 2020
2 parents 2abf820 + ac1abdb commit 75a0bd7
Showing 5 changed files with 54 additions and 44 deletions.
11 changes: 10 additions & 1 deletion README.rst
@@ -10,7 +10,7 @@ pipwin
.. image:: https://img.shields.io/appveyor/ci/lepisma/pipwin.svg?style=flat-square
:target: https://ci.appveyor.com/project/lepisma/pipwin
:alt: Build

.. image:: https://img.shields.io/pypi/l/pipwin.svg?style=flat-square
:target: https://pypi.python.org/pypi/pipwin/
:alt: License
@@ -70,3 +70,12 @@ Details
<package>``

**Free software: BSD license**


Changelog
---------

v0.4.7
~~~~~~

- Use Postman-style request headers with ``requests`` (instead of robobrowser) to build the package cache.
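
As a rough illustration of the change noted above, here is a sketch condensed from the pipwin.py diff further down (reusing its MAIN_URL and HEADER values): the package index is now fetched with a plain requests call carrying Postman-style headers and parsed with BeautifulSoup, rather than driven through a RoboBrowser session.

import requests
from bs4 import BeautifulSoup

MAIN_URL = "http://www.lfd.uci.edu/~gohlke/pythonlibs/"
HEADER = {
    "Host": "download.lfd.uci.edu",
    "User-Agent": "PostmanRuntime/7.22.0",
    "Accept": "*/*",
}  # trimmed here; the full header set appears in the pipwin.py diff below

# A single GET with the Postman-style headers replaces the RoboBrowser session
response = requests.request("GET", MAIN_URL, headers=HEADER)

# Parse the returned HTML; the obfuscated dl() links are extracted from it later
soup = BeautifulSoup(response.text, features="html.parser")
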
2 changes: 1 addition & 1 deletion pipwin/__init__.py
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-

__version__ = "0.4.6"
__version__ = "0.4.7"
79 changes: 40 additions & 39 deletions pipwin/pipwin.py
@@ -2,7 +2,6 @@
import pip
import pip._internal
import requests
from robobrowser import RoboBrowser
from os.path import expanduser, join, isfile, exists
import os
import subprocess
@@ -12,12 +11,14 @@
from itertools import product
import pyprind
import js2py
from bs4 import BeautifulSoup
import re
import ssl
from requests.adapters import HTTPAdapter
from urllib3.poolmanager import PoolManager
from urllib3.util.ssl_ import create_urllib3_context
import logging

try:
unicode
except NameError:
@@ -33,16 +34,14 @@

MAIN_URL = "http://www.lfd.uci.edu/~gohlke/pythonlibs/"

# Header mimicking the Postman client
HEADER = {
"Host": "download.lfd.uci.edu",
"User-Agent": "PostmanRuntime/7.22.0",
"Accept": "*/*",
"Cache-Control": "no-cache",
"Accept-Encoding": "gzip, deflate, br",
"Referer": "https://www.lfd.uci.edu/~gohlke/pythonlibs",
"Connection": "keep-alive",
"Cache-Control": "max-age=0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Upgrade-Insecure-Requests": "1",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2552.0 Safari/537.3",
"DNT": "1",
"Accept-Encoding": "gzip, deflate, sdch",
"Accept-Language": "en-US,en;q=0.8",
}


@@ -59,14 +58,19 @@ class DESAdapter(HTTPAdapter):
connection is potentially vulnerable to attack!
"""

CIPHERS = 'RSA+3DES:ECDH+3DES:DH+3DES'
CIPHERS = "RSA+3DES:ECDH+3DES:DH+3DES"

def init_poolmanager(self, connections, maxsize, block=False, *args, **kwargs):
context = create_urllib3_context(ciphers=DESAdapter.CIPHERS)
kwargs['ssl_context'] = context
kwargs["ssl_context"] = context
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize,
block=block, ssl_version=ssl.PROTOCOL_SSLv23, *args, **kwargs)
num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=ssl.PROTOCOL_SSLv23,
*args,
**kwargs
)
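
Judging by the commented-out sess.mount call later in this diff, the adapter is defined but not mounted in this version. If the legacy-cipher workaround were ever needed again, wiring it up would look roughly like this (a sketch, not part of this commit):

sess = requests.Session()
# Route HTTPS traffic through the 3DES-enabled adapter defined above
sess.mount("https://", DESAdapter())
response = sess.get(MAIN_URL, headers=HEADER)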


def build_cache():
@@ -77,37 +81,34 @@ def build_cache():
-------
Dictionary containing package details
"""

data = {}
response = requests.request("GET", MAIN_URL, headers=HEADER)

sess = requests.Session()
#sess.mount('https://', DESAdapter())

soup = RoboBrowser(session=sess, parser="html.parser")
soup.open(MAIN_URL)
soup = BeautifulSoup(response.text, features="html.parser")

# We mock out a little javascript environment within which to run Gohlke's obfuscation code
context = js2py.EvalJs()
context.execute("""
context.execute(
"""
top = {location: {href: ''}};
location = {href: ''};
function setTimeout(f, t) {
f();
};
""")
"""
)

# We grab Gohlke's code and evaluate it within js2py
dl_function = re.search(r'function dl.*\}', soup.find("script").text).group(0)
dl_function = re.search(r"function dl.*\}", soup.find("script").text).group(0)
context.execute(dl_function)

links = soup.find(class_="pylibs").find_all("a")
for link in links:
if link.get("onclick") is not None:
# Evaluate the obfuscation javascript, store the result (squirreled away within location.href) into url
regex_result = re.search(r'dl\(.*\)', link.get("onclick"))
regex_result = re.search(r"dl\(.*\)", link.get("onclick"))
if regex_result is None:
logger.info(u'Skip %s (wrong link format)' %
unicode(link.string))
logger.info(u"Skip %s (wrong link format)" % unicode(link.string))
continue
context.execute(regex_result.group(0))
url = context.location.href
@@ -118,11 +119,10 @@ def build_cache():

# Not using EXEs and ZIPs
if len(details) != 5:
logger.info(u'Skip %s (wrong name format)' %
unicode(link.string))
logger.info(u"Skip %s (wrong name format)" % unicode(link.string))
continue
else:
logger.debug(u'Add %s' % unicode(link.string))
logger.debug(u"Add %s" % unicode(link.string))
# arch = win32 / win_amd64 / any
arch = details[4]
arch = arch.split(".")[0]
@@ -131,7 +131,7 @@
py_ver = details[2]

py_ver_key = py_ver + "-" + arch

# print({py_ver_key: {pkg_ver: url}})
if pkg in data.keys():
if py_ver_key in data[pkg].keys():
data[pkg][py_ver_key].update({pkg_ver: url})
@@ -141,8 +141,7 @@
data[pkg] = {py_ver_key: {pkg_ver: url}}
else:
if link.string:
logger.debug(u'Skip %s (missing link)' %
unicode(link.string))
logger.debug(u"Skip %s (missing link)" % unicode(link.string))
return data
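
For reference, the dictionary that build_cache() returns is keyed by package name, then by a "<python tag>-<arch>" string taken from the wheel filename, then by package version, with the download URL as the value. An illustrative entry, with hypothetical package, version, and path:

# Illustrative values only; real keys come from the wheel filenames on the page
{
    "numpy": {
        "cp38-win_amd64": {
            "1.18.1": "https://download.lfd.uci.edu/pythonlibs/<...>/numpy-1.18.1-cp38-cp38-win_amd64.whl"
        }
    }
}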


@@ -210,10 +209,11 @@ def __init__(self, refresh=False):
self.data = build_cache()

with open(self.cache_file, "w") as fp:
fp.write(json.dumps(self.data,
sort_keys=True,
indent=4,
separators=(",", ": ")))
fp.write(
json.dumps(
self.data, sort_keys=True, indent=4, separators=(",", ": ")
)
)

print("Done")

@@ -251,7 +251,9 @@ def search(self, requirement):
return [False, found]

def _get_url(self, requirement):
versions = list(requirement.specifier.filter(self.sys_data[requirement.name].keys()))
versions = list(
requirement.specifier.filter(self.sys_data[requirement.name].keys())
)
if not versions:
raise ValueError("Could not satisfy requirement %s" % (str(requirement)))
return self.sys_data[requirement.name][max(versions)]
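
_get_url relies on the packaging library: requirement.specifier is a SpecifierSet whose filter() keeps only the cached versions satisfying the requirement, and the newest match is then looked up. A small sketch with hypothetical version strings:

from packaging.requirements import Requirement

req = Requirement("numpy>=1.18")               # hypothetical requirement
cached = ["1.17.0", "1.18.1", "1.19.0rc1"]     # hypothetical cached versions
versions = list(req.specifier.filter(cached))  # ["1.18.1"]; prereleases are excluded by default
print(max(versions))                           # the newest satisfying version is chosen
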
@@ -312,15 +314,14 @@ def install(self, requirement):
Install a package
"""
wheel_file = self.download(requirement)
subprocess.check_call([executable, '-m', 'pip', 'install', wheel_file])
subprocess.check_call([executable, "-m", "pip", "install", wheel_file])
os.remove(wheel_file)

def uninstall(self, requirement):
"""
Uninstall a package
"""
subprocess.check_call([executable, '-m', 'pip', 'uninstall', requirement.name])

subprocess.check_call([executable, "-m", "pip", "uninstall", requirement.name])


def refresh():
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,7 +1,7 @@
wheel
docopt
robobrowser
requests
beautifulsoup4
pyprind
six
js2py
4 changes: 2 additions & 2 deletions setup.py
@@ -12,16 +12,16 @@
requirements = [
"docopt",
"requests",
"robobrowser",
"pyprind",
"six",
"beautifulsoup4",
"js2py",
"packaging",
]

setup(
name="pipwin",
version="0.4.6",
version="0.4.7",
description="pipwin installs compiled python binaries on windows provided by Christoph Gohlke",
long_description=readme,
author="lepisma",
