Skip to content

Commit

Permalink
Fixed spelling error.
Browse files Browse the repository at this point in the history
  • Loading branch information
Kingson committed Mar 27, 2015
1 parent 16e6203 commit a653d61
Showing 1 changed file with 25 additions and 25 deletions.
50 changes: 25 additions & 25 deletions NetEase News/Sources/neteasenews.py
Original file line number Diff line number Diff line change
def set_cache(query):
    """Fetch the latest NetEase news list for *query* and cache it.

    Downloads the 0-20 article listing for the channel matching *query*
    from NetEase's mobile API, keeps only articles that carry a public
    web URL (``url_3w``), and stores a list of
    ``{'title': ..., 'digest': ..., 'url': ...}`` dicts in the Alfred
    cache under ``'<query>.list'`` for 10 minutes.

    Parameters
    ----------
    query : str
        One of ``'headline'``, ``'sport'``, ``'finance'``, ``'gossip'``,
        ``'keji'``. Any other value is silently ignored (matches the
        original if/elif behavior, which had no else branch).
    """
    # Channel table: query keyword -> (URL path segment, NetEase channel id).
    # 'headline' uses the .../article/headline/... endpoint; all other
    # channels use .../article/list/... .
    channels = {
        'headline': ('headline', 'T1348647853363'),
        'sport': ('list', 'T1348649079062'),
        'finance': ('list', 'T1348648756099'),
        'gossip': ('list', 'T1348648517839'),
        'keji': ('list', 'T1348649580692'),
    }
    if query not in channels:
        return  # unknown query: no-op, same as the original elif chain
    path, channel_id = channels[query]
    url = 'http://c.m.163.com/nc/article/%s/%s/0-20.html' % (path, channel_id)
    result = requests.get(url).json()
    # The API keys the article array by its own channel id.
    response = result[channel_id]
    # Keep only articles with a public web URL ('url_3w'), same filter
    # the original applied in every branch.
    items = [
        dict(title=article['title'],
             digest=article['digest'],
             url=article['url_3w'])
        for article in response
        if 'url_3w' in article
    ]
    alfred.cache.set(query + '.list', items, expire=600)


Expand Down

0 comments on commit a653d61

Please sign in to comment.