-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathpost-processing.py
73 lines (64 loc) · 1.92 KB
/
post-processing.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys
import sqlite3
query='algues vertes AND sangliers'
query='bipolaire'
query='algues vertes AND sangliers'
query='risk assessment'
#query='biofuel'
crawler=con=sqlite3.connect('ouput/'+query+'_crawl.db')
cur=con.execute("select urlid,domain from urlcorpus ")
res=cur.fetchall()
print res
pages = {}
for result in res:
#pages[result[0]]=result[1].replace(',','_')[10:70]
#pages[result[0]]='/'.join(result[1].replace('http://','').split('/')[:1]).replace("www.",'')
pages[result[0]]=result[1]
print pages
def unique(items):
    """Return the elements of *items* with duplicates removed, preserving
    first-seen order.

    Fixes two issues in the original: the parameter shadowed the builtin
    ``list``, and membership was tested against a list (O(n^2) overall);
    a companion set makes each membership test O(1).  Elements must be
    hashable (all call sites pass strings).
    """
    seen = set()
    result = []
    for item in items:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
print (len(pages)), ' web pages '
print (len(unique(pages.values()))), ' unique sites '
cur=con.execute("select fromid,toid from link ")
links=cur.fetchall()
output = open('ouput/net.csv','w')
num_links=0
#for page in pages:
# output.write(pages[page]+'\t'+pages[page]+'\n')
link_list=[]
for link in links:
(fromid,toid)=link
if fromid in pages and toid in pages:
chaine=pages[fromid]+'\t'+pages[toid]
if not chaine in link_list:
link_list.append(chaine)
if 1:# not 'alvinet' in chaine and not 'cle.wn.com' in chaine:
output.write(chaine+'\n')
num_links+=1
print num_links,'total links'
print len(link_list),'total unique links'
cur=con.execute("select fromid,toid from link_whole ")
links=cur.fetchall()
output = open('ouput/net_whole.csv','w')
num_links=0
#for page in pages:
# output.write(pages[page]+'\t'+pages[page]+'\n')
link_list=[]
for link in links:
(fromid,toid)=link
if fromid in pages and toid in pages :
chaine=pages[fromid]+'\t'+pages[toid]
if not pages[fromid]==pages[toid]:
if not chaine in link_list:
link_list.append(chaine)
if 1:# not 'alvinet' in chaine and not 'cle.wn.com' in chaine:
output.write(chaine+'\n')
num_links+=1
print num_links,'total links'
print len(link_list),'total unique links'