# Define proxy Profile here.

- def get_data_on_profile(profile,soup,number):
+ # def get_data_on_profile(profile,soup,number):
+ #     # Define profile here
+ #     # Return a List with ip and port, example: [["127.0.0.1","1080","http"]]
+ #     result=[]
+ #
+ #     if profile=='cnproxy':
+ #         proxy_list = soup.select("tbody tr")
+ #         for i in range(0, number):
+ #             try:
+ #                 result.append([proxy_list[i].select("td:nth-of-type(1)")[0].text.strip("-").strip(),
+ #                                proxy_list[i].select("td:nth-of-type(2)")[0].text.strip(), 'http'])
+ #             except Exception as e:
+ #                 pass
+ #     elif profile=='proxynova':
+ #         proxy_list = soup.select("tbody tr")
+ #         for i in range(0, number):
+ #             try:
+ #                 result.append([proxy_list[i].select("td:nth-of-type(1)")[0].abbr.attrs['title'].strip(),
+ #                                proxy_list[i].select("td:nth-of-type(2)")[0].text.strip(),'http'])
+ #             except:
+ #                 pass
+ #     elif profile=='spysone':
+ #
+ #         proxy_list = soup.select("tr.spy1x,tr.spy1xx")
+ #         for i in range(0,number):
+ #             proxy_list[i].select("td:nth-of-type(1)")
+ #         print(soup)
+ #     return result
+
+
+ def get_data_on_profile(profile,driver,number):
      # Define profile here
      # Return a List with ip and port, example: [["127.0.0.1","1080","http"]]
      result = []

      if profile == 'cnproxy':
-         proxy_list = soup.select("tbody tr")
+         proxy_list = driver.find_elements_by_css_selector("tbody tr")
          for i in range(0, number):
              try:
-                 result.append([proxy_list[i].select("td:nth-of-type(1)")[0].text.strip("-").strip(),
-                                proxy_list[i].select("td:nth-of-type(2)")[0].text.strip(), 'http'])
+                 result.append([proxy_list[i].find_elements_by_css_selector("td:nth-of-type(1)")[0].text.strip("-").strip(),
+                                proxy_list[i].find_elements_by_css_selector("td:nth-of-type(2)")[0].text.strip(), 'HTTP'])
              except Exception as e:
                  pass
      elif profile == 'proxynova':
-         proxy_list = soup.select("tbody tr")
+         proxy_list = driver.find_elements_by_css_selector("tbody tr")
          for i in range(0, number):
              try:
-                 result.append([proxy_list[i].select("td:nth-of-type(1)")[0].abbr.attrs['title'].strip(),
-                                proxy_list[i].select("td:nth-of-type(2)")[0].text.strip(),'http'])
+                 result.append([proxy_list[i].find_elements_by_css_selector("td:nth-of-type(1)")[0].text.strip(),
+                                proxy_list[i].find_elements_by_css_selector("td:nth-of-type(2)")[0].text.strip(),'HTTP'])
              except:
                  pass
      elif profile == 'spysone':
-         proxy_list = soup.select("tr.spy1x,tr.spy1xx")
-         for i in range(0,number):
-             proxy_list[i].select("td:nth-of-type(1)")
-         print(soup)
+
+         proxy_list = driver.find_elements_by_css_selector("tr.spy1x,tr.spy1xx")
+         if len(proxy_list) < number + 2:
+             number = len(proxy_list) - 2
+         for i in range(2, number + 2):
+             proxy_info = proxy_list[i].text.strip().split(" ")
+             result.append([proxy_info[1].split(":")[0], proxy_info[1].split(":")[1], proxy_info[2]])
+         print(driver)
      return result
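
For reference, a minimal usage sketch (not part of the commit) of how the new driver-based signature might be called. It assumes the pre-Selenium-4 find_elements_by_css_selector API that the diff relies on, a Chrome WebDriver, and a placeholder URL; the actual proxy-list pages and profile names come from the surrounding project.

# Usage sketch under the assumptions above; the URL is a placeholder, not taken from the diff.
from selenium import webdriver

driver = webdriver.Chrome()                    # any Selenium 3.x WebDriver should work
driver.get("https://example.com/proxy-list")   # hypothetical proxy-list page
proxies = get_data_on_profile('cnproxy', driver, 10)
driver.quit()

for ip, port, protocol in proxies:
    print(ip, port, protocol)                  # e.g. 127.0.0.1 1080 HTTP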