app.py
94 lines (76 loc) · 3.69 KB
from quart import Quart, render_template, request, jsonify, Response
from hypercorn.config import Config
from hypercorn.asyncio import serve
from dotenv import load_dotenv
from bs4 import BeautifulSoup
import datetime
import asyncio
import aiohttp
import os

load_dotenv()

app = Quart(__name__)
def max_filter(*args):
    # Delegate to the builtin max(); the original shadowed the builtin with a
    # function of the same name that called itself, recursing forever.
    return max(*args)

async def make_request(session, url):
    # Fetch a URL with a short timeout and return the response body as text.
    response = await session.get(url, timeout=3)
    return await response.text()

# Register the filter on the app's own Jinja environment; a detached
# jinja2.Environment() is never consulted by Quart's render_template().
app.jinja_env.filters['max'] = max_filter
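# Hypothetical template usage of the filter (index.html is not shown here), e.g.:
#   {{ [anon_proxy|length, latest_proxy|length]|max }}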
@app.route("/")
async def index():
    # Fan out five concurrent requests to this app's own /api endpoint, one per proxy type.
    try:
        async with aiohttp.ClientSession() as session:
            endpoints = [
                (session, f"{request.url_root}api?type=anon"),
                (session, f"{request.url_root}api?type=ssl"),
                (session, f"{request.url_root}api?type=uk"),
                (session, f"{request.url_root}api?type=us"),
                (session, f"{request.url_root}api?type=latest")
            ]
            responses = await asyncio.gather(*[make_request(*endpoint) for endpoint in endpoints])
    except Exception as error:
        print(error)
        # Fall back to placeholder content so the template still receives five lists.
        responses = ["None "] * 5
    proxies = [[proxy for proxy in response.splitlines() if proxy.strip()] for response in responses]
    anon_proxy, ssl_proxy, uk_proxy, us_proxy, latest_proxy = proxies
    return await render_template("index.html", anon_proxy=anon_proxy, ssl_proxy=ssl_proxy, uk_proxy=uk_proxy,
                                 us_proxy=us_proxy, latest_proxy=latest_proxy)
@app.route("/api")
async def scrape():
    if not request.args.get('type'):
        return jsonify({'error': "'type' not found in request argument...",
                        'time': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
    else:
        # Map the requested proxy type to the page that lists it.
        if "anon" in request.args.get('type'):
            request_url = "https://free-proxy-list.net/anonymous-proxy.html"
        elif "ssl" in request.args.get('type'):
            request_url = "https://www.sslproxies.org/"
        elif "uk" in request.args.get('type'):
            request_url = "https://free-proxy-list.net/uk-proxy.html"
        elif "us" in request.args.get('type'):
            request_url = "https://www.us-proxy.org/"
        elif "latest" in request.args.get('type'):
            request_url = "https://free-proxy-list.net/"
        else:
            return jsonify({'error': 'requested proxy type not found...',
                            'time': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
        async with aiohttp.ClientSession() as session:
            async with session.get(request_url, verify_ssl=True,
                                   headers={'Referer': 'https://google.com.tr',
                                            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36'}) as response:
                soup = BeautifulSoup(await response.text(), 'html.parser')
                # The source pages embed the plain proxy list in a <textarea>, preceded by a
                # header line ending in "UTC."; keep only what follows that marker.
                proxy_list_text = soup.find('textarea').text
                proxy_list = proxy_list_text[proxy_list_text.rfind('UTC.'):]
                if request.args.get('download') == "":
                    # The 'download' query parameter is present (empty value): serve the
                    # list as a downloadable text file instead of inline text.
                    return Response(
                        proxy_list.replace("UTC.", ""),
                        mimetype='text/plain',
                        headers={'Content-disposition': f'attachment; filename={request.args.get("type")}_list.txt'})
                else:
                    return proxy_list.replace("UTC.", "")
if __name__ == "__main__":
    config = Config()
    # Bind address comes from the environment, e.g. a SERV_ADDRESS entry loaded by load_dotenv().
    config.bind = f"{os.getenv('SERV_ADDRESS')}"
    asyncio.run(serve(app, config=config))
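# Example usage, assuming SERV_ADDRESS is set in .env to something like
# "127.0.0.1:5000" (an illustrative value, not taken from this repository):
#   curl "http://127.0.0.1:5000/api?type=ssl"               # plain-text proxy list
#   curl -OJ "http://127.0.0.1:5000/api?type=us&download="  # saves us_list.txt via Content-Disposition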