-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathproxify.py
More file actions
146 lines (121 loc) · 6.75 KB
/
proxify.py
File metadata and controls
146 lines (121 loc) · 6.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
import re
from concurrent.futures import ThreadPoolExecutor

import requests
# Simple banner shown on startup.
print("Proxify - Made By Fsociety\n")

# Ask where to save the live proxies before any work starts.
# .strip() + truthiness also catches a whitespace-only answer, which the
# old `filename == ""` check let through as a bogus file name.
filename = input("Enter file name to save live proxies (proxies.txt): ").strip()
if not filename:
    filename = "proxies.txt"
# Proxy sources - 35+ free public proxy lists.
# Each URL is expected to return plain text with one proxy per line;
# unreachable or malformed sources are skipped at fetch time.
PROXY_SOURCES = [
    # Original sources
    "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
    "https://raw.githubusercontent.com/mmpx12/proxy-list/master/http.txt",
    "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=1000&country=all",
    "https://www.proxy-list.download/api/v1/get?type=http",
    "https://www.proxy-list.download/api/v1/get?type=https",
    "https://spys.me/proxy.txt",
    "https://proxylist.icu/proxy.txt",
    "https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/http.txt",
    "https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/https.txt",
    "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
    "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/https.txt",
    "https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/http.txt",
    "https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/https.txt",
    "https://raw.githubusercontent.com/Zaeem20/FREE_PROXIES_LIST/master/http.txt",
    "https://openproxy.space/list/http",
    "https://openproxy.space/list/https",
    # +20 Additional Free Sources
    # NOTE(review): several of these serve SOCKS4/5 lists (and one is a
    # Shadowsocks aggregator); entries are still checked as plain HTTP
    # proxies downstream — confirm that is intended.
    "https://raw.githubusercontent.com/theriturajps/proxy-list/main/http_proxies.txt",
    "https://raw.githubusercontent.com/theriturajps/proxy-list/main/https_proxies.txt",
    "https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks4.txt",
    "https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks5.txt",
    "https://raw.githubusercontent.com/roosterkid/openproxylist/main/HTTPS_RAW.txt",
    "https://raw.githubusercontent.com/roosterkid/openproxylist/main/HTTP_RAW.txt",
    "https://raw.githubusercontent.com/proxy4parsing/proxy-list/main/http.txt",
    "https://raw.githubusercontent.com/proxy4parsing/proxy-list/main/https.txt",
    "https://raw.githubusercontent.com/casals-ar/proxy-list/main/http.txt",
    "https://raw.githubusercontent.com/casals-ar/proxy-list/main/https.txt",
    "https://raw.githubusercontent.com/yemixzy/proxy-list/main/http.txt",
    "https://raw.githubusercontent.com/yemixzy/proxy-list/main/https.txt",
    "https://raw.githubusercontent.com/mmpx12/proxy-list/master/socks4.txt",
    "https://raw.githubusercontent.com/mmpx12/proxy-list/master/socks5.txt",
    "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=http",
    "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=https",
    "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4",
    "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks5",
    "https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt",
    "https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt",
    "https://raw.githubusercontent.com/fahimscirex/proxybd/master/proxylist.txt",
    "https://raw.githubusercontent.com/mahdibland/ShadowsocksAggregator/master/Eternity.txt",
    "https://raw.githubusercontent.com/BlackSnowDot/proxylist-update-every-minute/main/https.txt",
    "https://raw.githubusercontent.com/BlackSnowDot/proxylist-update-every-minute/main/http.txt",
    "https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/HTTP.txt",
    "https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/HTTPS.txt",
    "https://raw.githubusercontent.com/sashikes/proxy-list/main/http.txt",
    "https://raw.githubusercontent.com/sashikes/proxy-list/main/https.txt",
    "https://raw.githubusercontent.com/proxy-lists/proxy-lists/main/http.txt",
    "https://raw.githubusercontent.com/proxy-lists/proxy-lists/main/https.txt",
    "https://raw.githubusercontent.com/opsxcq/proxy-list/master/list.txt",
    "https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt",
    "https://raw.githubusercontent.com/andigw/proxy-list/main/http.txt",
    "https://raw.githubusercontent.com/andigw/proxy-list/main/https.txt",
    "https://raw.githubusercontent.com/ObcbO/getproxy/main/http.txt",
    "https://raw.githubusercontent.com/ObcbO/getproxy/main/https.txt",
]

# Timeout for checking proxies
PROXY_TIMEOUT = 3  # seconds per liveness probe
THREADS = 50  # Number of threads for proxy checking
# Accept only well-formed "IPv4:port" entries; rejects URLs, hostnames,
# HTML fragments, and the explanatory prose some sources embed.
_PROXY_RE = re.compile(r"^(?:\d{1,3}\.){3}\d{1,3}:\d{1,5}$")


def fetch_proxies():
    """Fetch proxies from every URL in PROXY_SOURCES.

    Each source is fetched best-effort: network errors and non-200
    responses are skipped so a single dead mirror cannot abort the run.

    Returns:
        list[str]: unique proxy candidates in "IP:PORT" form.
    """
    proxies = set()
    for url in PROXY_SOURCES:
        try:
            response = requests.get(url, timeout=5)
        except Exception:
            continue  # unreachable/slow source — move on to the next one
        if response.status_code != 200:
            continue
        for line in response.text.splitlines():
            # Lines may carry trailing annotations (country, speed);
            # the proxy itself is always the first whitespace-separated token.
            parts = line.strip().split()
            if not parts:
                continue
            candidate = parts[0]
            # BUG FIX: the old check was `A and B or C` where C was
            # `':' in candidate` — always true here due to and/or
            # precedence, so every colon-bearing line passed. Validate
            # the IP:PORT shape explicitly instead.
            if _PROXY_RE.match(candidate):
                proxies.add(candidate)
    print(f"Total proxies collected: {len(proxies)}")
    return list(proxies)
def check_proxy(proxy):
    """Probe *proxy* with one HTTP request; return it if live, else None.

    A proxy counts as live when an HTTP GET routed through it comes back
    with status 200 within PROXY_TIMEOUT seconds.
    """
    # Both schemes are tunnelled through the same plain-HTTP proxy endpoint.
    proxy_map = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
    try:
        response = requests.get(
            "http://www.google.com", proxies=proxy_map, timeout=PROXY_TIMEOUT
        )
    except Exception:
        return None  # timeout / refused / protocol error — treat as dead
    if response.status_code != 200:
        return None
    print(f"[LIVE] {proxy}")
    return proxy
def save_live_proxies(live_proxies):
    """Overwrite the user-chosen file with one live proxy per line.

    Args:
        live_proxies: proxies (``"IP:PORT"`` strings) confirmed live.

    Uses the module-level ``filename`` chosen at startup.
    """
    with open(filename, "w") as file:
        file.write("\n".join(live_proxies) + "\n")
    # BUG FIX: the message previously printed the literal text "(unknown)"
    # instead of interpolating the destination file name.
    print(f"Saved {len(live_proxies)} live proxies to {filename}")
def main():
    """Fetch proxy candidates, probe them concurrently, save the live ones."""
    print("\nFetching proxies...")
    candidates = fetch_proxies()
    if not candidates:
        print("No proxies found. Exiting...")
        return
    print("Checking proxies for liveliness...")
    # Liveness checks are network-bound, so a thread pool overlaps the waits.
    with ThreadPoolExecutor(max_workers=THREADS) as pool:
        live_proxies = [hit for hit in pool.map(check_proxy, candidates) if hit]
    print(f"Total live proxies found: {len(live_proxies)}")
    if live_proxies:
        save_live_proxies(live_proxies)
    print("Task completed. Exiting.")


if __name__ == "__main__":
    main()