# libreredirect-instances/instances.py
#!/usr/bin/python3
import traceback
import logging
import requests
import json
from urllib.parse import urlparse
import re
from colorama import Fore, Style
import socket
import yaml
# Aggregated results: frontend name -> {network name -> [instance URLs]}.
mightyList = {}
# Network names (clearnet/tor/i2p/loki) loaded from networks.json below.
networks = {}

# Building blocks for URL classification: scheme plus any subdomains …
startRegex = r"https?:\/{2}(?:[^\s\/]+\.)*"
# … followed by an optional path. Raw string: "\/" and "\s" are not valid
# escape sequences in a plain string literal (SyntaxWarning on Python 3.12+).
endRegex = r"(?:\/[^\s\/]+)*\/?"
# Match URLs on the respective overlay networks by their pseudo-TLD.
torRegex = startRegex + "onion" + endRegex
i2pRegex = startRegex + "i2p" + endRegex
lokiRegex = startRegex + "loki" + endRegex
# URLs embedding credentials (user:pass@host).
authRegex = r"https?:\/{2}\S+:\S+@(?:[^\s\/]+\.)*[a-zA-Z0-9]+" + endRegex

with open('networks.json', 'rt') as tmp:
    networks = json.load(tmp)
def filterLastSlash(urlList):
    """Return a copy of *urlList* with one trailing '/' stripped from each URL.

    *urlList* maps frontend -> network -> [urls]; the structure is rebuilt
    rather than mutated, and every stripped URL is reported.
    """
    cleaned = {}
    for frontend, networkMap in urlList.items():
        cleaned[frontend] = {}
        for network, urls in networkMap.items():
            fixedUrls = []
            for url in urls:
                if url.endswith('/'):
                    fixedUrls.append(url[:-1])
                    print(Fore.YELLOW + "Fixed " + Style.RESET_ALL + url)
                else:
                    fixedUrls.append(url)
            cleaned[frontend][network] = fixedUrls
    return cleaned
def idnaEncode(urlList):
    """Return a copy of *urlList* with every URL passed through IDNA encoding.

    Internationalized hostnames become their punycode form; URLs that cannot
    be encoded are kept unchanged. Any URL that was rewritten is reported.
    """
    encoded = {}
    for frontend, networkMap in urlList.items():
        encoded[frontend] = {}
        for network, urls in networkMap.items():
            encoded[frontend][network] = []
            for url in urls:
                try:
                    asciiUrl = url.encode("idna").decode("utf8")
                    encoded[frontend][network].append(asciiUrl)
                    if asciiUrl != url:
                        print(Fore.YELLOW + "Fixed " + Style.RESET_ALL + url)
                except Exception:
                    encoded[frontend][network].append(url)
    return encoded
def ip2bin(ip):
    """Convert a dotted-quad IPv4 string to its 32-character binary string."""
    return "".join("{0:08b}".format(int(octet)) for octet in ip.split("."))
def get_cloudflare_ips():
    """Download Cloudflare's published IPv4 CIDR ranges, one per line."""
    response = requests.get('https://www.cloudflare.com/ips-v4')
    return response.text.split('\n')


# Fetched once at import time; used by is_cloudflare() below.
cloudflare_ips = get_cloudflare_ips()
def is_cloudflare(url):
    """Return True when *url*'s host resolves into a Cloudflare IPv4 range.

    Resolves the hostname, then compares its address bit-prefix against each
    CIDR mask in the module-level cloudflare_ips list. Hosts that fail to
    resolve (e.g. .onion addresses) are treated as not-Cloudflare.
    """
    try:
        instance_ip = socket.gethostbyname(urlparse(url).hostname)
    except Exception:
        return False
    if instance_ip is None:
        return False
    instance_bin = ip2bin(instance_ip)
    for cloudflare_ip_mask in cloudflare_ips:
        # BUG fix: the downloaded list can contain blank lines (trailing
        # newline); split('/')[1] on those raised an uncaught IndexError.
        if '/' not in cloudflare_ip_mask:
            continue
        network_ip, mask_str = cloudflare_ip_mask.split('/')
        mask = int(mask_str)
        # Prefix match on the first *mask* bits of both addresses.
        if ip2bin(network_ip)[:mask] == instance_bin[:mask]:
            print(url + ' is behind ' + Fore.RED +
                  'cloudflare' + Style.RESET_ALL)
            return True
    return False
def fetchCache(frontend):
    """Fall back to the copy of *frontend*'s list saved in ./data.json.

    Called when a live fetch fails: yellow output means stale data was kept,
    red output means not even the cache could be read.
    """
    try:
        with open('./data.json') as file:
            cached = json.load(file)
        mightyList[frontend] = cached[frontend]
        print(Fore.YELLOW + 'Failed' + Style.RESET_ALL + ' to fetch ' + frontend)
    except Exception:
        print(Fore.RED + 'Failed' + Style.RESET_ALL +
              ' to get cached ' + frontend)
def fetchFromFile(frontend):
    """Load *frontend*'s instance list from the static ./fixed/<frontend>.json."""
    path = './fixed/' + frontend + '.json'
    with open(path) as file:
        mightyList[frontend] = json.load(file)
    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
def fetchJsonList(frontend, url, urlItem, jsonObject):
    """Populate mightyList[frontend] from a JSON instance list at *url*.

    urlItem:    dict mapping network -> item key (None = network unsupported),
                a single key string, or None to use each item directly.
    jsonObject: when True the payload wraps the list in an 'instances' key.

    With a dict, URLs are filed under their declared network; otherwise each
    URL is classified by its pseudo-TLD (onion/i2p/loki, else clearnet).
    Falls back to fetchCache on any error.
    """
    try:
        r = requests.get(url)
        rJson = json.loads(r.text)
        if jsonObject:
            rJson = rJson['instances']
        _list = {network: [] for network in networks}
        if isinstance(urlItem, dict):
            for item in rJson:
                for network in networks:
                    key = urlItem[network]
                    if key is None:
                        continue
                    value = item.get(key)
                    if value is None:
                        continue
                    # Some feeds hold a list of URLs per key, others one string.
                    if isinstance(value, list):
                        for candidate in value:
                            if candidate.strip() != '':
                                _list[network].append(candidate)
                    elif value.strip() != '':
                        _list[network].append(value)
        else:
            for item in rJson:
                tmpItem = item if urlItem is None else item[urlItem]
                if tmpItem.strip() == '':
                    continue
                if re.search(torRegex, tmpItem):
                    _list['tor'].append(tmpItem)
                elif re.search(i2pRegex, tmpItem):
                    _list['i2p'].append(tmpItem)
                elif re.search(lokiRegex, tmpItem):
                    _list['loki'].append(tmpItem)
                else:
                    _list['clearnet'].append(tmpItem)
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def fetchRegexList(frontend, url, regex):
    """Populate mightyList[frontend] by scraping *url* with *regex*.

    Every capture is classified by its pseudo-TLD (onion/i2p/loki, else
    clearnet). Falls back to fetchCache on any error.
    """
    try:
        page = requests.get(url)
        _list = {network: [] for network in networks}
        for match in re.findall(regex, page.text):
            if match.strip() == "":
                continue
            if re.search(torRegex, match):
                _list['tor'].append(match)
            elif re.search(i2pRegex, match):
                _list['i2p'].append(match)
            elif re.search(lokiRegex, match):
                _list['loki'].append(match)
            else:
                _list['clearnet'].append(match)
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def fetchTextList(frontend, url, prepend):
    """Populate mightyList[frontend] from newline-separated plain-text lists.

    url / prepend: either dicts keyed by network (one pre-classified source
    per network, None = unsupported) or plain strings (one mixed source whose
    entries are classified by pseudo-TLD). *prepend* supplies the scheme
    prefix added to every line. Falls back to fetchCache on any error.
    """
    try:
        _list = {network: [] for network in networks}
        if isinstance(url, dict):
            # One source per network; entries are filed verbatim.
            for network in networks:
                if url[network] is None:
                    continue
                r = requests.get(url[network])
                for line in r.text.strip().split('\n'):
                    _list[network].append(prepend[network] + line)
        else:
            # Single mixed source; classify each entry by its pseudo-TLD.
            r = requests.get(url)
            for line in r.text.strip().split('\n'):
                item = prepend + line
                if re.search(torRegex, item):
                    _list['tor'].append(item)
                elif re.search(i2pRegex, item):
                    _list['i2p'].append(item)
                elif re.search(lokiRegex, item):
                    _list['loki'].append(item)
                else:
                    _list['clearnet'].append(item)
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def invidious():
    """Invidious: official API list of [name, info] pairs, filed by info['type']."""
    name = 'Invidious'
    frontend = 'invidious'
    url = 'https://api.invidious.io/instances.json'
    # API 'type' value -> our network bucket; other types are ignored.
    typeToNetwork = {'https': 'clearnet', 'onion': 'tor', 'i2p': 'i2p'}
    try:
        rJson = json.loads(requests.get(url).text)
        _list = {'clearnet': [], 'tor': [], 'i2p': [], 'loki': []}
        for instance in rJson:
            info = instance[1]
            network = typeToNetwork.get(info['type'])
            if network is not None:
                _list[network].append(info['uri'])
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def piped():
    """Piped: scrape the wiki instance table, resolve each URL's redirect.

    NOTE(review): only URLs whose final location differs from the listed one
    are kept — confirm this redirect-only filter is intentional.
    Falls back to fetchCache on any error.
    """
    frontend = 'piped'
    try:
        _list = {'clearnet': [], 'tor': [], 'i2p': [], 'loki': []}
        r = requests.get(
            'https://raw.githubusercontent.com/wiki/TeamPiped/Piped/Instances.md')
        tmp = re.findall(
            r' \| (https:\/{2}(?:[^\s\/]+\.)+[a-zA-Z]+) \| ', r.text)
        for item in tmp:
            try:
                url = requests.get(item, timeout=5).url
            except Exception:
                logging.error(traceback.format_exc())
                continue
            if url.strip("/") == item:
                continue
            _list['clearnet'].append(url)
        # BUG fix: remove() raised ValueError when the entry was absent,
        # which threw away the whole fetch and fell back to the cache.
        if "https://piped.video" in _list['clearnet']:
            _list['clearnet'].remove("https://piped.video")
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def pipedMaterial():
    """Piped-Material: production instances listed in the project README."""
    fetchRegexList(
        'pipedMaterial',
        'https://raw.githubusercontent.com/mmjee/Piped-Material/master/README.md',
        r"\| (https?:\/{2}(?:\S+\.)+[a-zA-Z0-9]*) +\| Production")


def cloudtube():
    """CloudTube: static list maintained in ./fixed/."""
    fetchFromFile('cloudtube')


def proxitok():
    """ProxiTok: instance table scraped from the project wiki."""
    fetchRegexList(
        'proxiTok',
        'https://raw.githubusercontent.com/wiki/pablouser1/ProxiTok/Public-instances.md',
        r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)(?: \(Official\))? +\|(?:(?: [A-Z]*.*\|.*\|)|(?:$))")


def send():
    """Send: instance table scraped from the timvisee/send-instances README."""
    fetchRegexList(
        'send',
        'https://gitlab.com/timvisee/send-instances/-/raw/master/README.md',
        r"(https.*?) \|.*?\n")
def nitter():
    """Nitter: scrape the wiki's Public/Tor/I2P/Lokinet sections.

    Each wiki section is cut out with a heading regex, then each of its lines
    is matched with a per-section URL pattern (table rows for Public/Tor,
    bullet lines for I2P/Lokinet). Falls back to fetchCache on any error.
    """
    frontend = 'nitter'
    # (network bucket, section-extraction regex, per-line URL regex)
    sections = [
        ('clearnet', r"## Public((?:\n|.*)+?)##", r"^\| \[.*?\]\((https.*?)\)"),
        ('tor', r"## Tor((?:\n|.*)+?)##", r"^\| <(http.*?)\/?>"),
        ('i2p', r"## I2P((?:\n|.*)+?)##", r"^- <(http.*?)\/?>"),
        ('loki', r"## Lokinet((?:\n|.*)+?)##", r"^- <(http.*?)\/?>"),
    ]
    try:
        r = requests.get(
            'https://raw.githubusercontent.com/wiki/zedeus/nitter/Instances.md')
        _list = {}
        for network, sectionRegex, lineRegex in sections:
            _list[network] = []
            section = re.findall(sectionRegex, r.text)
            for line in section[0].split('\n'):
                result = re.findall(lineRegex, line)
                if len(result) > 0:
                    _list[network].append(result[0])
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def libreddit():
    """Libreddit: JSON list with per-network keys, wrapped in 'instances'."""
    fetchJsonList(
        'libreddit',
        'https://github.com/libreddit/libreddit-instances/raw/master/instances.json',
        {'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None},
        True)


def teddit():
    """Teddit: JSON list with per-network keys, bare top-level array."""
    fetchJsonList(
        'teddit',
        'https://codeberg.org/teddit/teddit/raw/branch/main/instances.json',
        {'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None},
        False)


def spliti():
    """Spliti: static list maintained in ./fixed/."""
    fetchFromFile('spliti')


def vixip():
    """vixip: static list maintained in ./fixed/."""
    fetchFromFile('vixip')
def scribe():
    """Scribe: plain JSON array of instance URLs."""
    fetchJsonList(
        'scribe',
        'https://git.sr.ht/~edwardloveall/scribe/blob/main/docs/instances.json',
        None, False)


def quetre():
    """Quetre: instance table scraped from the project README."""
    fetchRegexList(
        'quetre',
        'https://raw.githubusercontent.com/zyachel/quetre/main/README.md',
        r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|")


def libremdb():
    """libremdb: instance table scraped from the project README."""
    fetchRegexList(
        'libremdb',
        'https://raw.githubusercontent.com/zyachel/libremdb/main/README.md',
        r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|")


def simplytranslate():
    """SimplyTranslate: one hostname-per-line text list per network."""
    fetchTextList(
        'simplyTranslate',
        {'clearnet': 'https://simple-web.org/instances/simplytranslate',
         'tor': 'https://simple-web.org/instances/simplytranslate_onion',
         'i2p': 'https://simple-web.org/instances/simplytranslate_i2p',
         'loki': 'https://simple-web.org/instances/simplytranslate_loki'},
        {'clearnet': 'https://', 'tor': 'http://',
         'i2p': 'http://', 'loki': 'http://'})


def linvgatranslate():
    """Lingva Translate: plain JSON array of instance URLs."""
    fetchJsonList(
        'lingva',
        'https://raw.githubusercontent.com/TheDavidDelta/lingva-translate/main/instances.json',
        None, False)
def searxng():
    """SearXNG: filter searx.space's mixed list down to SearXNG generators.

    Entries whose 'generator' is not 'searxng' are skipped; the remainder are
    classified by pseudo-TLD. Consistency fix: unlike every other fetcher
    this function had no fallback — a network error killed the whole script;
    it now falls back to fetchCache like its siblings.
    """
    frontend = 'searxng'
    try:
        r = requests.get('https://searx.space/data/instances.json')
        rJson = json.loads(r.text)
        searxngList = {'clearnet': [], 'tor': [], 'i2p': [], 'loki': []}
        for item, meta in rJson['instances'].items():
            if meta.get('generator') != 'searxng':
                continue
            # Keys appear to carry a trailing slash — stripped here.
            url = item[:-1]
            if re.search(torRegex, url):
                searxngList['tor'].append(url)
            elif re.search(i2pRegex, url):
                searxngList['i2p'].append(url)
            else:
                searxngList['clearnet'].append(url)
        mightyList['searxng'] = searxngList
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'SearXNG')
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def searx():
    """searx: parse the official YAML instance registry.

    Top-level keys are clearnet URLs; 'additional_urls' entries labelled
    "Hidden Service" are filed under tor. Consistency fix: added the same
    fetchCache fallback every other fetcher has.
    """
    frontend = 'searx'
    try:
        searxList = {'clearnet': [], 'tor': [], 'i2p': [], 'loki': []}
        r = requests.get(
            'https://raw.githubusercontent.com/searx/searx-instances/master/searxinstances/instances.yml')
        data = yaml.safe_load(r.text)
        for key in data:
            searxList['clearnet'].append(key)
            if 'additional_urls' in data[key]:
                for additional_url in data[key]['additional_urls']:
                    if data[key]['additional_urls'][additional_url] == "Hidden Service":
                        searxList['tor'].append(additional_url)
        mightyList['searx'] = searxList
        # BUG fix: the success message wrongly said 'SearXNG'.
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'searx')
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def whoogle():
    """Whoogle: instance table scraped from the project README."""
    fetchRegexList(
        'whoogle',
        'https://raw.githubusercontent.com/benbusby/whoogle-search/main/README.md',
        r"\| \[https?:\/{2}(?:[^\s\/]+\.)*(?:[^\s\/]+\.)+[a-zA-Z0-9]+\]\((https?:\/{2}(?:[^\s\/]+\.)*(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/?\) \| ")


def librex():
    """LibreX: JSON list with per-network keys, wrapped in 'instances'."""
    fetchJsonList(
        'librex',
        'https://raw.githubusercontent.com/hnhx/librex/main/instances.json',
        {'clearnet': 'clearnet', 'tor': 'tor', 'i2p': 'i2p', 'loki': None},
        True)


def rimgo():
    """rimgo: JSON list with per-network keys, bare top-level array."""
    fetchJsonList(
        'rimgo',
        'https://codeberg.org/video-prize-ranch/rimgo/raw/branch/main/instances.json',
        {'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None},
        False)


def librarian():
    """Librarian: JSON list keyed by 'url', wrapped in 'instances'."""
    fetchJsonList(
        'librarian',
        'https://gitler.moe/suwako/librarian/raw/branch/master/instances.json',
        'url', True)


def simpleamazon():
    """SimpleAmazon: static list maintained in ./fixed/."""
    fetchFromFile('simpleamazon')
def beatbump():
    """Beatbump: static list maintained in ./fixed/."""
    fetchFromFile('beatbump')


def hyperpipe():
    """Hyperpipe: JSON list keyed by 'url'."""
    fetchJsonList(
        'hyperpipe',
        'https://codeberg.org/Hyperpipe/pages/raw/branch/main/api/frontend.json',
        'url', False)


def facil():
    """FacilMap: static list maintained in ./fixed/."""
    fetchFromFile('facil')


def osm():
    """OpenStreetMap: static list maintained in ./fixed/."""
    fetchFromFile('osm')


def libreTranslate():
    """LibreTranslate: mirror table scraped from the project README."""
    fetchRegexList(
        'libreTranslate',
        'https://raw.githubusercontent.com/LibreTranslate/LibreTranslate/main/README.md',
        r"\[(?:[^\s\/]+\.)+[a-zA-Z0-9]+\]\((https?:\/{2}(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/?\)\|")


def breezeWiki():
    """BreezeWiki: JSON list keyed by 'instance'."""
    fetchJsonList(
        'breezeWiki',
        'https://docs.breezewiki.com/files/instances.json',
        'instance', False)


def privateBin():
    """PrivateBin: filtered directory API, JSON list keyed by 'url'."""
    fetchJsonList(
        'privateBin',
        'https://privatebin.info/directory/api?top=100&https_redirect=true&min_rating=A&csp_header=true&min_uptime=100&attachments=true',
        'url', False)
def neuters():
    """Neuters: static list maintained in ./fixed/."""
    fetchFromFile('neuters')


def libMedium():
    """LibMedium: instance table scraped from the project README."""
    fetchRegexList(
        'libMedium',
        'https://raw.githubusercontent.com/realaravinth/libmedium/master/README.md',
        r"\| (https?:\/{2}(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/? +\|")


def dumb():
    """dumb: instance table scraped from the project README."""
    fetchRegexList(
        'dumb',
        'https://raw.githubusercontent.com/rramiachraf/dumb/main/README.md',
        r"\| <(https?:\/{2}(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/?> +\|")


def ruralDictionary():
    """Rural Dictionary: JSON list with per-network keys."""
    fetchJsonList(
        'ruralDictionary',
        'https://codeberg.org/zortazert/rural-dictionary/raw/branch/master/instances.json',
        {'clearnet': 'clearnet', 'tor': 'tor', 'i2p': 'i2p', 'loki': None},
        False)


def anonymousOverflow():
    """AnonymousOverflow: instance table scraped from the project README."""
    fetchRegexList(
        'anonymousOverflow',
        'https://raw.githubusercontent.com/httpjamesm/AnonymousOverflow/main/README.md',
        r"\| \[(?:[^\s\/]+\.)+[a-zA-Z0-9]+\]\((https?:\/{2}(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/?\) +\|")


def wikiless():
    """Wikiless: static list maintained in ./fixed/."""
    fetchFromFile('wikiless')


def biblioReads():
    """BiblioReads: JSON list with per-network keys."""
    fetchJsonList(
        'biblioReads',
        'https://raw.githubusercontent.com/nesaku/BiblioReads/main/instances.json',
        {'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None},
        False)


def suds():
    """Suds: JSON list with per-network keys."""
    fetchJsonList(
        'suds',
        'https://git.vern.cc/cobra/Suds/raw/branch/main/instances.json',
        {'clearnet': 'clearnet', 'tor': 'tor', 'i2p': 'i2p', 'loki': None},
        False)
def poketube():
    """PokeTube: JSON array whose entries hold the URL at [1]['uri']; clearnet only."""
    frontend = 'poketube'
    try:
        response = requests.get(
            'https://codeberg.org/Ashley/poketube/raw/branch/main/instances.json')
        instances = json.loads(response.text)
        _list = {'clearnet': [], 'tor': [], 'i2p': [], 'loki': []}
        for entry in instances:
            _list['clearnet'].append(entry[1]['uri'])
        mightyList[frontend] = _list
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        fetchCache(frontend)
        logging.error(traceback.format_exc())
def gothub():
    """GotHub: JSON list keyed by 'link'; clearnet only."""
    fetchJsonList(
        'gothub',
        'https://codeberg.org/gothub/gothub-instances/raw/branch/master/instances.json',
        {'clearnet': 'link', 'tor': None, 'i2p': None, 'loki': None},
        False)


def mikuInvidious():
    """MikuInvidious: static list maintained in ./fixed/."""
    fetchFromFile('mikuInvidious')


def tent():
    """Tent: static list maintained in ./fixed/."""
    fetchFromFile('tent')
# Mirrors of the wolfreeAlpha instance list, tried in order.
wolfreeAlpha_url_list = [
    "https://gqq.gitlab.io",
    "https://jqq.gitlab.io",
    "https://rqq.gitlab.io",
    "https://sqq.gitlab.io",
    "https://uqq.gitlab.io"
]
# Index of the next mirror to try (advanced on each failure).
wolfreeAlpha_url_list_i = 0


def wolfreeAlpha(i):
    """Fetch the wolfreeAlpha list from mirror *i*, advancing on failure.

    Fixes over the original: bare `except:` narrowed to Exception with
    logging; recursion is now bounded — once every mirror has failed we fall
    back to fetchCache instead of recursing past the end of the list; the
    module-level `networks` dict and the parameter `i` are no longer shadowed.
    """
    global wolfreeAlpha_url_list_i
    frontend = 'wolfreeAlpha'
    if i >= len(wolfreeAlpha_url_list):
        # Every mirror failed; reuse the cached copy.
        fetchCache(frontend)
        return
    try:
        r = requests.get(wolfreeAlpha_url_list[i] + "/instances.json")
        if r.status_code != 200:
            wolfreeAlpha_url_list_i += 1
            wolfreeAlpha(wolfreeAlpha_url_list_i)
            return
        wolfree = json.loads(r.text)['wolfree']
        mightyList[frontend] = dict(wolfree)
        print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + frontend)
    except Exception:
        logging.error(traceback.format_exc())
        wolfreeAlpha_url_list_i += 1
        wolfreeAlpha(wolfreeAlpha_url_list_i)
def jiti():
    """Jitsi Meet: scrape bare hostnames from the community list, add scheme.

    BUG fix: when fetchRegexList falls back to the cache, the cached entries
    already carry "https://" and inserting the official hosts again would
    duplicate them — both operations are now guarded.
    """
    fetchRegexList('jitsi',
                   "https://raw.githubusercontent.com/jitsi/handbook/master/docs/community/instances.md",
                   r"\|(?:(?: | )+)+((?:[a-z]+\.)+[a-z]+)(?:(?: | )+)+\|")
    mightyList['jitsi']['clearnet'] = [
        host if host.startswith('http') else 'https://' + host
        for host in mightyList['jitsi']['clearnet']
    ]
    # Keep the two official deployments at the front of the list.
    for official in ('https://8x8.vc', 'https://meet.jit.si'):
        if official not in mightyList['jitsi']['clearnet']:
            mightyList['jitsi']['clearnet'].insert(0, official)
def isValid(url):  # This code is contributed by avanitrachhadiya2155
    """Return True when *url* parses with both a scheme and a network location."""
    try:
        parts = urlparse(url)
    except Exception:
        return False
    return bool(parts.scheme) and bool(parts.netloc)
# Run every fetcher in the original order. (simpleamazon and tent are
# defined above but deliberately not invoked here, matching prior behavior.)
for fetcher in (invidious, piped, pipedMaterial, cloudtube, proxitok, send,
                nitter, libreddit, teddit, scribe, quetre, libremdb,
                simplytranslate, linvgatranslate, libreTranslate, searxng,
                searx, whoogle, librex, rimgo, librarian, beatbump, hyperpipe,
                facil, osm, breezeWiki, privateBin, neuters, ruralDictionary,
                libMedium, dumb, anonymousOverflow, wikiless, biblioReads,
                suds, poketube, gothub, mikuInvidious):
    fetcher()
wolfreeAlpha(wolfreeAlpha_url_list_i)
jiti()
spliti()
vixip()
# Normalize every collected URL, drop malformed entries, flag Cloudflare
# hosts, then persist the results.
mightyList = filterLastSlash(mightyList)
mightyList = idnaEncode(mightyList)
cloudflare = []
for frontend, networkMap in mightyList.items():
    if not isinstance(networkMap, dict):
        continue
    for network, instances in networkMap.items():
        # BUG fix: iterate over a snapshot — removing from the list being
        # iterated skipped the element after each removal.
        for instance in list(instances):
            if not isValid(instance):
                instances.remove(instance)
                print("removed " + instance)
            elif (not instance.endswith(('.onion', '.i2p', '.loki'))
                  and is_cloudflare(instance)):
                # Overlay-network hosts can't be behind Cloudflare; only
                # clearnet entries are probed.
                cloudflare.append(instance)
blacklist = {
    'cloudflare': cloudflare
}

# Writing to file
json_object = json.dumps(mightyList, ensure_ascii=False, indent=2)
with open('./data.json', 'w') as outfile:
    outfile.write(json_object)
print(Fore.BLUE + 'wrote ' + Style.RESET_ALL + 'instances/data.json')

json_object = json.dumps(blacklist, ensure_ascii=False, indent=2)
with open('./blacklist.json', 'w') as outfile:
    outfile.write(json_object)
print(Fore.BLUE + 'wrote ' + Style.RESET_ALL + 'instances/blacklist.json')
# print(json_object)