forked from nat/sludge
create puppygirl.systems fork
parent cf2fa1f0c0
commit bce0d97e23
.gitignore (vendored)
@@ -1,5 +1,7 @@
config.yaml
log
cert.pem
key.pem

# ---> Python
# Byte-compiled / optimized / DLL files
@@ -1,4 +1,4 @@
# sludge: webthing for natalieee.net
# sludge: webthing for ~~natalieee.net~~ puppygirl.systems
it rhymes with kludge.

## config
@@ -2,119 +2,8 @@ from .response import Response

from typing import Callable, List

import re
import random
from bs4 import BeautifulSoup

type Patcher = Callable[[Response, 'Request'], Response]

def find_substring_in_lines(s, substring):
    for line_index, line in enumerate(s.splitlines()):
        position = line.find(substring)
        if position != -1:
            return line_index

    return 0

def extract_words_from_line(line):
    clean_line = re.sub(r'<[^>]+>', '', line)
    words = clean_line.split()
    return words

def uwuify_text(text):
    replacements = [
        (r'r', 'w'),
        (r'l', 'w'),
        (r'R', 'W'),
        (r'L', 'W'),
        (r'no', 'nyo'),
        (r'No', 'Nyo'),
        (r'u', 'uwu'),
        (r'U', 'Uwu')
    ]

    for pattern, replacement in replacements:
        text = re.sub(pattern, replacement, text)

    expressions = [" owo", " UwU", " rawr", " >w<"]
    sentences = text.split('. ')
    uwuified_sentences = []

    for sentence in sentences:
        sentence = sentence.strip()
        if sentence:
            uwuified_sentences.append(sentence + (random.choice(expressions) if random.randint(0, 5) > 4 else ''))

    return '. '.join(uwuified_sentences)

def apply_url_params(body, params: str):
    body = body.decode('utf-8')
    soup = BeautifulSoup(body, 'html.parser')

    for a_tag in soup.find_all('a', href=True):
        original_href = a_tag['href']
        if '?' in original_href:
            new_href = f"{original_href}&{params}"
        else:
            new_href = f"{original_href}?{params}"
        a_tag['href'] = new_href

    return str(soup)

def uwuify(body):
    body = body.decode('utf-8')
    soup = BeautifulSoup(body, 'html.parser')

    for text in soup.find_all(text=True):
        if text.parent.name not in ['script', 'style']:
            original_text = text.string
            words = extract_words_from_line(original_text)
            uwuified_words = [uwuify_text(word) for word in words]
            uwuified_text = ' '.join(uwuified_words)
            text.replace_with(uwuified_text)

    for a_tag in soup.find_all('a', href=True):
        original_href = a_tag['href']
        if '?' in original_href:
            new_href = f"{original_href}&uwu=true"
        else:
            new_href = f"{original_href}?uwu=true"
        a_tag['href'] = new_href

    return str(soup)

def is_subdict(sub_dict, main_dict):
    for key, value in sub_dict.items():
        if key not in main_dict or main_dict[key] != value:
            return False
    return True
type Patcher = Callable

patchers: List[Patcher] = [
    # lambda response, request: Response(
    #     response.code,
    #     response.headers,
    #     "\n".join(line.replace('e', 'a') if index > find_substring_in_lines(response.body.decode('utf-8'), '</head>') else line for index, line in enumerate(response.body.decode('utf-8').splitlines())).encode('utf-8')
    # ) if 'text/html' in response.headers.values() else response
    lambda response, request: Response(
        response.code,
        response.headers,
        uwuify(response.body).encode('utf-8')
    ) if 'text/html' in response.headers.values() and is_subdict({'uwu': 'true'}, request.path.params) else response,
    lambda response, request: Response(
        response.code,
        response.headers,
        re.sub(r'sludge', lambda match: 'sludge' + ' (/slʌd͡ʒ/)' if random.randint(0, 5) < 1 else 'sludge', response.body.decode()).encode('utf-8')
    ) if 'text/html' in response.headers.values() else response,
    lambda response, request: Response(
        response.code,
        response.headers,
        apply_url_params(response.body.replace(b'<head>', b'<head><style>:root,body,body>main>section{animation:swing 180s infinite ease-in-out;transform-origin:center}@keyframes swing{0%{transform:rotate(0deg)}50%{transform:rotate(-1deg)}100%{transform:rotate(1deg)}}</style>'), 'swing=true').encode('utf-8')
    ) if 'text/html' in response.headers.values() and (random.randint(0, 100) < 1 or is_subdict({'swing': 'true'}, request.path.params)) else response,
    # spiin!
    lambda response, request: Response(
        response.code,
        response.headers,
        apply_url_params(response.body.replace(b'<head>', b'<head><style>:root,body,body>main>section,body>main>section>flex-grid>flex-grid-item{animation:spiin 480s infinite ease-in-out;transform-origin:center}@keyframes spiin{0%{transform:rotate(0deg)}50%{transform:rotate(180deg)}100%{transform:rotate(360deg)}}</style>'), 'spiin=true').encode('utf-8')
    ) if 'text/html' in response.headers.values() and (random.randint(0, 1000) < 1 or is_subdict({'spiin': 'true'}, request.path.params)) else response
    lambda response, request: response
]
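Context, not part of the diff: the Patcher alias types each entry of patchers as a function from a (Response, Request) pair to a new Response, so the server presumably threads every outgoing response through the list in order, with each patcher either rewriting the response or handing it back untouched (the trailing `else response` branches). A minimal sketch of that application step, assuming a hypothetical apply_patchers helper around the names defined above:

from functools import reduce

# Hypothetical helper -- the real call site is not shown in this diff.
# Folds the response through every patcher in order; patchers that do not
# apply simply return the response they were given.
def apply_patchers(response, request):
    return reduce(lambda resp, patch: patch(resp, request), patchers, response)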
@@ -38,142 +38,35 @@ class Route:
        return self.matcher(request.path)

routes = [
    Route(
        lambda request: request.path == '/style.css',
        [Method.GET],
        lambda request, *_: Response(
            ResponseCode.OK,
            *raw_file_contents('./style.css')
        )
    ),
    Route(
        lambda request: request.path == '/',
        [Method.GET, Method.POST],
        lambda request, *_: Response(
            ResponseCode.OK,
            {'Content-Type': 'text/html'},
            ((parse_file('./home.html', dict(prev='\\/')).encode('utf-8') if not 'Nim httpclient' in request.headers.get('user-agent') else error_page(200).body) if request.method == Method.GET else (
                [
                    (lambda form_data: (
                        (lambda time: (
                            f:=open(f'./files/posts-to-homepage/post_{time}.txt', 'w'),
                            f.write(f"<i style='font-family: MapleMonoItalic'>{form_data['name']}</i>@{time}<br>{form_data['text']}<br><br>"),
                            f.close()
                        ))(datetime.now().strftime('%Y-%m-%d_%H:%M:%S-%f')[:-3]) if set(form_data.keys()) == set(['text', 'name']) else None
                    ))(
                        reduce(
                            lambda acc, d: acc.update(d) or acc,
                            map(lambda key_value_pair: {key_value_pair[0]: remove_html_tags(key_value_pair[1])}, request.body.data.items()),
                            {}
                        )),
                    parse_file('./home.html').encode('utf-8')
                ][1]
            ))
        ) if len(request.body.data) > 0 or request.method != Method.POST else error_page(400)
    ),
    Route(
        lambda path: os.path.isdir('.' + path.path),
        [Method.GET],
        lambda request, *_: Response(
            ResponseCode.OK,
            {'Content-Type': 'text/html'},
            parse_file('./dir_index.html', dict(path='.' + request.path.path, prev=request.headers.get('Referer').replace('/', '\\/') if request.headers.has('Referer') else '')).encode('utf-8')
            parse_file('./index.html').encode('utf-8')
        )
    ),
    Route(
        lambda path: os.path.isfile('.' + path.path) and path.path.startswith('/html/') and (path.path.endswith('.html') or '/thoughts/' in path.path),
        lambda request: os.path.isfile('.' + request.path) and request.path.endswith('.html'),
        [Method.GET],
        lambda request, *_: [print(request.path), Response(
        lambda request, *_: Response(
            ResponseCode.OK,
            {'Content-Type': 'text/html'},
            parse_file('.' + request.path.path, dict((k, v.replace('\'', '')) for k, v in map(lambda item: (item[0], item[1]), request.path.params.items()))).encode('utf-8')
        )][-1]
            parse_file('.' + request.path.path).encode('utf-8')
        )
    ),
    Route(
        lambda path: os.path.isfile('.' + path.path) and (path.path.startswith('/font/') or path.path.startswith('/files/')),
        lambda request: os.path.isfile('.' + request.path),
        [Method.GET],
        lambda request, *_: Response(
            ResponseCode.OK,
            *raw_file_contents('.' + request.path.path)
        )
    ),
    Route(
        lambda request: request.path == '/status',
        [Method.GET],
        lambda *_: Response(
            ResponseCode.OK,
            {'Content-Type': 'text/html'},
            parse('<style>$[cat style.css]</style>$[neofetch | ansi2html]').encode('utf-8')
        )
    ),
    Route(
        lambda request: request.path == '/stats/is-its-computer-online',
        [Method.GET],
        lambda *_: Response(
            ResponseCode.OK,
            {'Content-Type': 'text/html'},
            page("online-p", """
                seconds since last heartbeat message (less than 60: online; less than 120: maybe; more than 120: probably not): $[echo $(( $(date +%s) - $(stat -c %Y ./files/stats/heartbeat) ))]
            """)
        )
    ),
    Route(
        lambda request: request.path == '/stats/what-song-is-it-listening-to',
        [Method.GET],
        lambda *_: Response(
            ResponseCode.OK,
            {'Content-type': 'text/html'},
            page("song?", """
                it is listening to $[cat ./files/stats/song] as of $[echo $(( $(date +%s) - $(stat -c %Y ./files/stats/song) ))] seconds ago.
            """)
        )
    ),
    Route(
        lambda request: request.path == '/stats/is-this-server-online',
        [Method.GET],
        lambda *_: Response(
            ResponseCode.OK,
            {'Content-type': 'text/html'},
            page("server online-p", """
                I think so.
            """)
        )
    ),
    Route(
        lambda request: request.path == '/stats/what-is-its-servers-uptime',
        [Method.GET],
        lambda *_: Response(
            ResponseCode.OK,
            {'Content-type': 'text/html'},
            page("uptime", """
                $[uptime]
            """)
        )
    ),
    Route(
        lambda request: request.path == '/stats/what-vim-buffers-does-it-have-open',
        [Method.GET],
        lambda *_: Response(
            ResponseCode.OK,
            {'Content-type': 'text/html'},
            page("vim bufs", """
                $[cat ./files/stats/vim-bufs | xargs -I% echo %'<br>']
            """)
        )
    ),
    Route(
        lambda request: request.path == '/stats',
        [Method.GET],
        lambda request, *_: Response(
            ResponseCode.OK,
            {'Content-Type': 'text/html'},
            parse_file('./html/stats.html', dict(prev=request.headers.get('Referer').replace('/', '\\/') if request.headers.has('Referer') else '')).encode('utf-8')
        )
    ),
    Route(
        lambda _: True,
        [Method.GET],
        lambda *_: error_page(404)
    )
]
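Context, not part of the diff: each Route bundles a path matcher, a list of allowed Methods, and a handler lambda, and `return self.matcher(request.path)` above suggests matching is delegated to that predicate. Dispatch is presumably "first route whose matcher and method both accept the request wins", falling through to the catch-all route at the end of the list. A rough sketch under that assumption; the dispatch name and the methods/handler attribute names are guesses inferred from the Route(...) constructor arguments, not code from this commit:

# Hypothetical dispatcher -- the real loop lives outside this hunk.
def dispatch(request):
    for route in routes:
        if request.method in route.methods and route.matcher(request.path):
            return route.handler(request)
    return error_page(404)  # fallback; the final catch-all Route already handles unmatched GETs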