add scripts
Signed-off-by: Martin Matous <m@matous.dev>
This commit is contained in:
parent
c6c66648ea
commit
5a4419bb4e
12 changed files with 802 additions and 1 deletions
131
README.md
131
README.md
|
|
@ -1,2 +1,131 @@
|
|||
# scripts
|
||||
# Various useful-ish scripts
|
||||
Scripts I wrote at some point or another. Inconsistent quality, no guarantees.
|
||||
|
||||
It's likely some never worked or that I got bored writing halfway.
|
||||
|
||||
## availability-monitor.py
|
||||
Low-budget handmade monitoring for a homeserver.
|
||||
|
||||
Status: active use
|
||||
|
||||
Dependencies: python, dnspython, Matrix account
|
||||
|
||||
Usage: Run as a daemon, `/usr/local/bin/availability-monitor.py <interval-sec>`
|
||||
|
||||
## dnf-search-installed.py
|
||||
Wrapper, marks already installed packages for `dnf search`. Slow.
|
||||
|
||||
Status: active use
|
||||
|
||||
Dependencies: dnf, python
|
||||
|
||||
Setup:
|
||||
```bash
|
||||
sudo cp ./dnf-search-installed.py /usr/local/bin/.
|
||||
alias -s dnf '/usr/local/bin/dnf-search-installed.py'
|
||||
```
|
||||
Usage: `dnf search <package>`
|
||||
|
||||
|
||||
## gasquery.py
|
||||
Query Alchemy API for current ETH L1 basefee.
|
||||
Intended for consumption by i3status-rs' custom block.
|
||||
|
||||
Status: active use
|
||||
|
||||
Dependencies: Alchemy API key
|
||||
|
||||
Usage: `gasquery.py <alchemy-api-key> <notification-threshold>`
|
||||
|
||||
|
||||
## gentoo-chroot.sh
|
||||
Automate chrooting from live USB to fix installed system.
|
||||
|
||||
Status: active use :(
|
||||
|
||||
Dependencies: Nothing unusual
|
||||
|
||||
Usage: `chroot.sh`
|
||||
|
||||
|
||||
## gtokei.sh
|
||||
Wrapper, count lines of code for git repos with [tokei](https://github.com/XAMPPRocky/tokei).
|
||||
|
||||
Status: active use
|
||||
|
||||
Dependencies: tokei, git
|
||||
|
||||
Usage: `gtokei.sh https://github.com/some/repo`
|
||||
|
||||
|
||||
## kernel-update.py
|
||||
Automate chores when configuring, compiling and updating kernel.
|
||||
|
||||
Install step can be simplified by using properly setup installkernel/kernel-install
|
||||
invocation by `make install`.
|
||||
|
||||
Status: active use
|
||||
|
||||
Dependencies: python-magic, Gentoo (not really but genkernel, eselect, specific names and paths...)
|
||||
|
||||
Usage: `kernel-update.py <old-version> <new-version>`
|
||||
|
||||
|
||||
---
|
||||
|
||||
## flac-convert.py
|
||||
Convert all .m4a into max-compressed .flac
|
||||
|
||||
Status: ancient one-off, likely low quality
|
||||
|
||||
Dependencies: ffmpeg
|
||||
|
||||
Usage: `flac-convert.py /path/to/music`
|
||||
|
||||
|
||||
## from-ca-to-server.sh
|
||||
Create own CA and use it to sign a certificate
|
||||
|
||||
Status: ancient one-off, unknown purpose
|
||||
|
||||
Dependencies: openssl
|
||||
|
||||
Usage: ???
|
||||
|
||||
|
||||
## jxl-restore.py
|
||||
Check whether a JPEG version of each .jxl exists; remove the .jxl if it does.
|
||||
Attempt at rescuing collection where conversion got messed up.
|
||||
|
||||
Will fix one day. Maybe.
|
||||
|
||||
Status: ancient one-off, broken
|
||||
|
||||
Dependencies: None
|
||||
|
||||
Usage: None
|
||||
|
||||
|
||||
## invoke-magic.bat
|
||||
Add watermark to photo and create thumbnail.
|
||||
Contains detailed parameter explanation. Created for friend's blog.
|
||||
|
||||
Status: ancient but probably still working
|
||||
|
||||
Dependencies: imagemagick
|
||||
|
||||
Usage: Run next to `logo.png` and `workdir` directory with photos
|
||||
|
||||
|
||||
## sync-apparmor.py
|
||||
Scan source directory of profiles and binaries in the system.
|
||||
Copy over the profiles whose target binaries exist on the system.
|
||||
Useful for initial profile dir setup and mopping up old profiles.
|
||||
|
||||
Doesn't work for subprofiles
|
||||
|
||||
Status: one-off
|
||||
|
||||
Dependencies: apparmor
|
||||
|
||||
Usage: `sync-apparmor.py <src> <dst>`
|
||||
143
availability-monitor.py
Executable file
143
availability-monitor.py
Executable file
|
|
@ -0,0 +1,143 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# nonstdlib requirements: dnspython
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.primitives import serialization, hashes
|
||||
from typing import Dict, List
|
||||
|
||||
import dns.resolver
|
||||
import imaplib
|
||||
import os
|
||||
import requests
|
||||
import smtplib
|
||||
import ssl
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
# Matrix credentials come from the environment so they never land in the repo;
# KeyError here means the daemon was started without them.
MATRIX_ACCESS_TOKEN = os.environ['MATRIX_ACCESS_TOKEN']
MATRIX_NOTIFICATION_ROOM = os.environ['MATRIX_NOTIFICATION_ROOM']
# Polling period in seconds, first CLI argument.
CHECK_INTERVAL = int(sys.argv[1])

# URLs to HEAD-check (committed empty; fill in locally).
MONITORED_URLS = [

]

# Mail hosts to probe over SMTPS/SMTP+STARTTLS/IMAPS and via DNS TLSA.
MONITORED_MAIL = [

]

HTTPS_PORT = 443  # NOTE(review): not referenced in the visible code — confirm
SMTP_PORT = 25
SMTPS_PORT = 465
|
||||
|
||||
def check_tlsa(url: str, port: int, dercert: bytes) -> Dict[str, bytes]:
    """Fetch the TLSA record for url:port and hash the presented cert's SPKI.

    Returns a dict where 'cert' is the data published in the DNS TLSA record
    and 'dns' is the SHA-256 over the DER SubjectPublicKeyInfo of *dercert*;
    the caller compares the two.
    """
    spki = x509.load_der_x509_certificate(dercert).public_key().public_bytes(
        serialization.Encoding.DER,
        serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    hasher = hashes.Hash(hashes.SHA256())
    hasher.update(spki)
    spki_digest = hasher.finalize()

    answer = dns.resolver.resolve(f'_{port}._tcp.{url}', 'TLSA')

    return {'cert': answer[0].cert, 'dns': spki_digest}
|
||||
|
||||
# check that websites are alive
def web_check() -> List[str]:
    """HEAD every monitored URL; return a description for each failure."""
    problems = []
    for target in MONITORED_URLS:
        try:
            reply = requests.head(target)
            if not reply.ok:
                problems.append(f'{target} HEAD returned {reply.status_code}: {reply.reason}')
        except Exception as exc:
            problems.append(f'{target} check failed: {exc}')
    return problems
|
||||
|
||||
def mail_check() -> List[str]:
    """Probe every monitored mail host over SMTPS, SMTP+STARTTLS and IMAPS,
    and validate the host's SMTP TLSA (DANE) record.

    Returns a list of human-readable error strings, empty when all is well.
    """
    errors = []
    for url in MONITORED_MAIL:
        # check that SMTP(S) is alive
        try:
            with smtplib.SMTP_SSL(url, port=SMTPS_PORT) as smtp:
                res = smtp.noop()
                # Expected reply is this particular server's custom NOOP
                # response (typo and all) — verify against your own MTA.
                if res != (250, b'2.0.0 I have sucessfully done nothing'):
                    errors.append(f'{url}:{SMTPS_PORT} check returned {res}')
        except Exception as e:
            errors.append(f'{url} SMTPS check failed: {e}')
        try:
            with smtplib.SMTP(url, port=SMTP_PORT) as smtp:
                smtp.starttls()
                res = smtp.noop()
                if res != (250, b'2.0.0 I have sucessfully done nothing'):
                    errors.append(f'{url}:{SMTP_PORT} check returned {res}')
        except Exception as e:
            errors.append(f'{url}:{SMTP_PORT} SMTP check failed: {e}')

        # check that IMAP is alive
        try:
            with imaplib.IMAP4_SSL(url) as imap:
                res = imap.noop()
                if res != ('OK', [b'NOOP completed']):
                    errors.append(f'{url} IMAP noop returned {res}')
        except Exception as e:
            errors.append(f'{url} IMAP check failed: {e}')

        # check that SMTP TLSA records are valid
        try:
            with smtplib.SMTP(url, port=SMTP_PORT) as smtp:
                smtp.starttls()
                # Grab the peer's DER cert off the (now TLS-wrapped) socket.
                # NOTE(review): smtp.sock is a private attribute; also the
                # comparison assumes a "3 1 1" (DANE-EE, SPKI, SHA-256) TLSA
                # record — confirm what is actually published.
                dercert = smtp.sock.getpeercert(binary_form=True)
                tlsa_hash = check_tlsa(url, SMTP_PORT, dercert)
                if tlsa_hash['cert'] != tlsa_hash['dns']:
                    errors.append(
                        f'{url}:{SMTP_PORT} TLSA record \
{str(tlsa_hash["cert"])} != {str(tlsa_hash["dns"])}'
                    )
        except Exception as e:
            errors.append(f'{url}:{SMTP_PORT} TLSA check failed: {e}')

    return errors
|
||||
|
||||
def report_results(errors: List[str]) -> None:
    """Print the status and push it to the configured Matrix room.

    Falls back to a desktop notification (notify-send) when the Matrix send
    fails, so a broken reporting path is still visible to the user.
    """
    import json  # local import keeps this fix self-contained in the function

    if not errors:
        errors = 'All systems nominal'
    print(errors)

    # Transaction id only needs to be unique per message; unix time is
    # sufficient at this polling granularity.
    txn_id_nonce = str(int(time.time()))
    url = f'https://conduit.koesters.xyz/_matrix/client/r0/rooms/{MATRIX_NOTIFICATION_ROOM}/send/m.room.message/{txn_id_nonce}'
    header_dict = {
        'Accept': 'application/json',
        'Authorization': f'Bearer {MATRIX_ACCESS_TOKEN}',
        'Content-Type': 'application/json'
    }
    # Bug fix: the body used to be assembled with raw f-string interpolation
    # ('{"msgtype":"m.text", "body":"{errors}"}'), which produced invalid
    # JSON whenever the error text contained quotes, backslashes or
    # newlines. json.dumps escapes the payload correctly.
    body = json.dumps({'msgtype': 'm.text', 'body': str(errors)})
    try:
        res = requests.put(url, data=body, headers=header_dict)
        if res.status_code != 200:
            print(res.json())
            subprocess.run(['notify-send', f'Sending error report failed.\nPlease run {sys.argv[0]}\nError {res.json()}'])
    except Exception as e:
        print(e)
        subprocess.run(['notify-send', f'Sending error report failed.\nPlease run {sys.argv[0]}\nError {e}'])
|
||||
|
||||
# Daemon loop: poll every CHECK_INTERVAL seconds and report only when the
# set of errors changes, so identical alerts are not repeated.
errors = []
prev_errors = []
print('Monitoring...')
while True:
    for checker in (web_check, mail_check):
        errors += checker()
    if errors != prev_errors:
        report_results(errors)
        prev_errors = errors.copy()
    errors = []
    sys.stdout.flush()  # make journald/logfile output appear promptly
    time.sleep(CHECK_INTERVAL)
|
||||
|
||||
47
dnf-search-installed.py
Normal file
47
dnf-search-installed.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import dnf
|
||||
import subprocess
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from typing import Set
|
||||
|
||||
DNF = 'dnf'
|
||||
|
||||
def get_installed() -> Set[str]:
    """Return every installed package as a 'name.arch' string."""
    base = dnf.Base()
    base.fill_sack()
    query = base.sack.query()
    return {f'{pkg.name}.{pkg.arch}' for pkg in query.installed()}
|
||||
|
||||
def colorize_g_fg(s: str) -> str:
    """Wrap *s* in ANSI green-foreground and reset escape sequences."""
    green = '\x1b[32m'
    reset = '\x1b[0m'
    return green + s + reset
|
||||
|
||||
# Anything other than 'search' is handed straight to the real dnf binary.
# NOTE(review): execvp's first args element becomes the child's argv[0], so
# dnf sees its own subcommand as argv[0] here — appears to work, but the
# conventional form would be [DNF] + sys.argv[1:]; confirm.
if sys.argv[1] != 'search':
    os.execvp(DNF, sys.argv[1:])
    sys.exit(0)

# call CLI to get all the formatting and highlighting for free
res = subprocess.run(
    [DNF, '--color=always'] + sys.argv[1:],
    capture_output=True,
    encoding='utf-8',
)
lines = res.stdout.split('\n')
installed = get_installed()
printlines = ''
# matching bold, magenta and normal codes from
# https://github.com/rpm-software-management/dnf/blob/master/dnf/cli/term.py
dnf_color = re.compile(r'\x1b\[35m|\x1b\[1m|\x1b\(B\x1b\[m')
i = colorize_g_fg('*')
for line in lines:
    # Section headers ('==== ... ====') and blank lines pass through as-is.
    if not line or line.startswith('='):
        printlines += (line + '\n')
        continue
    # NOTE(review): assumes every result line contains ' : ' (name : summary);
    # a line without it raises ValueError — confirm against dnf's output.
    (package, _) = line.split(' : ', maxsplit=1)
    # Strip dnf's color codes so the name matches the installed set exactly.
    package = dnf_color.sub('', package.rstrip())
    indicator = i if package in installed else ' '
    printlines += f'{indicator} {line}\n'
print(printlines, end='')
|
||||
20
flac-convert.py
Executable file
20
flac-convert.py
Executable file
|
|
@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# Walk the tree given as argv[1]; convert every .m4a to a max-compression
# FLAC next to it and delete the source after a successful conversion.
for root, dirs, files in os.walk(sys.argv[1]):
    for name in files:
        stem, ext = os.path.splitext(name)
        if ext.lower() != '.m4a':
            continue
        src = os.path.join(root, name)
        dst = os.path.join(root, stem + '.flac')
        # check=True aborts the script (keeping the source file) if ffmpeg
        # fails; -y overwrites a stale .flac from a previous partial run.
        # (Bug fix: the CompletedProcess was bound to an unused variable.)
        subprocess.run(
            ['ffmpeg', '-y', '-i', src, '-compression_level', '12', dst],
            check=True,
            capture_output=True,
        )
        print('deleting ', src)
        print('out ', dst)
        os.remove(src)
|
||||
79
from-ca-to-server.sh
Normal file
79
from-ca-to-server.sh
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
# generate key
sudo openssl ecparam -out ca-key.pem -name secp384r1 -genkey
# generate certificate signing request
sudo openssl req -config dassem-ca.conf -new -key ca-key.pem -out ca-cert-req.pem -sha384 -extensions v3_ca
# sign request
sudo openssl x509 -in ca-cert-req.pem -out ca-cert.pem -req -signkey ca-key.pem -days 365 -extfile dassem-ca.conf -extensions v3_ca
# verify output
sudo openssl x509 -in ca-cert.pem -text -noout

# generate key for server
sudo openssl ecparam -out glados-key.pem -name secp384r1 -genkey
# generate request
sudo openssl req -config dassem-ca.conf -new -key glados-key.pem -out glados-cert-req.pem -sha384 -extensions v3_req
# sign it with our CA
sudo openssl ca -in glados-cert-req.pem -out glados-cert.pem -config dassem-ca.conf -extensions v3_req -policy signing_policy

# Bug fix: the reference config below was pasted into the script without
# comment markers, so running the script tried to execute it as shell
# commands. It is kept for reference, now commented out.
#
# config file (dassem-ca.conf) used:
#
# HOME = .
# RANDFILE = /root/.rnd
#
# ####################################################################
# [ ca ]
# default_ca = CA_default # The default ca section
#
# [ CA_default ]
#
# default_days = 1000 # how long to certify for
# default_crl_days = 30 # how long before next CRL
# default_md = sha384 # use public key default MD
# preserve = no # keep passed DN ordering
#
# x509_extensions = v3_ca # The extensions to add to the cert
#
# email_in_dn = no # Don't concat the email in the DN
# copy_extensions = copy # Required to copy SANs from CSR to cert
#
# ####################################################################
# [ req ]
# default_bits = 384
# default_keyfile = ca-key.pem
# distinguished_name = ca_distinguished_name
# x509_extensions = v3_ca
# req_extensions = v3_req
# string_mask = utf8only
#
# ####################################################################
# [ ca_distinguished_name ]
# countryName = Country Name (2 letter code)
# countryName_default = ME
#
# stateOrProvinceName = State or Province Name (full name)
# stateOrProvinceName_default = Malazan Empire
#
# localityName = Locality Name (eg, city)
# localityName_default = Malaz City
#
# organizationName = Organization Name (eg, company)
# organizationName_default = Malazan military forces
#
# organizationalUnitName = Organizational Unit (eg, division)
# organizationalUnitName_default = "Dassem's First Sword"
#
# commonName = Common Name (e.g. server FQDN or YOUR name)
# commonName_default = Dassem Ultor
#
# emailAddress = Email Address
# emailAddress_default = dassem@dessembrae.com
#
# ####################################################################
# [ v3_req ]
# basicConstraints = CA:FALSE
# keyUsage = digitalSignature, nonRepudiation, keyEncipherment
#
# [ v3_ca ]
# subjectKeyIdentifier = hash
# authorityKeyIdentifier = keyid:always, issuer
# basicConstraints = critical, CA:TRUE, pathlen:0
# keyUsage = keyCertSign, cRLSign
|
||||
|
||||
36
gasquery.py
Executable file
36
gasquery.py
Executable file
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
import requests
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# CLI arguments: the Alchemy API key, and the gwei basefee at or below which
# a desktop notification is raised.
API_KEY = sys.argv[1]
BASEFEE_THRESHOLD = int(sys.argv[2])
|
||||
|
||||
def to_gwei(wei: int) -> int:
    """Convert an amount in wei to (truncated) gwei; 1 gwei == 1e9 wei."""
    gwei = wei / 1_000_000_000
    return int(gwei)
|
||||
|
||||
# Ask the Alchemy JSON-RPC endpoint for the current gas price.
response = requests.post(
    url=f'https://eth-mainnet.alchemyapi.io/v2/{API_KEY}',
    data='{"jsonrpc":"2.0","method":"eth_gasPrice","params":[],"id":0}'
)
if not response.ok:
    subprocess.run(['notify-send', 'Gasquery', f'Query returned {response.status_code}: {response.reason}'])
    # i3status-rs still needs valid block JSON on failure.
    print('{"text": "⚠️NaN"}')
    sys.exit()

# Result is a hex-encoded wei amount, e.g. "0x2e90edd000".
basefee = response.json()['result']
basefee = to_gwei(int(basefee, base=16))
if basefee <= BASEFEE_THRESHOLD:
    subprocess.run(['notify-send', 'Gasquery', f'Current basefee is {basefee}'])

# Map the fee to an i3status-rs state.
# Bug fix: 'Critical' (> 200) was tested after 'Warning' (> 150), which made
# the Critical branch unreachable; test the higher bound first.
state = 'Idle'
if basefee < BASEFEE_THRESHOLD:
    state = 'Good'
elif basefee > 200:
    state = 'Critical'
elif basefee > 150:
    state = 'Warning'
text = '{' + f'"state": "{state}", "text": "⛽ {basefee}"' + '}'
print(text)
|
||||
18
gentoo-chroot.sh
Executable file
18
gentoo-chroot.sh
Executable file
|
|
@ -0,0 +1,18 @@
|
|||
#!/usr/bin/env bash
# Bind-mount the live environment's pseudo-filesystems into the installed
# system at MOUNTPOINT and chroot into it. Meant to be run from a live USB
# to repair an installed Gentoo system.

MOUNTPOINT=/mnt/gentoo

# Each step is chained with && so any failed mount aborts before the chroot.
# rslave/slave keep unmount events from propagating back to the live system.
mkdir -p "${MOUNTPOINT}" &&
mount --types proc /proc "${MOUNTPOINT}"/proc &&
mount --rbind /sys "${MOUNTPOINT}"/sys &&
mount --make-rslave "${MOUNTPOINT}"/sys &&
mount --rbind /dev "${MOUNTPOINT}"/dev &&
mount --make-rslave "${MOUNTPOINT}"/dev &&
mount --bind /run "${MOUNTPOINT}"/run &&
mount --make-slave "${MOUNTPOINT}"/run &&
mount LABEL=BOOT "${MOUNTPOINT}"/boot &&
mount --rbind /tmp "${MOUNTPOINT}"/tmp &&
chroot "${MOUNTPOINT}" /bin/bash

# gentoo install media not running systemd
# systemd-nspawn -D /mnt/mychroot --bind=/tmp --resolv-conf=/etc/resolv.conf
|
||||
5
gtokei.sh
Executable file
5
gtokei.sh
Executable file
|
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
# Shallow-clone the repo given as $1 into a throwaway directory, count its
# lines of code with tokei, then remove the clone.

workdir=$(mktemp --directory)
if git clone --depth 1 "$1" "$workdir"; then
    tokei "$workdir"
fi
rm -rf "$workdir"
|
||||
27
invoke-magic.bat
Executable file
27
invoke-magic.bat
Executable file
|
|
@ -0,0 +1,27 @@
|
|||
@echo off
REM Watermark every JPEG under .\workdir with logo.png (output in .\conv),
REM then generate thumbnails of everything into .\thm. Parameter notes at
REM the bottom of the file.

IF NOT EXIST ./\conv mkdir conv
FOR %%f in (./workdir/*.jpg) DO (
magick convert "./workdir/%%f" ^
-resize 1500 -quality 85 ^
logo.png -gravity SouthEast -geometry +0+50 ^
-composite "./conv/%%f"
)


REM Thumbnails: single mogrify pass over the whole workdir.
IF NOT EXIST ./\thm mkdir thm
magick mogrify -path ./thm -thumbnail 250 -quality 85 ./workdir/*

:: Requires ImageMagick installed, modified PATH (gets done during install)
:: resize - [NUM]: width, height get computed; [xNUM]: height, width gets computed;
:: [NUM1xNUM] is max(NUM1,NUM2) that's aspect preserving
:: quality - compression, not consistent between various SW
:: gravity - where to put news element
:: geometry - offset from borders, X,Y
:: composite - compose in order of "background, foreground"
:: path - output, original gets replaced if omitted
:: thumbnail - optimalized resize + compression + strip
:: "*" means "gobble whatever you get your hands on"
:: strip - discard metadata
::
|
||||
38
jxl-restore.py
Executable file
38
jxl-restore.py
Executable file
|
|
@ -0,0 +1,38 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Final
|
||||
|
||||
# CLI arguments: source tree holding the original JPEGs, and the destination
# tree containing the (possibly orphaned) .jxl files.
source: Final = Path(sys.argv[1])
dest: Final = Path(sys.argv[2])
|
||||
|
||||
def find_source_jpg(dest_name: str) -> Path:
    """Locate the original JPEG for *dest_name* (a stem, no extension).

    Looks under the module-level ``source`` tree at the position given by the
    module-level ``relative_path`` (set by the walk loop before each call).
    Raises RuntimeError when no candidate extension matches.
    """
    ext_list = ['.jpg', '.jpeg', '.JPG', '.JPEG']
    for ext in ext_list:
        res = (Path(source) / relative_path / (dest_name + ext))
        # Bug fix: this used to test the unrelated global ``dest_file_path``
        # instead of the candidate path itself, so the function never
        # actually checked its own result (the README's "broken" bug).
        if res.exists():
            return res
    raise RuntimeError('No original found', dest_name)
|
||||
|
||||
# Walk the destination tree; for every .jxl whose JPEG sibling is missing,
# copy the original JPEG back from the matching location in the source tree.
for dest_root, dirs, files in os.walk(dest):
    for name in files:
        dest_name, dest_ext = os.path.splitext(name)
        if dest_ext.lower() != '.jxl':
            continue
        dest_file_path = (Path(dest_root) / (dest_name + '.jpg'))
        print('scanning for ', dest_file_path, '\n')
        if os.path.exists(dest_file_path):  # both .jxl and jpg exist
            print(dest_file_path, ' already exists\n')
            continue
        # Mirror this directory's position (relative to dest) inside source;
        # find_source_jpg reads this module-level name.
        relative_path = os.path.relpath(dest_root, dest)
        print('relapath ', relative_path, '\n')
        src_file_path = find_source_jpg(dest_name)
        shutil.copy2(src_file_path, dest_file_path)
        print('restored ', dest_file_path, ' from ', src_file_path, '\n')
        # Removal of the restored-over .jxl is deliberately left disabled
        # until the restore logic is trusted (README marks this broken).
        #dest_jxl = (Path(dest_root) / (dest_name + '.jxl'))
        #os.remove(dest_jxl)
        #print('deleted ', dest_jxl, '\n')
|
||||
188
kernel-update.py
Executable file
188
kernel-update.py
Executable file
|
|
@ -0,0 +1,188 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import magic
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from packaging.version import Version
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
# Canonical names of the boot artifacts this script manages under /boot.
BOOT_FILES = {
    'config': Path('config-gentoo'),
    'initramfs': Path('initramfs-gentoo.img'),
    'kernel': Path('vmlinuz-gentoo'),
    'system_map': Path('System.map-gentoo'),
}

# Gentoo-style locations of kernel source trees and the boot partition.
SRC_DIR = Path('/usr/src')
BOOT_DIR = Path('/boot')
|
||||
|
||||
|
||||
def backup_kernel(boot_dir: Path, files: Dict[str, Path]) -> None:
    """Copy every boot artifact in *files* to a '<name>.old' sibling."""
    for artifact in files.values():
        original = boot_dir / artifact
        backup = boot_dir / f'{artifact.name}.old'
        print(f'Backing-up {original} to {backup}')
        shutil.copy2(original, backup)
|
||||
|
||||
|
||||
def rollback_impl(boot_dir: Path, files: Dict[str, Path]) -> None:
    """Restore every '<name>.old' backup over its live boot artifact."""
    for artifact in files.values():
        backup = boot_dir / f'{artifact.name}.old'
        target = boot_dir / artifact
        print(f'Restoring {backup} to {target}')
        shutil.copy2(backup, target)
|
||||
|
||||
|
||||
def update_config(old_path: Path, new_path: Path, make_cmd: List[str]) -> None:
    """Migrate .config from the old tree to the new one and let the user edit.

    Copies the old .config over (asking before clobbering an existing one),
    points the kernel symlink at the new tree via eselect, runs
    `make oldconfig` to migrate options, then loops `make menuconfig`
    until the user is done. No-op when both paths are the same.
    """
    if old_path == new_path:
        return
    old_config = old_path/'.config'
    new_config = new_path/'.config'
    while new_config.is_file():
        response = input('New config present. Overwrite? [y/N]').strip().lower()
        if response == 'y':
            break
        elif response == '' or response == 'n':
            return
        else:
            # Bug fix: was print("unrecognized option {}", response), which
            # printed a literal '{}' followed by the value instead of
            # formatting it.
            print(f'unrecognized option {response}')
    print(f'Copying config from {old_config} to {new_config}')
    shutil.copy2(old_config, new_config)

    print(f'Setting symlink to {new_path}')
    subprocess.run(['eselect', 'kernel', 'set', new_path.name])
    print('Migrating config options')
    migrate = make_cmd + ['-C', new_path.as_posix(), 'oldconfig']
    subprocess.run(migrate)
    menuconfig = make_cmd + ['-C', new_path.as_posix(), 'menuconfig']
    while True:
        subprocess.run(menuconfig)
        response = input('Stop editing? [Y/n]').strip().lower()
        if response == '' or response == 'y':
            break
        elif response == 'n':
            continue
        else:
            print(f'unrecognized option {response}')
|
||||
|
||||
|
||||
def compile_kernel(new_path: Path, make_cmd: List[str]) -> None:
    """Run make in the new kernel source tree."""
    subprocess.run([*make_cmd, '-C', new_path.as_posix()])
|
||||
|
||||
|
||||
def install_kernel(kernel_dir: Path, boot_dir: Path, boot_files: Dict[str, Path], make_cmd: List[str]) -> None:
    """Copy built artifacts into the boot dir, install modules, build initramfs.

    Avoids `make install` on purpose so no unwanted bootloader entries get
    created; artifacts are copied by hand instead.
    """
    # Where 'make' leaves each artifact inside the source tree.
    make_files = {
        'config': Path('.config'),
        'system_map': Path('System.map'),
        'kernel': Path('arch/x86/boot/bzImage')
    }

    config = (kernel_dir/make_files['config']).as_posix()
    # subprocess.run(make_cmd.extend(['-C', kernel_dir, 'install'])) # this would create unwanted entries
    # Copy only the artifacts known on both sides; 'initramfs' is absent from
    # make_files because genkernel produces it below.
    common_keys = make_files.keys() & boot_files.keys()
    for key in common_keys:
        src = kernel_dir/make_files[key]
        dst = boot_dir/boot_files[key]
        print(f'Installing {src} to {dst}')
        shutil.copy2(src, dst)

    install_modules = make_cmd + ['-C', kernel_dir.as_posix(), 'modules_install']
    subprocess.run(install_modules)

    # Build the initramfs (with CPU microcode) against the installed config.
    genkernel = ['genkernel', f'--kernel-config={config}', '--microcode', 'initramfs']
    subprocess.run(genkernel)
|
||||
|
||||
|
||||
def linux_folder(src_dir: Path, version: str) -> Path:
    """Return the Gentoo source tree path for *version* under *src_dir*."""
    folder_name = f'linux-{version}-gentoo'
    return src_dir / folder_name
|
||||
|
||||
|
||||
def module_check(boot_dir: Path, boot_files: Dict[str, Path]) -> Tuple[bool, Path]:
    """Return (modules_present, version) for the backed-up '.old' kernel.

    The version is parsed out of libmagic's description of the old kernel
    image, then looked for under /lib/modules.
    """
    old_kernel: Path = boot_dir / (boot_files['kernel'].name + '.old')
    words = magic.from_file(old_kernel).split()
    # libmagic output contains '... version <x.y.z> ...'; take the token
    # following 'version'.
    version = Path(words[words.index('version') + 1])
    modules_present = (Path('/lib/modules') / version).exists()
    return (modules_present, version)
|
||||
|
||||
def rollback_kernel(boot_dir: Path, boot_files: Dict[str, Path], _args: Any) -> None:
    """Restore the '.old' kernel files, refusing when its modules are gone."""
    modules_ok, version = module_check(boot_dir, boot_files)
    if not modules_ok:
        raise RuntimeError(
            f'Module directory not found for {version}.\nRefusing to proceed.'
        )
    rollback_impl(boot_dir, boot_files)
|
||||
|
||||
def update_kernel(boot_dir: Path, boot_files: Dict[str, Path], args: Any) -> None:
    """Drive the update pipeline: backup, config migration, compile, install.

    With no step flags given, the whole pipeline runs in order; otherwise
    only the requested steps run. *args* comes from the 'update' subparser.
    """
    old_path = linux_folder(SRC_DIR, args.old_version)
    new_path = linux_folder(SRC_DIR, args.new_version)
    # (Cleanup: dropped the unused 'new_version = new_path.name' local.)
    make_cmd = ['make', f'-j{len(os.sched_getaffinity(0))}']
    clang_env = ['CC=clang', 'LD=ld.lld', 'LLVM=1', 'LLVM_IAS=1']

    if args.llvm:
        # Build with the clang/LLVM toolchain instead of GCC/binutils.
        make_cmd.extend(clang_env)
    none_selected = not (args.backup or args.config or args.compile or args.install or args.rollback)
    if none_selected:
        backup_kernel(BOOT_DIR, BOOT_FILES)
        update_config(old_path, new_path, make_cmd)
        compile_kernel(new_path, make_cmd)
        install_kernel(new_path, BOOT_DIR, BOOT_FILES, make_cmd)
    if args.backup:
        backup_kernel(BOOT_DIR, BOOT_FILES)
    if args.config:
        update_config(old_path, new_path, make_cmd)
    if args.compile:
        compile_kernel(new_path, make_cmd)
    if args.install:
        install_kernel(new_path, BOOT_DIR, BOOT_FILES, make_cmd)
|
||||
|
||||
def main() -> None:
    """Parse the CLI ('rollback' / 'update' subcommands) and dispatch.

    NOTE(review): invoking with no subcommand leaves args without .func and
    raises AttributeError at the dispatch line — confirm that is intended.
    """
    parser = argparse.ArgumentParser(description='Convenience for manual kernel updates')
    subparsers = parser.add_subparsers()

    # 'rollback' takes no extra arguments; it restores the .old boot files.
    rollback = subparsers.add_parser('rollback')
    rollback.set_defaults(func=rollback_kernel)

    update = subparsers.add_parser('update',
        usage=f'{sys.argv[0]} update 5.15.12 5.16.3',
    )
    update.add_argument(
        '--llvm', '-l', action='store_true',
        help="Use clang/llvm to compile kernel")
    update.add_argument(
        '--backup', '-b', action='store_true',
        help="Backup old kernel files as .old")
    update.add_argument(
        '--rollback', '-r', action='store_true',
        help='Restore .old kernel files as main boot choice')
    update.add_argument(
        '--config', '-C', action='store_true',
        help='Migrate config from old kernel')
    update.add_argument(
        '--compile', '-c', action='store_true',
        help='Compile new kernel')
    update.add_argument(
        '--install', '-i', action='store_true',
        help='Install new kernel')
    # Versions parse as packaging.version.Version; malformed input fails here.
    update.add_argument(
        'old_version', type=Version,
        help='Old kernel version')
    update.add_argument(
        'new_version', type=Version,
        help='New kernel version')
    update.set_defaults(func=update_kernel)

    args = parser.parse_args()
    # Both handlers share the (boot_dir, boot_files, args) signature.
    args.func(BOOT_DIR, BOOT_FILES, args)


if __name__ == "__main__":
    main()
|
||||
71
sync-apparmor.py
Executable file
71
sync-apparmor.py
Executable file
|
|
@ -0,0 +1,71 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
def prune_unused_profiles(dest_profile_folder: Path) -> None:
    """Delete profiles whose target binary no longer exists, then remove any
    directories left empty by the deletion."""
    for root, dirs, files in os.walk(dest_profile_folder):
        for profile in files:
            # AppArmor profile names encode the binary path with dots:
            # usr.bin.foo -> /usr/bin/foo
            binary = Path('/' + profile.replace('.', '/'))
            if binary.exists():
                continue
            victim = os.path.join(root, profile)
            print(f'Removing {victim}')
            os.remove(victim)

        for subdir in dirs:
            fullpath = os.path.join(root, subdir)
            if not os.listdir(fullpath):
                print(f'Removing empty directory {fullpath}')
                os.rmdir(fullpath)
|
||||
|
||||
|
||||
def install_profiles(source_folder: Path, dest_profile_folder: Path) -> None:
    """Copy into *dest_profile_folder* every profile from *source_folder*
    whose target binary exists on this system."""
    for root, dirs, files in os.walk(source_folder):
        for profile in files:
            # Dots in the profile name encode the binary's path.
            binary = Path('/' + profile.replace('.', '/'))
            print(f'Testing {binary}')
            if not binary.exists():
                continue
            print(f'Adding profile for {binary}')
            shutil.copy2(os.path.join(root, profile), dest_profile_folder)
|
||||
|
||||
parser = argparse.ArgumentParser(
    description='Install or prune apparmor profiles',
    usage='/sync-apparmor.py ~/playground/apparmor-profiles/ /etc/apparmor.d/local/',
)
parser.add_argument(
    '--dry-run', '-d', action='store_true',
    help="Don't change files, only output what would be done")
parser.add_argument(
    '--prune-destination', '-r', action='store_true',
    help="Check whether target binaries for profiles in dest exist. Delete profiles if not.")
parser.add_argument(
    '--sync-source', '-s', action='store_true',
    help="Check whether target binaries for profiles in dest exist. Copy profiles from source if so.")
parser.add_argument(
    'source_folder', type=Path,
    help='Folder to copy AppArmor profiles from')
parser.add_argument(
    'dest', type=Path,
    help='Folder to copy AppArmor profiles to')
args = parser.parse_args()

# Under plain sudo, $HOME stays /root and profile paths given relative to ~
# resolve wrongly; warn (but proceed) so the user can re-run with sudo -E.
if Path.home() == Path('/root'):
    print('$HOME is /root, maybe you forgot to use sudo -E?')

if args.dry_run:
    # Neutralize every mutating call; the print statements still report
    # what would have been done.
    shutil.copy2 = lambda *args: None
    os.remove = lambda *args: None
    os.rmdir = lambda *args: None

# Bug fix: this was `not(args.prune_destination and args.sync_source)`,
# which is True when exactly one flag is given — so passing a single flag
# still triggered the "no flags" default and ran that step twice. `or`
# expresses "no mode flag was selected".
none_defined = not (args.prune_destination or args.sync_source)
if none_defined:
    prune_unused_profiles(args.dest)
    install_profiles(args.source_folder, args.dest)
if args.prune_destination:
    prune_unused_profiles(args.dest)
if args.sync_source:
    install_profiles(args.source_folder, args.dest)
|
||||
Loading…
Add table
Add a link
Reference in a new issue