forked from localhorst/media_management_scripts
Compare commits
7 Commits
fix-instal
...
master
Author | SHA1 | Date | |
---|---|---|---|
d88229dddf | |||
b7ac2a2ae5 | |||
96bbb2d579 | |||
62a68990a4 | |||
a0dfa826d9 | |||
4cb989db86 | |||
b8de81a302 |
@ -21,7 +21,7 @@
|
||||
|
||||
## Usage
|
||||
|
||||
`python ./check_metadata.py path`
|
||||
`python ./check_metadata.py path codec_filter`
|
||||
|
||||
## Features
|
||||
- find all video files in path
|
||||
|
4
check_Names_and_Paths.sh
Normal file → Executable file
4
check_Names_and_Paths.sh
Normal file → Executable file
@ -58,7 +58,7 @@ do
|
||||
|
||||
###### check extension ######
|
||||
ext="${filename##*.}"
|
||||
if [ "$ext" != "mkv" ] && [ "$ext" != "mp4" ]
|
||||
if [ "$ext" != "mkv" ] && [ "$ext" != "mp4" ] && [ "$ext" != "nfo" ]
|
||||
then
|
||||
echo "Incident: Incorrect extension: $file"
|
||||
fi
|
||||
@ -117,7 +117,7 @@ for show in tvshows/*; do
|
||||
|
||||
###### check extension ######
|
||||
ext="${episodename##*.}"
|
||||
if [ "$ext" != "mkv" ] && [ "$ext" != "mp4" ]
|
||||
if [ "$ext" != "mkv" ] && [ "$ext" != "mp4" ] && [ "$ext" != "nfo" ]
|
||||
then
|
||||
echo "Incident: Incorrect extension: $episode"
|
||||
fi
|
||||
|
29
check_metadata/checkMetadata/__main__.py → check_metadata/check_metadata.py
Normal file → Executable file
29
check_metadata/checkMetadata/__main__.py → check_metadata/check_metadata.py
Normal file → Executable file
@ -13,6 +13,8 @@ import subprocess
|
||||
import datetime
|
||||
from dataclasses import dataclass
|
||||
from tqdm import tqdm
|
||||
import operator
|
||||
|
||||
|
||||
@dataclass
|
||||
class MediaFile:
|
||||
@ -25,7 +27,7 @@ class MediaFile:
|
||||
duration: int #in sec
|
||||
|
||||
def supported_file_extension(filename):
|
||||
if filename.endswith('.mp4') or filename.endswith('.mkv'):
|
||||
if filename.endswith('.mp4') or filename.endswith('.mkv') or filename.endswith('.m4v'):
|
||||
return True
|
||||
return False
|
||||
|
||||
@ -96,9 +98,19 @@ def scan_files(path):
|
||||
pbar.close()
|
||||
return media_files
|
||||
|
||||
def print_all(media_files):
|
||||
def print_all(media_files, filter_mode):
|
||||
media_files_filtered = list() #stores all found files that match the given codec filter
|
||||
media_files.sort(key=lambda x: x.size, reverse=True)
|
||||
for media_file in media_files:
|
||||
print ("{:<64} | {:<8} | {:<16} | {:<8} | {:<16}".format(cut_file_name(media_file.name, 64), str(datetime.timedelta(seconds=media_file.duration)).split(".")[0], human_readable_size(media_file.size), media_file.codec, str(media_file.resolution[0])+"x"+str(media_file.resolution[1])))
|
||||
if((filter_mode != " ") and (media_file.codec == filter_mode) ):
|
||||
media_files_filtered.append(media_file.name)
|
||||
|
||||
if(filter_mode != " "):
|
||||
print("\nFound files with selected filter: " + filter_mode + "\n")
|
||||
for media_file_filtered in media_files_filtered:
|
||||
print ('"'+media_file_filtered+'", ')
|
||||
print("\n")
|
||||
|
||||
def print_codecs(media_files):
|
||||
codecs = list()
|
||||
@ -124,15 +136,20 @@ def print_codecs(media_files):
|
||||
print("\ntotal "+ str(len(media_files)) + str(" files"))
|
||||
|
||||
def main() -> None:
|
||||
if(len(sys.argv) != 2):
|
||||
path = '.' #use current pwd
|
||||
else:
|
||||
if(len(sys.argv) == 3):
|
||||
path = sys.argv[1] #use arg0 as path
|
||||
filter_mode = sys.argv[2] #use arg2 as filter input
|
||||
elif (len(sys.argv) == 2):
|
||||
path = sys.argv[1] #use arg0 as path
|
||||
filter_mode = " "
|
||||
else:
|
||||
path = '.' #use current pwd
|
||||
filter_mode = " "
|
||||
|
||||
media_files = scan_files(path) #scan all media files
|
||||
|
||||
print("")
|
||||
print_all(media_files)
|
||||
print_all(media_files, filter_mode)
|
||||
|
||||
print("")
|
||||
print_codecs(media_files)
|
186
convert/convert.py
Normal file
186
convert/convert.py
Normal file
@ -0,0 +1,186 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
import requests
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Prometheus exporter endpoint publishing Fronius inverter/site metrics.
PROM_URL = "http://127.0.0.1:9104/metrics"
# Minimum battery state of charge (fraction, 0..1) required before converting.
MIN_SOC = 0.5 # 50%
# Minimum photovoltaic output (watts) required before converting.
MIN_SOLAR_POWER = 500 # 500W
|
||||
|
||||
def load_config(path):
    """Read *path* as UTF-8 and return the parsed JSON configuration."""
    with open(path, "r", encoding="utf-8") as handle:
        return json.load(handle)
|
||||
|
||||
# NOTE: removed duplicate "import requests" / "import time" statements here;
# both modules are already imported in the top-of-file import block.
|
||||
|
||||
def wait_for_solar_power(prom_url, check_interval=300, max_retries=None):
    """
    Block until the PV installation produces enough power to start converting.

    Conditions, scraped from the Prometheus text exposition at *prom_url*:
      - fronius_site_power_photovoltaic     > MIN_SOLAR_POWER
      - fronius_inverter_soc{inverter="1"}  > MIN_SOC

    Args:
        prom_url (str): Prometheus metrics endpoint.
        check_interval (int): Seconds between checks (default: 300).
        max_retries (int or None): Optional limit on checks. If None, loop indefinitely.

    Returns:
        bool: True if conditions met, False if retries exhausted.
    """
    attempts = 0
    while True:
        try:
            r = requests.get(prom_url, timeout=5)
            r.raise_for_status()

            pv_power = None
            inverter_soc = None

            for line in r.text.splitlines():
                if line.startswith("#"):  # Skip comments and HELP/TYPE lines
                    continue
                parts = line.split()
                # Exposition format is "<metric> <value> [<timestamp>]";
                # accept >= 2 fields so samples with a timestamp are not dropped.
                if len(parts) < 2:
                    continue

                metric, value_str = parts[0], parts[1]
                try:
                    value = float(value_str)
                except ValueError:
                    continue

                if metric == "fronius_site_power_photovoltaic":
                    pv_power = value
                elif (
                    metric.startswith("fronius_inverter_soc")
                    and '{inverter="1"}' in metric
                ):
                    inverter_soc = value

            if pv_power is not None and inverter_soc is not None:
                if pv_power > MIN_SOLAR_POWER and inverter_soc > MIN_SOC:
                    print(
                        f"[INFO] Solar power available: PV={pv_power:.2f} W, SOC={inverter_soc:.2f} – starting conversion."
                    )
                    return True

            print(
                f"[INFO] Conditions not met (PV={pv_power}, SOC={inverter_soc}). Waiting {check_interval} seconds..."
            )

        except requests.RequestException as e:
            print(f"[WARN] Could not reach Prometheus: {e}")

        attempts += 1
        if max_retries is not None and attempts >= max_retries:
            print("[ERROR] Max retries reached. Exiting without solar power.")
            return False

        time.sleep(check_interval)
|
||||
|
||||
def analyze_codecs(oldfile, newfile, dst_folder):
    """
    Compare *oldfile* against *newfile* with the external ``codecVis`` tool.

    codecVis writes ``output.png`` into *dst_folder*; that image is renamed
    to ``<newfile basename>.png`` so successive runs do not overwrite it.
    """
    command = ["codecVis", oldfile, newfile]
    print(f"[CMD] {' '.join(command)}")
    subprocess.run(command, cwd=dst_folder, check=True)

    # Rename output.png to match the new file
    produced = Path(dst_folder) / "output.png"
    target = Path(dst_folder) / (Path(newfile).name + ".png")
    if not produced.exists():
        print("[WARN] output.png not found!")
        return
    produced.rename(target)
    print(f"[INFO] Analysis image saved as {target}")
|
||||
|
||||
def main():
    """
    Convert every job listed in the JSON config to AV1/Opus in an MKV container.

    Usage: convert.py <config.json>

    Each job is skipped when its destination file already exists, and a job
    only starts after wait_for_solar_power() has reported sufficient
    photovoltaic power. Encoding goes to a ``tmp_`` file that is renamed on
    success, so interrupted runs never leave a half-written final file.
    """
    if len(sys.argv) != 2:
        print("Give path to config file as argument.")
        sys.exit(1)

    cfg = load_config(sys.argv[1])
    dst_folder = Path(cfg["dst_folder"])
    src_folder = Path(cfg["src_folder"])
    dst_folder.mkdir(parents=True, exist_ok=True)

    for job in cfg["jobs"]:
        src_file = src_folder / job
        tmp_movie_name = "tmp_" + Path(job).stem + ".mkv"
        movie_name = Path(job).stem + ".mkv"

        tmp_dst_file = dst_folder / tmp_movie_name
        dst_file = dst_folder / movie_name

        print(f"Source: {src_file}")
        print(f"Temp name: {tmp_movie_name}")
        print(f"Final name: {movie_name}")

        # Remove leftover temporary file from an interrupted earlier run.
        if tmp_dst_file.exists():
            print(f"[INFO] File {tmp_dst_file} already exists. --> Delete!")
            tmp_dst_file.unlink()

        # Skip if final file already exists
        if dst_file.exists():
            print(f"[INFO] Skip {dst_file}, already exists. --> Convert already done!")
            continue

        # Block until PV power / battery SOC thresholds are met.
        wait_for_solar_power(PROM_URL)

        try:
            cmd = _ffmpeg_command(src_file, tmp_dst_file)
            print(f"[CMD] {' '.join(cmd)}")
            subprocess.run(cmd, check=True)

            # Rename temp file to final name only after a successful encode.
            tmp_dst_file.rename(dst_file)

            # Run codec analysis
            analyze_codecs(str(src_file), str(dst_file), dst_folder)

        except subprocess.CalledProcessError as e:
            print(f"[ERROR] Processing failed for {job}: {e}")


def _ffmpeg_command(src_file, tmp_dst_file):
    """Build the taskset/ffmpeg argument list for one AV1 encode job."""
    return [
        "taskset",
        "-c",
        "0,1,2,3",  # limit to first 4 CPU cores
        "ffmpeg",
        "-i",
        str(src_file),
        "-c:v",
        "libaom-av1",  # AV1 video
        "-c:a",
        "libopus",  # Opus audio
        "-mapping_family",
        "1",
        "-af",
        "aformat=channel_layouts=5.1",
        "-c:s",
        "copy",  # keep subtitle streams as-is
        "-map",
        "0",  # keep all streams from the input
        "-crf",
        "24",
        "-b:v",
        "0",  # constant-quality (CRF) mode
        "-b:a",
        "128k",
        "-cpu-used",
        "4",
        "-row-mt",
        "1",
        "-tiles",
        "2x2",
        str(tmp_dst_file),
    ]


if __name__ == "__main__":
    main()
|
8
convert/convert_jobs.json
Normal file
8
convert/convert_jobs.json
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"dst_folder": "/mnt/mainstorage/media/converted",
|
||||
"src_folder": "/mnt/mainstorage/media/movies",
|
||||
|
||||
"jobs": [
|
||||
"BigBuckBunny_320x180.mp4"
|
||||
]
|
||||
}
|
79
find_duplicates.py
Executable file
79
find_duplicates.py
Executable file
@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
""" Author: Hendrik Schutter, localhorst@mosad.xyz
|
||||
Date of creation: 2023/02/22
|
||||
Date of last modification: 2023/02/22
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import subprocess
|
||||
import datetime
|
||||
from dataclasses import dataclass
|
||||
from tqdm import tqdm
|
||||
import operator
|
||||
|
||||
@dataclass
class MediaFile:
    """One media file discovered while scanning the library."""
    name: str #basename without extension
    extension: str #extension INCLUDING the leading dot, e.g. ".mp4" (scan_files stores os.path.splitext()[1]; print_all compares against ".mp4")
    full_path: str #path as built by os.path.join(root, name)
|
||||
|
||||
def supported_file_extension(filename):
    """Return True when *filename* has a supported video extension.

    str.endswith accepts a tuple of suffixes, replacing the chained `or`s.
    """
    return filename.endswith(('.mp4', '.mkv', '.m4v'))
|
||||
|
||||
def get_number_of_files(path):
    """Count supported media files below *path*, recursively.

    Streams the count with generator expressions instead of materializing
    a filtered list per directory (and drops the dead commented-out code).
    """
    return sum(
        sum(1 for f in files if supported_file_extension(f))
        for _, _, files in os.walk(path)
    )
|
||||
|
||||
def cut_file_name(filename, max_lenght, ellipsis="..."):
    """Truncate *filename* to *max_lenght* characters, ending in *ellipsis*.

    Names that already fit are returned unchanged.
    """
    if len(filename) <= max_lenght:
        return filename
    keep = max_lenght - len(ellipsis)
    return filename[:keep] + ellipsis
|
||||
|
||||
def scan_files(path):
    """Walk *path* and collect every supported media file as a MediaFile."""
    # Size the progress bar up front with a separate counting pass.
    progress = tqdm(total=get_number_of_files(path))

    found = list()  # stores all found files with metadata

    for root, dirs, files in os.walk(path, topdown=True):
        for filename in filter(supported_file_extension, files):
            progress.set_description("Processing %s" % str("{:<32}".format(cut_file_name(filename, 32))))
            base, ext = os.path.splitext(filename)
            found.append(
                MediaFile(
                    name=base,
                    extension=ext,
                    full_path=os.path.join(root, filename),
                )
            )
            progress.update(1)
    progress.close()
    return found
|
||||
|
||||
def print_all(media_files, path):
    """
    Print each .mp4 file that also exists as a .mkv (i.e. a duplicate).

    Args:
        media_files: list of MediaFile records from scan_files().
        path: scan root; kept for interface compatibility — the probe path
            is now derived from each file's own full_path instead.
    """
    for media_file in media_files:
        if (media_file.extension == ".mp4"):
            # Probe for a sibling .mkv in the SAME directory as the .mp4.
            # (The previous `path + name + ".mkv"` lacked a path separator —
            # with the default path '.' it probed ".name.mkv" — and it broke
            # for files found in subdirectories of the scan root.)
            file_test_path = os.path.splitext(media_file.full_path)[0] + ".mkv"
            #print("Testing for: " + file_test_path)
            if (os.path.isfile(file_test_path)):
                print(media_file.full_path)
                #os.remove(media_file.full_path)
|
||||
|
||||
|
||||
def main() -> None:
    """Scan a directory (argv[1], or the cwd) and report mp4/mkv duplicates."""
    # A single argument selects the scan root; otherwise fall back to cwd.
    path = sys.argv[1] if len(sys.argv) == 2 else '.'

    media_files = scan_files(path)  # collect all supported media files

    print("")
    print_all(media_files, path)


if __name__ == "__main__":
    main()
|
Reference in New Issue
Block a user