diff --git a/README.md b/README.md
index 3733357..c0b04e8 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,36 @@
-# funkwlplay
+# funkwlmpv
+### Deprecated
+Just for fun: a simple TUI media player for FunkWhale instances, written in Python.
-basic functional for create playlist tracks. Be simple as possible. No implement player and tui. Just bare cli solution.
+Features:
+* Simple interface
+* Listening to tracks
+* Listening to radios (including users/libraries)
+* Selecting and listening to albums
+* Selecting and listening to artists
+* Search by albums and artists
+* Switching instances from the public list[1] and the official instance list at network.funkwhale.audio (if available)
+* All other features work 50/50
+
+Dependencies:
+* python3.9+
+* [mpv](https://mpv.io)
+* [fzf](https://github.com/junegunn/fzf)
+
+Python dependencies:
+```pip install -r requirements.txt```
+Optional: brotli
+
+To use a virtual environment, just run (after installing mpv, fzf and python3):
+`./run`
+
+### About the cache folder
+funkwlmpv caches tracks before playing them (disabled by default; see "enable_persistent_cache"). The cache is persistent and you have to manage it yourself, because the program cannot clean it up automatically.
+Cache structure: cache/domain.tld/[track uuid]
+You can play cached tracks offline, for example: mpv --shuffle cache/*/*
+cache_helper.sh - may be useful for compressing the cache (lossy: Vorbis 128 kbps, no thumbnail)
+
+Also, thanks to Inex for his FunkWhale instance (used as the default instance)
[1]**Warning:** may contain _unofficial instances_
**Warning 2:** This software is not user-friendly. Maybe developer-friendly.
diff --git a/cache_helper.sh b/cache_helper.sh
new file mode 100755
index 0000000..e96f9f7
--- /dev/null
+++ b/cache_helper.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+if [ ! -n "$1" ]; then
+    echo 'Usage: cache_helper.sh path/to/cache'
+    exit 1
+fi
+
+total_before=0
+total_after=0
+for i in "$1"/*/*; do
+    if [ $(ffprobe -hide_banner -print_format json -select_streams a:0 -show_streams "$i" | jq -r '.streams[0].bit_rate') -gt 128000 ]; then
+        size_before=$(stat --format=%s "$i")
+        total_before=$(( $total_before + $size_before ))
+        ffmpeg -hide_banner -loglevel error -i "$i" -vn "$i".ogg
+        if [ $? 
-eq 0 ]; then + size_after=$(stat --format=%s "$i".ogg) + total_after=$(( $total_after + $size_after )) + + size_reduced=$(( $size_before - $size_after )) + echo "Reduced: $(echo $size_reduced | numfmt --to=iec)" + mv "$i".ogg "$i" + else + echo "$i convert failed" + fi + else + echo "$i already OK" + fi +done + +echo "Total before: $(echo $total_before | numfmt --to=iec)" +echo "Total after: $(echo $total_after | numfmt --to=iec)" +echo "Note: only included processed tracks" diff --git a/funkwlmpv b/funkwlmpv new file mode 100755 index 0000000..eede962 --- /dev/null +++ b/funkwlmpv @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 + +from src.fw_api import current_instance, get_instance_settings, get_node_info +from src.fw_radios import list_radios +from src.fw_artists import list_artists +from src.fw_albums import list_albums +from src.fw_tracks import list_tracks +from src.fw_channels import list_channels +from src.fw_playlists import list_playlists +from src.fw_recents import list_fav_or_history +from src.fw_instances import instances_menu +import src.mpv_control +import json +import os +from shlex import quote +from shutil import get_terminal_size +from pyfzf.pyfzf import FzfPrompt + +os.environ['FZF_DEFAULT_OPTS'] = "--margin 2,0,0,0 --preview-window down:2:hidden:wrap --bind ?:toggle-preview --preview 'echo {}'" +fzf = FzfPrompt() + +os.system('clear') +if get_terminal_size().columns > 32: + print('\n\n') + os.system('cat .icon.txt') + +def main(): + + while True: + support_message = '' + instance_title = '' + menu = ['Radios', + 'Artists', + 'Albums', + 'Tracks', + 'Channels', + 'Playlists', + 'Favorites', + 'Recently listened', + 'About instance', + 'Switch instance'] + try: + ins_nodeinfo = get_node_info() + support_message = ins_nodeinfo['metadata']['instanceSupportMessage'] + instance_title = ins_nodeinfo['metadata']['nodeName'] + instance_stats = [] + for k, v in ins_nodeinfo['metadata']['library'].items(): + if k == 'anonymousCanListen' and v == False and not current_instance.s.headers.get('Authorization'): + instance_stats.append(f'!!! 
{k}: {v} !!!') + menu = ['Switch instance', 'About instance'] + continue + instance_stats.append(f'{k}: {v}') + instance_stats.append(ins_nodeinfo['software']['version']) + instance_stats = '\n'.join(instance_stats) + + main_menu_header = quote(f'''{instance_title}\n{instance_stats}'''.strip()) + except Exception as E: + splitted = ':\n'.join(str(E).split(':')) + main_menu_header = quote(f'''Connection failed:\n{splitted}'''.strip()) + menu = ['Switch instance'] + + if not current_instance.s.headers.get('Authorization'): + menu.append('Sign in') + if support_message != '': + menu.append('Donate') + if src.mpv_control.player.playlist_playing_pos != -1: + menu.insert(0, 'Player') + selected = fzf.prompt(menu, f"--header={main_menu_header}") + if selected == []: + print('bye-bye :3') + break + else: + selected = selected[0] + + if selected == 'Radios': + list_radios() + if selected == 'Artists': + list_artists() + if selected == 'Albums': + list_albums() + if selected == 'Tracks': + list_tracks() + if selected == 'Channels': + list_channels() + if selected == 'Playlists': + list_playlists() + if selected == 'Favorites': + list_fav_or_history() + if selected == 'Recently listened': + list_fav_or_history(is_history_view=True) + if selected == 'Switch instance': + instances_menu() + if selected == 'Sign in': + print(f''' +If You want sign in, please visit: +https://{current_instance.instance}/settings/applications/new +And fill Name funkwlmpv +Scopes: +Read | Write (optional): + write:libraries + write:favorites + write:listenings + write:follows + write:filters + +Insert token from "Access token" here''') + register_token = input() + with open('.auth.json', 'rt') as f: + tkns = json.load(f) + with open('.auth.json', 'wt') as f: + tkns[current_instance.instance] = register_token + f.write(json.dumps(tkns, indent=4)) + del tkns + del register_token + del f + os.system('clear') + + current_instance.select_instance(current_instance.instance) + if selected == 'Donate': + os.system(f'less < playlist.m3u8 +for i in $(cat instances); do + get "https://$i/api/v1/tracks?playable=true&hidden=false&include_channels=false&order=random&$search" | jq -r ".results.[] | \"#EXTINF:-1, \\(.title)\nhttps://$i\\(.listen_url)\"" >> playlist.m3u8 & +done + + diff --git a/funkwlplay.py b/funkwlplay.py old mode 100755 new mode 100644 index 5332974..4ec6b41 --- a/funkwlplay.py +++ b/funkwlplay.py @@ -1,4 +1,3 @@ -#!/bin/env python3 import requests import concurrent.futures import argparse @@ -10,48 +9,26 @@ with open('instances') as instances: parser = argparse.ArgumentParser( - prog='funkwlplay', + prog='funkwhale playlist', description='Create playlist from query or just random playlist tracks from funkwhale instances') -parser.add_argument('-s', '--search', help='This global search on funkwhale instances, it matches artists, albums, tracks, etc...') -parser.add_argument('-t', '--tag', help='This tag search, use this as genre search') -parser.add_argument('-i', '--instance', help='Specify instance, by default search on all instances in instances file') -parser.add_argument('-r', '--recursion', type=int, default=0, help='Use recursion if instance contain more than 50 tracks') -parser.add_argument('-d', '--depth', type=int, default=5, help='Depth of recursion, default is 5 pages, 250 tracks') +parser.add_argument('-s', '--search') +parser.add_argument('-t', '--tag') +parser.add_argument('-i', '--instance') +parser.add_argument('-r', '--recursion', type=int, default=0) +parser.add_argument('-d', '--depth', type=int, 
default=5) args = parser.parse_args() if args.instance: instances = [args.instance] def create_playlist_file(track_list): - filename = 'playlist.m3u8' - with open(filename, 'w') as file: + with open('playlist.m3u8', 'w') as file: file.write('#EXTM3U\n') for i in track_list: file.write('\n' + i) - print(f'Playlist saved as {filename}') def filter_tracks(tracks): - def remove_unreach_tracks(track): - try: - r = requests.head(track['listen_url'], timeout=1) - r.raise_for_status() - return 1 - except: - return 0 - with concurrent.futures.ThreadPoolExecutor(max_workers=50) as executor: - before = len(tracks) - res = [executor.submit(remove_unreach_tracks, track) for track in tracks] - concurrent.futures.wait(res) - avalaible = [] - for idx, track in enumerate(tracks): - is_avalaible = res[idx].result() - if is_avalaible == 1: - avalaible.append(track) - tracks = avalaible - after = before - len(tracks) - print(f'-{after} unreach tracks') - Path('filter_tags').touch() Path('filter_artists').touch() Path('filter_raw_urls').touch() @@ -74,14 +51,10 @@ def filter_tracks(tracks): tracks_stor.append(i) - def search_tracks_on_instance(instance, tag='', query='', recursion=args.recursion): - r = requests.get(f'https://{instance}/api/v1/tracks', params={'tag': tag, 'q': query, + tracks = requests.get(f'https://{instance}/api/v1/tracks', params={'tag': tag, 'q': query, 'local': True, 'playable': True, - 'ordering': 'random'}, timeout=10) - r.raise_for_status() - tracks = r.json() - + 'ordering': 'random'}, timeout=10).json() count = tracks['count'] print(f'found {count} tracks on {instance}') if recursion == 1: diff --git a/instances b/instances index b73f800..7f14ec4 100644 --- a/instances +++ b/instances @@ -55,4 +55,3 @@ tanukitunes.com tunez.awadwatt.com watts.refchat.net zik.goe.land -funkwhale.agapimou.top diff --git a/requirements.txt b/requirements.txt index f229360..5c773a4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,4 @@ +loguru requests +pyfzf +mpv diff --git a/run b/run new file mode 100755 index 0000000..af616c6 --- /dev/null +++ b/run @@ -0,0 +1,12 @@ +#!/bin/bash + +if [ ! -d env_fw ]; then + python3 -m venv env_fw + . env_fw/bin/activate + pip3 install -r requirements.txt || pip install -r requirements.txt || exit 1 + ./funkwlmpv +else + . env_fw/bin/activate + ./funkwlmpv +fi + diff --git a/src/android_termux_api.py b/src/android_termux_api.py new file mode 100644 index 0000000..a9fbbc8 --- /dev/null +++ b/src/android_termux_api.py @@ -0,0 +1,42 @@ +import subprocess +import os +import json +import threading +import time +from src.mpv_control import player +from loguru import logger +'''Warning! 
This module can be very battery drain''' + + +@logger.catch +def handle_vol_lvl_as_switch_track(): + volume_diff = [] + while True: + for a in range(2): # ~2 secs + volume = subprocess.Popen("termux-volume", stdout=subprocess.PIPE).stdout + json_volume = json.loads(volume.read()) + time.sleep(0.300) + for i in json_volume: + if i['stream'] == 'music': + volume_diff.append(i['volume']) + if len(volume_diff) == 2: + before, after = volume_diff + difference = after - before + if difference == 2: + try: + player.playlist_next() + except: + pass + os.system(f'termux-volume music {before}') + elif difference == -2: + try: + player.playlist_prev() + except: + pass + os.system(f'termux-volume music {before}') + volume_diff = [] + + +handle_vol_lvl = threading.Thread( + target=handle_vol_lvl_as_switch_track, daemon=True) +handle_vol_lvl.start() diff --git a/src/fw_albums.py b/src/fw_albums.py new file mode 100644 index 0000000..9a533d9 --- /dev/null +++ b/src/fw_albums.py @@ -0,0 +1,80 @@ +import src.fw_artists +from src.fw_api import get_artists, get_tracks, get_albums, get_audio_file +from src.fw_libraries import libraries +from src.settings import get_config +from src.mpv_control import player, player_menu, track_url_to_uuid +from pyfzf.pyfzf import FzfPrompt +from loguru import logger + +fzf = FzfPrompt() + + +@logger.catch +def list_albums(albums=None, pg=None, search=None, artist=None, library=None, include_channels=None, refresh=False): + show_artist_name_in_albums = get_config('show_artist_name_in_albums') + albums_next = None + albums_prev = None + play_artist_albums = False + if not albums: + albums = get_albums(q=search, artist=artist, library=library, + include_channels=include_channels, refresh=refresh, pg=pg) + albums_next = albums.get('next') + albums_prev = albums.get('previous') + albums_results = albums.get('results') + if artist: + play_artist_albums = True + else: + play_artist_albums = True + albums_results = albums + view = ['Search', 'Library'] + if play_artist_albums: + view.append('Play all') + if albums_next: + view.append('Next page') + if albums_prev: + view.append('Prev page') + + for i in albums_results: + index = albums_results.index(i) + album_name = i.get('title') + album_tracks_count = i.get('tracks_count') + option_str = f'{index}.{album_name} | {album_tracks_count}' + artist_name = i.get('artist') + if show_artist_name_in_albums and isinstance(artist_name, dict): + artist_name = artist_name.get('name') + option_str += f' | {artist_name}' + view.append(f'{option_str}') + select = fzf.prompt(view, '--header=\'map: album | tracks count | opt. 
artist\'') + if select == []: + return + else: + select = select[0].split('.', 1)[0] + + if select == 'Next page': + list_albums(pg=albums_next) + elif select == 'Prev page': + list_albums(pg=albums_prev) + elif select == 'Search': + print('Search by albums: ') + list_albums(search=input()) + elif select == 'Library': + select_lib = libraries() + list_albums(library=select_lib) + elif select == 'Play all': + if artist: + src.fw_artists.play_artist(artist) + else: + src.fw_artists.play_artist(albums_results[0].get('artist')) + else: + play_album(album_id=albums_results[int(select)].get('id')) + + +def play_album(album_id): + tracks = get_tracks(album=album_id, ordering='disc_number,position', include_channels=True) + tracks_results = tracks.get('results') + storage = {} + for i in tracks_results: + listen_url = get_audio_file(i['listen_url'], True) + storage[track_url_to_uuid(listen_url)] = i + player.loadfile(listen_url, 'append-play') + player_menu("Album playing...", storage) diff --git a/src/fw_api.py b/src/fw_api.py new file mode 100644 index 0000000..09a237f --- /dev/null +++ b/src/fw_api.py @@ -0,0 +1,431 @@ +from src.settings import get_config +import requests +import json +import time +import urllib.parse +import os +from loguru import logger + +attempts_for_federate = 3 +auth_file = '.auth.json' +if os.path.exists(auth_file): + with open('.auth.json', 'rt') as f: + auth = json.loads(f.read()) +else: + # The default umask is 0o22 which turns off write permission of group and others + os.umask(0) + + descriptor = os.open( + path=auth_file, + flags=( + os.O_WRONLY # access mode: write only + | os.O_CREAT # create if not exists + | os.O_TRUNC # truncate the file to zero + ), + mode=0o600) + with open(descriptor, 'wt') as f: + f.write('{}') + auth = {} + + +class current_instance: + s = requests.Session() + s.headers.update({ + "Accept-encoding": 'gzip, br, deflate', + "User-Agent": "funkwlmpv/latest-commit; +https://git.phreedom.club/localhost_frssoft/funkwlmpv" + }) + instance = get_config('instance') + token = auth.get(instance) + listen_token = None + + @logger.catch + def select_instance(new_instance=None): + current_instance.instance = new_instance + with open(auth_file, 'rt') as f: + auth = json.load(f) + new_token = auth.get(current_instance.instance) + current_instance.s.headers.update({"Authorization": None}) + current_instance.token = new_token + current_instance.listen_token = None + if new_token: + current_instance.s.get(f'https://{current_instance.instance}', timeout=30) + current_instance.s.headers.update({"Authorization": "Bearer " + new_token}) + + +if current_instance.token: + current_instance.s.headers.update({"Authorization": "Bearer " + current_instance.token}) +else: + # Get cookies from unauthorized instance for working some functionality (radios) + current_instance.s.get(f'https://{current_instance.instance}/', timeout=30) + + +@logger.catch +def get_me(): + '''Return current user if token avalaible''' + if not current_instance.token: + return + r = current_instance.s.get(f'https://{current_instance.instance}/api/v1/users/me') + r.raise_for_status() + resp = r.json() + current_instance.listen_token = resp['tokens']['listen'] + return resp + + +def get_instance_settings(): + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/instance/settings') + return r.json() + + +def get_node_info(): + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/instance/nodeinfo/2.0/', + timeout=30) + r.raise_for_status() + return 
r.json() + + +@logger.catch +def get_audio_file(track_uuid, listen_url=False, download=False, + external_transcoding=get_config('external_transcoder_http_proxy_path'), + transcoding=get_config('enable_server_transcoding'), to='ogg', max_bitrate=128): + params = { + "download": download, + "to": to, + "max_bitrate": max_bitrate + } + if current_instance.token and not current_instance.listen_token: + get_me() + if current_instance.listen_token: + params['token'] = current_instance.listen_token + + if not transcoding: + del params['to'] + del params['max_bitrate'] + if listen_url: + url = f'https://{current_instance.instance}{track_uuid}?' + else: + url = f'https://{current_instance.instance}/api/v1/listen/{track_uuid}?' + if external_transcoding != "": + url = external_transcoding + url + return url + urllib.parse.urlencode(params, doseq=True) + + +@logger.catch +def get_tracks(page=None, ordering=None, q=None, page_size=None, + artist=None, album=None, library=None, + tag=None, favourites=None, include_channels=None, pg=None): + '''This function get tracks by params''' + params = { + 'page': page, + 'page_size': page_size, + 'ordering': ordering, + 'q': q, + 'artist': artist, + 'album': album, + 'tag': tag, + 'favourites': favourites, + 'library': library, + 'include_channels': include_channels + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/tracks', params=params) + return r.json() + + +@logger.catch +def get_favorires_tracks(page=None, q=None, scope=None, include_channels=None, pg=None): + '''Get favorites tracks (not only for user)''' + params = { + 'page': page, + 'q': q, + 'scope': scope, + 'include_channels': include_channels + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/favorites/tracks/', params=params) + return r.json() + + +@logger.catch +def get_recently_listened(page=None, q=None, scope=None, include_channels=None, pg=None): + '''Get recently listened tracks (not only for user)''' + params = { + 'page': page, + 'q': q, + 'scope': scope, + 'include_channels': include_channels + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/history/listenings', params=params) + return r.json() + + +@logger.catch +def get_artists(page=None, q=None, artist=None, album=None, tag=None, + library=None, scope=None, favourites=None, refresh=False, pg=None): + '''This function get artists by params''' + params = { + 'page': page, + 'q': q, + 'artist': artist, + 'album': album, + 'tag': tag, + 'library': library, + 'scope': scope, + 'favourites': favourites, + 'refresh': refresh + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/artists', params=params) + return r.json() + + +@logger.catch +def get_albums(page=None, q=None, ordering=None, + artist=None, library=None, include_channels=None, refresh=False, pg=None): + '''This function get artists by params''' + params = { + 'page': page, + 'ordering': ordering, + 'q': q, + 'artist': artist, + 'library': library, + 'include_channels': include_channels, + 'refresh': refresh + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/albums', params=params) + return r.json() + + +@logger.catch +def get_channels(page=None, q=None, tag=None, pg=None): + params = { + 
'page': page, + 'q': q, + 'tag': tag + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/channels', params=params) + return r.json() + + +@logger.catch +def get_playlists(page=None, page_size=None, q=None, ordering='-modification_date', pg=None): + '''List playlists''' + params = { + 'page': page, + 'page_size': page_size, + 'q': q, + 'ordering': ordering + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/playlists', params=params) + r.raise_for_status() + return r.json() + + +@logger.catch +def get_playlist_tracks(playlist_id, pg=None): + '''Retrieve all tracks in the playlist''' + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/playlists/{playlist_id}/tracks') + return r.json() + + +@logger.catch +def list_libraries(page=None, page_size=None, q=None, scope='all', pg=None): + params = { + 'page': page, + 'page_size': page_size, + 'q': q, + 'scope': scope, + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/libraries', params=params) + return r.json() + + +@logger.catch +def assigned_libraries_on_track(track_id, page=None, page_size=None, pg=None): + params = { + 'page': page, + 'page_size': page_size, + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/tracks/{track_id}/libraries', params=params) + return r.json() + + +@logger.catch +def get_tags(q=None, ordering='-creation_date', pg=None): + params = { + 'q': q, + 'ordering': ordering + } + if pg: + r = current_instance.s.get(pg) + else: + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/tags', params=params) + return r.json() + + +@logger.catch +def federate_search_by_url(object): + params = { + 'object': object + } + for i in range(attempts_for_federate): + try: + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/federation/fetches', json=params) + r.raise_for_status() + break + except Exception as Err: + logger.error(f'Attempt {i}: {Err}') + time.sleep(3) + return r.json() + + +@logger.catch +def federate_remote_library(fid): + '''Fetch remote library for follow and scan''' + params = { + 'fid': fid + } + for i in range(attempts_for_federate): + try: + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/federation/libraries/fetch', json=params) + r.raise_for_status() + break + except Exception as Err: + logger.error(f'Attempt {i}: {Err}') + time.sleep(3) + return r.json() + + +@logger.catch +def scan_remote_library(uuid): + for i in range(attempts_for_federate): + try: + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/federation/libraries/{uuid}/scan') + r.raise_for_status() + break + except Exception as Err: + logger.error(f'Attempt {i}: {Err}') + time.sleep(3) + return r.json() + + +@logger.catch +def follow_on_remote_library(uuid): + params = {'target': uuid} + for i in range(attempts_for_federate): + try: + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/federation/follows/library/', + json=params) + r.raise_for_status() + break + except Exception as Err: + logger.error(f'Attempt follow {i}: {Err}') + time.sleep(3) + return r.json() + + +@logger.catch +def unfollow_remote_library(uuid): + r = 
current_instance.s.delete( + f'https://{current_instance.instance}/api/v1/federation/follows/library/{uuid}/') + r.raise_for_status() + return r.json() + + +@logger.catch +def record_track_in_history(track_id): + params = { + 'track': int(track_id) + } + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/history/listenings', json=params) + r.raise_for_status() + return r.json + + +@logger.catch +def favorite_track(track_id): + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/favorites/tracks', json={'track': int(track_id)}) + r.raise_for_status() + return r.json + + +@logger.catch +def unfavorite_track(track_id): + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/favorites/tracks/delete', json={'track': int(track_id)}) + r.raise_for_status() + return r.json + + +@logger.catch +def hide_content(content): + '''This function hide content (write permission)''' + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/moderation/content-filters/', json=content) + r.raise_for_status() + return r.json + + +# [FunkWhale radios] +def get_radios(): + r = current_instance.s.get( + f'https://{current_instance.instance}/api/v1/radios/radios/') + return r.json() + + +def post_radio_session(requested_radio): + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/radios/sessions/', json=requested_radio) + return r.json() + + +@logger.catch +def get_track_radio(radio_session): + r = current_instance.s.post( + f'https://{current_instance.instance}/api/v1/radios/tracks/', json=radio_session) + return r.json() diff --git a/src/fw_api_hints.py b/src/fw_api_hints.py new file mode 100644 index 0000000..a6e63a9 --- /dev/null +++ b/src/fw_api_hints.py @@ -0,0 +1,10 @@ +def hint_scope(): + print( +'''Limit the results to a given user or pod: + Use all (or do not specify the property to disable scope filtering) + Use me to retrieve content relative to the current user + Use subscribed to retrieve content in libraries you follow + Use actor:alice@example.com to retrieve content relative to the account `alice@example.com + Use domain:example.com to retrieve content relative to the domain `example.com +You can specify multiple coma separated scopes, e.g me,subscribed to retrieve content matching either scopes +''') diff --git a/src/fw_artists.py b/src/fw_artists.py new file mode 100644 index 0000000..d97c763 --- /dev/null +++ b/src/fw_artists.py @@ -0,0 +1,84 @@ +from src.fw_api import get_artists, get_tracks, get_audio_file +from src.fw_api_hints import hint_scope +from src.fw_albums import list_albums +from src.fw_libraries import libraries +from src.fw_tags import list_tags +from src.mpv_control import player, player_menu, track_url_to_uuid +from pyfzf.pyfzf import FzfPrompt +from loguru import logger + +fzf = FzfPrompt() + + +@logger.catch +def list_artists(pg=None, search=None, library=None, scope=None, tag=None): + artists = get_artists(q=search, library=library, pg=pg, scope=scope, tag=tag) + artists_next = artists.get('next') + artists_prev = artists.get('previous') + artists_results = artists.get('results') + view = ['Search', 'Tag', 'Library', 'Limit by scope'] + if artists_next: + view.append('Next page') + if artists_prev: + view.append('Prev page') + + for i in artists_results: + index = artists_results.index(i) + artist_name = i.get('name') + artist_tracks_count = i.get('tracks_count') + view.append(f'{index}.{artist_name} | {artist_tracks_count}') + select = fzf.prompt(view, 
'--header=\'map: artist | tracks count\'') + if select == []: + return + else: + select = select[0].split('.', 1)[0] + + if select == 'Next page': + list_artists(pg=artists_next) + elif select == 'Prev page': + list_artists(pg=artists_prev) + elif select == 'Search': + print('Search by artist:') + list_artists(search=input()) + elif select == 'Tag': + list_artists(tag=list_tags()) + elif select == 'Library': + select_lib = libraries() + list_artists(library=select_lib) + elif select == 'Limit by scope': + hint_scope() + scope = input() + list_artists(scope=scope) + else: + albums = artists_results[int(select)].get('albums') + if albums: + list_albums(albums=albums) + else: # Fallback on tracks of selected artist + play_artist(artists_results[int(select)]['id']) + + +def play_artist(artist_id): + tracks = get_tracks(artist=artist_id, ordering='disc_number,position', + include_channels=True, pg=None) + tracks_next = tracks.get('next') + tracks_count = tracks.get('count') + storage = {} + if tracks_count > 50: + print(f'Loading {tracks_count} tracks...') + elif tracks_count == 0: + logger.warning('Empty tracks. Nothing to do') + return + while True: + tracks_results = tracks.get('results') + tracks_next = tracks.get('next') + for i in tracks_results: + listen_url = get_audio_file(i['listen_url'], True) + storage[track_url_to_uuid(listen_url)] = i + player.loadfile(listen_url, 'append-play') + if tracks_next: + tracks = get_tracks( + artist=artist_id, include_channels=True, pg=tracks_next) + else: + break + artist_name = tracks.get('results')[0]['artist']['name'] + player_menu(f"Artist {artist_name} playing...", storage) diff --git a/src/fw_channels.py b/src/fw_channels.py new file mode 100644 index 0000000..fc61f40 --- /dev/null +++ b/src/fw_channels.py @@ -0,0 +1,43 @@ +from src.fw_api import get_channels +from src.fw_albums import list_albums +from loguru import logger +from pyfzf.pyfzf import FzfPrompt + +fzf = FzfPrompt() + + +@logger.catch +def list_channels(pg=None, search=None): + channels = get_channels(q=search, pg=pg) + channels_next = channels.get('next') + channels_prev = channels.get('previous') + channels_results = channels.get('results') + view = ['Search'] + if channels_next: + view.append('Next page') + if channels_prev: + view.append('Prev page') + + for i in channels_results: + index = channels_results.index(i) + channel_name = i.get('artist').get('name') + view.append(f'{index}.{channel_name}') + select = fzf.prompt(view)[0].split('.', 1) + if select == []: + return + else: + select = select[0] + + if select == 'Next page': + list_channels(pg=channels_next) + elif select == 'Prev page': + list_channels(pg=channels_prev) + elif select == 'Search': + print('Search by channel:') + list_channels(search=input()) + else: + refresh = False + if channels_results[int(select)].get('artist').get('is_local') == False: + refresh = True + list_albums(artist=channels_results[int(select)].get( + 'artist').get('id'), include_channels=True, refresh=refresh) diff --git a/src/fw_instances.py b/src/fw_instances.py new file mode 100644 index 0000000..4161e87 --- /dev/null +++ b/src/fw_instances.py @@ -0,0 +1,151 @@ +from src.fw_api import current_instance +import src.settings as settings +from pyfzf.pyfzf import FzfPrompt +from shlex import quote +from loguru import logger +import json +import time +import concurrent +import requests + +fzf = FzfPrompt() + + +@logger.catch +def get_new_funkwhale_servers(): + # Uses official API network.funkwhale.audio for getting new instances + 
public_server_api = 'https://network.funkwhale.audio/dashboards/api/tsdb/query' + now = int(time.time()) + timeback = now - 86400 + + request_public_servers = { + 'from': f"{timeback}", + 'to': f"{now}", + 'queries': [ + { + 'refId': "A", + 'intervalMs': 60000, + 'maxDataPoints': 1174, + 'datasourceId': 1, + 'rawSql': "SELECT * FROM (\n SELECT\n DISTINCT on (c.domain) c.domain as \"Name\",\n c.up as \"Is up\",\n coalesce(c.open_registrations, false) as \"Open registrations\",\n coalesce(anonymous_can_listen, false) as \"Anonymous can listen\",\n coalesce(c.usage_users_total, 0) as \"Total users\",\n coalesce(c.usage_users_active_month, 0) as \"Active users (this month)\",\n coalesce(c.software_version_major, 0)::text || '.' || coalesce(c.software_version_minor, 0)::text || '.' || coalesce(c.software_version_patch, 0)::text as \"Version\",\n c.time as \"Last checked\",\n d.first_seen as \"First seen\"\n FROM checks as c\n INNER JOIN domains AS d ON d.name = c.domain\n WHERE d.blocked = false AND c.up = true AND c.time > now() - INTERVAL '7 days'\n AND c.anonymous_can_listen IN ('true')\n AND c.open_registrations IN ('true','false')\n\n ORDER BY c.domain, c.time DESC\n) as t ORDER BY \"Active users (this month)\" DESC", + 'format': "table" + } + ] + } + try: + r = requests.post(public_server_api, json=request_public_servers) + results = r.json() + new_instances = {} + if results: + new_instances_list = results['results']['A']['tables'][0]['rows'] + for i in new_instances_list: + anonymousCanListen = i[1] + if anonymousCanListen: + new_instances[i[0]] = f'{anonymousCanListen} | ?' + + for i in get_new_funkwhale_servers_fediverse_observer(): + new_instances[i] = "?" + return new_instances + except: # If any errors then return empty list + return {} + + +def get_new_funkwhale_servers_fediverse_observer(): + try: + graphQL_request = { + 'query': + '{\n nodes(softwarename: \"funkwhale\") {\n domain\n metanodeinfo\n }\n}' + } + r = requests.post('https://api.fediverse.observer/', + headers={'Accept-Encoding': 'gzip, deflate'}, + json=graphQL_request) + new_instances = [] + for i in r.json()['data']['nodes']: + if i.get('metanodeinfo'): + auth_no_required = json.loads(i['metanodeinfo'])['library']['anonymousCanListen'] + if auth_no_required and i['domain']: + new_instances.append(i['domain']) + return new_instances + except: + return [] + + +def fetch_instances_nodeinfo_and_avalaibility(instances): + extended_instances_info = {} + + def request_nodeinfo(instance): + return requests.get('https://' + instance + '/api/v1/instance/nodeinfo/2.0/', + headers={ + 'Accept-Encoding': 'gzip, brotli, deflate', + 'User-Agent': 'funkwlmpv/latest-commit; +https://git.phreedom.club/localhost_frssoft/funkwlmpv'}, + timeout=10).json() + + with concurrent.futures.ThreadPoolExecutor() as executor: # optimally defined number of threads + res = [executor.submit(request_nodeinfo, instance) for instance in instances] + concurrent.futures.wait(res) + for idx, v in enumerate(instances): + try: + data_for_instance = res[idx].result() + anon = data_for_instance['metadata']['library']['anonymousCanListen'] + tracks = data_for_instance['metadata']['library']['tracks']['total'] + extended_instances_info[v] = f'{anon} | {tracks}' + except: + extended_instances_info[v] = 'fail' + return extended_instances_info + + +def instances_menu(fetch_manually=False, fetch_node_info=False): + with open('config.json', 'rt') as f: + conf = json.loads(f.read()) + if conf.get('automatic_fetch_new_instances') or fetch_manually: + 
public_server_list_instances = get_new_funkwhale_servers() + new_ins_count = len(public_server_list_instances) + else: + public_server_list_instances = {} + new_ins_count = 'Disabled' + + list_instances = conf.get('public_list_instances_extended') + if public_server_list_instances != {}: + list_instances_merge = {**list_instances, **public_server_list_instances} + settings.set_config('public_list_instances_extended', list_instances_merge) + list_instances = list_instances_merge + + map_in_extend_mode = '' + if fetch_node_info: + list_instances = fetch_instances_nodeinfo_and_avalaibility([instance.split('|')[0].strip() for instance in list_instances.keys()]) + settings.set_config('public_list_instances_extended', list_instances) + map_in_extend_mode = '\nmap: instance | anonymousCanListen | tracks' + instance_menu_selector = ['Add new instance', + 'Fetch new instances', + 'Fetch nodeinfo and avalaibility', + 'Remove unreachible instances', + 'Shuffle'] + + instance = fzf.prompt( + instance_menu_selector + + [f'{instance} | {info}' for instance, info in list_instances.items()], + '--header='+quote(f'Select instance\nNew instances: {new_ins_count}{map_in_extend_mode}')) + if instance == []: + return + else: + instance = instance[0].split('|')[0].strip() + if instance == 'Add new instance': + new = input('example.com\n').strip() + list_instances[new] = 'added by user' + settings.set_config('public_list_instances_extended', list_instances) + instance = new + if instance == 'Fetch new instances': + return instances_menu(fetch_manually=True) + if instance == 'Fetch nodeinfo and avalaibility': + return instances_menu(fetch_node_info=True) + if instance == 'Shuffle': + import random + instance = random.choice(list(list_instances.keys())) + if instance == 'Remove unreachible instances': + clean_unreach = {} + for ins, info in list_instances.items(): + if 'fail' not in info.split(): + clean_unreach[ins] = info + settings.set_config('public_list_instances_extended', clean_unreach) + return instances_menu() + current_instance.select_instance(instance) diff --git a/src/fw_libraries.py b/src/fw_libraries.py new file mode 100644 index 0000000..308f1ed --- /dev/null +++ b/src/fw_libraries.py @@ -0,0 +1,78 @@ +from src.fw_api import current_instance, list_libraries, federate_remote_library, scan_remote_library, follow_on_remote_library +from pyfzf.pyfzf import FzfPrompt +from loguru import logger +import time + +fzf = FzfPrompt() + + +@logger.catch +def libraries(pg=None, radio=False, search=None): + libs_res = list_libraries(pg=pg, q=search) + libs_count = libs_res.get('count') + libs_next = libs_res.get('next') + libs_prev = libs_res.get('previous') + libs = libs_res.get('results') + libraries_listing = ['Search'] + if libs_next: + libraries_listing.append('Next') + if libs_prev: + libraries_listing.append('Prev') + if current_instance.s.headers.get('Authorization'): + libraries_listing.append('Add remote library') + + for lib_i in libs: + index = libs.index(lib_i) + lib_name = lib_i.get('name') + lib_tracks_count = lib_i.get('uploads_count') + lib_access = lib_i.get('privacy_level') + lib_by = lib_i.get('actor').get('full_username') + libraries_listing.append(f'{index}.{lib_name} | {lib_by} | {lib_tracks_count} | {lib_access}') + lib_select = fzf.prompt( + libraries_listing, + f'--header=\'found {libs_count} libraries\nmap: library name | owner | tracks count\'') + if lib_select == []: + return + else: + lib_select = lib_select[0].split('.', 1) + + if lib_select[0] == 'Next': + return 
libraries(pg=libs_next, search=search) + elif lib_select[0] == 'Prev': + return libraries(pg=libs_prev, search=search) + elif lib_select[0] == 'Search': + q = input('Name of library:\n') + return libraries(search=q) + elif lib_select[0] == 'Add remote library': + print('Search a remote library (url\\fid):') + new_library = federate_remote_library(input().strip()) + if new_library.get('detail'): + logger.error(new_library['detail']) + return + if new_library.get('count') > 0: + print('Library found') + one_lib = new_library['results'][0] + if one_lib['privacy_level'] == 'private': + logger.warning('This library is private, you should wait until your request is approved') + + follow_on_remote_library(one_lib['uuid']) + scan = scan_remote_library(one_lib['uuid']) + if scan.get('detail'): + logger.error(scan['detail']) + return + status = scan['status'] + if status == 'scheduled': + print(f'Scanning {status}. Please wait few minutes for scan and open libraries menu again') + else: + print(f'Scan is {status}') + time.sleep(3) + return + else: + lib_addr = lib_select[0] + lib_name = lib_select[1] + lib_uuid = libs[int(lib_addr)]['uuid'] + lib_fid = libs[int(lib_addr)]['fid'] + if radio: + return None, 'library', f'{lib_name}\n{lib_fid}', lib_uuid + else: + return lib_uuid diff --git a/src/fw_playlists.py b/src/fw_playlists.py new file mode 100644 index 0000000..bfaf50f --- /dev/null +++ b/src/fw_playlists.py @@ -0,0 +1,64 @@ +from src.fw_api import get_playlists, get_playlist_tracks, get_audio_file +from src.mpv_control import player, player_menu, track_url_to_uuid +from pyfzf.pyfzf import FzfPrompt +from loguru import logger + +fzf = FzfPrompt() + + +@logger.catch +def list_playlists(pg=None, search=None): + playlists = get_playlists(q=search, pg=pg) + playlists_next = playlists.get('next') + playlists_prev = playlists.get('previous') + playlists_results = playlists.get('results') + view = ['Search'] + if playlists_next: + view.append('Next page') + if playlists_prev: + view.append('Prev page') + + for i in playlists_results: + index = playlists_results.index(i) + playlist_name = i.get('name') + view.append(f'{index}.{playlist_name}') + select = fzf.prompt(view) + if select == []: + return + else: + select = select[0].split('.', 1)[0] + + if select == 'Next page': + list_playlists(pg=playlists_next) + elif select == 'Prev page': + list_playlists(pg=playlists_prev) + elif select == 'Search': + print('Search by playlist:') + list_playlists(search=input()) + else: + play_playlist(playlist_id=playlists_results[int(select)].get('id')) + + +def play_playlist(playlist_id): + tracks = get_playlist_tracks(playlist_id, pg=None) + tracks_next = tracks.get('next') + tracks_count = tracks.get('count') + storage = {} + if tracks_count > 50: + print(f'Loading {tracks_count} tracks...') + elif tracks_count == 0: + logger.warning('Empty tracks. 
Nothing to do') + return + while True: + tracks_results = tracks.get('results') + tracks_next = tracks.get('next') + for i in tracks_results: + track = i.get('track') + listen_url = get_audio_file(track['listen_url'], True) + storage[track_url_to_uuid(listen_url)] = track + player.loadfile(listen_url, 'append-play') + if tracks_next: + tracks = get_playlist_tracks(playlist=playlist_id, pg=tracks_next) + else: + break + player_menu(f"Playlist playing...", storage) diff --git a/src/fw_radios.py b/src/fw_radios.py new file mode 100644 index 0000000..6525f44 --- /dev/null +++ b/src/fw_radios.py @@ -0,0 +1,207 @@ +from src.fw_api import current_instance, get_radios, post_radio_session, get_track_radio, list_libraries, favorite_track, get_audio_file, hide_content +from src.fw_libraries import libraries +from src.fw_tags import list_tags +from src.utils import download_track, track_info_output +from src.mpv_control import player, track_url_to_uuid, player_fw_storage, soft_volume_reduce, send_listen_activity +from src.settings import get_config +from pyfzf.pyfzf import FzfPrompt +from loguru import logger +from shlex import quote +import threading +import time + +fzf = FzfPrompt() + + +@logger.catch +def list_radios(error_given=None): + radios = get_radios() + count = radios.get('count') + + results = radios.get('results') + view = [] + for i in results: + index = results.index(i) + id_radio = i.get('id') + name = i.get('name') + descr = i.get('description') + radio_option = name + if descr and descr != "": + radio_option += f' | {descr}' + view.append(f'{index}.{radio_option}') + # Radios avalaible only for auth user + if current_instance.s.headers.get('Authorization'): + view.append('Favourites') + view.append('Less listened') + view.extend(['Tag', 'Random', 'Libraries', 'Users', 'Recently Added']) + header = f'Found {count} radios' + if error_given: + header += f'\n{error_given}' + header = quote(header) + selected = fzf.prompt( + view, f'--header {header} --read0', delimiter="\0") + if selected == []: + return + else: + selected = selected[0].split('.', 1) + + if 'Favourites' in selected: + radio_load(id_radio, 'favorites', name='your favorites tracks') + elif 'Tag' in selected: + tag = list_tags() + radio_load(type_radio='tag', name=f'by tag: {tag}', related_object=tag) + elif 'Random' in selected: + radio_load(id_radio, 'random', name='totally random') + elif 'Libraries' in selected: + id_radio, type_radio, name_radio, related_obj = libraries(radio=True) + radio_load(id_radio, type_radio, name_radio, related_obj) + elif 'Users' in selected: + libs = list_libraries()['results'] + libraries_listing = [] + for lib_i in libs: + lib_name = lib_i.get('actor').get('full_username') + if lib_name not in libraries_listing: + libraries_listing.append(lib_name) + libraries_listing.append('Custom') + lib_addr = fzf.prompt(libraries_listing)[0] + if lib_addr == 'Custom': + print('Input remote user library (ex. 
nick@funkwhale.domain.example: ') + lib_addr = input() + radio_load(None, 'actor-content', lib_addr, lib_addr) + elif 'Recently Added' in selected: + radio_load(id_radio, 'recently-added', + name='Newest content on the network') + elif 'Less listened' in selected: + radio_load(id_radio, 'less-listened', name="Less listened tracks") + else: + id_selected = selected[0] + id_radio = results[int(id_selected)].get('id') + name_radio = results[int(id_selected)].get('name') + radio_load(id_radio, name=name_radio) + + +def radio_generator(radio_session_id): + count_t = 0 + while radio_session_id != '': + time.sleep(1) + if not radio_event_gen.wait(0): + break + count_t += 1 + if count_t >= 60: + count_t = 0 + playlist_remaining = len(player.playlist) - \ + player.playlist_current_pos + if playlist_remaining <= 2: + radio_get_track(radio_session_id) + print('\rRadio generator stopped', flush=True) + + +radio_event_gen = threading.Event() + + +@logger.catch +def radio_load(id_radio=None, type_radio='custom', name=None, related_object=None): + show_like_button = get_config('show_like_button') + player.volume = get_config('mpv_volume') + requested_radio = { + 'custom_radio': id_radio, + 'radio_type': type_radio, + 'related_object_id': related_object + } + radio_session_id = post_radio_session(requested_radio).get('id') + for i in range(0, 2): + try: + radio_get_track(radio_session_id, first_run=True) + except Exception as E: + return list_radios(error_given=f'Error: {E}') + + radio_event_gen.set() + radio_task = threading.Thread( + target=radio_generator, args=(radio_session_id,), daemon=True) + radio_task.start() + player_items_menu = ['Next', 'Prev', 'Pause', 'Download', 'Info', 'Share'] + + if show_like_button: + player_items_menu.append('Like') + player_items_menu.extend(['Hide artist', 'Exit']) + while True: + try: + if player.pause: + player_items_menu[2] = 'Play' + else: + player_items_menu[2] = 'Pause' + try: + select = fzf.prompt(player_items_menu, + quote(f"--header=\'Radio {name} playing...\'"))[0] + except: + select = 'Exit' + + if select == 'Next': + playlist_remaining = player.playlist_count - player.playlist_current_pos + if playlist_remaining <= 2: + threading.Thread(target=radio_get_track, args=( + radio_session_id,), daemon=True).start() + if playlist_remaining > 1: + player.playlist_next() + else: + print('No more tracks, please wait for new...') + time.sleep(3) + elif select == 'Prev': + player.playlist_prev() + elif select in ('Pause', 'Play'): + if player.pause: + player.pause = False + else: + player.pause = True + elif select == 'Hide artist': + track = player_fw_storage.storage.get(track_url_to_uuid()) + hide_content( + {'target': {'id': track.get('artist').get('id'), 'type': 'artist'}}) + elif select == 'Download': + name_downloaded = download_track(player.stream_open_filename) + elif select == 'Info': + track = player_fw_storage.storage.get(track_url_to_uuid()) + track['direct_url'] = player.stream_open_filename + track_info_output(track) + elif select == 'Share': + send_listen_activity() + elif select == 'Like': + favorite_track(player_fw_storage.storage.get( + track_url_to_uuid())['id']) + elif select == 'Exit': + radio_event_gen.clear() + soft_volume_reduce() + player.playlist_clear() + player.stop() + player_fw_storage.storage = {} + break + except Exception as E: + radio_event_gen.clear() + player.playlist_clear() + player.stop() + player_fw_storage.storage = {} + logger.exception(f'Radio force stopped: {E}') + break + + +def radio_get_track(radio_session_id, 
first_run=False): + radio_context = get_track_radio({'session': radio_session_id}) + if not radio_context: + return + if isinstance(radio_context, str): + logger.error(radio_context) + if radio_context == "Radio doesn't have more candidates": + radio_event_gen.clear() + if first_run: + radio_context = 'This radio may be private or haven\'t tracks' + raise IOError(radio_context) + return + if radio_context.get('error'): + logger.error(radio_context.get('error')) + return + else: + track = radio_context.get('track') + listen_url = track['listen_url'] + player_fw_storage.storage[track_url_to_uuid(listen_url)] = track + player.loadfile(get_audio_file( + listen_url, listen_url=True), 'append-play') diff --git a/src/fw_recents.py b/src/fw_recents.py new file mode 100644 index 0000000..5784cd3 --- /dev/null +++ b/src/fw_recents.py @@ -0,0 +1,58 @@ +from src.fw_api import get_favorires_tracks, get_recently_listened, get_audio_file +from src.fw_api_hints import hint_scope +from src.mpv_control import player, player_menu, track_url_to_uuid, player_fw_storage, play_track +from pyfzf.pyfzf import FzfPrompt +from loguru import logger + +fzf = FzfPrompt() + + +@logger.catch +def list_fav_or_history(pg=None, search=None, scope=None, is_history_view=False): + if is_history_view: + action = 'listened' + tracks = get_recently_listened(q=search, scope=scope, pg=pg) + else: + action = 'liked' + tracks = get_favorires_tracks(q=search, scope=scope, pg=pg) + tracks_next = tracks.get('next') + tracks_prev = tracks.get('previous') + tracks_results = tracks.get('results') + view = ['Search', 'Limit by scope', 'Play this page'] + if tracks_next: + view.append('Next page') + if tracks_prev: + view.append('Prev page') + + for i in tracks_results: + index = tracks_results.index(i) + track_name = i['track'].get('title') + who_user = i['user'].get('username') + view.append(f'{index}.{track_name} | {who_user}') + select = fzf.prompt(view, f'--multi --header=\'map: track title | who {action}\'') + if select == []: + return + + if 'Next page' in select: + list_fav_or_history(pg=tracks_next, is_history_view=is_history_view) + elif 'Prev page' in select: + list_fav_or_history(pg=tracks_prev, is_history_view=is_history_view) + elif 'Search' in select: + print('Search by track:') + list_fav_or_history(search=input(), is_history_view=is_history_view) + elif 'Limit by scope' in select: + hint_scope() + scope = input() + list_fav_or_history(scope=scope, search=search, is_history_view=is_history_view) + elif 'Play this page' in select: + for i in tracks_results: + play_track(track=i['track'], multi=True) + player_fw_storage.menu_ctx = list_fav_or_history + player_fw_storage.menu_ctx_args = [pg, search, scope, is_history_view] + elif len(select) > 1: + for i in select: + play_track(track=tracks_results[int( + i.split('.', 1)[0])]['track'], multi=True) + else: + play_track(track=tracks_results[int( + select[0].split('.', 1)[0])]['track']) diff --git a/src/fw_tags.py b/src/fw_tags.py new file mode 100644 index 0000000..d11ac0d --- /dev/null +++ b/src/fw_tags.py @@ -0,0 +1,36 @@ +from src.fw_api import get_tags, get_tracks +from pyfzf.pyfzf import FzfPrompt +from loguru import logger + +fzf = FzfPrompt() + + +@logger.catch +def list_tags(pg=None, search=None, error_given=''): + tags = get_tags(q=search, pg=pg) + tags_next = tags.get('next') + tags_prev = tags.get('previous') + tags_results = tags.get('results') + view = ['Search'] + if tags_next: + view.append('Next page') + if tags_prev: + view.append('Prev page') + + for i in 
tags_results: + index = tags_results.index(i) + tag_name = i.get('name') + view.append(f'{index}.{tag_name}') + select = fzf.prompt(view, f"--header=\'{error_given}\'")[0].split('.', 1)[0] + if select == 'Next page': + return list_tags(pg=tags_next) + elif select == 'Prev page': + return list_tags(pg=tags_prev) + elif select == 'Search': + print('Search by tag:') + return list_tags(search=input()) + else: + selected_tag = tags_results[int(select)].get('name') + if get_tracks(tag=selected_tag, page_size=1)['count'] == 0: + return list_tags(pg=pg, search=search, error_given='This tag no contain tracks, select another') + return selected_tag diff --git a/src/fw_tracks.py b/src/fw_tracks.py new file mode 100644 index 0000000..bd49dee --- /dev/null +++ b/src/fw_tracks.py @@ -0,0 +1,71 @@ +from src.fw_api import get_tracks +from src.fw_tags import list_tags +from src.fw_libraries import libraries +from src.mpv_control import play_track, player_fw_storage +from pyfzf.pyfzf import FzfPrompt +from loguru import logger +import time + +fzf = FzfPrompt() + + +@logger.catch +def list_tracks(pg=None, search=None, tag=None, library=None): + tracks = get_tracks(q=search, pg=pg, tag=tag, library=library) + tracks_count = tracks.get('count') + tracks_next = tracks.get('next') + tracks_prev = tracks.get('previous') + tracks_results = tracks.get('results') + view = ['Search', 'Tags', 'Library', 'Play this page'] + if tracks_next: + view.append('Play all pages') + view.append('Next page') + if tracks_prev: + view.append('Prev page') + + for i in tracks_results: + index = tracks_results.index(i) + track_name = i.get('title') + view.append(f'{index}.{track_name}') + select = fzf.prompt(view, f'--header=\'Found {tracks_count} tracks\'') + if select == []: + return + else: + select = select[0].split('.', 1)[0] + if select == 'Next page': + list_tracks(pg=tracks_next) + elif select == 'Prev page': + list_tracks(pg=tracks_prev) + elif select == 'Search': + print('Search by track:') + list_tracks(search=input()) + elif select == 'Tags': + select_tag = list_tags() + list_tracks(tag=select_tag) + elif select == 'Library': + select_lib = libraries() + list_tracks(library=select_lib) + elif select == 'Play this page': + for i in tracks_results: + play_track(track=i, multi=True) + player_fw_storage.menu_ctx = list_tracks + player_fw_storage.menu_ctx_args = [pg, search, tag, library] + elif select == 'Play all pages': + if tracks_count > 500: + yn = input('WARNING: you really want add more than 500 tracks? 
(y/[n] or number of tracks)\n').lower() + try: + tracks_count = int(yn) + except: + if yn != 'y': + return + count_loaded = 0 + while tracks_count > count_loaded: + for i in tracks_results: + play_track(track=i, multi=True) + count_loaded += 1 + time.sleep(0.2) + tracks = get_tracks(pg=tracks_next) + tracks_next = tracks.get('next') + tracks_results = tracks.get('results') + else: + play_track(track=tracks_results[int(select)]) diff --git a/src/mpv_control.py b/src/mpv_control.py new file mode 100644 index 0000000..0fd3132 --- /dev/null +++ b/src/mpv_control.py @@ -0,0 +1,240 @@ +import src.fw_api +from src.utils import download_track, print_there, track_info_output, indices +from src.settings import get_config +from loguru import logger +from pyfzf.pyfzf import FzfPrompt +from shutil import get_terminal_size +from shlex import quote +from contextlib import suppress +import mpv +import time +import re +import requests + +fzf = FzfPrompt() + +if get_config('enable_persistent_cache'): + player = mpv.MPV(cache=True, + scripts='src/mpv_scripts/mpv_cache.lua:src/mpv_scripts/streamsave.lua', + script_opts='streamsave-save_directory=cache,streamsave-dump_mode=continuous,streansave-force_extension=.mkv,streamsave-autostart=no,output_label=overwrite') + player.command('script-message', 'streamsave-path', 'cache') +else: + player = mpv.MPV(cache=True, demuxer_max_bytes=25*1024*1024) +player.ytdl = False # Prevent attempts load track with yt-dlp +player.volume = get_config('mpv_volume') +player.prefetch_playlist = get_config('prefetch_playlist') +show_like_button = get_config('show_like_button') +share_to_fediverse_token = get_config('share_to_fediverse_token') +share_to_fediverse_instance = get_config('share_to_fediverse_instance') +shuffle = False + +if get_config('termux_handle_track_switch_by_volume'): + import src.android_termux_api + + +class player_fw_storage: + storage = {} + menu_ctx = None + menu_ctx_args = None + + +@logger.catch +def track_url_to_uuid(listen_url=None): + '''Attempt get uuid from track listen url or current playing url''' + hex = '[0-9a-fA-F]+' + find_uuid = f'{hex}-{hex}-{hex}-{hex}-{hex}' + + if listen_url: + uuid = re.findall(find_uuid, listen_url) + else: + uuid = re.findall(find_uuid, player.stream_open_filename) + + return uuid[0] + + +def send_listen_activity(): + try: + track = player_fw_storage.storage.get(track_url_to_uuid()) + except: + return + if src.fw_api.current_instance.token is not None: + track_id = track.get('id') + + if track_id: + src.fw_api.record_track_in_history(track_id) + else: + logger.error("Can't write track to history: No track id") + if share_to_fediverse_token != '': + fid = track.get('fid') + artist = track['artist'].get('name') + album = track['album'].get('title') + title = track.get('title') + tags = track.get('tags') + if tags: + tags = [f'#{tag}' for tag in tags] + tags = ' '.join(tags) + if tags == []: + tags = '' + status_obj = {'spoiler_text': 'funkwlmpv music share', + 'visibility': 'unlisted', + 'status': f'🎧 {artist} - {album} - {title}\n{fid}\n#NowPlaying {tags}'} + requests.post(f'https://{share_to_fediverse_instance}/api/v1/statuses', + json=status_obj, + headers={'Authorization': f'Bearer {share_to_fediverse_token}'}) + + +def osd_observer(value): + '''Sumulate osd playing message in console''' + if value: + osd_message = [] + for i in value.items(): + if i[0] in ('Artist', 'Album', 'Title'): + osd_message.append(i[1]) + osd_string = ' - '.join(osd_message) + term_len = get_terminal_size().columns + print_there(0, 0, '\r'+' 
'*term_len) + print_there(0, 0, '\r'+osd_string[:term_len]) + else: + print_there(0, 0, '\rNo metadata...') + + +@player.event_callback('start-file') +@logger.catch +def starting_file_handler(value): + '''just show loading state''' + print_there(0, 0, '\rLoading track...') + + +@player.property_observer('percent-pos') +@logger.catch +def universal_observer(_name, value): + if value: + percent = int(value) + if player.audio_bitrate: + kbps = int(player.audio_bitrate/1024) + else: + kbps = '?' + if player.file_size: + track_size = round(player.file_size/1024/1024, 1) + else: + track_size = '?' + if player.cache_speed: + speed_load = player.cache_speed + if speed_load >= 3*1024*1024: + cache_speed = '| <<<' + elif speed_load >= 1*1024*1024: + cache_speed = '| <<*' + else: + cache_speed = '| <=>' + else: + cache_speed = '' + if player.playlist_count > -1: + player_pos = f'{player.playlist_pos_1}/{player.playlist_count}' + else: + player_pos = '-/-' + osd_observer(player.filtered_metadata) + print_there(2, 2, f'\r'+' '*get_terminal_size().columns) + print_there(2, 2, f'\r{player_pos} | {kbps} kbps | {percent}% | {track_size}MB {cache_speed}') + time.sleep(1) + + +def soft_volume_reduce(): + while player.volume > 10: + player.volume = player.volume - 1 + time.sleep(0.050) + + +@logger.catch +def player_menu(header='', storage={}): + for i in player.playlist_filenames: + count_same_tracks = indices(player.playlist_filenames, i) + while len(count_same_tracks) > 1: + with suppress(SystemError): + player.playlist_remove(count_same_tracks[-1]) + count_same_tracks = indices(player.playlist_filenames, i) + player_fw_storage.storage.update(storage) + player.volume = get_config("mpv_volume") + global shuffle + while True: + try: + player_items_menu = ['Next', 'Prev', 'Pause', + 'Shuffle', 'Download', 'Info', 'Share', 'Jump to'] + if player.pause: + player_items_menu[2] = 'Play' + else: + player_items_menu[2] = 'Pause' + if shuffle: + player_items_menu[3] = 'Unshuffle' + else: + player_items_menu[3] = 'Shuffle' + if show_like_button: + player_items_menu.append('Like') + if player_fw_storage.menu_ctx: + player_items_menu.append('Add more tracks') + player_items_menu.extend(['Hide artist', 'Exit']) + + select = fzf.prompt(player_items_menu, quote(f"--header=\'{header}\'")) + if select == []: + break + else: + select = select[0] + + if select == 'Next': + try: + player.playlist_next() + except: + print('No more next tracks') + elif select == 'Prev': + player.playlist_prev() + elif select in ('Pause', 'Play'): + player.cycle('pause') + elif select in ('Shuffle', 'Unshuffle'): + if shuffle: + shuffle = False + player.playlist_unshuffle() + else: + shuffle = True + player.playlist_shuffle() + player.playlist_play_index(0) + elif select == 'Download': + name_downloaded = download_track(player.stream_open_filename) + elif select == 'Info': + track = player_fw_storage.storage.get(track_url_to_uuid()) + track['direct_url'] = player.stream_open_filename + track_info_output(track) + elif select == 'Share': + send_listen_activity() + elif select == 'Jump to': + jump_to_idx = int(fzf.prompt(range(1, len(player.playlist_filenames)+1))[0]) + jump_to_idx -= 1 + player.playlist_play_index(jump_to_idx) + elif select == 'Like': + src.fw_api.favorite_track( + player_fw_storage.storage.get(track_url_to_uuid())['id']) + elif select == 'Add more tracks': + player_fw_storage.menu_ctx(*player_fw_storage.menu_ctx_args) + elif select == 'Hide artist': + track = player_fw_storage.storage.get(track_url_to_uuid()) + 
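+                # drop the current entry from the playlist, then ask the instance to hide this artist's content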
player.playlist_remove('current') + src.fw_api.hide_content( + {'target': {'id': track.get('artist').get('id'), 'type': 'artist'}}) + elif select == 'Exit': + shuffle = False + soft_volume_reduce() + player.playlist_clear() + player.stop() + player_fw_storage.storage = {} + break + except KeyboardInterrupt: + break + + +def play_track(track, multi=False): + listen_url = src.fw_api.get_audio_file(track['listen_url'], True) + player_fw_storage.storage[track_url_to_uuid(listen_url)] = track + if multi: + player.loadfile(listen_url, 'append-play') + else: + player.loadfile(listen_url, 'append-play') + track_name = track.get('title') + player_menu(f"{track_name} playing...", player_fw_storage.storage) diff --git a/src/mpv_scripts/mpv_cache.lua b/src/mpv_scripts/mpv_cache.lua new file mode 100644 index 0000000..b280d89 --- /dev/null +++ b/src/mpv_scripts/mpv_cache.lua @@ -0,0 +1,66 @@ +local utils = require 'mp.utils' +local msg = require 'mp.msg' +local options = require 'mp.options' + + +function sleep(n) + os.execute("sleep " .. tonumber(n)) +end + + +function createDir(dirname) + os.execute("mkdir -p -m 711 " .. dirname) +end + + +function file_exists(name) + local f = io.open(name, "r") + return f ~= nil and io.close(f) +end + + +function get_url_host(s) + return (s.."/"):match("://(.-)/") +end + + +function make_cache_track(url) + mp.command('script-message streamsave-autostart no') + find_uuid = "%x+-%x+-%x+-%x+-%x+" + uuid = string.sub(url, string.find(url, find_uuid)) + host = get_url_host(url) + cache_path_file = 'cache/' .. host .. '/' .. uuid .. '.mkv' + cache_path_named_file = 'cache/' .. host .. '/' .. uuid .. '.mkv' + if false == file_exists(cache_path_file) then + createDir('cache/' .. host .. '/') + msg.verbose('Caching ' .. cache_path_file .. '') + mp.command('script-message streamsave-title ' .. uuid .. '') + mp.command('script-message streamsave-force_title ' .. uuid .. '') + mp.command('script-message streamsave-label overwrite') + mp.set_property('script-opts/media-uuid', uuid) + mp.command('script-message streamsave-extension .mkv') + mp.command('script-message streamsave-path cache/' .. host .. '') + mp.command('script-message streamsave-autostart yes') + else + msg.verbose('Already cached ' .. cache_path_file .. '') + os.execute('touch ' .. cache_path_file .. '') + mp.set_property("stream-open-filename", cache_path_file) + end +end + + +mp.add_hook("on_load", 11, function() + msg.verbose('reusable cache hook activated') + local url = mp.get_property("stream-open-filename", "") + if true == (url:find("https?://") == 1) then + make_cache_track(url) + end +end) + +mp.register_event("file-loaded", function() + msg.verbose('reusable cache post-hook activated') + local url = mp.get_property("stream-open-filename", "") + if true == (url:find("https?://") == 1) then + make_cache_track(url) + end +end) diff --git a/src/mpv_scripts/streamsave.lua b/src/mpv_scripts/streamsave.lua new file mode 100644 index 0000000..00eab59 --- /dev/null +++ b/src/mpv_scripts/streamsave.lua @@ -0,0 +1,1219 @@ +--[[ + +streamsave.lua +Version 0.23.2 +2023-5-21 +https://github.com/Sagnac/streamsave + +mpv script aimed at saving live streams and clipping online videos without encoding. 
+ +Essentially a wrapper around mpv's cache dumping commands, the script adds the following functionality: + +* Automatic determination of the output file name and format +* Option to specify the preferred output directory +* Switch between 5 different dump modes: + (clip mode, full/continuous dump, write from beginning to current position, current chapter, all chapters) +* Prevention of file overwrites +* Acceptance of inverted loop ranges, allowing the end point to be set first +* Dynamic chapter indicators on the OSC displaying the clipping interval +* Option to track HLS packet drops +* Automated stream saving +* Workaround for some DAI HLS streams served from .m3u8 where the host changes + +By default the A-B loop points (set using the `l` key in mpv) determine the portion of the cache written to disk. + +It is advisable that you set --demuxer-max-bytes and --demuxer-max-back-bytes to larger values +(e.g. at least 1GiB) in order to have a larger cache. +If you want to use with local files set cache=yes in mpv.conf + +Options are specified in ~~/script-opts/streamsave.conf + +Runtime changes to all user options are supported via the `script-opts` property by using mpv's `set` or +`change-list` input commands and the `streamsave-` prefix. + +General Options: + +save_directory sets the output file directory. Don't use quote marks or a trailing slash when specifying paths here. +Example: save_directory=C:\User Directory +mpv double tilde paths ~~/ and home path shortcuts ~/ are also accepted. +By default files are dumped in the current directory. + +dump_mode=continuous will use dump-cache, setting the initial timestamp to 0 and leaving the end timestamp unset. + +Use this mode if you want to dump the entire cache. +This process will continue as packets are read and until the streams change, the player is closed, +or the user presses the stop keybind. + +Under this mode pressing the cache-write keybind again will stop writing the first file and +initiate another file starting at 0 and continuing as the cache increases. + +If you want continuous dumping with a different starting point use the default A-B mode instead +and only set the first loop point then press the cache-write keybind. + +dump_mode=current will dump the cache from timestamp 0 to the current playback position in the file. + +dump_mode=chapter will write the current chapter to file. + +dump_mode=segments writes out all chapters to individual files. + +If you wish to output a single chapter using a numerical input instead you can specify it with a command at runtime: +script-message streamsave-chapter 7 + +The output_label option allows you to choose how the output filename is tagged. +The default uses iterated step increments for every file output; i.e. file-1.mkv, file-2.mkv, etc. + +There are 4 other choices: + +output_label=timestamp will append Unix timestamps to the file name. + +output_label=range will tag the file with the A-B loop range instead using the format HH.MM.SS +e.g. file-[00.15.00 - 00.20.00].mkv + +output_label=overwrite will not tag the file and will overwrite any existing files with the same name. + +output_label=chapter uses the chapter title for the file name if using one of the chapter modes. + +The force_extension option allows you to force a preferred format and sidestep the automatic detection. +If using this option it is recommended that a highly flexible container is used (e.g. Matroska). +The format is specified as the extension including the dot (e.g. force_extension=.mkv). 
+If this option is set, `script-message streamsave-extension revert` will run the automatic determination at runtime; +running this command again will reset the extension to what's specified in force_extension. + +The force_title option will set the title used for the filename. By default the script uses the media-title. +This is specified without double quote marks in streamsave.conf, e.g. force_title=Example Title +The output_label is still used here and file overwrites are prevented if desired. +Changing the filename title to the media-title is still possible at runtime by using the revert argument, +as in the force_extension example. + +The range_marks option allows the script to set temporary chapters at A-B loop points. +If chapters already exist they are stored and cleared whenever any A-B points are set. +Once the A-B points are cleared the original chapters are restored. +Any chapters added after A-B mode is entered are added to the initial chapter list. +This option is disabled by default; set range_marks=yes in streamsave.conf in order to enable it. + +The track_packets option adds chapters to positions where packet loss occurs for HLS streams. + +Automation Options: + +The autostart and autoend options are used for automated stream capturing. +Set autostart=yes if you want the script to trigger cache writing immediately on stream load. +Set autoend to a time format of the form HH:MM:SS (e.g. autoend=01:20:08) if you want the file writing +to stop at that time. + +The hostchange option enables an experimental workaround for DAI HLS .m3u8 streams in which the host changes. +If enabled this will result in multiple files being output as the stream reloads. +The autostart option must also be enabled in order to autosave these types of streams. +The `on_demand` option is a suboption of the hostchange option which, if enabled, triggers reloads immediately across +segment switches without waiting until playback has reached the end of the last segment. + +The `quit=HH:MM:SS` option will set a one shot timer from script load to the specified time, +at which point the player will exit. This serves as a replacement for autoend when using hostchange. +Running `script-message streamsave-quit HH:MM:SS` at runtime will reset and restart the timer. + +Set piecewise=yes if you want to save a stream in parts automatically, useful for +e.g. saving long streams on slow systems. Set autoend to the duration preferred for each output file. +This feature requires autostart=yes. + +mpv's script-message command can be used at runtime to set the dump mode, override the output title +or file extension, change the save directory, or switch the output label. +If you override the title, the file extension, or the directory, the revert argument can be used +to set it back to the default value. 
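+
+As a rough illustration, a ~~/script-opts/streamsave.conf that combines several of the options above might look like this (illustrative values only, not the defaults):
+
+save_directory=~/streams
+dump_mode=continuous
+output_label=overwrite
+force_extension=.mkv
+autostart=yes
+autoend=01:00:00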
+ +Examples: +script-message streamsave-mode continuous +script-message streamsave-title "Example Title" +script-message streamsave-extension .mkv +script-message streamsave-extension revert +script-message streamsave-path ~/streams +script-message streamsave-label range + + ]] + +local options = require 'mp.options' +local utils = require 'mp.utils' +local msg = require 'mp.msg' + +local unpack = unpack or table.unpack + +-- default user options +-- change these in streamsave.conf +local opts = { + save_directory = [[.]], -- output file directory + dump_mode = "ab", -- + output_label = "increment", -- + force_extension = "no", -- extension will be .ext if set + force_title = "no", -- custom title used for the filename + range_marks = false, -- set chapters at A-B loop points? + track_packets = false, -- track HLS packet drops + autostart = false, -- automatically dump cache at start? + autoend = "no", -- cache time to stop at + hostchange = false, -- use if the host changes mid stream + on_demand = false, -- hostchange suboption, instant reloads + quit = "no", -- quits player at specified time + piecewise = false, -- writes stream in parts with autoend +} + +-- for internal use +local file = { + name, -- file name (full path to file) + path, -- directory the file is written to + title, -- media title + inc, -- filename increments + ext, -- file extension + loaded, -- flagged once the initial load has taken place + pending, -- number of files pending write completion (max 2) + queue, -- cache_write queue in case of multiple write requests + writing, -- file writing object returned by the write command + quitsec, -- user specified quit time in seconds + quit_timer, -- player quit timer set according to quitsec + oldtitle, -- initialized if title is overridden, allows revert + oldext, -- initialized if format is overridden, allows revert + oldpath, -- initialized if directory is overriden, allows revert +} + +local loop = { + a, -- A loop point as number type + b, -- B loop point as number type + a_revert, -- A loop point prior to keyframe alignment + b_revert, -- B loop point prior to keyframe alignment + range, -- A-B loop range + aligned, -- are the loop points aligned to keyframes? + continuous, -- is the writing continuous? +} + +local cache = { + dumped, -- autowrite cache state (serves as an autowrite request) + observed, -- whether the cache time is being observed + endsec, -- user specified autoend cache time in seconds + prior, -- cache duration prior to staging the seamless reload mechanism + seekend, -- seekable cache end timestamp + part, -- approx. 
end time of last piece / start time of next piece + switch, -- request to observe track switches and seeking + use, -- use cache_time instead of seekend for initial piece + id, -- number of times the packet tracking event has fired + packets, -- table of periodic timers indexed by cache id stamps +} + +local track = { + vid, -- video track id + aid, -- audio track id + sid, -- subtitle track id + restart, -- hostchange interval where subsequent reloads are immediate + suspend, -- suspension interval on track-list changes +} + +local segments = {} -- chapter segments set for writing +local chapter_list = {} -- initial chapter list +local ab_chapters = {} -- A-B loop point chapters + +local title_change +local container +local get_chapters +local chapter_points +local reset +local get_seekable_cache +local automatic +local autoquit +local packet_events +local observe_cache +local observe_tracks + +local function convert_time(value) + local H, M, S = value:match("^(%d+):([0-5]%d):([0-5]%d)$") + if H then + return H*3600 + M*60 + S + end +end + +local function validate_opts() + if opts.output_label ~= "increment" and + opts.output_label ~= "range" and + opts.output_label ~= "timestamp" and + opts.output_label ~= "overwrite" and + opts.output_label ~= "chapter" + then + msg.error("Invalid output_label '" .. opts.output_label .. "'") + opts.output_label = "increment" + end + if opts.dump_mode ~= "ab" and + opts.dump_mode ~= "current" and + opts.dump_mode ~= "continuous" and + opts.dump_mode ~= "chapter" and + opts.dump_mode ~= "segments" + then + msg.error("Invalid dump_mode '" .. opts.dump_mode .. "'") + opts.dump_mode = "ab" + end + if opts.autoend ~= "no" then + if not cache.part then + cache.endsec = convert_time(opts.autoend) + end + if not convert_time(opts.autoend) then + msg.error("Invalid autoend value '" .. opts.autoend .. + "'. Use HH:MM:SS format.") + opts.autoend = "no" + end + end + if opts.quit ~= "no" then + file.quitsec = convert_time(opts.quit) + if not file.quitsec then + msg.error("Invalid quit value '" .. opts.quit .. + "'. Use HH:MM:SS format.") + opts.quit = "no" + end + end +end + +local function update_opts(changed) + validate_opts() + -- expand mpv meta paths (e.g. 
~~/directory) + file.path = mp.command_native({"expand-path", opts.save_directory}) + if opts.force_title ~= "no" then + file.title = opts.force_title + elseif changed["force_title"] then + title_change(_, mp.get_property("media-title"), true) + end + if opts.force_extension ~= "no" then + file.ext = opts.force_extension + elseif changed["force_extension"] then + container(_, _, true) + end + if changed["range_marks"] then + if opts.range_marks then + chapter_points() + else + if not get_chapters() then + mp.set_property_native("chapter-list", chapter_list) + end + ab_chapters = {} + end + end + if changed["autoend"] then + cache.endsec = convert_time(opts.autoend) + observe_cache() + end + if changed["autostart"] then + observe_cache() + end + if changed["hostchange"] then + observe_tracks(opts.hostchange) + end + if changed["quit"] then + autoquit() + end + if changed["piecewise"] and not opts.piecewise then + cache.part = 0 + elseif changed["piecewise"] then + cache.endsec = convert_time(opts.autoend) + end + if changed["track_packets"] then + packet_events(opts.track_packets) + end +end + +options.read_options(opts, "streamsave", update_opts) +update_opts{} + +-- dump mode switching +local function mode_switch(value) + value = value or opts.dump_mode + if value == "cycle" then + if opts.dump_mode == "ab" then + value = "current" + elseif opts.dump_mode == "current" then + value = "continuous" + elseif opts.dump_mode == "continuous" then + value = "chapter" + elseif opts.dump_mode == "chapter" then + value = "segments" + else + value = "ab" + end + end + if value == "continuous" then + opts.dump_mode = "continuous" + print("Continuous mode") + mp.osd_message("Cache write mode: Continuous") + elseif value == "ab" then + opts.dump_mode = "ab" + print("A-B loop mode") + mp.osd_message("Cache write mode: A-B loop") + elseif value == "current" then + opts.dump_mode = "current" + print("Current position mode") + mp.osd_message("Cache write mode: Current position") + elseif value == "chapter" then + opts.dump_mode = "chapter" + print("Chapter mode (single chapter)") + mp.osd_message("Cache write mode: Chapter") + elseif value == "segments" then + opts.dump_mode = "segments" + print("Segments mode (all chapters)") + mp.osd_message("Cache write mode: Segments") + else + msg.error("Invalid dump mode '" .. value .. 
"'") + end +end + +-- Set the principal part of the file name using the media title +function title_change(_, media_title, req) + if opts.force_title ~= "no" and not req then + file.title = opts.force_title + return end + if media_title then + -- Replacement of reserved file name characters on Windows + file.title = media_title:gsub("[\\/:*?\"<>|]", ".") + file.oldtitle = nil + end +end + +-- Determine container for standard formats +function container(_, _, req) + local audio = mp.get_property("audio-codec-name") + local video = mp.get_property("video-format") + local file_format = mp.get_property("file-format") + if not file_format then + reset() + observe_tracks() + return end + if opts.force_extension ~= "no" and not req then + file.ext = opts.force_extension + observe_cache() + return end + if string.match(file_format, "mp4") + or ((video == "h264" or video == "av1" or not video) and + (audio == "aac" or not audio)) + then + file.ext = ".mp4" + elseif (video == "vp8" or video == "vp9" or not video) + and (audio == "opus" or audio == "vorbis" or not audio) + then + file.ext = ".webm" + else + file.ext = ".mkv" + end + observe_cache() + observe_tracks() + file.oldext = nil +end + +local function format_override(ext) + ext = ext or file.ext + file.oldext = file.oldext or file.ext + if ext == "revert" and file.ext == opts.force_extension then + container(_, _, true) + elseif ext == "revert" and opts.force_extension ~= "no" then + file.ext = opts.force_extension + elseif ext == "revert" then + file.ext = file.oldext + else + file.ext = ext + end + print("file extension changed to " .. file.ext) + mp.osd_message("streamsave: file extension changed to " .. file.ext) +end + +local function title_override(title) + title = title or file.title + file.oldtitle = file.oldtitle or file.title + if title == "revert" and file.title == opts.force_title then + title_change(_, mp.get_property("media-title"), true) + elseif title == "revert" and opts.force_title ~= "no" then + file.title = opts.force_title + elseif title == "revert" then + file.title = file.oldtitle + else + file.title = title + end + print("title changed to " .. file.title) + mp.osd_message("streamsave: title changed to " .. file.title) +end + +local function path_override(value) + value = value or opts.save_directory + file.oldpath = file.oldpath or opts.save_directory + if value == "revert" then + opts.save_directory = file.oldpath + else + opts.save_directory = value + end + file.path = mp.command_native({"expand-path", opts.save_directory}) + print("Output directory changed to " .. opts.save_directory) + mp.osd_message("streamsave: directory changed to " .. opts.save_directory) +end + +local function label_override(value) + if value == "cycle" then + if opts.output_label == "increment" then + value = "range" + elseif opts.output_label == "range" then + value = "timestamp" + elseif opts.output_label == "timestamp" then + value = "overwrite" + elseif opts.output_label == "overwrite" then + value = "chapter" + else + value = "increment" + end + end + opts.output_label = value or opts.output_label + validate_opts() + print("File label changed to " .. opts.output_label) + mp.osd_message("streamsave: label changed to " .. 
opts.output_label) +end + +local function marks_override(value) + if not value or value == "no" then + opts.range_marks = false + if not get_chapters() then + mp.set_property_native("chapter-list", chapter_list) + end + ab_chapters = {} + print("Range marks disabled") + mp.osd_message("streamsave: range marks disabled") + elseif value == "yes" then + opts.range_marks = true + chapter_points() + print("Range marks enabled") + mp.osd_message("streamsave: range marks enabled") + else + msg.error("Invalid input '" .. value .. "'. Use yes or no.") + mp.osd_message("streamsave: invalid input; use yes or no") + end +end + +local function autostart_override(value) + if not value or value == "no" then + opts.autostart = false + print("Autostart disabled") + mp.osd_message("streamsave: autostart disabled") + elseif value == "yes" then + opts.autostart = true + print("Autostart enabled") + mp.osd_message("streamsave: autostart enabled") + else + msg.error("Invalid input '" .. value .. "'. Use yes or no.") + mp.osd_message("streamsave: invalid input; use yes or no") + return + end + observe_cache() +end + +local function autoend_override(value) + opts.autoend = value or opts.autoend + validate_opts() + cache.endsec = convert_time(opts.autoend) + observe_cache() + print("Autoend set to " .. opts.autoend) + mp.osd_message("streamsave: autoend set to " .. opts.autoend) +end + +local function hostchange_override(value) + local hostchange = opts.hostchange + value = value == "cycle" and (not opts.hostchange and "yes" or "no") or value + if not value or value == "no" then + opts.hostchange = false + print("Hostchange disabled") + mp.osd_message("streamsave: hostchange disabled") + elseif value == "yes" then + opts.hostchange = true + print("Hostchange enabled") + mp.osd_message("streamsave: hostchange enabled") + elseif value == "on_demand" then + opts.on_demand = not opts.on_demand + opts.hostchange = opts.on_demand or opts.hostchange + local status = opts.on_demand and "enabled" or "disabled" + print("Hostchange: On Demand " .. status) + mp.osd_message("streamsave: hostchange on_demand " .. status) + else + local allowed = "yes, no, cycle, or on_demand" + msg.error("Invalid input '" .. value .. "'. Use " .. allowed .. ".") + mp.osd_message("streamsave: invalid input; use " .. allowed) + return + end + if opts.hostchange ~= hostchange then + observe_tracks(opts.hostchange) + end +end + +local function quit_override(value) + opts.quit = value or opts.quit + validate_opts() + autoquit() + print("Quit set to " .. opts.quit) + mp.osd_message("streamsave: quit set to " .. opts.quit) +end + +local function piecewise_override(value) + if not value or value == "no" then + opts.piecewise = false + cache.part = 0 + print("Piecewise dumping disabled") + mp.osd_message("streamsave: piecewise dumping disabled") + elseif value == "yes" then + opts.piecewise = true + cache.endsec = convert_time(opts.autoend) + print("Piecewise dumping enabled") + mp.osd_message("streamsave: piecewise dumping enabled") + else + msg.error("Invalid input '" .. value .. "'. 
Use yes or no.") + mp.osd_message("streamsave: invalid input; use yes or no") + end +end + +local function packet_override(value) + local track_packets = opts.track_packets + if value == "cycle" then + value = not track_packets and "yes" or "no" + end + if not value or value == "no" then + opts.track_packets = false + print("Track packets disabled") + mp.osd_message("streamsave: track packets disabled") + elseif value == "yes" then + opts.track_packets = true + print("Track packets enabled") + mp.osd_message("streamsave: track packets enabled") + else + msg.error("Invalid input '" .. value .. "'. Use yes or no.") + mp.osd_message("streamsave: invalid input; use yes or no") + end + if opts.track_packets ~= track_packets then + packet_events(opts.track_packets) + end +end + +local function range_flip() + loop.a = mp.get_property_number("ab-loop-a") + loop.b = mp.get_property_number("ab-loop-b") + if (loop.a and loop.b) and (loop.a > loop.b) then + loop.a, loop.b = loop.b, loop.a + mp.set_property_number("ab-loop-a", loop.a) + mp.set_property_number("ab-loop-b", loop.b) + end +end + +local function loop_range() + local a_loop_osd = mp.get_property_osd("ab-loop-a") + local b_loop_osd = mp.get_property_osd("ab-loop-b") + loop.range = a_loop_osd .. " - " .. b_loop_osd + return loop.range +end + +local function set_name(label) + return file.path .. "/" .. file.title .. label .. file.ext +end + +local function increment_filename() + if set_name(-(file.inc or 1)) ~= file.name then + file.inc = 1 + file.name = set_name(-file.inc) + end + -- check if file exists + while utils.file_info(file.name) do + file.inc = file.inc + 1 + file.name = set_name(-file.inc) + end +end + +local function range_stamp(mode) + local file_range + if mode == "ab" then + file_range = "-[" .. loop_range():gsub(":", ".") .. "]" + elseif mode == "current" then + local file_pos = mp.get_property_osd("playback-time", "0") + file_range = "-[" .. 0 .. " - " .. file_pos:gsub(":", ".") .. "]" + else + -- range tag is incompatible with full dump, fallback to increments + increment_filename() + return + end + file.name = set_name(file_range) + -- check if file exists, append increments if so + local i = 1 + while utils.file_info(file.name) do + i = i + 1 + file.name = set_name(file_range .. -i) + end +end + +local function write_chapter(chapter) + get_chapters() + if chapter_list[chapter] or chapter == 0 then + segments[1] = { + ["start"] = chapter == 0 and 0 or chapter_list[chapter]["time"], + ["end"] = chapter_list[chapter + 1] + and chapter_list[chapter + 1]["time"] + or mp.get_property_number("duration", "no"), + ["title"] = chapter .. ". " .. (chapter ~= 0 + and chapter_list[chapter]["title"] or file.title) + } + print("Writing chapter " .. chapter .. " ....") + return true + else + msg.error("Chapter not found.") + end +end + +local function extract_segments(n) + for i = 1, n - 1 do + segments[i] = { + ["start"] = chapter_list[i]["time"], + ["end"] = chapter_list[i + 1]["time"], + ["title"] = i .. ". " .. (chapter_list[i]["title"] or file.title) + } + end + if chapter_list[1]["time"] ~= 0 then + table.insert(segments, 1, { + ["start"] = 0, + ["end"] = chapter_list[1]["time"], + ["title"] = "0. " .. file.title + }) + end + table.insert(segments, { + ["start"] = chapter_list[n]["time"], + ["end"] = mp.get_property_number("duration", "no"), + ["title"] = n .. ". " .. (chapter_list[n]["title"] or file.title) + }) + print("Writing out all " .. #segments .. 
" chapters to separate files ....") +end + +local function write_set(mode, file_name, file_pos, quiet) + local command = { + _flags = { + (not quiet or nil) and "osd-msg", + }, + filename = file_name, + } + if mode == "ab" then + command["name"] = "ab-loop-dump-cache" + elseif (mode == "chapter" or mode == "segments") and segments[1] then + command["name"] = "dump-cache" + command["start"] = segments[1]["start"] + command["end"] = segments[1]["end"] + table.remove(segments, 1) + else + command["name"] = "dump-cache" + command["start"] = 0 + command["end"] = file_pos or "no" + end + return command +end + +local function on_write_finish(cache_write, mode, file_name) + return function(success, _, command_error) + command_error = command_error and msg.error(command_error) + -- check if file is written + if utils.file_info(file_name) then + if success then + print("Finished writing cache to: " .. file_name) + else + msg.warn("Possibly broken file created at: " .. file_name) + end + else + msg.error("File not written.") + end + if loop.continuous and file.pending == 2 then + print("Dumping cache continuously to: " .. file.name) + end + file.pending = file.pending - 1 + -- fulfil any write requests now that the pending queue has been serviced + if next(segments) then + cache_write("segments", true) + elseif mode == "segments" then + mp.osd_message("Cache dumping successfully ended.") + end + if file.queue and next(file.queue) and not segments[1] then + cache_write(unpack(file.queue[1])) + table.remove(file.queue, 1) + end + end +end + +local function cache_write(mode, quiet, chapter) + if not (file.title and file.ext) then + return end + if file.pending == 2 + or segments[1] and file.pending > 0 and not loop.continuous + then + file.queue = file.queue or {} + -- honor extra write requests when pending queue is full + -- but limit number of outstanding write requests to be fulfilled + if #file.queue < 10 then + table.insert(file.queue, {mode, quiet, chapter}) + end + return end + range_flip() + -- set the output list for the chapter modes + if mode == "segments" and not segments[1] then + get_chapters() + local n = #chapter_list + if n > 0 then + extract_segments(n) + quiet = true + mp.osd_message("Cache dumping started.") + else + mode = "continuous" + end + elseif mode == "chapter" and not segments[1] then + chapter = chapter or mp.get_property_number("chapter", -1) + 1 + if not write_chapter(chapter) then + return + end + end + -- evaluate tagging conditions and set file name + if opts.output_label == "increment" then + increment_filename() + elseif opts.output_label == "range" then + range_stamp(mode) + elseif opts.output_label == "timestamp" then + file.name = set_name(-os.time()) + elseif opts.output_label == "overwrite" then + file.name = set_name("") + elseif opts.output_label == "chapter" then + if segments[1] then + file.name = file.path .. "/" .. segments[1]["title"] .. file.ext + else + increment_filename() + end + end + -- dump cache according to mode + local file_pos + file.pending = (file.pending or 0) + 1 + loop.continuous = mode == "continuous" + or mode == "ab" and loop.a and not loop.b + or segments[1] and segments[1]["end"] == "no" + if mode == "current" then + file_pos = mp.get_property_number("playback-time", 0) + elseif loop.continuous and file.pending == 1 then + print("Dumping cache continuously to: " .. 
file.name) + end + local commands = write_set(mode, file.name, file_pos, quiet) + local callback = on_write_finish(cache_write, mode, file.name) + file.writing = mp.command_native_async(commands, callback) + return true +end + +--[[ This command attempts to align the A-B loop points to keyframes. +Use align-cache if you want to know which range will likely be dumped. +Keep in mind this changes the A-B loop points you've set. +This is sometimes inaccurate. Calling align_cache() again will reset the points +to their initial values. ]] +local function align_cache() + if not loop.aligned then + range_flip() + loop.a_revert = loop.a + loop.b_revert = loop.b + mp.command("ab-loop-align-cache") + loop.aligned = true + print("Adjusted range: " .. loop_range()) + else + mp.set_property_native("ab-loop-a", loop.a_revert) + mp.set_property_native("ab-loop-b", loop.b_revert) + loop.aligned = false + print("Loop points reverted to: " .. loop_range()) + mp.osd_message("A-B loop: " .. loop.range) + end +end + +function get_chapters() + local current_chapters = mp.get_property_native("chapter-list", {}) + local updated -- do the stored chapters reflect the current chapters ? + -- make sure master list is up to date + if not current_chapters[1] or + not string.match(current_chapters[1]["title"], "^[AB] loop point$") + then + chapter_list = current_chapters + updated = true + -- if a script has added chapters after A-B points are set then + -- add those to the original chapter list + elseif #current_chapters > #ab_chapters then + for i = #ab_chapters + 1, #current_chapters do + table.insert(chapter_list, current_chapters[i]) + end + end + return updated +end + +-- creates chapters at A-B loop points +function chapter_points() + if not opts.range_marks then + return end + local updated = get_chapters() + ab_chapters = {} + -- restore original chapter list if A-B points are cleared + -- otherwise set chapters to A-B points + range_flip() + if not loop.a and not loop.b then + if not updated then + mp.set_property_native("chapter-list", chapter_list) + end + else + if loop.a then + ab_chapters[1] = { + title = "A loop point", + time = loop.a + } + end + if loop.b and not loop.a then + ab_chapters[1] = { + title = "B loop point", + time = loop.b + } + elseif loop.b then + ab_chapters[2] = { + title = "B loop point", + time = loop.b + } + end + mp.set_property_native("chapter-list", ab_chapters) + end +end + +-- stops writing the file +local function stop() + mp.abort_async_command(file.writing or {}) +end + +function reset() + if cache.observed or cache.dumped then + stop() + mp.unobserve_property(automatic) + mp.unobserve_property(get_seekable_cache) + cache.endsec = convert_time(opts.autoend) + cache.observed = false + end + cache.part = 0 + cache.dumped = false + cache.switch = true +end +reset() + +-- reload on demand (hostchange) +local function reload() + reset() + observe_tracks() + msg.warn("Reloading stream due to host change.") + mp.command("playlist-play-index current") +end + +local function stabilize() + if mp.get_property_number("demuxer-cache-time", 0) > 1500 then + reload() + end +end + +local function suspend() + if not track.suspend then + track.suspend = mp.add_timeout(25, stabilize) + else + track.suspend:resume() + end +end + +function get_seekable_cache(prop, range_check) + -- use the seekable part of the cache for more accurate timestamps + local cache_state = mp.get_property_native("demuxer-cache-state", {}) + local seekable_ranges = cache_state["seekable-ranges"] or {} + if prop then + 
if range_check ~= false and + (#seekable_ranges == 0 + or not cache_state["cache-end"]) + then + reset() + cache.use = opts.piecewise + observe_cache() + end + return + end + local seekable_ends = {0} + for i, range in ipairs(seekable_ranges) do + seekable_ends[i] = range["end"] or 0 + end + return math.max(0, unpack(seekable_ends)) +end + +-- seamlessly reload on inserts (hostchange) +local function seamless(_, cache_state) + cache_state = cache_state or {} + local reader = math.abs(cache_state["reader-pts"] or 0) + local cache_duration = math.abs(cache_state["cache-duration"] or cache.prior) + -- wait until playback of the loaded cache has practically ended + -- or there's a timestamp reset / position shift + if reader >= cache.seekend - 0.25 + or cache.prior - cache_duration > 3000 + or cache_state["underrun"] + then + reload() + track.restart = track.restart or mp.add_timeout(300, function() end) + track.restart:resume() + end +end + +-- detect stream switches (hostchange) +local function detect() + local eq = true + local t = { + vid = mp.get_property_number("current-tracks/video/id", 0), + aid = mp.get_property_number("current-tracks/audio/id", 0), + sid = mp.get_property_number("current-tracks/sub/id", 0) + } + for k, v in pairs(t) do + eq = track[k] == v and eq + track[k] = v + end + -- do not initiate a reload process if the track ids do not match + -- or the track loading suspension interval is active + if not eq then + return + end + if track.suspend:is_enabled() then + stabilize() + return + end + -- bifurcate + if track.restart and track.restart:is_enabled() then + track.restart:kill() + reload() + elseif opts.on_demand then + reload() + else + -- watch the cache state outside of the interval + -- and use it to decide when to reload + reset() + observe_tracks(false) + cache.observed = true + cache.prior = math.abs(mp.get_property_number("demuxer-cache-duration", 4E3)) + cache.seekend = get_seekable_cache() + mp.observe_property("demuxer-cache-state", "native", seamless) + end +end + +function automatic(_, cache_time) + if not cache_time then + reset() + cache.use = opts.piecewise + observe_cache() + return + end + -- cache write according to automatic options + if opts.autostart and not cache.dumped + and (not cache.endsec or cache_time < cache.endsec + or opts.piecewise) + then + if opts.piecewise and cache.part ~= 0 then + cache.dumped = cache_write("ab") + else + cache.dumped = cache_write("continuous", opts.hostchange) + -- update the piece time if there's a track/seeking reset + cache.part = cache.use and cache.dumped and cache_time or 0 + cache.use = cache.use and cache.part == 0 + end + end + -- the seekable ranges update slowly, which is why they're used to check + -- against switches for increased certainty, but this means the switch properties + -- should be watched only when the ranges exist + if cache.switch and get_seekable_cache() ~= 0 then + cache.switch = false + mp.observe_property("current-tracks/audio/id", "number", get_seekable_cache) + mp.observe_property("current-tracks/video/id", "number", get_seekable_cache) + mp.observe_property("seeking", "bool", get_seekable_cache) + end + -- unobserve cache time if not needed + if cache.dumped and not cache.switch and not cache.endsec then + mp.unobserve_property(automatic) + cache.observed = false + return + end + -- stop cache dump + if cache.endsec and cache.dumped and + cache_time - cache.part >= cache.endsec + then + if opts.piecewise then + cache.part = get_seekable_cache() + 
mp.set_property_number("ab-loop-a", cache.part) + mp.set_property("ab-loop-b", "no") + -- try and make the next piece start on the final keyframe of this piece + loop.aligned = false + align_cache() + cache.dumped = false + else + cache.endsec = nil + end + stop() + end +end + +function autoquit() + if opts.quit == "no" then + if file.quit_timer then + file.quit_timer:kill() + end + elseif not file.quit_timer then + file.quit_timer = mp.add_timeout(file.quitsec, + function() + stop() + mp.command("quit") + print("Quit after " .. opts.quit) + end) + else + file.quit_timer["timeout"] = file.quitsec + file.quit_timer:kill() + file.quit_timer:resume() + end +end +autoquit() + +local function fragment_chapters(packets, cache_time, stamp) + local no_loop_chapters = get_chapters() + local title = string.format("%s segment(s) dropped [%s]", packets, stamp) + for _, chapter in ipairs(chapter_list) do + if chapter["title"] == title then + cache.packets[stamp]:kill() + cache.packets[stamp] = nil + return + end + end + table.insert(chapter_list, { + title = title, + time = cache_time + }) + if no_loop_chapters then + mp.set_property_native("chapter-list", chapter_list) + end +end + +local function packet_handler(t) + if not opts.track_packets then -- second layer in case unregistering is async + return + end + if t.prefix == "ffmpeg/demuxer" then + local packets = t.text:match("^hls: skipping (%d+)") + if packets then + local cache_time = mp.get_property_number("demuxer-cache-time") + if cache_time then + -- ensure the chapters set + cache.id = cache.id + 1 + local stamp = string.format("%#x", cache.id) + cache.packets[stamp] = mp.add_periodic_timer(3, + function() + fragment_chapters(packets, cache_time, stamp) + end + ) + end + end + end +end + +function packet_events(state) + if not state then + mp.unregister_event(packet_handler) + for _, timer in pairs(cache.packets) do + timer:kill() + end + cache.id = nil + cache.packets = nil + local no_loop_chapters = get_chapters() + local n = #chapter_list + for i = n, 1, -1 do + if chapter_list[i]["title"]:match("%d+ segment%(s%) dropped") then + table.remove(chapter_list, i) + end + end + if no_loop_chapters and n > #chapter_list then + mp.set_property_native("chapter-list", chapter_list) + end + else + cache.id = 0 + cache.packets = {} + mp.enable_messages("warn") + mp.register_event("log-message", packet_handler) + end +end +if opts.track_packets then + packet_events(true) +end + +-- cache time observation switch for runtime changes +function observe_cache() + local network = mp.get_property_bool("demuxer-via-network") + local obs_xyz = opts.autostart or cache.endsec + if not cache.observed and obs_xyz and network then + cache.dumped = (file.pending or 0) ~= 0 + mp.observe_property("demuxer-cache-time", "number", automatic) + cache.observed = true + elseif (cache.observed or cache.dumped) and (not obs_xyz or not network) then + reset() + end +end + +-- track-list observation switch for runtime changes +function observe_tracks(state) + if state then + suspend() + mp.observe_property("track-list", "native", detect) + elseif state == false then + mp.unobserve_property(detect) + mp.unobserve_property(seamless) + cache.prior = nil + local timer = track.restart and track.restart:kill() + -- reset the state on manual reloads + elseif cache.prior then + observe_tracks(false) + observe_tracks(true) + elseif opts.hostchange then + suspend() + end +end + +if opts.hostchange then + observe_tracks(true) +end + +mp.observe_property("media-title", "string", 
title_change) + +--[[ video and audio formats observed in order to handle track changes +useful if e.g. --script-opts=ytdl_hook-all_formats=yes +or script-opts=ytdl_hook-use_manifests=yes ]] +mp.observe_property("audio-codec-name", "string", container) +mp.observe_property("video-format", "string", container) +mp.observe_property("file-format", "string", container) + +--[[ Loading chapters can be slow especially if they're passed from +an external file, so make sure existing chapters are not overwritten +by observing A-B loop changes only after the file is loaded. ]] +local function on_file_load() + if file.loaded then + chapter_points() + else + mp.observe_property("ab-loop-a", "native", chapter_points) + mp.observe_property("ab-loop-b", "native", chapter_points) + file.loaded = true + end +end +mp.register_event("file-loaded", on_file_load) + +mp.register_script_message("streamsave-mode", mode_switch) +mp.register_script_message("streamsave-title", title_override) +mp.register_script_message("streamsave-extension", format_override) +mp.register_script_message("streamsave-path", path_override) +mp.register_script_message("streamsave-label", label_override) +mp.register_script_message("streamsave-marks", marks_override) +mp.register_script_message("streamsave-autostart", autostart_override) +mp.register_script_message("streamsave-autoend", autoend_override) +mp.register_script_message("streamsave-hostchange", hostchange_override) +mp.register_script_message("streamsave-quit", quit_override) +mp.register_script_message("streamsave-piecewise", piecewise_override) +mp.register_script_message("streamsave-packets", packet_override) +mp.register_script_message("streamsave-chapter", + function(chapter) + cache_write("chapter", _, tonumber(chapter)) + end +) + +mp.add_key_binding("Alt+z", "mode-switch", function() mode_switch("cycle") end) +mp.add_key_binding("Ctrl+x", "stop-cache-write", stop) +mp.add_key_binding("Alt+x", "align-cache", align_cache) +mp.add_key_binding("Ctrl+z", "cache-write", + function() cache_write(opts.dump_mode) + end) diff --git a/src/settings.py b/src/settings.py new file mode 100644 index 0000000..7fd9b2a --- /dev/null +++ b/src/settings.py @@ -0,0 +1,104 @@ +import json +import os +from os.path import exists +from loguru import logger +from pyfzf.pyfzf import FzfPrompt + +fzf = FzfPrompt() + +conf_file = 'config.json' + +default_conf = { + 'instance': 'fw.ponychord.rocks', + 'public_list_instances_extended': + { + "open.audio": None, + "audio.liberta.vip": None, + "dance.with.tewi.and.reisen": None, + "tanukitunes.com": None, + "funkwhale.juniorjpdj.pl": None, + "audio.securetown.in.ua": None, + "funkwhale.thurk.org": None, + "buzzworkers.com": None, + "soundship.de": None, + "funkwhale.kameha.click": None, + "music.chosto.me": None, + "zik.goe.land": None, + "music.humanoids.be": None, + "music.hempton.us": None, + "mizik.o-k-i.net": None, + "klh.radiolivre.org": None, + "hudba.feildel.fr": None, + "funk.deko.cloud": None, + "audio.graz.social": None, + "funkwhale.desmu.fr": None, + "listen.knsm.cc": None, + "funkwhale.gegeweb.eu": None, + }, + 'automatic_fetch_new_instances': False, + 'enable_server_transcoding': False, + 'external_transcoder_http_proxy_path': "", + 'share_to_fediverse_token': "", + 'share_to_fediverse_instance': "", + 'prefetch_playlist': True, + 'enable_persistent_cache': False, + 'mpv_volume': 100, + 'show_like_button': True, + 'show_artist_name_in_albums': False, + 'termux_handle_track_switch_by_volume': False +} + + +def 
set_defaults(corrected_config=None): + conf_rewrite = default_conf + if corrected_config: + conf_rewrite = corrected_config + descriptor = os.open( + path=conf_file, + flags=( + os.O_WRONLY # access mode: write only + | os.O_CREAT # create if not exists + | os.O_TRUNC # truncate the file to zero + ), + mode=0o600) + with open(descriptor, 'wt') as f: + f.write(json.dumps(conf_rewrite, indent=4)) + + +def check_config(): + '''Check config and remove or add keys if not found in default config''' + with open(conf_file, 'rt') as f: + loaded_config = json.loads(f.read()) + correct_conf = {} + for k, v in loaded_config.items(): + if k in default_conf.keys(): + correct_conf[k] = v + else: + logger.warning(f'{k} from config will be removed. Value: {v}') + for k, v in default_conf.items(): + if k not in correct_conf.keys(): + correct_conf[k] = v + logger.warning(f'{k} added in config. Value: {v}') + set_defaults(correct_conf) + return correct_conf + + +if not exists(conf_file): + set_defaults() +else: + check_config() + + +def get_config(key): + '''return value from config by key''' + with open(conf_file, 'rt') as f: + loaded_config = json.loads(f.read()) + return loaded_config.get(key) + + +def set_config(key, value): + '''set value new value config by key''' + read_conf = check_config() + with open(conf_file, 'wt') as f: + read_conf[key] = value + f.write(json.dumps(read_conf, indent=4)) diff --git a/src/utils.py b/src/utils.py new file mode 100644 index 0000000..0023f19 --- /dev/null +++ b/src/utils.py @@ -0,0 +1,85 @@ +import src.fw_api + +import os +import sys +import shutil +from urllib.parse import unquote + + +def get_remote_file_name(url): + '''This function return filename by content-disposition header''' + r = src.fw_api.current_instance.s.head(url) + content_dispos = r.headers.get('content-disposition') + if content_dispos.startswith('attachment; filename*=UTF-8\'\''): + return unquote(content_dispos.split('attachment; filename*=UTF-8\'\'')[-1]) + + +def download_track(url, name=None): + if not url.startswith('http'): + copy_from_cache(url) + return + + url = url.split('?')[0] # Stripe all params from url + r = src.fw_api.current_instance.s.get(url, stream=True) + if not name: + name = get_remote_file_name(url) + if not name: + name = url.split(r'/')[-1] + + with open(name.replace('/', '_'), 'wb') as f: + print(f"Downloading {name}") + total_length = r.headers.get('content-length') + + if total_length is None: # no content length header + f.write(r.content) + else: + dl = 0 + total_length = int(total_length) + for data in r.iter_content(chunk_size=4096): + dl += len(data) + f.write(data) + done = int(50 * dl / total_length) + # base progress bar + sys.stdout.write("\r[%s%s]" % ('=' * done, ' ' * (50-done))) + sys.stdout.flush() + return name + + +def copy_from_cache(url_file): + uuid = url_file.split(r'/')[-1] + original_name = get_remote_file_name(f'https://{src.fw_api.current_instance.instance}/api/v1/listen/{uuid}') + shutil.copyfile(url_file, original_name) + + +def print_there(x, y, text): + '''Print at position x, y caption in terminal (Linux only)''' + sys.stdout.write("\x1b7\x1b[%d;%df%s\x1b8" % (x, y, text)) + sys.stdout.flush() + + +def track_info_output(track): + output = [] + for k, v in track.items(): + if k not in ('cover', 'uploads', 'listen_url', 'mbid', 'id', 'is_playable') and v is not None and v != []: + if isinstance(v, dict): + for i in ('title', 'name', 'fid'): + if v.get(i): + val_override = v.get(i) + output.append(f'{k}: {val_override}') + else: + 
output.append(f'{k}: {v}') + output.append('Related Libraries:') + try: + assigned_libs = src.fw_api.assigned_libraries_on_track(track['id'])['results'] + for i in assigned_libs: + for prop in ('fid', 'name', 'description', 'creation_date'): + output.append(i.get(prop)) + except Exception: + output.append('Failed to get related libraries') + output = '\n'.join(output) + os.system(f'less <