
Commit

Merge development into master
github-actions[bot] authored Oct 2, 2024
2 parents ad80ac4 + 7000d2a commit 5c56866
Showing 36 changed files with 394 additions and 2,692 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -62,6 +62,7 @@ If you need something that is not already part of Bazarr, feel free to create a
- Karagarga.in
- Ktuvit (Get `hashed_password` using method described [here](https://github.com/XBMCil/service.subtitles.ktuvit))
- LegendasDivx
- Legendas.net
- Napiprojekt
- Napisy24
- Nekur
14 changes: 14 additions & 0 deletions bazarr/api/system/status.py
@@ -6,10 +6,12 @@

from flask_restx import Resource, Namespace
from tzlocal import get_localzone_name
from alembic.migration import MigrationContext

from radarr.info import get_radarr_info
from sonarr.info import get_sonarr_info
from app.get_args import args
from app.database import engine, database, select
from init import startTime

from ..utils import authenticate
@@ -34,13 +36,25 @@ def get(self):
timezone = "Exception while getting time zone name."
logging.exception("BAZARR is unable to get configured time zone name.")

try:
database_version = ".".join([str(x) for x in engine.dialect.server_version_info])
except Exception:
database_version = ""

try:
database_migration = MigrationContext.configure(engine.connect()).get_current_revision()
except Exception:
database_migration = "unknown"

system_status = {}
system_status.update({'bazarr_version': os.environ["BAZARR_VERSION"]})
system_status.update({'package_version': package_version})
system_status.update({'sonarr_version': get_sonarr_info.version()})
system_status.update({'radarr_version': get_radarr_info.version()})
system_status.update({'operating_system': platform.platform()})
system_status.update({'python_version': platform.python_version()})
system_status.update({'database_engine': f'{engine.dialect.name.capitalize()} {database_version}'})
system_status.update({'database_migration': database_migration})
system_status.update({'bazarr_directory': os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(__file__))))})
system_status.update({'bazarr_config_directory': args.config_dir})
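
A minimal sketch (not part of the commit) of how the two new status fields can be obtained with the public SQLAlchemy and Alembic APIs; the database URL is a placeholder, and server_version_info may be empty on some dialect/driver combinations, which is why the diff wraps both lookups in try/except.

from sqlalchemy import create_engine
from alembic.migration import MigrationContext

engine = create_engine("sqlite:///bazarr.db")  # placeholder URL

with engine.connect() as connection:
    # e.g. (3, 45, 1) for SQLite or (10, 11, 6) for MariaDB; may be None on some drivers
    version_info = engine.dialect.server_version_info or ()
    database_version = ".".join(str(x) for x in version_info)

    # Alembic revision id currently stamped in the database, or None if never stamped
    database_migration = MigrationContext.configure(connection).get_current_revision()

print(f"{engine.dialect.name.capitalize()} {database_version}", database_migration)
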
29 changes: 29 additions & 0 deletions bazarr/app/database.py
@@ -528,3 +528,32 @@ def upgrade_languages_profile_hi_values():
.values({"items": json.dumps(items)})
.where(TableLanguagesProfiles.profileId == languages_profile.profileId)
)


def fix_languages_profiles_with_duplicate_ids():
languages_profiles = database.execute(
select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.items, TableLanguagesProfiles.cutoff)).all()
for languages_profile in languages_profiles:
if languages_profile.cutoff:
# ignore profiles that have a cutoff set
continue
languages_profile_ids = []
languages_profile_has_duplicate = False
languages_profile_items = json.loads(languages_profile.items)
for items in languages_profile_items:
if items['id'] in languages_profile_ids:
languages_profile_has_duplicate = True
break
else:
languages_profile_ids.append(items['id'])

if languages_profile_has_duplicate:
item_id = 0
for items in languages_profile_items:
item_id += 1
items['id'] = item_id
database.execute(
update(TableLanguagesProfiles)
.values({"items": json.dumps(languages_profile_items)})
.where(TableLanguagesProfiles.profileId == languages_profile.profileId)
)
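
A small illustration (assumed data, not from the commit) of what this migration does to a profile whose items share an id and that has no cutoff set: every item is simply renumbered sequentially, mirroring the loop in fix_languages_profiles_with_duplicate_ids().

import json

items = json.loads('[{"id": 1, "language": "en"}, {"id": 1, "language": "es"}, {"id": 2, "language": "fr"}]')

for new_id, item in enumerate(items, start=1):
    item["id"] = new_id  # ids become 1, 2, 3

print(json.dumps(items))
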
7 changes: 6 additions & 1 deletion bazarr/app/logger.py
@@ -62,7 +62,7 @@ def filter(self, record):
# no filtering in debug mode or if originating from us
return True

if record.level != loggin.ERROR:
if record.levelno < logging.ERROR:
return False

unwantedMessages = [
@@ -172,9 +172,14 @@ def configure_logging(debug=False):
logging.getLogger("rebulk").setLevel(logging.WARNING)
logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)

def empty_file(filename):
# Open the log file in write mode to clear its contents
with open(filename, 'w'):
pass # Just opening and closing the file will clear it

def empty_log():
fh.doRollover()
empty_file(get_log_file_path())
logging.info('BAZARR Log file emptied')


2 changes: 2 additions & 0 deletions bazarr/languages/custom_lang.py
@@ -159,6 +159,8 @@ class ChineseTraditional(CustomLanguage):
)
_extensions_hi = (
".cht.hi", ".tc.hi", ".zht.hi", "hant.hi", ".big5.hi", "繁體中文.hi", "雙語.hi", ".zh-tw.hi",
".cht.cc", ".tc.cc", ".zht.cc", "hant.cc", ".big5.cc", "繁體中文.cc", "雙語.cc", ".zh-tw.cc",
".cht.sdh", ".tc.sdh", ".zht.sdh", "hant.sdh", ".big5.sdh", "繁體中文.sdh", "雙語.sdh", ".zh-tw.sdh",
)
_extensions_fuzzy = ("繁", "雙語")
_extensions_disamb_fuzzy = ("简", "双语")
4 changes: 3 additions & 1 deletion bazarr/main.py
@@ -35,7 +35,8 @@
# there's missing embedded packages after a commit
check_if_new_update()

from app.database import System, database, update, migrate_db, create_db_revision, upgrade_languages_profile_hi_values # noqa E402
from app.database import (System, database, update, migrate_db, create_db_revision, upgrade_languages_profile_hi_values,
fix_languages_profiles_with_duplicate_ids) # noqa E402
from app.notifier import update_notifier # noqa E402
from languages.get_languages import load_language_in_db # noqa E402
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402
@@ -50,6 +51,7 @@
else:
migrate_db(app)
upgrade_languages_profile_hi_values()
fix_languages_profiles_with_duplicate_ids()

configure_proxy_func()

4 changes: 3 additions & 1 deletion bazarr/subtitles/refiners/anidb.py
@@ -218,10 +218,12 @@ def refine_anidb_ids(video):
)

if not anidb_series_id:
logger.error(f'Could not find anime series {video.series}')
return video

logger.debug(f'AniDB refinement identified {video.series} as {anidb_series_id}.')

anidb_episode_id = None

if anidb_client.has_api_credentials:
if anidb_client.is_throttled:
logger.warning(f'API daily limit reached. Skipping episode ID refinement for {video.series}')
23 changes: 15 additions & 8 deletions bazarr/subtitles/tools/delete.py
@@ -36,40 +36,47 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
language_log += ':forced'
language_string += ' forced'

if media_type == 'series':
pr = path_mappings.path_replace
prr = path_mappings.path_replace_reverse
else:
pr = path_mappings.path_replace_movie
prr = path_mappings.path_replace_reverse_movie

result = ProcessSubtitlesResult(message=f"{language_string} subtitles deleted from disk.",
reversed_path=path_mappings.path_replace_reverse(media_path),
reversed_path=prr(media_path),
downloaded_language_code2=language_log,
downloaded_provider=None,
score=None,
forced=None,
subtitle_id=None,
reversed_subtitles_path=path_mappings.path_replace_reverse(subtitles_path),
reversed_subtitles_path=prr(subtitles_path),
hearing_impaired=None)

if media_type == 'series':
try:
os.remove(path_mappings.path_replace(subtitles_path))
os.remove(pr(subtitles_path))
except OSError:
logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
store_subtitles(prr(media_path), media_path)
return False
else:
history_log(0, sonarr_series_id, sonarr_episode_id, result)
store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
store_subtitles(prr(media_path), media_path)
notify_sonarr(sonarr_series_id)
event_stream(type='series', action='update', payload=sonarr_series_id)
event_stream(type='episode-wanted', action='update', payload=sonarr_episode_id)
return True
else:
try:
os.remove(path_mappings.path_replace_movie(subtitles_path))
os.remove(pr(subtitles_path))
except OSError:
logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
store_subtitles_movie(prr(media_path), media_path)
return False
else:
history_log_movie(0, radarr_id, result)
store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
store_subtitles_movie(prr(media_path), media_path)
notify_radarr(radarr_id)
event_stream(type='movie-wanted', action='update', payload=radarr_id)
return True
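
The same refactor appears in subsyncer.py and translate.py below: the series or movie path-mapping helpers are chosen once and reused, rather than picking (and sometimes mismatching) the variant at every call site. A stripped-down sketch of the pattern, assuming the helper names shown in the diff and 'movie' as the non-series media type:

from utilities.path_mappings import path_mappings

def pick_path_mappers(media_type):
    # return (forward, reverse) path mappers for the given media type
    if media_type == 'series':
        return path_mappings.path_replace, path_mappings.path_replace_reverse
    return path_mappings.path_replace_movie, path_mappings.path_replace_reverse_movie

pr, prr = pick_path_mappers('movie')

Centralising the choice avoids the kind of mismatch the subsyncer hunk below fixes, where the series reverse mapper had been applied to movie paths as well.
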
9 changes: 7 additions & 2 deletions bazarr/subtitles/tools/subsyncer.py
@@ -112,14 +112,19 @@ def sync(self, video_path, srt_path, srt_lang, hi, forced,
f"{offset_seconds} seconds and a framerate scale factor of "
f"{f'{framerate_scale_factor:.2f}'}.")

if sonarr_series_id:
prr = path_mappings.path_replace_reverse
else:
prr = path_mappings.path_replace_reverse_movie

result = ProcessSubtitlesResult(message=message,
reversed_path=path_mappings.path_replace_reverse(self.reference),
reversed_path=prr(self.reference),
downloaded_language_code2=srt_lang,
downloaded_provider=None,
score=None,
forced=forced,
subtitle_id=None,
reversed_subtitles_path=srt_path,
reversed_subtitles_path=prr(self.srtin),
hearing_impaired=hi)

if sonarr_episode_id:
20 changes: 16 additions & 4 deletions bazarr/subtitles/tools/translate.py
@@ -16,6 +16,7 @@
from sonarr.history import history_log
from subtitles.processing import ProcessSubtitlesResult
from app.event_handler import show_progress, hide_progress
from utilities.path_mappings import path_mappings


def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, forced, hi, media_type, sonarr_series_id,
@@ -27,9 +28,15 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
}

to_lang = alpha3_from_alpha2(to_lang)
lang_obj = CustomLanguage.from_value(to_lang, "alpha3")
if not lang_obj:
try:
lang_obj = Language(to_lang)
except ValueError:
custom_lang_obj = CustomLanguage.from_value(to_lang, "alpha3")
if custom_lang_obj:
lang_obj = CustomLanguage.subzero_language(custom_lang_obj)
else:
logging.debug(f'BAZARR is unable to translate to {to_lang} for this subtitles: {source_srt_file}')
return False
if forced:
lang_obj = Language.rebuild(lang_obj, forced=True)
if hi:
@@ -104,14 +111,19 @@ def translate_line(id, line, attempt):

message = f"{language_from_alpha2(from_lang)} subtitles translated to {language_from_alpha3(to_lang)}."

if media_type == 'series':
prr = path_mappings.path_replace_reverse
else:
prr = path_mappings.path_replace_reverse_movie

result = ProcessSubtitlesResult(message=message,
reversed_path=video_path,
reversed_path=prr(video_path),
downloaded_language_code2=to_lang,
downloaded_provider=None,
score=None,
forced=forced,
subtitle_id=None,
reversed_subtitles_path=dest_srt_file,
reversed_subtitles_path=prr(dest_srt_file),
hearing_impaired=hi)

if media_type == 'series':
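
A sketch of the new target-language resolution order (assumptions: Language here is the subzero/babelfish-style class this module already uses, and 'zht' is one of Bazarr's custom alpha3 codes): standard alpha3 codes resolve directly, anything else falls back to CustomLanguage, and an unknown code aborts the translation.

from subzero.language import Language
from languages.custom_lang import CustomLanguage

def resolve_target_language(alpha3):
    try:
        return Language(alpha3)  # standard codes, e.g. 'fra', 'deu'
    except ValueError:
        custom = CustomLanguage.from_value(alpha3, "alpha3")
        if custom:
            return CustomLanguage.subzero_language(custom)  # custom codes, e.g. 'zht'
        return None  # caller logs the failure and returns False, as in the diff
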
21 changes: 20 additions & 1 deletion bazarr/utilities/health.py
@@ -1,7 +1,9 @@
# coding=utf-8

import json

from app.config import settings
from app.database import TableShowsRootfolder, TableMoviesRootfolder, database, select
from app.database import TableShowsRootfolder, TableMoviesRootfolder, TableLanguagesProfiles, database, select
from app.event_handler import event_stream
from .path_mappings import path_mappings
from sonarr.rootfolder import check_sonarr_rootfolder
@@ -47,4 +49,21 @@ def get_health_issues():
health_issues.append({'object': path_mappings.path_replace_movie(item.path),
'issue': item.error})

# get languages profiles duplicate ids issues when there's a cutoff set
languages_profiles = database.execute(
select(TableLanguagesProfiles.items, TableLanguagesProfiles.name, TableLanguagesProfiles.cutoff)).all()
for languages_profile in languages_profiles:
if not languages_profile.cutoff:
# ignore profiles that don't have a cutoff set
continue
languages_profile_ids = []
for items in json.loads(languages_profile.items):
if items['id'] in languages_profile_ids:
health_issues.append({'object': languages_profile.name,
'issue': 'This languages profile has duplicate IDs. You need to edit this profile'
' and make sure to select the proper cutoff if required.'})
break
else:
languages_profile_ids.append(items['id'])

return health_issues
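
Why profiles with a cutoff are only flagged here instead of being auto-renumbered by fix_languages_profiles_with_duplicate_ids(): the cutoff presumably refers to an item id, so silently renumbering could change which entry the cutoff points at. A hypothetical example (data assumed):

profile_items = [
    {"id": 1, "language": "en"},
    {"id": 1, "language": "es"},  # duplicate id
    {"id": 2, "language": "fr"},
]
cutoff = 2  # points at French today; after renumbering to 1, 2, 3 it would point at Spanish
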
33 changes: 23 additions & 10 deletions custom_libs/subliminal_patch/providers/subdivx.py
@@ -7,15 +7,12 @@
import re

from requests import Session
from subliminal import __short_version__
from subliminal.video import Episode
from subliminal.video import Movie
from subliminal import ProviderError
from subliminal.video import Episode, Movie
from subliminal_patch.exceptions import APIThrottled
from subliminal_patch.providers import Provider
from subliminal_patch.providers.utils import get_archive_from_bytes
from subliminal_patch.providers.utils import get_subtitle_from_archive
from subliminal_patch.providers.utils import update_matches
from subliminal_patch.providers.utils import USER_AGENTS
from subliminal_patch.providers.utils import (get_archive_from_bytes, get_subtitle_from_archive, update_matches,
USER_AGENTS)
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language

@@ -111,7 +108,6 @@ def __init__(self):
self.session = Session()

def initialize(self):
# self.session.headers["User-Agent"] = f"Subliminal/{__short_version__}"
self.session.headers["User-Agent"] = random.choice(USER_AGENTS)
self.session.cookies.update({"iduser_cookie": _IDUSER_COOKIE})

@@ -166,9 +162,26 @@ def _query(self, video, languages):
return subtitles

def _query_results(self, query, video):
token_link = f"{_SERVER_URL}/inc/gt.php?gt=1"

token_response = self.session.get(token_link, timeout=30)

if token_response.status_code != 200:
raise ProviderError("Unable to obtain a token")

try:
token_response_json = token_response.json()
except JSONDecodeError:
raise ProviderError("Unable to parse JSON response")
else:
if 'token' in token_response_json and token_response_json['token']:
token = token_response_json['token']
else:
raise ProviderError("Response doesn't include a token")

search_link = f"{_SERVER_URL}/inc/ajax.php"

payload = {"tabla": "resultados", "filtros": "", "buscar": query}
payload = {"tabla": "resultados", "filtros": "", "buscar393": query, "token": token}

logger.debug("Query: %s", query)

@@ -197,7 +210,7 @@ def _query_results(self, query, video):
# Iterate over each subtitle in the response
for item in data["aaData"]:
id = item["id"]
page_link = f"{_SERVER_URL}/descargar.php?id={id}"
page_link = f"{_SERVER_URL}/{id}"
title = _clean_title(item["titulo"])
description = item["descripcion"]
uploader = item["nick"]
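
A standalone sketch of the new two-step Subdivx search flow (the gt.php and ajax.php endpoints and the buscar393/token fields come from the diff; the base URL and the form-encoded POST are assumptions based on the existing provider code):

from requests import Session

SERVER_URL = "https://www.subdivx.com"  # assumed value of _SERVER_URL
session = Session()

# step 1: fetch a short-lived token
token_response = session.get(f"{SERVER_URL}/inc/gt.php?gt=1", timeout=30)
token_response.raise_for_status()
token = token_response.json().get("token")

# step 2: send the search with the token attached to the payload
payload = {"tabla": "resultados", "filtros": "", "buscar393": "Breaking Bad S01E01", "token": token}
response = session.post(f"{SERVER_URL}/inc/ajax.php", data=payload, timeout=30)
results = response.json().get("aaData", [])
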
