diff --git a/pyproject.toml b/pyproject.toml index 38eb436f8c..1b959f4e44 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -148,7 +148,7 @@ ignore_missing_imports = true [tool.ruff.lint] select = ["D"] -ignore = ["D105", "D203", "D211", "D212", "D213", "D214", "D401", "D404", "D406", "D407", "D412", "D413", "D416", "D417"] +ignore = ["D105", "D211", "D213", "D214", "D401", "D404", "D406", "D407", "D412", "D413", "D416", "D417"] [tool.ruff.lint.per-file-ignores] "pywikibot/families/*" = ["D102"] diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py index 2889125969..7cd07b5855 100644 --- a/pywikibot/__init__.py +++ b/pywikibot/__init__.py @@ -272,8 +272,7 @@ def Site(code: str | None = None, # noqa: N802 def showDiff(oldtext: str, # noqa: N802 newtext: str, context: int = 0) -> None: - """ - Output a string showing the differences between oldtext and newtext. + """Output a string showing the differences between oldtext and newtext. The differences are highlighted (only on compatible systems) to show which changes were made. diff --git a/pywikibot/_wbtypes.py b/pywikibot/_wbtypes.py index 66374df215..ee06fcc126 100644 --- a/pywikibot/_wbtypes.py +++ b/pywikibot/_wbtypes.py @@ -101,8 +101,7 @@ def __init__(self, lat: float, lon: float, alt: float | None = None, site: DataSite | None = None, globe_item: ItemPageStrNoneType = None, primary: bool = False) -> None: - """ - Represent a geo coordinate. + """Represent a geo coordinate. :param lat: Latitude :param lon: Longitude @@ -150,8 +149,7 @@ def entity(self) -> str: return self._entity def toWikibase(self) -> dict[str, Any]: - """ - Export the data to a JSON object for the Wikibase API. + """Export the data to a JSON object for the Wikibase API. FIXME: Should this be in the DataSite object? @@ -167,8 +165,7 @@ def toWikibase(self) -> dict[str, Any]: @classmethod def fromWikibase(cls, data: dict[str, Any], site: DataSite | None = None) -> Coordinate: - """ - Constructor to create an object from Wikibase's JSON output. + """Constructor to create an object from Wikibase's JSON output. :param data: Wikibase JSON :param site: The Wikibase site @@ -230,8 +227,7 @@ def precision(self, value: float) -> None: self._precision = value def precisionToDim(self) -> int | None: - """ - Convert precision from Wikibase to GeoData's dim and return the latter. + """Convert precision from Wikibase to GeoData's dim. dim is calculated if the Coordinate doesn't have a dimension, and precision is set. When neither dim nor precision are set, ValueError @@ -273,8 +269,7 @@ def precisionToDim(self) -> int | None: def get_globe_item(self, repo: DataSite | None = None, lazy_load: bool = False) -> pywikibot.ItemPage: - """ - Return the ItemPage corresponding to the globe. + """Return the ItemPage corresponding to the globe. Note that the globe need not be in the same data repository as the Coordinate itself. @@ -720,8 +715,7 @@ def toTimestr(self, force_iso: bool = False) -> str: self.hour, self.minute, self.second) def toTimestamp(self, timezone_aware: bool = False) -> Timestamp: - """ - Convert the data to a pywikibot.Timestamp. + """Convert the data to a pywikibot.Timestamp. .. versionchanged:: 8.0.1 *timezone_aware* parameter was added. @@ -764,8 +758,7 @@ def toWikibase(self) -> dict[str, Any]: @classmethod def fromWikibase(cls, data: dict[str, Any], site: DataSite | None = None) -> WbTime: - """ - Create a WbTime from the JSON data given by the Wikibase API. + """Create a WbTime from the JSON data given by the Wikibase API. 
:param data: Wikibase JSON :param site: The Wikibase site. If not provided, retrieves the data @@ -784,8 +777,7 @@ class WbQuantity(WbRepresentation): @staticmethod def _require_errors(site: DataSite | None) -> bool: - """ - Check if Wikibase site is so old it requires error bounds to be given. + """Check if Wikibase site is old and requires error bounds to be given. If no site item is supplied it raises a warning and returns True. @@ -800,8 +792,7 @@ def _require_errors(site: DataSite | None) -> bool: @staticmethod def _todecimal(value: ToDecimalType) -> Decimal | None: - """ - Convert a string to a Decimal for use in WbQuantity. + """Convert a string to a Decimal for use in WbQuantity. None value is returned as is. @@ -815,8 +806,7 @@ def _todecimal(value: ToDecimalType) -> Decimal | None: @staticmethod def _fromdecimal(value: Decimal | None) -> str | None: - """ - Convert a Decimal to a string representation suitable for WikiBase. + """Convert a Decimal to a string representation suitable for WikiBase. None value is returned as is. @@ -830,8 +820,7 @@ def __init__( error: ToDecimalType | tuple[ToDecimalType, ToDecimalType] = None, site: DataSite | None = None, ) -> None: - """ - Create a new WbQuantity object. + """Create a new WbQuantity object. :param amount: number representing this quantity :param unit: the Wikibase item for the unit or the entity URI of this @@ -878,8 +867,7 @@ def unit(self) -> str: def get_unit_item(self, repo: DataSite | None = None, lazy_load: bool = False) -> pywikibot.ItemPage: - """ - Return the ItemPage corresponding to the unit. + """Return the ItemPage corresponding to the unit. Note that the unit need not be in the same data repository as the WbQuantity itself. @@ -901,8 +889,7 @@ def get_unit_item(self, repo: DataSite | None = None, return self._unit def toWikibase(self) -> dict[str, Any]: - """ - Convert the data to a JSON object for the Wikibase API. + """Convert the data to a JSON object for the Wikibase API. :return: Wikibase JSON """ @@ -916,8 +903,7 @@ def toWikibase(self) -> dict[str, Any]: @classmethod def fromWikibase(cls, data: dict[str, Any], site: DataSite | None = None) -> WbQuantity: - """ - Create a WbQuantity from the JSON data given by the Wikibase API. + """Create a WbQuantity from the JSON data given by the Wikibase API. :param data: Wikibase JSON :param site: The Wikibase site @@ -934,13 +920,13 @@ def fromWikibase(cls, data: dict[str, Any], class WbMonolingualText(WbRepresentation): + """A Wikibase monolingual text representation.""" _items = ('text', 'language') def __init__(self, text: str, language: str) -> None: - """ - Create a new WbMonolingualText object. + """Create a new WbMonolingualText object. :param text: text string :param language: language code of the string @@ -951,8 +937,7 @@ def __init__(self, text: str, language: str) -> None: self.language = language def toWikibase(self) -> dict[str, Any]: - """ - Convert the data to a JSON object for the Wikibase API. + """Convert the data to a JSON object for the Wikibase API. :return: Wikibase JSON """ @@ -964,8 +949,7 @@ def toWikibase(self) -> dict[str, Any]: @classmethod def fromWikibase(cls, data: dict[str, Any], site: DataSite | None = None) -> WbMonolingualText: - """ - Create a WbMonolingualText from the JSON data given by Wikibase API. + """Create a WbMonolingualText from the JSON data given by Wikibase API. 
:param data: Wikibase JSON :param site: The Wikibase site @@ -974,6 +958,7 @@ def fromWikibase(cls, data: dict[str, Any], class WbDataPage(WbRepresentation): + """An abstract Wikibase representation for data pages. .. warning:: Perhaps a temporary implementation until :phab:`T162336` @@ -986,8 +971,7 @@ class WbDataPage(WbRepresentation): @classmethod @abc.abstractmethod def _get_data_site(cls, repo_site: DataSite) -> APISite: - """ - Return the site serving as a repository for a given data type. + """Return the site serving as a repository for a given data type. .. note:: implemented in the extended class. @@ -998,8 +982,7 @@ def _get_data_site(cls, repo_site: DataSite) -> APISite: @classmethod @abc.abstractmethod def _get_type_specifics(cls, site: DataSite) -> dict[str, Any]: - """ - Return the specifics for a given data type. + """Return the specifics for a given data type. .. note:: Must be implemented in the extended class. @@ -1017,8 +1000,7 @@ def _get_type_specifics(cls, site: DataSite) -> dict[str, Any]: @staticmethod def _validate(page: pywikibot.Page, data_site: BaseSite, ending: str, label: str) -> None: - """ - Validate the provided page against general and type specific rules. + """Validate the provided page against general and type specific rules. :param page: Page containing the data. :param data_site: The site serving as a repository for the given @@ -1073,8 +1055,7 @@ def __hash__(self) -> int: return hash(self.toWikibase()) def toWikibase(self) -> str: - """ - Convert the data to the value required by the Wikibase API. + """Convert the data to the value required by the Wikibase API. :return: title of the data page incl. namespace """ @@ -1097,12 +1078,12 @@ def fromWikibase(cls, page_name: str, site: DataSite | None) -> WbDataPage: class WbGeoShape(WbDataPage): + """A Wikibase geo-shape representation.""" @classmethod def _get_data_site(cls, site: DataSite) -> APISite: - """ - Return the site serving as a geo-shape repository. + """Return the site serving as a geo-shape repository. :param site: The Wikibase site """ @@ -1110,8 +1091,7 @@ def _get_data_site(cls, site: DataSite) -> APISite: @classmethod def _get_type_specifics(cls, site: DataSite) -> dict[str, Any]: - """ - Return the specifics for WbGeoShape. + """Return the specifics for WbGeoShape. :param site: The Wikibase site """ @@ -1124,12 +1104,12 @@ def _get_type_specifics(cls, site: DataSite) -> dict[str, Any]: class WbTabularData(WbDataPage): + """A Wikibase tabular-data representation.""" @classmethod def _get_data_site(cls, site: DataSite) -> APISite: - """ - Return the site serving as a tabular-data repository. + """Return the site serving as a tabular-data repository. :param site: The Wikibase site """ @@ -1137,8 +1117,7 @@ def _get_data_site(cls, site: DataSite) -> APISite: @classmethod def _get_type_specifics(cls, site: DataSite) -> dict[str, Any]: - """ - Return the specifics for WbTabularData. + """Return the specifics for WbTabularData. :param site: The Wikibase site """ @@ -1151,6 +1130,7 @@ def _get_type_specifics(cls, site: DataSite) -> dict[str, Any]: class WbUnknown(WbRepresentation): + """A Wikibase representation for unknown data type. This will prevent the bot from breaking completely when a new type @@ -1166,8 +1146,7 @@ class WbUnknown(WbRepresentation): _items = ('json',) def __init__(self, json: dict[str, Any], warning: str = '') -> None: - """ - Create a new WbUnknown object. + """Create a new WbUnknown object. 
:param json: Wikibase JSON :param warning: a warning message which is shown once if @@ -1192,8 +1171,7 @@ def toWikibase(self) -> dict[str, Any]: @classmethod def fromWikibase(cls, data: dict[str, Any], site: DataSite | None = None) -> WbUnknown: - """ - Create a WbUnknown from the JSON data given by the Wikibase API. + """Create a WbUnknown from the JSON data given by the Wikibase API. :param data: Wikibase JSON :param site: The Wikibase site diff --git a/pywikibot/bot.py b/pywikibot/bot.py index 59780d3469..c5227e7a1b 100644 --- a/pywikibot/bot.py +++ b/pywikibot/bot.py @@ -646,8 +646,7 @@ def input_list_choice(question: str, answers: AnswerType, default: int | str | None = None, force: bool = False) -> str: - """ - Ask the user the question and return one of the valid answers. + """Ask the user the question and return one of the valid answers. :param question: The question asked without trailing spaces. :param answers: The valid answers each containing a full length answer. @@ -674,8 +673,7 @@ def calledModuleName() -> str: def handle_args(args: Iterable[str] | None = None, do_help: bool = True) -> list[str]: - """ - Handle global command line arguments and return the rest as a list. + """Handle global command line arguments and return the rest as a list. Takes the command line arguments as strings, processes all :ref:`global parameters` such as ``-lang`` or @@ -908,8 +906,7 @@ def suggest_help(missing_parameters: Sequence[str] | None = None, missing_action: bool = False, additional_text: str = '', missing_dependencies: Sequence[str] | None = None) -> bool: - """ - Output error message to use -help with additional text before it. + """Output error message to use -help with additional text before it. :param missing_parameters: A list of parameters which are missing. :param missing_generator: Whether a generator is missing. @@ -952,8 +949,7 @@ def suggest_help(missing_parameters: Sequence[str] | None = None, def writeToCommandLogFile() -> None: - """ - Save name of the called module along with all params to logs/commands.log. + """Save name of the called module along with all params to logfile. This can be used by user later to track errors or report bugs. """ @@ -1258,8 +1254,7 @@ def user_confirm(self, question: str) -> bool: def userPut(self, page: pywikibot.page.BasePage, oldtext: str, newtext: str, **kwargs: Any) -> bool: - """ - Save a new revision of a page, with user confirmation as required. + """Save a new revision of a page, with user confirmation as required. Print differences, ask user for confirmation, and puts the page if needed. @@ -1294,8 +1289,7 @@ def userPut(self, page: pywikibot.page.BasePage, oldtext: str, def _save_page(self, page: pywikibot.page.BasePage, func: Callable[..., Any], *args: Any, **kwargs: Any) -> bool: - """ - Helper function to handle page save-related option error handling. + """Helper function to handle page save-related option error handling. .. note:: Do no use it directly. Use :meth:`userPut` instead. @@ -1598,8 +1592,7 @@ def run(self) -> None: # a site previously defined class Bot(BaseBot): - """ - Generic bot subclass for multiple sites. + """Generic bot subclass for multiple sites. If possible the MultipleSitesBot or SingleSiteBot classes should be used instead which specifically handle multiple or single sites. @@ -1627,8 +1620,7 @@ def site(self) -> BaseSite | None: @site.setter def site(self, site: BaseSite | None) -> None: - """ - Set the Site that the bot is using. + """Set the Site that the bot is using. 
When Bot.run() is managing the generator and site property, this is set each time a page is on a site different from the previous page. @@ -1682,8 +1674,7 @@ def init_page(self, item: Any) -> pywikibot.page.BasePage: class SingleSiteBot(BaseBot): - """ - A bot only working on one site and ignoring the others. + """A bot only working on one site and ignoring the others. If no site is given from the start it'll use the first page's site. Any page after the site has been defined and is not on the defined site will be @@ -1693,8 +1684,7 @@ class SingleSiteBot(BaseBot): def __init__(self, site: BaseSite | bool | None = True, **kwargs: Any) -> None: - """ - Create a SingleSiteBot instance. + """Create a SingleSiteBot instance. :param site: If True it'll be set to the configured site using pywikibot.Site. @@ -1746,8 +1736,7 @@ def skip_page(self, page: pywikibot.page.BasePage) -> bool: class MultipleSitesBot(BaseBot): - """ - A bot class working on multiple sites. + """A bot class working on multiple sites. The bot should accommodate for that case and not store site specific information on only one site. @@ -1814,8 +1803,7 @@ def set_options(self, **kwargs: Any) -> None: class CurrentPageBot(BaseBot): - """ - A bot which automatically sets 'current_page' on each treat(). + """A bot which automatically sets 'current_page' on each treat(). This class should be always used together with either the MultipleSitesBot or SingleSiteBot class as there is no site management in this class. @@ -1838,8 +1826,7 @@ def put_current(self, new_text: str, ignore_save_related_errors: bool | None = None, ignore_server_errors: bool | None = None, **kwargs: Any) -> bool: - """ - Call :py:obj:`Bot.userPut` but use the current page. + """Call :py:obj:`Bot.userPut` but use the current page. It compares the new_text to the current page text. @@ -1865,8 +1852,7 @@ def put_current(self, new_text: str, class AutomaticTWSummaryBot(CurrentPageBot): - """ - A class which automatically defines ``summary`` for ``put_current``. + """A class which automatically defines ``summary`` for ``put_current``. The class must defined a ``summary_key`` string which contains the i18n key for :py:obj:`i18n.twtranslate`. It can also @@ -2002,8 +1988,7 @@ def skip_page(self, page: pywikibot.page.BasePage) -> bool: class WikidataBot(Bot, ExistingPageBot): - """ - Generic Wikidata Bot to be subclassed. + """Generic Wikidata Bot to be subclassed. Source claims (P143) can be created for specific sites @@ -2041,8 +2026,7 @@ def __init__(self, **kwargs: Any) -> None: f'{self.site} is not connected to a data repository') def cacheSources(self) -> None: - """ - Fetch the sources from the list on Wikidata. + """Fetch the sources from the list on Wikidata. It is stored internally and reused by getSource() """ @@ -2055,8 +2039,7 @@ def cacheSources(self) -> None: self.repo, family[source_lang]) def get_property_by_name(self, property_name: str) -> str: - """ - Find given property and return its ID. + """Find given property and return its ID. Method first uses site.search() and if the property isn't found, then asks user to provide the property ID. @@ -2078,8 +2061,7 @@ def user_edit_entity(self, entity: pywikibot.page.WikibasePage, ignore_save_related_errors: bool | None = None, ignore_server_errors: bool | None = None, **kwargs: Any) -> bool: - """ - Edit entity with data provided, with user confirmation as required. + """Edit entity with data provided, with user confirmation as required. 
:param entity: page to be edited :param data: data to be saved, or None if the diff should be created @@ -2119,8 +2101,7 @@ def user_add_claim(self, item: pywikibot.page.ItemPage, claim: pywikibot.page.Claim, source: BaseSite | None = None, bot: bool = True, **kwargs: Any) -> bool: - """ - Add a claim to an item, with user confirmation as required. + """Add a claim to an item, with user confirmation as required. :param item: page to be edited :param claim: claim to be saved @@ -2149,8 +2130,7 @@ def user_add_claim(self, item: pywikibot.page.ItemPage, return self._save_page(item, item.addClaim, claim, bot=bot, **kwargs) def getSource(self, site: BaseSite) -> pywikibot.page.Claim | None: - """ - Create a Claim usable as a source for Wikibase statements. + """Create a Claim usable as a source for Wikibase statements. :param site: site that is the source of assertions. @@ -2170,8 +2150,7 @@ def user_add_claim_unless_exists( source: BaseSite | None = None, logger_callback: Callable[[str], Any] = pwb_logging.log, **kwargs: Any) -> bool: - """ - Decorator of :py:obj:`user_add_claim`. + """Decorator of :py:obj:`user_add_claim`. Before adding a new claim, it checks if we can add it, using provided filters. @@ -2261,8 +2240,7 @@ def create_item_for_page(self, page: pywikibot.page.BasePage, summary: str | None = None, **kwargs: Any ) -> pywikibot.page.ItemPage | None: - """ - Create an ItemPage with the provided page as the sitelink. + """Create an ItemPage with the provided page as the sitelink. :param page: the page for which the item will be created :param data: additional data to be included in the new item (optional). @@ -2345,8 +2323,7 @@ def treat_page(self) -> None: def treat_page_and_item(self, page: pywikibot.page.BasePage, item: pywikibot.page.ItemPage) -> None: - """ - Treat page together with its item (if it exists). + """Treat page together with its item (if it exists). Must be implemented in subclasses. """ diff --git a/pywikibot/bot_choice.py b/pywikibot/bot_choice.py index 2548a8b3ab..b7dd803544 100644 --- a/pywikibot/bot_choice.py +++ b/pywikibot/bot_choice.py @@ -46,8 +46,7 @@ class Option(ABC): - """ - A basic option for input_choice. + """A basic option for input_choice. The following methods need to be implemented: @@ -97,8 +96,7 @@ def stop(self) -> bool: return self._stop def handled(self, value: str) -> Option | None: - """ - Return the Option object that applies to the given value. + """Return the Option object that applies to the given value. If this Option object doesn't know which applies it returns None. """ @@ -167,8 +165,7 @@ class StandardOption(Option): """An option with a description and shortcut and returning the shortcut.""" def __init__(self, option: str, shortcut: str, **kwargs: Any) -> None: - """ - Initializer. + """Initializer. :param option: option string :param shortcut: Shortcut of the option @@ -217,8 +214,7 @@ def out(self) -> str: class NestedOption(OutputOption, StandardOption): - """ - An option containing other options. + """An option containing other options. It will return True in test if this option applies but False if a sub option applies while handle returns the sub option. 
diff --git a/pywikibot/comms/eventstreams.py b/pywikibot/comms/eventstreams.py index 5bc6530771..5f46511f1f 100644 --- a/pywikibot/comms/eventstreams.py +++ b/pywikibot/comms/eventstreams.py @@ -195,8 +195,7 @@ def url(self): since=f'?since={self._since}' if self._since else '') def set_maximum_items(self, value: int) -> None: - """ - Set the maximum number of items to be retrieved from the stream. + """Set the maximum number of items to be retrieved from the stream. If not called, most queries will continue as long as there is more data to be retrieved from the stream. diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py index edda981c33..ceb6d6288c 100644 --- a/pywikibot/comms/http.py +++ b/pywikibot/comms/http.py @@ -1,5 +1,4 @@ -""" -Basic HTTP access interface. +"""Basic HTTP access interface. This module handles communication between the bot and the HTTP threads. @@ -162,8 +161,7 @@ def get_value(self, key, args, kwargs): def user_agent_username(username=None): - """ - Reduce username to a representation permitted in HTTP headers. + """Reduce username to a representation permitted in HTTP headers. To achieve that, this function: 1) replaces spaces (' ') with '_' @@ -251,8 +249,7 @@ def request(site: pywikibot.site.BaseSite, uri: str | None = None, headers: dict | None = None, **kwargs) -> requests.Response: - """ - Request to Site with default error handling and response decoding. + """Request to Site with default error handling and response decoding. See :py:obj:`requests.Session.request` for additional parameters. @@ -309,8 +306,7 @@ def get_authentication(uri: str) -> tuple[str, str] | None: def error_handling_callback(response): - """ - Raise exceptions and log alerts. + """Raise exceptions and log alerts. :param response: Response returned by Session.request(). :type response: :py:obj:`requests.Response` @@ -365,8 +361,7 @@ def error_handling_callback(response): def fetch(uri: str, method: str = 'GET', headers: dict | None = None, default_error_handling: bool = True, use_fake_user_agent: bool | str = False, **kwargs): - """ - HTTP request. + """HTTP request. See :py:obj:`requests.Session.request` for parameters. diff --git a/pywikibot/config.py b/pywikibot/config.py index b5c9032f5c..1e45f5b989 100644 --- a/pywikibot/config.py +++ b/pywikibot/config.py @@ -1,5 +1,4 @@ -""" -Module to define and load pywikibot configuration default and user preferences. +"""Module to define pywikibot configuration default and user preferences. User preferences are loaded from a python file called `user-config.py`, which may be located in directory specified by the environment variable diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py index 8df7cd4322..c2337fe5fa 100644 --- a/pywikibot/cosmetic_changes.py +++ b/pywikibot/cosmetic_changes.py @@ -1,5 +1,4 @@ -""" -This module can do slight modifications to tidy a wiki page's source code. +"""This module can do slight modifications to tidy a wiki page's source code. The changes are not supposed to change the look of the rendered wiki page. @@ -330,8 +329,7 @@ def change(self, text: str) -> bool | str: return new_text def fixSelfInterwiki(self, text: str) -> str: - """ - Interwiki links to the site itself are displayed like local links. + """Interwiki links to the site itself are displayed like local links. Remove their language code prefix. 
""" @@ -767,8 +765,7 @@ def removeUselessSpaces(self, text: str) -> str: return text def removeNonBreakingSpaceBeforePercent(self, text: str) -> str: - """ - Remove a non-breaking space between number and percent sign. + """Remove a non-breaking space between number and percent sign. Newer MediaWiki versions automatically place a non-breaking space in front of a percent sign, so it is no longer required to place it @@ -779,8 +776,7 @@ def removeNonBreakingSpaceBeforePercent(self, text: str) -> str: return text def cleanUpSectionHeaders(self, text: str) -> str: - """ - Add a space between the equal signs and the section title. + """Add a space between the equal signs and the section title. Example:: @@ -805,8 +801,7 @@ def cleanUpSectionHeaders(self, text: str) -> str: ['comment', 'math', 'nowiki', 'pre']) def putSpacesInLists(self, text: str) -> str: - """ - Add a space between the * or # and the text. + """Add a space between the * or # and the text. .. note:: This space is recommended in the syntax help on the English, German and French Wikipedias. It might be that it @@ -1069,8 +1064,7 @@ def fixArabicLetters(self, text: str) -> str: return text def commonsfiledesc(self, text: str) -> str: - """ - Clean up file descriptions on Wikimedia Commons. + """Clean up file descriptions on Wikimedia Commons. It works according to [1] and works only on pages in the file namespace on Wikimedia Commons. diff --git a/pywikibot/data/api/__init__.py b/pywikibot/data/api/__init__.py index c6475673af..ca065a31ea 100644 --- a/pywikibot/data/api/__init__.py +++ b/pywikibot/data/api/__init__.py @@ -46,8 +46,7 @@ def _invalidate_superior_cookies(family) -> None: - """ - Clear cookies for site's second level domain. + """Clear cookies for site's second level domain. The http module takes care of all the cookie stuff. This is a workaround for requests bug, see :phab:`T224712` diff --git a/pywikibot/data/api/_generators.py b/pywikibot/data/api/_generators.py index c4fef30368..006f8d024d 100644 --- a/pywikibot/data/api/_generators.py +++ b/pywikibot/data/api/_generators.py @@ -95,8 +95,7 @@ def __init__( data_name: str = 'data', **kwargs ) -> None: - """ - Initialize an APIGenerator object. + """Initialize an APIGenerator object. kwargs are used to create a Request object; see that object's documentation for values. @@ -123,8 +122,7 @@ def __init__( self.request[self.limit_name] = self.query_increment def set_query_increment(self, value: int) -> None: - """ - Set the maximum number of items to be retrieved per API query. + """Set the maximum number of items to be retrieved per API query. If not called, the default is config.step. @@ -137,8 +135,7 @@ def set_query_increment(self, value: int) -> None: .format(type(self).__name__, self.query_increment)) def set_maximum_items(self, value: int | str | None) -> None: - """ - Set the maximum number of items to be retrieved from the wiki. + """Set the maximum number of items to be retrieved from the wiki. If not called, most queries will continue as long as there is more data to be retrieved from the API. @@ -157,8 +154,7 @@ def set_maximum_items(self, value: int | str | None) -> None: @property def generator(self): - """ - Submit request and iterate the response. + """Submit request and iterate the response. Continues response as needed until limit (if defined) is reached. @@ -764,8 +760,7 @@ class PropertyGenerator(QueryGenerator): """ def __init__(self, prop: str, **kwargs) -> None: - """ - Initializer. + """Initializer. 
Required and optional parameters are as for ``Request``, except that action=query is assumed and prop is required. @@ -844,8 +839,7 @@ class ListGenerator(QueryGenerator): """ def __init__(self, listaction: str, **kwargs) -> None: - """ - Initializer. + """Initializer. Required and optional parameters are as for ``Request``, except that action=query is assumed and listaction is required. @@ -988,8 +982,7 @@ def _update_coordinates(page, coordinates) -> None: def update_page(page: pywikibot.Page, pagedict: dict[str, Any], props: Iterable[str] | None = None) -> None: - """ - Update attributes of Page object *page*, based on query data in *pagedict*. + """Update attributes of *page*, based on query data in *pagedict*. :param page: object to be updated :param pagedict: the contents of a *page* element of a query diff --git a/pywikibot/data/api/_optionset.py b/pywikibot/data/api/_optionset.py index d31f0e4ed8..75019c74e0 100644 --- a/pywikibot/data/api/_optionset.py +++ b/pywikibot/data/api/_optionset.py @@ -17,8 +17,7 @@ class OptionSet(MutableMapping): - """ - A class to store a set of options which can be either enabled or not. + """A class to store a set of options which can be either enabled or not. If it is instantiated with the associated site, module and parameter it will only allow valid names as options. If instantiated 'lazy loaded' it @@ -95,8 +94,7 @@ def _set_site(self, site, module: str, param: str, self._site_set = True def from_dict(self, dictionary): - """ - Load options from the dict. + """Load options from the dict. The options are not cleared before. If changes have been made previously, but only the dict values should be applied it needs to be @@ -153,8 +151,7 @@ def __setitem__(self, name, value): raise ValueError(f'Invalid value "{value}"') def __getitem__(self, name) -> bool | None: - """ - Return whether the option is enabled. + """Return whether the option is enabled. :return: If the name has been set it returns whether it is enabled. Otherwise it returns None. If the site has been set it raises a diff --git a/pywikibot/data/api/_paraminfo.py b/pywikibot/data/api/_paraminfo.py index b7cb846b30..7d95660e9d 100644 --- a/pywikibot/data/api/_paraminfo.py +++ b/pywikibot/data/api/_paraminfo.py @@ -139,8 +139,7 @@ def fetch(self, modules: Iterable | str) -> None: self._fetch(modules) def _fetch(self, modules: set | frozenset) -> None: - """ - Fetch paraminfo for multiple modules without initializing beforehand. + """Get paraminfo for multiple modules without initializing beforehand. :param modules: API modules to load and which haven't been loaded yet. """ diff --git a/pywikibot/data/api/_requests.py b/pywikibot/data/api/_requests.py index 0e84593b73..66797496d4 100644 --- a/pywikibot/data/api/_requests.py +++ b/pywikibot/data/api/_requests.py @@ -147,8 +147,7 @@ def __init__(self, site=None, retry_wait: int | None = None, use_get: bool | None = None, parameters=_PARAM_DEFAULT, **kwargs) -> None: - """ - Create a new Request instance with the given parameters. + """Create a new Request instance with the given parameters. The parameters for the request can be defined via either the 'parameters' parameter or the keyword arguments. The keyword arguments @@ -284,8 +283,7 @@ def _warn_kwargs(cls) -> None: @classmethod def clean_kwargs(cls, kwargs: dict) -> dict: - """ - Convert keyword arguments into new parameters mode. + """Convert keyword arguments into new parameters mode. 
If there are no other arguments in kwargs apart from the used arguments by the class' initializer it'll just return kwargs and otherwise remove @@ -331,8 +329,7 @@ def clean_kwargs(cls, kwargs: dict) -> dict: return kwargs def _format_value(self, value): - """ - Format the MediaWiki API request parameter. + """Format the MediaWiki API request parameter. Converts from Python datatypes to MediaWiki API parameter values. @@ -404,8 +401,7 @@ def iteritems(self): return iter(self.items()) def _add_defaults(self): - """ - Add default parameters to the API request. + """Add default parameters to the API request. This method will only add them once. """ @@ -449,8 +445,7 @@ def _add_defaults(self): self.__defaulted = True # skipcq: PTC-W0037 def _encoded_items(self) -> dict[str, str | bytes]: - """ - Build a dict of params with minimal encoding needed for the site. + """Build a dict of params with minimal encoding needed for the site. This helper method only prepares params for serialisation or transmission, so it only encodes values which are not ASCII, @@ -496,8 +491,7 @@ def _encoded_items(self) -> dict[str, str | bytes]: return params def _http_param_string(self): - """ - Return the parameters as a HTTP URL query fragment. + """Return the parameters as a HTTP URL query fragment. URL encodes the parameters provided by _encoded_items() @@ -615,8 +609,7 @@ def _use_get(self): @classmethod def _build_mime_request(cls, params: dict, mime_params: dict) -> tuple[dict, bytes]: - """ - Construct a MIME multipart form post. + """Construct a MIME multipart form post. :param params: HTTP request params :param mime_params: HTTP request parts which must be sent in the body @@ -1187,8 +1180,7 @@ def create_simple(cls, req_site, **kwargs): @classmethod def _get_cache_dir(cls) -> Path: - """ - Return the base directory path for cache entries. + """Return the base directory path for cache entries. The directory will be created if it does not already exist. diff --git a/pywikibot/data/memento.py b/pywikibot/data/memento.py index 9433e4282d..1a26d5b70d 100644 --- a/pywikibot/data/memento.py +++ b/pywikibot/data/memento.py @@ -224,8 +224,7 @@ def is_memento(uri: str, response: requests.Response | None = None, session: requests.Session | None = None, timeout: int | None = None) -> bool: - """ - Determines if the URI given is indeed a Memento. + """Determines if the URI given is indeed a Memento. The simple case is to look for a Memento-Datetime header in the request, but not all archives are Memento-compliant yet. diff --git a/pywikibot/data/sparql.py b/pywikibot/data/sparql.py index 69e26e8dac..addf5215f1 100644 --- a/pywikibot/data/sparql.py +++ b/pywikibot/data/sparql.py @@ -28,6 +28,7 @@ class SparqlQuery(WaitingMixin): + """SPARQL Query class. This class allows to run SPARQL queries against any SPARQL endpoint. @@ -42,8 +43,7 @@ def __init__(self, entity_url: str | None = None, repo=None, max_retries: int | None = None, retry_wait: float | None = None) -> None: - """ - Create endpoint. + """Create endpoint. :param endpoint: SPARQL endpoint URL :param entity_url: URL prefix for any entities returned in a query. @@ -89,8 +89,7 @@ def __init__(self, self.retry_wait = retry_wait def get_last_response(self): - """ - Return last received response. + """Return last received response. :return: Response object from last request or None """ @@ -101,8 +100,7 @@ def select(self, full_data: bool = False, headers: dict[str, str] | None = None ) -> list[dict[str, str]] | None: - """ - Run SPARQL query and return the result. 
+ """Run SPARQL query and return the result. The response is assumed to be in format defined by: https://www.w3.org/TR/2013/REC-sparql11-results-json-20130321/ @@ -184,8 +182,7 @@ def query(self, query: str, headers: dict[str, str] | None = None): def ask(self, query: str, headers: dict[str, str] | None = None) -> bool: - """ - Run SPARQL ASK query and return boolean result. + """Run SPARQL ASK query and return boolean result. :param query: Query text """ @@ -195,8 +192,7 @@ def ask(self, query: str, return data['boolean'] def get_items(self, query, item_name: str = 'item', result_type=set): - """ - Retrieve items which satisfy given query. + """Retrieve items which satisfy given query. Items are returned as Wikibase IDs. @@ -216,6 +212,7 @@ def get_items(self, query, item_name: str = 'item', result_type=set): class SparqlNode: + """Base class for SPARQL nodes.""" def __init__(self, value) -> None: @@ -227,6 +224,7 @@ def __str__(self) -> str: class URI(SparqlNode): + """Representation of URI result type.""" def __init__(self, data: dict, entity_url, **kwargs) -> None: @@ -235,8 +233,7 @@ def __init__(self, data: dict, entity_url, **kwargs) -> None: self.entity_url = entity_url def getID(self): # noqa: N802 - """ - Get ID of Wikibase object identified by the URI. + """Get ID of Wikibase object identified by the URI. :return: ID of Wikibase object, e.g. Q1234 """ @@ -249,6 +246,7 @@ def __repr__(self) -> str: class Literal(SparqlNode): + """Representation of RDF literal result type.""" def __init__(self, data: dict, **kwargs) -> None: @@ -266,6 +264,7 @@ def __repr__(self) -> str: class Bnode(SparqlNode): + """Representation of blank node.""" def __init__(self, data: dict, **kwargs) -> None: diff --git a/pywikibot/data/superset.py b/pywikibot/data/superset.py index 6e223df1b4..101b96d89e 100644 --- a/pywikibot/data/superset.py +++ b/pywikibot/data/superset.py @@ -23,6 +23,7 @@ class SupersetQuery(WaitingMixin): + """Superset Query class. This class allows to run SQL queries against wikimedia superset diff --git a/pywikibot/data/wikistats.py b/pywikibot/data/wikistats.py index 23b2547869..4c6a0c08fa 100644 --- a/pywikibot/data/wikistats.py +++ b/pywikibot/data/wikistats.py @@ -20,8 +20,7 @@ class WikiStats: - """ - Light wrapper around WikiStats data, caching responses and data. + """Light wrapper around WikiStats data, caching responses and data. The methods accept a Pywikibot family name as the WikiStats table name, mapping the names before calling the WikiStats API. @@ -114,8 +113,7 @@ def get_dict(self, table: str) -> dict: def sorted(self, table: str, key: str, reverse: bool | None = None) -> list: - """ - Reverse numerical sort of data. + """Reverse numerical sort of data. :param table: name of table of data :param key: data table key diff --git a/pywikibot/date.py b/pywikibot/date.py index 62f64908ad..3a97e7579f 100644 --- a/pywikibot/date.py +++ b/pywikibot/date.py @@ -116,8 +116,7 @@ def dh_noConv(value: int, pattern: str, limit: Callable[[int], bool]) -> str: def dh_dayOfMnth(value: int, pattern: str) -> str: - """ - Helper for decoding a single integer value. + """Helper for decoding a single integer value. The single integer should be <=31, no conversion, no rounding (used in days of month). @@ -127,8 +126,7 @@ def dh_dayOfMnth(value: int, pattern: str) -> str: def dh_mnthOfYear(value: int, pattern: str) -> str: - """ - Helper for decoding a single integer value. + """Helper for decoding a single integer value. 
The value should be >=1000, no conversion, no rounding (used in month of the year) @@ -137,8 +135,7 @@ def dh_mnthOfYear(value: int, pattern: str) -> str: def dh_decAD(value: int, pattern: str) -> str: - """ - Helper for decoding a single integer value. + """Helper for decoding a single integer value. It should be no conversion, round to decimals (used in decades) """ @@ -147,8 +144,7 @@ def dh_decAD(value: int, pattern: str) -> str: def dh_decBC(value: int, pattern: str) -> str: - """ - Helper for decoding a single integer value. + """Helper for decoding a single integer value. It should be no conversion, round to decimals (used in decades) """ @@ -276,8 +272,7 @@ def _(value: str, ind: int, match: str) -> int: def alwaysTrue(x: Any) -> bool: - """ - Return True, always. + """Return True, always. Used for multiple value selection function to accept all other values. @@ -1963,8 +1958,7 @@ def _format_limit_dom(days: int) -> tuple[Callable[[int], bool], int, int]: def getAutoFormat(lang: str, title: str, ignoreFirstLetterCase: bool = True ) -> tuple[str | None, str | None]: - """ - Return first matching formatted date value. + """Return first matching formatted date value. :param lang: language code :param title: value to format @@ -2022,8 +2016,7 @@ def formatYear(lang: str, year: int) -> str: def apply_month_delta(date: datetime.date, month_delta: int = 1, add_overlap: bool = False) -> datetime.date: - """ - Add or subtract months from the date. + """Add or subtract months from the date. By default if the new month has less days then the day of the date it chooses the last day in the new month. For example a date in the March 31st @@ -2055,8 +2048,7 @@ def apply_month_delta(date: datetime.date, month_delta: int = 1, def get_month_delta(date1: datetime.date, date2: datetime.date) -> int: - """ - Return the difference between two dates in months. + """Return the difference between two dates in months. It does only work on calendars with 12 months per year, and where the months are consecutive and non-negative numbers. diff --git a/pywikibot/diff.py b/pywikibot/diff.py index 49f5f72051..c5c1ab3c1c 100644 --- a/pywikibot/diff.py +++ b/pywikibot/diff.py @@ -43,8 +43,7 @@ def __init__(self, a: str | Sequence[str], b: str | Sequence[str], grouped_opcode: Sequence[tuple[str, int, int, int, int]] ) -> None: - """ - Initializer. + """Initializer. :param a: sequence of lines :param b: sequence of lines diff --git a/pywikibot/editor.py b/pywikibot/editor.py index 7b470780c1..ec42bdd0c8 100644 --- a/pywikibot/editor.py +++ b/pywikibot/editor.py @@ -101,8 +101,7 @@ def _concat(command: Sequence[str]) -> str: def edit(self, text: str, jumpIndex: int | None = None, highlight: str | None = None) -> str | None: - """ - Call the editor and thus allows the user to change the text. + """Call the editor and thus allows the user to change the text. Halts the thread's operation until the editor is closed. diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py index ec403e20df..de528b4611 100644 --- a/pywikibot/exceptions.py +++ b/pywikibot/exceptions.py @@ -262,8 +262,7 @@ class UploadError(APIError): def __init__(self, code: str, message: str, file_key: str | None = None, offset: int | bool = 0) -> None: - """ - Create a new UploadError instance. + """Create a new UploadError instance. :param file_key: The file_key of the uploaded file to reuse it later. If no key is known or it is an incomplete file it may be None. 
@@ -282,8 +281,7 @@ def message(self) -> str: class PageRelatedError(Error): - """ - Abstract Exception, used when the exception concerns a particular Page. + """Abstract Exception, used when the exception concerns a particular Page. This class should be used when the Exception concerns a particular Page, and when a generic message can be written once for all. @@ -295,8 +293,7 @@ class PageRelatedError(Error): def __init__(self, page: pywikibot.page.BasePage, message: str | None = None) -> None: - """ - Initializer. + """Initializer. :param page: Page that caused the exception """ @@ -479,8 +476,7 @@ class CircularRedirectError(PageRelatedError): class InterwikiRedirectPageError(PageRelatedError): - """ - Page is a redirect to another site. + """Page is a redirect to another site. This is considered invalid in Pywikibot. See bug :phab:`T75184`. @@ -699,8 +695,7 @@ class NoWikibaseEntityError(WikiBaseError): """This entity doesn't exist.""" def __init__(self, entity: pywikibot.page.WikibaseEntity) -> None: - """ - Initializer. + """Initializer. :param entity: Wikibase entity """ diff --git a/pywikibot/families/wikiquote_family.py b/pywikibot/families/wikiquote_family.py index a1e2e2f9ef..1a032a039b 100644 --- a/pywikibot/families/wikiquote_family.py +++ b/pywikibot/families/wikiquote_family.py @@ -70,8 +70,7 @@ class Family(family.SubdomainFamily, family.WikimediaFamily): } def encodings(self, code): - """ - Return a list of historical encodings for a specific language. + """Return a list of historical encodings for a specific language. :param code: site code """ diff --git a/pywikibot/family.py b/pywikibot/family.py index 6cb4ece163..673f1715a8 100644 --- a/pywikibot/family.py +++ b/pywikibot/family.py @@ -482,8 +482,7 @@ def protocol(self, code: str) -> str: return 'https' def verify_SSL_certificate(self, code: str) -> bool: - """ - Return whether a HTTPS certificate should be verified. + """Return whether a HTTPS certificate should be verified. .. versionadded:: 5.3 renamed from ignore_certificate_error @@ -535,8 +534,7 @@ def _hostname(self, code, protocol=None): return protocol, host def base_url(self, code: str, uri: str, protocol=None) -> str: - """ - Prefix uri with port and hostname. + """Prefix uri with port and hostname. :param code: The site code :param uri: The absolute path after the hostname @@ -712,8 +710,7 @@ def isPublic(self, code) -> bool: return True def post_get_convert(self, site, getText): - """ - Do a conversion on the retrieved text from the Wiki. + """Do a conversion on the retrieved text from the Wiki. For example a :wiki:`X-conversion in Esperanto `. @@ -721,8 +718,7 @@ def post_get_convert(self, site, getText): return getText def pre_put_convert(self, site, putText): - """ - Do a conversion on the text to insert on the Wiki. + """Do a conversion on the text to insert on the Wiki. For example a :wiki:`X-conversion in Esperanto `. @@ -731,8 +727,7 @@ def pre_put_convert(self, site, putText): @property def obsolete(self) -> types.MappingProxyType[str, str | None]: - """ - Old codes that are not part of the family. + """Old codes that are not part of the family. Interwiki replacements override removals for the same code. @@ -744,8 +739,7 @@ def obsolete(self) -> types.MappingProxyType[str, str | None]: @classproperty def domains(cls) -> set[str]: - """ - Get list of unique domain names included in this family. + """Get list of unique domain names included in this family. These domains may also exist in another family. 
""" @@ -1165,8 +1159,7 @@ def globes(self, code): def AutoFamily(name: str, url: str) -> SingleSiteFamily: - """ - Family that automatically loads the site configuration. + """Family that automatically loads the site configuration. :param name: Name for the family :param url: API endpoint URL of the wiki diff --git a/pywikibot/flow.py b/pywikibot/flow.py index 1e1db84a58..614800d456 100644 --- a/pywikibot/flow.py +++ b/pywikibot/flow.py @@ -423,8 +423,7 @@ def __init__(self, page: Topic, uuid: str) -> None: @classmethod def fromJSON(cls, page: Topic, post_uuid: str, # noqa: N802 data: dict[str, Any]) -> Post: - """ - Create a Post object using the data returned from the API call. + """Create a Post object using the data returned from the API call. :param page: A Flow topic :param post_uuid: The UUID of the post diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py index f3ac0fb866..e07f0ddc3a 100644 --- a/pywikibot/i18n.py +++ b/pywikibot/i18n.py @@ -368,8 +368,7 @@ def set_messages_package(package_name: str) -> None: def messages_available() -> bool: - """ - Return False if there are no i18n messages available. + """Return False if there are no i18n messages available. To determine if messages are available, it looks for the package name set using :py:obj:`set_messages_package` for a message bundle called @@ -438,8 +437,7 @@ def _get_bundle(lang: str, dirname: str) -> dict[str, str]: def _get_translation(lang: str, twtitle: str) -> str | None: - """ - Return message of certain twtitle if exists. + """Return message of certain twtitle if exists. For internal use, don't use it directly. """ @@ -515,8 +513,7 @@ def replace_plural(match: Match[str]) -> str: class _PluralMappingAlias(abc.Mapping): - """ - Aliasing class to allow non mappings in _extract_plural. + """Aliasing class to allow non mappings in _extract_plural. That function only uses __getitem__ so this is only implemented here. """ @@ -699,8 +696,7 @@ def twtranslate( only_plural: bool = False, bot_prefix: bool = False ) -> str | None: - r""" - Translate a message using JSON files in messages_package_name. + r"""Translate a message using JSON files in messages_package_name. fallback parameter must be True for i18n and False for L10N or testing purposes. @@ -823,8 +819,7 @@ def twtranslate( def twhas_key(source: str | pywikibot.site.BaseSite, twtitle: str) -> bool: - """ - Check if a message has a translation in the specified language code. + """Check if a message has a translation in the specified language code. The translations are retrieved from i18n., based on the callers import table. @@ -842,8 +837,7 @@ def twhas_key(source: str | pywikibot.site.BaseSite, twtitle: str) -> bool: def twget_keys(twtitle: str) -> list[str]: - """ - Return all language codes for a special message. + """Return all language codes for a special message. :param twtitle: The TranslateWiki string title, in - format @@ -938,8 +932,7 @@ def input(twtitle: str, parameters: Mapping[str, int] | None = None, password: bool = False, fallback_prompt: str | None = None) -> str: - """ - Ask the user a question, return the user's answer. + """Ask the user a question, return the user's answer. The prompt message is retrieved via :py:obj:`twtranslate` and uses the config variable 'userinterface_lang'. diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py index 3efacb522a..e19c8a546c 100644 --- a/pywikibot/interwiki_graph.py +++ b/pywikibot/interwiki_graph.py @@ -23,8 +23,7 @@ class GraphSavingThread(threading.Thread): - """ - Threaded graph renderer. 
+ """Threaded graph renderer. Rendering a graph can take extremely long. We use multithreading because of that. @@ -176,8 +175,7 @@ def saveGraphFile(self) -> None: thread.start() def createGraph(self) -> None: - """ - Create graph of the interwiki links. + """Create graph of the interwiki links. For more info see https://meta.wikimedia.org/wiki/Interwiki_graphs """ @@ -202,8 +200,7 @@ def createGraph(self) -> None: def getFilename(page: pywikibot.page.Page, extension: str | None = None) -> str: - """ - Create a filename that is unique for the page. + """Create a filename that is unique for the page. :param page: page used to create the new filename :param extension: file extension diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py index 8442df3594..642e974982 100644 --- a/pywikibot/logentries.py +++ b/pywikibot/logentries.py @@ -104,8 +104,7 @@ def params(self) -> dict[str, Any]: @cached def page(self) -> int | pywikibot.page.Page: - """ - Page on which action was performed. + """Page on which action was performed. :return: page on action was performed """ @@ -140,8 +139,7 @@ def page(self) -> pywikibot.page.User: class BlockEntry(LogEntry): - """ - Block or unblock log entry. + """Block or unblock log entry. It might contain a block or unblock depending on the action. The duration, expiry and flags are not available on unblock log entries. @@ -161,8 +159,7 @@ def __init__(self, apidata: dict[str, Any], self._blockid = int(self['title'][pos + 1:]) def page(self) -> int | pywikibot.page.Page: - """ - Return the blocked account or IP. + """Return the blocked account or IP. :return: the Page object of username or IP if this block action targets a username or IP, or the blockid if this log reflects @@ -176,8 +173,7 @@ def page(self) -> int | pywikibot.page.Page: @cached def flags(self) -> list[str]: - """ - Return a list of (str) flags associated with the block entry. + """Return a list of (str) flags associated with the block entry. It raises an Error if the entry is an unblocking log entry. @@ -190,8 +186,7 @@ def flags(self) -> list[str]: @cached def duration(self) -> datetime.timedelta | None: - """ - Return a datetime.timedelta representing the block duration. + """Return a datetime.timedelta representing the block duration. :return: datetime.timedelta, or None if block is indefinite. """ @@ -299,8 +294,7 @@ def auto(self) -> bool: class LogEntryFactory: - """ - LogEntry Factory. + """LogEntry Factory. Only available method is create() """ @@ -315,8 +309,7 @@ class LogEntryFactory: def __init__(self, site: pywikibot.site.BaseSite, logtype: str | None = None) -> None: - """ - Initializer. + """Initializer. :param site: The site on which the log entries are created. :param logtype: The log type of the log entries, if known in advance. @@ -333,8 +326,7 @@ def __init__(self, site: pywikibot.site.BaseSite, self._creator = lambda data: logclass(data, self._site) def create(self, logdata: dict[str, Any]) -> LogEntry: - """ - Instantiate the LogEntry object representing logdata. + """Instantiate the LogEntry object representing logdata. :param logdata: returned by the api @@ -343,8 +335,7 @@ def create(self, logdata: dict[str, Any]) -> LogEntry: return self._creator(logdata) def get_valid_entry_class(self, logtype: str) -> LogEntry: - """ - Return the class corresponding to the @logtype string parameter. + """Return the class corresponding to the @logtype string parameter. 
:return: specified subclass of LogEntry :raise KeyError: logtype is not valid @@ -356,8 +347,7 @@ def get_valid_entry_class(self, logtype: str) -> LogEntry: @classmethod def get_entry_class(cls, logtype: str) -> LogEntry: - """ - Return the class corresponding to the @logtype string parameter. + """Return the class corresponding to the @logtype string parameter. :return: specified subclass of LogEntry @@ -384,8 +374,7 @@ def get_entry_class(cls, logtype: str) -> LogEntry: return cls._logtypes[logtype] def _create_from_data(self, logdata: dict[str, Any]) -> LogEntry: - """ - Check for logtype from data, and creates the correct LogEntry. + """Check for logtype from data, and creates the correct LogEntry. :param logdata: log entry data """ diff --git a/pywikibot/login.py b/pywikibot/login.py index c109a21171..2c9c8c6d27 100644 --- a/pywikibot/login.py +++ b/pywikibot/login.py @@ -46,8 +46,7 @@ class _PasswordFileWarning(UserWarning): class LoginStatus(IntEnum): - """ - Enum for Login statuses. + """Enum for Login statuses. >>> LoginStatus.NOT_ATTEMPTED LoginStatus(-3) @@ -80,8 +79,7 @@ class LoginManager: def __init__(self, password: str | None = None, site: pywikibot.site.BaseSite | None = None, user: str | None = None) -> None: - """ - Initializer. + """Initializer. All parameters default to defaults in user-config. @@ -114,8 +112,7 @@ def __init__(self, password: str | None = None, self.readPassword() def check_user_exists(self) -> None: - """ - Check that the username exists on the site. + """Check that the username exists on the site. .. seealso:: :api:`Users` @@ -148,8 +145,7 @@ def check_user_exists(self) -> None: .format(main_username, self.site)) def botAllowed(self) -> bool: - """ - Check whether the bot is listed on a specific page. + """Check whether the bot is listed on a specific page. This allows bots to comply with the policy on the respective wiki. """ @@ -264,8 +260,7 @@ def readPassword(self) -> None: } def login(self, retry: bool = False, autocreate: bool = False) -> bool: - """ - Attempt to log into the server. + """Attempt to log into the server. .. seealso:: :api:`Login` @@ -481,8 +476,7 @@ class BotPassword: """BotPassword object for storage in password file.""" def __init__(self, suffix: str, password: str) -> None: - """ - Initializer. + """Initializer. BotPassword function by using a separate password paired with a suffixed username of the form @. @@ -499,8 +493,7 @@ def __init__(self, suffix: str, password: str) -> None: self.password = password def login_name(self, username: str) -> str: - """ - Construct the login name from the username and suffix. + """Construct the login name from the username and suffix. :param user: username (without suffix) """ @@ -517,8 +510,7 @@ class OauthLoginManager(LoginManager): def __init__(self, password: str | None = None, site: pywikibot.site.BaseSite | None = None, user: str | None = None) -> None: - """ - Initializer. + """Initializer. All parameters default to defaults in user-config. @@ -543,8 +535,7 @@ def __init__(self, password: str | None = None, self._access_token: tuple[str, str] | None = None def login(self, retry: bool = False, force: bool = False) -> bool: - """ - Attempt to log into the server. + """Attempt to log into the server. .. seealso:: :api:`Login` @@ -582,8 +573,7 @@ def login(self, retry: bool = False, force: bool = False) -> bool: @property def consumer_token(self) -> tuple[str, str]: - """ - Return OAuth consumer key token and secret token. + """Return OAuth consumer key token and secret token. .. 
seealso:: :api:`Tokens` """ @@ -591,8 +581,7 @@ def consumer_token(self) -> tuple[str, str]: @property def access_token(self) -> tuple[str, str] | None: - """ - Return OAuth access key token and secret token. + """Return OAuth access key token and secret token. .. seealso:: :api:`Tokens` """ diff --git a/pywikibot/page/_basepage.py b/pywikibot/page/_basepage.py index 1d675fe091..85d255a68c 100644 --- a/pywikibot/page/_basepage.py +++ b/pywikibot/page/_basepage.py @@ -60,8 +60,7 @@ class BasePage(ComparableMixin): - """ - BasePage: Base object for a MediaWiki page. + """BasePage: Base object for a MediaWiki page. This object only implements internally methods that do not require reading from or writing to the wiki. All other methods are delegated @@ -79,8 +78,7 @@ class BasePage(ComparableMixin): ) def __init__(self, source, title: str = '', ns=0) -> None: - """ - Instantiate a Page object. + """Instantiate a Page object. Three calling formats are supported: @@ -144,8 +142,7 @@ def site(self): return self._link.site def version(self): - """ - Return MediaWiki version number of the page site. + """Return MediaWiki version number of the page site. This is needed to use @need_version() decorator for methods of Page objects. @@ -163,8 +160,7 @@ def data_repository(self): return self.site.data_repository() def namespace(self) -> Namespace: - """ - Return the namespace of the page. + """Return the namespace of the page. :return: namespace of the page """ @@ -172,8 +168,7 @@ def namespace(self) -> Namespace: @property def content_model(self): - """ - Return the content model for this page. + """Return the content model for this page. If it cannot be reliably determined via the API, None is returned. @@ -194,8 +189,7 @@ def depth(self) -> int: @property def pageid(self) -> int: - """ - Return pageid of the page. + """Return pageid of the page. :return: pageid or 0 if page does not exist """ @@ -218,8 +212,7 @@ def title( insite=None, without_brackets: bool = False ) -> str: - """ - Return the title of this Page, as a string. + """Return the title of this Page, as a string. :param underscore: (not used with as_link) if true, replace all ' ' characters with '_' @@ -314,8 +307,7 @@ def __repr__(self) -> str: return f'{self.__class__.__name__}({self.title()!r})' def _cmpkey(self): - """ - Key for comparison of Page objects. + """Key for comparison of Page objects. Page objects are "equal" if and only if they are on the same site and have the same normalized title, including section if any. @@ -325,8 +317,7 @@ def _cmpkey(self): return (self.site, self.namespace(), self.title()) def __hash__(self): - """ - A stable identifier to be used as a key in hash-tables. + """A stable identifier to be used as a key in hash-tables. This relies on the fact that the string representation of an instance cannot change after the construction. @@ -340,8 +331,7 @@ def full_url(self): @cached def autoFormat(self): - """ - Return :py:obj:`date.getAutoFormat` dictName and value, if any. + """Return :py:obj:`date.getAutoFormat` dictName and value, if any. Value can be a year, date, etc., and dictName is 'YearBC', 'Year_December', or another dictionary name. Please note that two @@ -411,8 +401,7 @@ def get(self, force: bool = False, get_redirect: bool = False) -> str: return text def has_content(self) -> bool: - """ - Page has been loaded. + """Page has been loaded. Not existing pages are considered loaded. 
@@ -428,8 +417,7 @@ def _latest_cached_revision(self): return None def _getInternals(self): - """ - Helper function for get(). + """Helper function for get(). Stores latest revision in self if it doesn't contain it, doesn't think. * Raises exceptions from previous runs. @@ -493,8 +481,7 @@ def latest_revision_id(self): @latest_revision_id.deleter def latest_revision_id(self) -> None: - """ - Remove the latest revision id set for this Page. + """Remove the latest revision id set for this Page. All internal cached values specifically for the latest revision of this page are cleared. @@ -608,8 +595,7 @@ def text(self) -> None: del self._raw_extracted_templates def preloadText(self) -> str: - """ - The text returned by EditFormPreloadText. + """The text returned by EditFormPreloadText. See API module "info". @@ -702,8 +688,7 @@ def extract(self, variant: str = 'plain', *, return '\n'.join(text_lines[:min(lines, len(text_lines))]) def properties(self, force: bool = False) -> dict: - """ - Return the properties of the page. + """Return the properties of the page. :param force: force updating from the live site """ @@ -713,8 +698,7 @@ def properties(self, force: bool = False) -> dict: return self._pageprops def defaultsort(self, force: bool = False) -> str | None: - """ - Extract value of the {{DEFAULTSORT:}} magic word from the page. + """Extract value of the {{DEFAULTSORT:}} magic word from the page. :param force: force updating from the live site """ @@ -765,8 +749,7 @@ def isIpEdit(self) -> bool: @cached def lastNonBotUser(self) -> str | None: - """ - Return name or IP address of last human/non-bot user to edit page. + """Return name or IP address of last human/non-bot user to edit page. Determine the most recent human editor out of the last revisions. If it was not able to retrieve a human user, returns None. @@ -887,8 +870,7 @@ def isTalkPage(self): return ns >= 0 and ns % 2 == 1 def toggleTalkPage(self) -> pywikibot.Page | None: - """ - Return other member of the article-talk page pair for this Page. + """Return other member of the article-talk page pair for this Page. If self is a talk page, returns the associated content page; otherwise, returns the associated talk page. The returned page need @@ -986,8 +968,7 @@ def getReferences(self, namespaces=None, total: int | None = None, content: bool = False) -> Iterable[pywikibot.Page]: - """ - Return an iterator all pages that refer to or embed the page. + """Return an iterator all pages that refer to or embed the page. If you need a full list of referring pages, use ``pages = list(s.getReferences())`` @@ -1026,8 +1007,7 @@ def backlinks(self, namespaces=None, total: int | None = None, content: bool = False) -> Iterable[pywikibot.Page]: - """ - Return an iterator for pages that link to this page. + """Return an iterator for pages that link to this page. :param follow_redirects: if True, also iterate pages that link to a redirect pointing to the page. @@ -1052,8 +1032,7 @@ def embeddedin(self, namespaces=None, total: int | None = None, content: bool = False) -> Iterable[pywikibot.Page]: - """ - Return an iterator for pages that embed this page as a template. + """Return an iterator for pages that embed this page as a template. :param filter_redirects: if True, only iterate redirects; if False, omit redirects; if None, do not filter @@ -1078,8 +1057,7 @@ def redirects( total: int | None = None, content: bool = False ) -> Iterable[pywikibot.Page]: - """ - Return an iterable of redirects to this page. 
+ """Return an iterable of redirects to this page. :param filter_fragments: If True, only return redirects with fragments. If False, only return redirects without fragments. If None, return @@ -1161,8 +1139,7 @@ def has_permission(self, action: str = 'edit') -> bool: return self.site.page_can_be_edited(self, action) def botMayEdit(self) -> bool: - """ - Determine whether the active bot is allowed to edit the page. + """Determine whether the active bot is allowed to edit the page. This will be True if the page doesn't contain {{bots}} or {{nobots}} or any other template from edit_restricted_templates list @@ -1458,8 +1435,7 @@ def put(self, newtext: str, **kwargs) def watch(self, unwatch: bool = False) -> bool: - """ - Add or remove this page to/from bot account's watchlist. + """Add or remove this page to/from bot account's watchlist. :param unwatch: True to unwatch, False (default) to watch. :return: True if successful, False otherwise. @@ -1474,8 +1450,7 @@ def clear_cache(self) -> None: delattr(self, attr) def purge(self, **kwargs) -> bool: - """ - Purge the server's cache for this page. + """Purge the server's cache for this page. :keyword redirects: Automatically resolve redirects. :type redirects: bool @@ -1570,8 +1545,7 @@ def interwiki( self, expand: bool = True, ) -> Generator[pywikibot.page.Link, None, None]: - """ - Yield interwiki links in the page text, excluding language links. + """Yield interwiki links in the page text, excluding language links. :param expand: if True (default), include interwiki links found in templates transcluded onto this page; if False, only iterate @@ -1602,8 +1576,7 @@ def langlinks( self, include_obsolete: bool = False, ) -> list[pywikibot.Link]: - """ - Return a list of all inter-language Links on this page. + """Return a list of all inter-language Links on this page. :param include_obsolete: if true, return even Link objects whose site is obsolete @@ -1733,8 +1706,7 @@ def imagelinks( total: int | None = None, content: bool = False, ) -> Iterable[pywikibot.FilePage]: - """ - Iterate FilePage objects for images displayed on this Page. + """Iterate FilePage objects for images displayed on this Page. :param total: iterate no more than this number of pages in total :param content: if True, retrieve the content of the current version @@ -1749,8 +1721,7 @@ def categories( total: int | None = None, content: bool = False, ) -> Iterable[pywikibot.Page]: - """ - Iterate categories that the article is in. + """Iterate categories that the article is in. :param with_sort_key: if True, include the sort key in each Category. :param total: iterate no more than this number of pages in total @@ -1775,8 +1746,7 @@ def categories( return self.site.pagecategories(self, total=total, content=content) def extlinks(self, total: int | None = None) -> Iterable[str]: - """ - Iterate all external URLs (not interwiki links) from this page. + """Iterate all external URLs (not interwiki links) from this page. :param total: iterate no more than this number of pages in total :return: a generator that yields str objects containing URLs. @@ -1784,8 +1754,7 @@ def extlinks(self, total: int | None = None) -> Iterable[str]: return self.site.page_extlinks(self, total=total) def coordinates(self, primary_only: bool = False): - """ - Return a list of Coordinate objects for points on the page. + """Return a list of Coordinate objects for points on the page. Uses the MediaWiki extension GeoData. 
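The link- and category-related docstrings above (getReferences, backlinks, redirects, categories, extlinks, coordinates) all describe iterators that honour a total limit. A short sketch of that pattern (placeholder site and title; coordinates() additionally requires the GeoData extension on the target wiki):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')    # placeholder site
    page = pywikibot.Page(site, 'Example')      # placeholder title
    for ref in page.backlinks(total=5):         # pages linking to this page
        print(ref.title())
    for cat in page.categories(total=5):        # categories the page is in
        print(cat.title(with_ns=False))
    print(list(page.extlinks(total=5)))         # external URLs as plain str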
@@ -1805,8 +1774,7 @@ def coordinates(self, primary_only: bool = False): return list(self._coords) def page_image(self): - """ - Return the most appropriate image on the page. + """Return the most appropriate image on the page. Uses the MediaWiki extension PageImages. @@ -1907,8 +1875,7 @@ def getVersionHistoryTable(self, def contributors(self, total: int | None = None, starttime=None, endtime=None): - """ - Compile contributors of this page with edit counts. + """Compile contributors of this page with edit counts. :param total: iterate no more than this number of revisions in total :param starttime: retrieve revisions starting at this Timestamp @@ -1967,8 +1934,7 @@ def move(self, movetalk: bool = True, noredirect: bool = False, movesubpages: bool = True) -> pywikibot.page.Page: - """ - Move this page to a new title. + """Move this page to a new title. .. versionchanged:: 7.2 The *movesubpages* parameter was added @@ -1997,8 +1963,7 @@ def delete( *, deletetalk: bool = False ) -> int: - """ - Delete the page from the wiki. Requires administrator status. + """Delete the page from the wiki. Requires administrator status. .. versionchanged:: 7.1 keyword only parameter *deletetalk* was added. @@ -2229,8 +2194,7 @@ def change_category(self, old_cat, new_cat, in_place: bool = True, include: list[str] | None = None, show_diff: bool = False) -> bool: - """ - Remove page from oldCat and add it to newCat. + """Remove page from oldCat and add it to newCat. .. versionadded:: 7.0 The `show_diff` parameter diff --git a/pywikibot/page/_category.py b/pywikibot/page/_category.py index 9bda6cd6e4..95b0993ccf 100644 --- a/pywikibot/page/_category.py +++ b/pywikibot/page/_category.py @@ -22,8 +22,7 @@ class Category(Page): """A page in the Category: namespace.""" def __init__(self, source, title: str = '', sort_key=None) -> None: - """ - Initializer. + """Initializer. All parameters are the same as for Page() Initializer. """ @@ -138,8 +137,7 @@ def articles(self, *, recurse: int | bool = False, total: int | None = None, **kwargs: Any) -> Generator[Page, None, None]: - """ - Yield all articles in the current category. + """Yield all articles in the current category. Yields all pages in the category that are not subcategories. Duplicates are filtered. To enable duplicates use :meth:`members` @@ -289,8 +287,7 @@ def newest_pages( self, total: int | None = None ) -> Generator[Page, None, None]: - """ - Return pages in a category ordered by the creation date. + """Return pages in a category ordered by the creation date. If two or more pages are created at the same time, the pages are returned in the order they were added to the category. The most diff --git a/pywikibot/page/_collections.py b/pywikibot/page/_collections.py index ee0edebd6b..17c0ad826e 100644 --- a/pywikibot/page/_collections.py +++ b/pywikibot/page/_collections.py @@ -25,8 +25,7 @@ class BaseDataDict(MutableMapping): - """ - Base structure holding data for a Wikibase entity. + """Base structure holding data for a Wikibase entity. Data are mappings from a language to a value. It will be specialised in subclasses. @@ -82,8 +81,7 @@ def normalizeKey(key) -> str: class LanguageDict(BaseDataDict): - """ - A structure holding language data for a Wikibase entity. + """A structure holding language data for a Wikibase entity. Language data are mappings from a language to a string. It can be labels, descriptions and others. 
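LanguageDict and the sibling collections covered below behave like ordinary mappings once an entity has been loaded. A minimal sketch against a data repository (the item id Q42 is only an example; labels, aliases and sitelinks are populated by ItemPage.get()):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')
    repo = site.data_repository()              # usually Wikidata
    item = pywikibot.ItemPage(repo, 'Q42')     # example item id
    item.get()                                 # fills labels, aliases, sitelinks
    print(item.labels.get('en'))               # LanguageDict: language -> str
    print(item.aliases.get('en', []))          # AliasesDict: language -> list of str
    print(item.sitelinks.get('enwiki'))        # SiteLinkCollection keyed by db name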
@@ -133,8 +131,7 @@ def toJSON(self, diffto: dict | None = None) -> dict: class AliasesDict(BaseDataDict): - """ - A structure holding aliases for a Wikibase entity. + """A structure holding aliases for a Wikibase entity. It is a mapping from a language to a list of strings. """ @@ -200,6 +197,7 @@ def toJSON(self, diffto: dict | None = None) -> dict: class ClaimCollection(MutableMapping): + """A structure holding claims for a Wikibase entity.""" def __init__(self, repo) -> None: @@ -319,11 +317,11 @@ def set_on_item(self, item) -> None: class SiteLinkCollection(MutableMapping): + """A structure holding SiteLinks for a Wikibase item.""" def __init__(self, repo, data=None) -> None: - """ - Initializer. + """Initializer. :param repo: the Wikibase site on which badges are defined :type repo: pywikibot.site.DataSite @@ -346,8 +344,7 @@ def fromJSON(cls, data, repo): @staticmethod def getdbName(site): - """ - Helper function to obtain a dbName for a Site. + """Helper function to obtain a dbName for a Site. :param site: The site to look up. :type site: pywikibot.site.BaseSite or str @@ -357,8 +354,7 @@ def getdbName(site): return site def __getitem__(self, key): - """ - Get the SiteLink with the given key. + """Get the SiteLink with the given key. :param key: site key as Site instance or db key :type key: pywikibot.Site or str @@ -380,8 +376,7 @@ def __setitem__( key: str | pywikibot.site.APISite, val: str | dict[str, Any] | pywikibot.page.SiteLink, ) -> None: - """ - Set the SiteLink for a given key. + """Set the SiteLink for a given key. This only sets the value given as str, dict or SiteLink. If a str or dict is given the SiteLink object is created later in @@ -424,8 +419,7 @@ def _extract_json(cls, obj): @classmethod def normalizeData(cls, data) -> dict: - """ - Helper function to expand data into the Wikibase API structure. + """Helper function to expand data into the Wikibase API structure. :param data: Data to normalize :type data: list or dict @@ -456,8 +450,7 @@ def normalizeData(cls, data) -> dict: return norm_data def toJSON(self, diffto: dict | None = None) -> dict: - """ - Create JSON suitable for Wikibase API. + """Create JSON suitable for Wikibase API. When diffto is provided, JSON representing differences to the provided data is created. @@ -501,8 +494,7 @@ class SubEntityCollection(MutableSequence): """Ordered collection of sub-entities indexed by their ids.""" def __init__(self, repo, data=None): - """ - Initializer. + """Initializer. :param repo: Wikibase site :type repo: pywikibot.site.DataSite @@ -570,8 +562,7 @@ def fromJSON(cls, data, repo): @classmethod def normalizeData(cls, data: list) -> dict: - """ - Helper function to expand data into the Wikibase API structure. + """Helper function to expand data into the Wikibase API structure. :param data: Data to normalize :type data: list @@ -581,8 +572,7 @@ def normalizeData(cls, data: list) -> dict: raise NotImplementedError # TODO def toJSON(self, diffto: dict | None = None) -> dict: - """ - Create JSON suitable for Wikibase API. + """Create JSON suitable for Wikibase API. When diffto is provided, JSON representing differences to the provided data is created. diff --git a/pywikibot/page/_filepage.py b/pywikibot/page/_filepage.py index d91c1819d2..d8b1b303d4 100644 --- a/pywikibot/page/_filepage.py +++ b/pywikibot/page/_filepage.py @@ -74,8 +74,7 @@ def __init__(self, source, title: str = '', *, ) def _load_file_revisions(self, imageinfo) -> None: - """ - Store an Image revision of FilePage (a FileInfo object) in local cache. 
+ """Save a file revision of FilePage (a FileInfo object) in local cache. Metadata shall be added lazily to the revision already present in cache. @@ -97,8 +96,7 @@ def _load_file_revisions(self, imageinfo) -> None: @property def latest_file_info(self): - """ - Retrieve and store information of latest Image rev. of FilePage. + """Retrieve and store information of latest Image rev. of FilePage. At the same time, the whole history of Image is fetched and cached in self._file_revisions @@ -112,8 +110,7 @@ def latest_file_info(self): @property def oldest_file_info(self): - """ - Retrieve and store information of oldest Image rev. of FilePage. + """Retrieve and store information of oldest Image rev. of FilePage. At the same time, the whole history of Image is fetched and cached in self._file_revisions @@ -126,8 +123,7 @@ def oldest_file_info(self): return self._file_revisions[oldest_ts] def get_file_info(self, ts) -> dict: - """ - Retrieve and store information of a specific Image rev. of FilePage. + """Retrieve and store information of a specific Image rev. of FilePage. This function will load also metadata. It is also used as a helper in FileInfo to load metadata lazily. @@ -142,8 +138,7 @@ def get_file_info(self, ts) -> dict: return self._file_revisions[ts] def get_file_history(self) -> dict: - """ - Return the file's version history. + """Return the file's version history. :return: dictionary with: key: timestamp of the entry @@ -285,8 +280,7 @@ def file_is_used(self) -> bool: return bool(list(self.using_pages(total=1))) def upload(self, source: str, **kwargs) -> bool: - """ - Upload this file to the wiki. + """Upload this file to the wiki. keyword arguments are from site.upload() method. @@ -425,8 +419,7 @@ def download(self, return False def globalusage(self, total=None): - """ - Iterate all global usage for this page. + """Iterate all global usage for this page. .. seealso:: :meth:`using_pages` @@ -438,8 +431,7 @@ def globalusage(self, total=None): return self.site.globalusage(self, total=total) def data_item(self): - """ - Convenience function to get the associated Wikibase item of the file. + """Function to get the associated Wikibase item of the file. If WikibaseMediaInfo extension is available (e.g., on Commons), the method returns the associated mediainfo entity. Otherwise, @@ -460,8 +452,7 @@ def data_item(self): class FileInfo: - """ - A structure holding imageinfo of latest rev. of FilePage. + """A structure holding imageinfo of latest rev. of FilePage. All keys of API imageinfo dictionary are mapped to FileInfo attributes. Attributes can be retrieved both as self['key'] or self.key. diff --git a/pywikibot/page/_links.py b/pywikibot/page/_links.py index 4373132d06..335067415f 100644 --- a/pywikibot/page/_links.py +++ b/pywikibot/page/_links.py @@ -34,8 +34,7 @@ class BaseLink(ComparableMixin): - """ - A MediaWiki link (local or interwiki). + """A MediaWiki link (local or interwiki). Has the following attributes: @@ -49,8 +48,7 @@ class BaseLink(ComparableMixin): _items = ('title', 'namespace', '_sitekey') def __init__(self, title: str, namespace=None, site=None) -> None: - """ - Initializer. + """Initializer. :param title: the title of the page linked to (str); does not include namespace or section @@ -86,8 +84,7 @@ def __repr__(self) -> str: return f"pywikibot.page.{type(self).__name__}({', '.join(attrs)})" def lookup_namespace(self): - """ - Look up the namespace given the provided namespace id or name. + """Look up the namespace given the provided namespace id or name. 
:rtype: pywikibot.Namespace """ @@ -112,8 +109,7 @@ def lookup_namespace(self): @property def site(self): - """ - Return the site of the link. + """Return the site of the link. :rtype: pywikibot.Site """ @@ -123,8 +119,7 @@ def site(self): @property def namespace(self): - """ - Return the namespace of the link. + """Return the namespace of the link. :rtype: pywikibot.Namespace """ @@ -140,8 +135,7 @@ def canonical_title(self) -> str: return self.title def ns_title(self, onsite=None): - """ - Return full page title, including namespace. + """Return full page title, including namespace. :param onsite: site object if specified, present title using onsite local namespace, @@ -169,8 +163,7 @@ def ns_title(self, onsite=None): return self.title def astext(self, onsite=None) -> str: - """ - Return a text representation of the link. + """Return a text representation of the link. :param onsite: if specified, present as a (possibly interwiki) link from the given site; otherwise, present as an internal link on @@ -192,8 +185,7 @@ def astext(self, onsite=None) -> str: return f'[[{self.site.sitename}:{title}]]' def _cmpkey(self): - """ - Key for comparison of BaseLink objects. + """Key for comparison of BaseLink objects. BaseLink objects are "equal" if and only if they are on the same site and have the same normalized title. @@ -212,8 +204,7 @@ def __hash__(self): @classmethod def fromPage(cls, page): # noqa: N802 - """ - Create a BaseLink to a Page. + """Create a BaseLink to a Page. :param page: target pywikibot.page.Page :type page: pywikibot.page.Page @@ -229,8 +220,7 @@ def fromPage(cls, page): # noqa: N802 class Link(BaseLink): - """ - A MediaWiki wikitext link (local or interwiki). + """A MediaWiki wikitext link (local or interwiki). Constructs a Link object based on a wikitext link and a source site. @@ -258,8 +248,7 @@ class Link(BaseLink): ) def __init__(self, text, source=None, default_namespace=0) -> None: - """ - Initializer. + """Initializer. :param text: the link text (everything appearing between [[ and ]] on a wiki page) @@ -332,8 +321,7 @@ def __init__(self, text, source=None, default_namespace=0) -> None: self._text = source.title(with_section=False) + self._text def parse_site(self) -> tuple: - """ - Parse only enough text to determine which site the link points to. + """Parse only enough text to determine which site the link points to. This method does not parse anything after the first ":"; links with multiple interwiki prefixes (such as "wikt:fr:Parlais") need @@ -372,8 +360,7 @@ def parse_site(self) -> tuple: return (fam.name, code) # text before : doesn't match any known prefix def parse(self): - """ - Parse wikitext of the link. + """Parse wikitext of the link. Called internally when accessing attributes. """ @@ -493,8 +480,7 @@ def parse(self): @property def site(self): - """ - Return the site of the link. + """Return the site of the link. :rtype: pywikibot.Site """ @@ -504,8 +490,7 @@ def site(self): @property def namespace(self): - """ - Return the namespace of the link. + """Return the namespace of the link. :rtype: pywikibot.Namespace """ @@ -535,8 +520,7 @@ def anchor(self) -> str: return self._anchor def astext(self, onsite=None): - """ - Return a text representation of the link. + """Return a text representation of the link. 
:param onsite: if specified, present as a (possibly interwiki) link from the given site; otherwise, present as an internal link on @@ -551,8 +535,7 @@ def astext(self, onsite=None): return text def _cmpkey(self): - """ - Key for comparison of Link objects. + """Key for comparison of Link objects. Link objects are "equal" if and only if they are on the same site and have the same normalized title, including section if any. @@ -563,8 +546,7 @@ def _cmpkey(self): @classmethod def fromPage(cls, page, source=None): # noqa: N802 - """ - Create a Link to a Page. + """Create a Link to a Page. :param page: target Page :type page: pywikibot.page.Page @@ -587,8 +569,7 @@ def fromPage(cls, page, source=None): # noqa: N802 @classmethod def langlinkUnsafe(cls, lang, title, source): # noqa: N802 - """ - Create a "lang:title" Link linked from source. + """Create a "lang:title" Link linked from source. Assumes that the lang & title come clean, no checks are made. @@ -628,8 +609,7 @@ def langlinkUnsafe(cls, lang, title, source): # noqa: N802 @classmethod def create_separated(cls, link, source, default_namespace=0, section=None, label=None): - """ - Create a new instance but overwrite section or label. + """Create a new instance but overwrite section or label. The returned Link instance is already parsed. @@ -661,8 +641,7 @@ def create_separated(cls, link, source, default_namespace=0, section=None, class SiteLink(BaseLink): - """ - A single sitelink in a Wikibase item. + """A single sitelink in a Wikibase item. Extends BaseLink by the following attribute: @@ -675,8 +654,7 @@ class SiteLink(BaseLink): _items = ('_sitekey', '_rawtitle', 'badges') def __init__(self, title, site=None, badges=None) -> None: - """ - Initializer. + """Initializer. :param title: the title of the linked page including namespace :type title: str @@ -699,8 +677,7 @@ def __init__(self, title, site=None, badges=None) -> None: @staticmethod def _parse_namespace(title, site=None): - """ - Parse enough of a title with a ':' to determine the namespace. + """Parse enough of a title with a ':' to determine the namespace. :param site: the Site object for the wiki linked to. Can be provided as either a Site instance or a db key, defaults to pywikibot.Site(). @@ -727,8 +704,7 @@ def _parse_namespace(title, site=None): @property def badges(self): - """ - Return a list of all badges associated with the link. + """Return a list of all badges associated with the link. :rtype: [pywikibot.ItemPage] """ @@ -740,8 +716,7 @@ def fromJSON( # noqa: N802 data: dict[str, Any], site: pywikibot.site.DataSite | None = None, ) -> SiteLink: - """ - Create a SiteLink object from JSON returned in the API call. + """Create a SiteLink object from JSON returned in the API call. :param data: JSON containing SiteLink data :param site: The Wikibase site @@ -754,8 +729,7 @@ def fromJSON( # noqa: N802 return sl def toJSON(self) -> dict[str, str | list[str]]: # noqa: N802 - """ - Convert the SiteLink to a JSON object for the Wikibase API. + """Convert the SiteLink to a JSON object for the Wikibase API. :return: Wikibase JSON """ @@ -807,8 +781,7 @@ def toJSON(self) -> dict[str, str | list[str]]: # noqa: N802 def html2unicode(text: str, ignore=None, exceptions=None) -> str: - """ - Replace HTML entities with equivalent unicode. + """Replace HTML entities with equivalent unicode. 
:param ignore: HTML entities to ignore :param ignore: list of int diff --git a/pywikibot/page/_page.py b/pywikibot/page/_page.py index 991a98563b..48c74c8c6f 100644 --- a/pywikibot/page/_page.py +++ b/pywikibot/page/_page.py @@ -184,8 +184,7 @@ def set_redirect_target( self.save(**kwargs) def get_best_claim(self, prop: str): - """ - Return the first best Claim for this page. + """Return the first best Claim for this page. Return the first 'preferred' ranked Claim specified by Wikibase property or the first 'normal' one otherwise. diff --git a/pywikibot/page/_user.py b/pywikibot/page/_user.py index 01ed5e6194..49e76e80b5 100644 --- a/pywikibot/page/_user.py +++ b/pywikibot/page/_user.py @@ -26,15 +26,13 @@ class User(Page): - """ - A class that represents a Wiki user. + """A class that represents a Wiki user. This class also represents the Wiki page User: """ def __init__(self, source, title: str = '') -> None: - """ - Initializer for a User object. + """Initializer for a User object. All parameters are the same as for Page() Initializer. """ @@ -56,8 +54,7 @@ def __init__(self, source, title: str = '') -> None: @property def username(self) -> str: - """ - The username. + """The username. Convenience method that returns the title of the page with namespace prefix omitted, which is the username. @@ -67,8 +64,7 @@ def username(self) -> str: return self.title(with_ns=False) def isRegistered(self, force: bool = False) -> bool: # noqa: N802 - """ - Determine if the user is registered on the site. + """Determine if the user is registered on the site. It is possible to have a page named User:xyz and not have a corresponding user with username xyz. @@ -91,8 +87,7 @@ def is_CIDR(self) -> bool: # noqa: N802 return is_ip_network(self.username) def getprops(self, force: bool = False) -> dict: - """ - Return a properties about the user. + """Return a properties about the user. :param force: if True, forces reloading the data from API """ @@ -109,8 +104,7 @@ def getprops(self, force: bool = False) -> dict: def registration(self, force: bool = False) -> pywikibot.Timestamp | None: - """ - Fetch registration date for this user. + """Fetch registration date for this user. :param force: if True, forces reloading the data from API """ @@ -121,8 +115,7 @@ def registration(self, return None def editCount(self, force: bool = False) -> int: # noqa: N802 - """ - Return edit count for a registered user. + """Return edit count for a registered user. Always returns 0 for 'anonymous' users. @@ -162,16 +155,14 @@ def is_locked(self, force: bool = False) -> bool: return self.site.is_locked(self.username, force) def isEmailable(self, force: bool = False) -> bool: # noqa: N802 - """ - Determine whether emails may be send to this user through MediaWiki. + """Determine whether emails may be send to this user through MediaWiki. :param force: if True, forces reloading the data from API """ return not self.isAnonymous() and 'emailable' in self.getprops(force) def groups(self, force: bool = False) -> list: - """ - Return a list of groups to which this user belongs. + """Return a list of groups to which this user belongs. The list of groups may be empty. @@ -199,8 +190,7 @@ def rights(self, force: bool = False) -> list: return self.getprops(force).get('rights', []) def getUserPage(self, subpage: str = '') -> Page: # noqa: N802 - """ - Return a Page object relative to this user's main page. + """Return a Page object relative to this user's main page. 
:param subpage: subpage part to be appended to the main page title (optional) @@ -216,8 +206,7 @@ def getUserPage(self, subpage: str = '') -> Page: # noqa: N802 return Page(Link(self.title() + subpage, self.site)) def getUserTalkPage(self, subpage: str = '') -> Page: # noqa: N802 - """ - Return a Page object relative to this user's main talk page. + """Return a Page object relative to this user's main talk page. :param subpage: subpage part to be appended to the main talk page title (optional) @@ -234,8 +223,7 @@ def getUserTalkPage(self, subpage: str = '') -> Page: # noqa: N802 self.site, default_namespace=3)) def send_email(self, subject: str, text: str, ccme: bool = False) -> bool: - """ - Send an email to this user via MediaWiki's email interface. + """Send an email to this user via MediaWiki's email interface. :param subject: the subject header of the mail :param text: mail body @@ -266,8 +254,7 @@ def send_email(self, subject: str, text: str, ccme: bool = False) -> bool: and maildata['emailuser']['result'] == 'Success') def block(self, *args, **kwargs): - """ - Block user. + """Block user. Refer :py:obj:`APISite.blockuser` method for parameters. @@ -282,8 +269,7 @@ def block(self, *args, **kwargs): raise def unblock(self, reason: str | None = None) -> None: - """ - Remove the block for the user. + """Remove the block for the user. :param reason: Reason for the unblock. """ @@ -424,8 +410,7 @@ def deleted_contributions( yield page, Revision(**contrib) def uploadedImages(self, total: int = 10): # noqa: N802 - """ - Yield tuples describing files uploaded by this user. + """Yield tuples describing files uploaded by this user. Each tuple is composed of a pywikibot.Page, the timestamp (str in ISO8601 format), comment (str) and a bool for pageid > 0. @@ -443,8 +428,7 @@ def uploadedImages(self, total: int = 10): # noqa: N802 @property def is_thankable(self) -> bool: - """ - Determine if the user has thanks notifications enabled. + """Determine if the user has thanks notifications enabled. .. note:: This doesn't accurately determine if thanks is enabled for user. diff --git a/pywikibot/page/_wikibase.py b/pywikibot/page/_wikibase.py index 7382c3c12c..bea70cd926 100644 --- a/pywikibot/page/_wikibase.py +++ b/pywikibot/page/_wikibase.py @@ -1,5 +1,4 @@ -""" -Objects representing various types of Wikibase pages and structures. +"""Objects representing various types of Wikibase pages and structures. This module also includes objects: @@ -78,8 +77,7 @@ class WikibaseEntity: - """ - The base interface for Wikibase entities. + """The base interface for Wikibase entities. Each entity is identified by a data repository it belongs to and an identifier. @@ -100,8 +98,7 @@ class WikibaseEntity: DATA_ATTRIBUTES: dict[str, Any] = {} def __init__(self, repo, id_: str | None = None) -> None: - """ - Initializer. + """Initializer. :param repo: Entity repository. :type repo: DataSite @@ -122,8 +119,7 @@ def __repr__(self) -> str: @classmethod def is_valid_id(cls, entity_id: str) -> bool: - """ - Whether the string can be a valid id of the entity type. + """Whether the string can be a valid id of the entity type. :param entity_id: The ID to test. """ @@ -147,8 +143,7 @@ def _initialize_empty(self): setattr(self, key, cls.new_empty(self.repo)) def _defined_by(self, singular: bool = False) -> dict[str, str]: - """ - Internal function to provide the API parameters to identify the entity. + """Function to provide the API parameters to identify the entity. 
An empty dict is returned if the entity has not been created yet. @@ -165,8 +160,7 @@ def _defined_by(self, singular: bool = False) -> dict[str, str]: return params def getID(self, numeric: bool = False) -> int | str: - """ - Get the identifier of this entity. + """Get the identifier of this entity. :param numeric: Strip the first letter and return an int """ @@ -175,16 +169,14 @@ def getID(self, numeric: bool = False) -> int | str: return self.id def get_data_for_new_entity(self) -> dict: - """ - Return data required for creation of a new entity. + """Return data required for creation of a new entity. Override it if you need. """ return {} def toJSON(self, diffto: dict | None = None) -> dict: - """ - Create JSON suitable for Wikibase API. + """Create JSON suitable for Wikibase API. When diffto is provided, JSON representing differences to the provided data is created. @@ -206,8 +198,7 @@ def toJSON(self, diffto: dict | None = None) -> dict: @classmethod def _normalizeData(cls, data: dict) -> dict: - """ - Helper function to expand data into the Wikibase API structure. + """Helper function to expand data into the Wikibase API structure. :param data: The dict to normalize :return: The dict with normalized data @@ -220,8 +211,7 @@ def _normalizeData(cls, data: dict) -> dict: @property def latest_revision_id(self) -> int | None: - """ - Get the revision identifier for the most recent revision of the entity. + """Get the revision id for the most recent revision of the entity. :rtype: int or None if it cannot be determined :raise NoWikibaseEntityError: if the entity doesn't exist @@ -252,8 +242,7 @@ def exists(self) -> bool: return 'missing' not in self._content def get(self, force: bool = False) -> dict: - """ - Fetch all entity data and cache it. + """Fetch all entity data and cache it. :param force: override caching :raise NoWikibaseEntityError: if this entity doesn't exist @@ -360,8 +349,7 @@ def editEntity( target_ref.hash = ref_stat['hash'] def concept_uri(self) -> str: - """ - Return the full concept URI. + """Return the full concept URI. :raise NoWikibaseEntityError: if this entity's id is not known """ @@ -408,8 +396,7 @@ def _assert_has_id(self): self.id = 'M' + str(self.file.pageid) def _defined_by(self, singular: bool = False) -> dict: - """ - Internal function to provide the API parameters to identify the entity. + """Function to provide the API parameters to identify the entity. .. versionadded:: 8.5 @@ -591,8 +578,7 @@ def removeClaims(self, claims, **kwargs) -> None: class WikibasePage(BasePage, WikibaseEntity): - """ - Mixin base class for Wikibase entities which are also pages (eg. items). + """Mixin base class for Wikibase entities which are also pages (eg. items). There should be no need to instantiate this directly. """ @@ -600,8 +586,7 @@ class WikibasePage(BasePage, WikibaseEntity): _cache_attrs = (*BasePage._cache_attrs, '_content') def __init__(self, site, title: str = '', **kwargs) -> None: - """ - Initializer. + """Initializer. If title is provided, either ns or entity_type must also be provided, and will be checked against the title parsed using the Page @@ -697,8 +682,7 @@ def __init__(self, site, title: str = '', **kwargs) -> None: self._link.title) def namespace(self) -> int: - """ - Return the number of the namespace of the entity. + """Return the number of the namespace of the entity. 
:return: Namespace id """ @@ -715,8 +699,7 @@ def exists(self) -> bool: return 'missing' not in self._content def botMayEdit(self) -> bool: - """ - Return whether bots may edit this page. + """Return whether bots may edit this page. Because there is currently no system to mark a page that it shouldn't be edited by bots on Wikibase pages it always returns True. The content @@ -728,8 +711,7 @@ def botMayEdit(self) -> bool: return True def get(self, force: bool = False, *args, **kwargs) -> dict: - """ - Fetch all page data, and cache it. + """Fetch all page data, and cache it. :param force: override caching :raise NotImplementedError: a value in args or kwargs @@ -763,8 +745,7 @@ def get(self, force: bool = False, *args, **kwargs) -> dict: @property def latest_revision_id(self) -> int: - """ - Get the revision identifier for the most recent revision of the entity. + """Get the revision id for the most recent revision of the entity. :rtype: int :raise pywikibot.exceptions.NoPageError: if the entity doesn't exist @@ -886,8 +867,7 @@ def set_redirect_target( save: bool = True, **kwargs ): - """ - Set target of a redirect for a Wikibase page. + """Set target of a redirect for a Wikibase page. Has not been implemented in the Wikibase API yet, except for ItemPage. """ @@ -895,8 +875,7 @@ def set_redirect_target( @allow_asynchronous def addClaim(self, claim, bot: bool = True, **kwargs): - """ - Add a claim to the entity. + """Add a claim to the entity. :param claim: The claim to add :type claim: pywikibot.page.Claim @@ -919,8 +898,7 @@ def addClaim(self, claim, bot: bool = True, **kwargs): claim.on_item = self def removeClaims(self, claims, **kwargs) -> None: - """ - Remove the claims from the entity. + """Remove the claims from the entity. :param claims: list of claims to be removed :type claims: list or pywikibot.Claim @@ -960,8 +938,7 @@ class ItemPage(WikibasePage): } def __init__(self, site, title=None, ns=None) -> None: - """ - Initializer. + """Initializer. :param site: data repository :type site: pywikibot.site.DataSite @@ -989,8 +966,7 @@ def __init__(self, site, title=None, ns=None) -> None: assert self.id == self._link.title def _defined_by(self, singular: bool = False) -> dict: - """ - Internal function to provide the API parameters to identify the item. + """Function to provide the API parameters to identify the item. The API parameters may be 'id' if the ItemPage has one, or 'site'&'title' if instantiated via ItemPage.fromPage with @@ -1036,8 +1012,7 @@ def _defined_by(self, singular: bool = False) -> dict: return params def title(self, **kwargs): - """ - Return ID as title of the ItemPage. + """Return ID as title of the ItemPage. If the ItemPage was lazy-loaded via ItemPage.fromPage, this method will fetch the Wikibase item ID for the page, potentially raising @@ -1070,8 +1045,7 @@ def title(self, **kwargs): return super().title(**kwargs) def getID(self, numeric: bool = False, force: bool = False): - """ - Get the entity identifier. + """Get the entity identifier. :param numeric: Strip the first letter and return an int :param force: Force an update of new data @@ -1082,8 +1056,7 @@ def getID(self, numeric: bool = False, force: bool = False): @classmethod def fromPage(cls, page, lazy_load: bool = False): - """ - Get the ItemPage for a Page that links to it. + """Get the ItemPage for a Page that links to it. 
:param page: Page to look for corresponding data item :type page: pywikibot.page.Page @@ -1122,8 +1095,7 @@ def fromPage(cls, page, lazy_load: bool = False): @classmethod def from_entity_uri(cls, site, uri: str, lazy_load: bool = False): - """ - Get the ItemPage from its entity uri. + """Get the ItemPage from its entity uri. :param site: The Wikibase site for the item. :type site: pywikibot.site.DataSite @@ -1160,8 +1132,7 @@ def get( *args, **kwargs ) -> dict[str, Any]: - """ - Fetch all item data, and cache it. + """Fetch all item data, and cache it. :param force: override caching :param get_redirect: return the item content, do not follow the @@ -1211,8 +1182,7 @@ def getRedirectTarget(self, *, ignore_section: bool = True): return self.__class__(target.site, target.title(), target.namespace()) def iterlinks(self, family=None): - """ - Iterate through all the sitelinks. + """Iterate through all the sitelinks. :param family: string/Family object which represents what family of links to iterate @@ -1269,8 +1239,7 @@ def setSitelink(self, sitelink: SITELINK_TYPE, **kwargs) -> None: self.setSitelinks([sitelink], **kwargs) def removeSitelink(self, site: LANGUAGE_IDENTIFIER, **kwargs) -> None: - """ - Remove a sitelink. + """Remove a sitelink. A site can either be a Site object, or it can be a dbName. """ @@ -1278,8 +1247,7 @@ def removeSitelink(self, site: LANGUAGE_IDENTIFIER, **kwargs) -> None: def removeSitelinks(self, sites: list[LANGUAGE_IDENTIFIER], **kwargs ) -> None: - """ - Remove sitelinks. + """Remove sitelinks. Sites should be a list, with values either being Site objects, or dbNames. @@ -1303,8 +1271,7 @@ def setSitelinks(self, sitelinks: list[SITELINK_TYPE], **kwargs) -> None: self.editEntity(data, **kwargs) def mergeInto(self, item, **kwargs) -> None: - """ - Merge the item into another item. + """Merge the item into another item. :param item: The item to merge into :type item: pywikibot.page.ItemPage @@ -1370,8 +1337,7 @@ def isRedirectPage(self): class Property: - """ - A Wikibase property. + """A Wikibase property. While every Wikibase property has a Page on the data repository, this object is for when the property is used as part of another concept @@ -1420,8 +1386,7 @@ class Property: } def __init__(self, site, id: str, datatype: str | None = None) -> None: - """ - Initializer. + """Initializer. :param site: data repository :type site: pywikibot.site.DataSite @@ -1459,8 +1424,7 @@ def type(self) -> str: return self.repo.get_property_type(self) def getID(self, numeric: bool = False): - """ - Get the identifier of this property. + """Get the identifier of this property. :param numeric: Strip the first letter and return an int """ @@ -1494,8 +1458,7 @@ class PropertyPage(WikibasePage, Property): } def __init__(self, source, title=None, datatype=None) -> None: - """ - Initializer. + """Initializer. :param source: data repository property is on :type source: pywikibot.site.DataSite @@ -1523,8 +1486,7 @@ def __init__(self, source, title=None, datatype=None) -> None: Property.__init__(self, source, self.id) def get(self, force: bool = False, *args, **kwargs) -> dict: - """ - Fetch the property entity, and cache it. + """Fetch the property entity, and cache it. :param force: override caching :raise NotImplementedError: a value in args or kwargs @@ -1552,8 +1514,7 @@ def newClaim(self, *args, **kwargs) -> Claim: **kwargs) def getID(self, numeric: bool = False): - """ - Get the identifier of this property. + """Get the identifier of this property. 
:param numeric: Strip the first letter and return an int """ @@ -1571,8 +1532,7 @@ def get_data_for_new_entity(self): class Claim(Property): - """ - A Claim on a Wikibase entity. + """A Claim on a Wikibase entity. Claims are standard claims as well as references and qualifiers. """ @@ -1614,8 +1574,7 @@ def __init__( rank: str = 'normal', **kwargs ) -> None: - """ - Initializer. + """Initializer. Defined by the "snak" value, supplemented by site + pid @@ -1733,8 +1692,7 @@ def same_as( return True def copy(self): - """ - Create an independent copy of this object. + """Create an independent copy of this object. :rtype: pywikibot.page.Claim """ @@ -1806,8 +1764,7 @@ def fromJSON(cls, site, data: dict[str, Any]) -> Claim: @classmethod def referenceFromJSON(cls, site, data) -> dict: - """ - Create a dict of claims from reference JSON returned in the API call. + """Create a dict of claims from reference JSON fetched in the API call. Reference objects are represented a bit differently, and require some more handling. @@ -1830,8 +1787,7 @@ def referenceFromJSON(cls, site, data) -> dict: @classmethod def qualifierFromJSON(cls, site, data): - """ - Create a Claim for a qualifier from JSON. + """Create a Claim for a qualifier from JSON. Qualifier objects are represented a bit differently like references, but I'm not @@ -1892,8 +1848,7 @@ def toJSON(self) -> dict: return data def setTarget(self, value): - """ - Set the target value in the local object. + """Set the target value in the local object. :param value: The new target value. :type value: object @@ -1912,8 +1867,7 @@ def changeTarget( snaktype: str = 'value', **kwargs ) -> None: - """ - Set the target value in the data repository. + """Set the target value in the data repository. :param value: The new target value. :type value: object @@ -1931,8 +1885,7 @@ def changeTarget( self.on_item.latest_revision_id = data['pageinfo']['lastrevid'] def getTarget(self): - """ - Return the target value of this Claim. + """Return the target value of this Claim. None is returned if no target is set @@ -1941,16 +1894,14 @@ def getTarget(self): return self.target def getSnakType(self) -> str: - """ - Return the type of snak. + """Return the type of snak. :return: str ('value', 'somevalue' or 'novalue') """ return self.snaktype def setSnakType(self, value): - """ - Set the type of snak. + """Set the type of snak. :param value: Type of snak :type value: str ('value', 'somevalue', or 'novalue') @@ -1978,8 +1929,7 @@ def changeRank(self, rank, **kwargs): return self.on_item.repo.save_claim(self, **kwargs) def changeSnakType(self, value=None, **kwargs) -> None: - """ - Save the new snak value. + """Save the new snak value. TODO: Is this function really needed? """ @@ -1992,8 +1942,7 @@ def getSources(self) -> list: return self.sources def addSource(self, claim, **kwargs) -> None: - """ - Add the claim as a source. + """Add the claim as a source. :param claim: the claim to add :type claim: pywikibot.Claim @@ -2001,8 +1950,7 @@ def addSource(self, claim, **kwargs) -> None: self.addSources([claim], **kwargs) def addSources(self, claims, **kwargs): - """ - Add the claims as one source. + """Add the claims as one source. :param claims: the claims to add :type claims: list of pywikibot.Claim @@ -2026,8 +1974,7 @@ def addSources(self, claims, **kwargs): self.sources.append(source) def removeSource(self, source, **kwargs) -> None: - """ - Remove the source. Call removeSources(). + """Remove the source. Call removeSources(). 
:param source: the source to remove :type source: pywikibot.Claim @@ -2035,8 +1982,7 @@ def removeSource(self, source, **kwargs) -> None: self.removeSources([source], **kwargs) def removeSources(self, sources, **kwargs) -> None: - """ - Remove the sources. + """Remove the sources. :param sources: the sources to remove :type sources: list of pywikibot.Claim @@ -2071,8 +2017,7 @@ def addQualifier(self, qualifier, **kwargs): self.qualifiers[qualifier.getID()] = [qualifier] def removeQualifier(self, qualifier, **kwargs) -> None: - """ - Remove the qualifier. Call removeQualifiers(). + """Remove the qualifier. Call removeQualifiers(). :param qualifier: the qualifier to remove :type qualifier: pywikibot.page.Claim @@ -2080,8 +2025,7 @@ def removeQualifier(self, qualifier, **kwargs) -> None: self.removeQualifiers([qualifier], **kwargs) def removeQualifiers(self, qualifiers, **kwargs) -> None: - """ - Remove the qualifiers. + """Remove the qualifiers. :param qualifiers: the qualifiers to remove :type qualifiers: list of pywikibot.Claim @@ -2095,8 +2039,7 @@ def removeQualifiers(self, qualifiers, **kwargs) -> None: qualifier.on_item = None def target_equals(self, value) -> bool: - """ - Check whether the Claim's target is equal to specified value. + """Check whether the Claim's target is equal to specified value. The function checks for: @@ -2137,8 +2080,7 @@ def target_equals(self, value) -> bool: return self.target == value def has_qualifier(self, qualifier_id: str, target) -> bool: - """ - Check whether Claim contains specified qualifier. + """Check whether Claim contains specified qualifier. :param qualifier_id: id of the qualifier :param target: qualifier target to check presence of @@ -2177,8 +2119,7 @@ def _formatValue(self) -> dict: return value def _formatDataValue(self) -> dict: - """ - Format the target into the proper JSON datavalue that Wikibase wants. + """Format the target into the proper JSON datavalue for Wikibase. :return: Wikibase API representation with type and value. """ @@ -2224,8 +2165,7 @@ class LexemePage(WikibasePage): } def __init__(self, site, title=None) -> None: - """ - Initializer. + """Initializer. :param site: data repository :type site: pywikibot.site.DataSite @@ -2251,8 +2191,7 @@ def get_data_for_new_entity(self): raise NotImplementedError # TODO def toJSON(self, diffto: dict | None = None) -> dict: - """ - Create JSON suitable for Wikibase API. + """Create JSON suitable for Wikibase API. When diffto is provided, JSON representing differences to the provided data is created. @@ -2271,8 +2210,7 @@ def toJSON(self, diffto: dict | None = None) -> dict: return data def get(self, force=False, get_redirect=False, *args, **kwargs): - """ - Fetch all lexeme data, and cache it. + """Fetch all lexeme data, and cache it. :param force: override caching :type force: bool @@ -2308,8 +2246,7 @@ def get(self, force=False, get_redirect=False, *args, **kwargs): @classmethod def _normalizeData(cls, data: dict) -> dict: - """ - Helper function to expand data into the Wikibase API structure. + """Helper function to expand data into the Wikibase API structure. :param data: The dict to normalize :return: the altered dict from parameter data. @@ -2326,8 +2263,7 @@ def _normalizeData(cls, data: dict) -> dict: @allow_asynchronous def add_form(self, form, **kwargs): - """ - Add a form to the lexeme. + """Add a form to the lexeme. 
:param form: The form to add :type form: Form @@ -2356,8 +2292,7 @@ def add_form(self, form, **kwargs): self.latest_revision_id = data['lastrevid'] def remove_form(self, form, **kwargs) -> None: - """ - Remove a form from the lexeme. + """Remove a form from the lexeme. :param form: The form to remove :type form: pywikibot.LexemeForm @@ -2371,8 +2306,7 @@ def remove_form(self, form, **kwargs) -> None: # todo: senses def mergeInto(self, lexeme, **kwargs): - """ - Merge the lexeme into another lexeme. + """Merge the lexeme into another lexeme. :param lexeme: The lexeme to merge into :type lexeme: LexemePage @@ -2437,8 +2371,7 @@ def on_lexeme(self): @allow_asynchronous def addClaim(self, claim, **kwargs): - """ - Add a claim to the form. + """Add a claim to the form. :param claim: The claim to add :type claim: Claim @@ -2458,8 +2391,7 @@ def addClaim(self, claim, **kwargs): claim.on_item = self def removeClaims(self, claims, **kwargs) -> None: - """ - Remove the claims from the form. + """Remove the claims from the form. :param claims: list of claims to be removed :type claims: list or pywikibot.Claim @@ -2511,8 +2443,7 @@ def _normalizeData(cls, data): return new_data def get(self, force: bool = False) -> dict: - """ - Fetch all form data, and cache it. + """Fetch all form data, and cache it. :param force: override caching @@ -2532,8 +2463,7 @@ def get(self, force: bool = False) -> dict: return data def edit_elements(self, data: dict, **kwargs) -> None: - """ - Update form elements. + """Update form elements. :param data: Data to be saved """ diff --git a/pywikibot/pagegenerators/__init__.py b/pywikibot/pagegenerators/__init__.py index 64c8089644..b6a0abf215 100644 --- a/pywikibot/pagegenerators/__init__.py +++ b/pywikibot/pagegenerators/__init__.py @@ -567,8 +567,7 @@ def PageClassGenerator(generator: Iterable[pywikibot.page.Page] ) -> Generator[pywikibot.page.Page, None, None]: - """ - Yield pages from another generator as Page subclass objects. + """Yield pages from another generator as Page subclass objects. The page class type depends on the page namespace. Objects may be Category, FilePage, Userpage or Page. @@ -714,8 +713,7 @@ def PreloadingEntityGenerator( generator: Iterable[pywikibot.page.WikibaseEntity], groupsize: int = 50, ) -> Generator[pywikibot.page.WikibaseEntity, None, None]: - """ - Yield preloaded pages taken from another generator. + """Yield preloaded pages taken from another generator. Function basically is copied from above, but for Wikibase entities. diff --git a/pywikibot/pagegenerators/_factory.py b/pywikibot/pagegenerators/_factory.py index 0e2c135db9..b61104717c 100644 --- a/pywikibot/pagegenerators/_factory.py +++ b/pywikibot/pagegenerators/_factory.py @@ -91,8 +91,7 @@ def __init__(self, site: BaseSite | None = None, positional_arg_name: str | None = None, enabled_options: Iterable[str] | None = None, disabled_options: Iterable[str] | None = None) -> None: - """ - Initializer. + """Initializer. :param site: Site for generator results :param positional_arg_name: generator to use for positional args, @@ -160,8 +159,7 @@ def _validate_options(self, @property def site(self) -> pywikibot.site.BaseSite: - """ - Generator site. + """Generator site. The generator site should not be accessed until after the global arguments have been handled, otherwise the default Site may be changed @@ -177,8 +175,7 @@ def site(self) -> pywikibot.site.BaseSite: @property def namespaces(self) -> frozenset[pywikibot.site.Namespace]: - """ - List of Namespace parameters. 
+ """List of Namespace parameters. Converts int or string namespaces to Namespace objects and change the storage to immutable once it has been accessed. @@ -317,8 +314,7 @@ def getCombinedGenerator(self, # noqa: N802 def getCategory(self, category: str # noqa: N802 ) -> tuple[pywikibot.Category, str | None]: - """ - Return Category and start as defined by category. + """Return Category and start as defined by category. :param category: category name with start parameter """ @@ -347,8 +343,7 @@ def getCategoryGen(self, category: str, # noqa: N802 recurse: int | bool = False, content: bool = False, gen_func: Callable | None = None) -> Any: - """ - Return generator based on Category defined by category and gen_func. + """Return generator based on Category defined by category and gen_func. :param category: category name with start parameter :param recurse: if not False or 0, also iterate articles in diff --git a/pywikibot/pagegenerators/_filters.py b/pywikibot/pagegenerators/_filters.py index 5ee3a71c5f..a9b3203799 100644 --- a/pywikibot/pagegenerators/_filters.py +++ b/pywikibot/pagegenerators/_filters.py @@ -50,8 +50,7 @@ def NamespaceFilterPageGenerator( | Sequence[str | Namespace], site: BaseSite | None = None, ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - A generator yielding pages from another generator in given namespaces. + """A generator yielding pages from another generator in given namespaces. If a site is provided, the namespaces are validated using the namespaces of that site, otherwise the namespaces are validated using the default @@ -87,8 +86,7 @@ def PageTitleFilterPageGenerator( generator: Iterable[pywikibot.page.BasePage], ignore_list: dict[str, dict[str, str]], ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Yield only those pages are not listed in the ignore list. + """Yield only those pages are not listed in the ignore list. :param ignore_list: family names are mapped to dictionaries in which language codes are mapped to lists of page titles. Each title must @@ -115,8 +113,7 @@ def RedirectFilterPageGenerator( no_redirects: bool = True, show_filtered: bool = False, ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Yield pages from another generator that are redirects or not. + """Yield pages from another generator that are redirects or not. :param no_redirects: Exclude redirects if True, else only include redirects. @@ -150,8 +147,7 @@ def __filter_match(cls, prop: str, claim: str, qualifiers: dict[str, str]) -> bool: - """ - Return true if the page contains the claim given. + """Return true if the page contains the claim given. :param page: the page to check :return: true if page contains the claim, false otherwise @@ -188,8 +184,7 @@ def filter( qualifiers: dict[str, str] | None = None, negate: bool = False, ) -> Generator[pywikibot.page.WikibasePage, None, None]: - """ - Yield all ItemPages which contain certain claim in a property. + """Yield all ItemPages which contain certain claim in a property. :param prop: property id to check :param claim: value of the property to check. Can be exact value (for @@ -213,8 +208,7 @@ def SubpageFilterGenerator(generator: Iterable[pywikibot.page.BasePage], max_depth: int = 0, show_filtered: bool = False ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Generator which filters out subpages based on depth. + """Generator which filters out subpages based on depth. It looks at the namespace of each page and checks if that namespace has subpages enabled. 
If so, pages with forward slashes ('/') are excluded. @@ -325,8 +319,7 @@ def QualityFilterPageGenerator( generator: Iterable[pywikibot.page.BasePage], quality: list[int], ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Wrap a generator to filter pages according to quality levels. + """Wrap a generator to filter pages according to quality levels. This is possible only for pages with content_model 'proofread-page'. In all the other cases, no filter is applied. @@ -347,8 +340,7 @@ def CategoryFilterPageGenerator( generator: Iterable[pywikibot.page.BasePage], category_list: Sequence[pywikibot.page.Category], ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Wrap a generator to filter pages by categories specified. + """Wrap a generator to filter pages by categories specified. :param generator: A generator object :param category_list: categories used to filter generated pages @@ -377,8 +369,7 @@ def EdittimeFilterPageGenerator( first_edit_end: datetime.datetime | None = None, show_filtered: bool = False, ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Wrap a generator to filter pages outside last or first edit range. + """Wrap a generator to filter pages outside last or first edit range. :param generator: A generator object :param last_edit_start: Only yield pages last edited after this time @@ -438,8 +429,7 @@ def UserEditFilterGenerator( max_revision_depth: int | None = None, show_filtered: bool = False ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - Generator which will yield Pages modified by username. + """Generator which will yield Pages modified by username. It only looks at the last editors given by max_revision_depth. If timestamp is set in MediaWiki format JJJJMMDDhhmmss, older edits are @@ -473,8 +463,7 @@ def WikibaseItemFilterPageGenerator( has_item: bool = True, show_filtered: bool = False, ) -> Generator[pywikibot.page.BasePage, None, None]: - """ - A wrapper generator used to exclude if page has a Wikibase item or not. + """A wrapper generator used to exclude if page has a Wikibase item or not. :param generator: Generator to wrap. :param has_item: Exclude pages without an item if True, or only diff --git a/pywikibot/pagegenerators/_generators.py b/pywikibot/pagegenerators/_generators.py index 2956b358c7..d15ca69607 100644 --- a/pywikibot/pagegenerators/_generators.py +++ b/pywikibot/pagegenerators/_generators.py @@ -84,8 +84,7 @@ def PrefixingPageGenerator(prefix: str, total: int | None = None, content: bool = False ) -> Iterable[pywikibot.page.Page]: - """ - Prefixed Page generator. + """Prefixed Page generator. :param prefix: The prefix of the pages. :param namespace: Namespace to retrieve pages from @@ -125,8 +124,7 @@ def LogeventsPageGenerator(logtype: str | None = None, end: Timestamp | None = None, reverse: bool = False ) -> Generator[pywikibot.page.Page, None, None]: - """ - Generate Pages for specified modes of logevents. + """Generate Pages for specified modes of logevents. :param logtype: Mode of logs to retrieve :param user: User of logs retrieved @@ -176,8 +174,7 @@ def RecentChangesPageGenerator( Iterable[pywikibot.Page]]) = None, **kwargs: Any ) -> Generator[pywikibot.Page, None, None]: - """ - Generate pages that are in the recent changes list, including duplicates. + """Generate recent changes pages, including duplicates. For keyword parameters refer :meth:`APISite.recentchanges() `. 
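The filter generators above are meant to be chained around a base generator such as PrefixingPageGenerator. A sketch of the usual composition (the site code and prefix are placeholders; PreloadingGenerator comes from the same pagegenerators module):

    import pywikibot
    from pywikibot import pagegenerators

    site = pywikibot.Site('en', 'wikipedia')                      # placeholder site
    gen = pagegenerators.PrefixingPageGenerator('List of', site=site, total=50)
    gen = pagegenerators.NamespaceFilterPageGenerator(gen, [0], site=site)
    gen = pagegenerators.RedirectFilterPageGenerator(gen)         # drop redirects
    gen = pagegenerators.PreloadingGenerator(gen, groupsize=20)   # batch page loads
    for page in gen:
        print(page.title())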
@@ -414,8 +411,7 @@ def TextIOPageGenerator(source: str | None = None, def PagesFromTitlesGenerator(iterable: Iterable[str], site: BaseSite | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Generate pages from the titles (strings) yielded by iterable. + """Generate pages from the titles (strings) yielded by iterable. :param site: Site for generator results. """ @@ -480,8 +476,7 @@ def UserContributionsGenerator(username: str, def NewimagesPageGenerator(total: int | None = None, site: BaseSite | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - New file generator. + """New file generator. :param total: Maximum number of pages to retrieve in total :param site: Site for generator results. @@ -494,8 +489,7 @@ def NewimagesPageGenerator(total: int | None = None, def WikibaseItemGenerator(gen: Iterable[pywikibot.page.Page] ) -> Generator[pywikibot.page.ItemPage, None, None]: - """ - A wrapper generator used to yield Wikibase items of another generator. + """A wrapper generator used to yield Wikibase items of another generator. :param gen: Generator to wrap. :return: Wrapped generator @@ -516,8 +510,7 @@ def AncientPagesPageGenerator( total: int = 100, site: BaseSite | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Ancient page generator. + """Ancient page generator. :param total: Maximum number of pages to retrieve in total :param site: Site for generator results. @@ -686,8 +679,7 @@ def DeadendPagesPageGenerator( def LongPagesPageGenerator(total: int = 100, site: BaseSite | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Long page generator. + """Long page generator. :param total: Maximum number of pages to retrieve in total :param site: Site for generator results. @@ -700,8 +692,7 @@ def LongPagesPageGenerator(total: int = 100, def ShortPagesPageGenerator(total: int = 100, site: BaseSite | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Short page generator. + """Short page generator. :param total: Maximum number of pages to retrieve in total :param site: Site for generator results. @@ -785,8 +776,7 @@ def SearchPageGenerator( def LiveRCPageGenerator(site: BaseSite | None = None, total: int | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Yield pages from a socket.io RC stream. + """Yield pages from a socket.io RC stream. Generates pages based on the EventStreams Server-Sent-Event (SSE) recent changes stream. @@ -816,6 +806,7 @@ def LiveRCPageGenerator(site: BaseSite | None = None, class GoogleSearchPageGenerator(GeneratorWrapper): + """Page generator using Google search results. To use this generator, you need to install the package 'google': @@ -834,8 +825,7 @@ class GoogleSearchPageGenerator(GeneratorWrapper): def __init__(self, query: str | None = None, site: BaseSite | None = None) -> None: - """ - Initializer. + """Initializer. :param site: Site for generator results. """ @@ -896,8 +886,7 @@ def generator(self) -> Generator[pywikibot.page.Page, None, None]: def MySQLPageGenerator(query: str, site: BaseSite | None = None, verbose: bool | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Yield a list of pages based on a MySQL query. + """Yield a list of pages based on a MySQL query. The query should return two columns, page namespace and page title pairs from some table. 
An example query that yields all ns0 pages might look @@ -1055,6 +1044,7 @@ def SupersetPageGenerator(query: str, class XMLDumpPageGenerator(abc.Iterator): # type: ignore[type-arg] + """Xml iterator that yields Page objects. .. versionadded:: 7.2 @@ -1133,8 +1123,7 @@ def __init__(self, *args, **kwargs): def YearPageGenerator(start: int = 1, end: int = 2050, site: BaseSite | None = None ) -> Generator[pywikibot.page.Page, None, None]: - """ - Year page generator. + """Year page generator. :param site: Site for generator results. """ @@ -1153,8 +1142,7 @@ def YearPageGenerator(start: int = 1, end: int = 2050, def DayPageGenerator(start_month: int = 1, end_month: int = 12, site: BaseSite | None = None, year: int = 2000 ) -> Generator[pywikibot.page.Page, None, None]: - """ - Day page generator. + """Day page generator. :param site: Site for generator results. :param year: considering leap year. @@ -1238,8 +1226,7 @@ def WikibaseSearchItemPageGenerator( total: int | None = None, site: BaseSite | None = None, ) -> Generator[pywikibot.page.ItemPage, None, None]: - """ - Generate pages that contain the provided text. + """Generate pages that contain the provided text. :param text: Text to look for. :param language: Code of the language to search in. If not specified, @@ -1259,6 +1246,7 @@ def WikibaseSearchItemPageGenerator( class PetScanPageGenerator(GeneratorWrapper): + """Queries PetScan to generate pages. .. seealso:: https://petscan.wmflabs.org/ @@ -1275,8 +1263,7 @@ def __init__( site: BaseSite | None = None, extra_options: dict[Any, Any] | None = None ) -> None: - """ - Initializer. + """Initializer. :param categories: List of category names to retrieve pages from :param subset_combination: Combination mode. @@ -1298,8 +1285,7 @@ def __init__( def buildQuery(self, categories: Sequence[str], subset_combination: bool, namespaces: Iterable[int | pywikibot.site.Namespace] | None, extra_options: dict[Any, Any] | None) -> dict[str, Any]: - """ - Get the querystring options to query PetScan. + """Get the querystring options to query PetScan. :param categories: List of categories (as strings) :param subset_combination: Combination mode. @@ -1370,6 +1356,7 @@ def generator(self) -> Generator[pywikibot.page.Page, None, None]: class PagePilePageGenerator(GeneratorWrapper): + """Queries PagePile to generate pages. .. seealso:: https://pagepile.toolforge.org/ diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py index 727527dff3..06f4146182 100644 --- a/pywikibot/proofreadpage.py +++ b/pywikibot/proofreadpage.py @@ -58,6 +58,7 @@ def _bs4_soup(*args: Any, **kwargs: Any) -> None: class TagAttr: + """Tag attribute of . Represent a single attribute. It is used internally in @@ -164,6 +165,7 @@ def __repr__(self): class TagAttrDesc: + """A descriptor tag. .. versionadded:: 8.0 @@ -192,6 +194,7 @@ def __delete__(self, obj): class PagesTagParser(collections.abc.Container): + """Parser for tag ````. .. seealso:: @@ -1091,8 +1094,7 @@ def _parse_redlink(href: str) -> str | None: return None def save(self, *args: Any, **kwargs: Any) -> None: # See Page.save(). - """ - Save page after validating the content. + """Save page after validating the content. Trying to save any other content fails silently with a parameterless INDEX_TEMPLATE being saved. 
diff --git a/pywikibot/scripts/generate_family_file.py b/pywikibot/scripts/generate_family_file.py index 066d4bf4ea..3ec90561ba 100755 --- a/pywikibot/scripts/generate_family_file.py +++ b/pywikibot/scripts/generate_family_file.py @@ -32,7 +32,7 @@ If the url scheme is missing, ``https`` will be used. """ # -# (C) Pywikibot team, 2010-2023 +# (C) Pywikibot team, 2010-2024 # # Distributed under the terms of the MIT license. # @@ -64,8 +64,7 @@ def __init__(self, name: str | None = None, dointerwiki: str | None = None, verify: str | None = None) -> None: - """ - Parameters are optional. If not given the script asks for the values. + """Parameters are optional. If missing the script asks for the values. :param url: an url from where the family settings are loaded :param name: the family name without "_family.py" tail. diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py index 2864f4beb0..f0e0a98898 100755 --- a/pywikibot/scripts/generate_user_files.py +++ b/pywikibot/scripts/generate_user_files.py @@ -114,8 +114,7 @@ def get_site_and_lang( default_username: str | None = None, force: bool = False ) -> tuple[str, str, str]: - """ - Ask the user for the family, site code and username. + """Ask the user for the family, site code and username. :param default_family: The default family which should be chosen. :param default_lang: The default site code which should be chosen, @@ -353,8 +352,7 @@ def create_user_config( main_username: str, force: bool = False ): - """ - Create a user-config.py in base_dir. + """Create a user-config.py in base_dir. Create a user-password.py if necessary. """ @@ -497,8 +495,7 @@ def ask_for_dir_change(force) -> tuple[bool, bool]: def main(*args: str) -> None: - """ - Process command line arguments and generate user-config. + """Process command line arguments and generate user-config. If args is an empty list, sys.argv is used. diff --git a/pywikibot/scripts/login.py b/pywikibot/scripts/login.py index ce3f29f5a3..75c6088224 100755 --- a/pywikibot/scripts/login.py +++ b/pywikibot/scripts/login.py @@ -122,8 +122,7 @@ def login_one_site(code, family, oauth, logout, autocreate): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py index 2a3ec9cf1d..0d6f64f848 100644 --- a/pywikibot/site/_apisite.py +++ b/pywikibot/site/_apisite.py @@ -155,8 +155,7 @@ def __setstate__(self, state: dict[str, Any]) -> None: self._tokens = TokenWallet(self) def interwiki(self, prefix: str) -> BaseSite: - """ - Return the site for a corresponding interwiki prefix. + """Return the site for a corresponding interwiki prefix. :raises pywikibot.exceptions.SiteDefinitionError: if the url given in the interwiki table doesn't match any of the existing families. @@ -165,8 +164,7 @@ def interwiki(self, prefix: str) -> BaseSite: return self._interwikimap[prefix].site def interwiki_prefix(self, site: BaseSite) -> list[str]: - """ - Return the interwiki prefixes going to that site. + """Return the interwiki prefixes going to that site. The interwiki prefixes are ordered first by length (shortest first) and then alphabetically. 
:py:obj:`interwiki(prefix)` is not @@ -186,8 +184,7 @@ def interwiki_prefix(self, site: BaseSite) -> list[str]: return sorted(prefixes, key=lambda p: (len(p), p)) def local_interwiki(self, prefix: str) -> bool: - """ - Return whether the interwiki prefix is local. + """Return whether the interwiki prefix is local. A local interwiki prefix is handled by the target site like a normal link. So if that link also contains an interwiki link it does follow @@ -289,8 +286,7 @@ def _generator( @staticmethod def _request_class(kwargs: dict[str, Any]) -> type[api.Request]: - """ - Get the appropriate class. + """Get the appropriate class. Inside this class kwargs use the parameters mode but QueryGenerator may use the old kwargs mode. @@ -459,8 +455,7 @@ def _relogin(self) -> None: self.login() def logout(self) -> None: - """ - Logout of the site and load details for the logged out user. + """Logout of the site and load details for the logged out user. Also logs out of the global account if linked to the user. @@ -797,8 +792,7 @@ def is_locked(self, return 'locked' in self.get_globaluserinfo(user, force) def get_searched_namespaces(self, force: bool = False) -> set[Namespace]: - """ - Retrieve the default searched namespaces for the user. + """Retrieve the default searched namespaces for the user. If no user is logged in, it returns the namespaces used by default. Otherwise it returns the user preferences. It caches the last result @@ -1125,8 +1119,7 @@ def expand_text( return req.submit()['expandtemplates']['wikitext'] def getcurrenttimestamp(self) -> str: - """ - Return the server time as a MediaWiki timestamp string. + """Return the server time as a MediaWiki timestamp string. It calls :py:obj:`server_time` first so it queries the server to get the current server time. @@ -1136,8 +1129,7 @@ def getcurrenttimestamp(self) -> str: return self.server_time().totimestampformat() def server_time(self) -> pywikibot.Timestamp: - """ - Return a Timestamp object representing the current server time. + """Return a Timestamp object representing the current server time. It uses the 'time' property of the siteinfo 'general'. It'll force a reload before returning the time. @@ -1293,8 +1285,7 @@ def image_repository(self) -> BaseSite | None: return None def data_repository(self) -> pywikibot.site.DataSite | None: - """ - Return the data repository connected to this site. + """Return the data repository connected to this site. :return: The data repository if one is connected or None otherwise. """ @@ -1337,8 +1328,7 @@ def page_from_repository( self, item: str ) -> pywikibot.page.Page | None: - """ - Return a Page for this site object specified by Wikibase item. + """Return a Page for this site object specified by Wikibase item. Usage: @@ -2747,8 +2737,7 @@ def undelete( } def protection_types(self) -> set[str]: - """ - Return the protection types available on this site. + """Return the protection types available on this site. **Example:** @@ -2764,8 +2753,7 @@ def protection_types(self) -> set[str]: @need_version('1.27.3') def protection_levels(self) -> set[str]: - """ - Return the protection levels available on this site. + """Return the protection levels available on this site. **Example:** @@ -2862,8 +2850,7 @@ def blockuser( reblock: bool = False, allowusertalk: bool = False ) -> dict[str, Any]: - """ - Block a user for certain amount of time and for a certain reason. + """Block a user for certain amount of time and for a certain reason. .. 
seealso:: :api:`Block` @@ -2908,8 +2895,7 @@ def unblockuser( user: pywikibot.page.User, reason: str | None = None ) -> dict[str, Any]: - """ - Remove the block for the user. + """Remove the block for the user. .. seealso:: :api:`Block` @@ -2957,8 +2943,7 @@ def purgepages( converttitles: bool = False, redirects: bool = False ) -> bool: - """ - Purge the server's cache for one or multiple pages. + """Purge the server's cache for one or multiple pages. :param pages: list of Page objects :param redirects: Automatically resolve redirects. @@ -3054,8 +3039,7 @@ def upload( return Uploader(self, filepage, **kwargs).upload() def get_property_names(self, force: bool = False) -> list[str]: - """ - Get property names for pages_with_property(). + """Get property names for pages_with_property(). .. seealso:: :api:`Pagepropnames` @@ -3072,8 +3056,7 @@ def compare( diff: _CompType, difftype: str = 'table' ) -> str: - """ - Corresponding method to the 'action=compare' API action. + """Corresponding method to the 'action=compare' API action. .. hint:: Use :func:`diff.html_comparator` function to parse result. diff --git a/pywikibot/site/_basesite.py b/pywikibot/site/_basesite.py index 570f23c724..bf65bd21dd 100644 --- a/pywikibot/site/_basesite.py +++ b/pywikibot/site/_basesite.py @@ -37,8 +37,7 @@ class BaseSite(ComparableMixin): """Site methods that are independent of the communication interface.""" def __init__(self, code: str, fam=None, user=None) -> None: - """ - Initializer. + """Initializer. :param code: the site's language code :type code: str @@ -122,8 +121,7 @@ def family(self): @property def code(self): - """ - The identifying code for this Site equal to the wiki prefix. + """The identifying code for this Site equal to the wiki prefix. By convention, this is usually an ISO language code, but it does not have to be. @@ -296,8 +294,7 @@ def pagename2codes(self) -> list[str]: return ['PAGENAMEE'] def lock_page(self, page, block: bool = True): - """ - Lock page for writing. Must be called before writing any page. + """Lock page for writing. Must be called before writing any page. We don't want different threads trying to write to the same page at the same time, even to different sections. @@ -317,8 +314,7 @@ def lock_page(self, page, block: bool = True): self._locked_pages.add(title) def unlock_page(self, page) -> None: - """ - Unlock page. Call as soon as a write operation has completed. + """Unlock page. Call as soon as a write operation has completed. :param page: the page to be locked :type page: pywikibot.Page @@ -385,8 +381,7 @@ def redirect_regex(self) -> Pattern[str]: re.IGNORECASE | re.DOTALL) def sametitle(self, title1: str, title2: str) -> bool: - """ - Return True if title1 and title2 identify the same wiki page. + """Return True if title1 and title2 identify the same wiki page. title1 and title2 may be unequal but still identify the same page, if they use different aliases for the same namespace. diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py index b69562dd65..802f5fe309 100644 --- a/pywikibot/site/_datasite.py +++ b/pywikibot/site/_datasite.py @@ -50,8 +50,7 @@ def __init__(self, *args, **kwargs) -> None: } def get_repo_for_entity_type(self, entity_type: str) -> DataSite: - """ - Get the data repository for the entity type. + """Get the data repository for the entity type. 
+ """Get the data repository for the entity type.
When no foreign repository is defined for the entity type, the method returns this repository itself even if it does not @@ -86,8 +85,7 @@ def _cache_entity_namespaces(self) -> None: break def get_namespace_for_entity_type(self, entity_type): - """ - Return namespace for given entity type. + """Return namespace for given entity type. :return: corresponding namespace :rtype: Namespace @@ -103,8 +101,7 @@ def get_namespace_for_entity_type(self, entity_type): @property def item_namespace(self): - """ - Return namespace for items. + """Return namespace for items. :return: item namespace :rtype: Namespace @@ -115,8 +112,7 @@ def item_namespace(self): @property def property_namespace(self): - """ - Return namespace for properties. + """Return namespace for properties. :return: property namespace :rtype: Namespace @@ -127,8 +123,7 @@ def property_namespace(self): return self._property_namespace def get_entity_for_entity_id(self, entity_id): - """ - Return a new instance for given entity id. + """Return a new instance for given entity id. :raises pywikibot.exceptions.NoWikibaseEntityError: there is no entity with the id @@ -145,8 +140,7 @@ def get_entity_for_entity_id(self, entity_id): @property @need_version('1.28-wmf.3') def sparql_endpoint(self): - """ - Return the sparql endpoint url, if any has been set. + """Return the sparql endpoint url, if any has been set. :return: sparql endpoint url :rtype: str|None @@ -156,8 +150,7 @@ def sparql_endpoint(self): @property @need_version('1.28-wmf.23') def concept_base_uri(self): - """ - Return the base uri for concepts/entities. + """Return the base uri for concepts/entities. :return: concept base uri :rtype: str @@ -182,8 +175,7 @@ def tabular_data_repository(self): return None def loadcontent(self, identification, *props): - """ - Fetch the current content of a Wikibase item. + """Fetch the current content of a Wikibase item. This is called loadcontent since wbgetentities does not support fetching old @@ -451,8 +443,7 @@ def save_claim(self, summary: str | None = None, bot: bool = True, tags: str | None = None): - """ - Save the whole claim to the wikibase site. + """Save the whole claim to the wikibase site. .. versionchanged:: 9.4 *tags* parameter was added @@ -776,8 +767,7 @@ def mergeItems(self, @need_extension('WikibaseLexeme') def mergeLexemes(self, from_lexeme, to_lexeme, summary=None, *, bot: bool = True) -> dict: - """ - Merge two lexemes together. + """Merge two lexemes together. :param from_lexeme: Lexeme to merge from :type from_lexeme: pywikibot.LexemePage @@ -802,8 +792,7 @@ def mergeLexemes(self, from_lexeme, to_lexeme, summary=None, *, @need_right('item-redirect') def set_redirect_target(self, from_item, to_item, bot: bool = True): - """ - Make a redirect to another item. + """Make a redirect to another item. :param to_item: title of target item. :type to_item: pywikibot.ItemPage @@ -823,8 +812,7 @@ def set_redirect_target(self, from_item, to_item, bot: bool = True): def search_entities(self, search: str, language: str, total: int | None = None, **kwargs): - """ - Search for pages or properties that contain the given text. + """Search for pages or properties that contain the given text. :param search: Text to find. :param language: Language to search in. @@ -858,8 +846,7 @@ def parsevalue(self, datatype: str, values: list[str], options: dict[str, Any] | None = None, language: str | None = None, validate: bool = False) -> list[Any]: - """ - Send data values to the wikibase parser for interpretation. 
+ """Send data values to the wikibase parser for interpretation. .. versionadded:: 7.5 .. seealso:: `wbparsevalue API @@ -913,8 +900,7 @@ def parsevalue(self, datatype: str, values: list[str], @need_right('edit') def _wbset_action(self, itemdef, action: str, action_data, **kwargs) -> dict: - """ - Execute wbset{action} on a Wikibase entity. + """Execute wbset{action} on a Wikibase entity. Supported actions are: wbsetaliases, wbsetdescription, wbsetlabel and wbsetsitelink @@ -1028,16 +1014,14 @@ def prepare_data(action, data): return req.submit() def wbsetaliases(self, itemdef, aliases, **kwargs): - """ - Set aliases for a single Wikibase entity. + """Set aliases for a single Wikibase entity. See self._wbset_action() for parameters """ return self._wbset_action(itemdef, 'wbsetaliases', aliases, **kwargs) def wbsetdescription(self, itemdef, description, **kwargs): - """ - Set description for a single Wikibase entity. + """Set description for a single Wikibase entity. See self._wbset_action() """ @@ -1045,16 +1029,14 @@ def wbsetdescription(self, itemdef, description, **kwargs): **kwargs) def wbsetlabel(self, itemdef, label, **kwargs): - """ - Set label for a single Wikibase entity. + """Set label for a single Wikibase entity. See self._wbset_action() for parameters """ return self._wbset_action(itemdef, 'wbsetlabel', label, **kwargs) def wbsetsitelink(self, itemdef, sitelink, **kwargs): - """ - Set, remove or modify a sitelink on a Wikibase item. + """Set, remove or modify a sitelink on a Wikibase item. See self._wbset_action() for parameters """ @@ -1064,8 +1046,7 @@ def wbsetsitelink(self, itemdef, sitelink, **kwargs): @need_extension('WikibaseLexeme') def add_form(self, lexeme, form, *, bot: bool = True, baserevid=None) -> dict: - """ - Add a form. + """Add a form. :param lexeme: Lexeme to modify :type lexeme: pywikibot.LexemePage @@ -1091,8 +1072,7 @@ def add_form(self, lexeme, form, *, bot: bool = True, @need_right('edit') @need_extension('WikibaseLexeme') def remove_form(self, form, *, bot: bool = True, baserevid=None) -> dict: - """ - Remove a form. + """Remove a form. :param form: Form to be removed :type form: pywikibot.LexemeForm @@ -1116,8 +1096,7 @@ def remove_form(self, form, *, bot: bool = True, baserevid=None) -> dict: @need_extension('WikibaseLexeme') def edit_form_elements(self, form, data, *, bot: bool = True, baserevid=None) -> dict: - """ - Edit lexeme form elements. + """Edit lexeme form elements. :param form: Form :type form: pywikibot.LexemeForm diff --git a/pywikibot/site/_extensions.py b/pywikibot/site/_extensions.py index 00b15a1b11..77a8344032 100644 --- a/pywikibot/site/_extensions.py +++ b/pywikibot/site/_extensions.py @@ -190,8 +190,7 @@ class PageImagesMixin: @need_extension('PageImages') def loadpageimage(self, page) -> None: - """ - Load [[mw:Extension:PageImages]] info. + """Load [[mw:Extension:PageImages]] info. :param page: The page for which to obtain the image :type page: pywikibot.Page @@ -399,8 +398,7 @@ class FlowMixin: @need_extension('Flow') @deprecated(since='9.4.0') def load_board(self, page): - """ - Retrieve the data for a Flow board. + """Retrieve the data for a Flow board. :param page: A Flow board :type page: Board @@ -425,8 +423,7 @@ def load_topiclist(self, offset_id: str | None = None, reverse: bool = False, include_offset: bool = False) -> dict[str, Any]: - """ - Retrieve the topiclist of a Flow board. + """Retrieve the topiclist of a Flow board. .. versionchanged:: 8.0 All parameters except *page* are keyword only parameters. 
@@ -460,8 +457,7 @@ def load_topiclist(self, @need_extension('Flow') @deprecated(since='9.4.0') def load_topic(self, page, content_format: str): - """ - Retrieve the data for a Flow topic. + """Retrieve the data for a Flow topic. :param page: A Flow topic :type page: Topic @@ -479,8 +475,7 @@ def load_topic(self, page, content_format: str): @need_extension('Flow') @deprecated(since='9.4.0') def load_post_current_revision(self, page, post_id, content_format: str): - """ - Retrieve the data for a post to a Flow topic. + """Retrieve the data for a post to a Flow topic. :param page: A Flow topic :type page: Topic @@ -501,8 +496,7 @@ def load_post_current_revision(self, page, post_id, content_format: str): @need_extension('Flow') @deprecated(since='9.4.0') def create_new_topic(self, page, title, content, content_format): - """ - Create a new topic on a Flow board. + """Create a new topic on a Flow board. :param page: A Flow board :type page: Board @@ -550,8 +544,7 @@ def reply_to_post(self, page, reply_to_uuid: str, content: str, @need_extension('Flow') @deprecated(since='9.4.0') def lock_topic(self, page, lock, reason): - """ - Lock or unlock a Flow topic. + """Lock or unlock a Flow topic. :param page: A Flow topic :type page: Topic @@ -575,8 +568,7 @@ def lock_topic(self, page, lock, reason): @need_extension('Flow') @deprecated(since='9.4.0') def moderate_topic(self, page, state, reason): - """ - Moderate a Flow topic. + """Moderate a Flow topic. :param page: A Flow topic :type page: Topic @@ -598,8 +590,7 @@ def moderate_topic(self, page, state, reason): @need_extension('Flow') @deprecated(since='9.4.0') def summarize_topic(self, page, summary): - """ - Add summary to Flow topic. + """Add summary to Flow topic. :param page: A Flow topic :type page: Topic @@ -623,8 +614,7 @@ def summarize_topic(self, page, summary): @need_extension('Flow') @deprecated(since='9.4.0') def delete_topic(self, page, reason): - """ - Delete a Flow topic. + """Delete a Flow topic. :param page: A Flow topic :type page: Topic @@ -639,8 +629,7 @@ def delete_topic(self, page, reason): @need_extension('Flow') @deprecated(since='9.4.0') def hide_topic(self, page, reason): - """ - Hide a Flow topic. + """Hide a Flow topic. :param page: A Flow topic :type page: Topic @@ -655,8 +644,7 @@ def hide_topic(self, page, reason): @need_extension('Flow') @deprecated(since='9.4.0') def suppress_topic(self, page, reason): - """ - Suppress a Flow topic. + """Suppress a Flow topic. :param page: A Flow topic :type page: Topic @@ -671,8 +659,7 @@ def suppress_topic(self, page, reason): @need_extension('Flow') @deprecated(since='9.4.0') def restore_topic(self, page, reason): - """ - Restore a Flow topic. + """Restore a Flow topic. :param page: A Flow topic :type page: Topic @@ -687,8 +674,7 @@ def restore_topic(self, page, reason): @need_extension('Flow') @deprecated(since='9.4.0') def moderate_post(self, post, state, reason): - """ - Moderate a Flow post. + """Moderate a Flow post. :param post: A Flow post :type post: Post @@ -713,8 +699,7 @@ def moderate_post(self, post, state, reason): @need_extension('Flow') @deprecated(since='9.4.0') def delete_post(self, post, reason): - """ - Delete a Flow post. + """Delete a Flow post. :param post: A Flow post :type post: Post @@ -729,8 +714,7 @@ def delete_post(self, post, reason): @need_extension('Flow') @deprecated(since='9.4.0') def hide_post(self, post, reason): - """ - Hide a Flow post. + """Hide a Flow post. 
+ """Hide a Flow post.
:param post: A Flow post :type post: Post @@ -745,8 +729,7 @@ def hide_post(self, post, reason): @need_extension('Flow') @deprecated(since='9.4.0') def suppress_post(self, post, reason): - """ - Suppress a Flow post. + """Suppress a Flow post. :param post: A Flow post :type post: Post @@ -761,8 +744,7 @@ def suppress_post(self, post, reason): @need_extension('Flow') @deprecated(since='9.4.0') def restore_post(self, post, reason): - """ - Restore a Flow post. + """Restore a Flow post. :param post: A Flow post :type post: Post @@ -780,8 +762,7 @@ class UrlShortenerMixin: @need_extension('UrlShortener') def create_short_link(self, url): - """ - Return a shortened link. + """Return a shortened link. Note that on Wikimedia wikis only metawiki supports this action, and this wiki can process links to all WM domains. diff --git a/pywikibot/site/_generators.py b/pywikibot/site/_generators.py index 8b1221dedc..5c08ebf963 100644 --- a/pywikibot/site/_generators.py +++ b/pywikibot/site/_generators.py @@ -66,8 +66,7 @@ def load_pages_from_pageids( self, pageids: str | Iterable[int | str], ) -> Generator[pywikibot.Page, None, None]: - """ - Return a page generator from pageids. + """Return a page generator from pageids. Pages are iterated in the same order than in the underlying pageids. @@ -387,8 +386,7 @@ def pagereferences( total: int | None = None, content: bool = False, ) -> Iterable[pywikibot.Page]: - """ - Convenience method combining pagebacklinks and page_embeddedin. + """Convenience method combining pagebacklinks and page_embeddedin. :param namespaces: If present, only return links from the namespaces in this list. @@ -1791,8 +1789,7 @@ def alldeletedrevisions( total: int | None = None, **kwargs, ) -> Generator[dict[str, Any], None, None]: - """ - Yield all deleted revisions. + """Yield all deleted revisions. .. seealso:: :api:`Alldeletedrevisions` .. warning:: *user* keyword argument must be given together with diff --git a/pywikibot/site/_interwikimap.py b/pywikibot/site/_interwikimap.py index 8711194846..aa4da13eb5 100644 --- a/pywikibot/site/_interwikimap.py +++ b/pywikibot/site/_interwikimap.py @@ -35,8 +35,7 @@ class _InterwikiMap: """A representation of the interwiki map of a site.""" def __init__(self, site) -> None: - """ - Create an empty uninitialized interwiki map for the given site. + """Create an empty uninitialized interwiki map for the given site. :param site: Given site for which interwiki map is to be created :type site: pywikibot.site.APISite @@ -62,8 +61,7 @@ def _iw_sites(self): return self._map def __getitem__(self, prefix): - """ - Return the site, locality and url for the requested prefix. + """Return the site, locality and url for the requested prefix. :param prefix: Interwiki prefix :type prefix: Dictionary key @@ -81,8 +79,7 @@ def __getitem__(self, prefix): .format(prefix, type(self._iw_sites[prefix].site))) def get_by_url(self, url: str) -> set[str]: - """ - Return a set of prefixes applying to the URL. + """Return a set of prefixes applying to the URL. :param url: URL for the interwiki """ diff --git a/pywikibot/site/_namespace.py b/pywikibot/site/_namespace.py index 1d707442ef..b1d0568053 100644 --- a/pywikibot/site/_namespace.py +++ b/pywikibot/site/_namespace.py @@ -71,8 +71,7 @@ def __new__(cls, name, bases, dic): class Namespace(Iterable, ComparableMixin, metaclass=MetaNamespace): - """ - Namespace site data object. + """Namespace site data object. 
This is backwards compatible with the structure of entries in site._namespaces which were a list of:: @@ -303,8 +302,7 @@ def builtin_namespaces(cls, case: str = 'first-letter'): @staticmethod def normalize_name(name): - """ - Remove an optional colon before and after name. + """Remove an optional colon before and after name. TODO: reject illegal characters. """ @@ -329,8 +327,7 @@ def normalize_name(name): class NamespacesDict(Mapping): - """ - An immutable dictionary containing the Namespace instances. + """An immutable dictionary containing the Namespace instances. It adds a deprecation message when called as the 'namespaces' property of APISite was callable. @@ -350,8 +347,7 @@ def __iter__(self): return iter(self._namespaces) def __getitem__(self, key: Namespace | int | str) -> Namespace: - """ - Get the namespace with the given key. + """Get the namespace with the given key. :param key: namespace key """ @@ -369,8 +365,7 @@ def __getitem__(self, key: Namespace | int | str) -> Namespace: return super().__getitem__(key) def __getattr__(self, attr: Namespace | int | str) -> Namespace: - """ - Get the namespace with the given key. + """Get the namespace with the given key. :param attr: namespace key """ @@ -390,8 +385,7 @@ def __len__(self) -> int: return len(self._namespaces) def lookup_name(self, name: str) -> Namespace | None: - """ - Find the Namespace for a name also checking aliases. + """Find the Namespace for a name also checking aliases. :param name: Name of the namespace. """ @@ -401,8 +395,7 @@ def lookup_name(self, name: str) -> Namespace | None: return self.lookup_normalized_name(name.lower()) def lookup_normalized_name(self, name: str) -> Namespace | None: - """ - Find the Namespace for a name also checking aliases. + """Find the Namespace for a name also checking aliases. The name has to be normalized and must be lower case. @@ -411,8 +404,7 @@ def lookup_normalized_name(self, name: str) -> Namespace | None: return self._namespace_names.get(name) def resolve(self, identifiers) -> list[Namespace]: - """ - Resolve namespace identifiers to obtain Namespace objects. + """Resolve namespace identifiers to obtain Namespace objects. Identifiers may be any value for which int() produces a valid namespace id, except bool, or any string which Namespace.lookup_name diff --git a/pywikibot/site/_obsoletesites.py b/pywikibot/site/_obsoletesites.py index a165bad4e8..bc99cc4121 100644 --- a/pywikibot/site/_obsoletesites.py +++ b/pywikibot/site/_obsoletesites.py @@ -18,6 +18,7 @@ class RemovedSite(BaseSite): class ClosedSite(APISite): + """Site closed to read-only mode.""" def _closed_error(self, notice: str = '') -> None: diff --git a/pywikibot/site/_siteinfo.py b/pywikibot/site/_siteinfo.py index d0e92e1e6a..5471959113 100644 --- a/pywikibot/site/_siteinfo.py +++ b/pywikibot/site/_siteinfo.py @@ -20,8 +20,7 @@ class Siteinfo(Container): - """ - A 'dictionary' like container for siteinfo. + """A 'dictionary' like container for siteinfo. This class queries the server to get the requested siteinfo property. Optionally it can cache this directly in the instance so that later @@ -90,8 +89,7 @@ def _post_process(prop, data) -> None: data[p] = p in data def _get_siteinfo(self, prop, expiry) -> dict: - """ - Retrieve a siteinfo property. + """Retrieve a siteinfo property. All properties which the site doesn't support contain the default value. 
Because pre-1.12 no data was @@ -179,8 +177,7 @@ def _is_expired(cache_date, expire): return cache_date + expire < pywikibot.Timestamp.nowutc() def _get_general(self, key: str, expiry): - """ - Return a siteinfo property which is loaded by default. + """Return a siteinfo property which is loaded by default. The property 'general' will be queried if it wasn't yet or it's forced. Additionally all uncached default properties are queried. This way @@ -229,8 +226,7 @@ def get( cache: bool = True, expiry: datetime.datetime | float | bool = False ) -> Any: - """ - Return a siteinfo property. + """Return a siteinfo property. It will never throw an APIError if it only stated, that the siteinfo property doesn't exist. Instead it will use the default value. @@ -325,8 +321,7 @@ def is_recognised(self, key: str) -> bool | None: return None if time is None else bool(time) def get_requested_time(self, key: str): - """ - Return when 'key' was successfully requested from the server. + """Return when 'key' was successfully requested from the server. If the property is actually in the siprop 'general' it returns the last request from the 'general' siprop. diff --git a/pywikibot/site_detect.py b/pywikibot/site_detect.py index 5d3a61cbe8..cbaa93faa3 100644 --- a/pywikibot/site_detect.py +++ b/pywikibot/site_detect.py @@ -39,8 +39,7 @@ class MWSite: """Minimal wiki site class.""" def __init__(self, fromurl, **kwargs) -> None: - """ - Initializer. + """Initializer. :raises pywikibot.exceptions.ServerError: a server error occurred while loading the site diff --git a/pywikibot/specialbots/_upload.py b/pywikibot/specialbots/_upload.py index c10f519639..c3396edb8e 100644 --- a/pywikibot/specialbots/_upload.py +++ b/pywikibot/specialbots/_upload.py @@ -375,16 +375,14 @@ def abort_on_warn(self, warn_code): return self.aborts is True or warn_code in self.aborts def ignore_on_warn(self, warn_code: str): - """ - Determine if the warning message should be ignored. + """Determine if the warning message should be ignored. :param warn_code: The warning message """ return self.ignore_warning is True or warn_code in self.ignore_warning def upload_file(self, file_url: str) -> str | None: - """ - Upload the image at file_url to the target wiki. + """Upload the image at file_url to the target wiki. .. seealso:: :api:`Upload` diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py index 4ab11ca2f3..b25ce1c1fd 100644 --- a/pywikibot/textlib.py +++ b/pywikibot/textlib.py @@ -112,8 +112,7 @@ def to_local_digits(phrase: str | int, lang: str) -> str: - """ - Change Latin digits based on language to localized version. + """Change Latin digits based on language to localized version. Be aware that this function only works for several languages, and that it returns an unchanged string if an unsupported language is given. @@ -380,8 +379,7 @@ def replaceExcept(text: str, marker: str = '', site: pywikibot.site.BaseSite | None = None, count: int = 0) -> str: - """ - Return text with *old* replaced by *new*, ignoring specified types of text. + """Return text with *old* replaced by *new*, ignoring specified text types. Skip occurrences of *old* within *exceptions*; e.g. within nowiki tags or HTML comments. If *caseInsensitive* is true, then use case @@ -500,8 +498,7 @@ def removeDisabledParts(text: str, include: Container | None = None, site: pywikibot.site.BaseSite | None = None ) -> str: - """ - Return text without portions where wiki markup is disabled. + """Return text without portions where wiki markup is disabled. 
Parts that will be removed by default are: @@ -619,8 +616,7 @@ def handle_endtag(self, tag) -> None: def isDisabled(text: str, index: int, tags=None) -> bool: - """ - Return True if text[index] is disabled, e.g. by a comment or nowiki tags. + """Return True if text[index] is disabled, e.g. by a comment or nowiki tag. For the tags parameter, see :py:obj:`removeDisabledParts`. """ @@ -643,8 +639,7 @@ def findmarker(text: str, startwith: str = '@@', def expandmarker(text: str, marker: str = '', separator: str = '') -> str: - """ - Return a marker expanded whitespace and the separator. + """Return a marker expanded whitespace and the separator. It searches for the first occurrence of the marker and gets the combination of the separator and whitespace directly before it. @@ -1251,8 +1246,7 @@ def removeLanguageLinks(text: str, site=None, marker: str = '') -> str: def removeLanguageLinksAndSeparator(text: str, site=None, marker: str = '', separator: str = '') -> str: - """ - Return text with inter-language links and preceding separators removed. + """Return text with inter-language links and preceding separators removed. If a link to an unknown language is encountered, a warning is printed. @@ -1532,8 +1526,7 @@ def removeCategoryLinks(text: str, site=None, marker: str = '') -> str: def removeCategoryLinksAndSeparator(text: str, site=None, marker: str = '', separator: str = '') -> str: - """ - Return text with category links and preceding separators removed. + """Return text with category links and preceding separators removed. :param text: The text that needs to be modified. :param site: The site that the text is coming from. @@ -1556,8 +1549,7 @@ def removeCategoryLinksAndSeparator(text: str, site=None, marker: str = '', def replaceCategoryInPlace(oldtext, oldcat, newcat, site=None, add_only: bool = False) -> str: - """ - Replace old category with new one and return the modified text. + """Replace old category with new one and return the modified text. :param oldtext: Content of the old category :param oldcat: pywikibot.Category object of the old category @@ -1613,8 +1605,7 @@ def replaceCategoryLinks(oldtext: str, new: Iterable, site: pywikibot.site.BaseSite | None = None, add_only: bool = False) -> str: - """ - Replace all existing category links with new category links. + """Replace all existing category links with new category links. :param oldtext: The text that needs to be replaced. :param new: Should be a list of Category objects or strings @@ -1866,8 +1857,7 @@ def explicit(param): def extract_templates_and_params_regex_simple(text: str): - """ - Extract top-level templates with params using only a simple regex. + """Extract top-level templates with params using only a simple regex. This function uses only a single regex, and returns an entry for each template called at the top-level of the wikitext. @@ -1922,8 +1912,7 @@ def glue_template_and_params(template_and_params) -> str: # -------------------------- def does_text_contain_section(pagetext: str, section: str) -> bool: - """ - Determine whether the page text contains the given section title. + """Determine whether the page text contains the given section title. It does not care whether a section string may contain spaces or underlines. Both will match. @@ -1963,6 +1952,7 @@ def reformat_ISBNs(text: str, match_func) -> str: class TimeStripperPatterns(NamedTuple): + """Hold precompiled timestamp patterns for :class:`TimeStripper`. Attribute order is important to avoid mismatch when searching. 
@@ -2143,8 +2133,7 @@ def _last_match_and_replace(self, m = all_matches[-1] def marker(m: Match[str]): - """ - Replace exactly the same number of matched characters. + """Replace exactly the same number of matched characters. Same number of chars shall be replaced, in order to be able to compare pos for matches reliably (absolute pos of a match @@ -2186,8 +2175,7 @@ def _valid_date_dict_positions(dateDict) -> bool: return not min_pos < time_pos < max_pos def timestripper(self, line: str) -> pywikibot.Timestamp | None: - """ - Find timestamp in line and convert it to time zone aware datetime. + """Find timestamp in line and convert it to time zone aware datetime. All the following items must be matched, otherwise None is returned: -. year, month, hour, time, day, minute, tzinfo diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py index d50dafb873..20680a80c2 100644 --- a/pywikibot/throttle.py +++ b/pywikibot/throttle.py @@ -33,6 +33,7 @@ class ProcEntry(NamedTuple): + """ProcEntry namedtuple.""" module_id: str diff --git a/pywikibot/time.py b/pywikibot/time.py index 002ce516f6..4f4e002dbd 100644 --- a/pywikibot/time.py +++ b/pywikibot/time.py @@ -318,8 +318,7 @@ def fromtimestampformat(cls, return cls._from_mw(ts) def isoformat(self, sep: str = 'T') -> str: # type: ignore[override] - """ - Convert object to an ISO 8601 timestamp accepted by MediaWiki. + """Convert object to an ISO 8601 timestamp accepted by MediaWiki. datetime.datetime.isoformat does not postfix the ISO formatted date with a 'Z' unless a timezone is included, which causes MediaWiki @@ -332,8 +331,7 @@ def totimestampformat(self) -> str: return self.strftime(self.mediawikiTSFormat) def posix_timestamp(self) -> float: - """ - Convert object to a POSIX timestamp. + """Convert object to a POSIX timestamp. See Note in datetime.timestamp(). @@ -504,8 +502,7 @@ def now(cls, tz=None) -> Timestamp: class TZoneFixedOffset(datetime.tzinfo): - """ - Class building tzinfo objects for fixed-offset time zones. + """Class building tzinfo objects for fixed-offset time zones. :param offset: a number indicating fixed offset in minutes east from UTC :param name: a string with name of the timezone @@ -541,8 +538,7 @@ def str2timedelta( string: str, timestamp: datetime.datetime | None = None, ) -> datetime.timedelta: - """ - Return a timedelta for a shorthand duration. + """Return a timedelta for a shorthand duration. :param string: a string defining a time period: @@ -575,8 +571,7 @@ def str2timedelta( def parse_duration(string: str) -> tuple[str, int]: - """ - Return the key and duration extracted from the string. + """Return the key and duration extracted from the string. :param string: a string defining a time period diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py index ace5183e67..4004fb84e8 100644 --- a/pywikibot/titletranslate.py +++ b/pywikibot/titletranslate.py @@ -17,8 +17,7 @@ def translate( removebrackets: bool = False, site=None ) -> list[pywikibot.Link]: - """ - Return a list of links to pages on other sites based on hints. + """Return a list of links to pages on other sites based on hints. Entries for single page titles list those pages. 
Page titles for entries such as "all:" or "xyz:" or "20:" are first built from the page title of diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py index 00ce4c35c5..641e14a4fc 100644 --- a/pywikibot/tools/__init__.py +++ b/pywikibot/tools/__init__.py @@ -159,8 +159,7 @@ def has_module(module: str, version: str | None = None) -> bool: class classproperty: # noqa: N801 - """ - Descriptor class to access a class method as a property. + """Descriptor class to access a class method as a property. This class may be used as a decorator:: @@ -297,8 +296,7 @@ def __ne__(self, other): def first_lower(string: str) -> str: - """ - Return a string with the first character uncapitalized. + """Return a string with the first character uncapitalized. Empty strings are supported. The original string is not changed. @@ -313,8 +311,7 @@ def first_lower(string: str) -> str: def first_upper(string: str) -> str: - """ - Return a string with the first character capitalized. + """Return a string with the first character capitalized. Empty strings are supported. The original string is not changed. @@ -419,8 +416,7 @@ def normalize_username(username) -> str | None: @total_ordering class MediaWikiVersion: - """ - Version object to allow comparing 'wmf' versions with normal ones. + """Version object to allow comparing 'wmf' versions with normal ones. The version mainly consist of digits separated by periods. After that is a suffix which may only be 'wmf', 'alpha', @@ -449,8 +445,7 @@ class MediaWikiVersion: r'(\d+(?:\.\d+)+)(-?wmf\.?(\d+)|alpha|beta(\d+)|-?rc\.?(\d+)|.*)?') def __init__(self, version_str: str) -> None: - """ - Initializer. + """Initializer. :param version_str: version to parse """ @@ -527,8 +522,7 @@ def __lt__(self, other: Any) -> bool: def open_archive(filename: str, mode: str = 'rb', use_extension: bool = True): - """ - Open a file and uncompress it if needed. + """Open a file and uncompress it if needed. This function supports bzip2, gzip, 7zip, lzma, and xz as compression containers. It uses the packages available in the @@ -632,8 +626,7 @@ def open_archive(filename: str, mode: str = 'rb', use_extension: bool = True): def merge_unique_dicts(*args, **kwargs): - """ - Return a merged dict and make sure that the original dicts keys are unique. + """Return a merged dict and make sure that the original keys are unique. The positional arguments are the dictionaries to be merged. It is also possible to define an additional dict using the keyword diff --git a/pywikibot/tools/_deprecate.py b/pywikibot/tools/_deprecate.py index bff2a18c8a..c1e6512871 100644 --- a/pywikibot/tools/_deprecate.py +++ b/pywikibot/tools/_deprecate.py @@ -109,8 +109,7 @@ def get_wrapper_depth(wrapper): def add_full_name(obj): - """ - A decorator to add __full_name__ to the function being decorated. + """A decorator to add __full_name__ to the function being decorated. This should be done for all decorators used in pywikibot, as any decorator that does not add __full_name__ will prevent other @@ -522,8 +521,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: def remove_last_args(arg_names): - """ - Decorator to declare all args additionally provided deprecated. + """Decorator to declare all args additionally provided deprecated. All positional arguments appearing after the normal arguments are marked deprecated. 
It marks also all keyword arguments present in arg_names as @@ -591,8 +589,7 @@ def redirect_func(target, *, class_name: str | None = None, since: str = '', future_warning: bool = True): - """ - Return a function which can be used to redirect to 'target'. + """Return a function which can be used to redirect to 'target'. It also acts like marking that function deprecated and copies all parameters. @@ -675,8 +672,7 @@ def add_deprecated_attr(self, name: str, replacement: Any = None, *, warning_message: str | None = None, since: str = '', future_warning: bool = True): - """ - Add the name to the local deprecated names dict. + """Add the name to the local deprecated names dict. .. versionchanged:: 7.0 ``since`` parameter must be a release number, not a timestamp. diff --git a/pywikibot/tools/collections.py b/pywikibot/tools/collections.py index cd4d9a9a2d..3a1570f870 100644 --- a/pywikibot/tools/collections.py +++ b/pywikibot/tools/collections.py @@ -148,8 +148,7 @@ class CombinedError(KeyError, IndexError): class EmptyDefault(str, Mapping): - """ - A default for a not existing siteinfo property. + """A default for a not existing siteinfo property. It should be chosen if there is no better default known. It acts like an empty collections, so it can be iterated through it safely if treated as a diff --git a/pywikibot/tools/djvu.py b/pywikibot/tools/djvu.py index 43b1af3459..24b76b44ae 100644 --- a/pywikibot/tools/djvu.py +++ b/pywikibot/tools/djvu.py @@ -15,8 +15,7 @@ def _call_cmd(args, lib: str = 'djvulibre') -> tuple: - """ - Tiny wrapper around subprocess.Popen(). + """Tiny wrapper around subprocess.Popen(). :param args: same as Popen() :type args: str or typing.Sequence[string] @@ -52,8 +51,7 @@ class DjVuFile: """ def __init__(self, file: str) -> None: - """ - Initializer. + """Initializer. :param file: filename (including path) to djvu file """ @@ -108,8 +106,7 @@ def wrapper(obj, *args, **kwargs): @check_cache def number_of_images(self, force: bool = False): - """ - Return the number of images in the djvu file. + """Return the number of images in the djvu file. :param force: if True, refresh the cached data """ @@ -122,8 +119,7 @@ def number_of_images(self, force: bool = False): @check_page_number def page_info(self, n: int, force: bool = False): - """ - Return a tuple (id, (size, dpi)) for page n of djvu file. + """Return a tuple (id, (size, dpi)) for page n of djvu file. :param n: page n of djvu file :param force: if True, refresh the cached data @@ -134,9 +130,9 @@ def page_info(self, n: int, force: bool = False): @check_cache def _get_page_info(self, force: bool = False): - """ - Return a dict of tuples (id, (size, dpi)) for all pages of djvu file. + """Return a dict of tuples for all pages of djvu file. + The tuples consist of (id, (size, dpi)). :param force: if True, refresh the cached data """ if not hasattr(self, '_page_info'): @@ -181,8 +177,7 @@ def get_most_common_info(self): @check_cache def has_text(self, force: bool = False): - """ - Test if the djvu file has a text-layer. + """Test if the djvu file has a text-layer. :param force: if True, refresh the cached data """ @@ -212,8 +207,7 @@ def _remove_control_chars(data): @check_page_number @check_cache def get_page(self, n: int, force: bool = False): - """ - Get page n for djvu file. + """Get page n for djvu file. 
:param n: page n of djvu file :param force: if True, refresh the cached data diff --git a/pywikibot/tools/formatter.py b/pywikibot/tools/formatter.py index 59c3aced71..fa2f3df133 100644 --- a/pywikibot/tools/formatter.py +++ b/pywikibot/tools/formatter.py @@ -69,8 +69,7 @@ def output(self) -> None: @deprecated('New color format pattern like <>colored text<>', since='7.2.0') def color_format(text: str, *args, **kwargs) -> str: - r""" - Do ``str.format`` without having to worry about colors. + r"""Do ``str.format`` without having to worry about colors. It is automatically adding \\03 in front of color fields so it's unnecessary to add them manually. Any other \\03 in the text is diff --git a/pywikibot/tools/itertools.py b/pywikibot/tools/itertools.py index 41fc5cac61..df3fffddb0 100644 --- a/pywikibot/tools/itertools.py +++ b/pywikibot/tools/itertools.py @@ -65,8 +65,7 @@ def itergroup(iterable, def islice_with_ellipsis(iterable, *args, marker: str = '…'): - """ - Generator which yields the first n elements of the iterable. + """Generator which yields the first n elements of the iterable. If more elements are available and marker is True, it returns an extra string marker as continuation mark. @@ -216,8 +215,7 @@ def roundrobin_generators(*iterables) -> Generator[Any, None, None]: def filter_unique(iterable, container=None, key=None, add=None): - """ - Yield unique items from an iterable, omitting duplicates. + """Yield unique items from an iterable, omitting duplicates. By default, to provide uniqueness, it puts the generated items into a set created as a local variable. It only yields items which are not diff --git a/pywikibot/tools/threading.py b/pywikibot/tools/threading.py index 7ea4590def..b4820ff30b 100644 --- a/pywikibot/tools/threading.py +++ b/pywikibot/tools/threading.py @@ -22,6 +22,7 @@ class RLock: + """Context manager which implements extended reentrant lock objects. This RLock is implicit derived from threading.RLock but provides a diff --git a/pywikibot/userinterfaces/gui.py b/pywikibot/userinterfaces/gui.py index 43774f3450..6a18d84f4d 100644 --- a/pywikibot/userinterfaces/gui.py +++ b/pywikibot/userinterfaces/gui.py @@ -219,8 +219,7 @@ def replace_event(self, event=None) -> str: return 'break' def find_all(self, s): - """ - Highlight all occurrences of string s, and select the first one. + """Highlight all occurrences of string s, and select the first one. If the string has already been highlighted, jump to the next occurrence after the current selection. (You cannot go backwards using the @@ -394,8 +393,7 @@ def __init__(self, parent=None, **kwargs) -> None: def edit(self, text: str, jumpIndex: int | None = None, # noqa: N803 highlight: str | None = None) -> str | None: - """ - Provide user with editor to modify text. + """Provide user with editor to modify text. :param text: the text to be edited :param jumpIndex: position at which to put the caret @@ -441,8 +439,7 @@ def config_dialog(self, event=None) -> None: ConfigDialog(self, 'Settings') def pressedOK(self) -> None: # noqa: N802 - """ - Perform OK operation. + """Perform OK operation. Called when user pushes the OK button. Saves the buffer into a variable, and closes the window. diff --git a/pywikibot/userinterfaces/terminal_interface.py b/pywikibot/userinterfaces/terminal_interface.py index 6c3d240ce1..ce0b5c85e5 100644 --- a/pywikibot/userinterfaces/terminal_interface.py +++ b/pywikibot/userinterfaces/terminal_interface.py @@ -1,5 +1,4 @@ -""" -Platform independent terminal interface module. 
+"""Platform independent terminal interface module. It imports the appropriate operating system specific implementation. """ diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py index c905a246fa..335eca2afe 100644 --- a/pywikibot/userinterfaces/terminal_interface_base.py +++ b/pywikibot/userinterfaces/terminal_interface_base.py @@ -70,8 +70,7 @@ class UI(ABUIC): split_col_pat = re.compile(r'(\w+);?(\w+)?') def __init__(self) -> None: - """ - Initialize the UI. + """Initialize the UI. This caches the std-streams locally so any attempts to monkey-patch the streams later will not work. @@ -145,8 +144,7 @@ def encounter_color(self, color, target_stream): @classmethod def divide_color(cls, color): - """ - Split color label in a tuple. + """Split color label in a tuple. Received color is a string like 'fg_color;bg_color' or 'fg_color'. Returned values are (fg_color, bg_color) or (fg_color, None). @@ -347,8 +345,7 @@ def input(self, question: str, password: bool = False, default: str | None = '', force: bool = False) -> str: - """ - Ask the user a question and return the answer. + """Ask the user a question and return the answer. Works like raw_input(), but returns a unicode string instead of ASCII. diff --git a/pywikibot/userinterfaces/transliteration.py b/pywikibot/userinterfaces/transliteration.py index 4ad1d85606..7101aac8b4 100644 --- a/pywikibot/userinterfaces/transliteration.py +++ b/pywikibot/userinterfaces/transliteration.py @@ -1105,8 +1105,7 @@ class Transliterator: """Class to transliterating text.""" def __init__(self, encoding: str) -> None: - """ - Initialize the transliteration mapping. + """Initialize the transliteration mapping. :param encoding: the encoding available. Any transliterated character which can't be mapped, will be removed from the mapping. diff --git a/pywikibot/version.py b/pywikibot/version.py index 3d33a29438..67cd656953 100644 --- a/pywikibot/version.py +++ b/pywikibot/version.py @@ -304,8 +304,7 @@ def getversion_onlinerepo(path: str = 'branches/master') -> str: def get_module_filename(module) -> str | None: - """ - Retrieve filename from an imported pywikibot module. + """Retrieve filename from an imported pywikibot module. It uses the __file__ attribute of the module. If it's file extension ends with py and another character the last character is discarded when the py @@ -327,8 +326,7 @@ def get_module_filename(module) -> str | None: def get_module_mtime(module): - """ - Retrieve the modification time from an imported module. + """Retrieve the modification time from an imported module. :param module: The module instance. :type module: module diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py index 2788ff741e..a1994cffbf 100644 --- a/pywikibot/xmlreader.py +++ b/pywikibot/xmlreader.py @@ -256,8 +256,7 @@ def _fetch_revs(self, elem: Element, with_id=False) -> Iterator[RawRev]: @staticmethod def parse_restrictions(restrictions: str) -> tuple[str | None, str | None]: - """ - Parse the characters within a restrictions tag. + """Parse the characters within a restrictions tag. Returns strings representing user groups allowed to edit and to move a page, where None means there are no restrictions. diff --git a/scripts/add_text.py b/scripts/add_text.py index 216ca023ea..9e84fd1cfc 100755 --- a/scripts/add_text.py +++ b/scripts/add_text.py @@ -176,8 +176,7 @@ def treat_page(self) -> None: def main(*argv: str) -> None: - """ - Process command line arguments and invoke bot. 
+ """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. @@ -199,8 +198,7 @@ def main(*argv: str) -> None: def parse(argv: Sequence[str], generator_factory: pagegenerators.GeneratorFactory ) -> dict[str, bool | str]: - """ - Parses our arguments and provide a dictionary with their values. + """Parses our arguments and provide a dictionary with their values. :param argv: input arguments to be parsed :param generator_factory: factory that will determine what pages to diff --git a/scripts/archivebot.py b/scripts/archivebot.py index 3a2b22085c..70cb5465f3 100755 --- a/scripts/archivebot.py +++ b/scripts/archivebot.py @@ -287,8 +287,7 @@ def calc_md5_hexdigest(txt, salt) -> str: class DiscussionThread: - """ - An object representing a discussion thread on a page. + """An object representing a discussion thread on a page. It represents something that is of the form:: @@ -906,8 +905,7 @@ def show_md5_key(calc, salt, site) -> bool: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/basic.py b/scripts/basic.py index 446554440f..5d9483f7c1 100755 --- a/scripts/basic.py +++ b/scripts/basic.py @@ -75,8 +75,7 @@ class BasicBot( AutomaticTWSummaryBot, # Automatically defines summary; needs summary_key ): - """ - An incomplete sample bot. + """An incomplete sample bot. :ivar summary_key: Edit summary message key. The message that should be used is placed on /i18n subdirectory. The file containing these @@ -129,8 +128,7 @@ def treat_page(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py index 02f5562aa1..0a2c6d9ae6 100755 --- a/scripts/blockpageschecker.py +++ b/scripts/blockpageschecker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -A bot to remove stale protection templates from pages that are not protected. +"""A bot to remove stale protection templates from unprotected pages. Very often sysops block the pages for a set time but then they forget to remove the warning! This script is useful if you want to remove those @@ -438,8 +437,7 @@ def understand_block(): def main(*args: str) -> None: - """ - Process command line arguments and perform task. + """Process command line arguments and perform task. If args is an empty list, sys.argv is used. diff --git a/scripts/category.py b/scripts/category.py index 83ff20b4c1..9798d27877 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -239,8 +239,7 @@ def determine_type_target( self, page: pywikibot.Page ) -> pywikibot.Page | None: - """ - Return page to be categorized by type. + """Return page to be categorized by type. :param page: Existing, missing or redirect page to be processed. :return: Page to be categorized. @@ -284,8 +283,7 @@ def determine_type_target( return None def determine_template_target(self, page) -> pywikibot.Page: - """ - Return template page to be categorized. + """Return template page to be categorized. Categories for templates can be included in section of template doc page. @@ -716,8 +714,7 @@ def __init__(self, oldcat, self.move_comment = move_comment if move_comment else self.comment def run(self) -> None: - """ - The main bot function that does all the work. + """The main bot function that does all the work. 
For readability it is split into several helper functions: - _movecat() @@ -824,8 +821,7 @@ def _delete(self, moved_page, moved_talk) -> None: self.counter['delete talk'] += 1 def _change(self, gen) -> None: - """ - Private function to move category contents. + """Private function to move category contents. Do not use this function from outside the class. @@ -1052,8 +1048,8 @@ def run(self) -> None: class CategoryTidyRobot(Bot, CategoryPreprocess): - """ - Robot to move members of a category into sub- or super-categories. + + """Robot to move members of a category into sub- or super-categories. Specify the category title on the command line. The robot will pick up the page, look for all sub- and super-categories, and show @@ -1098,8 +1094,7 @@ def move_to_category(self, member: pywikibot.Page, original_cat: pywikibot.Category, current_cat: pywikibot.Category) -> None: - """ - Ask whether to move it to one of the sub- or super-categories. + """Ask whether to move it to one of the sub- or super-categories. Given a page in the original_cat category, ask the user whether to move it to one of original_cat's sub- or super-categories. @@ -1113,6 +1108,7 @@ def move_to_category(self, :param current_cat: a category which is questioned. """ class CatContextOption(ContextOption): + """An option to show more and more context and categories.""" @property @@ -1137,6 +1133,7 @@ def out(self) -> str: return text class CatIntegerOption(IntegerOption): + """An option allowing a range of integers.""" @staticmethod @@ -1499,8 +1496,7 @@ def treat(self, child) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/category_graph.py b/scripts/category_graph.py index fd46d3042a..77c6b012a6 100755 --- a/scripts/category_graph.py +++ b/scripts/category_graph.py @@ -67,6 +67,7 @@ class CategoryGraphBot(SingleSiteBot): + """Bot to create graph of the category structure.""" @staticmethod @@ -216,8 +217,7 @@ def run(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index 6f1b1e510b..462f36525f 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -560,8 +560,7 @@ def teardown(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/change_pagelang.py b/scripts/change_pagelang.py index fc7ee8239f..587fce7ec1 100755 --- a/scripts/change_pagelang.py +++ b/scripts/change_pagelang.py @@ -124,8 +124,7 @@ def treat(self, page) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/checkimages.py b/scripts/checkimages.py index 6dd9bedccd..60abaf6ebe 100755 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -781,8 +781,7 @@ def load_hidden_templates(self) -> None: def important_image( list_given: list[tuple[float, pywikibot.FilePage]] ) -> pywikibot.FilePage: - """ - Get tuples of image and time, return the most used or oldest image. + """Get tuples of image and time, return the most used or oldest image. .. 
versionchanged:: 7.2 itertools.zip_longest is used to stop `using_pages` as soon as @@ -1165,8 +1164,7 @@ def mini_template_check(self, template) -> bool: return False def template_in_list(self) -> None: - """ - Check if template is in list. + """Check if template is in list. The problem is the calls to the MediaWiki system because they can be pretty slow. While searching in a list of objects is really fast, so @@ -1184,8 +1182,7 @@ def template_in_list(self) -> None: break def smart_detection(self) -> tuple[str, bool]: - """ - Detect templates. + """Detect templates. The bot instead of checking if there's a simple template in the image's description, checks also if that template is a license or @@ -1325,8 +1322,7 @@ def skip_images(self, skip_number, limit) -> bool: @staticmethod def wait(generator, wait_time) -> Generator[pywikibot.FilePage]: - """ - Skip the images uploaded before x seconds. + """Skip the images uploaded before x seconds. Let the users to fix the image's problem alone in the first x seconds. """ @@ -1538,8 +1534,7 @@ def check_step(self) -> None: def main(*args: str) -> bool: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/claimit.py b/scripts/claimit.py index 3f1ba9e490..68724c9ba0 100755 --- a/scripts/claimit.py +++ b/scripts/claimit.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -A script that adds claims to Wikidata items based on a list of pages. +"""A script that adds claims to Wikidata items based on a list of pages. These command line parameters can be used to specify which pages to work on: @@ -99,8 +98,7 @@ def treat_page_and_item(self, page, item) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/commons_information.py b/scripts/commons_information.py index 7da3aa6763..794408a2bb 100755 --- a/scripts/commons_information.py +++ b/scripts/commons_information.py @@ -296,8 +296,7 @@ def treat_page(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/commonscat.py b/scripts/commonscat.py index b71a5597a5..c2ce6786f8 100755 --- a/scripts/commonscat.py +++ b/scripts/commonscat.py @@ -280,8 +280,7 @@ def skipPage(page) -> bool: return False def treat_page(self) -> None: - """ - Add CommonsCat template to page. + """Add CommonsCat template to page. Take a page. Go to all the interwiki page looking for a commonscat template. When all the interwiki's links are checked and a proper @@ -531,8 +530,7 @@ def checkCommonscatLink(self, name: str = ''): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/coordinate_import.py b/scripts/coordinate_import.py index c802a700f6..e161127a0e 100755 --- a/scripts/coordinate_import.py +++ b/scripts/coordinate_import.py @@ -76,8 +76,7 @@ def __init__(self, **kwargs) -> None: self.create_missing_item = self.opt.create def has_coord_qualifier(self, claims) -> str | None: - """ - Check if self.prop is used as property for a qualifier. + """Check if self.prop is used as property for a qualifier. 
:param claims: the Wikibase claims to check in :type claims: dict @@ -91,8 +90,7 @@ def has_coord_qualifier(self, claims) -> str | None: return None def item_has_coordinates(self, item) -> bool: - """ - Check if the item has coordinates. + """Check if the item has coordinates. :return: whether the item has coordinates """ @@ -124,8 +122,7 @@ def treat_page_and_item(self, page, item) -> None: self.try_import_coordinates_from_page(page, item) def try_import_coordinates_from_page(self, page, item) -> bool: - """ - Try import coordinate from the given page to the given item. + """Try import coordinate from the given page to the given item. :return: whether any coordinates were found and the import was successful @@ -153,8 +150,7 @@ def try_import_coordinates_from_page(self, page, item) -> bool: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/cosmetic_changes.py b/scripts/cosmetic_changes.py index a916aef7f7..346d721309 100755 --- a/scripts/cosmetic_changes.py +++ b/scripts/cosmetic_changes.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -This module can do slight modifications to tidy a wiki page's source code. +"""This module can do slight modifications to tidy a wiki page's source code. The changes are not supposed to change the look of the rendered wiki page. @@ -91,8 +90,7 @@ def treat_page(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/create_isbn_edition.py b/scripts/create_isbn_edition.py index 427b7e1894..75bda4960d 100755 --- a/scripts/create_isbn_edition.py +++ b/scripts/create_isbn_edition.py @@ -763,8 +763,7 @@ def show_final_information(number, doi): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index 0ee55dd3a6..97933af969 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -r""" -A generic bot to do data ingestion (batch uploading) of photos or other files. +r"""A generic bot to do data ingestion (batch uploading) of photos or other files. In addition it installs related metadata. The uploading is primarily from a url to a wiki-site. @@ -120,8 +119,7 @@ class Photo(pywikibot.FilePage): def __init__(self, url: str, metadata: dict[str, Any], site: pywikibot.site.APISite | None = None) -> None: - """ - Initializer. + """Initializer. :param url: URL of photo :param metadata: metadata about the photo that can be referred to @@ -144,8 +142,7 @@ def __init__(self, url: str, metadata: dict[str, Any], super().__init__(site, self.get_title('%(_filename)s.%(_ext)s')) def download_photo(self) -> BinaryIO: - """ - Download the photo and store it in an io.BytesIO object. + """Download the photo and store it in an io.BytesIO object. TODO: Add exception handling """ @@ -155,8 +152,7 @@ def download_photo(self) -> BinaryIO: return self.contents def find_duplicate_images(self) -> list[str]: - """ - Find duplicates of the photo. + """Find duplicates of the photo. Calculates the SHA1 hash and asks the MediaWiki API for a list of duplicates. 
@@ -170,8 +166,7 @@ def find_duplicate_images(self) -> list[str]: sha1=base64.b16encode(hash_object.digest()))] def get_title(self, fmt: str) -> str: - """ - Populate format string with %(name)s entries using metadata. + """Populate format string with %(name)s entries using metadata. .. note:: this does not clean the title, so it may be unusable as a MediaWiki page title, and cause an API exception when used. @@ -215,8 +210,7 @@ class DataIngestionBot(pywikibot.Bot): """Data ingestion bot.""" def __init__(self, titlefmt: str, pagefmt: str, **kwargs) -> None: - """ - Initializer. + """Initializer. :param titlefmt: Title format :param pagefmt: Page format @@ -252,8 +246,7 @@ def treat(self, page) -> None: @classmethod def parse_configuration_page(cls, configuration_page) -> dict[str, str]: - """ - Parse a Page which contains the configuration. + """Parse a Page which contains the configuration. :param configuration_page: page with configuration :type configuration_page: :py:obj:`pywikibot.Page` @@ -282,8 +275,7 @@ def parse_configuration_page(cls, configuration_page) -> dict[str, str]: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/dataextend.py b/scripts/dataextend.py index 1a802cb229..2cdabc38c2 100755 --- a/scripts/dataextend.py +++ b/scripts/dataextend.py @@ -17728,8 +17728,7 @@ def findmixedrefs(self, html: str): def main(*args: tuple[str, ...]) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/delete.py b/scripts/delete.py index 977e3e5d00..05b4e3cf07 100755 --- a/scripts/delete.py +++ b/scripts/delete.py @@ -78,8 +78,7 @@ class PageWithRefs(Page): - """ - A subclass of Page with convenience methods for reference checking. + """A subclass of Page with convenience methods for reference checking. Supports the same interface as Page, with some added methods. """ @@ -99,8 +98,7 @@ def get_ref_table(self, *args, **kwargs) -> RefTable: @property def ref_table(self) -> RefTable: - """ - Build link reference table lazily. + """Build link reference table lazily. This property gives a default table without any parameter set for getReferences(), whereas self.get_ref_table() is able to accept @@ -111,8 +109,7 @@ def ref_table(self) -> RefTable: return self._ref_table def namespaces_with_ref_to_page(self, namespaces=None) -> set[Namespace]: - """ - Check if current page has links from pages in namepaces. + """Check if current page has links from pages in namepaces. If namespaces is None, all namespaces are checked. Returns a set with namespaces where a ref to page is present. @@ -148,8 +145,7 @@ def __init__(self, summary: str, **kwargs) -> None: self.generator = (PageWithRefs(p) for p in self.generator) def display_references(self) -> None: - """ - Display pages that link to the current page, sorted per namespace. + """Display pages that link to the current page, sorted per namespace. Number of pages to display per namespace is provided by: - self.opt.isorphan diff --git a/scripts/delinker.py b/scripts/delinker.py index a2b3c90f70..aa23d20a0e 100755 --- a/scripts/delinker.py +++ b/scripts/delinker.py @@ -222,8 +222,7 @@ def teardown(self): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. 
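Per the ``Photo.get_title`` docstring above, the title format string is filled with ``%(name)s`` style entries taken from the photo's metadata, i.e. ordinary %-formatting over the metadata dict (keys such as ``_filename`` and ``_ext`` are added by ``__init__``). A sketch with invented metadata, showing only the substitution itself, since building a real ``Photo`` additionally needs a target site::

    metadata = {'id': '42', 'description': 'Example photo',
                '_filename': 'img_0042', '_ext': 'jpg'}

    '%(_filename)s.%(_ext)s' % metadata              # 'img_0042.jpg'
    '%(description)s (%(id)s).%(_ext)s' % metadata   # 'Example photo (42).jpg'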
diff --git a/scripts/djvutext.py b/scripts/djvutext.py index 6e8d30596e..91898e2c60 100755 --- a/scripts/djvutext.py +++ b/scripts/djvutext.py @@ -56,8 +56,7 @@ class DjVuTextBot(SingleSiteBot): - """ - A bot that uploads text-layer from djvu files to Page:namespace. + """A bot that uploads text-layer from djvu files to Page:namespace. Works only on sites with Proofread Page extension installed. @@ -77,8 +76,7 @@ def __init__( pages: tuple | None = None, **kwargs ) -> None: - """ - Initializer. + """Initializer. :param djvu: djvu from where to fetch the text layer :type djvu: DjVuFile object @@ -137,8 +135,7 @@ def treat(self, page) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/download_dump.py b/scripts/download_dump.py index 46a5890f17..6436b93235 100755 --- a/scripts/download_dump.py +++ b/scripts/download_dump.py @@ -186,8 +186,7 @@ def convert_from_bytes(total_bytes): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py index d1aea725f7..a505c4e6fa 100755 --- a/scripts/fixing_redirects.py +++ b/scripts/fixing_redirects.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Correct all redirect links in featured pages or only one page of each wiki. +"""Correct all redirect links in featured pages or only one page of each wiki. Can be used with: @@ -200,8 +199,7 @@ def treat_page(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py index e495cc207e..30ca6c46fa 100755 --- a/scripts/harvest_template.py +++ b/scripts/harvest_template.py @@ -280,8 +280,7 @@ def template_link_target(item: pywikibot.ItemPage, return linked_item def _get_option_with_fallback(self, handler, option) -> Any: - """ - Compare bot's (global) and provided (local) options. + """Compare bot's (global) and provided (local) options. .. seealso:: :class:`OptionHandler` """ @@ -510,8 +509,7 @@ def handle_commonsmedia( def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/illustrate_wikidata.py b/scripts/illustrate_wikidata.py index 67ca848596..135c1e0cfe 100755 --- a/scripts/illustrate_wikidata.py +++ b/scripts/illustrate_wikidata.py @@ -81,8 +81,7 @@ def treat_page_and_item(self, page, item) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/image.py b/scripts/image.py index 3bd14f6c44..c3d9d9e403 100755 --- a/scripts/image.py +++ b/scripts/image.py @@ -65,8 +65,7 @@ class ImageRobot(ReplaceBot): def __init__(self, generator, old_image: str, new_image: str = '', **kwargs) -> None: - """ - Initializer. + """Initializer. :param generator: the pages to work on :type generator: iterable @@ -121,8 +120,7 @@ def __init__(self, generator, old_image: str, def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. 
+ """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index 829e766cbb..c6f7f3b70b 100755 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -202,8 +202,7 @@ def __init__(self, **kwargs) -> None: self.opt.target = pywikibot.Site(self.opt.target) def transfer_image(self, sourceImagePage) -> None: - """ - Download image and its description, and upload it to another site. + """Download image and its description, and upload it to another site. :return: the filename which was used to upload the image """ @@ -365,8 +364,7 @@ def transfer_allowed(self, image) -> bool: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/interwiki.py b/scripts/interwiki.py index 6b02cd5a17..7cff604166 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -579,8 +579,7 @@ def readOptions(self, option: str) -> bool: class Subject(interwiki_graph.Subject): - """ - Class to follow the progress of a single 'subject'. + """Class to follow the progress of a single 'subject'. (i.e. a page with all its translations) @@ -675,8 +674,7 @@ def __init__(self, origin=None, hints=None, conf=None) -> None: self.workonme = True def getFoundDisambig(self, site): - """ - Return the first disambiguation found. + """Return the first disambiguation found. If we found a disambiguation on the given site while working on the subject, this method returns it. If several ones have been found, the @@ -690,8 +688,7 @@ def getFoundDisambig(self, site): return None def getFoundNonDisambig(self, site): - """ - Return the first non-disambiguation found. + """Return the first non-disambiguation found. If we found a non-disambiguation on the given site while working on the subject, this method returns it. If several ones have been found, the @@ -708,8 +705,7 @@ def getFoundNonDisambig(self, site): return None def getFoundInCorrectNamespace(self, site): - """ - Return the first page in the extended namespace. + """Return the first page in the extended namespace. If we found a page that has the expected namespace on the given site while working on the subject, this method returns it. If several ones @@ -755,8 +751,7 @@ def translate(self, hints=None, keephintedsites: bool = False) -> None: self.hintedsites.add(page.site) def openSites(self): - """ - Iterator. + """Iterator. Yields (site, count) pairs: * site is a site where we still have work to do on @@ -797,8 +792,7 @@ def makeForcedStop(self, counter) -> None: self.forcedStop = True def addIfNew(self, page, counter, linkingPage) -> bool: - """ - Add the pagelink given to the todo list, if it hasn't been seen yet. + """Add the pagelink given to the todo list, if it hasn't been seen yet. If it is added, update the counter accordingly. @@ -847,8 +841,7 @@ def get_alternative( return pywikibot.Page(site, title) if title else None def namespaceMismatch(self, linkingPage, linkedPage, counter) -> bool: - """ - Check whether or not the given page has a different namespace. + """Check whether or not the given page has a different namespace. Returns True if the namespaces are different and the user has selected not to follow the linked page. 
@@ -917,8 +910,7 @@ def namespaceMismatch(self, linkingPage, linkedPage, counter) -> bool: return False def disambigMismatch(self, page, counter): - """ - Check whether the given page has a different disambiguation status. + """Check whether the given page has a different disambiguation status. Returns a tuple (skip, alternativePage). @@ -1248,8 +1240,7 @@ def check_page(self, page, counter) -> None: break def batchLoaded(self, counter) -> None: - """ - Notify that the promised batch of pages was loaded. + """Notify that the promised batch of pages was loaded. This is called by a worker to tell us that the promised batch of pages was loaded. @@ -1416,8 +1407,7 @@ def assemble(self): return result def finish(self): - """ - Round up the subject, making any necessary changes. + """Round up the subject, making any necessary changes. This should be called exactly once after the todo list has gone empty. @@ -1745,12 +1735,11 @@ def replaceLinks(self, page, newPages) -> bool: @staticmethod def reportBacklinks(new, updatedSites) -> None: - """ - Report missing back links. This will be called from finish() if needed. - - updatedSites is a list that contains all sites we changed, to avoid - reporting of missing backlinks for pages we already fixed + """Report missing back links. + This will be called from :meth:`finish` if needed. *updatedSites* + is a list that contains all sites that are changed, to avoid + reporting of missing backlinks for already fixed pages. """ # use sets because searching an element is faster than in lists expectedPages = set(new.values()) @@ -1801,8 +1790,7 @@ def reportBacklinks(new, updatedSites) -> None: class InterwikiBot: - """ - A class keeping track of a list of subjects. + """A class keeping track of a list of subjects. It controls which pages are queried from which languages when. """ @@ -1830,8 +1818,7 @@ def add(self, page, hints=None) -> None: self.plus(site, count) def setPageGenerator(self, pageGenerator, number=None, until=None) -> None: - """ - Add a generator of subjects. + """Add a generator of subjects. Once the list of subjects gets too small, this generator is called to produce more Pages. @@ -1910,8 +1897,7 @@ def firstSubject(self) -> Subject | None: return self.subjects[0] if self.subjects else None def maxOpenSite(self): - """ - Return the site that has the most open queries plus the number. + """Return the site that has the most open queries plus the number. If there is nothing left, return None. Only sites that are todo for the first Subject are returned. @@ -1965,8 +1951,7 @@ def selectQuerySite(self): return self.maxOpenSite() def oneQuery(self) -> bool: - """ - Perform one step in the solution process. + """Perform one step in the solution process. Returns True if pages could be preloaded, or false otherwise. @@ -2117,8 +2102,7 @@ def botMayEdit(page) -> bool: def page_empty_check(page) -> bool: - """ - Return True if page should be skipped as it is almost empty. + """Return True if page should be skipped as it is almost empty. Pages in content namespaces are considered empty if they contain less than 50 characters, and other pages are considered empty if they are not diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py index 018c606ba0..328c011bb1 100755 --- a/scripts/interwikidata.py +++ b/scripts/interwikidata.py @@ -146,8 +146,7 @@ def create_item(self) -> pywikibot.ItemPage: return item def handle_complicated(self) -> bool: - """ - Handle pages when they have interwiki conflict. 
+ """Handle pages when they have interwiki conflict. When this method returns True it means conflict has resolved and it's okay to clean old interwiki links. @@ -232,8 +231,7 @@ def try_to_merge(self, item) -> pywikibot.ItemPage | bool | None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/listpages.py b/scripts/listpages.py index 64688ffddb..b052b2138a 100755 --- a/scripts/listpages.py +++ b/scripts/listpages.py @@ -136,8 +136,7 @@ class Formatter: fmt_need_lang = [k for k, v in fmt_options.items() if 'trs_title' in v] def __init__(self, page, outputlang=None, default: str = '******') -> None: - """ - Initializer. + """Initializer. :param page: the page to be formatted. :type page: Page object. @@ -271,8 +270,7 @@ def teardown(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py index e3cbb68f16..929d5d88d5 100755 --- a/scripts/maintenance/cache.py +++ b/scripts/maintenance/cache.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -r""" -This script runs commands on each entry in the API caches. +r"""This script runs commands on each entry in the API caches. Syntax: diff --git a/scripts/maintenance/make_i18n_dict.py b/scripts/maintenance/make_i18n_dict.py index 5c22a3b20c..b8f361c6d0 100755 --- a/scripts/maintenance/make_i18n_dict.py +++ b/scripts/maintenance/make_i18n_dict.py @@ -112,8 +112,7 @@ def read(self, oldmsg, newmsg=None): print('WARNING: "en" key missing for message ' + newmsg) def run(self, quiet=False): - """ - Run the bot, read the messages from source and print the dict. + """Run the bot, read the messages from source and print the dict. :param quiet: print the result if False :type quiet: bool @@ -124,8 +123,7 @@ def run(self, quiet=False): self.print_all() def to_json(self, quiet=True): - """ - Run the bot and create json files. + """Run the bot and create json files. :param quiet: Print the result if False :type quiet: bool diff --git a/scripts/misspelling.py b/scripts/misspelling.py index ef3a50d674..12edc75ed7 100755 --- a/scripts/misspelling.py +++ b/scripts/misspelling.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -This script fixes links that contain common spelling mistakes. +"""This script fixes links that contain common spelling mistakes. This is only possible on wikis that have a template for these misspellings. @@ -105,8 +104,7 @@ def generator(self) -> Generator[pywikibot.Page]: yield from pagegenerators.PreloadingGenerator(chain(*generators)) def findAlternatives(self, page) -> bool: - """ - Append link target to a list of alternative links. + """Append link target to a list of alternative links. Overrides the BaseDisambigBot method. @@ -142,8 +140,7 @@ def findAlternatives(self, page) -> bool: return False def setSummaryMessage(self, page, *args, **kwargs) -> None: - """ - Setup the summary message. + """Setup the summary message. Overrides the BaseDisambigBot method. """ @@ -154,8 +151,7 @@ def setSummaryMessage(self, page, *args, **kwargs) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. 
diff --git a/scripts/movepages.py b/scripts/movepages.py index 266e369fab..8086cd9d56 100755 --- a/scripts/movepages.py +++ b/scripts/movepages.py @@ -212,8 +212,7 @@ def treat_page(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/newitem.py b/scripts/newitem.py index 6a05b84be0..a3301fbec9 100755 --- a/scripts/newitem.py +++ b/scripts/newitem.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -This script creates new items on Wikidata based on certain criteria. +"""This script creates new items on Wikidata based on certain criteria. * When was the (Wikipedia) page created? * When was the last edit on the page? @@ -182,8 +181,7 @@ def treat_page_and_item(self, page, item) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/noreferences.py b/scripts/noreferences.py index a13a1dcb0c..4c373e6d7d 100755 --- a/scripts/noreferences.py +++ b/scripts/noreferences.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -This script adds a missing references section to pages. +"""This script adds a missing references section to pages. It goes over multiple pages, searches for pages where <references /> is missing although a <ref> tag is present, and in that case adds a new @@ -620,8 +619,7 @@ def lacksReferences(self, text) -> bool: return True def addReferences(self, oldText) -> str: - """ - Add a references tag into an existing section where it fits into. + """Add a references tag into an existing section where it fits into. If there is no such section, creates a new section containing the references tag. Also repair malformed references tags. @@ -807,8 +805,7 @@ def treat_page(self) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index e8bda2ceb7..02f60d7a1c 100755 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -r""" -Script to delete files that are also present on Wikimedia Commons. +r"""Script to delete files that are also present on Wikimedia Commons. Do not run this script on Wikimedia Commons itself. It works based on a given array of templates defined below. @@ -386,8 +385,7 @@ def teardown(self): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/pagefromfile.py b/scripts/pagefromfile.py index 5cba3d1fac..2e0e09d0ad 100755 --- a/scripts/pagefromfile.py +++ b/scripts/pagefromfile.py @@ -103,8 +103,7 @@ def __init__(self, offset) -> None: class PageFromFileRobot(SingleSiteBot, CurrentPageBot): - """ - Responsible for writing pages to the wiki. + """Responsible for writing pages to the wiki. Titles and contents are given by a PageFromFileReader. @@ -281,8 +280,7 @@ def find_page(self, text) -> tuple[int, str, str]: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used.
diff --git a/scripts/parser_function_count.py b/scripts/parser_function_count.py index 09a6aabfbb..da298bb0eb 100755 --- a/scripts/parser_function_count.py +++ b/scripts/parser_function_count.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Used to find expensive templates that are subject to be converted to Lua. +"""Used to find expensive templates that are subject to be converted to Lua. It counts parser functions and then orders templates by number of these and uploads the first n titles or alternatively templates having count()>n. diff --git a/scripts/patrol.py b/scripts/patrol.py index ec0259b6e1..dd9d40f1e8 100755 --- a/scripts/patrol.py +++ b/scripts/patrol.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -The bot is meant to mark the edits based on info obtained by whitelist. +"""The bot is meant to mark the edits based on info obtained by whitelist. This bot obtains a list of recent changes and newpages and marks the edits as patrolled based on a whitelist. @@ -83,8 +82,7 @@ class PatrolBot(BaseBot): } def __init__(self, site=None, **kwargs) -> None: - """ - Initializer. + """Initializer. :keyword ask: If True, confirm each patrol action :keyword whitelist: page title for whitelist (optional) diff --git a/scripts/protect.py b/scripts/protect.py index b9440d24eb..a6681af28b 100755 --- a/scripts/protect.py +++ b/scripts/protect.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -This script can be used to protect and unprotect pages en masse. +"""This script can be used to protect and unprotect pages en masse. Of course, you will need an admin account on the relevant wiki. These command line parameters can be used to specify which pages to work on: @@ -87,8 +86,7 @@ class ProtectionRobot(SingleSiteBot, ConfigParserBot, CurrentPageBot): } def __init__(self, protections, **kwargs) -> None: - """ - Create a new ProtectionRobot. + """Create a new ProtectionRobot. :param protections: protections as a dict with "type": "level" :type protections: dict @@ -150,8 +148,7 @@ def check_protection_level(operation, level, levels, default=None) -> str: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/redirect.py b/scripts/redirect.py index 1e0cb11ab2..aeee362e90 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Script to resolve double redirects, and to delete broken redirects. +"""Script to resolve double redirects, and to delete broken redirects. Requires access to MediaWiki's maintenance pages or to a XML dump file. Delete function requires adminship. @@ -139,8 +138,7 @@ def get_redirects_from_dump( self, alsoGetPageTitles: bool = False ) -> tuple[dict[str, str], set[str]]: - """ - Extract redirects from dump. + """Extract redirects from dump. Load a local XML dump file, look at all pages which have the redirect flag set, and find out where they're pointing at. Return @@ -230,8 +228,7 @@ def get_redirects_via_api( self, maxlen: int = 8 ) -> Generator[tuple[str, int | None, str, str | None]]: - r""" - Return a generator that yields tuples of data about redirect Pages. + r"""Return a generator that yields tuples of data about redirect Pages. .. versionchanged:: 7.0 only yield tuple if type of redirect is not 1 (normal redirect) @@ -693,8 +690,7 @@ def treat(self, page) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. 
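The ``protections`` argument documented in the ``ProtectionRobot`` hunk above is a plain mapping of protection type to level. A hedged sketch of constructing and running the bot; the page title is invented and the call would need admin rights on the target wiki::

    import pywikibot
    from pywikibot import pagegenerators
    from scripts.protect import ProtectionRobot

    site = pywikibot.Site()
    gen = pagegenerators.PagesFromTitlesGenerator(['Project:Sandbox'], site)

    # Protection type -> level, as described in the docstring.
    protections = {'edit': 'sysop', 'move': 'autoconfirmed'}
    ProtectionRobot(protections, generator=gen, site=site).run()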
+ """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/reflinks.py b/scripts/reflinks.py index 95b719e67d..2f7f10e5a5 100755 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -265,8 +265,7 @@ def transform(self, ispdf: bool = False) -> None: # TODO : remove HTML when both opening and closing tags are included def avoid_uppercase(self) -> None: - """ - Convert to title()-case if title is 70% uppercase characters. + """Convert to title()-case if title is 70% uppercase characters. Skip title that has less than 6 characters. """ @@ -744,8 +743,7 @@ def treat(self, page) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/replace.py b/scripts/replace.py index 0a1daa644d..c4fc4a6d02 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -r""" -This bot will make direct text replacements. +r"""This bot will make direct text replacements. It will retrieve information on which pages might need changes either from an XML dump or a text file, or only change a single page. @@ -325,8 +324,7 @@ def get_inside_exceptions(self): class ReplacementList(list): - """ - A list of replacements which all share some properties. + """A list of replacements which all share some properties. The shared properties are: * use_regex @@ -412,8 +410,7 @@ def get_inside_exceptions(self): class XmlDumpReplacePageGenerator: - """ - Iterator that will yield Pages that might contain text to replace. + """Iterator that will yield Pages that might contain text to replace. These pages will be retrieved from a local XML dump file. @@ -606,8 +603,7 @@ def isTextExcepted(self, text, exceptions=None) -> bool: return False def apply_replacements(self, original_text, applied, page=None): - """ - Apply all replacements to the given text. + """Apply all replacements to the given text. :rtype: str, set """ diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index 4f11c209cd..d77ee59b44 100755 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -226,8 +226,7 @@ def check_page(self, pagename) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 94b1136162..bf2f442910 100755 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -155,8 +155,7 @@ def revert(self, item) -> str | bool: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 2f309d897d..2b5775243e 100755 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Script to help a human solve disambiguations by presenting a set of options. +"""Script to help a human solve disambiguations by presenting a set of options. Specify the disambiguation page on the command line. @@ -441,8 +440,7 @@ def __iter__(self) -> Generator[pywikibot.Page]: class PrimaryIgnoreManager: - """ - Primary ignore manager. + """Primary ignore manager. 
If run with the -primary argument, reads from a file which pages should not be worked on; these are the ones where the user pressed n last time. @@ -630,8 +628,7 @@ def __init__(self, *args, **kwargs) -> None: self.dn_template_str = i18n.translate(self.site, dn_template) def checkContents(self, text: str) -> str | None: # noqa: N802 - """ - Check if the text matches any of the ignore regexes. + """Check if the text matches any of the ignore regexes. :param text: wikitext of a page :return: None if none of the regular expressions @@ -1221,8 +1218,7 @@ def treat(self, page) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/speedy_delete.py b/scripts/speedy_delete.py index 1c753ed8a5..e7f1935ecb 100755 --- a/scripts/speedy_delete.py +++ b/scripts/speedy_delete.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Help sysops to quickly check and/or delete pages listed for speedy deletion. +"""Help sysops to quickly check and/or delete pages listed for speedy deletion. This bot trawls through candidates for speedy deletion in a fast and semi-automated fashion. It displays the contents of each page @@ -38,6 +37,7 @@ class SpeedyBot(SingleSiteBot, ExistingPageBot): + """Bot to delete pages which are tagged as speedy deletion. This bot will load a list of pages from the category of candidates for diff --git a/scripts/template.py b/scripts/template.py index d76108ace7..770fc42a84 100755 --- a/scripts/template.py +++ b/scripts/template.py @@ -134,8 +134,7 @@ class TemplateRobot(ReplaceBot): } def __init__(self, generator, templates: dict, **kwargs) -> None: - """ - Initializer. + """Initializer. :param generator: the pages to work on :type generator: iterable @@ -209,8 +208,7 @@ def __init__(self, generator, templates: dict, **kwargs) -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/templatecount.py b/scripts/templatecount.py index cd4d744a01..266acc286f 100755 --- a/scripts/templatecount.py +++ b/scripts/templatecount.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Display the list of pages transcluding a given list of templates. +"""Display the list of pages transcluding a given list of templates. It can also be used to simply count the number of pages (rather than listing each individually). @@ -49,8 +48,7 @@ class TemplateCountRobot: @classmethod def count_templates(cls, templates, namespaces) -> None: - """ - Display number of transclusions for a list of templates. + """Display number of transclusions for a list of templates. Displays the number of transcluded page in the given 'namespaces' for each template given by 'templates' list. @@ -75,8 +73,7 @@ def count_templates(cls, templates, namespaces) -> None: @classmethod def list_templates(cls, templates, namespaces) -> None: - """ - Display transcluded pages for a list of templates. + """Display transcluded pages for a list of templates. Displays each transcluded page in the given 'namespaces' for each template given by 'templates' list. @@ -103,8 +100,7 @@ def list_templates(cls, templates, namespaces) -> None: @classmethod def template_dict(cls, templates, namespaces) -> dict[ str, list[pywikibot.Page]]: - """ - Create a dict of templates and its transcluded pages. + """Create a dict of templates and its transcluded pages. 
The names of the templates are the keys, and lists of pages transcluding templates in the given namespaces are the values. @@ -119,8 +115,7 @@ def template_dict(cls, templates, namespaces) -> dict[ @staticmethod def template_dict_generator(templates, namespaces) -> Generator[ tuple[str, list[pywikibot.Page]], None, None]: - """ - Yield transclusions of each template in 'templates'. + """Yield transclusions of each template in 'templates'. For each template in 'templates', yield a tuple (template, transclusions), where 'transclusions' is a list of all pages @@ -140,8 +135,7 @@ def template_dict_generator(templates, namespaces) -> Generator[ def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/touch.py b/scripts/touch.py index 9004e634ee..2793c36c2e 100755 --- a/scripts/touch.py +++ b/scripts/touch.py @@ -137,8 +137,7 @@ def purgepages(self, flush=False): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/transferbot.py b/scripts/transferbot.py index 6c8b3b67c9..252bde8041 100755 --- a/scripts/transferbot.py +++ b/scripts/transferbot.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -r""" -This script transfers pages from a source wiki to a target wiki. +r"""This script transfers pages from a source wiki to a target wiki. It also copies edit history to a subpage. @@ -61,8 +60,7 @@ def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/transwikiimport.py b/scripts/transwikiimport.py index dc493cc1ab..7a8a18e343 100755 --- a/scripts/transwikiimport.py +++ b/scripts/transwikiimport.py @@ -161,8 +161,7 @@ def api_query(site, params: dict[str, str]): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py index 82feda7f30..0618b5193c 100755 --- a/scripts/unusedfiles.py +++ b/scripts/unusedfiles.py @@ -162,8 +162,7 @@ def append_text(self, page, apptext): def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/upload.py b/scripts/upload.py index 07d9aec5fe..9432928060 100755 --- a/scripts/upload.py +++ b/scripts/upload.py @@ -112,8 +112,7 @@ def get_chunk_size(match) -> int: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/scripts/watchlist.py b/scripts/watchlist.py index f6c1df66d3..d5a2e1c901 100755 --- a/scripts/watchlist.py +++ b/scripts/watchlist.py @@ -122,8 +122,7 @@ def refresh_new() -> None: def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. 
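``TemplateCountRobot.template_dict_generator`` from the templatecount hunks earlier yields ``(template, transclusions)`` pairs. A short sketch, assuming a configured default site and an invented template name::

    from scripts.templatecount import TemplateCountRobot

    # Namespace 0 restricts the lookup to articles.
    for name, pages in TemplateCountRobot.template_dict_generator(
            ['Infobox person'], [0]):
        print(f'{name}: {len(pages)} transclusions')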
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index bd58cafb7b..c5c4842105 100755 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -This bot is used for checking external links found at the wiki. +"""This bot is used for checking external links found at the wiki. It checks several pages at once, with a limit set by the config variable max_external_links, which defaults to 50. @@ -190,8 +189,7 @@ def weblinks_from_text( without_bracketed: bool = False, only_bracketed: bool = False ): - """ - Yield web links from text. + """Yield web links from text. Only used as text predicate for XmlDumpPageGenerator to speed up generator. @@ -323,8 +321,7 @@ def run(self): class History: - """ - Store previously found dead links. + """Store previously found dead links. The URLs are dictionary keys, and values are lists of tuples where each tuple represents one time the URL was @@ -419,8 +416,7 @@ def set_dead_link(self, url, error, page, weblink_dead_days) -> None: self.history_dict[url] = [(page.title(), now, error)] def set_link_alive(self, url) -> bool: - """ - Record that the link is now alive. + """Record that the link is now alive. If link was previously found dead, remove it from the .dat file. @@ -441,8 +437,7 @@ def save(self) -> None: class DeadLinkReportThread(threading.Thread): - """ - A Thread that is responsible for posting error reports on talk pages. + """A Thread that is responsible for posting error reports on talk pages. There is only one DeadLinkReportThread, and it is using a semaphore to make sure that two LinkCheckerThreads cannot access the queue at the same time. @@ -536,8 +531,7 @@ def run(self) -> None: class WeblinkCheckerRobot(SingleSiteBot, ExistingPageBot): - """ - Bot which will search for dead weblinks. + """Bot which will search for dead weblinks. It uses several LinkCheckThreads at once to process pages from generator. """ @@ -640,8 +634,7 @@ def RepeatPageGenerator(): # noqa: N802 def main(*args: str) -> None: - """ - Process command line arguments and invoke bot. + """Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. diff --git a/tests/api_tests.py b/tests/api_tests.py index 14e56c9962..8c21f4d594 100755 --- a/tests/api_tests.py +++ b/tests/api_tests.py @@ -779,8 +779,7 @@ def test_internals(self): class TestLazyLoginBase(TestCase): - """ - Test that it tries to login when read API access is denied. + """Test that it tries to login when read API access is denied. Because there is no such family configured it creates an AutoFamily and BaseSite on it's own. It's testing against steward.wikimedia.org. diff --git a/tests/archivebot_tests.py b/tests/archivebot_tests.py index 479e57b16b..49f2c780fd 100755 --- a/tests/archivebot_tests.py +++ b/tests/archivebot_tests.py @@ -163,8 +163,7 @@ def test_archivebot(self, code=None): class TestArchiveBotAfterDateUpdate(TestCase): - """ - Test archivebot script on failures on Wikipedia sites. + """Test archivebot script on failures on Wikipedia sites. If failure is due to updated date format on wiki, test pages with new format only. diff --git a/tests/aspects.py b/tests/aspects.py index 7a6da4e6d7..6fefe0cb39 100644 --- a/tests/aspects.py +++ b/tests/aspects.py @@ -1,5 +1,4 @@ -""" -Test aspects to allow fine grained control over what tests are executed. +"""Test aspects to allow fine grained control over what tests are executed. 
Several parts of the test infrastructure are implemented as mixins, such as API result caching and excessive test durations. @@ -241,8 +240,7 @@ def assertPageTitlesCountEqual( def assertAPIError(self, code, info=None, callable_obj=None, *args, regex=None, **kwargs): - """ - Assert that a specific APIError wrapped around :py:obj:`assertRaises`. + """Assert that a specific APIError wrapped around :exc:`assertRaises`. If no callable object is defined and it returns a context manager, that context manager will return the underlying context manager used by @@ -438,8 +436,7 @@ class CheckHostnameMixin(TestCaseBase): @classmethod def setUpClass(cls): - """ - Set up the test class. + """Set up the test class. Prevent tests running if the host is down. """ @@ -579,8 +576,7 @@ def setUpClass(cls): f'{cls.__name__}: Not able to login to {site}') def setUp(self): - """ - Set up the test case. + """Set up the test case. Login to the site if it is not logged in. """ @@ -892,8 +888,7 @@ class TestCase(TestCaseBase, metaclass=MetaTestCaseClass): @classmethod def setUpClass(cls): - """ - Set up the test class. + """Set up the test class. Prefetch the Site object for each of the sites the test class has declared are needed. @@ -1058,8 +1053,7 @@ def add_patch(decorated): return add_patch def patch(self, obj, attr_name, replacement): - """ - Patch the obj's attribute with the replacement. + """Patch the obj's attribute with the replacement. It will be reset after each ``tearDown``. """ @@ -1105,8 +1099,7 @@ class DefaultSiteTestCase(TestCase): @classmethod def override_default_site(cls, site): - """ - Override the default site. + """Override the default site. :param site: site tests should use :type site: BaseSite @@ -1200,8 +1193,7 @@ class WikibaseTestCase(TestCase): @classmethod def setUpClass(cls): - """ - Set up the test class. + """Set up the test class. Checks that all sites are configured with a Wikibase repository, with Site.has_data_repository() returning True, and all sites @@ -1249,8 +1241,7 @@ class WikibaseClientTestCase(WikibaseTestCase): @classmethod def setUpClass(cls): - """ - Set up the test class. + """Set up the test class. Checks that all sites are configured as a Wikibase client, with Site.has_data_repository returning True. @@ -1285,8 +1276,7 @@ class DefaultWikidataClientTestCase(DefaultWikibaseClientTestCase): @classmethod def setUpClass(cls): - """ - Set up the test class. + """Set up the test class. Require the data repository is wikidata.org. """ @@ -1446,8 +1436,7 @@ def _build_message(cls, return msg def assertDeprecationParts(self, deprecated=None, instead=None): - """ - Assert that a deprecation warning happened. + """Assert that a deprecation warning happened. To simplify deprecation tests it just requires the to separated parts and forwards the result to :py:obj:`assertDeprecation`. @@ -1465,8 +1454,7 @@ def assertDeprecationParts(self, deprecated=None, instead=None): self.assertDeprecation(self._build_message(deprecated, instead)) def assertDeprecation(self, msg=None): - """ - Assert that a deprecation warning happened. + """Assert that a deprecation warning happened. :param msg: Either the specific message or None to allow any generic message. When set to ``INSTEAD`` it only counts those supplying an @@ -1497,8 +1485,7 @@ def assertDeprecation(self, msg=None): def assertOneDeprecationParts(self, deprecated=None, instead=None, count=1): - """ - Assert that exactly one deprecation message happened and reset. 
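The ``patch`` helper above replaces an attribute for the duration of one test and restores it on ``tearDown``. A hedged sketch inside an invented test class::

    import pywikibot
    from tests.aspects import DefaultDrySiteTestCase

    class ExampleTest(DefaultDrySiteTestCase):

        """Invented test case using the patch helper."""

        def test_simulate_flag(self):
            # The original value is restored automatically after tearDown().
            self.patch(pywikibot.config, 'simulate', True)
            self.assertTrue(pywikibot.config.simulate)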
+ """Assert that exactly one deprecation message happened and reset. It uses the same arguments as :py:obj:`assertDeprecationParts`. """ @@ -1564,8 +1551,7 @@ def tearDown(self): class HttpbinTestCase(TestCase): - """ - Custom test case class, which allows dry httpbin tests with pytest-httpbin. + """Custom test case class, which allows dry httpbin tests. Test cases, which use httpbin, need to inherit this class. """ diff --git a/tests/basepage.py b/tests/basepage.py index 50880971c9..21ced3026e 100644 --- a/tests/basepage.py +++ b/tests/basepage.py @@ -25,8 +25,7 @@ def setUp(self): class BasePageLoadRevisionsCachingTestBase(BasePageTestBase): - """ - Test site.loadrevisions() caching. + """Test site.loadrevisions() caching. This test class monkey patches site.loadrevisions, which will cause the pickling tests in site_tests and page_tests to fail, if it diff --git a/tests/bot_tests.py b/tests/bot_tests.py index 0f5cb99a53..7641abb72c 100755 --- a/tests/bot_tests.py +++ b/tests/bot_tests.py @@ -40,8 +40,7 @@ class TestBotTreatExit: """Mixin to provide handling for treat and exit.""" def _treat(self, pages, post_treat=None): - """ - Get tests which are executed on each treat. + """Get tests which are executed on each treat. It uses pages as an iterator and compares the page given to the page returned by pages iterator. It checks that the bot's _site and site @@ -71,8 +70,7 @@ def treat(page): return treat def _treat_page(self, pages=True, post_treat=None): - """ - Adjust to CurrentPageBot signature. + """Adjust to CurrentPageBot signature. It uses almost the same logic as _treat but returns a wrapper function which itself calls the function returned by _treat. diff --git a/tests/category_bot_tests.py b/tests/category_bot_tests.py index 0ce04ce8f9..efc28ce373 100755 --- a/tests/category_bot_tests.py +++ b/tests/category_bot_tests.py @@ -68,6 +68,7 @@ def _runtest_strip_cfd_templates(self, template_start, template_end): class TestPreprocessingCategory(TestCase): + """Test determining template or type categorization target.""" family = 'wikipedia' diff --git a/tests/cosmetic_changes_tests.py b/tests/cosmetic_changes_tests.py index 251d38b8f3..8714183abf 100755 --- a/tests/cosmetic_changes_tests.py +++ b/tests/cosmetic_changes_tests.py @@ -434,8 +434,7 @@ def test_translate_magic_words(self): @unittest.expectedFailure def test_translateMagicWords_fail(self): - """ - Test translateMagicWords method. + """Test translateMagicWords method. The current implementation doesn't check whether the magic word is inside a template. 
diff --git a/tests/edit_tests.py b/tests/edit_tests.py index 7fb89e9e6a..9f3e3515c4 100755 --- a/tests/edit_tests.py +++ b/tests/edit_tests.py @@ -72,6 +72,7 @@ def test_appendtext(self): class TestSiteMergeHistory(TestCase): + """Test history merge action.""" family = 'wikipedia' diff --git a/tests/fixing_redirects_tests.py b/tests/fixing_redirects_tests.py index a60b121fc9..d1597a23a0 100755 --- a/tests/fixing_redirects_tests.py +++ b/tests/fixing_redirects_tests.py @@ -13,6 +13,7 @@ class TestFixingRedirects(TestCase): + """Test fixing redirects.""" family = 'wikipedia' diff --git a/tests/flow_tests.py b/tests/flow_tests.py index aa718b07cd..62e19f5734 100755 --- a/tests/flow_tests.py +++ b/tests/flow_tests.py @@ -239,6 +239,7 @@ def test_invalid_data(self): class TestFlowTopic(TestCase): + """Test Topic functions.""" family = 'wikipedia' diff --git a/tests/http_tests.py b/tests/http_tests.py index 5139c11e90..f94250fb85 100755 --- a/tests/http_tests.py +++ b/tests/http_tests.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Tests for http module.""" # -# (C) Pywikibot team, 2014-2023 +# (C) Pywikibot team, 2014-2024 # # Distributed under the terms of the MIT license. # @@ -520,8 +520,7 @@ def test_no_params(self): self.assertEqual(r.json()['args'], {}) def test_unencoded_params(self): - """ - Test fetch method with unencoded parameters to be encoded internally. + """Test fetch method with unencoded parameters to be encoded inside. HTTPBin returns the args in their urldecoded form, so what we put in should be the same as what we get out. @@ -537,8 +536,7 @@ def test_unencoded_params(self): self.assertEqual(r.json()['args'], {'fish&chips': 'delicious'}) def test_encoded_params(self): - """ - Test fetch method with encoded parameters to be re-encoded internally. + """Test fetch method with encoded parameters to be re-encoded inside. HTTPBin returns the args in their urldecoded form, so what we put in should be the same as what we get out. @@ -555,6 +553,7 @@ def test_encoded_params(self): class DataBodyParameterTestCase(HttpbinTestCase): + """Test data and body params of fetch/request methods are equivalent.""" maxDiff = None diff --git a/tests/link_tests.py b/tests/link_tests.py index 65a26ee17d..8134cd65f8 100755 --- a/tests/link_tests.py +++ b/tests/link_tests.py @@ -59,8 +59,7 @@ def test(self): class TestLink(DefaultDrySiteTestCase): - """ - Test parsing links with DrySite. + """Test parsing links with DrySite. The DrySite is using the builtin namespaces which behaviour is controlled in this repository so namespace aware tests do work, even when the actual diff --git a/tests/logentries_tests.py b/tests/logentries_tests.py index 2e9247423d..2aa90ce959 100755 --- a/tests/logentries_tests.py +++ b/tests/logentries_tests.py @@ -26,8 +26,7 @@ class TestLogentriesBase(TestCase): - """ - Base class for log entry tests. + """Base class for log entry tests. It uses the German Wikipedia for a current representation of the log entries and the test Wikipedia for the future representation. diff --git a/tests/login_tests.py b/tests/login_tests.py index f27d0e3a52..0e95d182e9 100755 --- a/tests/login_tests.py +++ b/tests/login_tests.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Tests for LoginManager classes. +"""Tests for LoginManager classes. e.g. used to test password-file based login. 
""" @@ -21,12 +20,14 @@ class FakeFamily: + """Mock.""" name = '~FakeFamily' class FakeSite: + """Mock.""" code = '~FakeCode' @@ -37,6 +38,7 @@ class FakeSite: class FakeConfig: + """Mock.""" usernames = defaultdict(dict) @@ -46,6 +48,7 @@ class FakeConfig: @mock.patch('pywikibot.Site', FakeSite) @mock.patch('pywikibot.login.config', FakeConfig) class TestOfflineLoginManager(DefaultDrySiteTestCase): + """Test offline operation of login.LoginManager.""" dry = True @@ -82,6 +85,7 @@ def test_star_family(self): @mock.patch('pywikibot.Site', FakeSite) class TestPasswordFile(DefaultDrySiteTestCase): + """Test parsing password files.""" def patch(self, name): diff --git a/tests/noreferences_tests.py b/tests/noreferences_tests.py index d64d2d6125..80ed3b6cb8 100755 --- a/tests/noreferences_tests.py +++ b/tests/noreferences_tests.py @@ -13,6 +13,7 @@ class TestAddingReferences(TestCase): + """Test adding references to section.""" family = 'wikipedia' diff --git a/tests/page_tests.py b/tests/page_tests.py index 716bb8c13e..6b9c294371 100755 --- a/tests/page_tests.py +++ b/tests/page_tests.py @@ -597,8 +597,7 @@ def test_depth(self): self.assertEqual(page_d3.depth, 3) def test_page_image(self): - """ - Test ``Page.page_image`` function. + """Test ``Page.page_image`` function. Since we are not sure what the wiki will return, we mainly test types """ @@ -943,8 +942,7 @@ def test_revisions_time_interval_true(self): class TestPageRedirects(TestCase): - """ - Test redirects. + """Test redirects. This is using the pages 'User:Legoktm/R1', 'User:Legoktm/R2' and 'User:Legoktm/R3' on the English Wikipedia. 'R1' is redirecting to 'R2', @@ -1237,6 +1235,7 @@ def test_invalid_entities(self): class TestPermalink(TestCase): + """Test that permalink links are correct.""" family = 'wikipedia' @@ -1261,6 +1260,7 @@ def test_permalink(self): class TestShortLink(TestCase): + """Test that short link management is correct.""" login = True diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py index 4e4205383c..e3ba1ed4db 100755 --- a/tests/pagegenerators_tests.py +++ b/tests/pagegenerators_tests.py @@ -815,8 +815,7 @@ class TestItemClaimFilterPageGenerator(WikidataTestCase): """Test item claim filter page generator generator.""" def _simple_claim_test(self, prop, claim, qualifiers, valid, negate=False): - """ - Test given claim on sample (India) page. + """Test given claim on sample (India) page. :param prop: the property to check :param claim: the claim the property should contain @@ -860,8 +859,7 @@ def test_invalid_qualifiers(self): False) def test_nonexisting_qualifiers(self): - """ - Test ItemClaimFilterPageGenerator on sample page. + """Test ItemClaimFilterPageGenerator on sample page. The item does not have the searched qualifiers. """ diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py index c6811d81be..063b289ff9 100755 --- a/tests/proofreadpage_tests.py +++ b/tests/proofreadpage_tests.py @@ -29,6 +29,7 @@ class TestPagesTagParser(TestCase): + """Test TagAttr class.""" net = False diff --git a/tests/pwb_tests.py b/tests/pwb_tests.py index 130ee4a927..5bfbd952e9 100755 --- a/tests/pwb_tests.py +++ b/tests/pwb_tests.py @@ -48,8 +48,7 @@ def _do_check(self, name): return (direct, vpwb) def test_env(self): - """ - Test external environment of pywikibot. + """Test external environment of pywikibot. Make sure the environment is not contaminated, and is the same as the environment we get when directly running a script. 
@@ -57,8 +56,7 @@ def test_env(self): self._do_check('print_env') def test_locals(self): - """ - Test internal environment of pywikibot. + """Test internal environment of pywikibot. Make sure the environment is not contaminated, and is the same as the environment we get when directly running a script. diff --git a/tests/reflinks_tests.py b/tests/reflinks_tests.py index ed87e5ed05..4c5553369b 100755 --- a/tests/reflinks_tests.py +++ b/tests/reflinks_tests.py @@ -83,8 +83,7 @@ def test_start_variants(self): class TestReferencesBotConstructor(ScriptMainTestCase): - """ - Test reflinks with run() removed. + """Test reflinks with run() removed. These tests can't verify the order of the pages in the XML as the constructor is given a preloading generator. diff --git a/tests/site_detect_tests.py b/tests/site_detect_tests.py index ea9852605b..715fb24515 100755 --- a/tests/site_detect_tests.py +++ b/tests/site_detect_tests.py @@ -29,8 +29,7 @@ class SiteDetectionTestCase(TestCase): net = True def assertSite(self, url: str): - """ - Assert a MediaWiki site can be loaded from the url. + """Assert a MediaWiki site can be loaded from the url. :param url: Url of tested site :raises AssertionError: Site under url is not MediaWiki powered diff --git a/tests/site_generators_tests.py b/tests/site_generators_tests.py index 96550295b2..3f17ffc999 100755 --- a/tests/site_generators_tests.py +++ b/tests/site_generators_tests.py @@ -38,6 +38,7 @@ class TestSiteGenerators(DefaultSiteTestCase): + """Test cases for Site methods.""" cached = True @@ -709,6 +710,7 @@ def test_unconnected(self): class TestSiteGeneratorsUsers(DefaultSiteTestCase): + """Test cases for Site methods with users.""" cached = True diff --git a/tests/site_login_logout_tests.py b/tests/site_login_logout_tests.py index c4805c8fd1..beb1770402 100755 --- a/tests/site_login_logout_tests.py +++ b/tests/site_login_logout_tests.py @@ -58,6 +58,7 @@ def test_login_logout(self): class TestClearCookies(TestCase): + """Test cookies are cleared after logout.""" login = True diff --git a/tests/site_tests.py b/tests/site_tests.py index 83f0c7de2e..9e56f9caa1 100755 --- a/tests/site_tests.py +++ b/tests/site_tests.py @@ -294,6 +294,7 @@ def test_ratelimit(self): class TestLockingPage(DefaultSiteTestCase): + """Test cases for lock/unlock a page within threads.""" cached = True diff --git a/tests/sparql_tests.py b/tests/sparql_tests.py index 01cc2434f3..5ea73464b6 100755 --- a/tests/sparql_tests.py +++ b/tests/sparql_tests.py @@ -88,6 +88,7 @@ class Container: + """Simple test container for return values.""" def __init__(self, value): @@ -100,6 +101,7 @@ def json(self): class TestSparql(WikidataTestCase): + """Test SPARQL queries.""" @patch.object(sparql.http, 'fetch') @@ -184,6 +186,7 @@ def testQueryAsk(self, mock_method): class TestCommonsQueryService(TestCase): + """Test Commons Query Service auth.""" family = 'commons' @@ -211,9 +214,11 @@ def testLoginAndOauthPermisson(self): class Shared: + """Shared test placeholder.""" class SparqlNodeTests(TestCase): + """Tests encoding issues.""" net = False @@ -233,6 +238,7 @@ def test__str__returnsStringType(self): class LiteralTests(Shared.SparqlNodeTests): + """Tests for sparql.Literal.""" net = False @@ -241,6 +247,7 @@ class LiteralTests(Shared.SparqlNodeTests): class BnodeTests(Shared.SparqlNodeTests): + """Tests for sparql.Bnode.""" net = False @@ -248,6 +255,7 @@ class BnodeTests(Shared.SparqlNodeTests): class URITests(Shared.SparqlNodeTests): + """Tests for sparql.URI.""" net = False diff --git 
a/tests/superset_tests.py b/tests/superset_tests.py index 744b430280..6712c856ca 100755 --- a/tests/superset_tests.py +++ b/tests/superset_tests.py @@ -21,6 +21,7 @@ class TestSupersetWithoutAuth(TestCase): + """Test Superset without auth.""" family = 'meta' @@ -46,6 +47,7 @@ def test_init(self): class TestSupersetWithAuth(TestCase): + """Test Superset with auth.""" login = True diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py index 5acd7568a8..bdd979a1be 100755 --- a/tests/textlib_tests.py +++ b/tests/textlib_tests.py @@ -189,8 +189,7 @@ def test_add_text(self): class TestCategoryRearrangement(DefaultDrySiteTestCase): - """ - Ensure that sorting keys are not being lost. + """Ensure that sorting keys are not being lost. Tests .getCategoryLinks() and .replaceCategoryLinks(), with both a newline and an empty string as separators. @@ -966,6 +965,7 @@ def test_replace_interwiki_links(self): class TestReplaceLinksNonDry(TestCase): + """Test the replace_links function in textlib non-dry.""" family = 'wikipedia' diff --git a/tests/tools_deprecate_tests.py b/tests/tools_deprecate_tests.py index 335f3b87ea..3d25942ac0 100755 --- a/tests/tools_deprecate_tests.py +++ b/tests/tools_deprecate_tests.py @@ -122,8 +122,7 @@ def deprecated_func_docstring_arg(foo=None): @deprecated def deprecated_func_docstring_arg2(foo=None): - """ - DEPRECATED. + """DEPRECATED. :param foo: Foo. DEPRECATED. """ @@ -327,7 +326,7 @@ def test_deprecated_function_multiline_docstring(self): Python 3.13 strips the doc string, see https://docs.python.org/3.13/whatsnew/3.13.html#other-language-changes """ - doc = '\n DEPRECATED.\n\n :param foo: Foo. DEPRECATED.\n ' + doc = 'DEPRECATED.\n\n :param foo: Foo. DEPRECATED.\n ' if PYTHON_VERSION < (3, 13): self.assertEqual(deprecated_func_docstring_arg2.__doc__, doc) else: diff --git a/tests/ui_tests.py b/tests/ui_tests.py index d688b0c1b3..dab9f0236c 100755 --- a/tests/ui_tests.py +++ b/tests/ui_tests.py @@ -495,8 +495,7 @@ class FakeUnixTest(FakeUIColorizedTestBase, FakeUITest): class FakeWin32Test(FakeUIColorizedTestBase, FakeUITest): - """ - Test case to allow doing colorized Win32 tests in any environment. + """Test case to allow doing colorized Win32 tests in any environment. This only patches the ctypes import in the terminal_interface_win32 module. As the Win32CtypesUI is using the std-streams from another diff --git a/tests/upload_tests.py b/tests/upload_tests.py index 66cd0e8f4e..8e90d05649 100755 --- a/tests/upload_tests.py +++ b/tests/upload_tests.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Site upload test. +"""Site upload test. These tests write to the wiki. """ diff --git a/tests/uploadbot_tests.py b/tests/uploadbot_tests.py index 7bffec2a18..9c286ba964 100755 --- a/tests/uploadbot_tests.py +++ b/tests/uploadbot_tests.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -UploadRobot test. +"""UploadRobot test. These tests write to the wiki. """ diff --git a/tests/utils.py b/tests/utils.py index 25b4547914..3e5e73fbed 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -31,8 +31,7 @@ def expected_failure_if(expect): - """ - Unit test decorator to expect failure under conditions. + """Unit test decorator to expect failure under conditions. :param expect: Flag to check if failure is expected :type expect: bool @@ -75,8 +74,7 @@ def entered_loop(iterable): class WarningSourceSkipContextManager(warnings.catch_warnings): - """ - Warning context manager that adjusts source of warning. + """Warning context manager that adjusts source of warning. 
The source of the warning will be moved further down the stack to skip a list of objects that have been monkey @@ -84,8 +82,7 @@ class WarningSourceSkipContextManager(warnings.catch_warnings): """ def __init__(self, skip_list): - """ - Initializer. + """Initializer. :param skip_list: List of objects to be skipped. The source of any warning that matches the skip_list won't be adjusted. @@ -96,8 +93,7 @@ def __init__(self, skip_list): @property def skip_list(self): - """ - Return list of filename and line ranges to skip. + """Return list of filename and line ranges to skip. :rtype: list of (obj, str, int, int) """ @@ -105,8 +101,7 @@ def skip_list(self): @skip_list.setter def skip_list(self, value): - """ - Set list of objects to be skipped. + """Set list of objects to be skipped. :param value: List of objects to be skipped :type value: list of object or (obj, str, int, int) @@ -173,8 +168,7 @@ def detailed_show_warning(*args, **kwargs): class AssertAPIErrorContextManager: - """ - Context manager to assert certain APIError exceptions. + """Context manager to assert certain APIError exceptions. This is build similar to the :py:obj:`unittest.TestCase.assertError` implementation which creates a context manager. It then calls diff --git a/tests/wikibase_edit_tests.py b/tests/wikibase_edit_tests.py index d9069d6b15..338db402d5 100755 --- a/tests/wikibase_edit_tests.py +++ b/tests/wikibase_edit_tests.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Tests for editing Wikibase items. +"""Tests for editing Wikibase items. Tests which should fail should instead be in the TestWikibaseSaveTest class in edit_failiure_tests.py @@ -242,8 +241,7 @@ class TestWikibaseMakeClaim(WikibaseTestCase): @staticmethod def _clean_item(repo, prop: str): - """ - Return an item without any existing claims of the given property. + """Return an item without any existing claims of the given property. :param repo: repository to fetch item from :type repo: pywikibot.site.DataSite @@ -617,8 +615,7 @@ class TestWikibaseAddClaimToExisting(WikibaseTestCase): @staticmethod def _clean_item_temp(repo, prop: str): - """ - Return an item without any existing claims of the given property. + """Return an item without any existing claims of the given property. :param repo: repository to fetch item from :type repo: pywikibot.site.DataSite diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py index 0a1f9c29b1..413ab3b363 100755 --- a/tests/wikibase_tests.py +++ b/tests/wikibase_tests.py @@ -175,8 +175,7 @@ def test_Coordinate_entity_uri_globe(self): class TestWikibaseCoordinateNonDry(WbRepresentationTestCase): - """ - Test Wikibase Coordinate data type (non-dry). + """Test Wikibase Coordinate data type (non-dry). These can be moved to TestWikibaseCoordinate once DrySite has been bumped to the appropriate version. @@ -779,8 +778,7 @@ def test_WbQuantity_unit_fromWikibase(self): class TestWbQuantityNonDry(WbRepresentationTestCase): - """ - Test Wikibase WbQuantity data type (non-dry). + """Test Wikibase WbQuantity data type (non-dry). These can be moved to TestWbQuantity once DrySite has been bumped to the appropriate version. @@ -944,6 +942,7 @@ def test_WbMonolingualText_errors(self): class TestWikibaseParser(WikidataTestCase): + """Test passing various datatypes to wikibase parser.""" def test_wbparse_strings(self): @@ -992,8 +991,7 @@ def test_wbparse_raises_valueerror(self): class TestWbGeoShapeNonDry(WbRepresentationTestCase): - """ - Test Wikibase WbGeoShape data type (non-dry). 
+ """Test Wikibase WbGeoShape data type (non-dry). These require non dry tests due to the page.exists() call. """ @@ -1068,8 +1066,7 @@ def test_WbGeoShape_error_on_wrong_page_type(self): class TestWbTabularDataNonDry(WbRepresentationTestCase): - """ - Test Wikibase WbTabularData data type (non-dry). + """Test Wikibase WbTabularData data type (non-dry). These require non dry tests due to the page.exists() call. """ @@ -1220,8 +1217,7 @@ class MyItemPage(ItemPage): class TestItemLoad(WikidataTestCase): - """ - Test item creation. + """Test item creation. Tests for item creation include: 1. by Q id @@ -1307,8 +1303,7 @@ def test_load_item_set_id(self): self.assertEqual(item.title(), 'Q60') def test_reuse_item_set_id(self): - """ - Test modifying item.id attribute. + """Test modifying item.id attribute. Some scripts are using item.id = 'Q60' semantics, which does work but modifying item.id does not currently work, and this test @@ -1334,8 +1329,7 @@ def test_reuse_item_set_id(self): # self.assertTrue(item.labels['en'].lower().endswith('main page')) def test_empty_item(self): - """ - Test empty wikibase item. + """Test empty wikibase item. should not raise an error as the constructor only requires the site parameter, with the title parameter defaulted to None. @@ -1365,8 +1359,7 @@ def test_item_invalid_titles(self): ItemPage(wikidata, '') def test_item_untrimmed_title(self): - """ - Test intrimmed titles of wikibase items. + """Test intrimmed titles of wikibase items. Spaces in the title should not cause an error. """ @@ -1540,8 +1533,7 @@ def _test_fromPage_noitem(self, link): ItemPage.fromPage(page) def test_fromPage_redirect(self): - """ - Test item from redirect page. + """Test item from redirect page. A redirect should not have a wikidata item. """ @@ -1549,8 +1541,7 @@ def test_fromPage_redirect(self): self._test_fromPage_noitem(link) def test_fromPage_missing(self): - """ - Test item from deleted page. + """Test item from deleted page. A deleted page should not have a wikidata item. """ @@ -1558,8 +1549,7 @@ def test_fromPage_missing(self): self._test_fromPage_noitem(link) def test_fromPage_noitem(self): - """ - Test item from new page. + """Test item from new page. A new created page should not have a wikidata item yet. """ @@ -1754,8 +1744,7 @@ class TestClaim(WikidataTestCase): """Test Claim object functionality.""" def test_claim_eq_simple(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and value, they are equal. """ @@ -1768,8 +1757,7 @@ def test_claim_eq_simple(self): self.assertEqual(claim2, claim1) def test_claim_eq_simple_different_value(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and different values, they are not equal. @@ -1783,8 +1771,7 @@ def test_claim_eq_simple_different_value(self): self.assertNotEqual(claim2, claim1) def test_claim_eq_simple_different_rank(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and value and different ranks, they are equal. @@ -1799,8 +1786,7 @@ def test_claim_eq_simple_different_rank(self): self.assertEqual(claim2, claim1) def test_claim_eq_simple_different_snaktype(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and different snaktypes, they are not equal. 
@@ -1814,8 +1800,7 @@ def test_claim_eq_simple_different_snaktype(self): self.assertNotEqual(claim2, claim1) def test_claim_eq_simple_different_property(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same value and different properties, they are not equal. @@ -1829,8 +1814,7 @@ def test_claim_eq_simple_different_property(self): self.assertNotEqual(claim2, claim1) def test_claim_eq_with_qualifiers(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property, value and qualifiers, they are equal. """ @@ -1849,8 +1833,7 @@ def test_claim_eq_with_qualifiers(self): self.assertEqual(claim2, claim1) def test_claim_eq_with_different_qualifiers(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and value and different qualifiers, they are not equal. @@ -1870,8 +1853,7 @@ def test_claim_eq_with_different_qualifiers(self): self.assertNotEqual(claim2, claim1) def test_claim_eq_one_without_qualifiers(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and value and one of them has no qualifiers while the other one does, they are not equal. @@ -1888,8 +1870,7 @@ def test_claim_eq_one_without_qualifiers(self): self.assertNotEqual(claim2, claim1) def test_claim_eq_with_different_sources(self): - """ - Test comparing two claims. + """Test comparing two claims. If they have the same property and value and different sources, they are equal. @@ -1909,8 +1890,7 @@ def test_claim_eq_with_different_sources(self): self.assertEqual(claim2, claim1) def test_claim_copy_is_equal(self): - """ - Test making a copy of a claim. + """Test making a copy of a claim. The copy of a claim should be always equal to the claim. """ @@ -1927,8 +1907,7 @@ def test_claim_copy_is_equal(self): self.assertEqual(claim, copy) def test_claim_copy_is_equal_qualifier(self): - """ - Test making a copy of a claim. + """Test making a copy of a claim. The copy of a qualifier should be always equal to the qualifier. """ @@ -1941,8 +1920,7 @@ def test_claim_copy_is_equal_qualifier(self): self.assertTrue(copy.isQualifier) def test_claim_copy_is_equal_source(self): - """ - Test making a copy of a claim. + """Test making a copy of a claim. The copy of a source should be always equal to the source. """ @@ -2204,8 +2182,7 @@ class TestNamespaces(WikidataTestCase): """Test cases to test namespaces of Wikibase entities.""" def test_empty_wikibase_page(self): - """ - Test empty wikibase page. + """Test empty wikibase page. As a base class it should be able to instantiate it with minimal arguments