Szerkesztő:BinBot/huwiki/flaggedrevs.py

Explanation

Manual
  • FLAGGED_NAMESPACES: the list of namespaces where revisions are reviewed (flagged)
  • Flags: meaningful names for the review states
  • cosmeticable(): the page is not unreviewed (i.e. it is reviewed or does not exist); cosmetic or any other bot changes may be made to it that would, on unreviewed pages, make the patrollers' work harder
  • CosmeticableFilterPageGenerator(): a wrapper generator that passes through only the reviewed pages from another generator or a list; cosmetic changes may be made on these
  • flagged_state(): returns the state of the page as one of the names above
  • one_pending(): returns True if only the single last change of the page is awaiting review (useful e.g. for mass reviewing a given editor's work)
  • review(): marks the latest revision of a page as reviewed or unreviewed
  • review_id(): marks a given revision (oldid) of a page as reviewed or unreviewed

Examples of using the module
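
A minimal usage sketch of the main calls (the article title below is only a placeholder; the import path huwiki.flaggedrevs follows the module's own docstring):

import pywikibot
from huwiki.flaggedrevs import Flags, cosmeticable, flagged_state, one_pending

site = pywikibot.Site()
page = pywikibot.Page(site, 'Budapest')  # placeholder: any article in a flagged namespace

# Is the page safe for cosmetic bot edits?
if cosmeticable(page):
    print(f'{page.title()} may be edited cosmetically')

# Query the exact state and react to pending changes.
state = flagged_state(page)
if state == Flags.PENDING and one_pending(page):
    print('Only the last revision is waiting for review')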

"""This module handles Flagged Revisions (jelölt lapváltozatok).

Working on unreviewed pages makes diffs less readable, and thus makes
the work of patrollers harder. This should be avoided when possible,
especially when making cosmetic changes.
Sample code to handle the problem:
from huwiki.flaggedrevs import cosmeticable
if cosmeticable(page):
    # Do cosmetic changes.
or
if cosmeticable(title):
    # Do cosmetic changes.
"""
#
# (C) Bináris, 2023-24
#
# Distributed under the terms of the MIT license.

# https://www.mediawiki.org/wiki/Extension:FlaggedRevs#API
# https://en.wikipedia.org/w/api.php?action=help&modules=query%2Bflagged
# https://www.mediawiki.org/wiki/API:Main_page
# https://www.mediawiki.org/wiki/Help:Extension:FlaggedRevs

from enum import Enum
from typing import Union
import pywikibot
from pywikibot.backports import List, Dict, Iterable, Iterator
from pywikibot.exceptions import APIError

site = pywikibot.Site()
_cache: Dict[str, dict] = {}  # title -> {'flag': Flags member, 'data': raw API data or None}

FLAGGED_NAMESPACES = (
    0,    # main
    6,    # file
    10,   # template
    14,   # category
    100,  # portal
    828,  # module
)

class Flags(Enum):
    """State codes with meaningful names."""

    UNFLAGGED = -1  # Not a flagged namespace / ellenőrizetlen névtér
    UNREVIEWED = 0  # A page that was never reviewed / ellenőrizetlen lap
    PENDING = 1  # Reviewed page w/ pending changes / elavult ellenőrzött lap
    STABLE = 2  # Reviewed page revision / ellenőrzött lap
    REDPAGE = 3  # Page does not exist / nem létező lap
    ERROR = 4  # We should never get this

    def __str__(self) -> str:
        return self.name

def cosmeticable(
        page: Union[pywikibot.Page, str],
        force: bool = False) -> bool:
    """Return True if the bot is allowed to make cosmetic changes.

    Although a red page sounds funny to make cosmetic changes on,
    it is included because cosmetic changes are only the main use case,
    and there may be others. Creating pages won't cause problems for
    patrollers. We have no reason to exclude non-existing pages from
    bot operations.

    Not safe for misspelled titles (lower case first letter).

    :param page: a wikipage to test for flagged state.
        Argument may be given as title or a Page object.
    :param force: if True, won't use the cached results
    """

    return flagged_state(page, force) in (
        Flags.UNFLAGGED,
        Flags.STABLE,
        Flags.REDPAGE)

def _cache50page(pages: List[Union[pywikibot.Page, str]]) -> None:
    """Cache flag results for pages.

    API limit is currently set to 50. More than 50 causes APIError.
    Experience shows that 50 results arrive without continuation
    (as a single result).
    APIError may indicate an API change.

    Not safe for misspelled titles (lower case first letter).
    """
    if len(pages) > 50:
        raise ValueError('List is too long. The limit is 50.')

    titles = []
    for page in pages:
        if isinstance(page, pywikibot.Page):
            title, ns = (page.title(), page.namespace())
        elif isinstance(page, str):
            title, ns = (page, pywikibot.Page(site, page).namespace())
        else:
            raise TypeError(f'Argument must be a Page or a str: {page}.')

        if ns not in FLAGGED_NAMESPACES:
            # Pages outside flagged namespaces never need review.
            _cache[title] = {'flag': Flags.UNFLAGGED, 'data': None}
            continue
        titles.append(title)

    if not titles:
        return
    request = site.simple_request(action='query',
                                  prop='flagged',
                                  titles=titles)
    result = request.submit()
    # The following two lines originate from _apisite.py:
    assert 'query' in result, "API flagged response lacks 'query' key"
    assert 'pages' in result['query'], "API response lacks 'pages' key"
    pageinfo = result['query']['pages']
    # Pageinfo must be a dict of key-value pairs with the page id as the key.
    if not isinstance(pageinfo, dict):
        raise APIError('Wrong answer. API changed?', str(pageinfo))

    for data in pageinfo.values():
        try:
            title = data['title']  # data must be a dict
        except (KeyError, TypeError):
            continue  # Will be handled at the end.
        _cache[title] = {
            'flag': _decide(data),
            'data': data,
        }

    # Handle skipped errors (if a title starts with a lower-case letter,
    # blame yourself).
    for title in titles:
        if title not in _cache:
            _cache[title] = {
                'flag': Flags.ERROR,
                'data': None,
            }

def _decide(data: dict) -> Flags:
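    """Translate one page's entry of the API result into a Flags value."""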
    if 'missing' in data:
        return Flags.REDPAGE
    try:
        if 'pending_since' in data['flagged']:
            return Flags.PENDING
        return Flags.STABLE
    except KeyError:
        return Flags.UNREVIEWED

def _cachepages(pages: List[Union[pywikibot.Page, str]]) -> None:
    """Caches flag results for pages.

    Feed _cache50page() with at most 50 pages at a time.
    """
    for i in range(0, len(pages), 50):
        _cache50page(pages[i : i+50])

def _get_page_from_cache(
        page: Union[pywikibot.Page, str],
        force: bool = False) -> Dict:
    """Return a Flags value for a single page.

    The returned value may be compared to the above state codes, e.g.
    == Flags.PENDING.
    :param page: a wikipage to test for flagged state.
    Argument may be given as title or a Page object.
    :param force: if True, won't use the cached results
    """

    if isinstance(page, pywikibot.Page):
        title = page.title()
    elif isinstance(page, str):
        title = page
    else:
        raise TypeError('Argument must be a Page or a str.')

    if force or title not in _cache:
        _cache50page([title,])
    return _cache.get(title)

def flagged_state(page, force=False) -> Flags:
    """Return a Flags value for a single page.

    The returned value may be compared to the above state codes, e.g.
    == Flags.PENDING.
    For parameters see _get_page_from_cache().
    """

    return _get_page_from_cache(page, force)['flag']

def flag_data(page, force=False) -> Dict:
    """Return the complete data dictionary for a single page.

    For parameters see _get_page_from_cache().
    """

    return _get_page_from_cache(page, force)['data']

def stable_version(page_or_title, force=False):
    """Return the stable revision of the page.

    Returns the Page object itself if its latest revision is the stable
    one, a Revision object holding the stable content if there are
    pending changes, and None if the page has no reviewed revision.
    For parameters see _get_page_from_cache().
    """
    dic = _get_page_from_cache(page_or_title, force)
    if dic['flag'] not in (Flags.STABLE, Flags.PENDING):
        return None
    if isinstance(page_or_title, pywikibot.Page):
        page = page_or_title
    else:
        # Must be a title at this point.
        page = pywikibot.Page(site, page_or_title)
    if dic['flag'] == Flags.STABLE:
        return page
    stable_id = dic['data']['flagged']['stable_revid']
    # loadrevisions() stores the fetched revisions on the page object itself.
    site.loadrevisions(page, content=True, revids=stable_id)
    return page._revisions[stable_id]

def stable_text(page_or_title, force=False) -> str:
    """Return the text of the stable revision of the page.

    For parameters see _get_page_from_cache().
    """
    return stable_version(page_or_title, force).text

def one_pending(page_or_title, force=False) -> bool:
    """Return true if the page has exactly one (the last) pending revision.

    For parameters see _get_page_from_cache().
    """
    dic = _get_page_from_cache(page_or_title, force)
    if dic['flag'] != Flags.PENDING:
        return False
    last = dic['data']['flagged']['stable_revid']
    if isinstance(page_or_title, pywikibot.Page):
        page = page_or_title
    else:
        # Must be a title at this point.
        page = pywikibot.Page(site, page_or_title)
    parent = page.latest_revision.parentid
    return last == parent

def review_id(page: pywikibot.Page,
              id: int,
              comment: str = '',
              unreview: bool = False) -> None:
    """Review or unreview a given oldid.

    Read more about tokens and requests in _apisite.py
    """
    token = site.get_tokens(['csrf']).get('csrf')  # get a single token
    params = {'action': 'review',
              'revid': id,
              'token': token,
              'comment': comment
             }
    if unreview:
        params['unapprove'] = 1
    request = site.simple_request(**params)
    print(request)
    request.submit()

    # Todo: handle APIError: permissiondenied: You don't have permission to review revisions.

def review(page: pywikibot.Page,
           comment: str = '',
           unreview: bool = False) -> None:
    """Review or unreview a given page."""
    review_id(page, page.latest_revision_id, comment, unreview)

    # Todo: unreview page from a given revision
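
# Example (sketch): review the last pending change on pages recently edited by
# a trusted editor. one_pending() guards against approving more than the single
# newest change; the user name and the edit summary are placeholders.
#
#     for contrib in site.usercontribs(user='ExampleUser', total=50):
#         page = pywikibot.Page(site, contrib['title'])
#         if one_pending(page):
#             review(page, comment='Bot: reviewing ExampleUser')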

def CosmeticableFilterPageGenerator(
        generator: Iterable[pywikibot.Page],
        mass_cache: bool = False
        ) -> Iterator[pywikibot.Page]:
    """A wrapper generator to filter pages suitable for cosmetic changes.

    :param mass_cache: if True, collect all the pages first and query
        their review state from the API en masse. This is expected
        to be faster.
    """

    if mass_cache:
        pages = list(generator)
        _cachepages(pages)
        for page in pages:
            if cosmeticable(page):
                yield page
        return

    for page in generator:
        if cosmeticable(page):
            yield page
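
# Example (sketch): run cosmetic changes only on pages that will not disturb
# the patrollers. The category name is a placeholder; any page generator from
# pywikibot.pagegenerators could be used instead.
#
#     from pywikibot import pagegenerators
#     cat = pywikibot.Category(site, 'Kategória:Minden szócikk')
#     gen = pagegenerators.CategorizedPageGenerator(cat)
#     for page in CosmeticableFilterPageGenerator(gen, mass_cache=True):
#         ...  # apply cosmetic changes and save the page here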