diff --git a/2.0.0/404.html b/2.0.0/404.html new file mode 100644 index 00000000..1572dd35 --- /dev/null +++ b/2.0.0/404.html @@ -0,0 +1,660 @@ + + + + + + + + + + + + + + + + Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/_static/Omicron_Windows10.docx b/2.0.0/_static/Omicron_Windows10.docx new file mode 100644 index 00000000..dbe1b06a Binary files /dev/null and b/2.0.0/_static/Omicron_Windows10.docx differ diff --git a/2.0.0/_static/jetbrains-variant-3.svg b/2.0.0/_static/jetbrains-variant-3.svg new file mode 100644 index 00000000..a37e7326 --- /dev/null +++ b/2.0.0/_static/jetbrains-variant-3.svg @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/2.0.0/_static/strategy_lifecycle.drawio b/2.0.0/_static/strategy_lifecycle.drawio new file mode 100644 index 00000000..62644630 --- /dev/null +++ b/2.0.0/_static/strategy_lifecycle.drawio @@ -0,0 +1,98 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/2.0.0/api/board/index.html b/2.0.0/api/board/index.html new file mode 100644 index 00000000..fe75621e --- /dev/null +++ b/2.0.0/api/board/index.html @@ -0,0 +1,2032 @@ + + + + + + + + + + + + + + + + board - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +
+ + +
+ + + + +
+ + + + + + + + + +
+ + + +

+ +Board + + + +

+ +
+ + +
+ Source code in omicron/models/board.py +
class Board:
+    server_ip: str
+    server_port: int
+    measurement = "board_bars_1d"
+
+    @classmethod
+    def init(cls, ip: str, port: int = 3180):
+        cls.server_ip = ip
+        cls.server_port = port
+
+    @classmethod
+    async def _rpc_call(cls, url: str, param: str):
+        _url = f"http://{cls.server_ip}:{cls.server_port}/api/board/{url}"
+
+        async with httpx.AsyncClient() as client:
+            r = await client.post(_url, json=param, timeout=10)
+            if r.status_code != 200:
+                logger.error(
+                    f"failed to post RPC call, {_url}: {param}, response: {r.content.decode()}"
+                )
+                return {"rc": r.status_code}
+
+            rsp = json.loads(r.content)
+            return {"rc": 200, "data": rsp}
+
+    @classmethod
+    async def board_list(cls, _btype: BoardType = BoardType.CONCEPT) -> List[List]:
+        """获取板块列表
+
+        Args:
+            _btype: 板块类别,可选值`BoardType.CONCEPT`和`BoardType.INDUSTRY`.
+
+        Returns:
+            板块列表。每一个子元素仍为一个列表,由板块代码(str), 板块名称(str)和成员数组成。示例:
+            ```
+            [
+                ['881101', '种植业与林业', 24],
+                ['881102', '养殖业', 27],
+                ['881103', '农产品加工', 41],
+                ['881104', '农业服务', 16],
+            ]
+            ```
+        """
+        rsp = await cls._rpc_call("board_list", {"board_type": _btype.value})
+        if rsp["rc"] != 200:
+            return {"status": 500, "msg": "httpx RPC call failed"}
+
+        return rsp["data"]
+
+    @classmethod
+    async def fuzzy_match_board_name(
+        cls, pattern: str, _btype: BoardType = BoardType.CONCEPT
+    ) -> dict:
+        """模糊查询板块代码的名字
+
+        Examples:
+        ```python
+        await Board.fuzzy_match_board_name("汽车", BoardType.INDUSTRY)
+
+        # returns:
+        [
+            '881125 汽车整车',
+            '881126 汽车零部件',
+            '881127 非汽车交运',
+            '881128 汽车服务',
+            '884107 汽车服务Ⅲ',
+            '884194 汽车零部件Ⅲ'
+        ]
+        ```
+        Args:
+            pattern: 待查询模式串
+            _btype: 查询类型
+
+        Returns:
+            包含以下key的dict: code(板块代码), name(板块名), stocks(股票数)
+        """
+        if not pattern:
+            return []
+
+        rsp = await cls._rpc_call(
+            "fuzzy_match_name", {"board_type": _btype.value, "pattern": pattern}
+        )
+        if rsp["rc"] != 200:
+            return {"status": 500, "msg": "httpx RPC call failed"}
+
+        return rsp["data"]
+
+    @classmethod
+    async def board_info_by_id(cls, board_id: str, full_mode: bool = False) -> dict:
+        """通过板块代码查询板块信息(名字,成员数目或清单)
+
+        Examples:
+        ```python
+        board_code = '881128' # 汽车服务 可自行修改
+        board_info = await Board.board_info_by_id(board_code)
+        print(board_info) # 字典形式
+
+        # returns
+        {'code': '881128', 'name': '汽车服务', 'stocks': 14}
+        ```
+
+        Returns:
+            {'code': '301505', 'name': '医疗器械概念', 'stocks': 242}
+            or
+            {'code': '301505', 'name': '医疗器械概念', 'stocks': [['300916', '朗特智能'], ['300760', '迈瑞医疗']]}
+        """
+
+        if not board_id:
+            return {}
+        if board_id[0] == "3":
+            _btype = BoardType.CONCEPT
+        else:
+            _btype = BoardType.INDUSTRY
+
+        _mode = 0
+        if full_mode:  # 转换bool类型
+            _mode = 1
+
+        rsp = await cls._rpc_call(
+            "info",
+            {"board_type": _btype.value, "board_id": board_id, "fullmode": _mode},
+        )
+        if rsp["rc"] != 200:
+            return {"status": 500, "msg": "httpx RPC call failed"}
+
+        return rsp["data"]
+
+    @classmethod
+    async def board_info_by_security(
+        cls, security: str, _btype: BoardType = BoardType.CONCEPT
+    ) -> List[dict]:
+        """获取股票所在板块信息:名称,代码
+
+        Examples:
+        ```python
+        stock_code = '002236'  # 大华股份,股票代码不带字母后缀
+        stock_in_board = await Board.board_info_by_security(stock_code, _btype=BoardType.CONCEPT)
+        print(stock_in_board)
+
+        # returns:
+        [
+            {'code': '301715', 'name': '证金持股', 'stocks': 208},
+            {'code': '308870', 'name': '数字经济', 'stocks': 195},
+            {'code': '308642', 'name': '数据中心', 'stocks': 188},
+            ...,
+            {'code': '300008', 'name': '新能源汽车', 'stocks': 603}
+        ]
+        ```
+
+        Returns:
+            [{'code': '301505', 'name': '医疗器械概念'}]
+        """
+
+        if not security:
+            return []
+
+        rsp = await cls._rpc_call(
+            "info_by_sec", {"board_type": _btype.value, "security": security}
+        )
+        if rsp["rc"] != 200:
+            return {"status": 500, "msg": "httpx RPC call failed"}
+
+        return rsp["data"]
+
+    @classmethod
+    async def board_filter_members(
+        cls,
+        included: List[str],
+        excluded: List[str] = [],
+        _btype: BoardType = BoardType.CONCEPT,
+    ) -> List:
+        """根据板块名筛选股票,参数为include, exclude
+
+        Fixme:
+            this function doesn't work;
+            it raises status 500
+
+        Returns:
+            [['300181', '佐力药业'], ['600056', '中国医药']]
+        """
+        if not included:
+            return []
+        if excluded is None:
+            excluded = []
+
+        rsp = await cls._rpc_call(
+            "board_filter_members",
+            {
+                "board_type": _btype.value,
+                "include_boards": included,
+                "exclude_boards": excluded,
+            },
+        )
+        if rsp["rc"] != 200:
+            return {"status": 500, "msg": "httpx RPC call failed"}
+
+        return rsp["data"]
+
+    @classmethod
+    async def new_concept_boards(cls, days: int = 10):
+        raise NotImplementedError("not ready")
+
+    @classmethod
+    async def latest_concept_boards(cls, n: int = 3):
+        raise NotImplementedError("not ready")
+
+    @classmethod
+    async def new_concept_members(cls, days: int = 10, prot: int = None):
+        raise NotImplementedError("not ready")
+
+    @classmethod
+    async def board_filter(
+        cls, industry=None, with_concepts: Optional[List[str]] = None, without=[]
+    ):
+        raise NotImplementedError("not ready")
+
+    @classmethod
+    async def save_bars(cls, bars):
+        client = get_influx_client()
+
+        logger.info(
+            "persisting bars to influxdb: %s, %d secs", cls.measurement, len(bars)
+        )
+        await client.save(bars, cls.measurement, tag_keys=["code"], time_key="frame")
+        return True
+
+    @classmethod
+    async def get_last_date_of_bars(cls, code: str):
+        # 行业板块回溯1年的数据,概念板块只取当年的数据
+        code = f"{code}.THS"
+
+        client = get_influx_client()
+
+        now = datetime.datetime.now()
+        dt_end = tf.day_shift(now, 0)
+        # 250 + 60: 可以得到60个MA250的点, 默认K线图120个节点
+        dt_start = tf.day_shift(now, -310)
+
+        flux = (
+            Flux()
+            .measurement(cls.measurement)
+            .range(dt_start, dt_end)
+            .bucket(client._bucket)
+            .tags({"code": code})
+        )
+
+        data = await client.query(flux)
+        if len(data) == 2:  # \r\n
+            return dt_start
+        ds = DataframeDeserializer(
+            sort_values="_time", usecols=["_time"], time_col="_time", engine="c"
+        )
+        bars = ds(data)
+        secs = bars.to_records(index=False).astype("datetime64[s]")
+
+        _dt = secs[-1].item()
+        return _dt.date()
+
+    @classmethod
+    async def get_bars_in_range(
+        cls, code: str, start: Frame, end: Frame = None
+    ) -> BarsArray:
+        """从持久化数据库中获取介于[`start`, `end`]间的行情记录
+
+        Examples:
+        ```python
+        start = datetime.date(2022, 9, 1)  # 起始时间, 可修改
+        end = datetime.date(2023, 3, 1)  # 截止时间, 可修改
+        board_code = '881128' # 汽车服务, 可修改
+        bars = await Board.get_bars_in_range(board_code, start, end)
+        bars[-3:] # 打印后3条数据
+
+        # prints:
+        rec.array([
+            ('2023-02-27T00:00:00', 1117.748, 1124.364, 1108.741, 1109.525, 1.77208600e+08, 1.13933095e+09, 1.),
+            ('2023-02-28T00:00:00', 1112.246, 1119.568, 1109.827, 1113.43 , 1.32828124e+08, 6.65160380e+08, 1.),
+            ('2023-03-01T00:00:00', 1122.233, 1123.493, 1116.62 , 1123.274, 7.21718910e+07, 3.71172850e+08, 1.)
+           ],
+          dtype=[('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])
+        ```
+        Args:
+            code: 板块代码(概念、行业)
+            start: 起始时间
+            end: 结束时间,如果未指明,则取当前时间
+
+        Returns:
+            返回dtype为`coretypes.bars_dtype`的一维numpy数组。
+        """
+        end = end or datetime.datetime.now()
+        code = f"{code}.THS"
+
+        keep_cols = ["_time"] + list(bars_cols[1:])
+
+        flux = (
+            Flux()
+            .bucket(cfg.influxdb.bucket_name)
+            .range(start, end)
+            .measurement(cls.measurement)
+            .fields(keep_cols)
+            .tags({"code": code})
+        )
+
+        serializer = DataframeDeserializer(
+            encoding="utf-8",
+            names=[
+                "_",
+                "table",
+                "result",
+                "frame",
+                "code",
+                "amount",
+                "close",
+                "factor",
+                "high",
+                "low",
+                "open",
+                "volume",
+            ],
+            engine="c",
+            skiprows=0,
+            header=0,
+            usecols=bars_cols,
+            parse_dates=["frame"],
+        )
+
+        client = get_influx_client()
+        result = await client.query(flux, serializer)
+        return result.to_records(index=False).astype(bars_dtype)
+
+
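A minimal usage sketch of the class above, assuming the board RPC service is reachable; the IP, port and board code below are placeholders:

```python
import asyncio

from omicron.models.board import Board, BoardType


async def main():
    # point the client at the board RPC service (placeholder address/port)
    Board.init("127.0.0.1", port=3180)

    # industry board list: [[code, name, member_count], ...]
    industries = await Board.board_list(_btype=BoardType.INDUSTRY)
    print(industries[:3])

    # look a board up by its code; full_mode=True would return the member list instead
    info = await Board.board_info_by_id("881128", full_mode=False)
    print(info)


asyncio.run(main())
```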
+ + + +
+ + + + + + + + + + + +
+ + + +

+board_filter_members(included, excluded=[], _btype=<BoardType.CONCEPT: 'concept'>) + + + async + classmethod + + +

+ +
+ +

根据板块名筛选股票,参数为include, exclude

+
+

Fixme

+

this function doesn't work; it raises status 500

+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List

[['300181', '佐力药业'], ['600056', '中国医药']]

+
+ Source code in omicron/models/board.py +
@classmethod
+async def board_filter_members(
+    cls,
+    included: List[str],
+    excluded: List[str] = [],
+    _btype: BoardType = BoardType.CONCEPT,
+) -> List:
+    """根据板块名筛选股票,参数为include, exclude
+
+    Fixme:
+        this function doesn't work;
+        it raises status 500
+
+    Returns:
+        [['300181', '佐力药业'], ['600056', '中国医药']]
+    """
+    if not included:
+        return []
+    if excluded is None:
+        excluded = []
+
+    rsp = await cls._rpc_call(
+        "board_filter_members",
+        {
+            "board_type": _btype.value,
+            "include_boards": included,
+            "exclude_boards": excluded,
+        },
+    )
+    if rsp["rc"] != 200:
+        return {"status": 500, "msg": "httpx RPC call failed"}
+
+    return rsp["data"]
+
+
+
+ +
+ + + +
+ + + +

+board_info_by_id(board_id, full_mode=False) + + + async + classmethod + + +

+ +
+ +

通过板块代码查询板块信息(名字,成员数目或清单)

+ +

Examples:

+ +
1
+2
+3
+4
+5
+6
board_code = '881128' # 汽车服务 可自行修改
+board_info = await Board.board_info_by_id(board_code)
+print(board_info) # 字典形式
+
+# returns
+{'code': '881128', 'name': '汽车服务', 'stocks': 14}
+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
{'code': '301505', 'name': '医疗器械概念', 'stocks': 242}
or
{'code': '301505', 'name': '医疗器械概念', 'stocks': [['300916', '朗特智能'], ['300760', '迈瑞医疗']]}

+
+ Source code in omicron/models/board.py +
@classmethod
+async def board_info_by_id(cls, board_id: str, full_mode: bool = False) -> dict:
+    """通过板块代码查询板块信息(名字,成员数目或清单)
+
+    Examples:
+    ```python
+    board_code = '881128' # 汽车服务 可自行修改
+    board_info = await Board.board_info_by_id(board_code)
+    print(board_info) # 字典形式
+
+    # returns
+    {'code': '881128', 'name': '汽车服务', 'stocks': 14}
+    ```
+
+    Returns:
+        {'code': '301505', 'name': '医疗器械概念', 'stocks': 242}
+        or
+        {'code': '301505', 'name': '医疗器械概念', 'stocks': [['300916', '朗特智能'], ['300760', '迈瑞医疗']]}
+    """
+
+    if not board_id:
+        return {}
+    if board_id[0] == "3":
+        _btype = BoardType.CONCEPT
+    else:
+        _btype = BoardType.INDUSTRY
+
+    _mode = 0
+    if full_mode:  # 转换bool类型
+        _mode = 1
+
+    rsp = await cls._rpc_call(
+        "info",
+        {"board_type": _btype.value, "board_id": board_id, "fullmode": _mode},
+    )
+    if rsp["rc"] != 200:
+        return {"status": 500, "msg": "httpx RPC call failed"}
+
+    return rsp["data"]
+
+
+
+ +
+ + + +
+ + + +

+board_info_by_security(security, _btype=<BoardType.CONCEPT: 'concept'>) + + + async + classmethod + + +

+ +
+ +

获取股票所在板块信息:名称,代码

+ +

Examples:

+ +
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
stock_code = '002236'  # 大华股份,股票代码不带字母后缀
+stock_in_board = await Board.board_info_by_security(stock_code, _btype=BoardType.CONCEPT)
+print(stock_in_board)
+
+# returns:
+[
+    {'code': '301715', 'name': '证金持股', 'stocks': 208},
+    {'code': '308870', 'name': '数字经济', 'stocks': 195},
+    {'code': '308642', 'name': '数据中心', 'stocks': 188},
+    ...,
+    {'code': '300008', 'name': '新能源汽车', 'stocks': 603}
+]
+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
[{'code': '301505', 'name': '医疗器械概念'}]

+
+ Source code in omicron/models/board.py +
@classmethod
+async def board_info_by_security(
+    cls, security: str, _btype: BoardType = BoardType.CONCEPT
+) -> List[dict]:
+    """获取股票所在板块信息:名称,代码
+
+    Examples:
+    ```python
+    stock_code = '002236'  # 大华股份,股票代码不带字母后缀
+    stock_in_board = await Board.board_info_by_security(stock_code, _btype=BoardType.CONCEPT)
+    print(stock_in_board)
+
+    # returns:
+    [
+        {'code': '301715', 'name': '证金持股', 'stocks': 208},
+        {'code': '308870', 'name': '数字经济', 'stocks': 195},
+        {'code': '308642', 'name': '数据中心', 'stocks': 188},
+        ...,
+        {'code': '300008', 'name': '新能源汽车', 'stocks': 603}
+    ]
+    ```
+
+    Returns:
+        [{'code': '301505', 'name': '医疗器械概念'}]
+    """
+
+    if not security:
+        return []
+
+    rsp = await cls._rpc_call(
+        "info_by_sec", {"board_type": _btype.value, "security": security}
+    )
+    if rsp["rc"] != 200:
+        return {"status": 500, "msg": "httpx RPC call failed"}
+
+    return rsp["data"]
+
+
+
+ +
+ + + +
+ + + +

+board_list(_btype=<BoardType.CONCEPT: 'concept'>) + + + async + classmethod + + +

+ +
+ +

获取板块列表

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_btypeBoardType

板块类别,可选值BoardType.CONCEPTBoardType.INDUSTRY.

<BoardType.CONCEPT: 'concept'>
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[List]

板块列表。每一个子元素仍为一个列表,由板块代码(str), 板块名称(str)和成员数组成。示例: +

1
+2
+3
+4
+5
+6
[
+    ['881101', '种植业与林业', 24],
+    ['881102', '养殖业', 27],
+    ['881103', '农产品加工', 41],
+    ['881104', '农业服务', 16],
+]
+

+
+ Source code in omicron/models/board.py +
@classmethod
+async def board_list(cls, _btype: BoardType = BoardType.CONCEPT) -> List[List]:
+    """获取板块列表
+
+    Args:
+        _btype: 板块类别,可选值`BoardType.CONCEPT`和`BoardType.INDUSTRY`.
+
+    Returns:
+        板块列表。每一个子元素仍为一个列表,由板块代码(str), 板块名称(str)和成员数组成。示例:
+        ```
+        [
+            ['881101', '种植业与林业', 24],
+            ['881102', '养殖业', 27],
+            ['881103', '农产品加工', 41],
+            ['881104', '农业服务', 16],
+        ]
+        ```
+    """
+    rsp = await cls._rpc_call("board_list", {"board_type": _btype.value})
+    if rsp["rc"] != 200:
+        return {"status": 500, "msg": "httpx RPC call failed"}
+
+    return rsp["data"]
+
+
+
+ +
+ + + +
+ + + +

+fuzzy_match_board_name(pattern, _btype=<BoardType.CONCEPT: 'concept'>) + + + async + classmethod + + +

+ +
+ +

模糊查询板块代码的名字

+ +

Examples:

+ +
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
await Board.fuzzy_match_board_name("汽车", BoardType.INDUSTRY)
+
+# returns:
+[
+    '881125 汽车整车',
+    '881126 汽车零部件',
+    '881127 非汽车交运',
+    '881128 汽车服务',
+    '884107 汽车服务Ⅲ',
+    '884194 汽车零部件Ⅲ'
+]
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
patternstr

待查询模式串

required
_btypeBoardType

查询类型

<BoardType.CONCEPT: 'concept'>
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
包含以下key的dict

code(板块代码), name(板块名), stocks(股票数)

+
+ Source code in omicron/models/board.py +
@classmethod
+async def fuzzy_match_board_name(
+    cls, pattern: str, _btype: BoardType = BoardType.CONCEPT
+) -> dict:
+    """模糊查询板块代码的名字
+
+    Examples:
+    ```python
+    await Board.fuzzy_match_board_name("汽车", BoardType.INDUSTRY)
+
+    # returns:
+    [
+        '881125 汽车整车',
+        '881126 汽车零部件',
+        '881127 非汽车交运',
+        '881128 汽车服务',
+        '884107 汽车服务Ⅲ',
+        '884194 汽车零部件Ⅲ'
+    ]
+    ```
+    Args:
+        pattern: 待查询模式串
+        _btype: 查询类型
+
+    Returns:
+        包含以下key的dict: code(板块代码), name(板块名), stocks(股票数)
+    """
+    if not pattern:
+        return []
+
+    rsp = await cls._rpc_call(
+        "fuzzy_match_name", {"board_type": _btype.value, "pattern": pattern}
+    )
+    if rsp["rc"] != 200:
+        return {"status": 500, "msg": "httpx RPC call failed"}
+
+    return rsp["data"]
+
+
+
+ +
+ + + +
+ + + +

+get_bars_in_range(code, start, end=None) + + + async + classmethod + + +

+ +
+ +

从持久化数据库中获取介于[start, end]间的行情记录

+ +

Examples:

+ +
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
start = datetime.date(2022, 9, 1)  # 起始时间, 可修改
+end = datetime.date(2023, 3, 1)  # 截止时间, 可修改
+board_code = '881128' # 汽车服务, 可修改
+bars = await Board.get_bars_in_range(board_code, start, end)
+bars[-3:] # 打印后3条数据
+
+# prints:
+rec.array([
+    ('2023-02-27T00:00:00', 1117.748, 1124.364, 1108.741, 1109.525, 1.77208600e+08, 1.13933095e+09, 1.),
+    ('2023-02-28T00:00:00', 1112.246, 1119.568, 1109.827, 1113.43 , 1.32828124e+08, 6.65160380e+08, 1.),
+    ('2023-03-01T00:00:00', 1122.233, 1123.493, 1116.62 , 1123.274, 7.21718910e+07, 3.71172850e+08, 1.)
+   ],
+  dtype=[('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codestr

板块代码(概念、行业)

required
startUnion[datetime.date, datetime.datetime]

起始时间

required
endUnion[datetime.date, datetime.datetime]

结束时间,如果未指明,则取当前时间

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]

返回dtype为coretypes.bars_dtype的一维numpy数组。

+
+ Source code in omicron/models/board.py +
@classmethod
+async def get_bars_in_range(
+    cls, code: str, start: Frame, end: Frame = None
+) -> BarsArray:
+    """从持久化数据库中获取介于[`start`, `end`]间的行情记录
+
+    Examples:
+    ```python
+    start = datetime.date(2022, 9, 1)  # 起始时间, 可修改
+    end = datetime.date(2023, 3, 1)  # 截止时间, 可修改
+    board_code = '881128' # 汽车服务, 可修改
+    bars = await Board.get_bars_in_range(board_code, start, end)
+    bars[-3:] # 打印后3条数据
+
+    # prints:
+    rec.array([
+        ('2023-02-27T00:00:00', 1117.748, 1124.364, 1108.741, 1109.525, 1.77208600e+08, 1.13933095e+09, 1.),
+        ('2023-02-28T00:00:00', 1112.246, 1119.568, 1109.827, 1113.43 , 1.32828124e+08, 6.65160380e+08, 1.),
+        ('2023-03-01T00:00:00', 1122.233, 1123.493, 1116.62 , 1123.274, 7.21718910e+07, 3.71172850e+08, 1.)
+       ],
+      dtype=[('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])
+    ```
+    Args:
+        code: 板块代码(概念、行业)
+        start: 起始时间
+        end: 结束时间,如果未指明,则取当前时间
+
+    Returns:
+        返回dtype为`coretypes.bars_dtype`的一维numpy数组。
+    """
+    end = end or datetime.datetime.now()
+    code = f"{code}.THS"
+
+    keep_cols = ["_time"] + list(bars_cols[1:])
+
+    flux = (
+        Flux()
+        .bucket(cfg.influxdb.bucket_name)
+        .range(start, end)
+        .measurement(cls.measurement)
+        .fields(keep_cols)
+        .tags({"code": code})
+    )
+
+    serializer = DataframeDeserializer(
+        encoding="utf-8",
+        names=[
+            "_",
+            "table",
+            "result",
+            "frame",
+            "code",
+            "amount",
+            "close",
+            "factor",
+            "high",
+            "low",
+            "open",
+            "volume",
+        ],
+        engine="c",
+        skiprows=0,
+        header=0,
+        usecols=bars_cols,
+        parse_dates=["frame"],
+    )
+
+    client = get_influx_client()
+    result = await client.query(flux, serializer)
+    return result.to_records(index=False).astype(bars_dtype)
+
+
+
+ +
+ + + + + + + + + + + +
+ +
+ +
+ + + +
+ + + +

+ +BoardType (Enum) + + + + +

+ +
+ +

An enumeration.

+ +
+ Source code in omicron/models/board.py +
class BoardType(Enum):
+    INDUSTRY = "industry"
+    CONCEPT = "concept"
+
+
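For reference, a quick check (a sketch based on the source above) that the enum values are the strings `_rpc_call` sends as `board_type`:

```python
from omicron.models.board import BoardType

# the .value of each member is what _rpc_call puts into the "board_type" field
assert BoardType.INDUSTRY.value == "industry"
assert BoardType.CONCEPT.value == "concept"
```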
+ + + +
+ + + + + + + + + + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/dal/flux/index.html b/2.0.0/api/dal/flux/index.html new file mode 100644 index 00000000..80ed0394 --- /dev/null +++ b/2.0.0/api/dal/flux/index.html @@ -0,0 +1,3085 @@ + + + + + + + + + + + + + + + + Flux - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +

Flux

+ +

Flux - the query language builder for influxdb

+ + +
+ + +
+ +

Helper functions for building flux query expression

+ +
+ Source code in omicron/dal/influx/flux.py +
class Flux(object):
+    """Helper functions for building flux query expression"""
+
+    EPOCH_START = datetime.datetime(1970, 1, 1, 0, 0, 0)
+
+    def __init__(self, auto_pivot=True, no_sys_cols=True):
+        """初始化Flux对象
+
+        Args:
+            auto_pivot : 是否自动将查询列字段组装成行. Defaults to True.
+            no_sys_cols: 是否自动将系统字段删除. Defaults to True.请参考[drop_sys_cols][omicron.dal.influx.flux.Flux.drop_sys_cols]
+        """
+        self._cols = None
+        self.expressions = defaultdict(list)
+        self._auto_pivot = auto_pivot
+        self._last_n = None
+        self.no_sys_cols = no_sys_cols
+
+    def __str__(self):
+        return self._compose()
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}>:\n{self._compose()}"
+
+    def _compose(self):
+        """将所有表达式合并为一个表达式"""
+        if not all(
+            [
+                "bucket" in self.expressions,
+                "measurement" in self.expressions,
+                "range" in self.expressions,
+            ]
+        ):
+            raise AssertionError("bucket, measurement and range must be set")
+
+        expr = [self.expressions[k] for k in ("bucket", "range", "measurement")]
+
+        if self.expressions.get("tags"):
+            expr.append(self.expressions["tags"])
+
+        if self.expressions.get("fields"):
+            expr.append(self.expressions["fields"])
+
+        if "drop" not in self.expressions and self.no_sys_cols:
+            self.drop_sys_cols()
+
+        if self.expressions.get("drop"):
+            expr.append(self.expressions["drop"])
+
+        if self._auto_pivot and "pivot" not in self.expressions:
+            self.pivot()
+
+        if self.expressions.get("pivot"):
+            expr.append(self.expressions["pivot"])
+
+        if self.expressions.get("group"):
+            expr.append(self.expressions["group"])
+
+        if self.expressions.get("sort"):
+            expr.append(self.expressions["sort"])
+
+        if self.expressions.get("limit"):
+            expr.append(self.expressions["limit"])
+
+        # influxdb默认按升序排列,但last_n查询的结果则必然是降序的,所以还需要再次排序
+        if self._last_n:
+            expr.append(
+                "\n".join(
+                    [
+                        f' |> top(n: {self._last_n}, columns: ["_time"])',
+                        ' |> sort(columns: ["_time"], desc: false)',
+                    ]
+                )
+            )
+
+        return "\n".join(expr)
+
+    def bucket(self, bucket: str) -> "Flux":
+        """add bucket to query expression
+
+        Raises:
+            DuplicateOperationError: 一个查询中只允许指定一个source,如果表达式中已经指定了bucket,则抛出异常
+
+        Returns:
+            Flux对象
+        """
+        if "bucket" in self.expressions:
+            raise DuplicateOperationError("bucket has been set")
+
+        self.expressions["bucket"] = f'from(bucket: "{bucket}")'
+
+        return self
+
+    def measurement(self, measurement: str) -> "Flux":
+        """add measurement filter to query
+
+        Raises:
+            DuplicateOperationError: 一次查询中只允许指定一个measurement, 如果表达式中已经存在measurement, 则抛出异常
+
+        Returns:
+            Flux对象自身,以便进行管道操作
+        """
+        if "measurement" in self.expressions:
+            raise DuplicateOperationError("measurement has been set")
+
+        self.expressions[
+            "measurement"
+        ] = f'  |> filter(fn: (r) => r["_measurement"] == "{measurement}")'
+
+        return self
+
+    def range(
+        self, start: Frame, end: Frame, right_close=True, precision="s"
+    ) -> "Flux":
+        """添加时间范围过滤
+
+        必须指定的查询条件,否则influxdb会报unbound查询错,因为这种情况下,返回的数据量将非常大。
+
+        在格式化时间时,需要根据`precision`生成时间字符串。在向Influxdb发送请求时,应该注意查询参数中指定的时间精度与这里使用的保持一致。
+
+        Influxdb的查询结果默认不包含结束时间,当`right_close`指定为True时,我们将根据指定的精度修改`end`时间,使之仅比`end`多一个时间单位,从而保证查询结果会包含`end`。
+
+        Raises:
+            DuplicateOperationError: 一个查询中只允许指定一次时间范围,如果range表达式已经存在,则抛出异常
+        Args:
+            start: 开始时间
+            end: 结束时间
+            right_close: 查询结果是否包含结束时间。
+            precision: 时间精度,默认为秒。
+
+        Returns:
+            Flux对象,以支持管道操作
+        """
+        if "range" in self.expressions:
+            raise DuplicateOperationError("range has been set")
+
+        if precision not in ["s", "ms", "us"]:
+            raise AssertionError("precision must be 's', 'ms' or 'us'")
+
+        end = self.format_time(end, precision, right_close)
+        start = self.format_time(start, precision)
+
+        self.expressions["range"] = f"  |> range(start: {start}, stop: {end})"
+        return self
+
+    def limit(self, limit: int) -> "Flux":
+        """添加返回记录数限制
+
+        Raises:
+            DuplicateOperationError: 一个查询中只允许指定一次limit,如果limit表达式已经存在,则抛出异常
+
+        Args:
+            limit: 返回记录数限制
+
+        Returns:
+            Flux对象,以便进行管道操作
+        """
+        if "limit" in self.expressions:
+            raise DuplicateOperationError("limit has been set")
+
+        self.expressions["limit"] = "  |> limit(n: %d)" % limit
+        return self
+
+    @classmethod
+    def to_timestamp(cls, tm: Frame, precision: str = "s") -> int:
+        """将时间根据精度转换为unix时间戳
+
+        在往influxdb写入数据时,line-protocol要求的时间戳为unix timestamp,并且与其精度对应。
+
+        influxdb始终使用UTC时间,因此,`tm`也必须已经转换成UTC时间。
+
+        Args:
+            tm: 时间
+            precision: 时间精度,默认为秒。
+
+        Returns:
+            时间戳
+        """
+        if precision not in ["s", "ms", "us"]:
+            raise AssertionError("precision must be 's', 'ms' or 'us'")
+
+        # get int repr of tm, in seconds unit
+        if isinstance(tm, np.datetime64):
+            tm = tm.astype("datetime64[s]").astype("int")
+        elif isinstance(tm, datetime.datetime):
+            tm = tm.timestamp()
+        else:
+            tm = arrow.get(tm).timestamp()
+
+        return int(tm * 10 ** ({"s": 0, "ms": 3, "us": 6}[precision]))
+
+    @classmethod
+    def format_time(cls, tm: Frame, precision: str = "s", shift_forward=False) -> str:
+        """将时间转换成客户端对应的精度,并以 RFC3339 timestamps格式串(即influxdb要求的格式)返回。
+
+        如果这个时间是作为查询的range中的结束时间使用时,由于influx查询的时间范围是左闭右开的,因此如果你需要查询的是一个闭区间,则需要将`end`的时间向前偏移一个精度。通过传入`shift_forward = True`可以完成这种转换。
+
+        Examples:
+            >>> # by default, the precision is seconds, and convert a date
+            >>> Flux.format_time(datetime.date(2019, 1, 1))
+            '2019-01-01T00:00:00Z'
+
+            >>> # set precision to ms, convert a time
+            >>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision="ms")
+            '1978-07-08T12:34:56.123Z'
+
+            >>> # convert and forward shift
+            >>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward = True)
+            '1978-07-08T00:00:01Z'
+
+        Args:
+            tm : 待格式化的时间
+            precision: 时间精度,可选值为:'s', 'ms', 'us'
+            shift_forward: 如果为True,则将end向前偏移一个精度
+
+        Returns:
+            调整后符合influx时间规范的时间(字符串表示)
+        """
+        timespec = {"s": "seconds", "ms": "milliseconds", "us": "microseconds"}.get(
+            precision
+        )
+
+        if timespec is None:
+            raise ValueError(
+                f"precision must be one of 's', 'ms', 'us', but got {precision}"
+            )
+
+        tm = arrow.get(tm).naive
+
+        if shift_forward:
+            tm = tm + datetime.timedelta(**{timespec: 1})
+
+        return tm.isoformat(sep="T", timespec=timespec) + "Z"
+
+    def tags(self, tags: DefaultDict[str, List[str]]) -> "Flux":
+        """给查询添加tags过滤条件
+
+        此查询条件为过滤条件,并非必须。如果查询中没有指定tags,则会返回所有记录。
+
+        在实现上,既可以使用`contains`语法,也可以使用`or`语法(由于一条记录只能属于一个tag,所以,当指定多个tag进行查询时,它们之间的关系应该为`or`)。经验证,contains语法会始终先将所有符合条件的记录检索出来,再进行过滤。这样的效率比较低,特别是当tags的数量较少时,会远远比使用or语法慢。
+
+        Raises:
+            DuplicateOperationError: 一个查询中只允许执行一次,如果tag filter表达式已经存在,则抛出异常
+
+        Args:
+            tags : tags是一个{tagname: Union[str,[tag_values]]}对象。
+
+        Examples:
+            >>> flux = Flux()
+            >>> flux.tags({"code": ["000001", "000002"], "name": ["浦发银行"]}).expressions["tags"]
+            '  |> filter(fn: (r) => r["code"] == "000001" or r["code"] == "000002" or r["name"] == "浦发银行")'
+
+
+        Returns:
+            Flux对象,以便进行管道操作
+        """
+        if "tags" in self.expressions:
+            raise DuplicateOperationError("tags has been set")
+
+        filters = []
+        for tag, values in tags.items():
+            assert (
+                isinstance(values, str) or len(values) > 0
+            ), f"tag {tag} should not be empty or None"
+            if isinstance(values, str):
+                values = [values]
+
+            for v in values:
+                filters.append(f'r["{tag}"] == "{v}"')
+
+        op_expression = " or ".join(filters)
+
+        self.expressions["tags"] = f"  |> filter(fn: (r) => {op_expression})"
+
+        return self
+
+    def fields(self, fields: List, reserve_time_stamp: bool = True) -> "Flux":
+        """给查询添加field过滤条件
+
+        此查询条件为过滤条件,用以指定哪些field会出现在查询结果中,并非必须。如果查询中没有指定fields,则会返回所有记录。
+
+        由于一条记录只能属于一个_field,所以,当指定多个_field进行查询时,它们之间的关系应该为`or`。
+
+        Raises:
+            DuplicateOperationError: 一个查询中只允许执行一次,如果field filter表达式已经存在,则抛出异常
+        Args:
+            fields: 待查询的field列表
+            reserve_time_stamp: 是否保留时间戳`_time`,默认为True
+
+        Returns:
+            Flux对象,以便进行管道操作
+        """
+        if "fields" in self.expressions:
+            raise DuplicateOperationError("fields has been set")
+
+        self._cols = fields.copy()
+
+        if reserve_time_stamp and "_time" not in self._cols:
+            self._cols.append("_time")
+
+        self._cols = sorted(self._cols)
+
+        filters = [f'r["_field"] == "{name}"' for name in self._cols]
+
+        self.expressions["fields"] = f"  |> filter(fn: (r) => {' or '.join(filters)})"
+
+        return self
+
+    def pivot(
+        self,
+        row_keys: List[str] = ["_time"],
+        column_keys=["_field"],
+        value_column: str = "_value",
+    ) -> "Flux":
+        """pivot用来将以列为单位的数据转换为以行为单位的数据
+
+        Flux查询返回的结果通常都是以列为单位的数据,增加本pivot条件后,结果将被转换成为以行为单位的数据再返回。
+
+        这里实现的是measurement内的转换,请参考 [pivot](https://docs.influxdata.com/flux/v0.x/stdlib/universe/pivot/#align-fields-within-each-measurement-that-have-the-same-timestamp)
+
+
+        Args:
+            row_keys: 惟一确定输出中一行数据的列名字, 默认为["_time"]
+            column_keys: 列名称列表,默认为["_field"]
+            value_column: 值列名,默认为"_value"
+
+        Returns:
+            Flux对象,以便进行管道操作
+        """
+        if "pivot" in self.expressions:
+            raise DuplicateOperationError("pivot has been set")
+
+        columns = ",".join([f'"{name}"' for name in column_keys])
+        rowkeys = ",".join([f'"{name}"' for name in row_keys])
+
+        self.expressions[
+            "pivot"
+        ] = f'  |> pivot(columnKey: [{columns}], rowKey: [{rowkeys}], valueColumn: "{value_column}")'
+
+        return self
+
+    def sort(self, by: List[str] = None, desc: bool = False) -> "Flux":
+        """按照指定的列进行排序
+
+        根据[influxdb doc](https://docs.influxdata.com/influxdb/v2.0/query-data/flux/first-last/), 查询返回值默认地按时间排序。因此,如果仅仅是要求查询结果按时间排序,无须调用此API,但是,此API提供了按其它字段排序的能力。
+
+        另外,在一个有5000多个tag,共返回1M条记录的测试中,测试验证返回记录确实按_time升序排列。
+
+        Args:
+            by: 指定排序的列名称列表
+
+        Returns:
+            Flux对象,以便进行管道操作
+        """
+        if "sort" in self.expressions:
+            raise DuplicateOperationError("sort has been set")
+
+        if by is None:
+            by = ["_value"]
+        if isinstance(by, str):
+            by = [by]
+
+        columns_ = ",".join([f'"{name}"' for name in by])
+
+        desc = "true" if desc else "false"
+        self.expressions["sort"] = f"  |> sort(columns: [{columns_}], desc: {desc})"
+
+        return self
+
+    def group(self, by: Tuple[str]) -> "Flux":
+        """[summary]
+
+        Returns:
+            [description]
+        """
+        if "group" in self.expressions:
+            raise DuplicateOperationError("group has been set")
+
+        if isinstance(by, str):
+            by = [by]
+        cols = ",".join([f'"{col}"' for col in by])
+        self.expressions["group"] = f"  |> group(columns: [{cols}])"
+
+        return self
+
+    def latest(self, n: int) -> "Flux":
+        """获取最后n条数据,按时间增序返回
+
+        Flux查询的增强功能,相当于top + sort + limit
+
+        Args:
+            n: 最后n条数据
+
+        Returns:
+            Flux对象,以便进行管道操作
+        """
+        assert "top" not in self.expressions, "top and last_n can not be used together"
+        assert (
+            "sort" not in self.expressions
+        ), "sort and last_n can not be used together"
+        assert (
+            "limit" not in self.expressions
+        ), "limit and last_n can not be used together"
+
+        self._last_n = n
+
+        return self
+
+    @property
+    def cols(self) -> List[str]:
+        """the columns or the return records
+
+        the implementation is buggy. Influx doesn't tell us in which order these columns are.
+
+
+        Returns:
+            the column names of the returned records
+        """
+        # fixme: if keep in expression, then return group key + tag key + value key
+        # if keep not in expression, then stream, table, _time, ...
+        return sorted(self._cols)
+
+    def delete(
+        self,
+        measurement: str,
+        stop: datetime.datetime,
+        tags: dict = {},
+        start: datetime.datetime = None,
+        precision: str = "s",
+    ) -> dict:
+        """构建删除语句。
+
+        according to [delete-predicate](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/delete-predicate/), delete只支持AND逻辑操作,只支持“=”操作,不支持“!=”操作,可以使用任何字段或者tag,但不包括_time和_value字段。
+
+        由于influxdb这一段文档不是很清楚,根据试验结果,目前仅支持按时间范围和tags进行删除较好。如果某个column的值类型是字符串,则也可以通过`tags`参数传入,匹配后删除。但如果传入了非字符串类型的column,则将得到无法预料的结果。
+
+        Args:
+            measurement : [description]
+            stop : [description]
+            tags : 按tags和匹配的值进行删除。传入的tags中,key为tag名称,value为tag要匹配的取值,可以为str或者List[str]。
+            start : 起始时间。如果省略,则使用EPOCH_START.
+            precision : 时间精度。可以为“s”,“ms”,“us”
+        Returns:
+            删除语句
+        """
+        timespec = {"s": "seconds", "ms": "milliseconds", "us": "microseconds"}.get(
+            precision
+        )
+
+        if start is None:
+            start = self.EPOCH_START.isoformat(timespec=timespec) + "Z"
+
+        predicate = [f'_measurement="{measurement}"']
+        for key, value in tags.items():
+            if isinstance(value, list):
+                predicate.extend([f'{key} = "{v}"' for v in value])
+            else:
+                predicate.append(f'{key} = "{value}"')
+
+        command = {
+            "start": start,
+            "stop": f"{stop.isoformat(timespec=timespec)}Z",
+            "predicate": " AND ".join(predicate),
+        }
+
+        return command
+
+    def drop(self, cols: List[str]) -> "Flux":
+        """use this to drop columns before return result
+
+        Args:
+            cols : the name of columns to be dropped
+
+        Returns:
+            Flux object, to support pipe operation
+        """
+        if "drop" in self.expressions:
+            raise DuplicateOperationError("drop operation has been set already")
+
+        # add surrounding quotes
+        _cols = [f'"{c}"' for c in cols]
+        self.expressions["drop"] = f"  |> drop(columns: [{','.join(_cols)}])"
+
+        return self
+
+    def drop_sys_cols(self, cols: List[str] = None) -> "Flux":
+        """use this to drop ["_start", "_stop", "_measurement"], plus columns specified in `cols`, before return query result
+
+        please be noticed, after drop sys columns, there's still two sys columns left, which is "_time" and "table", and "_time" should usually be kept, "table" is one we're not able to removed. If you don't like _time in return result, you can specify it in `cols` parameter.
+
+        Args:
+            cols : the extra columns to be dropped
+
+        Returns:
+            Flux query object
+        """
+        _cols = ["_start", "_stop", "_measurement"]
+        if cols is not None:
+            _cols.extend(cols)
+
+        return self.drop(_cols)
+
+
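A short sketch of how the builder is typically chained; the bucket name and tag value below are placeholders, and `str(flux)` yields the composed Flux script (with `pivot` and `drop_sys_cols` appended automatically by `_compose`):

```python
import datetime

from omicron.dal.influx.flux import Flux

flux = (
    Flux()
    .bucket("my-bucket")  # placeholder bucket name
    .measurement("board_bars_1d")
    .range(datetime.date(2023, 1, 1), datetime.date(2023, 3, 1))
    .tags({"code": "881128.THS"})
    .fields(["open", "close"])
    .latest(10)
)

# bucket, range and measurement are mandatory; _compose() raises otherwise
print(str(flux))
```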
+ + + +
+ + + + + + + +
+ + + +

+cols: List[str] + + + property + readonly + + +

+ +
+ +

the columns of the returned records

+

The implementation is buggy: Influx doesn't tell us in which order these columns are returned.

+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[str]

the column names of the returned records

+ +
+ + + + + + +
+ + + +

+__init__(self, auto_pivot=True, no_sys_cols=True) + + + special + + +

+ +
+ +

初始化Flux对象

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
auto_pivot

是否自动将查询列字段组装成行. Defaults to True.

True
no_sys_cols

是否自动将系统字段删除. Defaults to True.请参考drop_sys_cols

True
+
+ Source code in omicron/dal/influx/flux.py +
def __init__(self, auto_pivot=True, no_sys_cols=True):
+    """初始化Flux对象
+
+    Args:
+        auto_pivot : 是否自动将查询列字段组装成行. Defaults to True.
+        no_sys_cols: 是否自动将系统字段删除. Defaults to True.请参考[drop_sys_cols][omicron.dal.influx.flux.Flux.drop_sys_cols]
+    """
+    self._cols = None
+    self.expressions = defaultdict(list)
+    self._auto_pivot = auto_pivot
+    self._last_n = None
+    self.no_sys_cols = no_sys_cols
+
+
+
+ +
+ + + + + +
+ + + +

+bucket(self, bucket) + + +

+ +
+ +

add bucket to query expression

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DuplicateOperationError

一个查询中只允许指定一个source,如果表达式中已经指定了bucket,则抛出异常

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象

+
+ Source code in omicron/dal/influx/flux.py +
def bucket(self, bucket: str) -> "Flux":
+    """add bucket to query expression
+
+    Raises:
+        DuplicateOperationError: 一个查询中只允许指定一个source,如果表达式中已经指定了bucket,则抛出异常
+
+    Returns:
+        Flux对象
+    """
+    if "bucket" in self.expressions:
+        raise DuplicateOperationError("bucket has been set")
+
+    self.expressions["bucket"] = f'from(bucket: "{bucket}")'
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+delete(self, measurement, stop, tags={}, start=None, precision='s') + + +

+ +
+ +

构建删除语句。

+

according to delete-predicate, delete只支持AND逻辑操作,只支持“=”操作,不支持“!=”操作,可以使用任何字段或者tag,但不包括_time和_value字段。

+

由于influxdb这一段文档不是很清楚,根据试验结果,目前仅支持按时间范围和tags进行删除较好。如果某个column的值类型是字符串,则也可以通过tags参数传入,匹配后删除。但如果传入了非字符串类型的column,则将得到无法预料的结果。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
measurement

[description]

required
stop

[description]

required
tags

按tags和匹配的值进行删除。传入的tags中,key为tag名称,value为tag要匹配的取值,可以为str或者List[str]。

{}
start

起始时间。如果省略,则使用EPOCH_START.

None
precision

时间精度。可以为“s”,“ms”,“us”

's'
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dict

删除语句

+
+ Source code in omicron/dal/influx/flux.py +
def delete(
+    self,
+    measurement: str,
+    stop: datetime.datetime,
+    tags: dict = {},
+    start: datetime.datetime = None,
+    precision: str = "s",
+) -> dict:
+    """构建删除语句。
+
+    according to [delete-predicate](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/delete-predicate/), delete只支持AND逻辑操作,只支持“=”操作,不支持“!=”操作,可以使用任何字段或者tag,但不包括_time和_value字段。
+
+    由于influxdb这一段文档不是很清楚,根据试验结果,目前仅支持按时间范围和tags进行删除较好。如果某个column的值类型是字符串,则也可以通过`tags`参数传入,匹配后删除。但如果传入了非字符串类型的column,则将得到无法预料的结果。
+
+    Args:
+        measurement : [description]
+        stop : [description]
+        tags : 按tags和匹配的值进行删除。传入的tags中,key为tag名称,value为tag要匹配的取值,可以为str或者List[str]。
+        start : 起始时间。如果省略,则使用EPOCH_START.
+        precision : 时间精度。可以为“s”,“ms”,“us”
+    Returns:
+        删除语句
+    """
+    timespec = {"s": "seconds", "ms": "milliseconds", "us": "microseconds"}.get(
+        precision
+    )
+
+    if start is None:
+        start = self.EPOCH_START.isoformat(timespec=timespec) + "Z"
+
+    predicate = [f'_measurement="{measurement}"']
+    for key, value in tags.items():
+        if isinstance(value, list):
+            predicate.extend([f'{key} = "{v}"' for v in value])
+        else:
+            predicate.append(f'{key} = "{value}"')
+
+    command = {
+        "start": start,
+        "stop": f"{stop.isoformat(timespec=timespec)}Z",
+        "predicate": " AND ".join(predicate),
+    }
+
+    return command
+
+
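A hedged sketch of the dict this builds; the measurement and tag value are placeholders:

```python
import datetime

from omicron.dal.influx.flux import Flux

cmd = Flux().delete(
    "board_bars_1d",  # placeholder measurement
    stop=datetime.datetime(2023, 3, 1),
    tags={"code": "881128.THS"},
)

# {'start': '1970-01-01T00:00:00Z',
#  'stop': '2023-03-01T00:00:00Z',
#  'predicate': '_measurement="board_bars_1d" AND code = "881128.THS"'}
print(cmd)
```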
+
+ +
+ + + +
+ + + +

+drop(self, cols) + + +

+ +
+ +

use this to drop columns before returning the result

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
cols

the name of columns to be dropped

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux object, to support pipe operation

+
+ Source code in omicron/dal/influx/flux.py +
def drop(self, cols: List[str]) -> "Flux":
+    """use this to drop columns before return result
+
+    Args:
+        cols : the name of columns to be dropped
+
+    Returns:
+        Flux object, to support pipe operation
+    """
+    if "drop" in self.expressions:
+        raise DuplicateOperationError("drop operation has been set already")
+
+    # add surrounding quotes
+    _cols = [f'"{c}"' for c in cols]
+    self.expressions["drop"] = f"  |> drop(columns: [{','.join(_cols)}])"
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+drop_sys_cols(self, cols=None) + + +

+ +
+ +

use this to drop ["_start", "_stop", "_measurement"], plus the columns specified in cols, before returning the query result

+

Please note that after dropping the sys columns, two sys columns remain: "_time" and "table". "_time" should usually be kept, while "table" is one we are not able to remove. If you don't want "_time" in the returned result, you can add it to the cols parameter.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
cols

the extra columns to be dropped

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux query object

+
+ Source code in omicron/dal/influx/flux.py +
def drop_sys_cols(self, cols: List[str] = None) -> "Flux":
+    """use this to drop ["_start", "_stop", "_measurement"], plus columns specified in `cols`, before return query result
+
+    please be noticed, after drop sys columns, there's still two sys columns left, which is "_time" and "table", and "_time" should usually be kept, "table" is one we're not able to removed. If you don't like _time in return result, you can specify it in `cols` parameter.
+
+    Args:
+        cols : the extra columns to be dropped
+
+    Returns:
+        Flux query object
+    """
+    _cols = ["_start", "_stop", "_measurement"]
+    if cols is not None:
+        _cols.extend(cols)
+
+    return self.drop(_cols)
+
+
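A small sketch of the resulting expression; `drop_sys_cols` simply delegates to `drop` with the three system columns prepended:

```python
from omicron.dal.influx.flux import Flux

flux = Flux()
flux.drop_sys_cols(["code"])  # drop the three sys columns plus "code"

# '  |> drop(columns: ["_start","_stop","_measurement","code"])'
print(flux.expressions["drop"])
```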
+
+ +
+ + + +
+ + + +

+fields(self, fields, reserve_time_stamp=True) + + +

+ +
+ +

给查询添加field过滤条件

+

此查询条件为过滤条件,用以指定哪些field会出现在查询结果中,并非必须。如果查询中没有指定fields,则会返回所有记录。

+

由于一条记录只能属于一个_field,所以,当指定多个_field进行查询时,它们之间的关系应该为or

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DuplicateOperationError

一个查询中只允许执行一次,如果field filter表达式已经存在,则抛出异常

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
fieldsList

待查询的field列表

required
reserve_time_stampbool

是否保留时间戳_time,默认为True

True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象,以便进行管道操作

+
+ Source code in omicron/dal/influx/flux.py +
def fields(self, fields: List, reserve_time_stamp: bool = True) -> "Flux":
+    """给查询添加field过滤条件
+
+    此查询条件为过滤条件,用以指定哪些field会出现在查询结果中,并非必须。如果查询中没有指定fields,则会返回所有记录。
+
+    由于一条记录只能属于一个_field,所以,当指定多个_field进行查询时,它们之间的关系应该为`or`。
+
+    Raises:
+        DuplicateOperationError: 一个查询中只允许执行一次,如果field filter表达式已经存在,则抛出异常
+    Args:
+        fields: 待查询的field列表
+        reserve_time_stamp: 是否保留时间戳`_time`,默认为True
+
+    Returns:
+        Flux对象,以便进行管道操作
+    """
+    if "fields" in self.expressions:
+        raise DuplicateOperationError("fields has been set")
+
+    self._cols = fields.copy()
+
+    if reserve_time_stamp and "_time" not in self._cols:
+        self._cols.append("_time")
+
+    self._cols = sorted(self._cols)
+
+    filters = [f'r["_field"] == "{name}"' for name in self._cols]
+
+    self.expressions["fields"] = f"  |> filter(fn: (r) => {' or '.join(filters)})"
+
+    return self
+
+
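A small sketch of the filter this produces; note that the names are sorted and `_time` is appended by default:

```python
from omicron.dal.influx.flux import Flux

flux = Flux()
flux.fields(["close", "open"])

# '  |> filter(fn: (r) => r["_field"] == "_time" or r["_field"] == "close" or r["_field"] == "open")'
print(flux.expressions["fields"])
```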
+
+ +
+ + + +
+ + + +

+format_time(tm, precision='s', shift_forward=False) + + + classmethod + + +

+ +
+ +

将时间转换成客户端对应的精度,并以 RFC3339 timestamps格式串(即influxdb要求的格式)返回。

+

如果这个时间是作为查询的range中的结束时间使用时,由于influx查询的时间范围是左闭右开的,因此如果你需要查询的是一个闭区间,则需要将end的时间向前偏移一个精度。通过传入shift_forward = True可以完成这种转换。

+ +

Examples:

+
>>> # by default, the precision is seconds, and convert a date
+>>> Flux.format_time(datetime.date(2019, 1, 1))
+'2019-01-01T00:00:00Z'
+
+
>>> # set precision to ms, convert a time
+>>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision="ms")
+'1978-07-08T12:34:56.123Z'
+
+
>>> # convert and forward shift
+>>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward = True)
+'1978-07-08T00:00:01Z'
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tm

待格式化的时间

required
precisionstr

时间精度,可选值为:'s', 'ms', 'us'

's'
shift_forward

如果为True,则将end向前偏移一个精度

False
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
str

调整后符合influx时间规范的时间(字符串表示)

+
+ Source code in omicron/dal/influx/flux.py +
@classmethod
+def format_time(cls, tm: Frame, precision: str = "s", shift_forward=False) -> str:
+    """将时间转换成客户端对应的精度,并以 RFC3339 timestamps格式串(即influxdb要求的格式)返回。
+
+    如果这个时间是作为查询的range中的结束时间使用时,由于influx查询的时间范围是左闭右开的,因此如果你需要查询的是一个闭区间,则需要将`end`的时间向前偏移一个精度。通过传入`shift_forward = True`可以完成这种转换。
+
+    Examples:
+        >>> # by default, the precision is seconds, and convert a date
+        >>> Flux.format_time(datetime.date(2019, 1, 1))
+        '2019-01-01T00:00:00Z'
+
+        >>> # set precision to ms, convert a time
+        >>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision="ms")
+        '1978-07-08T12:34:56.123Z'
+
+        >>> # convert and forward shift
+        >>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward = True)
+        '1978-07-08T00:00:01Z'
+
+    Args:
+        tm : 待格式化的时间
+        precision: 时间精度,可选值为:'s', 'ms', 'us'
+        shift_forward: 如果为True,则将end向前偏移一个精度
+
+    Returns:
+        调整后符合influx时间规范的时间(字符串表示)
+    """
+    timespec = {"s": "seconds", "ms": "milliseconds", "us": "microseconds"}.get(
+        precision
+    )
+
+    if timespec is None:
+        raise ValueError(
+            f"precision must be one of 's', 'ms', 'us', but got {precision}"
+        )
+
+    tm = arrow.get(tm).naive
+
+    if shift_forward:
+        tm = tm + datetime.timedelta(**{timespec: 1})
+
+    return tm.isoformat(sep="T", timespec=timespec) + "Z"
+
+
+
+ +
+ + + +
+ + + +

+group(self, by) + + +

+ +
+ +

[summary]

+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

[description]

+
+ Source code in omicron/dal/influx/flux.py +
def group(self, by: Tuple[str]) -> "Flux":
+    """[summary]
+
+    Returns:
+        [description]
+    """
+    if "group" in self.expressions:
+        raise DuplicateOperationError("group has been set")
+
+    if isinstance(by, str):
+        by = [by]
+    cols = ",".join([f'"{col}"' for col in by])
+    self.expressions["group"] = f"  |> group(columns: [{cols}])"
+
+    return self
+
+
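Since the docstring above is still a placeholder, here is a sketch of what `group` actually appends:

```python
from omicron.dal.influx.flux import Flux

flux = Flux()
flux.group(("code", "name"))

# '  |> group(columns: ["code","name"])'
print(flux.expressions["group"])
```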
+
+ +
+ + + +
+ + + +

+latest(self, n) + + +

+ +
+ +

获取最后n条数据,按时间增序返回

+

Flux查询的增强功能,相当于top + sort + limit

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
nint

最后n条数据

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象,以便进行管道操作

+
+ Source code in omicron/dal/influx/flux.py +
def latest(self, n: int) -> "Flux":
+    """获取最后n条数据,按时间增序返回
+
+    Flux查询的增强功能,相当于top + sort + limit
+
+    Args:
+        n: 最后n条数据
+
+    Returns:
+        Flux对象,以便进行管道操作
+    """
+    assert "top" not in self.expressions, "top and last_n can not be used together"
+    assert (
+        "sort" not in self.expressions
+    ), "sort and last_n can not be used together"
+    assert (
+        "limit" not in self.expressions
+    ), "limit and last_n can not be used together"
+
+    self._last_n = n
+
+    return self
+
+
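`latest` only records `n`; the extra `top` and `sort` stages are appended when the query is composed. A sketch (bucket and measurement names are placeholders):

```python
import datetime

from omicron.dal.influx.flux import Flux

flux = (
    Flux()
    .bucket("my-bucket")  # placeholder bucket name
    .measurement("board_bars_1d")
    .range(datetime.date(2023, 1, 1), datetime.date(2023, 3, 1))
    .latest(3)
)

# the composed script ends with:
#   |> top(n: 3, columns: ["_time"])
#   |> sort(columns: ["_time"], desc: false)
print(str(flux))
```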
+
+ +
+ + + +
+ + + +

+limit(self, limit) + + +

+ +
+ +

添加返回记录数限制

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DuplicateOperationError

一个查询中只允许指定一次limit,如果limit表达式已经存在,则抛出异常

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
limitint

返回记录数限制

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象,以便进行管道操作

+
+ Source code in omicron/dal/influx/flux.py +
def limit(self, limit: int) -> "Flux":
+    """添加返回记录数限制
+
+    Raises:
+        DuplicateOperationError: 一个查询中只允许指定一次limit,如果limit表达式已经存在,则抛出异常
+
+    Args:
+        limit: 返回记录数限制
+
+    Returns:
+        Flux对象,以便进行管道操作
+    """
+    if "limit" in self.expressions:
+        raise DuplicateOperationError("limit has been set")
+
+    self.expressions["limit"] = "  |> limit(n: %d)" % limit
+    return self
+
+
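For completeness, a one-line sketch of the expression `limit` registers:

```python
from omicron.dal.influx.flux import Flux

flux = Flux()
flux.limit(100)

# '  |> limit(n: 100)'
print(flux.expressions["limit"])
```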
+
+ +
+ + + +
+ + + +

+measurement(self, measurement) + + +

+ +
+ +

add measurement filter to query

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DuplicateOperationError

一次查询中只允许指定一个measurement, 如果表达式中已经存在measurement, 则抛出异常

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象自身,以便进行管道操作

+
+ Source code in omicron/dal/influx/flux.py +
def measurement(self, measurement: str) -> "Flux":
+    """add measurement filter to query
+
+    Raises:
+        DuplicateOperationError: 一次查询中只允许指定一个measurement, 如果表达式中已经存在measurement, 则抛出异常
+
+    Returns:
+        Flux对象自身,以便进行管道操作
+    """
+    if "measurement" in self.expressions:
+        raise DuplicateOperationError("measurement has been set")
+
+    self.expressions[
+        "measurement"
+    ] = f'  |> filter(fn: (r) => r["_measurement"] == "{measurement}")'
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+pivot(self, row_keys=['_time'], column_keys=['_field'], value_column='_value') + + +

+ +
+ +

pivot用来将以列为单位的数据转换为以行为单位的数据

+

Flux查询返回的结果通常都是以列为单位的数据,增加本pivot条件后,结果将被转换成为以行为单位的数据再返回。

+

这里实现的是measurement内的转换,请参考 pivot

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
row_keysList[str]

惟一确定输出中一行数据的列名字, 默认为["_time"]

['_time']
column_keys

列名称列表,默认为["_field"]

['_field']
value_columnstr

值列名,默认为"_value"

'_value'
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象,以便进行管道操作

+
+ Source code in omicron/dal/influx/flux.py +
def pivot(
+    self,
+    row_keys: List[str] = ["_time"],
+    column_keys=["_field"],
+    value_column: str = "_value",
+) -> "Flux":
+    """pivot用来将以列为单位的数据转换为以行为单位的数据
+
+    Flux查询返回的结果通常都是以列为单位的数据,增加本pivot条件后,结果将被转换成为以行为单位的数据再返回。
+
+    这里实现的是measurement内的转换,请参考 [pivot](https://docs.influxdata.com/flux/v0.x/stdlib/universe/pivot/#align-fields-within-each-measurement-that-have-the-same-timestamp)
+
+
+    Args:
+        row_keys: 惟一确定输出中一行数据的列名字, 默认为["_time"]
+        column_keys: 列名称列表,默认为["_field"]
+        value_column: 值列名,默认为"_value"
+
+    Returns:
+        Flux对象,以便进行管道操作
+    """
+    if "pivot" in self.expressions:
+        raise DuplicateOperationError("pivot has been set")
+
+    columns = ",".join([f'"{name}"' for name in column_keys])
+    rowkeys = ",".join([f'"{name}"' for name in row_keys])
+
+    self.expressions[
+        "pivot"
+    ] = f'  |> pivot(columnKey: [{columns}], rowKey: [{rowkeys}], valueColumn: "{value_column}")'
+
+    return self
+
+
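A sketch of the default pivot expression (the same call `_compose` makes automatically when `auto_pivot` is True):

```python
from omicron.dal.influx.flux import Flux

flux = Flux()
flux.pivot()  # defaults: rowKey ["_time"], columnKey ["_field"], valueColumn "_value"

# '  |> pivot(columnKey: ["_field"], rowKey: ["_time"], valueColumn: "_value")'
print(flux.expressions["pivot"])
```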
+
+ +
+ + + +
+ + + +

+range(self, start, end, right_close=True, precision='s') + + +

+ +
+ +

添加时间范围过滤

+

必须指定的查询条件,否则influxdb会报unbound查询错,因为这种情况下,返回的数据量将非常大。

+

在格式化时间时,需要根据precision生成时间字符串。在向Influxdb发送请求时,应该注意查询参数中指定的时间精度与这里使用的保持一致。

+

Influxdb的查询结果默认不包含结束时间,当right_close指定为True时,我们将根据指定的精度修改end时间,使之仅比end多一个时间单位,从而保证查询结果会包含end

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DuplicateOperationError

一个查询中只允许指定一次时间范围,如果range表达式已经存在,则抛出异常

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startUnion[datetime.date, datetime.datetime]

开始时间

required
endUnion[datetime.date, datetime.datetime]

结束时间

required
right_close

查询结果是否包含结束时间。

True
precision

时间精度,默认为秒。

's'
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

Flux对象,以支持管道操作

+
+ Source code in omicron/dal/influx/flux.py +
def range(
+    self, start: Frame, end: Frame, right_close=True, precision="s"
+) -> "Flux":
+    """添加时间范围过滤
+
+    必须指定的查询条件,否则influxdb会报unbound查询错,因为这种情况下,返回的数据量将非常大。
+
+    在格式化时间时,需要根据`precision`生成时间字符串。在向Influxdb发送请求时,应该注意查询参数中指定的时间精度与这里使用的保持一致。
+
+    Influxdb的查询结果默认不包含结束时间,当`right_close`指定为True时,我们将根据指定的精度修改`end`时间,使之仅比`end`多一个时间单位,从而保证查询结果会包含`end`。
+
+    Raises:
+        DuplicateOperationError: 一个查询中只允许指定一次时间范围,如果range表达式已经存在,则抛出异常
+    Args:
+        start: 开始时间
+        end: 结束时间
+        right_close: 查询结果是否包含结束时间。
+        precision: 时间精度,默认为秒。
+
+    Returns:
+        Flux对象,以支持管道操作
+    """
+    if "range" in self.expressions:
+        raise DuplicateOperationError("range has been set")
+
+    if precision not in ["s", "ms", "us"]:
+        raise AssertionError("precision must be 's', 'ms' or 'us'")
+
+    end = self.format_time(end, precision, right_close)
+    start = self.format_time(start, precision)
+
+    self.expressions["range"] = f"  |> range(start: {start}, stop: {end})"
+    return self
+
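A hedged sketch (the dates are made up; the exact time strings in the emitted clause come from Flux.format_time, which is not shown here, so only the presence of the clause is asserted):

>>> import datetime
>>> flux = Flux()
>>> _ = flux.range(datetime.date(2023, 1, 1), datetime.date(2023, 1, 31), precision="s")
>>> "range" in flux.expressions
True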
+
+
+ +
+ + + +
+ + + +

+sort(self, by=None, desc=False) + + +

+ +
+ +

Sort by the specified columns.

+

According to the influxdb doc, query results are sorted by time by default, so if you only need the result sorted by time there is no need to call this API; it does, however, provide the ability to sort by other fields.

+

In addition, a test with more than 5,000 tags returning 1M records verified that the returned records are indeed sorted by _time in ascending order.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
byList[str]

List of column names to sort by.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

The Flux object, so calls can be chained.

+
+ Source code in omicron/dal/influx/flux.py +
def sort(self, by: List[str] = None, desc: bool = False) -> "Flux":
+    """按照指定的列进行排序
+
+    根据[influxdb doc](https://docs.influxdata.com/influxdb/v2.0/query-data/flux/first-last/), 查询返回值默认地按时间排序。因此,如果仅仅是要求查询结果按时间排序,无须调用此API,但是,此API提供了按其它字段排序的能力。
+
+    另外,在一个有5000多个tag,共返回1M条记录的测试中,测试验证返回记录确实按_time升序排列。
+
+    Args:
+        by: 指定排序的列名称列表
+
+    Returns:
+        Flux对象,以便进行管道操作
+    """
+    if "sort" in self.expressions:
+        raise DuplicateOperationError("sort has been set")
+
+    if by is None:
+        by = ["_value"]
+    if isinstance(by, str):
+        by = [by]
+
+    columns_ = ",".join([f'"{name}"' for name in by])
+
+    desc = "true" if desc else "false"
+    self.expressions["sort"] = f"  |> sort(columns: [{columns_}], desc: {desc})"
+
+    return self
+
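A minimal sketch (the column name is made up); the emitted clause follows the f-string in the source above:

>>> flux = Flux()
>>> flux.sort(by="code", desc=True).expressions["sort"]
'  |> sort(columns: ["code"], desc: true)'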
+
+
+ +
+ + + +
+ + + +

+tags(self, tags) + + +

+ +
+ +

Add a tags filter to the query.

+

This is an optional filter; if no tags are specified, all records are returned.

+

The implementation could use either contains syntax or or syntax (since a record can belong to only one tag, the relation between multiple tags in a query should be or). Testing showed that contains always retrieves all candidate records first and then filters them, which is inefficient; especially when the number of tags is small it is far slower than or.

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DuplicateOperationError

May only be applied once per query; if a tag filter expression already exists, this exception is raised.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tags

tags is a {tagname: Union[str, [tag_values]]} mapping.

required
+

Examples:

+
>>> flux = Flux()
+>>> flux.tags({"code": ["000001", "000002"], "name": ["浦发银行"]}).expressions["tags"]
+'  |> filter(fn: (r) => r["code"] == "000001" or r["code"] == "000002" or r["name"] == "浦发银行")'
+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Flux

The Flux object, so calls can be chained.

+
+ Source code in omicron/dal/influx/flux.py +
def tags(self, tags: DefaultDict[str, List[str]]) -> "Flux":
+    """给查询添加tags过滤条件
+
+    此查询条件为过滤条件,并非必须。如果查询中没有指定tags,则会返回所有记录。
+
+    在实现上,既可以使用`contains`语法,也可以使用`or`语法(由于一条记录只能属于一个tag,所以,当指定多个tag进行查询时,它们之间的关系应该为`or`)。经验证,contains语法会始终先将所有符合条件的记录检索出来,再进行过滤。这样的效率比较低,特别是当tags的数量较少时,会远远比使用or语法慢。
+
+    Raises:
+        DuplicateOperationError: 一个查询中只允许执行一次,如果tag filter表达式已经存在,则抛出异常
+
+    Args:
+        tags : tags是一个{tagname: Union[str,[tag_values]]}对象。
+
+    Examples:
+        >>> flux = Flux()
+        >>> flux.tags({"code": ["000001", "000002"], "name": ["浦发银行"]}).expressions["tags"]
+        '  |> filter(fn: (r) => r["code"] == "000001" or r["code"] == "000002" or r["name"] == "浦发银行")'
+
+
+    Returns:
+        Flux对象,以便进行管道操作
+    """
+    if "tags" in self.expressions:
+        raise DuplicateOperationError("tags has been set")
+
+    filters = []
+    for tag, values in tags.items():
+        assert (
+            isinstance(values, str) or len(values) > 0
+        ), f"tag {tag} should not be empty or None"
+        if isinstance(values, str):
+            values = [values]
+
+        for v in values:
+            filters.append(f'r["{tag}"] == "{v}"')
+
+    op_expression = " or ".join(filters)
+
+    self.expressions["tags"] = f"  |> filter(fn: (r) => {op_expression})"
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+to_timestamp(tm, precision='s') + + + classmethod + + +

+ +
+ +

Convert a time to a unix timestamp according to the given precision.

+

When writing data to InfluxDB, the line protocol requires a unix timestamp that matches the declared precision.

+

InfluxDB always uses UTC, so tm must already be converted to UTC.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tmUnion[datetime.date, datetime.datetime]

The time to convert.

required
precisionstr

Time precision; defaults to seconds.

's'
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

The unix timestamp.

+
+ Source code in omicron/dal/influx/flux.py +
@classmethod
+def to_timestamp(cls, tm: Frame, precision: str = "s") -> int:
+    """将时间根据精度转换为unix时间戳
+
+    在往influxdb写入数据时,line-protocol要求的时间戳为unix timestamp,并且与其精度对应。
+
+    influxdb始终使用UTC时间,因此,`tm`也必须已经转换成UTC时间。
+
+    Args:
+        tm: 时间
+        precision: 时间精度,默认为秒。
+
+    Returns:
+        时间戳
+    """
+    if precision not in ["s", "ms", "us"]:
+        raise AssertionError("precision must be 's', 'ms' or 'us'")
+
+    # get int repr of tm, in seconds unit
+    if isinstance(tm, np.datetime64):
+        tm = tm.astype("datetime64[s]").astype("int")
+    elif isinstance(tm, datetime.datetime):
+        tm = tm.timestamp()
+    else:
+        tm = arrow.get(tm).timestamp()
+
+    return int(tm * 10 ** ({"s": 0, "ms": 3, "us": 6}[precision]))
+
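A short sketch using np.datetime64 so the result does not depend on the local timezone:

>>> import numpy as np
>>> Flux.to_timestamp(np.datetime64("1970-01-01T00:01:00"), "s")
60
>>> Flux.to_timestamp(np.datetime64("1970-01-01T00:01:00"), "ms")
60000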
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/dal/influxclient/index.html b/2.0.0/api/dal/influxclient/index.html new file mode 100644 index 00000000..84aa7383 --- /dev/null +++ b/2.0.0/api/dal/influxclient/index.html @@ -0,0 +1,2557 @@ + + + + + + + + + + + + + + + + InfluxClient - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + + +

InfluxClient

+ +

InfluxClient - the performant async client for InfluxDB

+ + +
+ + +
+ + +
+ Source code in omicron/dal/influx/influxclient.py +
class InfluxClient:
+    def __init__(
+        self,
+        url: str,
+        token: str,
+        bucket: str,
+        org: str = None,
+        enable_compress=False,
+        chunk_size: int = 5000,
+        precision: str = "s",
+    ):
+        """[summary]
+
+        Args:
+            url ([type]): [description]
+            token ([type]): [description]
+            bucket ([type]): [description]
+            org ([type], optional): [description]. Defaults to None.
+            enable_compress ([type], optional): [description]. Defaults to False.
+            chunk_size: number of lines to be saved in one request
+            precision: 支持的时间精度
+        """
+        self._url = url
+        self._bucket = bucket
+        self._enable_compress = enable_compress
+        self._org = org
+        self._org_id = None  # 需要时通过查询获取,此后不再更新
+        self._token = token
+
+        # influxdb 2.0起支持的时间精度有:ns, us, ms, s。本客户端只支持s, ms和us
+        self._precision = precision.lower()
+        if self._precision not in ["s", "ms", "us"]:  # pragma: no cover
+            raise ValueError("precision must be one of ['s', 'ms', 'us']")
+
+        self._chunk_size = chunk_size
+
+        # write
+        self._write_url = f"{self._url}/api/v2/write?org={self._org}&bucket={self._bucket}&precision={self._precision}"
+
+        self._write_headers = {
+            "Content-Type": "text/plain; charset=utf-8",
+            "Authorization": f"Token {token}",
+            "Accept": "application/json",
+        }
+
+        if self._enable_compress:
+            self._write_headers["Content-Encoding"] = "gzip"
+
+        self._query_url = f"{self._url}/api/v2/query?org={self._org}"
+        self._query_headers = {
+            "Authorization": f"Token {token}",
+            "Content-Type": "application/vnd.flux",
+            # influx查询结果格式,无论如何指定(或者不指定),在2.1中始终是csv格式
+            "Accept": "text/csv",
+        }
+
+        if self._enable_compress:
+            self._query_headers["Accept-Encoding"] = "gzip"
+
+        self._delete_url = (
+            f"{self._url}/api/v2/delete?org={self._org}&bucket={self._bucket}"
+        )
+        self._delete_headers = {
+            "Authorization": f"Token {token}",
+            "Content-Type": "application/json",
+        }
+
+    async def save(
+        self,
+        data: Union[np.ndarray, DataFrame],
+        measurement: str = None,
+        tag_keys: List[str] = [],
+        time_key: str = None,
+        global_tags: Dict = {},
+        chunk_size: int = None,
+    ) -> None:
+        """save `data` into influxdb
+
+        if `data` is a pandas.DataFrame or numpy structured array, it will be converted to line protocol and saved. If `data` is str, use the `write` method instead.
+
+        Args:
+            data: data to be saved
+            measurement: the name of measurement
+            tag_keys: which columns name will be used as tags
+            chunk_size: number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to `self._chunk_size`
+
+        Raises:
+            InfluxDBWriteError: if write failed
+
+        """
+        # todo: add more errors raise
+        if isinstance(data, DataFrame):
+            assert (
+                measurement is not None
+            ), "measurement must be specified when data is a DataFrame"
+
+            if tag_keys:
+                assert set(tag_keys) <= set(
+                    data.columns.tolist()
+                ), "tag_keys must be in data.columns"
+
+            serializer = DataframeSerializer(
+                data,
+                measurement,
+                time_key,
+                tag_keys,
+                global_tags,
+                precision=self._precision,
+            )
+            if chunk_size == -1:
+                chunk_size = len(data)
+
+            for lines in serializer.serialize(chunk_size or self._chunk_size):
+                await self.write(lines)
+        elif isinstance(data, np.ndarray):
+            assert (
+                measurement is not None
+            ), "measurement must be specified when data is a numpy array"
+            assert (
+                time_key is not None
+            ), "time_key must be specified when data is a numpy array"
+            serializer = NumpySerializer(
+                data,
+                measurement,
+                time_key,
+                tag_keys,
+                global_tags,
+                time_precision=self._precision,
+            )
+            if chunk_size == -1:
+                chunk_size = len(data)
+            for lines in serializer.serialize(chunk_size or self._chunk_size):
+                await self.write(lines)
+        else:
+            raise TypeError(
+                f"data must be pandas.DataFrame, numpy array, got {type(data)}"
+            )
+
+    async def write(self, line_protocol: str):
+        """将line-protocol数组写入influxdb
+
+        Args:
+            line_protocol: 待写入的数据,以line-protocol数组形式存在
+
+        """
+        # todo: add raise error declaration
+        if self._enable_compress:
+            line_protocol_ = gzip.compress(line_protocol.encode("utf-8"))
+        else:
+            line_protocol_ = line_protocol
+
+        async with ClientSession() as session:
+            async with session.post(
+                self._write_url, data=line_protocol_, headers=self._write_headers
+            ) as resp:
+                if resp.status != 204:
+                    err = await resp.json()
+                    logger.warning(
+                        "influxdb write error when processing: %s, err code: %s, message: %s",
+                        {line_protocol[:100]},
+                        err["code"],
+                        err["message"],
+                    )
+                    logger.debug("data caused error:%s", line_protocol)
+                    raise InfluxDBWriteError(
+                        f"influxdb write failed, err: {err['message']}"
+                    )
+
+    async def query(self, flux: Union[Flux, str], deserializer: Callable = None) -> Any:
+        """flux查询
+
+        flux查询结果是一个以annotated csv格式存储的数据,例如:
+        ```
+        ,result,table,_time,code,amount,close,factor,high,low,open,volume
+        ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000
+        ```
+
+        上述`result`中,事先通过Flux.keep()限制了返回的字段为_time,code,amount,close,factor,high,low,open,volume。influxdb查询返回结果时,总是按照字段名称升序排列。此外,总是会额外地返回_result, table两个字段。
+
+        如果传入了deserializer,则会调用deserializer将其解析成为python对象。否则,返回bytes数据。
+
+        Args:
+            flux: flux查询语句
+            deserializer: 反序列化函数
+
+        Returns:
+            如果未提供反序列化函数,则返回结果为bytes array(如果指定了compress=True,返回结果为gzip解压缩后的bytes array),否则返回反序列化后的python对象
+        """
+        if isinstance(flux, Flux):
+            flux = str(flux)
+
+        async with ClientSession() as session:
+            async with session.post(
+                self._query_url, data=flux, headers=self._query_headers
+            ) as resp:
+                if resp.status != 200:
+                    err = await resp.json()
+                    logger.warning(
+                        f"influxdb query error: {err} when processing {flux[:500]}"
+                    )
+                    logger.debug("data caused error:%s", flux)
+                    raise InfluxDBQueryError(
+                        f"influxdb query failed, status code: {err['message']}"
+                    )
+                else:
+                    # auto-unzip
+                    body = await resp.read()
+                    if deserializer:
+                        try:
+                            return deserializer(body)
+                        except Exception as e:
+                            logger.exception(e)
+                            logger.warning(
+                                "failed to deserialize data: %s, the query is:%s",
+                                body,
+                                flux[:500],
+                            )
+                            raise
+                    else:
+                        return body
+
+    async def drop_measurement(self, measurement: str):
+        """从influxdb中删除一个measurement
+
+        调用此方法后,实际上该measurement仍然存在,只是没有数据。
+
+        """
+        # todo: add raise error declaration
+        await self.delete(measurement, arrow.now().naive)
+
+    async def delete(
+        self,
+        measurement: str,
+        stop: datetime.datetime,
+        tags: Optional[Dict[str, str]] = {},
+        start: datetime.datetime = None,
+        precision: str = "s",
+    ):
+        """删除influxdb中指定时间段内的数据
+
+        关于参数,请参见[Flux.delete][omicron.dal.influx.flux.Flux.delete]。
+
+        Args:
+            measurement: 指定measurement名字
+            stop: 待删除记录的结束时间
+            start: 待删除记录的开始时间,如果未指定,则使用EPOCH_START
+            tags: 按tag进行过滤的条件
+            precision: 用以格式化起始和结束时间。
+
+        Raises:
+            InfluxDeleteError: 如果删除失败,则抛出此异常
+        """
+        # todo: add raise error declaration
+        command = Flux().delete(
+            measurement, stop, tags, start=start, precision=precision
+        )
+
+        async with ClientSession() as session:
+            async with session.post(
+                self._delete_url, data=json.dumps(command), headers=self._delete_headers
+            ) as resp:
+                if resp.status != 204:
+                    err = await resp.json()
+                    logger.warning(
+                        "influxdb delete error: %s when processing command %s",
+                        err["message"],
+                        command,
+                    )
+                    raise InfluxDeleteError(
+                        f"influxdb delete failed, status code: {err['message']}"
+                    )
+
+    async def list_buckets(self) -> List[Dict]:
+        """列出influxdb中对应token能看到的所有的bucket
+
+        Returns:
+            list of buckets, each bucket is a dict with keys:
+            ```
+            id
+            orgID, a 16 bytes hex string
+            type, system or user
+            description
+            name
+            retentionRules
+            createdAt
+            updatedAt
+            links
+            labels
+        ```
+        """
+        url = f"{self._url}/api/v2/buckets"
+        headers = {"Authorization": f"Token {self._token}"}
+        async with ClientSession() as session:
+            async with session.get(url, headers=headers) as resp:
+                if resp.status != 200:
+                    err = await resp.json()
+                    raise InfluxSchemaError(
+                        f"influxdb list bucket failed, status code: {err['message']}"
+                    )
+                else:
+                    return (await resp.json())["buckets"]
+
+    async def delete_bucket(self, bucket_id: str = None):
+        """删除influxdb中指定bucket
+
+        Args:
+            bucket_id: 指定bucket的id。如果为None,则会删除本client对应的bucket。
+        """
+        if bucket_id is None:
+            buckets = await self.list_buckets()
+            for bucket in buckets:
+                if bucket["type"] == "user" and bucket["name"] == self._bucket:
+                    bucket_id = bucket["id"]
+                    break
+            else:
+                raise BadParameterError(
+                    "bucket_id is None, and we can't find bucket with name: %s"
+                    % self._bucket
+                )
+
+        url = f"{self._url}/api/v2/buckets/{bucket_id}"
+        headers = {"Authorization": f"Token {self._token}"}
+        async with ClientSession() as session:
+            async with session.delete(url, headers=headers) as resp:
+                if resp.status != 204:
+                    err = await resp.json()
+                    logger.warning(
+                        "influxdb delete bucket error: %s when processing command %s",
+                        err["message"],
+                        bucket_id,
+                    )
+                    raise InfluxSchemaError(
+                        f"influxdb delete bucket failed, status code: {err['message']}"
+                    )
+
+    async def create_bucket(
+        self, description=None, retention_rules: List[Dict] = None, org_id: str = None
+    ) -> str:
+        """创建influxdb中指定bucket
+
+        Args:
+            description: 指定bucket的描述
+            org_id: 指定bucket所属的组织id,如果未指定,则使用本client对应的组织id。
+
+        Raises:
+            InfluxSchemaError: 当influxdb返回错误时,比如重复创建bucket等,会抛出此异常
+        Returns:
+            新创建的bucket的id
+        """
+        if org_id is None:
+            org_id = await self.query_org_id()
+
+        url = f"{self._url}/api/v2/buckets"
+        headers = {"Authorization": f"Token {self._token}"}
+        data = {
+            "name": self._bucket,
+            "orgID": org_id,
+            "description": description,
+            "retentionRules": retention_rules,
+        }
+        async with ClientSession() as session:
+            async with session.post(
+                url, data=json.dumps(data), headers=headers
+            ) as resp:
+                if resp.status != 201:
+                    err = await resp.json()
+                    logger.warning(
+                        "influxdb create bucket error: %s when processing command %s",
+                        err["message"],
+                        data,
+                    )
+                    raise InfluxSchemaError(
+                        f"influxdb create bucket failed, status code: {err['message']}"
+                    )
+                else:
+                    result = await resp.json()
+                    return result["id"]
+
+    async def list_organizations(self, offset: int = 0, limit: int = 100) -> List[Dict]:
+        """列出本客户端允许查询的所有组织
+
+        Args:
+            offset : 分页起点
+            limit : 每页size
+
+        Raises:
+            InfluxSchemaError: influxdb返回的错误
+
+        Returns:
+            list of organizations, each organization is a dict with keys:
+            ```
+            id      : the id of the org
+            links
+            name    : the name of the org
+            description
+            createdAt
+            updatedAt
+            ```
+        """
+        url = f"{self._url}/api/v2/orgs?offset={offset}&limit={limit}"
+        headers = {"Authorization": f"Token {self._token}"}
+
+        async with ClientSession() as session:
+            async with session.get(url, headers=headers) as resp:
+                if resp.status != 200:
+                    err = await resp.json()
+                    logger.warning("influxdb query orgs err: %s", err["message"])
+                    raise InfluxSchemaError(
+                        f"influxdb query orgs failed, status code: {err['message']}"
+                    )
+                else:
+                    return (await resp.json())["orgs"]
+
+    async def query_org_id(self, name: str = None) -> str:
+        """通过组织名查找组织id
+
+        只能查到本客户端允许查询的组织。如果name未提供,则使用本客户端创建时传入的组织名。
+
+        Args:
+            name: 指定组织名
+
+        Returns:
+            组织id
+        """
+        if name is None:
+            name = self._org
+        orgs = await self.list_organizations()
+        for org in orgs:
+            if org["name"] == name:
+                return org["id"]
+
+        raise BadParameterError(f"can't find org with name: {name}")
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+__init__(self, url, token, bucket, org=None, enable_compress=False, chunk_size=5000, precision='s') + + + special + + +

+ +
+ +

Construct an InfluxClient bound to an InfluxDB 2.x server, bucket and organization.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
url[type]

Base URL of the InfluxDB server.

required
token[type]

Access token used for authentication.

required
bucket[type]

Name of the bucket to operate on.

required
org[type]

Name of the organization. Defaults to None.

None
enable_compress[type]

Whether to gzip-compress request/response bodies. Defaults to False.

False
chunk_sizeint

number of lines to be saved in one request

5000
precisionstr

Time precision to use; one of 's', 'ms', 'us'.

's'
+
+ Source code in omicron/dal/influx/influxclient.py +
def __init__(
+    self,
+    url: str,
+    token: str,
+    bucket: str,
+    org: str = None,
+    enable_compress=False,
+    chunk_size: int = 5000,
+    precision: str = "s",
+):
+    """[summary]
+
+    Args:
+        url ([type]): [description]
+        token ([type]): [description]
+        bucket ([type]): [description]
+        org ([type], optional): [description]. Defaults to None.
+        enable_compress ([type], optional): [description]. Defaults to False.
+        chunk_size: number of lines to be saved in one request
+        precision: 支持的时间精度
+    """
+    self._url = url
+    self._bucket = bucket
+    self._enable_compress = enable_compress
+    self._org = org
+    self._org_id = None  # 需要时通过查询获取,此后不再更新
+    self._token = token
+
+    # influxdb 2.0起支持的时间精度有:ns, us, ms, s。本客户端只支持s, ms和us
+    self._precision = precision.lower()
+    if self._precision not in ["s", "ms", "us"]:  # pragma: no cover
+        raise ValueError("precision must be one of ['s', 'ms', 'us']")
+
+    self._chunk_size = chunk_size
+
+    # write
+    self._write_url = f"{self._url}/api/v2/write?org={self._org}&bucket={self._bucket}&precision={self._precision}"
+
+    self._write_headers = {
+        "Content-Type": "text/plain; charset=utf-8",
+        "Authorization": f"Token {token}",
+        "Accept": "application/json",
+    }
+
+    if self._enable_compress:
+        self._write_headers["Content-Encoding"] = "gzip"
+
+    self._query_url = f"{self._url}/api/v2/query?org={self._org}"
+    self._query_headers = {
+        "Authorization": f"Token {token}",
+        "Content-Type": "application/vnd.flux",
+        # influx查询结果格式,无论如何指定(或者不指定),在2.1中始终是csv格式
+        "Accept": "text/csv",
+    }
+
+    if self._enable_compress:
+        self._query_headers["Accept-Encoding"] = "gzip"
+
+    self._delete_url = (
+        f"{self._url}/api/v2/delete?org={self._org}&bucket={self._bucket}"
+    )
+    self._delete_headers = {
+        "Authorization": f"Token {token}",
+        "Content-Type": "application/json",
+    }
+
+
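A minimal construction sketch; the URL, token, bucket and org below are placeholders rather than values from this project. The resulting client is reused in the sketches for the other methods on this page.

client = InfluxClient(
    url="http://localhost:8086",
    token="my-token",
    bucket="my-bucket",
    org="my-org",
    precision="s",
)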
+
+ +
+ + + +
+ + + +

+create_bucket(self, description=None, retention_rules=None, org_id=None) + + + async + + +

+ +
+ +

Create this client's bucket in InfluxDB.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
description

Description for the bucket.

None
org_idstr

ID of the organization that owns the bucket; if not given, the organization id of this client is used.

None
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
InfluxSchemaError

Raised when InfluxDB returns an error, for example when the bucket already exists.

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
str

ID of the newly created bucket.

+
+ Source code in omicron/dal/influx/influxclient.py +
async def create_bucket(
+    self, description=None, retention_rules: List[Dict] = None, org_id: str = None
+) -> str:
+    """创建influxdb中指定bucket
+
+    Args:
+        description: 指定bucket的描述
+        org_id: 指定bucket所属的组织id,如果未指定,则使用本client对应的组织id。
+
+    Raises:
+        InfluxSchemaError: 当influxdb返回错误时,比如重复创建bucket等,会抛出此异常
+    Returns:
+        新创建的bucket的id
+    """
+    if org_id is None:
+        org_id = await self.query_org_id()
+
+    url = f"{self._url}/api/v2/buckets"
+    headers = {"Authorization": f"Token {self._token}"}
+    data = {
+        "name": self._bucket,
+        "orgID": org_id,
+        "description": description,
+        "retentionRules": retention_rules,
+    }
+    async with ClientSession() as session:
+        async with session.post(
+            url, data=json.dumps(data), headers=headers
+        ) as resp:
+            if resp.status != 201:
+                err = await resp.json()
+                logger.warning(
+                    "influxdb create bucket error: %s when processing command %s",
+                    err["message"],
+                    data,
+                )
+                raise InfluxSchemaError(
+                    f"influxdb create bucket failed, status code: {err['message']}"
+                )
+            else:
+                result = await resp.json()
+                return result["id"]
+
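A hedged sketch, assuming client is an InfluxClient constructed as above and the call runs inside an async function; the retention-rule shape follows the InfluxDB v2 bucket API and is an assumption, not something taken from this source.

# keep data for 30 days (assumed retention-rule format)
bucket_id = await client.create_bucket(
    description="bars data",
    retention_rules=[{"type": "expire", "everySeconds": 30 * 24 * 3600}],
)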
+
+
+ +
+ + + +
+ + + +

+delete(self, measurement, stop, tags={}, start=None, precision='s') + + + async + + +

+ +
+ +

Delete data within the given time range from InfluxDB.

+

For the parameters, see Flux.delete.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
measurementstr

Name of the measurement.

required
stopdatetime

End time of the records to delete.

required
startdatetime

Start time of the records to delete; if not given, EPOCH_START is used.

None
tagsOptional[Dict[str, str]]

Tag-based filter conditions.

{}
precisionstr

Precision used to format the start and end times.

's'
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
InfluxDeleteError

Raised if the delete fails.

+
+ Source code in omicron/dal/influx/influxclient.py +
async def delete(
+    self,
+    measurement: str,
+    stop: datetime.datetime,
+    tags: Optional[Dict[str, str]] = {},
+    start: datetime.datetime = None,
+    precision: str = "s",
+):
+    """删除influxdb中指定时间段内的数据
+
+    关于参数,请参见[Flux.delete][omicron.dal.influx.flux.Flux.delete]。
+
+    Args:
+        measurement: 指定measurement名字
+        stop: 待删除记录的结束时间
+        start: 待删除记录的开始时间,如果未指定,则使用EPOCH_START
+        tags: 按tag进行过滤的条件
+        precision: 用以格式化起始和结束时间。
+
+    Raises:
+        InfluxDeleteError: 如果删除失败,则抛出此异常
+    """
+    # todo: add raise error declaration
+    command = Flux().delete(
+        measurement, stop, tags, start=start, precision=precision
+    )
+
+    async with ClientSession() as session:
+        async with session.post(
+            self._delete_url, data=json.dumps(command), headers=self._delete_headers
+        ) as resp:
+            if resp.status != 204:
+                err = await resp.json()
+                logger.warning(
+                    "influxdb delete error: %s when processing command %s",
+                    err["message"],
+                    command,
+                )
+                raise InfluxDeleteError(
+                    f"influxdb delete failed, status code: {err['message']}"
+                )
+
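A hedged sketch, assuming client is an InfluxClient constructed as above and the call runs inside an async function; the measurement name and tag value are made up.

import datetime

# delete this stock's records from EPOCH_START up to now
await client.delete(
    "stock_bars_1d",
    datetime.datetime.now(),
    tags={"code": "000001.XSHE"},
)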
+
+
+ +
+ + + +
+ + + +

+delete_bucket(self, bucket_id=None) + + + async + + +

+ +
+ +

Delete the specified bucket from InfluxDB.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bucket_idstr

ID of the bucket. If None, the bucket this client is bound to is deleted.

None
+
+ Source code in omicron/dal/influx/influxclient.py +
async def delete_bucket(self, bucket_id: str = None):
+    """删除influxdb中指定bucket
+
+    Args:
+        bucket_id: 指定bucket的id。如果为None,则会删除本client对应的bucket。
+    """
+    if bucket_id is None:
+        buckets = await self.list_buckets()
+        for bucket in buckets:
+            if bucket["type"] == "user" and bucket["name"] == self._bucket:
+                bucket_id = bucket["id"]
+                break
+        else:
+            raise BadParameterError(
+                "bucket_id is None, and we can't find bucket with name: %s"
+                % self._bucket
+            )
+
+    url = f"{self._url}/api/v2/buckets/{bucket_id}"
+    headers = {"Authorization": f"Token {self._token}"}
+    async with ClientSession() as session:
+        async with session.delete(url, headers=headers) as resp:
+            if resp.status != 204:
+                err = await resp.json()
+                logger.warning(
+                    "influxdb delete bucket error: %s when processing command %s",
+                    err["message"],
+                    bucket_id,
+                )
+                raise InfluxSchemaError(
+                    f"influxdb delete bucket failed, status code: {err['message']}"
+                )
+
+
+
+ +
+ + + +
+ + + +

+drop_measurement(self, measurement) + + + async + + +

+ +
+ +

Delete a measurement from InfluxDB.

+

After calling this method the measurement actually still exists, it just holds no data.

+ +
+ Source code in omicron/dal/influx/influxclient.py +
async def drop_measurement(self, measurement: str):
+    """从influxdb中删除一个measurement
+
+    调用此方法后,实际上该measurement仍然存在,只是没有数据。
+
+    """
+    # todo: add raise error declaration
+    await self.delete(measurement, arrow.now().naive)
+
+
+
+ +
+ + + +
+ + + +

+list_buckets(self) + + + async + + +

+ +
+ +

List all buckets in InfluxDB that are visible to this client's token.

+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
list of buckets, each bucket is a dict with keys:

id
orgID, a 16 bytes hex string
type, system or user
description
name
retentionRules
createdAt
updatedAt
links
labels

+ +
+ Source code in omicron/dal/influx/influxclient.py +
async def list_buckets(self) -> List[Dict]:
+    """列出influxdb中对应token能看到的所有的bucket
+
+    Returns:
+        list of buckets, each bucket is a dict with keys:
+        ```
+        id
+        orgID, a 16 bytes hex string
+        type, system or user
+        description
+        name
+        retentionRules
+        createdAt
+        updatedAt
+        links
+        labels
+    ```
+    """
+    url = f"{self._url}/api/v2/buckets"
+    headers = {"Authorization": f"Token {self._token}"}
+    async with ClientSession() as session:
+        async with session.get(url, headers=headers) as resp:
+            if resp.status != 200:
+                err = await resp.json()
+                raise InfluxSchemaError(
+                    f"influxdb list bucket failed, status code: {err['message']}"
+                )
+            else:
+                return (await resp.json())["buckets"]
+
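A short sketch, assuming client is an InfluxClient constructed as above and the call runs inside an async function; it builds a name-to-id map of user buckets, mirroring the lookup delete_bucket performs internally.

buckets = await client.list_buckets()
bucket_ids = {b["name"]: b["id"] for b in buckets if b["type"] == "user"}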
+
+
+ +
+ + + +
+ + + +

+list_organizations(self, offset=0, limit=100) + + + async + + +

+ +
+ +

List all organizations this client is allowed to query.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
offset

Pagination offset.

0
limit

Page size.

100
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
InfluxSchemaError

Error returned by InfluxDB.

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
list of organizations, each organization is a dict with keys:

id      : the id of the org
links
name    : the name of the org
description
createdAt
updatedAt
+
+
+ Source code in omicron/dal/influx/influxclient.py +
async def list_organizations(self, offset: int = 0, limit: int = 100) -> List[Dict]:
+    """列出本客户端允许查询的所有组织
+
+    Args:
+        offset : 分页起点
+        limit : 每页size
+
+    Raises:
+        InfluxSchemaError: influxdb返回的错误
+
+    Returns:
+        list of organizations, each organization is a dict with keys:
+        ```
+        id      : the id of the org
+        links
+        name    : the name of the org
+        description
+        createdAt
+        updatedAt
+        ```
+    """
+    url = f"{self._url}/api/v2/orgs?offset={offset}&limit={limit}"
+    headers = {"Authorization": f"Token {self._token}"}
+
+    async with ClientSession() as session:
+        async with session.get(url, headers=headers) as resp:
+            if resp.status != 200:
+                err = await resp.json()
+                logger.warning("influxdb query orgs err: %s", err["message"])
+                raise InfluxSchemaError(
+                    f"influxdb query orgs failed, status code: {err['message']}"
+                )
+            else:
+                return (await resp.json())["orgs"]
+
+
+
+ +
+ + + +
+ + + +

+query(self, flux, deserializer=None) + + + async + + +

+ +
+ +

Run a Flux query.

+

The result of a Flux query is data in annotated-csv format, for example:

,result,table,_time,code,amount,close,factor,high,low,open,volume
,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000
+

+

In the result above, Flux.keep() was used beforehand to restrict the returned fields to _time, code, amount, close, factor, high, low, open and volume. InfluxDB always returns fields sorted by name in ascending order, and always adds the extra _result and table fields.

+

If a deserializer is passed in, it is called to parse the result into Python objects; otherwise raw bytes are returned.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
fluxUnion[omicron.dal.influx.flux.Flux, str]

The Flux query.

required
deserializerCallable

Deserialization function.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Any

If no deserializer is provided, the result is a bytes array (already gzip-decompressed when compress=True); otherwise the deserialized Python object is returned.

+
+ Source code in omicron/dal/influx/influxclient.py +
async def query(self, flux: Union[Flux, str], deserializer: Callable = None) -> Any:
+    """flux查询
+
+    flux查询结果是一个以annotated csv格式存储的数据,例如:
+    ```
+    ,result,table,_time,code,amount,close,factor,high,low,open,volume
+    ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000
+    ```
+
+    上述`result`中,事先通过Flux.keep()限制了返回的字段为_time,code,amount,close,factor,high,low,open,volume。influxdb查询返回结果时,总是按照字段名称升序排列。此外,总是会额外地返回_result, table两个字段。
+
+    如果传入了deserializer,则会调用deserializer将其解析成为python对象。否则,返回bytes数据。
+
+    Args:
+        flux: flux查询语句
+        deserializer: 反序列化函数
+
+    Returns:
+        如果未提供反序列化函数,则返回结果为bytes array(如果指定了compress=True,返回结果为gzip解压缩后的bytes array),否则返回反序列化后的python对象
+    """
+    if isinstance(flux, Flux):
+        flux = str(flux)
+
+    async with ClientSession() as session:
+        async with session.post(
+            self._query_url, data=flux, headers=self._query_headers
+        ) as resp:
+            if resp.status != 200:
+                err = await resp.json()
+                logger.warning(
+                    f"influxdb query error: {err} when processing {flux[:500]}"
+                )
+                logger.debug("data caused error:%s", flux)
+                raise InfluxDBQueryError(
+                    f"influxdb query failed, status code: {err['message']}"
+                )
+            else:
+                # auto-unzip
+                body = await resp.read()
+                if deserializer:
+                    try:
+                        return deserializer(body)
+                    except Exception as e:
+                        logger.exception(e)
+                        logger.warning(
+                            "failed to deserialize data: %s, the query is:%s",
+                            body,
+                            flux[:500],
+                        )
+                        raise
+                else:
+                    return body
+
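A hedged sketch, assuming client is an InfluxClient constructed as above and the call runs inside an async function; the bucket and measurement names are placeholders, and the raw Flux string stands in for a query built with the Flux helpers documented above.

from omicron.dal.influx.serialize import DataframeDeserializer

flux = """from(bucket: "my-bucket")
  |> range(start: 2023-01-01T00:00:00Z, stop: 2023-01-02T00:00:00Z)
  |> filter(fn: (r) => r["_measurement"] == "stock_bars_1d")
  |> pivot(columnKey: ["_field"], rowKey: ["_time"], valueColumn: "_value")"""

# deserialize the annotated csv into a pandas DataFrame
df = await client.query(flux, DataframeDeserializer(time_col="_time"))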
+
+
+ +
+ + + +
+ + + +

+query_org_id(self, name=None) + + + async + + +

+ +
+ +

Look up an organization id by name.

+

Only organizations this client is allowed to query can be looked up. If name is not given, the organization name passed when the client was created is used.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

The organization name.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
str

The organization id.

+
+ Source code in omicron/dal/influx/influxclient.py +
async def query_org_id(self, name: str = None) -> str:
+    """通过组织名查找组织id
+
+    只能查到本客户端允许查询的组织。如果name未提供,则使用本客户端创建时传入的组织名。
+
+    Args:
+        name: 指定组织名
+
+    Returns:
+        组织id
+    """
+    if name is None:
+        name = self._org
+    orgs = await self.list_organizations()
+    for org in orgs:
+        if org["name"] == name:
+            return org["id"]
+
+    raise BadParameterError(f"can't find org with name: {name}")
+
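A short sketch, assuming client is an InfluxClient constructed as above and the call runs inside an async function:

# resolve the id of the org the client was created with
org_id = await client.query_org_id()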
+
+
+ +
+ + + +
+ + + +

+save(self, data, measurement=None, tag_keys=[], time_key=None, global_tags={}, chunk_size=None) + + + async + + +

+ +
+ +

save data into influxdb

+

if data is a pandas.DataFrame or numpy structured array, it will be converted to line protocol and saved. If data is a str, use the write method instead.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dataUnion[numpy.ndarray, pandas.core.frame.DataFrame]

data to be saved

required
measurementstr

the name of measurement

None
tag_keysList[str]

which columns name will be used as tags

[]
chunk_sizeint

number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to self._chunk_size

None
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
InfluxDBWriteError

if write failed

+
+ Source code in omicron/dal/influx/influxclient.py +
async def save(
+    self,
+    data: Union[np.ndarray, DataFrame],
+    measurement: str = None,
+    tag_keys: List[str] = [],
+    time_key: str = None,
+    global_tags: Dict = {},
+    chunk_size: int = None,
+) -> None:
+    """save `data` into influxdb
+
+    if `data` is a pandas.DataFrame or numpy structured array, it will be converted to line protocol and saved. If `data` is str, use the `write` method instead.
+
+    Args:
+        data: data to be saved
+        measurement: the name of measurement
+        tag_keys: which columns name will be used as tags
+        chunk_size: number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to `self._chunk_size`
+
+    Raises:
+        InfluxDBWriteError: if write failed
+
+    """
+    # todo: add more errors raise
+    if isinstance(data, DataFrame):
+        assert (
+            measurement is not None
+        ), "measurement must be specified when data is a DataFrame"
+
+        if tag_keys:
+        assert set(tag_keys) <= set(
+                data.columns.tolist()
+            ), "tag_keys must be in data.columns"
+
+        serializer = DataframeSerializer(
+            data,
+            measurement,
+            time_key,
+            tag_keys,
+            global_tags,
+            precision=self._precision,
+        )
+        if chunk_size == -1:
+            chunk_size = len(data)
+
+        for lines in serializer.serialize(chunk_size or self._chunk_size):
+            await self.write(lines)
+    elif isinstance(data, np.ndarray):
+        assert (
+            measurement is not None
+        ), "measurement must be specified when data is a numpy array"
+        assert (
+            time_key is not None
+        ), "time_key must be specified when data is a numpy array"
+        serializer = NumpySerializer(
+            data,
+            measurement,
+            time_key,
+            tag_keys,
+            global_tags,
+            time_precision=self._precision,
+        )
+        if chunk_size == -1:
+            chunk_size = len(data)
+        for lines in serializer.serialize(chunk_size or self._chunk_size):
+            await self.write(lines)
+    else:
+        raise TypeError(
+            f"data must be pandas.DataFrame, numpy array, got {type(data)}"
+        )
+
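A hedged sketch, assuming client is an InfluxClient constructed as above and the call runs inside an async function; the measurement, column names and values are made up, and it assumes DataframeSerializer (not shown on this page) accepts a datetime column named by time_key.

import pandas as pd

bars = pd.DataFrame(
    {
        "frame": pd.to_datetime(["2023-01-03", "2023-01-04"]),
        "code": ["000001.XSHE", "000001.XSHE"],
        "close": [13.5, 13.8],
    }
)

# "code" becomes a tag, "frame" supplies the timestamps
await client.save(bars, measurement="stock_bars_1d", tag_keys=["code"], time_key="frame")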
+
+
+ +
+ + + +
+ + + +

+write(self, line_protocol) + + + async + + +

+ +
+ +

Write line-protocol data to InfluxDB.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
line_protocolstr

The data to write, as line-protocol lines.

required
+
+ Source code in omicron/dal/influx/influxclient.py +
async def write(self, line_protocol: str):
+    """将line-protocol数组写入influxdb
+
+    Args:
+        line_protocol: 待写入的数据,以line-protocol数组形式存在
+
+    """
+    # todo: add raise error declaration
+    if self._enable_compress:
+        line_protocol_ = gzip.compress(line_protocol.encode("utf-8"))
+    else:
+        line_protocol_ = line_protocol
+
+    async with ClientSession() as session:
+        async with session.post(
+            self._write_url, data=line_protocol_, headers=self._write_headers
+        ) as resp:
+            if resp.status != 204:
+                err = await resp.json()
+                logger.warning(
+                    "influxdb write error when processing: %s, err code: %s, message: %s",
+                    {line_protocol[:100]},
+                    err["code"],
+                    err["message"],
+                )
+                logger.debug("data caused error:%s", line_protocol)
+                raise InfluxDBWriteError(
+                    f"influxdb write failed, err: {err['message']}"
+                )
+
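A hedged sketch, assuming client is an InfluxClient constructed as above (default precision "s") and the call runs inside an async function; the measurement, tag, fields and second-precision timestamps are made up.

lines = "\n".join(
    [
        "stock_bars_1d,code=000001.XSHE close=13.5,volume=1000000 1672704000",
        "stock_bars_1d,code=000001.XSHE close=13.8,volume=1200000 1672790400",
    ]
)

await client.write(lines)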
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/dal/serialize/index.html b/2.0.0/api/dal/serialize/index.html new file mode 100644 index 00000000..d10245ce --- /dev/null +++ b/2.0.0/api/dal/serialize/index.html @@ -0,0 +1,1596 @@ + + + + + + + + + + + + + + + + Serialize - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + + +

Serialize

+ +

Serializer and Deserializer

+

DataframeDeserializer

+ + +
+ + +
+ + +
+ Source code in omicron/dal/influx/serialize.py +
class DataframeDeserializer(Serializer):
+    def __init__(
+        self,
+        sort_values: Union[str, List[str]] = None,
+        encoding: str = "utf-8",
+        names: List[str] = None,
+        usecols: Union[List[int], List[str]] = None,
+        dtype: dict = None,
+        time_col: Union[int, str] = None,
+        sep: str = ",",
+        header: Union[int, List[int], str] = "infer",
+        engine: str = None,
+        infer_datetime_format=True,
+        lineterminator: str = None,
+        converters: dict = None,
+        skipfooter=0,
+        index_col: Union[int, str, List[int], List[str], bool] = None,
+        skiprows: Union[int, List[int], Callable] = None,
+        **kwargs,
+    ):
+        """construct a deserializer which converts a csv-like bytes array to pandas.DataFrame
+
+        the args are the same as pandas.read_csv. for details, please refer to the official doc: [pandas.read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html)
+
+        for performance consideration, please specify the following args:
+            - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested.
+
+            - specify dtype when possible
+
+        use `usecols` to specify the columns to read, and `names` to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line.
+
+        when `names` is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in `names` to access the data (instead of which in `usecols`).
+
+        Examples:
+            >>> data = ",result,table,_time,code,name\\r\\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,国联证券"
+            >>> des = DataframeDeserializer(names=["_", "result", "table", "frame", "code", "name"], usecols=["frame", "code", "name"])
+            >>> des(data)
+                              frame         code  name
+            0  2019-01-01T09:31:00Z  000002.XSHE  国联证券
+
+        Args:
+            sort_values: sort the dataframe by the specified columns
+            encoding: if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array
+            sep: the separator/delimiter of each fields
+            header: the row number of the header, default is 'infer'
+            names: the column names of the dataframe
+            index_col: the column number or name of the index column
+            usecols: the column name of the columns to use
+            dtype: the dtype of the columns
+            engine: the engine of the csv file, default is None
+            converters: specify converter for columns.
+            skiprows: the row number to skip
+            skipfooter: the row number to skip at the end of the file
+            time_col: the columns to parse as dates
+            infer_datetime_format: whether to infer the datetime format
+            lineterminator: the line terminator of the csv file, only valid when engine is 'c'
+            kwargs: other arguments
+        """
+        self.sort_values = sort_values
+        self.encoding = encoding
+        self.sep = sep
+        self.header = header
+        self.names = names
+        self.index_col = index_col
+        self.usecols = usecols
+        self.dtype = dtype
+        self.engine = engine
+        self.converters = converters or {}
+        self.skiprows = skiprows
+        self.skipfooter = skipfooter
+        self.infer_datetime_format = infer_datetime_format
+
+        self.lineterminator = lineterminator
+        self.kwargs = kwargs
+
+        if names is not None:
+            self.header = 0
+
+        if time_col is not None:
+            self.converters[time_col] = lambda x: ciso8601.parse_datetime_as_naive(x)
+
+    def __call__(self, data: Union[str, bytes]) -> pd.DataFrame:
+        if isinstance(data, str):
+            # treat data as string
+            stream = io.StringIO(data)
+        else:
+            stream = io.StringIO(data.decode(self.encoding))
+
+        df = pd.read_csv(
+            stream,
+            sep=self.sep,
+            header=self.header,
+            names=self.names,
+            index_col=self.index_col,
+            usecols=self.usecols,
+            dtype=self.dtype,
+            engine=self.engine,
+            converters=self.converters,
+            skiprows=self.skiprows,
+            skipfooter=self.skipfooter,
+            infer_datetime_format=self.infer_datetime_format,
+            lineterminator=self.lineterminator,
+            **self.kwargs,
+        )
+
+        if self.usecols:
+            df = df[list(self.usecols)]
+        if self.sort_values is not None:
+            return df.sort_values(self.sort_values)
+        else:
+            return df
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+__init__(self, sort_values=None, encoding='utf-8', names=None, usecols=None, dtype=None, time_col=None, sep=',', header='infer', engine=None, infer_datetime_format=True, lineterminator=None, converters=None, skipfooter=0, index_col=None, skiprows=None, **kwargs) + + + special + + +

+ +
+ +

construct a deserializer which converts a csv-like bytes array to a pandas.DataFrame

+

the args are the same as pandas.read_csv. for details, please refer to the official doc: pandas.read_csv

+

For performance, please specify the following args when possible:

- engine = 'c' or 'pyarrow'. Note that 'pyarrow' is the fastest (it supports multi-threading) but may be error-prone; only use it when you have thoroughly tested it.
- dtype
+
+

use usecols to specify the columns to read, and names to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line.

+

when names is specified, it has to have the same length as the actual columns of the data. If this causes column renaming, then you should always use the column names specified in names to access the data (instead of those in usecols).

+ +

Examples:

+
>>> data = ",result,table,_time,code,name\r\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,国联证券"
+>>> des = DataframeDeserializer(names=["_", "result", "table", "frame", "code", "name"], usecols=["frame", "code", "name"])
+>>> des(data)
+                  frame         code  name
+0  2019-01-01T09:31:00Z  000002.XSHE  国联证券
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
sort_valuesUnion[str, List[str]]

sort the dataframe by the specified columns

None
encodingstr

if the data is bytes, encoding is required to decode it, because pandas.read_csv only handles string input

'utf-8'
sepstr

the separator/delimiter of each fields

','
headerUnion[int, List[int], str]

the row number of the header, default is 'infer'

'infer'
namesList[str]

the column names of the dataframe

None
index_colUnion[int, str, List[int], List[str], bool]

the column number or name of the index column

None
usecolsUnion[List[int], List[str]]

the column name of the columns to use

None
dtypedict

the dtype of the columns

None
enginestr

the engine of the csv file, default is None

None
convertersdict

specify converter for columns.

None
skiprowsUnion[int, List[int], Callable]

the row number to skip

None
skipfooter

the row number to skip at the end of the file

0
time_colUnion[int, str]

the columns to parse as dates

None
infer_datetime_format

whether to infer the datetime format

True
lineterminatorstr

the line terminator of the csv file, only valid when engine is 'c'

None
kwargs

other arguments

{}
+
+ Source code in omicron/dal/influx/serialize.py +
def __init__(
+    self,
+    sort_values: Union[str, List[str]] = None,
+    encoding: str = "utf-8",
+    names: List[str] = None,
+    usecols: Union[List[int], List[str]] = None,
+    dtype: dict = None,
+    time_col: Union[int, str] = None,
+    sep: str = ",",
+    header: Union[int, List[int], str] = "infer",
+    engine: str = None,
+    infer_datetime_format=True,
+    lineterminator: str = None,
+    converters: dict = None,
+    skipfooter=0,
+    index_col: Union[int, str, List[int], List[str], bool] = None,
+    skiprows: Union[int, List[int], Callable] = None,
+    **kwargs,
+):
+    """construct a deserializer which converts a csv-like bytes array to pandas.DataFrame
+
+    the args are the same as pandas.read_csv. for details, please refer to the official doc: [pandas.read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html)
+
+    for performance consideration, please specify the following args:
+        - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested.
+
+        - specify dtype when possible
+
+    use `usecols` to specify the columns to read, and `names` to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line.
+
+    when `names` is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in `names` to access the data (instead of which in `usecols`).
+
+    Examples:
+        >>> data = ",result,table,_time,code,name\\r\\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,国联证券"
+        >>> des = DataframeDeserializer(names=["_", "result", "table", "frame", "code", "name"], usecols=["frame", "code", "name"])
+        >>> des(data)
+                          frame         code  name
+        0  2019-01-01T09:31:00Z  000002.XSHE  国联证券
+
+    Args:
+        sort_values: sort the dataframe by the specified columns
+        encoding: if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array
+        sep: the separator/delimiter of each fields
+        header: the row number of the header, default is 'infer'
+        names: the column names of the dataframe
+        index_col: the column number or name of the index column
+        usecols: the column name of the columns to use
+        dtype: the dtype of the columns
+        engine: the engine of the csv file, default is None
+        converters: specify converter for columns.
+        skiprows: the row number to skip
+        skipfooter: the row number to skip at the end of the file
+        time_col: the columns to parse as dates
+        infer_datetime_format: whether to infer the datetime format
+        lineterminator: the line terminator of the csv file, only valid when engine is 'c'
+        kwargs: other arguments
+    """
+    self.sort_values = sort_values
+    self.encoding = encoding
+    self.sep = sep
+    self.header = header
+    self.names = names
+    self.index_col = index_col
+    self.usecols = usecols
+    self.dtype = dtype
+    self.engine = engine
+    self.converters = converters or {}
+    self.skiprows = skiprows
+    self.skipfooter = skipfooter
+    self.infer_datetime_format = infer_datetime_format
+
+    self.lineterminator = lineterminator
+    self.kwargs = kwargs
+
+    if names is not None:
+        self.header = 0
+
+    if time_col is not None:
+        self.converters[time_col] = lambda x: ciso8601.parse_datetime_as_naive(x)
+
+
+
+ +
+ + + + + +
+ +
+ +
+ +

NumpyDeserializer

+ + +
+ + +
+ + +
+ Source code in omicron/dal/influx/serialize.py +
class NumpyDeserializer(Serializer):
+    def __init__(
+        self,
+        dtype: List[tuple] = "float",
+        sort_values: Union[str, List[str]] = None,
+        use_cols: Union[List[str], List[int]] = None,
+        parse_date: Union[int, str] = "_time",
+        sep: str = ",",
+        encoding: str = "utf-8",
+        skip_rows: Union[int, List[int]] = 1,
+        header_line: int = 1,
+        comments: str = "#",
+        converters: Mapping[int, Callable] = None,
+    ):
+        """construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array
+
+        the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype.
+
+        by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g.
+
+        ```
+        dtype = [('col_1', 'datetime64[s]'), ('col_2', '<U12'), ('col_3', '<U4')]
+
+        ```
+
+        by default, the deserializer will try to convert every line from the very first line, if the very first lines contains comments and headers, these lines should be skipped by deserializer, you should set skip_rows to number of lines to skip.
+
+        for more information, please refer to [numpy.loadtxt](https://numpy.org/doc/stable/reference/generated/numpy.loadtxt.html)
+
+        Args:
+            dtype: dtype of the output numpy array.
+            sort_values: sort the output numpy array by the specified columns. If it's a string, then it's the name of the column, if it's a list of strings, then it's the names of the columns.
+            use_cols: use only the specified columns. If it's a list of strings, then it's the names of the columns (presented in raw data header line), if it's a list of integers, then it's the column index.
+            parse_date: by default we'll convert "_time" column into python datetime.datetime. Set it to None to turn off the conversion. ciso8601 is default parser. If you need to parse date but just don't like ciso8601, then you can turn off default parser (by set parse_date to None), and specify your own parser in converters.
+            sep: separator of each field
+            encoding: if the input is bytes, then encoding is used to decode the bytes to string.
+            skip_rows: required by np.loadtxt, skip the first n lines
+            header_line: which line contains header, started from 1. If you specify use_cols by list of string, then header line must be specified.
+            comments: required by np.loadtxt, skip the lines starting with this string
+            converters: required by np.loadtxt, a dict of column name to converter function.
+
+        """
+        self.dtype = dtype
+        self.use_cols = use_cols
+        self.sep = sep
+        self.encoding = encoding
+        self.skip_rows = skip_rows
+        self.comments = comments
+        self.converters = converters or {}
+        self.sort_values = sort_values
+        self.parse_date = parse_date
+        self.header_line = header_line
+
+        if header_line is None:
+            assert parse_date is None or isinstance(
+                parse_date, int
+            ), "parse_date must be an integer if data contains no header"
+
+            assert use_cols is None or isinstance(
+                use_cols[0], int
+            ), "use_cols must be a list of integers if data contains no header"
+
+            if len(self.converters) > 1:
+                assert all(
+                    [isinstance(x, int) for x in self.converters.keys()]
+                ), "converters must be a dict of column index to converter function, if there's no header"
+
+        self._parsed_headers = None
+
+    def _parse_header_once(self, stream):
+        """parse header and convert use_cols, if columns is specified in string. And if parse_date is required, add it into converters
+
+        Args:
+            stream : [description]
+
+        Raises:
+            SerializationError: [description]
+        """
+        if self.header_line is None or self._parsed_headers is not None:
+            return
+
+        try:
+            line = stream.readlines(self.header_line)[-1]
+            cols = line.strip().split(self.sep)
+            self._parsed_headers = cols
+
+            use_cols = self.use_cols
+            if use_cols is not None and isinstance(use_cols[0], str):
+                self.use_cols = [cols.index(col) for col in self.use_cols]
+
+            # convert keys of converters to int
+            converters = {cols.index(k): v for k, v in self.converters.items()}
+
+            self.converters = converters
+
+            if isinstance(self.parse_date, str):
+                parse_date = cols.index(self.parse_date)
+                if parse_date in self.converters.keys():
+                    logger.debug(
+                        "specify duplicated converter in both parse_date and converters for col %s, use converters.",
+                        self.parse_date,
+                    )
+                else:  # 增加parse_date到converters
+                    self.converters[
+                        parse_date
+                    ] = lambda x: ciso8601.parse_datetime_as_naive(x)
+
+            stream.seek(0)
+        except (IndexError, ValueError):
+            if line.strip() == "":
+                content = "".join(stream.readlines()).strip()
+                if len(content) > 0:
+                    raise SerializationError(
+                        f"specified header line {self.header_line} is empty"
+                    )
+                else:
+                    raise EmptyResult()
+            else:
+                raise SerializationError(f"bad header[{self.header_line}]: {line}")
+
+    def __call__(self, data: bytes) -> np.ndarray:
+        if self.encoding and isinstance(data, bytes):
+            stream = io.StringIO(data.decode(self.encoding))
+        else:
+            stream = io.StringIO(data)
+
+        try:
+            self._parse_header_once(stream)
+        except EmptyResult:
+            return np.empty((0,), dtype=self.dtype)
+
+        arr = np.loadtxt(
+            stream.readlines(),
+            delimiter=self.sep,
+            skiprows=self.skip_rows,
+            dtype=self.dtype,
+            usecols=self.use_cols,
+            converters=self.converters,
+            encoding=self.encoding,
+        )
+
+        # 如果返回仅一条记录,有时会出现 shape == ()
+        if arr.shape == tuple():
+            arr = arr.reshape((-1,))
+        if self.sort_values is not None and arr.size > 1:
+            return np.sort(arr, order=self.sort_values)
+        else:
+            return arr
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+__init__(self, dtype='float', sort_values=None, use_cols=None, parse_date='_time', sep=',', encoding='utf-8', skip_rows=1, header_line=1, comments='#', converters=None) + + + special + + +

+ +
+ +

construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array

+

the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype.

+

by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g.

+
1
dtype = [('col_1', 'datetime64[s]'), ('col_2', '<U12'), ('col_3', '<U4')]
+
+

by default, the deserializer tries to convert every line starting from the very first one; if the first lines contain comments or headers, they should be skipped by setting skip_rows to the number of lines to skip.

+

for more information, please refer to numpy.loadtxt

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dtypeList[tuple]

dtype of the output numpy array.

'float'
sort_valuesUnion[str, List[str]]

sort the output numpy array by the specified columns. If it's a string, then it's the name of the column, if it's a list of strings, then it's the names of the columns.

None
use_colsUnion[List[str], List[int]]

use only the specified columns. If it's a list of strings, then it's the names of the columns (presented in raw data header line), if it's a list of integers, then it's the column index.

None
parse_dateUnion[int, str]

by default we'll convert "_time" column into python datetime.datetime. Set it to None to turn off the conversion. ciso8601 is default parser. If you need to parse date but just don't like ciso8601, then you can turn off default parser (by set parse_date to None), and specify your own parser in converters.

'_time'
sepstr

separator of each field

','
encodingstr

if the input is bytes, then encoding is used to decode the bytes to string.

'utf-8'
skip_rowsUnion[int, List[int]]

required by np.loadtxt, skip the first n lines

1
header_lineint

which line contains header, started from 1. If you specify use_cols by list of string, then header line must be specified.

1
commentsstr

required by np.loadtxt, skip the lines starting with this string

'#'
convertersMapping[int, Callable]

required by np.loadtxt, a dict of column name to converter function.

None
+
+ Source code in omicron/dal/influx/serialize.py +
def __init__(
+    self,
+    dtype: List[tuple] = "float",
+    sort_values: Union[str, List[str]] = None,
+    use_cols: Union[List[str], List[int]] = None,
+    parse_date: Union[int, str] = "_time",
+    sep: str = ",",
+    encoding: str = "utf-8",
+    skip_rows: Union[int, List[int]] = 1,
+    header_line: int = 1,
+    comments: str = "#",
+    converters: Mapping[int, Callable] = None,
+):
+    """construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array
+
+    the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype.
+
+    by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g.
+
+    ```
+    dtype = [('col_1', 'datetime64[s]'), ('col_2', '<U12'), ('col_3', '<U4')]
+
+    ```
+
+    by default, the deserializer will try to convert every line from the very first line, if the very first lines contains comments and headers, these lines should be skipped by deserializer, you should set skip_rows to number of lines to skip.
+
+    for more information, please refer to [numpy.loadtxt](https://numpy.org/doc/stable/reference/generated/numpy.loadtxt.html)
+
+    Args:
+        dtype: dtype of the output numpy array.
+        sort_values: sort the output numpy array by the specified columns. If it's a string, then it's the name of the column, if it's a list of strings, then it's the names of the columns.
+        use_cols: use only the specified columns. If it's a list of strings, then it's the names of the columns (presented in raw data header line), if it's a list of integers, then it's the column index.
+        parse_date: by default we'll convert "_time" column into python datetime.datetime. Set it to None to turn off the conversion. ciso8601 is default parser. If you need to parse date but just don't like ciso8601, then you can turn off default parser (by set parse_date to None), and specify your own parser in converters.
+        sep: separator of each field
+        encoding: if the input is bytes, then encoding is used to decode the bytes to string.
+        skip_rows: required by np.loadtxt, skip the first n lines
+        header_line: which line contains header, started from 1. If you specify use_cols by list of string, then header line must be specified.
+        comments: required by np.loadtxt, skip the lines starting with this string
+        converters: required by np.loadtxt, a dict of column name to converter function.
+
+    """
+    self.dtype = dtype
+    self.use_cols = use_cols
+    self.sep = sep
+    self.encoding = encoding
+    self.skip_rows = skip_rows
+    self.comments = comments
+    self.converters = converters or {}
+    self.sort_values = sort_values
+    self.parse_date = parse_date
+    self.header_line = header_line
+
+    if header_line is None:
+        assert parse_date is None or isinstance(
+            parse_date, int
+        ), "parse_date must be an integer if data contains no header"
+
+        assert use_cols is None or isinstance(
+            use_cols[0], int
+        ), "use_cols must be a list of integers if data contains no header"
+
+        if len(self.converters) > 1:
+            assert all(
+                [isinstance(x, int) for x in self.converters.keys()]
+            ), "converters must be a dict of column index to converter function, if there's no header"
+
+    self._parsed_headers = None
+
+
+
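A minimal usage sketch (not part of the original docs): it assumes a CSV payload with a header row and a `_time` column, similar to what an InfluxDB query returns; the column names and dtypes below are made up for illustration.

from omicron.dal.influx.serialize import NumpyDeserializer

# hypothetical CSV payload; in practice this comes from an InfluxDB query response
data = b"_time,code,close\n2022-09-08T00:00:00Z,000001.XSHE,10.2\n2022-09-09T00:00:00Z,000001.XSHE,10.4\n"

des = NumpyDeserializer(
    dtype=[("frame", "datetime64[s]"), ("code", "<U12"), ("close", "<f4")],
    use_cols=["_time", "code", "close"],  # select/re-order fields by header name
    parse_date="_time",                   # parsed into naive datetime via ciso8601
    sort_values="frame",
)
bars = des(data)  # numpy structured array with fields frame/code/close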
+ +
+ + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/extensions/index.html b/2.0.0/api/extensions/index.html new file mode 100644 index 00000000..cd57f0ef --- /dev/null +++ b/2.0.0/api/extensions/index.html @@ -0,0 +1,2801 @@ + + + + + + + + + + + + + + + + Extensions - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + + +
+ + +
+ + + + +
+ + + + + + + + + + +
+ + + +

+ decimals + + + +

+ +
+ + + + +
+ + + + + + + + +
+ + + +

+math_round(x, digits) + + +

+ +
+ +

由于浮点数的表示问题,很多语言的round函数与数学上的round函数不一致。下面的函数结果与数学上的一致。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
xfloat

要进行四舍五入的数字

required
digitsint

小数点后保留的位数

required
+
+ Source code in omicron/extensions/decimals.py +
def math_round(x: float, digits: int):
+    """由于浮点数的表示问题,很多语言的round函数与数学上的round函数不一致。下面的函数结果与数学上的一致。
+
+    Args:
+        x: 要进行四舍五入的数字
+        digits: 小数点后保留的位数
+
+    """
+
+    return int(x * (10**digits) + copysign(0.5, x)) / (10**digits)
+
+
+
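A quick contrast with Python's built-in round, assuming math_round is imported from omicron.extensions.decimals (a sketch, not from the original docs):

>>> from omicron.extensions.decimals import math_round
>>> round(2.5)           # banker's rounding
2
>>> math_round(2.5, 0)   # mathematical rounding
3.0
>>> math_round(0.125, 2)
0.13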
+ +
+ + + +
+ + + +

+price_equal(x, y) + + +

+ +
+ +

判断股价是否相等

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
x

价格1

required
y

价格2

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

如果相等则返回True,否则返回False

+
+ Source code in omicron/extensions/decimals.py +
def price_equal(x: float, y: float) -> bool:
+    """判断股价是否相等
+
+    Args:
+        x : 价格1
+        y : 价格2
+
+    Returns:
+        如果相等则返回True,否则返回False
+    """
+    return abs(math_round(x, 2) - math_round(y, 2)) < 1e-2
+
+
+
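A short usage sketch (import path assumed to be omicron.extensions.decimals); prices are compared after rounding to two decimal places:

>>> from omicron.extensions.decimals import price_equal
>>> price_equal(10.0, 10.004)   # both round to 10.0
True
>>> price_equal(10.0, 10.02)
False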
+ +
+ + + + + + +
+ +
+ +
+ + + +
+ + + +

+ np + + + +

+ +
+ +

Extension function related to numpy

+ + + +
+ + + + + + + + +
+ + + +

+array_math_round(arr, digits) + + +

+ +
+ +

将一维数组arr的数据进行四舍五入

+

numpy.around的函数并不是数学上的四舍五入,对1.5和2.5进行round的结果都会变成2,在金融领域计算中,我们必须使用数学意义上的四舍五入。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arrArrayLike

输入数组

required
digitsintrequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.ndarray

四舍五入后的一维数组

+
+ Source code in omicron/extensions/np.py +
def array_math_round(arr: Union[float, ArrayLike], digits: int) -> np.ndarray:
+    """将一维数组arr的数据进行四舍五入
+
+    numpy.around的函数并不是数学上的四舍五入,对1.5和2.5进行round的结果都会变成2,在金融领域计算中,我们必须使用数学意义上的四舍五入。
+
+    Args:
+        arr (ArrayLike): 输入数组
+        digits (int):
+
+    Returns:
+        np.ndarray: 四舍五入后的一维数组
+    """
+    # 如果是单个元素,则直接返回
+    if isinstance(arr, float):
+        return decimals.math_round(arr, digits)
+
+    f = np.vectorize(lambda x: decimals.math_round(x, digits))
+    return f(arr)
+
+
+
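For example (a sketch, assuming the function is imported from omicron.extensions.np), 0.25 rounds up here, while numpy.around rounds it to the nearest even digit:

>>> import numpy as np
>>> from omicron.extensions.np import array_math_round
>>> np.around([0.25, 0.75], 1)
array([0.2, 0.8])
>>> array_math_round([0.25, 0.75], 1)
array([0.3, 0.8])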
+ +
+ + + +
+ + + +

+array_price_equal(price1, price2) + + +

+ +
+ +

判断两个价格数组是否相等

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
price1ArrayLike

价格数组

required
price2ArrayLike

价格数组

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.ndarray

判断结果

+
+ Source code in omicron/extensions/np.py +
def array_price_equal(price1: ArrayLike, price2: ArrayLike) -> np.ndarray:
+    """判断两个价格数组是否相等
+
+    Args:
+        price1 (ArrayLike): 价格数组
+        price2 (ArrayLike): 价格数组
+
+    Returns:
+        np.ndarray: 判断结果
+    """
+    price1 = array_math_round(price1, 2)
+    price2 = array_math_round(price2, 2)
+
+    return abs(price1 - price2) < 1e-2
+
+
+
+ +
+ + + +
+ + + +

+bars_since(condition, default=None) + + +

+ +
+ +

Return the number of bars since condition sequence was last True, +or if never, return default.

+
1
+2
+3
>>> condition = [True, True, False]
+>>> bars_since(condition)
+1
+
+ +
+ Source code in omicron/extensions/np.py +
def bars_since(condition: Sequence[bool], default=None) -> int:
+    """
+    Return the number of bars since `condition` sequence was last `True`,
+    or if never, return `default`.
+
+        >>> condition = [True, True, False]
+        >>> bars_since(condition)
+        1
+    """
+    return next(compress(range(len(condition)), reversed(condition)), default)
+
+
+
+ +
+ + + +
+ + + +

+bin_cut(arr, n) + + +

+ +
+ +

将数组arr切分成n份

+

todo: use padding + reshape to boost performance

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arr[type]

[description]

required
n[type]

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
[type]

[description]

+
+ Source code in omicron/extensions/np.py +
def bin_cut(arr: list, n: int):
+    """将数组arr切分成n份
+
+    todo: use padding + reshape to boost performance
+    Args:
+        arr ([type]): [description]
+        n ([type]): [description]
+
+    Returns:
+        [type]: [description]
+    """
+    result = [[] for i in range(n)]
+
+    for i, e in enumerate(arr):
+        result[i % n].append(e)
+
+    return [e for e in result if len(e)]
+
+
+
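A small usage sketch (import path assumed): elements are dealt round-robin into n buckets, and empty buckets are dropped:

>>> from omicron.extensions.np import bin_cut
>>> bin_cut([1, 2, 3, 4, 5], 3)
[[1, 4], [2, 5], [3]]
>>> bin_cut([1, 2], 3)
[[1], [2]]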
+ +
+ + + +
+ + + +

+count_between(arr, start, end) + + +

+ +
+ +

计算数组中,start元素与end元素之间共有多少个元素

+

要求arr必须是已排序。计算结果会包含区间边界点。

+ +

Examples:

+
>>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+>>> count_between(arr, 20050104, 20050111)
+6
+
+
>>> count_between(arr, 20050104, 20050109)
+4
+
+ +
+ Source code in omicron/extensions/np.py +
def count_between(arr, start, end):
+    """计算数组中,`start`元素与`end`元素之间共有多少个元素
+
+    要求arr必须是已排序。计算结果会包含区间边界点。
+
+    Examples:
+        >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+        >>> count_between(arr, 20050104, 20050111)
+        6
+
+        >>> count_between(arr, 20050104, 20050109)
+        4
+    """
+    pos_start = np.searchsorted(arr, start, side="right")
+    pos_end = np.searchsorted(arr, end, side="right")
+
+    counter = pos_end - pos_start + 1
+    if start < arr[0]:
+        counter -= 1
+    if end > arr[-1]:
+        counter -= 1
+
+    return counter
+
+
+
+ +
+ + + +
+ + + +

+dataframe_to_structured_array(df, dtypes=None) + + +

+ +
+ +

convert dataframe (with all columns, and index possibly) to numpy structured arrays

+

len(dtypes) should be either equal to len(df.columns) or len(df.columns) + 1. In the latter case, df.index will also be included in the converted array.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dfDataFrame

the one needs to be converted

required
dtypesList[Tuple]

Defaults to None. If it's None, then dtypes of df is used, in such case, the index of df will not be converted.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
ArrayLike

[description]

+
+ Source code in omicron/extensions/np.py +
def dataframe_to_structured_array(
+    df: DataFrame, dtypes: List[Tuple] = None
+) -> ArrayLike:
+    """convert dataframe (with all columns, and index possibly) to numpy structured arrays
+
+    `len(dtypes)` should be either equal to `len(df.columns)` or `len(df.columns) + 1`. In the latter case, `df.index` will also be included in the converted array.
+
+    Args:
+        df: the one needs to be converted
+        dtypes: Defaults to None. If it's `None`, then dtypes of `df` is used, in such case, the `index` of `df` will not be converted.
+
+    Returns:
+        ArrayLike: [description]
+    """
+    v = df
+    if dtypes is not None:
+        dtypes_in_dict = {key: value for key, value in dtypes}
+
+        col_len = len(df.columns)
+        if len(dtypes) == col_len + 1:
+            v = df.reset_index()
+
+            rename_index_to = set(dtypes_in_dict.keys()).difference(set(df.columns))
+            v.rename(columns={"index": list(rename_index_to)[0]}, inplace=True)
+        elif col_len != len(dtypes):
+            raise ValueError(
+                f"length of dtypes should be either {col_len} or {col_len + 1}, is {len(dtypes)}"
+            )
+
+        # re-arrange order of dtypes, in order to align with df.columns
+        dtypes = []
+        for name in v.columns:
+            dtypes.append((name, dtypes_in_dict[name]))
+    else:
+        dtypes = df.dtypes
+
+    return np.array(np.rec.fromrecords(v.values), dtype=dtypes)
+
+
+
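A sketch of the index-including case (the column names below are made up): passing one more dtype than there are columns pulls df.index in as the extra field:

import pandas as pd
from omicron.extensions.np import dataframe_to_structured_array

df = pd.DataFrame({"open": [10.1, 10.2], "close": [10.3, 10.4]})
# three dtypes for two columns: the extra name ("frame") receives df.index
arr = dataframe_to_structured_array(
    df, [("frame", "i8"), ("open", "f8"), ("close", "f8")]
)
# arr.dtype.names == ("frame", "open", "close")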
+ +
+ + + +
+ + + +

+dict_to_numpy_array(d, dtype) + + +

+ +
+ +

convert dictionary to numpy array

+ +

Examples:

+ +
+
+
+

d = {"aaron": 5, "jack": 6} +dtype = [("name", "S8"), ("score", "<i4")] +dict_to_numpy_array(d, dtype) +array([(b'aaron', 5), (b'jack', 6)], + dtype=[('name', 'S8'), ('score', '<i4')])

+
+
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ddict

[description]

required
dtypeList[Tuple]

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def dict_to_numpy_array(d: dict, dtype: List[Tuple]) -> np.array:
+    """convert dictionary to numpy array
+
+    Examples:
+
+    >>> d = {"aaron": 5, "jack": 6}
+    >>> dtype = [("name", "S8"), ("score", "<i4")]
+    >>> dict_to_numpy_array(d, dtype)
+    array([(b'aaron', 5), (b'jack', 6)],
+          dtype=[('name', 'S8'), ('score', '<i4')])
+
+    Args:
+        d (dict): [description]
+        dtype (List[Tuple]): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    return np.fromiter(d.items(), dtype=dtype, count=len(d))
+
+
+
+ +
+ + + +
+ + + +

+fill_nan(ts) + + +

+ +
+ +

将ts中的NaN替换为其前值

+

如果ts起头的元素为NaN,则用第一个非NaN元素替换。

+

如果所有元素都为NaN,则无法替换。

+ +

Examples:

+
>>> arr = np.arange(6, dtype=np.float32)
+>>> arr[3:5] = np.NaN
+>>> fill_nan(arr)
+...
+array([0., 1., 2., 2., 2., 5.], dtype=float32)
+
+
>>> arr = np.arange(6, dtype=np.float32)
+>>> arr[0:2] = np.nan
+>>> fill_nan(arr)
+...
+array([2., 2., 2., 3., 4., 5.], dtype=float32)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
+
+ Source code in omicron/extensions/np.py +
def fill_nan(ts: np.ndarray):
+    """将ts中的NaN替换为其前值
+
+    如果ts起头的元素为NaN,则用第一个非NaN元素替换。
+
+    如果所有元素都为NaN,则无法替换。
+
+    Example:
+        >>> arr = np.arange(6, dtype=np.float32)
+        >>> arr[3:5] = np.NaN
+        >>> fill_nan(arr)
+        ... # doctest: +NORMALIZE_WHITESPACE
+        array([0., 1., 2., 2., 2., 5.], dtype=float32)
+
+        >>> arr = np.arange(6, dtype=np.float32)
+        >>> arr[0:2] = np.nan
+        >>> fill_nan(arr)
+        ... # doctest: +NORMALIZE_WHITESPACE
+        array([2., 2., 2., 3., 4., 5.], dtype=float32)
+
+    Args:
+        ts (np.array): [description]
+    """
+    if np.all(np.isnan(ts)):
+        raise ValueError("all of ts are NaN")
+
+    if ts[0] is None or math.isnan(ts[0]):
+        idx = np.argwhere(~np.isnan(ts))[0]
+        ts[0] = ts[idx]
+
+    mask = np.isnan(ts)
+    idx = np.where(~mask, np.arange(mask.size), 0)
+    np.maximum.accumulate(idx, out=idx)
+    return ts[idx]
+
+
+
+ +
+ + + +
+ + + +

+find_runs(x) + + +

+ +
+ +

Find runs of consecutive items in an array.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
xArrayLike

the sequence to find runs in

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[np.ndarray, np.ndarray, np.ndarray]

A tuple of unique values, start indices, and length of runs

+
+ Source code in omicron/extensions/np.py +
def find_runs(x: ArrayLike) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
+    """Find runs of consecutive items in an array.
+
+    Args:
+        x: the sequence to find runs in
+
+    Returns:
+        A tuple of unique values, start indices, and length of runs
+    """
+
+    # ensure array
+    x = np.asanyarray(x)
+    if x.ndim != 1:
+        raise ValueError("only 1D array supported")
+    n = x.shape[0]
+
+    # handle empty array
+    if n == 0:
+        return np.array([]), np.array([]), np.array([])
+
+    else:
+        # find run starts
+        loc_run_start = np.empty(n, dtype=bool)
+        loc_run_start[0] = True
+        np.not_equal(x[:-1], x[1:], out=loc_run_start[1:])
+        run_starts = np.nonzero(loc_run_start)[0]
+
+        # find run values
+        run_values = x[loc_run_start]
+
+        # find run lengths
+        run_lengths = np.diff(np.append(run_starts, n))
+
+        return run_values, run_starts, run_lengths
+
+
+
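For example (a sketch, assuming the function is imported from omicron.extensions.np):

>>> from omicron.extensions.np import find_runs
>>> find_runs([1, 1, 2, 2, 2, 3])
(array([1, 2, 3]), array([0, 2, 5]), array([2, 3, 1]))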
+ +
+ + + +
+ + + +

+floor(arr, item) + + +

+ +
+ +

在数据arr中,找到小于等于item的那一个值。如果item小于所有arr元素的值,返回arr[0];如果item +大于所有arr元素的值,返回arr[-1]

+

minute_frames_floor不同的是,本函数不做回绕与进位.

+ +

Examples:

+
>>> a = [3, 6, 9]
+>>> floor(a, -1)
+3
+>>> floor(a, 9)
+9
+>>> floor(a, 10)
+9
+>>> floor(a, 4)
+3
+>>> floor(a,10)
+9
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arrrequired
itemrequired
+
+ Source code in omicron/extensions/np.py +
def floor(arr, item):
+    """
+    在数据arr中,找到小于等于item的那一个值。如果item小于所有arr元素的值,返回arr[0];如果item
+    大于所有arr元素的值,返回arr[-1]
+
+    与`minute_frames_floor`不同的是,本函数不做回绕与进位.
+
+    Examples:
+        >>> a = [3, 6, 9]
+        >>> floor(a, -1)
+        3
+        >>> floor(a, 9)
+        9
+        >>> floor(a, 10)
+        9
+        >>> floor(a, 4)
+        3
+        >>> floor(a,10)
+        9
+
+    Args:
+        arr:
+        item:
+
+    Returns:
+
+    """
+    if item < arr[0]:
+        return arr[0]
+    index = np.searchsorted(arr, item, side="right")
+    return arr[index - 1]
+
+
+
+ +
+ + + +
+ + + +

+join_by_left(key, r1, r2, mask=True) + + +

+ +
+ +

左连接 r1, r2 by key

+

如果r1中存在r2中没有的行,则该行对应的r2中的那些字段将被mask,或者填充随机数。 +same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys

+

Reference: stackoverflow

+ +

Examples:

+
>>> # to join the following
+>>> # [[ 1, 2],
+>>> #  [ 1, 3],   x   [[1, 5],
+>>> #  [ 2, 3]]        [4, 7]]
+>>> # only first two rows in left will be joined
+
+
>>> r1 = np.array([(1, 2), (1,3), (2,3)], dtype=[('seq', 'i4'), ('score', 'i4')])
+>>> r2 = np.array([(1, 5), (4,7)], dtype=[('seq', 'i4'), ('age', 'i4')])
+>>> joined = join_by_left('seq', r1, r2)
+>>> print(joined)
+[(1, 2, 5) (1, 3, 5) (2, 3, --)]
+
+
>>> print(joined.dtype)
+(numpy.record, [('seq', '<i4'), ('score', '<i4'), ('age', '<i4')])
+
+
>>> joined[2][2]
+masked
+
+
>>> joined.tolist()[2][2] == None
+True
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
key

join关键字

required
r1

数据集1

required
r2

数据集2

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription

a numpy array

+
+ Source code in omicron/extensions/np.py +
def join_by_left(key, r1, r2, mask=True):
+    """左连接 `r1`, `r2` by `key`
+
+    如果`r1`中存在`r2`中没有的行,则该行对应的`r2`中的那些字段将被mask,或者填充随机数。
+    same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys
+
+    [Reference: stackoverflow](https://stackoverflow.com/a/53261882/13395693)
+
+    Examples:
+        >>> # to join the following
+        >>> # [[ 1, 2],
+        >>> #  [ 1, 3],   x   [[1, 5],
+        >>> #  [ 2, 3]]        [4, 7]]
+        >>> # only first two rows in left will be joined
+
+        >>> r1 = np.array([(1, 2), (1,3), (2,3)], dtype=[('seq', 'i4'), ('score', 'i4')])
+        >>> r2 = np.array([(1, 5), (4,7)], dtype=[('seq', 'i4'), ('age', 'i4')])
+        >>> joined = join_by_left('seq', r1, r2)
+        >>> print(joined)
+        [(1, 2, 5) (1, 3, 5) (2, 3, --)]
+
+        >>> print(joined.dtype)
+        (numpy.record, [('seq', '<i4'), ('score', '<i4'), ('age', '<i4')])
+
+        >>> joined[2][2]
+        masked
+
+        >>> joined.tolist()[2][2] == None
+        True
+
+    Args:
+        key : join关键字
+        r1 : 数据集1
+        r2 : 数据集2
+
+    Returns:
+        a numpy array
+    """
+    # figure out the dtype of the result array
+    descr1 = r1.dtype.descr
+    descr2 = [d for d in r2.dtype.descr if d[0] not in r1.dtype.names]
+    descrm = descr1 + descr2
+
+    # figure out the fields we'll need from each array
+    f1 = [d[0] for d in descr1]
+    f2 = [d[0] for d in descr2]
+
+    # cache the number of columns in f1
+    ncol1 = len(f1)
+
+    # get a dict of the rows of r2 grouped by key
+    rows2 = {}
+    for row2 in r2:
+        rows2.setdefault(row2[key], []).append(row2)
+
+    # figure out how many rows will be in the result
+    nrowm = 0
+    for k1 in r1[key]:
+        if k1 in rows2:
+            nrowm += len(rows2[k1])
+        else:
+            nrowm += 1
+
+    # allocate the return array
+    # ret = np.full((nrowm, ), fill, dtype=descrm)
+    _ret = np.recarray(nrowm, dtype=descrm)
+    if mask:
+        ret = np.ma.array(_ret, mask=True)
+    else:
+        ret = _ret
+
+    # merge the data into the return array
+    i = 0
+    for row1 in r1:
+        if row1[key] in rows2:
+            for row2 in rows2[row1[key]]:
+                ret[i] = tuple(row1[f1]) + tuple(row2[f2])
+                i += 1
+        else:
+            for j in range(ncol1):
+                ret[i][j] = row1[j]
+            i += 1
+
+    return ret
+
+
+
+ +
+ + + +
+ + + +

+numpy_append_fields(base, names, data, dtypes) + + +

+ +
+ +

给现有的数组base增加新的字段

+

实现了numpy.lib.recfunctions.rec_append_fields的功能。提供这个功能,是因为rec_append_fields不能处理data元素的类型为Object的情况。

+

新增的数据列将顺序排列在其它列的右边。

+ +

Examples:

+
>>> # 新增单个字段
+>>> import numpy
+>>> old = np.array([i for i in range(3)], dtype=[('col1', '<f4')])
+>>> new_list = [2 * i for i in range(3)]
+>>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '<f4')])
+>>> print(res)
+...
+[(0., 0.) (1., 2.) (2., 4.)]
+
+
>>> # 新增多个字段
+>>> data = [res['col1'].tolist(), res['new_col'].tolist()]
+>>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', '<f4'), ('col4', '<f4')]))
+...
+[(0., 0., 0.) (1., 1., 2.) (2., 2., 4.)]
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
base[numpy.array]

基础数组

required
names[type]

新增字段的名字,可以是字符串(单字段的情况),也可以是字符串列表

required
datalist

增加的字段的数据,list类型

required
dtypes[type]

新增字段的dtype

required
+
+ Source code in omicron/extensions/np.py +
def numpy_append_fields(
+    base: np.ndarray, names: Union[str, List[str]], data: List, dtypes: List
+) -> np.ndarray:
+    """给现有的数组`base`增加新的字段
+
+    实现了`numpy.lib.recfunctions.rec_append_fields`的功能。提供这个功能,是因为`rec_append_fields`不能处理`data`元素的类型为Object的情况。
+
+    新增的数据列将顺序排列在其它列的右边。
+
+    Example:
+        >>> # 新增单个字段
+        >>> import numpy
+        >>> old = np.array([i for i in range(3)], dtype=[('col1', '<f4')])
+        >>> new_list = [2 * i for i in range(3)]
+        >>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '<f4')])
+        >>> print(res)
+        ... # doctest: +NORMALIZE_WHITESPACE
+        [(0., 0.) (1., 2.) (2., 4.)]
+
+        >>> # 新增多个字段
+        >>> data = [res['col1'].tolist(), res['new_col'].tolist()]
+        >>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', '<f4'), ('col4', '<f4')]))
+        ... # doctest: +NORMALIZE_WHITESPACE
+        [(0., 0., 0.) (1., 1., 2.) (2., 2., 4.)]
+
+    Args:
+        base ([numpy.array]): 基础数组
+        names ([type]): 新增字段的名字,可以是字符串(单字段的情况),也可以是字符串列表
+        data (list): 增加的字段的数据,list类型
+        dtypes ([type]): 新增字段的dtype
+    """
+    if isinstance(names, str):
+        names = [names]
+        data = [data]
+
+    result = np.empty(base.shape, dtype=base.dtype.descr + dtypes)
+    for col in base.dtype.names:
+        result[col] = base[col]
+
+    for i in range(len(names)):
+        result[names[i]] = data[i]
+
+    return result
+
+
+
+ +
+ + + + +
+ + + +

+remove_nan(ts) + + +

+ +
+ +

ts中去除NaN

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def remove_nan(ts: np.ndarray) -> np.ndarray:
+    """从`ts`中去除NaN
+
+    Args:
+        ts (np.array): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    return ts[~np.isnan(ts.astype(float))]
+
+
+
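For example (a sketch):

>>> import numpy as np
>>> from omicron.extensions.np import remove_nan
>>> remove_nan(np.array([1.0, np.nan, 2.0]))
array([1., 2.])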
+ +
+ + + +
+ + + +

+replace_zero(ts, replacement=None) + + +

+ +
+ +

将ts中的0替换为前值, 处理volume数据时常会用到

+

如果提供了replacement, 则替换为replacement

+ +
+ Source code in omicron/extensions/np.py +
def replace_zero(ts: np.ndarray, replacement=None) -> np.ndarray:
+    """将ts中的0替换为前值, 处理volume数据时常会用到
+
+    如果提供了replacement, 则替换为replacement
+
+    """
+    if replacement is not None:
+        return np.where(ts == 0, replacement, ts)
+
+    if np.all(ts == 0):
+        raise ValueError("all of ts are 0")
+
+    if ts[0] == 0:
+        idx = np.argwhere(ts != 0)[0]
+        ts[0] = ts[idx]
+
+    mask = ts == 0
+    idx = np.where(~mask, np.arange(mask.size), 0)
+    np.maximum.accumulate(idx, out=idx)
+    return ts[idx]
+
+
+
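A usage sketch (import path assumed); without a replacement value the zeros are forward-filled:

>>> import numpy as np
>>> from omicron.extensions.np import replace_zero
>>> replace_zero(np.array([0, 3, 0, 5]), replacement=1)
array([1, 3, 1, 5])
>>> replace_zero(np.array([0, 3, 0, 5]))
array([3, 3, 3, 5])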
+ +
+ + + +
+ + + +

+rolling(x, win, func) + + +

+ +
+ +

对序列x进行窗口滑动计算。

+

如果func要实现的功能是argmax, argmin, max, mean, median, min, rank, std, sum, var等,请使用bottleneck中的move_argmax, move_argmin, move_max, move_mean, move_median, move_min, move_rank, move_std, move_sum, move_var。这些函数的性能更好。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
x[type]

[description]

required
win[type]

[description]

required
func[type]

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
[type]

[description]

+
+ Source code in omicron/extensions/np.py +
def rolling(x, win, func):
+    """对序列`x`进行窗口滑动计算。
+
+    如果`func`要实现的功能是argmax, argmin, max, mean, median, min, rank, std, sum, var等,请使用bottleneck中的move_argmax, move_argmin, move_max, move_mean, move_median, move_min, move_rank, move_std, move_sum, move_var。这些函数的性能更好。
+
+    Args:
+        x ([type]): [description]
+        win ([type]): [description]
+        func ([type]): [description]
+
+    Returns:
+        [type]: [description]
+    """
+    results = []
+    for subarray in sliding_window_view(x, window_shape=win):
+        results.append(func(subarray))
+
+    return np.array(results)
+
+
+
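For example, a 3-bar rolling mean (a sketch, assuming the function is imported from omicron.extensions.np):

>>> import numpy as np
>>> from omicron.extensions.np import rolling
>>> rolling(np.arange(5), 3, np.mean)
array([1., 2., 3.])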
+ +
+ + + +
+ + + +

+shift(arr, start, offset) + + +

+ +
+ +

在numpy数组arr中,找到start(或者最接近的一个),取offset对应的元素。

+

要求arr已排序。offset为正,表明向后移位;offset为负,表明向前移位

+ +

Examples:

+
>>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+>>> shift(arr, 20050104, 1)
+20050105
+
+
>>> shift(arr, 20050105, -1)
+20050104
+
+
>>> # 起始点已右越界,且向右shift,返回起始点
+>>> shift(arr, 20050120, 1)
+20050120
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arr

已排序的数组

required
start

numpy可接受的数据类型

required
offsetint

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription

移位后得到的元素值

+
+ Source code in omicron/extensions/np.py +
def shift(arr, start, offset):
+    """在numpy数组arr中,找到start(或者最接近的一个),取offset对应的元素。
+
+    要求`arr`已排序。`offset`为正,表明向后移位;`offset`为负,表明向前移位
+
+    Examples:
+        >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+        >>> shift(arr, 20050104, 1)
+        20050105
+
+        >>> shift(arr, 20050105, -1)
+        20050104
+
+        >>> # 起始点已右越界,且向右shift,返回起始点
+        >>> shift(arr, 20050120, 1)
+        20050120
+
+
+    Args:
+        arr : 已排序的数组
+        start : numpy可接受的数据类型
+        offset (int): [description]
+
+    Returns:
+        移位后得到的元素值
+    """
+    pos = np.searchsorted(arr, start, side="right")
+
+    if pos + offset - 1 >= len(arr):
+        return start
+    else:
+        return arr[pos + offset - 1]
+
+
+
+ +
+ + + +
+ + + +

+smallest_n_argpos(ts, n) + + +

+ +
+ +

return the positions (argpos) of the smallest n elements, ordered by value ascending (min->max)

+ +

Examples:

+
>>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+array([8, 7])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

输入的数组

required
nint

取最小的n个元素

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def smallest_n_argpos(ts: np.array, n: int) -> np.array:
+    """return the positions (argpos) of the smallest n elements, ordered by value ascending (min->max)
+
+    Example:
+        >>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+        array([8, 7])
+
+    Args:
+        ts (np.array): 输入的数组
+        n (int): 取最小的n个元素
+
+    Returns:
+        np.array: [description]
+    """
+    return np.argsort(ts)[:n]
+
+
+
+ +
+ + + +
+ + + +

+to_pydatetime(tm) + + +

+ +
+ +

将numpy.datetime64对象转换成为python的datetime对象

+

numpy.ndarray.item()方法可用以将任何numpy对象转换成python对象,推荐在任何适用的地方使用.item()方法,而不是本方法。示例: +

1
+2
+3
+4
    arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]')
+    arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0)
+
+    arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0)
+

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tm

the input numpy datetime object

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.datetime

python datetime object

.. deprecated:: 2.0.0 use tm.item() instead

+ +
+ Source code in omicron/extensions/np.py +
@deprecated("2.0.0", details="use `tm.item()` instead")
+def to_pydatetime(tm: np.datetime64) -> datetime.datetime:
+    """将numpy.datetime64对象转换成为python的datetime对象
+
+    numpy.ndarray.item()方法可用以将任何numpy对象转换成python对象,推荐在任何适用的地方使用.item()方法,而不是本方法。示例:
+    ```
+        arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]')
+        arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0)
+
+        arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0)
+    ```
+
+    Args:
+        tm : the input numpy datetime object
+
+    Returns:
+        python datetime object
+    """
+    unix_epoch = np.datetime64(0, "s")
+    one_second = np.timedelta64(1, "s")
+    seconds_since_epoch = (tm - unix_epoch) / one_second
+
+    return datetime.datetime.utcfromtimestamp(seconds_since_epoch)
+
+
+
+ +
+ + + +
+ + + +

+top_n_argpos(ts, n) + + +

+ +
+ +

return the positions (argpos) of the largest n elements, ordered by value descending (max->min)

+ +

Examples:

+
>>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+array([3, 4])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
nint

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def top_n_argpos(ts: np.array, n: int) -> np.array:
+    """return the positions (argpos) of the largest n elements, ordered by value descending (max->min)
+
+    Example:
+        >>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+        array([3, 4])
+
+    Args:
+        ts (np.array): [description]
+        n (int): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    ts_ = np.copy(ts)
+    ts_[np.isnan(ts_)] = -np.inf
+    return np.argsort(ts_)[-n:][::-1]
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/metrics/index.html b/2.0.0/api/metrics/index.html new file mode 100644 index 00000000..73135228 --- /dev/null +++ b/2.0.0/api/metrics/index.html @@ -0,0 +1,1605 @@ + + + + + + + + + + + + + + + + metrics - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +

metrics

+ +

以下功能请使用empyrical包中相关的函数。

+

usage: +

1
from empyrical import aggregate_returns
+

+
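For instance, a couple of commonly used metrics (a sketch; the return series below is made up, and empyrical's defaults such as daily annualization apply):

import numpy as np
from empyrical import max_drawdown, sharpe_ratio

returns = np.array([0.01, 0.02, -0.03, 0.04, -0.02])
mdd = max_drawdown(returns)   # about -0.03 for this series
sr = sharpe_ratio(returns)    # annualized with empyrical's defaults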

aggregate_returns

+

external link

+

alpha

+

external link

+

alpha_aligned

+

external link

+

alpha_beta

+

external link

+

alpha_beta_aligned

+

external link

+

annual_return

+

external link

+

annual_volatility

+

external link

+

beta

+

external link

+

beta_aligned

+

external link

+

beta_fragility_heuristic

+

external link

+

beta_fragility_heuristic_aligned

+

external link

+

cagr

+

external link

+

calmar_ratio

+

external link

+

capture

+

external link

+

compute_exposures

+

external link

+

conditional_value_at_risk

+

external link

+

cum_returns

+

external link

+

cum_returns_final

+

external link

+

down_alpha_beta

+

external link

+

down_capture

+

external link

+

downside_risk

+

external link

+

excess_sharpe

+

external link

+

gpd_risk_estimates

+

external link

+

gpd_risk_estimates_aligned

+

external link

+

max_drawdown

+

external link

+

omega_ratio

+

external link

+

perf_attrib

+

external link

+

periods

+

external link

+

roll_alpha

+

external link

+

roll_alpha_aligned

+

external link

+

roll_alpha_beta

+

external link

+

roll_alpha_beta_aligned

+

external link

+

roll_annual_volatility

+

external link

+

roll_beta

+

external link

+

roll_beta_aligned

+

external link

+

roll_down_capture

+

external link

+

roll_max_drawdown

+

external link

+

roll_sharpe_ratio

+

external link

+

roll_sortino_ratio

+

external link

+

roll_up_capture

+

external link

+

roll_up_down_capture

+

external link

+

sharpe_ratio

+

external link

+

simple_returns

+

external link

+

sortino_ratio

+

external link

+

stability_of_timeseries

+

external link

+

stats

+

external link

+

tail_ratio

+

external link

+

up_alpha_beta

+

external link

+

up_capture

+

external link

+

up_down_capture

+

external link

+

utils

+

external link

+

value_at_risk

+

external link

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/omicron/index.html b/2.0.0/api/omicron/index.html new file mode 100644 index 00000000..2780efbd --- /dev/null +++ b/2.0.0/api/omicron/index.html @@ -0,0 +1,4428 @@ + + + + + + + + + + + + + + + + omicron - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +
+ + +
+ +

Omicron提供数据持久化、时间(日历、triggers)、行情数据model、基础运算和基础量化因子

+ + + +
+ + + + + + + + +
+ + + +

+close() + + + async + + +

+ +
+ +

关闭与缓存的连接

+ +
+ Source code in omicron/__init__.py +
async def close():
+    """关闭与缓存的连接"""
+
+    try:
+        await cache.close()
+    except Exception as e:  # noqa
+        pass
+
+
+
+ +
+ + + +
+ + + +

+init(app_cache=5) + + + async + + +

+ +
+ +

初始化Omicron

+

初始化influxDB, 缓存等连接, 并加载日历和证券列表

+

上述初始化的连接,应该在程序退出时,通过调用close()关闭

+ +
+ Source code in omicron/__init__.py +
async def init(app_cache: int = 5):
+    """初始化Omicron
+
+    初始化influxDB, 缓存等连接, 并加载日历和证券列表
+
+    上述初始化的连接,应该在程序退出时,通过调用`close()`关闭
+    """
+    global cache
+
+    await cache.init(app=app_cache)
+    await tf.init()
+
+    from omicron.models.security import Security
+
+    await Security.init()
+
+
+
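A typical lifecycle sketch (it assumes the cfg4py configuration for Redis/InfluxDB has been set up elsewhere; this is not part of the generated docs):

import asyncio
import omicron

async def main():
    await omicron.init()        # connect cache/influxdb, load calendar and security list
    try:
        pass                    # use omicron APIs here
    finally:
        await omicron.close()   # release the cache connection on exit

asyncio.run(main())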
+ +
+ + + + + + +
+ +
+ +
+ +

Extensions package

+ + +
+ + +
+ + + + +
+ + + + + + + + + + +
+ + + +

+ decimals + + + +

+ +
+ + + + +
+ + + + + + + + +
+ + + +

+math_round(x, digits) + + +

+ +
+ +

由于浮点数的表示问题,很多语言的round函数与数学上的round函数不一致。下面的函数结果与数学上的一致。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
xfloat

要进行四舍五入的数字

required
digitsint

小数点后保留的位数

required
+
+ Source code in omicron/extensions/decimals.py +
def math_round(x: float, digits: int):
+    """由于浮点数的表示问题,很多语言的round函数与数学上的round函数不一致。下面的函数结果与数学上的一致。
+
+    Args:
+        x: 要进行四舍五入的数字
+        digits: 小数点后保留的位数
+
+    """
+
+    return int(x * (10**digits) + copysign(0.5, x)) / (10**digits)
+
+
+
+ +
+ + + +
+ + + +

+price_equal(x, y) + + +

+ +
+ +

判断股价是否相等

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
x

价格1

required
y

价格2

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

如果相等则返回True,否则返回False

+
+ Source code in omicron/extensions/decimals.py +
def price_equal(x: float, y: float) -> bool:
+    """判断股价是否相等
+
+    Args:
+        x : 价格1
+        y : 价格2
+
+    Returns:
+        如果相等则返回True,否则返回False
+    """
+    return abs(math_round(x, 2) - math_round(y, 2)) < 1e-2
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + +
+ + + +

+ np + + + +

+ +
+ +

Extension function related to numpy

+ + + +
+ + + + + + + + +
+ + + +

+array_math_round(arr, digits) + + +

+ +
+ +

将一维数组arr的数据进行四舍五入

+

numpy.around的函数并不是数学上的四舍五入,对1.5和2.5进行round的结果都会变成2,在金融领域计算中,我们必须使用数学意义上的四舍五入。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arrArrayLike

输入数组

required
digitsintrequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.ndarray

四舍五入后的一维数组

+
+ Source code in omicron/extensions/np.py +
def array_math_round(arr: Union[float, ArrayLike], digits: int) -> np.ndarray:
+    """将一维数组arr的数据进行四舍五入
+
+    numpy.around的函数并不是数学上的四舍五入,对1.5和2.5进行round的结果都会变成2,在金融领域计算中,我们必须使用数学意义上的四舍五入。
+
+    Args:
+        arr (ArrayLike): 输入数组
+        digits (int):
+
+    Returns:
+        np.ndarray: 四舍五入后的一维数组
+    """
+    # 如果是单个元素,则直接返回
+    if isinstance(arr, float):
+        return decimals.math_round(arr, digits)
+
+    f = np.vectorize(lambda x: decimals.math_round(x, digits))
+    return f(arr)
+
+
+
+ +
+ + + +
+ + + +

+array_price_equal(price1, price2) + + +

+ +
+ +

判断两个价格数组是否相等

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
price1ArrayLike

价格数组

required
price2ArrayLike

价格数组

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.ndarray

判断结果

+
+ Source code in omicron/extensions/np.py +
def array_price_equal(price1: ArrayLike, price2: ArrayLike) -> np.ndarray:
+    """判断两个价格数组是否相等
+
+    Args:
+        price1 (ArrayLike): 价格数组
+        price2 (ArrayLike): 价格数组
+
+    Returns:
+        np.ndarray: 判断结果
+    """
+    price1 = array_math_round(price1, 2)
+    price2 = array_math_round(price2, 2)
+
+    return abs(price1 - price2) < 1e-2
+
+
+
+ +
+ + + +
+ + + +

+bars_since(condition, default=None) + + +

+ +
+ +

Return the number of bars since condition sequence was last True, +or if never, return default.

+
1
+2
+3
>>> condition = [True, True, False]
+>>> bars_since(condition)
+1
+
+ +
+ Source code in omicron/extensions/np.py +
def bars_since(condition: Sequence[bool], default=None) -> int:
+    """
+    Return the number of bars since `condition` sequence was last `True`,
+    or if never, return `default`.
+
+        >>> condition = [True, True, False]
+        >>> bars_since(condition)
+        1
+    """
+    return next(compress(range(len(condition)), reversed(condition)), default)
+
+
+
+ +
+ + + +
+ + + +

+bin_cut(arr, n) + + +

+ +
+ +

将数组arr切分成n份

+

todo: use padding + reshape to boost performance

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arr[type]

[description]

required
n[type]

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
[type]

[description]

+
+ Source code in omicron/extensions/np.py +
def bin_cut(arr: list, n: int):
+    """将数组arr切分成n份
+
+    todo: use padding + reshape to boost performance
+    Args:
+        arr ([type]): [description]
+        n ([type]): [description]
+
+    Returns:
+        [type]: [description]
+    """
+    result = [[] for i in range(n)]
+
+    for i, e in enumerate(arr):
+        result[i % n].append(e)
+
+    return [e for e in result if len(e)]
+
+
+
+ +
+ + + +
+ + + +

+count_between(arr, start, end) + + +

+ +
+ +

计算数组中,start元素与end元素之间共有多少个元素

+

要求arr必须是已排序。计算结果会包含区间边界点。

+ +

Examples:

+
>>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+>>> count_between(arr, 20050104, 20050111)
+6
+
+
>>> count_between(arr, 20050104, 20050109)
+4
+
+ +
+ Source code in omicron/extensions/np.py +
def count_between(arr, start, end):
+    """计算数组中,`start`元素与`end`元素之间共有多少个元素
+
+    要求arr必须是已排序。计算结果会包含区间边界点。
+
+    Examples:
+        >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+        >>> count_between(arr, 20050104, 20050111)
+        6
+
+        >>> count_between(arr, 20050104, 20050109)
+        4
+    """
+    pos_start = np.searchsorted(arr, start, side="right")
+    pos_end = np.searchsorted(arr, end, side="right")
+
+    counter = pos_end - pos_start + 1
+    if start < arr[0]:
+        counter -= 1
+    if end > arr[-1]:
+        counter -= 1
+
+    return counter
+
+
+
+ +
+ + + +
+ + + +

+dataframe_to_structured_array(df, dtypes=None) + + +

+ +
+ +

convert dataframe (with all columns, and index possibly) to numpy structured arrays

+

len(dtypes) should be either equal to len(df.columns) or len(df.columns) + 1. In the latter case, df.index will also be included in the converted array.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dfDataFrame

the one needs to be converted

required
dtypesList[Tuple]

Defaults to None. If it's None, then dtypes of df is used, in such case, the index of df will not be converted.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
ArrayLike

[description]

+
+ Source code in omicron/extensions/np.py +
def dataframe_to_structured_array(
+    df: DataFrame, dtypes: List[Tuple] = None
+) -> ArrayLike:
+    """convert dataframe (with all columns, and index possibly) to numpy structured arrays
+
+    `len(dtypes)` should be either equal to `len(df.columns)` or `len(df.columns) + 1`. In the latter case, `df.index` will also be included in the converted array.
+
+    Args:
+        df: the one needs to be converted
+        dtypes: Defaults to None. If it's `None`, then dtypes of `df` is used, in such case, the `index` of `df` will not be converted.
+
+    Returns:
+        ArrayLike: [description]
+    """
+    v = df
+    if dtypes is not None:
+        dtypes_in_dict = {key: value for key, value in dtypes}
+
+        col_len = len(df.columns)
+        if len(dtypes) == col_len + 1:
+            v = df.reset_index()
+
+            rename_index_to = set(dtypes_in_dict.keys()).difference(set(df.columns))
+            v.rename(columns={"index": list(rename_index_to)[0]}, inplace=True)
+        elif col_len != len(dtypes):
+            raise ValueError(
+                f"length of dtypes should be either {col_len} or {col_len + 1}, is {len(dtypes)}"
+            )
+
+        # re-arrange order of dtypes, in order to align with df.columns
+        dtypes = []
+        for name in v.columns:
+            dtypes.append((name, dtypes_in_dict[name]))
+    else:
+        dtypes = df.dtypes
+
+    return np.array(np.rec.fromrecords(v.values), dtype=dtypes)
+
+
+
+ +
+ + + +
+ + + +

+dict_to_numpy_array(d, dtype) + + +

+ +
+ +

convert dictionary to numpy array

+ +

Examples:

+ +
+
+
+

d = {"aaron": 5, "jack": 6} +dtype = [("name", "S8"), ("score", "<i4")] +dict_to_numpy_array(d, dtype) +array([(b'aaron', 5), (b'jack', 6)], + dtype=[('name', 'S8'), ('score', '<i4')])

+
+
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ddict

[description]

required
dtypeList[Tuple]

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def dict_to_numpy_array(d: dict, dtype: List[Tuple]) -> np.array:
+    """convert dictionary to numpy array
+
+    Examples:
+
+    >>> d = {"aaron": 5, "jack": 6}
+    >>> dtype = [("name", "S8"), ("score", "<i4")]
+    >>> dict_to_numpy_array(d, dtype)
+    array([(b'aaron', 5), (b'jack', 6)],
+          dtype=[('name', 'S8'), ('score', '<i4')])
+
+    Args:
+        d (dict): [description]
+        dtype (List[Tuple]): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    return np.fromiter(d.items(), dtype=dtype, count=len(d))
+
+
+
+ +
+ + + +
+ + + +

+fill_nan(ts) + + +

+ +
+ +

将ts中的NaN替换为其前值

+

如果ts起头的元素为NaN,则用第一个非NaN元素替换。

+

如果所有元素都为NaN,则无法替换。

+ +

Examples:

+
>>> arr = np.arange(6, dtype=np.float32)
+>>> arr[3:5] = np.NaN
+>>> fill_nan(arr)
+...
+array([0., 1., 2., 2., 2., 5.], dtype=float32)
+
+
>>> arr = np.arange(6, dtype=np.float32)
+>>> arr[0:2] = np.nan
+>>> fill_nan(arr)
+...
+array([2., 2., 2., 3., 4., 5.], dtype=float32)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
+
+ Source code in omicron/extensions/np.py +
def fill_nan(ts: np.ndarray):
+    """将ts中的NaN替换为其前值
+
+    如果ts起头的元素为NaN,则用第一个非NaN元素替换。
+
+    如果所有元素都为NaN,则无法替换。
+
+    Example:
+        >>> arr = np.arange(6, dtype=np.float32)
+        >>> arr[3:5] = np.NaN
+        >>> fill_nan(arr)
+        ... # doctest: +NORMALIZE_WHITESPACE
+        array([0., 1., 2., 2., 2., 5.], dtype=float32)
+
+        >>> arr = np.arange(6, dtype=np.float32)
+        >>> arr[0:2] = np.nan
+        >>> fill_nan(arr)
+        ... # doctest: +NORMALIZE_WHITESPACE
+        array([2., 2., 2., 3., 4., 5.], dtype=float32)
+
+    Args:
+        ts (np.array): [description]
+    """
+    if np.all(np.isnan(ts)):
+        raise ValueError("all of ts are NaN")
+
+    if ts[0] is None or math.isnan(ts[0]):
+        idx = np.argwhere(~np.isnan(ts))[0]
+        ts[0] = ts[idx]
+
+    mask = np.isnan(ts)
+    idx = np.where(~mask, np.arange(mask.size), 0)
+    np.maximum.accumulate(idx, out=idx)
+    return ts[idx]
+
+
+
+ +
+ + + +
+ + + +

+find_runs(x) + + +

+ +
+ +

Find runs of consecutive items in an array.

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
xArrayLike

the sequence to find runs in

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[np.ndarray, np.ndarray, np.ndarray]

A tuple of unique values, start indices, and length of runs

+
+ Source code in omicron/extensions/np.py +
def find_runs(x: ArrayLike) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
+    """Find runs of consecutive items in an array.
+
+    Args:
+        x: the sequence to find runs in
+
+    Returns:
+        A tuple of unique values, start indices, and length of runs
+    """
+
+    # ensure array
+    x = np.asanyarray(x)
+    if x.ndim != 1:
+        raise ValueError("only 1D array supported")
+    n = x.shape[0]
+
+    # handle empty array
+    if n == 0:
+        return np.array([]), np.array([]), np.array([])
+
+    else:
+        # find run starts
+        loc_run_start = np.empty(n, dtype=bool)
+        loc_run_start[0] = True
+        np.not_equal(x[:-1], x[1:], out=loc_run_start[1:])
+        run_starts = np.nonzero(loc_run_start)[0]
+
+        # find run values
+        run_values = x[loc_run_start]
+
+        # find run lengths
+        run_lengths = np.diff(np.append(run_starts, n))
+
+        return run_values, run_starts, run_lengths
+
+
+
+ +
+ + + +
+ + + +

+floor(arr, item) + + +

+ +
+ +

在数据arr中,找到小于等于item的那一个值。如果item小于所有arr元素的值,返回arr[0];如果item +大于所有arr元素的值,返回arr[-1]

+

minute_frames_floor不同的是,本函数不做回绕与进位.

+ +

Examples:

+
>>> a = [3, 6, 9]
+>>> floor(a, -1)
+3
+>>> floor(a, 9)
+9
+>>> floor(a, 10)
+9
+>>> floor(a, 4)
+3
+>>> floor(a,10)
+9
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arrrequired
itemrequired
+
+ Source code in omicron/extensions/np.py +
def floor(arr, item):
+    """
+    在数据arr中,找到小于等于item的那一个值。如果item小于所有arr元素的值,返回arr[0];如果item
+    大于所有arr元素的值,返回arr[-1]
+
+    与`minute_frames_floor`不同的是,本函数不做回绕与进位.
+
+    Examples:
+        >>> a = [3, 6, 9]
+        >>> floor(a, -1)
+        3
+        >>> floor(a, 9)
+        9
+        >>> floor(a, 10)
+        9
+        >>> floor(a, 4)
+        3
+        >>> floor(a,10)
+        9
+
+    Args:
+        arr:
+        item:
+
+    Returns:
+
+    """
+    if item < arr[0]:
+        return arr[0]
+    index = np.searchsorted(arr, item, side="right")
+    return arr[index - 1]
+
+
+
+ +
+ + + +
+ + + +

+join_by_left(key, r1, r2, mask=True) + + +

+ +
+ +

左连接 r1, r2 by key

+

如果r1中存在r2中没有的行,则该行对应的r2中的那些字段将被mask,或者填充随机数。 +same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys

+

Reference: stackoverflow

+ +

Examples:

+
>>> # to join the following
+>>> # [[ 1, 2],
+>>> #  [ 1, 3],   x   [[1, 5],
+>>> #  [ 2, 3]]        [4, 7]]
+>>> # only first two rows in left will be joined
+
+
>>> r1 = np.array([(1, 2), (1,3), (2,3)], dtype=[('seq', 'i4'), ('score', 'i4')])
+>>> r2 = np.array([(1, 5), (4,7)], dtype=[('seq', 'i4'), ('age', 'i4')])
+>>> joined = join_by_left('seq', r1, r2)
+>>> print(joined)
+[(1, 2, 5) (1, 3, 5) (2, 3, --)]
+
+
>>> print(joined.dtype)
+(numpy.record, [('seq', '<i4'), ('score', '<i4'), ('age', '<i4')])
+
+
>>> joined[2][2]
+masked
+
+
>>> joined.tolist()[2][2] == None
+True
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
key

join关键字

required
r1

数据集1

required
r2

数据集2

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription

a numpy array

+
+ Source code in omicron/extensions/np.py +
def join_by_left(key, r1, r2, mask=True):
+    """左连接 `r1`, `r2` by `key`
+
+    如果`r1`中存在`r2`中没有的行,则该行对应的`r2`中的那些字段将被mask,或者填充随机数。
+    same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys
+
+    [Reference: stackoverflow](https://stackoverflow.com/a/53261882/13395693)
+
+    Examples:
+        >>> # to join the following
+        >>> # [[ 1, 2],
+        >>> #  [ 1, 3],   x   [[1, 5],
+        >>> #  [ 2, 3]]        [4, 7]]
+        >>> # only first two rows in left will be joined
+
+        >>> r1 = np.array([(1, 2), (1,3), (2,3)], dtype=[('seq', 'i4'), ('score', 'i4')])
+        >>> r2 = np.array([(1, 5), (4,7)], dtype=[('seq', 'i4'), ('age', 'i4')])
+        >>> joined = join_by_left('seq', r1, r2)
+        >>> print(joined)
+        [(1, 2, 5) (1, 3, 5) (2, 3, --)]
+
+        >>> print(joined.dtype)
+        (numpy.record, [('seq', '<i4'), ('score', '<i4'), ('age', '<i4')])
+
+        >>> joined[2][2]
+        masked
+
+        >>> joined.tolist()[2][2] == None
+        True
+
+    Args:
+        key : join关键字
+        r1 : 数据集1
+        r2 : 数据集2
+
+    Returns:
+        a numpy array
+    """
+    # figure out the dtype of the result array
+    descr1 = r1.dtype.descr
+    descr2 = [d for d in r2.dtype.descr if d[0] not in r1.dtype.names]
+    descrm = descr1 + descr2
+
+    # figure out the fields we'll need from each array
+    f1 = [d[0] for d in descr1]
+    f2 = [d[0] for d in descr2]
+
+    # cache the number of columns in f1
+    ncol1 = len(f1)
+
+    # get a dict of the rows of r2 grouped by key
+    rows2 = {}
+    for row2 in r2:
+        rows2.setdefault(row2[key], []).append(row2)
+
+    # figure out how many rows will be in the result
+    nrowm = 0
+    for k1 in r1[key]:
+        if k1 in rows2:
+            nrowm += len(rows2[k1])
+        else:
+            nrowm += 1
+
+    # allocate the return array
+    # ret = np.full((nrowm, ), fill, dtype=descrm)
+    _ret = np.recarray(nrowm, dtype=descrm)
+    if mask:
+        ret = np.ma.array(_ret, mask=True)
+    else:
+        ret = _ret
+
+    # merge the data into the return array
+    i = 0
+    for row1 in r1:
+        if row1[key] in rows2:
+            for row2 in rows2[row1[key]]:
+                ret[i] = tuple(row1[f1]) + tuple(row2[f2])
+                i += 1
+        else:
+            for j in range(ncol1):
+                ret[i][j] = row1[j]
+            i += 1
+
+    return ret
+
+
+
+ +
+ + + +
+ + + +

+numpy_append_fields(base, names, data, dtypes) + + +

+ +
+ +

给现有的数组base增加新的字段

+

实现了numpy.lib.recfunctions.rec_append_fields的功能。提供这个功能,是因为rec_append_fields不能处理data元素的类型为Object的情况。

+

新增的数据列将顺序排列在其它列的右边。

+ +

Examples:

+
>>> # 新增单个字段
+>>> import numpy
+>>> old = np.array([i for i in range(3)], dtype=[('col1', '<f4')])
+>>> new_list = [2 * i for i in range(3)]
+>>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '<f4')])
+>>> print(res)
+...
+[(0., 0.) (1., 2.) (2., 4.)]
+
+
>>> # 新增多个字段
+>>> data = [res['col1'].tolist(), res['new_col'].tolist()]
+>>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', '<f4'), ('col4', '<f4')]))
+...
+[(0., 0., 0.) (1., 1., 2.) (2., 2., 4.)]
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
base[numpy.array]

基础数组

required
names[type]

新增字段的名字,可以是字符串(单字段的情况),也可以是字符串列表

required
datalist

增加的字段的数据,list类型

required
dtypes[type]

新增字段的dtype

required
+
+ Source code in omicron/extensions/np.py +
def numpy_append_fields(
+    base: np.ndarray, names: Union[str, List[str]], data: List, dtypes: List
+) -> np.ndarray:
+    """给现有的数组`base`增加新的字段
+
+    实现了`numpy.lib.recfunctions.rec_append_fields`的功能。提供这个功能,是因为`rec_append_fields`不能处理`data`元素的类型为Object的情况。
+
+    新增的数据列将顺序排列在其它列的右边。
+
+    Example:
+        >>> # 新增单个字段
+        >>> import numpy
+        >>> old = np.array([i for i in range(3)], dtype=[('col1', '<f4')])
+        >>> new_list = [2 * i for i in range(3)]
+        >>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '<f4')])
+        >>> print(res)
+        ... # doctest: +NORMALIZE_WHITESPACE
+        [(0., 0.) (1., 2.) (2., 4.)]
+
+        >>> # 新增多个字段
+        >>> data = [res['col1'].tolist(), res['new_col'].tolist()]
+        >>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', '<f4'), ('col4', '<f4')]))
+        ... # doctest: +NORMALIZE_WHITESPACE
+        [(0., 0., 0.) (1., 1., 2.) (2., 2., 4.)]
+
+    Args:
+        base ([numpy.array]): 基础数组
+        names ([type]): 新增字段的名字,可以是字符串(单字段的情况),也可以是字符串列表
+        data (list): 增加的字段的数据,list类型
+        dtypes ([type]): 新增字段的dtype
+    """
+    if isinstance(names, str):
+        names = [names]
+        data = [data]
+
+    result = np.empty(base.shape, dtype=base.dtype.descr + dtypes)
+    for col in base.dtype.names:
+        result[col] = base[col]
+
+    for i in range(len(names)):
+        result[names[i]] = data[i]
+
+    return result
+
+
+
+ +
+ + + + +
+ + + +

+remove_nan(ts) + + +

+ +
+ +

ts中去除NaN

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def remove_nan(ts: np.ndarray) -> np.ndarray:
+    """从`ts`中去除NaN
+
+    Args:
+        ts (np.array): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    return ts[~np.isnan(ts.astype(float))]
+
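A quick sketch of `remove_nan`, using the `omicron/extensions/np.py` import path shown in the source block above:

```python
import numpy as np

from omicron.extensions.np import remove_nan  # import path as shown above

ts = np.array([1.0, np.nan, 2.0, np.nan, 3.0])
print(remove_nan(ts))  # [1. 2. 3.]
```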
+
+
+ +
+ + + +
+ + + +

+replace_zero(ts, replacement=None) + + +

+ +
+ +

将ts中的0替换为前值, 处理volume数据时常用到

+

如果提供了replacement, 则替换为replacement

+ +
+ Source code in omicron/extensions/np.py +
def replace_zero(ts: np.ndarray, replacement=None) -> np.ndarray:
+    """将ts中的0替换为前值, 处理volume数据时常用用到
+
+    如果提供了replacement, 则替换为replacement
+
+    """
+    if replacement is not None:
+        return np.where(ts == 0, replacement, ts)
+
+    if np.all(ts == 0):
+        raise ValueError("all of ts are 0")
+
+    if ts[0] == 0:
+        idx = np.argwhere(ts != 0)[0]
+        ts[0] = ts[idx]
+
+    mask = ts == 0
+    idx = np.where(~mask, np.arange(mask.size), 0)
+    np.maximum.accumulate(idx, out=idx)
+    return ts[idx]
+
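A short sketch of both behaviours (forward-fill and explicit replacement); the import path follows the source location shown above:

```python
import numpy as np

from omicron.extensions.np import replace_zero  # import path as shown above

vol = np.array([3, 0, 0, 5, 0, 7])
print(replace_zero(vol))      # forward-fill zeros: [3 3 3 5 5 7]
print(replace_zero(vol, -1))  # replace zeros with -1: [ 3 -1 -1  5 -1  7]
```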
+
+
+ +
+ + + +
+ + + +

+rolling(x, win, func) + + +

+ +
+ +

对序列x进行窗口滑动计算。

+

如果func要实现的功能是argmax, argmin, max, mean, median, min, rank, std, sum, var等,请使用bottleneck中的move_argmax, move_argmin, move_max, move_mean, move_median, move_min, move_rank, move_std, move_sum, move_var。这些函数的性能更好。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
x[type]

[description]

required
win[type]

[description]

required
func[type]

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
[type]

[description]

+
+ Source code in omicron/extensions/np.py +
def rolling(x, win, func):
+    """对序列`x`进行窗口滑动计算。
+
+    如果`func`要实现的功能是argmax, argmin, max, mean, median, min, rank, std, sum, var等,请使用bottleneck中的move_argmax, move_argmin, move_max, move_mean, move_median, move_min, move_rank, move_std, move_sum, move_var。这些函数的性能更好。
+
+    Args:
+        x ([type]): [description]
+        win ([type]): [description]
+        func ([type]): [description]
+
+    Returns:
+        [type]: [description]
+    """
+    results = []
+    for subarray in sliding_window_view(x, window_shape=win):
+        results.append(func(subarray))
+
+    return np.array(results)
+
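A brief sketch with a custom window function (for the common reductions listed above, bottleneck's move_* functions are the better choice); the import path follows the source location shown above:

```python
import numpy as np

from omicron.extensions.np import rolling  # import path as shown above

x = np.array([1.0, 3.0, 2.0, 5.0, 4.0, 6.0])
# peak-to-peak range over a sliding window of 3
print(rolling(x, 3, np.ptp))  # [2. 3. 3. 2.]
```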
+
+
+ +
+ + + +
+ + + +

+shift(arr, start, offset) + + +

+ +
+ +

在numpy数组arr中,找到start(或者最接近的一个),取offset对应的元素。

+

要求arr已排序。offset为正,表明向后移位;offset为负,表明向前移位

+ +

Examples:

+
>>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+>>> shift(arr, 20050104, 1)
+20050105
+
+
>>> shift(arr, 20050105, -1)
+20050104
+
+
>>> # 起始点已右越界,且向右shift,返回起始点
+>>> shift(arr, 20050120, 1)
+20050120
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arr

已排序的数组

required
start

numpy可接受的数据类型

required
offsetint

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription

移位后得到的元素值

+
+ Source code in omicron/extensions/np.py +
def shift(arr, start, offset):
+    """在numpy数组arr中,找到start(或者最接近的一个),取offset对应的元素。
+
+    要求`arr`已排序。`offset`为正,表明向后移位;`offset`为负,表明向前移位
+
+    Examples:
+        >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
+        >>> shift(arr, 20050104, 1)
+        20050105
+
+        >>> shift(arr, 20050105, -1)
+        20050104
+
+        >>> # 起始点已右越界,且向右shift,返回起始点
+        >>> shift(arr, 20050120, 1)
+        20050120
+
+
+    Args:
+        arr : 已排序的数组
+        start : numpy可接受的数据类型
+        offset (int): [description]
+
+    Returns:
+        移位后得到的元素值
+    """
+    pos = np.searchsorted(arr, start, side="right")
+
+    if pos + offset - 1 >= len(arr):
+        return start
+    else:
+        return arr[pos + offset - 1]
+
+
+
+ +
+ + + +
+ + + +

+smallest_n_argpos(ts, n) + + +

+ +
+ +

get the smallest n elements and return their positions (argpos), ordered ascending by value (min->max)

+ +

Examples:

+
>>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+array([8, 7])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

输入的数组

required
nint

取最小的n个元素

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def smallest_n_argpos(ts: np.array, n: int) -> np.array:
+    """get smallest n (min->max) elements and return argpos which its value ordered in ascent
+
+    Example:
+        >>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+        array([8, 7])
+
+    Args:
+        ts (np.array): 输入的数组
+        n (int): 取最小的n个元素
+
+    Returns:
+        np.array: [description]
+    """
+    return np.argsort(ts)[:n]
+
+
+
+ +
+ + + +
+ + + +

+to_pydatetime(tm) + + +

+ +
+ +

将numpy.datetime64对象转换成为python的datetime对象

+

numpy.ndarray.item()方法可用以将任何numpy对象转换成python对象,推荐在任何适用的地方使用.item()方法,而不是本方法。示例: +

1
+2
+3
+4
    arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]')
+    arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0)
+
+    arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0)
+

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tm

the input numpy datetime object

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.datetime

python datetime object

.. deprecated:: 2.0.0 use tm.item() instead

+ +
+ Source code in omicron/extensions/np.py +
@deprecated("2.0.0", details="use `tm.item()` instead")
+def to_pydatetime(tm: np.datetime64) -> datetime.datetime:
+    """将numpy.datetime64对象转换成为python的datetime对象
+
+    numpy.ndarray.item()方法可用以将任何numpy对象转换成python对象,推荐在任何适用的地方使用.item()方法,而不是本方法。示例:
+    ```
+        arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]')
+        arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0)
+
+        arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0)
+    ```
+
+    Args:
+        tm : the input numpy datetime object
+
+    Returns:
+        python datetime object
+    """
+    unix_epoch = np.datetime64(0, "s")
+    one_second = np.timedelta64(1, "s")
+    seconds_since_epoch = (tm - unix_epoch) / one_second
+
+    return datetime.datetime.utcfromtimestamp(seconds_since_epoch)
+
+
+
+ +
+ + + +
+ + + +

+top_n_argpos(ts, n) + + +

+ +
+ +

get the top n elements and return their positions (argpos), ordered descending by value (max->min)

+ +

Examples:

+
>>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+array([3, 4])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
nint

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/extensions/np.py +
def top_n_argpos(ts: np.array, n: int) -> np.array:
+    """get top n (max->min) elements and return argpos which its value ordered in descent
+
+    Example:
+        >>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
+        array([3, 4])
+
+    Args:
+        ts (np.array): [description]
+        n (int): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    ts_ = np.copy(ts)
+    ts_[np.isnan(ts_)] = -np.inf
+    return np.argsort(ts_)[-n:][::-1]
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + +
+ +
+ +
+ +

Notify package

+ + +
+ + +
+ + + + +
+ + + + + + + + + + +
+ + + +

+ dingtalk + + + +

+ +
+ + + + +
+ + + + + + + + + +
+ + + +

+ +DingTalkMessage + + + +

+ +
+ +

钉钉的机器人消息推送类,封装了常用的消息类型以及加密算法 +需要在配置文件中配置钉钉的机器人的access_token +如果配置了加签,需要在配置文件中配置钉钉的机器人的secret +如果配置了自定义关键词,需要在配置文件中配置钉钉的机器人的keyword,多个关键词用英文逗号分隔 +全部的配置文件示例如下, 其中secret和keyword可以不配置, access_token必须配置 +notify: + dingtalk_access_token: xxxx + dingtalk_secret: xxxx

+ +
+ Source code in omicron/notify/dingtalk.py +
class DingTalkMessage:
+    """
+    钉钉的机器人消息推送类,封装了常用的消息类型以及加密算法
+    需要在配置文件中配置钉钉的机器人的access_token
+    如果配置了加签,需要在配置文件中配置钉钉的机器人的secret
+    如果配置了自定义关键词,需要在配置文件中配置钉钉的机器人的keyword,多个关键词用英文逗号分隔
+    全部的配置文件示例如下, 其中secret和keyword可以不配置, access_token必须配置
+    notify:
+      dingtalk_access_token: xxxx
+      dingtalk_secret: xxxx
+    """
+
+    url = "https://oapi.dingtalk.com/robot/send"
+
+    @classmethod
+    def _get_access_token(cls):
+        """获取钉钉机器人的access_token"""
+        if hasattr(cfg.notify, "dingtalk_access_token"):
+            return cfg.notify.dingtalk_access_token
+        else:
+            logger.error(
+                "Dingtalk not configured, please add the following items:\n"
+                "notify:\n"
+                "  dingtalk_access_token: xxxx\n"
+                "  dingtalk_secret: xxxx\n"
+            )
+            raise ConfigError("dingtalk_access_token not found")
+
+    @classmethod
+    def _get_secret(cls):
+        """获取钉钉机器人的secret"""
+        if hasattr(cfg.notify, "dingtalk_secret"):
+            return cfg.notify.dingtalk_secret
+        else:
+            return None
+
+    @classmethod
+    def _get_url(cls):
+        """获取钉钉机器人的消息推送地址,将签名和时间戳拼接在url后面"""
+        access_token = cls._get_access_token()
+        url = f"{cls.url}?access_token={access_token}"
+        secret = cls._get_secret()
+        if secret:
+            timestamp, sign = cls._get_sign(secret)
+            url = f"{url}&timestamp={timestamp}&sign={sign}"
+        return url
+
+    @classmethod
+    def _get_sign(cls, secret: str):
+        """获取签名发送给钉钉机器人"""
+        timestamp = str(round(time.time() * 1000))
+        secret_enc = secret.encode("utf-8")
+        string_to_sign = "{}\n{}".format(timestamp, secret)
+        string_to_sign_enc = string_to_sign.encode("utf-8")
+        hmac_code = hmac.new(
+            secret_enc, string_to_sign_enc, digestmod=hashlib.sha256
+        ).digest()
+        sign = urllib.parse.quote_plus(base64.b64encode(hmac_code))
+        return timestamp, sign
+
+    @classmethod
+    def _send(cls, msg):
+        """发送消息到钉钉机器人"""
+        url = cls._get_url()
+        response = httpx.post(url, json=msg, timeout=30)
+        if response.status_code != 200:
+            logger.error(
+                f"failed to send message, content: {msg}, response from Dingtalk: {response.content.decode()}"
+            )
+            return
+        rsp = json.loads(response.content)
+        if rsp.get("errcode") != 0:
+            logger.error(
+                f"failed to send message, content: {msg}, response from Dingtalk: {rsp}"
+            )
+        return response.content.decode()
+
+    @classmethod
+    async def _send_async(cls, msg):
+        """发送消息到钉钉机器人"""
+        url = cls._get_url()
+        async with httpx.AsyncClient() as client:
+            r = await client.post(url, json=msg, timeout=30)
+            if r.status_code != 200:
+                logger.error(
+                    f"failed to send message, content: {msg}, response from Dingtalk: {r.content.decode()}"
+                )
+                return
+            rsp = json.loads(r.content)
+            if rsp.get("errcode") != 0:
+                logger.error(
+                    f"failed to send message, content: {msg}, response from Dingtalk: {rsp}"
+                )
+            return r.content.decode()
+
+    @classmethod
+    @deprecated("2.0.0", details="use function `ding` instead")
+    def text(cls, content):
+        msg = {"text": {"content": content}, "msgtype": "text"}
+        return cls._send(msg)
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+text(cls, content) + + + classmethod + + +

+ +
+ +

.. deprecated:: 2.0.0 use function ding instead

+ +
+ Source code in omicron/notify/dingtalk.py +
@classmethod
+@deprecated("2.0.0", details="use function `ding` instead")
+def text(cls, content):
+    msg = {"text": {"content": content}, "msgtype": "text"}
+    return cls._send(msg)
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + +
+ + + +

+ding(msg) + + +

+ +
+ +

发送消息到钉钉机器人

+

支持发送纯文本消息和markdown格式的文本消息。如果要发送markdown格式的消息,请通过字典传入,必须包含"title"和"text"两个字段。更详细信息,请见钉钉开放平台文档

+
+Important +

必须在异步线程(即运行asyncio loop的线程)中调用此方法,否则会抛出异常。 +此方法返回一个Awaitable,您可以等待它完成,也可以忽略返回值,此时它将作为一个后台任务执行,但完成的时间不确定。

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
msgUnion[str, dict]

待发送消息。

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Awaitable

发送消息的后台任务。您可以使用此返回句柄来取消任务。

+
+ Source code in omicron/notify/dingtalk.py +
def ding(msg: Union[str, dict]) -> Awaitable:
+    """发送消息到钉钉机器人
+
+    支持发送纯文本消息和markdown格式的文本消息。如果要发送markdown格式的消息,请通过字典传入,必须包含"title"和"text"两个字段。更详细信息,请见[钉钉开放平台文档](https://open.dingtalk.com/document/orgapp-server/message-type)
+
+    ???+ Important
+        必须在异步线程(即运行asyncio loop的线程)中调用此方法,否则会抛出异常。
+        此方法返回一个Awaitable,您可以等待它完成,也可以忽略返回值,此时它将作为一个后台任务执行,但完成的时间不确定。
+
+    Args:
+        msg: 待发送消息。
+
+    Returns:
+        发送消息的后台任务。您可以使用此返回句柄来取消任务。
+    """
+    if isinstance(msg, str):
+        msg_ = {"text": {"content": msg}, "msgtype": "text"}
+    elif isinstance(msg, dict):
+        msg_ = {
+            "msgtype": "markdown",
+            "markdown": {"title": msg["title"], "text": msg["text"]},
+        }
+    else:
+        raise TypeError
+
+    task = asyncio.create_task(DingTalkMessage._send_async(msg_))
+    return task
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + +
+ + + +

+ mail + + + +

+ +
+ + + + +
+ + + + + + + + + +
+ + + +

+compose(subject, plain_txt=None, html=None, attachment=None) + + +

+ +
+ +

编写MIME邮件。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
subjectstr

邮件主题

required
plain_txtstr

纯文本格式的邮件内容

None
htmlstr

html格式的邮件内容. Defaults to None.

None
attachmentstr

附件文件名

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
EmailMessage

MIME mail

+
+ Source code in omicron/notify/mail.py +
def compose(
+    subject: str, plain_txt: str = None, html: str = None, attachment: str = None
+) -> EmailMessage:
+    """编写MIME邮件。
+
+    Args:
+        subject (str): 邮件主题
+        plain_txt (str): 纯文本格式的邮件内容
+        html (str, optional): html格式的邮件内容. Defaults to None.
+        attachment (str, optional): 附件文件名
+    Returns:
+        MIME mail
+    """
+    msg = EmailMessage()
+
+    msg["Subject"] = subject
+
+    if html:
+        msg.preamble = plain_txt or ""
+        msg.set_content(html, subtype="html")
+    else:
+        assert plain_txt, "Either plain_txt or html is required."
+        msg.set_content(plain_txt)
+
+    if attachment:
+        ctype, encoding = mimetypes.guess_type(attachment)
+        if ctype is None or encoding is not None:
+            ctype = "application/octet-stream"
+
+        maintype, subtype = ctype.split("/", 1)
+        with open(attachment, "rb") as f:
+            msg.add_attachment(
+                f.read(), maintype=maintype, subtype=subtype, filename=attachment
+            )
+
+    return msg
+
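A hedged usage sketch of `compose`; the subject, body and the attachment file name are placeholders:

```python
from omicron.notify.mail import compose  # import path as shown above

# plain-text mail
msg = compose("daily report", plain_txt="all jobs finished")

# html mail with an attachment ("report.xlsx" is a hypothetical local file)
msg = compose(
    "daily report",
    html="<h1>Report</h1><p>all jobs finished</p>",
    attachment="report.xlsx",
)
```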
+
+
+ +
+ + + +
+ + + +

+mail_notify(subject=None, body=None, msg=None, html=False, receivers=None) + + +

+ +
+ +

发送邮件通知。

+

发送者、接收者及邮件服务器等配置请通过cfg4py配置:

+

1
+2
+3
+4
+5
notify:
+    mail_from: aaron_yang@jieyu.ai
+    mail_to:
+        - code@jieyu.ai
+    mail_server: smtp.ym.163.com
+
+验证密码请通过环境变量MAIL_PASSWORD来配置。

+

subject/body与msg必须提供其一。

+
+Important +

必须在异步线程(即运行asyncio loop的线程)中调用此方法,否则会抛出异常。 +此方法返回一个Awaitable,您可以等待它完成,也可以忽略返回值,此时它将作为一个后台任务执行,但完成的时间不确定。

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
msgEmailMessage

[description]. Defaults to None.

None
subjectstr

[description]. Defaults to None.

None
bodystr

[description]. Defaults to None.

None
htmlbool

body是否按html格式处理? Defaults to False.

False
receiversList[str], Optional

接收者信息。如果不提供,将使用预先配置的接收者信息。

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Awaitable

发送消息的后台任务。您可以使用此返回句柄来取消任务。

+
+ Source code in omicron/notify/mail.py +
def mail_notify(
+    subject: str = None,
+    body: str = None,
+    msg: EmailMessage = None,
+    html=False,
+    receivers=None,
+) -> Awaitable:
+    """发送邮件通知。
+
+    发送者、接收者及邮件服务器等配置请通过cfg4py配置:
+
+    ```
+    notify:
+        mail_from: aaron_yang@jieyu.ai
+        mail_to:
+            - code@jieyu.ai
+        mail_server: smtp.ym.163.com
+    ```
+    验证密码请通过环境变量`MAIL_PASSWORD`来配置。
+
+    subject/body与msg必须提供其一。
+
+    ???+ Important
+        必须在异步线程(即运行asyncio loop的线程)中调用此方法,否则会抛出异常。
+        此方法返回一个Awaitable,您可以等待它完成,也可以忽略返回值,此时它将作为一个后台任务执行,但完成的时间不确定。
+
+    Args:
+        msg (EmailMessage, optional): [description]. Defaults to None.
+        subject (str, optional): [description]. Defaults to None.
+        body (str, optional): [description]. Defaults to None.
+        html (bool, optional): body是否按html格式处理? Defaults to False.
+        receivers (List[str], Optional): 接收者信息。如果不提供,将使用预先配置的接收者信息。
+
+    Returns:
+        发送消息的后台任务。您可以使用此返回句柄来取消任务。
+    """
+    if all([msg is not None, subject or body]):
+        raise TypeError("msg参数与subject/body只能提供其中之一")
+    elif all([msg is None, subject is None, body is None]):
+        raise TypeError("必须提供msg参数或者subjecdt/body参数")
+
+    if msg is None:
+        if html:
+            msg = compose(subject, html=body)
+        else:
+            msg = compose(subject, plain_txt=body)
+
+    cfg = cfg4py.get_instance()
+    if not receivers:
+        receivers = cfg.notify.mail_to
+
+    password = os.environ.get("MAIL_PASSWORD")
+    return send_mail(
+        cfg.notify.mail_from, receivers, password, msg, host=cfg.notify.mail_server
+    )
+
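A minimal sketch, assuming the cfg4py `notify` section and the `MAIL_PASSWORD` environment variable described above are already configured; subject and body are placeholders:

```python
import asyncio

from omicron.notify.mail import mail_notify  # import path as shown above

async def main():
    task = mail_notify(subject="backtest finished", body="sharpe: 1.8")
    await task  # or drop the await and let it run as a background task

asyncio.run(main())
```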
+
+
+ +
+ + + +
+ + + +

+send_mail(sender, receivers, password, msg=None, host=None, port=25, cc=None, bcc=None, subject=None, body=None, username=None) + + +

+ +
+ +

发送邮件通知。

+

如果只发送简单的文本邮件,请使用 send_mail(sender, receivers, subject=subject, plain=plain)。如果要发送较复杂的带html和附件的邮件,请先调用compose()生成一个EmailMessage,然后再调用send_mail(sender, receivers, msg)来发送邮件。

+
+Important +

必须在异步线程(即运行asyncio loop的线程)中调用此方法,否则会抛出异常。 +此方法返回一个Awaitable,您可以等待它完成,也可以忽略返回值,此时它将作为一个后台任务执行,但完成的时间不确定。

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
senderstr

[description]

required
receiversList[str]

[description]

required
msgEmailMessage

[description]. Defaults to None.

None
hoststr

[description]. Defaults to None.

None
portint

[description]. Defaults to 25.

25
ccList[str]

[description]. Defaults to None.

None
bccList[str]

[description]. Defaults to None.

None
subjectstr

[description]. Defaults to None.

None
bodystr

[description]. Defaults to None.

required
usernamestr

the username used to logon to mail server. if not provided, then sender is used.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Awaitable

发送消息的后台任务。您可以使用此返回句柄来取消任务。

+
+ Source code in omicron/notify/mail.py +
@retry(aiosmtplib.errors.SMTPConnectError, tries=3, backoff=2, delay=30, logger=logger)
+def send_mail(
+    sender: str,
+    receivers: List[str],
+    password: str,
+    msg: EmailMessage = None,
+    host: str = None,
+    port: int = 25,
+    cc: List[str] = None,
+    bcc: List[str] = None,
+    subject: str = None,
+    body: str = None,
+    username: str = None,
+) -> Awaitable:
+    """发送邮件通知。
+
+    如果只发送简单的文本邮件,请使用 send_mail(sender, receivers, subject=subject, plain=plain)。如果要发送较复杂的带html和附件的邮件,请先调用compose()生成一个EmailMessage,然后再调用send_mail(sender, receivers, msg)来发送邮件。
+
+    ???+ Important
+        必须在异步线程(即运行asyncio loop的线程)中调用此方法,否则会抛出异常。
+        此方法返回一个Awaitable,您可以等待它完成,也可以忽略返回值,此时它将作为一个后台任务执行,但完成的时间不确定。
+
+    Args:
+        sender (str): [description]
+        receivers (List[str]): [description]
+        msg (EmailMessage, optional): [description]. Defaults to None.
+        host (str, optional): [description]. Defaults to None.
+        port (int, optional): [description]. Defaults to 25.
+        cc (List[str], optional): [description]. Defaults to None.
+        bcc (List[str], optional): [description]. Defaults to None.
+        subject (str, optional): [description]. Defaults to None.
+        body (str, optional): [description]. Defaults to None.
+        username (str, optional): the username used to logon to mail server. if not provided, then `sender` is used.
+
+    Returns:
+        发送消息的后台任务。您可以使用此返回句柄来取消任务。
+    """
+    if all([msg is not None, subject is not None or body is not None]):
+        raise TypeError("msg参数与subject/body只能提供其中之一")
+    elif all([msg is None, subject is None, body is None]):
+        raise TypeError("必须提供msg参数或者subjecdt/body参数")
+
+    msg = msg or EmailMessage()
+
+    if isinstance(receivers, str):
+        receivers = [receivers]
+
+    msg["From"] = sender
+    msg["To"] = ", ".join(receivers)
+
+    if subject:
+        msg["subject"] = subject
+
+    if body:
+        msg.set_content(body)
+
+    if cc:
+        msg["Cc"] = ", ".join(cc)
+    if bcc:
+        msg["Bcc"] = ", ".join(bcc)
+
+    username = username or sender
+
+    if host is None:
+        host = sender.split("@")[-1]
+
+    task = asyncio.create_task(
+        aiosmtplib.send(
+            msg, hostname=host, port=port, username=username, password=password
+        )
+    )
+
+    return task
+
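A minimal sketch combining `compose` and `send_mail`; all addresses, the password and the SMTP host are placeholders:

```python
import asyncio

from omicron.notify.mail import compose, send_mail  # import paths as shown above

async def main():
    msg = compose("daily report", plain_txt="all jobs finished")
    task = send_mail(
        "bot@example.com",        # sender (placeholder)
        ["ops@example.com"],      # receivers (placeholder)
        "secret",                 # password (placeholder)
        msg=msg,
        host="smtp.example.com",  # SMTP server (placeholder)
    )
    await task

asyncio.run(main())
```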
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + + + +
+ +
+ +
+ +

Backtesting Log Facility

+ + +
+ + +
+ +
+

Info

+

Since 2.0.0.a76

+
+

回测时,打印时间一般要求为回测当时的时间,而非系统时间。这个模块提供了改写日志时间的功能。

+

使用方法:

+

 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
from omicron.core.backtestlog import BacktestLogger
+
+logger = BacktestLogger.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+handler = logging.StreamHandler()
+
+# 通过bt_date域来设置日期,而不是asctime
+handler.setFormatter(logging.Formatter("%(bt_date)s %(message)s"))
+
+logging.basicConfig(level=logging.INFO, handlers=[handler])
+
+# 调用时与普通日志一样,但要增加一个date参数
+
+logger.info("this is info", date=datetime.date(2022, 3, 1))
+
+上述代码将输出:

+
1
2022-03-01 this is info
+
+

使用本日志的核心是上述代码中的第3行和第9行,最后,在输出日志时加上date=...,如第15行所示。

+

注意在第3行,通常是logging.getLogger(__name__),而这里是BacktestLogger.getLogger(__name__)。

+

如果上述调用中没有传入date,则将使用调用时间,此时行为跟原日志系统一致。

+
+

Warning

+

当调用logger.exception时,不能传入date参数。

+
+

配置文件示例

+

如果要通过配置文件来配置,可使用以下示例: +

formatters:
  backtest:
    format: '%(bt_date)s | %(message)s'

handlers:
  backtest:
    class: logging.StreamHandler
    formatter: backtest

loggers:
  omicron.base.strategy:
    level: INFO
    handlers: [backtest]
    propagate: false

+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/plotting/candlestick/index.html b/2.0.0/api/plotting/candlestick/index.html new file mode 100644 index 00000000..02e8f686 --- /dev/null +++ b/2.0.0/api/plotting/candlestick/index.html @@ -0,0 +1,2644 @@ + + + + + + + + + + + + + + + + CandleStick - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + + +
+ + +
+ +

绘制K线图。

+

用法示例

+

注意示例需要在notebook中运行,否则无法生成图。

+
1
+2
+3
+4
+5
from omicron.plotting.candlestick import Candlestick
+
+bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)
+cs = Candlestick(bars)
+cs.plot()
+
+

这将生成下图: +

+

默认地,将显示成交量和RSI指标两个副图。可以通过以下方式来定制: +

1
+2
+3
+4
+5
cs = Candlestick(bars, show_volume=True,
+    show_rsi=True,
+    show_peaks=False
+)
+cs.plot()
+

+

增加标记

+

 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
from omicron.plotting.candlestick import Candlestick
+
+bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)
+cs = Candlestick(bars, 
+        show_volume=True,
+        show_rsi=False,
+        show_peaks=True
+    )
+
+cs.add_marks([20, 50], bars["high"][[20, 50]] * 1.02, name="marks")
+cs.plot()
+
+这将在k线上显示两个加号: +

+

显示布林带

+

 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
from omicron.plotting.candlestick import Candlestick
+
+bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)
+cs = Candlestick(bars, 
+        show_volume=True,
+        show_rsi=False,
+        show_peaks=True
+    )
+
+cs.add_indicator("bbands", 20)
+cs.plot()
+
+

+

显示平台

+

 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
from omicron.plotting.candlestick import Candlestick
+
+bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)
+cs = Candlestick(bars, 
+        show_volume=True,
+        show_rsi=False,
+        show_peaks=True
+    )
+
+
+cs.mark_bbox()
+cs.plot()
+
+

+ + + +
+ + + + + + + +
+ + + +

+ +Candlestick + + + +

+ +
+ + +
+ Source code in omicron/plotting/candlestick.py +
class Candlestick:
+    RED = "#FF4136"
+    GREEN = "#3DAA70"
+    TRANSPARENT = "rgba(0,0,0,0)"
+    LIGHT_GRAY = "rgba(0, 0, 0, 0.1)"
+    MA_COLORS = {
+        5: "#1432F5",
+        10: "#EB52F7",
+        20: "#C0C0C0",
+        30: "#882111",
+        60: "#5E8E28",
+        120: "#4294F7",
+        250: "#F09937",
+    }
+
+    def __init__(
+        self,
+        bars: np.ndarray,
+        ma_groups: List[int] = None,
+        title: str = None,
+        show_volume=True,
+        show_rsi=True,
+        show_peaks=False,
+        width=None,
+        height=None,
+        **kwargs,
+    ):
+        """构造函数
+
+        Args:
+            bars: 行情数据
+            ma_groups: 均线组参数。比如[5, 10, 20]表明向k线图中添加5, 10, 20日均线。如果不提供,将从数组[5, 10, 20, 30, 60, 120, 250]中取直到与`len(bars) - 5`匹配的参数为止。比如bars长度为30,则将取[5, 10, 20]来绘制均线。
+            title: k线图的标题
+            show_volume: 是否显示成交量图
+            show_rsi: 是否显示RSI图。缺省显示参数为6的RSI图。
+            show_peaks: 是否标记检测出来的峰跟谷。
+            width: the width in 'px' units of the figure
+            height: the height in 'px' units of the figure
+        Keyword Args:
+            rsi_win int: default is 6
+        """
+        self.title = title
+        self.bars = bars
+        self.width = width
+        self.height = height
+
+        # traces for main area
+        self.main_traces = {}
+
+        # traces for indicator area
+        self.ind_traces = {}
+
+        self.ticks = self._format_tick(bars["frame"])
+        self._bar_close = array_math_round(bars["close"], 2).astype(np.float64)
+
+        # for every candlestick, it must contain a candlestick plot
+        cs = go.Candlestick(
+            x=self.ticks,
+            open=bars["open"],
+            high=bars["high"],
+            low=bars["low"],
+            close=self._bar_close,
+            line=dict({"width": 1}),
+            name="K线",
+            **kwargs,
+        )
+
+        # Set line and fill colors
+        cs.increasing.fillcolor = "rgba(255,255,255,0.9)"
+        cs.increasing.line.color = self.RED
+        cs.decreasing.fillcolor = self.GREEN
+        cs.decreasing.line.color = self.GREEN
+
+        self.main_traces["ohlc"] = cs
+
+        if show_volume:
+            self.add_indicator("volume")
+
+        if show_peaks:
+            self.add_main_trace("peaks")
+
+        if show_rsi:
+            self.add_indicator("rsi", win=kwargs.get("rsi_win", 6))
+
+        # 增加均线
+        if ma_groups is None:
+            nbars = len(bars)
+            if nbars < 9:
+                ma_groups = []
+            else:
+                groups = np.array([5, 10, 20, 30, 60, 120, 250])
+                idx = max(np.argwhere(groups < (nbars - 5))).item() + 1
+                ma_groups = groups[:idx]
+
+        for win in ma_groups:
+            name = f"ma{win}"
+            if win > len(bars):
+                continue
+            ma = moving_average(self._bar_close, win)
+            line = go.Scatter(
+                y=ma,
+                x=self.ticks,
+                name=name,
+                line=dict(width=1, color=self.MA_COLORS.get(win)),
+            )
+            self.main_traces[name] = line
+
+    @property
+    def figure(self):
+        """返回一个figure对象"""
+        rows = len(self.ind_traces) + 1
+        specs = [[{"secondary_y": False}]] * rows
+        specs[0][0]["secondary_y"] = True
+
+        row_heights = [0.7, *([0.3 / (rows - 1)] * (rows - 1))]
+        print(row_heights)
+        cols = 1
+
+        fig = make_subplots(
+            rows=rows,
+            cols=cols,
+            shared_xaxes=True,
+            vertical_spacing=0.1,
+            subplot_titles=(self.title, *self.ind_traces.keys()),
+            row_heights=row_heights,
+            specs=specs,
+        )
+
+        for _, trace in self.main_traces.items():
+            fig.add_trace(trace, row=1, col=1)
+
+        for i, (_, trace) in enumerate(self.ind_traces.items()):
+            fig.add_trace(trace, row=i + 2, col=1)
+
+        ymin = np.min(self.bars["low"])
+        ymax = np.max(self.bars["high"])
+
+        ylim = [ymin * 0.95, ymax * 1.05]
+
+        # 显示十字光标
+        fig.update_xaxes(
+            showgrid=False,
+            showspikes=True,
+            spikemode="across",
+            spikesnap="cursor",
+            spikecolor="grey",
+            spikedash="solid",
+            spikethickness=1,
+        )
+
+        fig.update_yaxes(
+            showspikes=True,
+            spikemode="across",
+            spikesnap="cursor",
+            spikedash="solid",
+            spikecolor="grey",
+            spikethickness=1,
+            showgrid=True,
+            gridcolor=self.LIGHT_GRAY,
+        )
+
+        fig.update_xaxes(
+            nticks=len(self.bars) // 10,
+            ticklen=10,
+            ticks="outside",
+            minor=dict(nticks=5, ticklen=5, ticks="outside"),
+            row=rows,
+            col=1,
+        )
+
+        # 设置K线显示区域
+        if self.width:
+            win_size = int(self.width // 10)
+        else:
+            win_size = 120
+
+        fig.update_xaxes(
+            type="category", range=[len(self.bars) - win_size, len(self.bars) - 1]
+        )
+
+        fig.update_layout(
+            yaxis=dict(range=ylim),
+            hovermode="x unified",
+            plot_bgcolor=self.TRANSPARENT,
+            xaxis_rangeslider_visible=False,
+        )
+
+        if self.width:
+            fig.update_layout(width=self.width)
+
+        if self.height:
+            fig.update_layout(height=self.height)
+
+        return fig
+
+    def _format_tick(self, tm: np.array) -> NDArray:
+        if tm.item(0).hour == 0:  # assume it's date
+            return np.array(
+                [
+                    f"{x.item().year:02}-{x.item().month:02}-{x.item().day:02}"
+                    for x in tm
+                ]
+            )
+        else:
+            return np.array(
+                [
+                    f"{x.item().month:02}-{x.item().day:02} {x.item().hour:02}:{x.item().minute:02}"
+                    for x in tm
+                ]
+            )
+
+    def _remove_ma(self):
+        traces = {}
+        for name in self.main_traces:
+            if not name.startswith("ma"):
+                traces[name] = self.main_traces[name]
+
+        self.main_traces = traces
+
+    def add_main_trace(self, trace_name: str, **kwargs):
+        """add trace to main plot
+
+        支持的图例类别有peaks, bbox(bounding-box), bt(回测), support_line, resist_line
+        Args:
+            trace_name : 图例名称
+            **kwargs : 其他参数
+
+        """
+        if trace_name == "peaks":
+            self.mark_peaks_and_valleys(
+                kwargs.get("up_thres", 0.03), kwargs.get("down_thres", -0.03)
+            )
+
+        # 标注矩形框
+        elif trace_name == "bbox":
+            self.add_bounding_box(kwargs.get("boxes"))
+
+        # 回测结果
+        elif trace_name == "bt":
+            self.mark_backtest_result(kwargs.get("bt"))
+
+        # 增加直线
+        elif trace_name == "support_line":
+            self.add_line("支撑线", kwargs.get("x"), kwargs.get("y"))
+
+        elif trace_name == "resist_line":
+            self.add_line("压力线", kwargs.get("x"), kwargs.get("y"))
+
+    def add_line(self, trace_name: str, x: List[int], y: List[float]):
+        """在k线图上增加以`x`,`y`表示的一条直线
+
+        Args:
+            trace_name : 图例名称
+            x : x轴坐标,所有的x值都必须属于[0, len(self.bars)]
+            y : y值
+        """
+        line = go.Scatter(x=self.ticks[x], y=y, mode="lines", name=trace_name)
+
+        self.main_traces[trace_name] = line
+
+    def mark_support_resist_lines(
+        self, upthres: float = None, downthres: float = None, use_close=True, win=60
+    ):
+        """在K线图上标注支撑线和压力线
+
+        在`win`个k线内,找出所有的局部峰谷点,并以最高的两个峰连线生成压力线,以最低的两个谷连线生成支撑线。
+
+        Args:
+            upthres : 用来检测峰谷时使用的阈值,参见`omicron.talib.morph.peaks_and_valleys`
+            downthres : 用来检测峰谷时使用的阈值,参见`omicron.talib.morph.peaks_and_valleys`.
+            use_close : 是否使用收盘价来进行检测。如果为False,则使用high来检测压力线,使用low来检测支撑线.
+            win : 检测局部高低点的窗口.
+        """
+        bars = self.bars[-win:]
+        clipped = len(self.bars) - win
+
+        if use_close:
+            support, resist, x_start = support_resist_lines(
+                self._bar_close, upthres, downthres
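A minimal sketch, assuming the `notify.dingtalk_access_token` (and optional secret) configuration described above is present; the message texts are placeholders:

```python
import asyncio

from omicron.notify.dingtalk import ding  # import path as shown above

async def main():
    # plain-text message
    await ding("backtest finished")

    # markdown message: the dict must carry both "title" and "text"
    await ding({"title": "backtest", "text": "**sharpe**: 1.8"})

asyncio.run(main())
```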
+            )
+            x = np.arange(len(bars))[x_start:]
+
+            self.add_main_trace("support_line", x=x + clipped, y=support(x))
+            self.add_main_trace("resist_line", x=x + clipped, y=resist(x))
+
+        else:  # 使用"high"和"low"
+            bars = self.bars[-win:]
+            support, _, x_start = support_resist_lines(bars["low"], upthres, downthres)
+            x = np.arange(len(bars))[x_start:]
+            self.add_main_trace("support_line", x=x + clipped, y=support(x))
+
+            _, resist, x_start = support_resist_lines(bars["high"], upthres, downthres)
+            x = np.arange(len(bars))[x_start:]
+            self.add_main_trace("resist_line", x=x + clipped, y=resist(x))
+
+    def mark_bbox(self, min_size: int = 20):
+        """在k线图上检测并标注矩形框
+
+        Args:
+            min_size : 矩形框的最小长度
+
+        """
+        boxes = plateaus(self._bar_close, min_size)
+        self.add_main_trace("bbox", boxes=boxes)
+
+    def mark_backtest_result(self, result: dict):
+        """标记买卖点和回测数据
+
+        TODO:
+            此方法可能未与backtest返回值同步。此外,在portfolio回测中,不可能在k线图中使用此方法。
+
+        Args:
+            result : 回测结果,需包含"trades"和"assets"两个字段。
+        """
+        trades = result.get("trades")
+        assets = result.get("assets")
+
+        x, y, labels = [], [], []
+        hover = []
+        labels_color = defaultdict(list)
+
+        for trade in trades:
+            trade_date = arrow.get(trade["time"]).date()
+            asset = assets.get(trade_date)
+
+            security = trade["security"]
+            price = trade["price"]
+            volume = trade["volume"]
+
+            side = trade["order_side"]
+
+            x.append(self._format_tick(trade_date))
+
+            bar = self.bars[self.bars["frame"] == trade_date]
+            if side == "买入":
+                hover.append(
+                    f"总资产:{asset}<br><br>{side}:{security}<br>买入价:{price}<br>股数:{volume}"
+                )
+
+                y.append(bar["high"][0] * 1.1)
+                labels.append("B")
+                labels_color["color"].append(self.RED)
+
+            else:
+                y.append(bar["low"][0] * 0.99)
+
+                hover.append(
+                    f"总资产:{asset}<hr><br>{side}:{security}<br>卖出价:{price}<br>股数:{volume}"
+                )
+
+                labels.append("S")
+                labels_color["color"].append(self.GREEN)
+
+                # txt.append(f'{side}:{security}<br>卖出价:{price}<br>股数:{volume}')
+
+        trace = go.Scatter(
+            x=x,
+            y=y,
+            mode="text",
+            text=labels,
+            name="backtest",
+            hovertext=hover,
+            textfont=labels_color,
+        )
+
+        self.main_traces["bs"] = trace
+
+    def mark_peaks_and_valleys(
+        self, up_thres: Optional[float] = None, down_thres: Optional[float] = None
+    ):
+        """在K线图上标注峰谷点
+
+        Args:
+            up_thres : 用来检测峰谷时使用的阈值,参见[omicron.talib.morph.peaks_and_valleys][]
+            down_thres : 用来检测峰谷时使用的阈值,参见[omicron.talib.morph.peaks_and_valleys][]
+
+        """
+        bars = self.bars
+
+        flags = peaks_and_valleys(self._bar_close, up_thres, down_thres)
+
+        # 移除首尾的顶底标记,一般情况下它们都不是真正的顶和底。
+        flags[0] = 0
+        flags[-1] = 0
+
+        marker_margin = (max(bars["high"]) - min(bars["low"])) * 0.05
+        ticks_up = self.ticks[flags == 1]
+        y_up = bars["high"][flags == 1] + marker_margin
+        ticks_down = self.ticks[flags == -1]
+        y_down = bars["low"][flags == -1] - marker_margin
+
+        trace = go.Scatter(
+            mode="markers", x=ticks_up, y=y_up, marker_symbol="triangle-down", name="峰"
+        )
+        self.main_traces["peaks"] = trace
+
+        trace = go.Scatter(
+            mode="markers",
+            x=ticks_down,
+            y=y_down,
+            marker_symbol="triangle-up",
+            name="谷",
+        )
+        self.main_traces["valleys"] = trace
+
+    def add_bounding_box(self, boxes: List[Tuple]):
+        """bbox是标记在k线图上某个区间内的矩形框,它以该区间最高价和最低价为上下边。
+
+        Args:
+            boxes: 每个元素(start, width)表示各个bbox的起点和宽度。
+        """
+        for j, box in enumerate(boxes):
+            x, y = [], []
+            i, width = box
+            if len(x):
+                x.append(None)
+                y.append(None)
+
+            group = self.bars[i : i + width]
+
+            mean = np.mean(group["close"])
+            std = 2 * np.std(group["close"])
+
+            # 落在两个标准差以内的实体最上方和最下方值
+            hc = np.max(group[group["close"] < mean + std]["close"])
+            lc = np.min(group[group["close"] > mean - std]["close"])
+
+            ho = np.max(group[group["open"] < mean + std]["open"])
+            lo = np.min(group[group["open"] > mean - std]["open"])
+
+            h = max(hc, ho)
+            low = min(lo, lc)
+
+            x.extend(self.ticks[[i, i + width - 1, i + width - 1, i, i]])
+            y.extend((h, h, low, low, h))
+
+            hover = f"宽度: {width}<br>振幅: {h/low - 1:.2%}"
+            trace = go.Scatter(x=x, y=y, fill="toself", name=f"平台整理{j}", text=hover)
+            self.main_traces[f"bbox-{j}"] = trace
+
+    def add_indicator(self, indicator: str, **kwargs):
+        """向k线图中增加技术指标
+
+        Args:
+            indicator: 当前支持值有'volume', 'rsi', 'bbands'
+            kwargs: 计算某个indicator时,需要的参数。比如计算bbands时,需要传入均线的window
+        """
+        if indicator == "volume":
+            colors = np.repeat(self.RED, len(self.bars))
+            colors[self.bars["close"] <= self.bars["open"]] = self.GREEN
+
+            trace = go.Bar(
+                x=self.ticks,
+                y=self.bars["volume"],
+                showlegend=False,
+                marker={"color": colors},
+            )
+        elif indicator == "rsi":
+            win = kwargs.get("win")
+            rsi = talib.RSI(self._bar_close, win)  # type: ignore
+            trace = go.Scatter(x=self.ticks, y=rsi, showlegend=False)
+        elif indicator == "bbands":
+            self._remove_ma()
+            win = kwargs.get("win")
+            for name, ind in zip(
+                ["bbands-high", "bbands-mean", "bbands-low"],
+                talib.BBANDS(self._bar_close, win),  # type: ignore
+            ):
+                trace = go.Scatter(x=self.ticks, y=ind, showlegend=True, name=name)
+                self.main_traces[name] = trace
+
+            return
+        else:
+            raise ValueError(f"{indicator} not supported")
+
+        self.ind_traces[indicator] = trace
+
+    def add_marks(
+        self,
+        x: List[int],
+        y: List[float],
+        name: str,
+        marker: str = "cross",
+        color: Optional[str] = None,
+    ):
+        """向k线图中增加标记点"""
+        trace = go.Scatter(
+            x=self.ticks[x],
+            y=y,
+            mode="markers",
+            marker_symbol=marker,
+            marker_color=color,
+            name=name,
+        )
+        self.main_traces[name] = trace
+
+    def plot(self):
+        """绘制图表"""
+        fig = self.figure
+        fig.show()
+
+
+ + + +
+ + + + + + + + + + + +
+ + + +

+figure + + + property + readonly + + +

+ +
+ +

返回一个figure对象

+
+ +
+ + + + + + +
+ + + +

+__init__(self, bars, ma_groups=None, title=None, show_volume=True, show_rsi=True, show_peaks=False, width=None, height=None, **kwargs) + + + special + + +

+ +
+ +

构造函数

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
barsndarray

行情数据

required
ma_groupsList[int]

均线组参数。比如[5, 10, 20]表明向k线图中添加5, 10, 20日均线。如果不提供,将从数组[5, 10, 20, 30, 60, 120, 250]中取直到与len(bars) - 5匹配的参数为止。比如bars长度为30,则将取[5, 10, 20]来绘制均线。

None
titlestr

k线图的标题

None
show_volume

是否显示成交量图

True
show_rsi

是否显示RSI图。缺省显示参数为6的RSI图。

True
show_peaks

是否标记检测出来的峰跟谷。

False
width

the width in 'px' units of the figure

None
height

the height in 'px' units of the figure

None
+

Keyword arguments:

+ + + + + + + + + + + + + + + +
NameTypeDescription
rsi_winint

default is 6

+
+ Source code in omicron/plotting/candlestick.py +
def __init__(
+    self,
+    bars: np.ndarray,
+    ma_groups: List[int] = None,
+    title: str = None,
+    show_volume=True,
+    show_rsi=True,
+    show_peaks=False,
+    width=None,
+    height=None,
+    **kwargs,
+):
+    """构造函数
+
+    Args:
+        bars: 行情数据
+        ma_groups: 均线组参数。比如[5, 10, 20]表明向k线图中添加5, 10, 20日均线。如果不提供,将从数组[5, 10, 20, 30, 60, 120, 250]中取直到与`len(bars) - 5`匹配的参数为止。比如bars长度为30,则将取[5, 10, 20]来绘制均线。
+        title: k线图的标题
+        show_volume: 是否显示成交量图
+        show_rsi: 是否显示RSI图。缺省显示参数为6的RSI图。
+        show_peaks: 是否标记检测出来的峰跟谷。
+        width: the width in 'px' units of the figure
+        height: the height in 'px' units of the figure
+    Keyword Args:
+        rsi_win int: default is 6
+    """
+    self.title = title
+    self.bars = bars
+    self.width = width
+    self.height = height
+
+    # traces for main area
+    self.main_traces = {}
+
+    # traces for indicator area
+    self.ind_traces = {}
+
+    self.ticks = self._format_tick(bars["frame"])
+    self._bar_close = array_math_round(bars["close"], 2).astype(np.float64)
+
+    # for every candlestick, it must contain a candlestick plot
+    cs = go.Candlestick(
+        x=self.ticks,
+        open=bars["open"],
+        high=bars["high"],
+        low=bars["low"],
+        close=self._bar_close,
+        line=dict({"width": 1}),
+        name="K线",
+        **kwargs,
+    )
+
+    # Set line and fill colors
+    cs.increasing.fillcolor = "rgba(255,255,255,0.9)"
+    cs.increasing.line.color = self.RED
+    cs.decreasing.fillcolor = self.GREEN
+    cs.decreasing.line.color = self.GREEN
+
+    self.main_traces["ohlc"] = cs
+
+    if show_volume:
+        self.add_indicator("volume")
+
+    if show_peaks:
+        self.add_main_trace("peaks")
+
+    if show_rsi:
+        self.add_indicator("rsi", win=kwargs.get("rsi_win", 6))
+
+    # 增加均线
+    if ma_groups is None:
+        nbars = len(bars)
+        if nbars < 9:
+            ma_groups = []
+        else:
+            groups = np.array([5, 10, 20, 30, 60, 120, 250])
+            idx = max(np.argwhere(groups < (nbars - 5))).item() + 1
+            ma_groups = groups[:idx]
+
+    for win in ma_groups:
+        name = f"ma{win}"
+        if win > len(bars):
+            continue
+        ma = moving_average(self._bar_close, win)
+        line = go.Scatter(
+            y=ma,
+            x=self.ticks,
+            name=name,
+            line=dict(width=1, color=self.MA_COLORS.get(win)),
+        )
+        self.main_traces[name] = line
+
+
+
+ +
+ + + +
+ + + +

+add_bounding_box(self, boxes) + + +

+ +
+ +

bbox是标记在k线图上某个区间内的矩形框,它以该区间最高价和最低价为上下边。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
boxesList[Tuple]

每个元素(start, width)表示各个bbox的起点和宽度。

required
+
+ Source code in omicron/plotting/candlestick.py +
def add_bounding_box(self, boxes: List[Tuple]):
+    """bbox是标记在k线图上某个区间内的矩形框,它以该区间最高价和最低价为上下边。
+
+    Args:
+        boxes: 每个元素(start, width)表示各个bbox的起点和宽度。
+    """
+    for j, box in enumerate(boxes):
+        x, y = [], []
+        i, width = box
+        if len(x):
+            x.append(None)
+            y.append(None)
+
+        group = self.bars[i : i + width]
+
+        mean = np.mean(group["close"])
+        std = 2 * np.std(group["close"])
+
+        # 落在两个标准差以内的实体最上方和最下方值
+        hc = np.max(group[group["close"] < mean + std]["close"])
+        lc = np.min(group[group["close"] > mean - std]["close"])
+
+        ho = np.max(group[group["open"] < mean + std]["open"])
+        lo = np.min(group[group["open"] > mean - std]["open"])
+
+        h = max(hc, ho)
+        low = min(lo, lc)
+
+        x.extend(self.ticks[[i, i + width - 1, i + width - 1, i, i]])
+        y.extend((h, h, low, low, h))
+
+        hover = f"宽度: {width}<br>振幅: {h/low - 1:.2%}"
+        trace = go.Scatter(x=x, y=y, fill="toself", name=f"平台整理{j}", text=hover)
+        self.main_traces[f"bbox-{j}"] = trace
+
+
+
+ +
+ + + +
+ + + +

+add_indicator(self, indicator, **kwargs) + + +

+ +
+ +

向k线图中增加技术指标

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indicatorstr

当前支持值有'volume', 'rsi', 'bbands'

required
kwargs

计算某个indicator时,需要的参数。比如计算bbands时,需要传入均线的window

{}
+
+ Source code in omicron/plotting/candlestick.py +
def add_indicator(self, indicator: str, **kwargs):
+    """向k线图中增加技术指标
+
+    Args:
+        indicator: 当前支持值有'volume', 'rsi', 'bbands'
+        kwargs: 计算某个indicator时,需要的参数。比如计算bbands时,需要传入均线的window
+    """
+    if indicator == "volume":
+        colors = np.repeat(self.RED, len(self.bars))
+        colors[self.bars["close"] <= self.bars["open"]] = self.GREEN
+
+        trace = go.Bar(
+            x=self.ticks,
+            y=self.bars["volume"],
+            showlegend=False,
+            marker={"color": colors},
+        )
+    elif indicator == "rsi":
+        win = kwargs.get("win")
+        rsi = talib.RSI(self._bar_close, win)  # type: ignore
+        trace = go.Scatter(x=self.ticks, y=rsi, showlegend=False)
+    elif indicator == "bbands":
+        self._remove_ma()
+        win = kwargs.get("win")
+        for name, ind in zip(
+            ["bbands-high", "bbands-mean", "bbands-low"],
+            talib.BBANDS(self._bar_close, win),  # type: ignore
+        ):
+            trace = go.Scatter(x=self.ticks, y=ind, showlegend=True, name=name)
+            self.main_traces[name] = trace
+
+        return
+    else:
+        raise ValueError(f"{indicator} not supported")
+
+    self.ind_traces[indicator] = trace
+
+
+
+ +
+ + + +
+ + + +

+add_line(self, trace_name, x, y) + + +

+ +
+ +

在k线图上增加以x,y表示的一条直线

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
trace_name

图例名称

required
x

x轴坐标,所有的x值都必须属于[0, len(self.bars)]

required
y

y值

required
+
+ Source code in omicron/plotting/candlestick.py +
def add_line(self, trace_name: str, x: List[int], y: List[float]):
+    """在k线图上增加以`x`,`y`表示的一条直线
+
+    Args:
+        trace_name : 图例名称
+        x : x轴坐标,所有的x值都必须属于[0, len(self.bars)]
+        y : y值
+    """
+    line = go.Scatter(x=self.ticks[x], y=y, mode="lines", name=trace_name)
+
+    self.main_traces[trace_name] = line
+
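A hedged sketch of `add_line`, assuming `bars` is the structured bar array fetched as in the module-level examples (e.g. via `Stock.get_bars`); the indices and trace name are arbitrary:

```python
from omicron.plotting.candlestick import Candlestick

# bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)  # as in the module examples
cs = Candlestick(bars, show_rsi=False)
# a straight segment from the 10th to the 60th bar, anchored on their closes
cs.add_line("trendline", x=[10, 60], y=[bars["close"][10], bars["close"][60]])
cs.plot()
```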
+
+
+ +
+ + + +
+ + + +

+add_main_trace(self, trace_name, **kwargs) + + +

+ +
+ +

add trace to main plot

+

支持的图例类别有peaks, bbox(bounding-box), bt(回测), support_line, resist_line

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
trace_name

图例名称

required
**kwargs

其他参数

{}
+
+ Source code in omicron/plotting/candlestick.py +
def add_main_trace(self, trace_name: str, **kwargs):
+    """add trace to main plot
+
+    支持的图例类别有peaks, bbox(bounding-box), bt(回测), support_line, resist_line
+    Args:
+        trace_name : 图例名称
+        **kwargs : 其他参数
+
+    """
+    if trace_name == "peaks":
+        self.mark_peaks_and_valleys(
+            kwargs.get("up_thres", 0.03), kwargs.get("down_thres", -0.03)
+        )
+
+    # 标注矩形框
+    elif trace_name == "bbox":
+        self.add_bounding_box(kwargs.get("boxes"))
+
+    # 回测结果
+    elif trace_name == "bt":
+        self.mark_backtest_result(kwargs.get("bt"))
+
+    # 增加直线
+    elif trace_name == "support_line":
+        self.add_line("支撑线", kwargs.get("x"), kwargs.get("y"))
+
+    elif trace_name == "resist_line":
+        self.add_line("压力线", kwargs.get("x"), kwargs.get("y"))
+
+
+
+ +
+ + + +
+ + + +

+add_marks(self, x, y, name, marker='cross', color=None) + + +

+ +
+ +

向k线图中增加标记点

+ +
+ Source code in omicron/plotting/candlestick.py +
def add_marks(
+    self,
+    x: List[int],
+    y: List[float],
+    name: str,
+    marker: str = "cross",
+    color: Optional[str] = None,
+):
+    """向k线图中增加标记点"""
+    trace = go.Scatter(
+        x=self.ticks[x],
+        y=y,
+        mode="markers",
+        marker_symbol=marker,
+        marker_color=color,
+        name=name,
+    )
+    self.main_traces[name] = trace
+
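A hedged sketch of `add_marks`; note that `x`, `y` and `name` are all required. `bars` is assumed to come from the module-level examples, and the marked indices are arbitrary:

```python
from omicron.plotting.candlestick import Candlestick

# bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)  # as in the module examples
cs = Candlestick(bars)
# place a cross slightly above the high of the 20th and 50th bars
cs.add_marks(
    [20, 50],
    bars["high"][[20, 50]] * 1.02,
    name="signals",
    marker="cross",
)
cs.plot()
```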
+
+
+ +
+ + + +
+ + + +

+mark_backtest_result(self, result) + + +

+ +
+ +

标记买卖点和回测数据

+
+

Todo

+

此方法可能未与backtest返回值同步。此外,在portfolio回测中,不可能在k线图中使用此方法。

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
result

回测结果,需包含trades和assets两个字段。

required
+
+ Source code in omicron/plotting/candlestick.py +
def mark_backtest_result(self, result: dict):
+    """标记买卖点和回测数据
+
+    TODO:
+        此方法可能未与backtest返回值同步。此外,在portfolio回测中,不可能在k线图中使用此方法。
+
+    Args:
+        result : 回测结果,需包含"trades"和"assets"两个字段。
+    """
+    trades = result.get("trades")
+    assets = result.get("assets")
+
+    x, y, labels = [], [], []
+    hover = []
+    labels_color = defaultdict(list)
+
+    for trade in trades:
+        trade_date = arrow.get(trade["time"]).date()
+        asset = assets.get(trade_date)
+
+        security = trade["security"]
+        price = trade["price"]
+        volume = trade["volume"]
+
+        side = trade["order_side"]
+
+        x.append(self._format_tick(trade_date))
+
+        bar = self.bars[self.bars["frame"] == trade_date]
+        if side == "买入":
+            hover.append(
+                f"总资产:{asset}<br><br>{side}:{security}<br>买入价:{price}<br>股数:{volume}"
+            )
+
+            y.append(bar["high"][0] * 1.1)
+            labels.append("B")
+            labels_color["color"].append(self.RED)
+
+        else:
+            y.append(bar["low"][0] * 0.99)
+
+            hover.append(
+                f"总资产:{asset}<hr><br>{side}:{security}<br>卖出价:{price}<br>股数:{volume}"
+            )
+
+            labels.append("S")
+            labels_color["color"].append(self.GREEN)
+
+            # txt.append(f'{side}:{security}<br>卖出价:{price}<br>股数:{volume}')
+
+    trace = go.Scatter(
+        x=x,
+        y=y,
+        mode="text",
+        text=labels,
+        name="backtest",
+        hovertext=hover,
+        textfont=labels_color,
+    )
+
+    self.main_traces["bs"] = trace
+
+
+
+ +
+ + + +
+ + + +

+mark_bbox(self, min_size=20) + + +

+ +
+ +

在k线图上检测并标注矩形框

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
min_size

矩形框的最小长度

20
+
+ Source code in omicron/plotting/candlestick.py +
def mark_bbox(self, min_size: int = 20):
+    """在k线图上检测并标注矩形框
+
+    Args:
+        min_size : 矩形框的最小长度
+
+    """
+    boxes = plateaus(self._bar_close, min_size)
+    self.add_main_trace("bbox", boxes=boxes)
+
+
+
+ +
+ + + +
+ + + +

+mark_peaks_and_valleys(self, up_thres=None, down_thres=None) + + +

+ +
+ +

在K线图上标注峰谷点

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
up_thres

用来检测峰谷时使用的阈值,参见omicron.talib.morph.peaks_and_valleys

None
down_thres

用来检测峰谷时使用的阈值,参见omicron.talib.morph.peaks_and_valleys

None
+
+ Source code in omicron/plotting/candlestick.py +
def mark_peaks_and_valleys(
+    self, up_thres: Optional[float] = None, down_thres: Optional[float] = None
+):
+    """在K线图上标注峰谷点
+
+    Args:
+        up_thres : 用来检测峰谷时使用的阈值,参见[omicron.talib.morph.peaks_and_valleys][]
+        down_thres : 用来检测峰谷时使用的阈值,参见[omicron.talib.morph.peaks_and_valleys][]
+
+    """
+    bars = self.bars
+
+    flags = peaks_and_valleys(self._bar_close, up_thres, down_thres)
+
+    # 移除首尾的顶底标记,一般情况下它们都不是真正的顶和底。
+    flags[0] = 0
+    flags[-1] = 0
+
+    marker_margin = (max(bars["high"]) - min(bars["low"])) * 0.05
+    ticks_up = self.ticks[flags == 1]
+    y_up = bars["high"][flags == 1] + marker_margin
+    ticks_down = self.ticks[flags == -1]
+    y_down = bars["low"][flags == -1] - marker_margin
+
+    trace = go.Scatter(
+        mode="markers", x=ticks_up, y=y_up, marker_symbol="triangle-down", name="峰"
+    )
+    self.main_traces["peaks"] = trace
+
+    trace = go.Scatter(
+        mode="markers",
+        x=ticks_down,
+        y=y_down,
+        marker_symbol="triangle-up",
+        name="谷",
+    )
+    self.main_traces["valleys"] = trace
+
+
+
+ +
+ + + +
+ + + +

+mark_support_resist_lines(self, upthres=None, downthres=None, use_close=True, win=60) + + +

+ +
+ +

在K线图上标注支撑线和压力线

+

win个k线内,找出所有的局部峰谷点,并以最高的两个峰连线生成压力线,以最低的两个谷连线生成支撑线。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
upthres

用来检测峰谷时使用的阈值,参见omicron.talib.morph.peaks_and_valleys

None
downthres

用来检测峰谷时使用的阈值,参见omicron.talib.morph.peaks_and_valleys.

None
use_close

是否使用收盘价来进行检测。如果为False,则使用high来检测压力线,使用low来检测支撑线.

True
win

检测局部高低点的窗口.

60
+
+ Source code in omicron/plotting/candlestick.py +
def mark_support_resist_lines(
+    self, upthres: float = None, downthres: float = None, use_close=True, win=60
+):
+    """在K线图上标注支撑线和压力线
+
+    在`win`个k线内,找出所有的局部峰谷点,并以最高的两个峰连线生成压力线,以最低的两个谷连线生成支撑线。
+
+    Args:
+        upthres : 用来检测峰谷时使用的阈值,参见`omicron.talib.morph.peaks_and_valleys`
+        downthres : 用来检测峰谷时使用的阈值,参见`omicron.talib.morph.peaks_and_valleys`.
+        use_close : 是否使用收盘价来进行检测。如果为False,则使用high来检测压力线,使用low来检测支撑线.
+        win : 检测局部高低点的窗口.
+    """
+    bars = self.bars[-win:]
+    clipped = len(self.bars) - win
+
+    if use_close:
+        support, resist, x_start = support_resist_lines(
+            self._bar_close, upthres, downthres
+        )
+        x = np.arange(len(bars))[x_start:]
+
+        self.add_main_trace("support_line", x=x + clipped, y=support(x))
+        self.add_main_trace("resist_line", x=x + clipped, y=resist(x))
+
+    else:  # 使用"high"和"low"
+        bars = self.bars[-win:]
+        support, _, x_start = support_resist_lines(bars["low"], upthres, downthres)
+        x = np.arange(len(bars))[x_start:]
+        self.add_main_trace("support_line", x=x + clipped, y=support(x))
+
+        _, resist, x_start = support_resist_lines(bars["high"], upthres, downthres)
+        x = np.arange(len(bars))[x_start:]
+        self.add_main_trace("resist_line", x=x + clipped, y=resist(x))
+
+
+
+ +
+ + + +
+ + + +

+plot(self) + + +

+ +
+ +

绘制图表

+ +
+ Source code in omicron/plotting/candlestick.py +
def plot(self):
+    """绘制图表"""
+    fig = self.figure
+    fig.show()
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/plotting/metrics/index.html b/2.0.0/api/plotting/metrics/index.html new file mode 100644 index 00000000..fb2e6589 --- /dev/null +++ b/2.0.0/api/plotting/metrics/index.html @@ -0,0 +1,1412 @@ + + + + + + + + + + + + + + + + MetricsGraph - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +
+ + +
+ +

绘制回测资产曲线和指标图。

+

示例: +

1
+2
+3
+4
+5
+6
from omicron.plotting import MetricsGraph
+
+# calling some strategy's backtest and get bills/metrics
+mg = MetricsGraph(bills, metrics)
+
+await mg.plot()
+
+注意此方法需要在notebook中调用。 +

+ + + +
+ + + + + + + + +
+ + + +

+ +MetricsGraph + + + +

+ +
+ + +
+ Source code in omicron/plotting/metrics.py +
class MetricsGraph:
+    def __init__(
+        self,
+        bills: dict,
+        metrics: dict,
+        baseline_code: str = "399300.XSHE",
+        indicator: Optional[pd.DataFrame] = None,
+    ):
+        """
+        Args:
+            bills: 回测生成的账单,通过Strategy.bills获得
+            metrics: 回测生成的指标,通过strategy.metrics获得
+            baseline_code: 基准证券代码
+            indicator: 回测时使用的指标。如果存在,将叠加到策略回测图上。它应该是一个以日期为索引,指标值列名为"value"的pandas.DataFrame。如果不提供,将不会绘制指标图
+        """
+        self.metrics = metrics
+        self.trades = bills["trades"]
+        self.positions = bills["positions"]
+        self.start = arrow.get(bills["assets"][0][0]).date()
+        self.end = arrow.get(bills["assets"][-1][0]).date()
+
+        self.frames = [
+            tf.int2date(f) for f in tf.get_frames(self.start, self.end, FrameType.DAY)
+        ]
+
+        if indicator is not None:
+            self.indicator = indicator.join(
+                pd.Series(index=self.frames, name="frames", dtype=np.float64),
+                how="right",
+            )
+        else:
+            self.indicator = None
+
+        # 记录日期到下标的反向映射
+        self._frame2pos = {f: i for i, f in enumerate(self.frames)}
+        self.ticks = self._format_tick(self.frames)
+
+        # TODO: there's bug in backtesting, temporarily fix here
+        df = pd.DataFrame(self.frames, columns=["frame"])
+        df["assets"] = np.nan
+        assets = pd.DataFrame(bills["assets"], columns=["frame", "assets"])
+        df["assets"] = assets["assets"]
+        self.assets = df.fillna(method="ffill")["assets"].to_numpy()
+        self.nv = self.assets / self.assets[0]
+
+        self.baseline_code = baseline_code or "399300.XSHE"
+
+    def _fill_missing_prices(self, bars: BarsArray, frames: Union[List, NDArray]):
+        """将bars中缺失值采用其前值替换
+
+        当baseline为个股时,可能存在停牌的情况,这样导致由此计算的参考收益无法与回测的资产收益对齐,因此需要进行调整。
+
+        出于这个目的,本函数只返回处理后的收盘价。
+
+        Args:
+            bars: 基线行情数据。
+            frames: 日期索引
+
+        Returns:
+            补充缺失值后的收盘价序列
+        """
+        _close = pd.DataFrame(
+            {
+                "close": pd.Series(bars["close"], index=bars["frame"]),
+                "frame": pd.Series(np.empty((len(frames),)), index=frames),
+            }
+        )["close"].to_numpy()
+
+        # 这里使用omicron中的fill_nan,是因为如果数组的第一个元素即为NaN的话,那么DataFrame.fillna(method='ffill')将无法处理这样的情况(仍然保持为nan)
+
+        return fill_nan(_close)
+
+    def _format_tick(self, frames: Union[Frame, List[Frame]]) -> Union[str, NDArray]:
+        if type(frames) == datetime.date:
+            x = frames
+            return f"{x.year:02}-{x.month:02}-{x.day:02}"
+        elif type(frames) == datetime.datetime:
+            x = frames
+            return f"{x.month:02}-{x.day:02} {x.hour:02}:{x.minute:02}"
+        elif type(frames[0]) == datetime.date:  # type: ignore
+            return np.array([f"{x.year:02}-{x.month:02}-{x.day:02}" for x in frames])
+        else:
+            return np.array(
+                [f"{x.month:02}-{x.day:02} {x.hour:02}:{x.minute:02}" for x in frames]  # type: ignore
+            )
+
+    async def _metrics_trace(self):
+        metric_names = {
+            "start": "起始日",
+            "end": "结束日",
+            "window": "资产暴露窗口",
+            "total_tx": "交易次数",
+            "total_profit": "总利润",
+            "total_profit_rate": "利润率",
+            "win_rate": "胜率",
+            "mean_return": "日均回报",
+            "sharpe": "夏普率",
+            "max_drawdown": "最大回撤",
+            "annual_return": "年化回报",
+            "volatility": "波动率",
+            "sortino": "sortino",
+            "calmar": "calmar",
+        }
+
+        # bug: plotly go.Table.Cells format not work here
+        metric_formatter = {
+            "start": "{}",
+            "end": "{}",
+            "window": "{}",
+            "total_tx": "{}",
+            "total_profit": "{:.2f}",
+            "total_profit_rate": "{:.2%}",
+            "win_rate": "{:.2%}",
+            "mean_return": "{:.2%}",
+            "sharpe": "{:.2f}",
+            "max_drawdown": "{:.2%}",
+            "annual_return": "{:.2%}",
+            "volatility": "{:.2%}",
+            "sortino": "{:.2f}",
+            "calmar": "{:.2f}",
+        }
+
+        metrics = deepcopy(self.metrics)
+        baseline = metrics["baseline"] or {}
+        del metrics["baseline"]
+
+        baseline_name = (
+            await Security.alias(self.baseline_code) if self.baseline_code else "基准"
+        )
+
+        metrics_formatted = []
+        for k in metric_names.keys():
+            if metrics.get(k):
+                metrics_formatted.append(metric_formatter[k].format(metrics.get(k)))
+            else:
+                metrics_formatted.append("-")
+
+        baseline_formatted = []
+        for k in metric_names.keys():
+            if baseline.get(k):
+                baseline_formatted.append(metric_formatter[k].format(baseline.get(k)))
+            else:
+                baseline_formatted.append("-")
+
+        return go.Table(
+            header=dict(values=["指标名", "策略", baseline_name]),
+            cells=dict(
+                values=[
+                    [v for _, v in metric_names.items()],
+                    metrics_formatted,
+                    baseline_formatted,
+                ],
+                font_size=10,
+            ),
+        )
+
+    async def _trade_info_trace(self):
+        """构建hover text 序列"""
+        # convert trades into hover_info
+        buys = defaultdict(list)
+        sells = defaultdict(list)
+        for _, trade in self.trades.items():
+            trade_date = arrow.get(trade["time"]).date()
+
+            ipos = self._frame2pos.get(trade_date)
+            if ipos is None:
+                logger.warning(
+                    "date  %s in trade record not in backtest range", trade_date
+                )
+                continue
+
+            name = await Security.alias(trade["security"])
+            price = trade["price"]
+            side = trade["order_side"]
+            filled = trade["filled"]
+
+            trade_text = f"{side}:{name} {filled/100:.0f}手 价格:{price:.02f} 成交额:{filled * price/10000:.1f}万"
+
+            if side == "卖出":
+                sells[trade_date].append(trade_text)
+            elif side in ("买入", "分红配股"):
+                buys[trade_date].append(trade_text)
+
+        X_buy, Y_buy, data_buy = [], [], []
+        X_sell, Y_sell, data_sell = [], [], []
+
+        for dt, text in buys.items():
+            ipos = self._frame2pos.get(dt)
+            Y_buy.append(self.nv[ipos])
+            X_buy.append(self._format_tick(dt))
+
+            asset = self.assets[ipos]
+            hover = f"资产:{asset/10000:.1f}万<br>{'<br>'.join(text)}"
+            data_buy.append(hover)
+
+        trace_buy = go.Scatter(
+            x=X_buy,
+            y=Y_buy,
+            mode="markers",
+            text=data_buy,
+            name="买入成交",
+            marker=dict(color="red", symbol="triangle-up"),
+            hovertemplate="<br>%{text}",
+        )
+
+        for dt, text in sells.items():
+            ipos = self._frame2pos.get(dt)
+            Y_sell.append(self.nv[ipos])
+            X_sell.append(self._format_tick(dt))
+
+            asset = self.assets[ipos]
+            hover = f"资产:{asset/10000:.1f}万<br>{'<br>'.join(text)}"
+            data_sell.append(hover)
+
+        trace_sell = go.Scatter(
+            x=X_sell,
+            y=Y_sell,
+            mode="markers",
+            text=data_sell,
+            name="卖出成交",
+            marker=dict(color="green", symbol="triangle-down"),
+            hovertemplate="<br>%{text}",
+        )
+
+        return trace_buy, trace_sell
+
+    async def plot(self):
+        """绘制资产曲线及回测指标图"""
+        n = len(self.assets)
+        bars = await Stock.get_bars(self.baseline_code, n, FrameType.DAY, self.end)
+
+        baseline_prices = self._fill_missing_prices(bars, self.frames)
+        baseline_prices /= baseline_prices[0]
+
+        fig = make_subplots(
+            rows=1,
+            cols=2,
+            shared_xaxes=False,
+            specs=[
+                [{"secondary_y": True}, {"type": "table"}],
+            ],
+            column_width=[0.75, 0.25],
+            horizontal_spacing=0.01,
+            subplot_titles=("资产曲线", "策略指标"),
+        )
+
+        fig.add_trace(await self._metrics_trace(), row=1, col=2)
+
+        if self.indicator is not None:
+            indicator_on_hover = self.indicator["value"]
+        else:
+            indicator_on_hover = None
+
+        baseline_name = (
+            await Security.alias(self.baseline_code) if self.baseline_code else "基准"
+        )
+
+        baseline_trace = go.Scatter(
+            y=baseline_prices,
+            x=self.ticks,
+            mode="lines",
+            name=baseline_name,
+            showlegend=True,
+            text=indicator_on_hover,
+            hovertemplate="<br>净值:%{y:.2f}" + "<br>指标:%{text:.1f}",
+        )
+        fig.add_trace(baseline_trace, row=1, col=1)
+
+        nv_trace = go.Scatter(
+            y=self.nv,
+            x=self.ticks,
+            mode="lines",
+            name="策略",
+            showlegend=True,
+            hovertemplate="<br>净值:%{y:.2f}",
+        )
+        fig.add_trace(nv_trace, row=1, col=1)
+
+        if self.indicator is not None:
+            ind_trace = go.Scatter(
+                y=self.indicator["value"],
+                x=self.ticks,
+                mode="lines",
+                name="indicator",
+                showlegend=True,
+                visible="legendonly",
+            )
+            fig.add_trace(ind_trace, row=1, col=1, secondary_y=True)
+
+        for trace in await self._trade_info_trace():
+            fig.add_trace(trace, row=1, col=1)
+
+        fig.update_xaxes(type="category", tickangle=45, nticks=len(self.ticks) // 5)
+        fig.update_layout(margin=dict(l=20, r=20, t=50, b=50), width=1040, height=435)
+        fig.update_layout(
+            hovermode="x unified", hoverlabel=dict(bgcolor="rgba(255,255,255,0.8)")
+        )
+        fig.show()
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+__init__(self, bills, metrics, baseline_code='399300.XSHE', indicator=None) + + + special + + +

+ +
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
billsdict

回测生成的账单,通过Strategy.bills获得

required
metricsdict

回测生成的指标,通过strategy.metrics获得

required
baseline_codestr

基准证券代码

'399300.XSHE'
indicatorOptional[pandas.core.frame.DataFrame]

回测时使用的指标。如果存在,将叠加到策略回测图上。它应该是一个以日期为索引,指标值列名为"value"的pandas.DataFrame。如果不提供,将不会绘制指标图

None
+
+ Source code in omicron/plotting/metrics.py +
def __init__(
+    self,
+    bills: dict,
+    metrics: dict,
+    baseline_code: str = "399300.XSHE",
+    indicator: Optional[pd.DataFrame] = None,
+):
+    """
+    Args:
+        bills: 回测生成的账单,通过Strategy.bills获得
+        metrics: 回测生成的指标,通过strategy.metrics获得
+        baseline_code: 基准证券代码
+        indicator: 回测时使用的指标。如果存在,将叠加到策略回测图上。它应该是一个以日期为索引,指标值列名为"value"的pandas.DataFrame。如果不提供,将不会绘制指标图
+    """
+    self.metrics = metrics
+    self.trades = bills["trades"]
+    self.positions = bills["positions"]
+    self.start = arrow.get(bills["assets"][0][0]).date()
+    self.end = arrow.get(bills["assets"][-1][0]).date()
+
+    self.frames = [
+        tf.int2date(f) for f in tf.get_frames(self.start, self.end, FrameType.DAY)
+    ]
+
+    if indicator is not None:
+        self.indicator = indicator.join(
+            pd.Series(index=self.frames, name="frames", dtype=np.float64),
+            how="right",
+        )
+    else:
+        self.indicator = None
+
+    # 记录日期到下标的反向映射
+    self._frame2pos = {f: i for i, f in enumerate(self.frames)}
+    self.ticks = self._format_tick(self.frames)
+
+    # TODO: there's bug in backtesting, temporarily fix here
+    df = pd.DataFrame(self.frames, columns=["frame"])
+    df["assets"] = np.nan
+    assets = pd.DataFrame(bills["assets"], columns=["frame", "assets"])
+    df["assets"] = assets["assets"]
+    self.assets = df.fillna(method="ffill")["assets"].to_numpy()
+    self.nv = self.assets / self.assets[0]
+
+    self.baseline_code = baseline_code or "399300.XSHE"
+
+
+
+ +
+ + + +
+ + + +

+plot(self) + + + async + + +

+ +
+ +

绘制资产曲线及回测指标图

+ +
+ Source code in omicron/plotting/metrics.py +
async def plot(self):
+    """绘制资产曲线及回测指标图"""
+    n = len(self.assets)
+    bars = await Stock.get_bars(self.baseline_code, n, FrameType.DAY, self.end)
+
+    baseline_prices = self._fill_missing_prices(bars, self.frames)
+    baseline_prices /= baseline_prices[0]
+
+    fig = make_subplots(
+        rows=1,
+        cols=2,
+        shared_xaxes=False,
+        specs=[
+            [{"secondary_y": True}, {"type": "table"}],
+        ],
+        column_width=[0.75, 0.25],
+        horizontal_spacing=0.01,
+        subplot_titles=("资产曲线", "策略指标"),
+    )
+
+    fig.add_trace(await self._metrics_trace(), row=1, col=2)
+
+    if self.indicator is not None:
+        indicator_on_hover = self.indicator["value"]
+    else:
+        indicator_on_hover = None
+
+    baseline_name = (
+        await Security.alias(self.baseline_code) if self.baseline_code else "基准"
+    )
+
+    baseline_trace = go.Scatter(
+        y=baseline_prices,
+        x=self.ticks,
+        mode="lines",
+        name=baseline_name,
+        showlegend=True,
+        text=indicator_on_hover,
+        hovertemplate="<br>净值:%{y:.2f}" + "<br>指标:%{text:.1f}",
+    )
+    fig.add_trace(baseline_trace, row=1, col=1)
+
+    nv_trace = go.Scatter(
+        y=self.nv,
+        x=self.ticks,
+        mode="lines",
+        name="策略",
+        showlegend=True,
+        hovertemplate="<br>净值:%{y:.2f}",
+    )
+    fig.add_trace(nv_trace, row=1, col=1)
+
+    if self.indicator is not None:
+        ind_trace = go.Scatter(
+            y=self.indicator["value"],
+            x=self.ticks,
+            mode="lines",
+            name="indicator",
+            showlegend=True,
+            visible="legendonly",
+        )
+        fig.add_trace(ind_trace, row=1, col=1, secondary_y=True)
+
+    for trace in await self._trade_info_trace():
+        fig.add_trace(trace, row=1, col=1)
+
+    fig.update_xaxes(type="category", tickangle=45, nticks=len(self.ticks) // 5)
+    fig.update_layout(margin=dict(l=20, r=20, t=50, b=50), width=1040, height=435)
+    fig.update_layout(
+        hovermode="x unified", hoverlabel=dict(bgcolor="rgba(255,255,255,0.8)")
+    )
+    fig.show()
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/security/index.html b/2.0.0/api/security/index.html new file mode 100644 index 00000000..734a3c29 --- /dev/null +++ b/2.0.0/api/security/index.html @@ -0,0 +1,2919 @@ + + + + + + + + + + + + + + + + security - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +
+ + +
+ + + + +
+ + + + + + + + + + + +
+ + + +

+ +Query + + + +

+ +
+ +

证券信息查询对象

+

证券信息查询对象,由Security.select()方法生成,支持链式查询。通过eval函数结束链式调用并生成查询结果。

+ +
+ Source code in omicron/models/security.py +
class Query:
+    """证券信息查询对象
+
+    证券信息查询对象,由`Security.select()`方法生成,支持链式查询。通过`eval`函数结束链式调用并生成查询结果。
+    """
+
+    def __init__(self, target_date: datetime.date = None):
+        if target_date is None:
+            # 聚宽不一定会及时更新数据,因此db中不存放当天的数据,如果传空,查cache
+            self.target_date = None
+        else:
+            # 如果是交易日,取当天,否则取前一天
+            self.target_date = tf.day_shift(target_date, 0)
+
+        # 名字,显示名,类型过滤器
+        self._name_pattern = None  # 字母名字
+        self._alias_pattern = None  # 显示名
+        self._type_pattern = None  # 不指定则默认为全部,如果传入空值则只选择股票和指数
+        # 开关选项
+        self._exclude_kcb = False  # 科创板
+        self._exclude_cyb = False  # 创业板
+        self._exclude_st = False  # ST
+        self._include_exit = False  # 是否包含已退市证券(默认不包括当天退市的)
+        # 下列开关优先级高于上面的
+        self._only_kcb = False
+        self._only_cyb = False
+        self._only_st = False
+
+    def only_cyb(self) -> "Query":
+        """返回结果中只包含创业板股票"""
+        self._only_cyb = True  # 高优先级
+        self._exclude_cyb = False
+        self._only_kcb = False
+        self._only_st = False
+        return self
+
+    def only_st(self) -> "Query":
+        """返回结果中只包含ST类型的证券"""
+        self._only_st = True  # 高优先级
+        self._exclude_st = False
+        self._only_kcb = False
+        self._only_cyb = False
+        return self
+
+    def only_kcb(self) -> "Query":
+        """返回结果中只包含科创板股票"""
+        self._only_kcb = True  # 高优先级
+        self._exclude_kcb = False
+        self._only_cyb = False
+        self._only_st = False
+        return self
+
+    def exclude_st(self) -> "Query":
+        """从返回结果中排除ST类型的股票"""
+        self._exclude_st = True
+        self._only_st = False
+        return self
+
+    def exclude_cyb(self) -> "Query":
+        """从返回结果中排除创业板类型的股票"""
+        self._exclude_cyb = True
+        self._only_cyb = False
+        return self
+
+    def exclude_kcb(self) -> "Query":
+        """从返回结果中排除科创板类型的股票"""
+        self._exclude_kcb = True
+        self._only_kcb = False
+        return self
+
+    def include_exit(self) -> "Query":
+        """从返回结果中包含已退市的证券"""
+        self._include_exit = True
+        return self
+
+    def types(self, types: List[str]) -> "Query":
+        """选择类型在`types`中的证券品种
+
+        如果不调用此方法,默认选择所有股票类型。
+        如果调用此方法但不传入参数,默认选择指数+股票
+        Args:
+            types: 有效的类型包括: 对股票指数而言是('index', 'stock'),对基金而言则是('etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof')
+        """
+        if types is None or isinstance(types, List) is False:
+            return self
+
+        if len(types) == 0:
+            self._type_pattern = ["index", "stock"]
+        else:
+            tmp = set(types)
+            self._type_pattern = list(tmp)
+
+        return self
+
+    def name_like(self, name: str) -> "Query":
+        """查找股票/证券名称中出现`name`的品种
+
+        注意这里的证券名称并不是其显示名。比如对中国平安000001.XSHE来说,它的名称是ZGPA,而不是“中国平安”。
+
+        Args:
+            name: 待查找的名字,比如"ZGPA"
+
+        """
+        if name is None or len(name) == 0:
+            self._name_pattern = None
+        else:
+            self._name_pattern = name
+
+        return self
+
+    def alias_like(self, display_name: str) -> "Query":
+        """查找股票/证券显示名中出现`display_name的品种
+
+        Args:
+            display_name: 显示名,比如“中国平安"
+        """
+        if display_name is None or len(display_name) == 0:
+            self._alias_pattern = None
+        else:
+            self._alias_pattern = display_name
+
+        return self
+
+    async def eval(self) -> List[str]:
+        """对查询结果进行求值,返回code列表
+
+        Returns:
+            代码列表
+        """
+        logger.debug("eval, date: %s", self.target_date)
+        logger.debug(
+            "eval, names and types: %s, %s, %s",
+            self._name_pattern,
+            self._alias_pattern,
+            self._type_pattern,
+        )
+        logger.debug(
+            "eval, exclude and include: %s, %s, %s, %s",
+            self._exclude_cyb,
+            self._exclude_st,
+            self._exclude_kcb,
+            self._include_exit,
+        )
+        logger.debug(
+            "eval, only: %s, %s, %s ", self._only_cyb, self._only_st, self._only_kcb
+        )
+
+        date_in_cache = await cache.security.get("security:latest_date")
+        if date_in_cache:  # 无此数据说明omega有某些问题,不处理
+            _date = arrow.get(date_in_cache).date()
+        else:
+            now = datetime.datetime.now()
+            _date = tf.day_shift(now, 0)
+
+        # 确定数据源,cache为当天8点之后获取的数据,数据库存放前一日和更早的数据
+        if not self.target_date or self.target_date >= _date:
+            self.target_date = _date
+
+        records = None
+        if self.target_date == _date:  # 从内存中查找,如果缓存中的数据已更新,重新加载到内存
+            secs = await cache.security.lrange("security:all", 0, -1)
+            if len(secs) != 0:
+                # using np.datetime64[s]
+                records = np.array(
+                    [tuple(x.split(",")) for x in secs], dtype=security_info_dtype
+                )
+        else:
+            records = await Security.load_securities_from_db(self.target_date)
+        if records is None:
+            return None
+
+        results = []
+        self._type_pattern = self._type_pattern or SecurityType.STOCK.value
+        for record in records:
+            if self._type_pattern is not None:
+                if record["type"] not in self._type_pattern:
+                    continue
+            if self._name_pattern is not None:
+                if record["name"].find(self._name_pattern) == -1:
+                    continue
+            if self._alias_pattern is not None:
+                if record["alias"].find(self._alias_pattern) == -1:
+                    continue
+
+            # 创业板,科创板,ST暂时限定为股票类型
+            if self._only_cyb:
+                if record["type"] != SecurityType.STOCK.value or not (
+                    record["code"][:3] in ("300", "301")
+                ):
+                    continue
+            if self._only_kcb:
+                if (
+                    record["type"] != SecurityType.STOCK.value
+                    or record["code"].startswith("688") is False
+                ):
+                    continue
+            if self._only_st:
+                if (
+                    record["type"] != SecurityType.STOCK.value
+                    or record["alias"].find("ST") == -1
+                ):
+                    continue
+            if self._exclude_cyb:
+                if record["type"] == SecurityType.STOCK.value and record["code"][
+                    :3
+                ] in ("300", "301"):
+                    continue
+            if self._exclude_st:
+                if (
+                    record["type"] == SecurityType.STOCK.value
+                    and record["alias"].find("ST") != -1
+                ):
+                    continue
+            if self._exclude_kcb:
+                if record["type"] == SecurityType.STOCK.value and record[
+                    "code"
+                ].startswith("688"):
+                    continue
+
+            # 退市暂不限定是否为股票
+            if self._include_exit is False:
+                d1 = convert_nptime_to_datetime(record["end"]).date()
+                if d1 < self.target_date:
+                    continue
+
+            results.append(record["code"])
+
+        # 返回所有查询到的结果
+        return results
+
+
+ + + +
+ + + + + + + + + + +
+ + + +

+alias_like(self, display_name) + + +

+ +
+ +

查找股票/证券显示名中出现`display_name的品种

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
display_namestr

显示名,比如“中国平安"

required
+
+ Source code in omicron/models/security.py +
def alias_like(self, display_name: str) -> "Query":
+    """查找股票/证券显示名中出现`display_name的品种
+
+    Args:
+        display_name: 显示名,比如“中国平安"
+    """
+    if display_name is None or len(display_name) == 0:
+        self._alias_pattern = None
+    else:
+        self._alias_pattern = display_name
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+eval(self) + + + async + + +

+ +
+ +

对查询结果进行求值,返回code列表

+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[str]

代码列表

+
+ Source code in omicron/models/security.py +
async def eval(self) -> List[str]:
+    """对查询结果进行求值,返回code列表
+
+    Returns:
+        代码列表
+    """
+    logger.debug("eval, date: %s", self.target_date)
+    logger.debug(
+        "eval, names and types: %s, %s, %s",
+        self._name_pattern,
+        self._alias_pattern,
+        self._type_pattern,
+    )
+    logger.debug(
+        "eval, exclude and include: %s, %s, %s, %s",
+        self._exclude_cyb,
+        self._exclude_st,
+        self._exclude_kcb,
+        self._include_exit,
+    )
+    logger.debug(
+        "eval, only: %s, %s, %s ", self._only_cyb, self._only_st, self._only_kcb
+    )
+
+    date_in_cache = await cache.security.get("security:latest_date")
+    if date_in_cache:  # 无此数据说明omega有某些问题,不处理
+        _date = arrow.get(date_in_cache).date()
+    else:
+        now = datetime.datetime.now()
+        _date = tf.day_shift(now, 0)
+
+    # 确定数据源,cache为当天8点之后获取的数据,数据库存放前一日和更早的数据
+    if not self.target_date or self.target_date >= _date:
+        self.target_date = _date
+
+    records = None
+    if self.target_date == _date:  # 从内存中查找,如果缓存中的数据已更新,重新加载到内存
+        secs = await cache.security.lrange("security:all", 0, -1)
+        if len(secs) != 0:
+            # using np.datetime64[s]
+            records = np.array(
+                [tuple(x.split(",")) for x in secs], dtype=security_info_dtype
+            )
+    else:
+        records = await Security.load_securities_from_db(self.target_date)
+    if records is None:
+        return None
+
+    results = []
+    self._type_pattern = self._type_pattern or SecurityType.STOCK.value
+    for record in records:
+        if self._type_pattern is not None:
+            if record["type"] not in self._type_pattern:
+                continue
+        if self._name_pattern is not None:
+            if record["name"].find(self._name_pattern) == -1:
+                continue
+        if self._alias_pattern is not None:
+            if record["alias"].find(self._alias_pattern) == -1:
+                continue
+
+        # 创业板,科创板,ST暂时限定为股票类型
+        if self._only_cyb:
+            if record["type"] != SecurityType.STOCK.value or not (
+                record["code"][:3] in ("300", "301")
+            ):
+                continue
+        if self._only_kcb:
+            if (
+                record["type"] != SecurityType.STOCK.value
+                or record["code"].startswith("688") is False
+            ):
+                continue
+        if self._only_st:
+            if (
+                record["type"] != SecurityType.STOCK.value
+                or record["alias"].find("ST") == -1
+            ):
+                continue
+        if self._exclude_cyb:
+            if record["type"] == SecurityType.STOCK.value and record["code"][
+                :3
+            ] in ("300", "301"):
+                continue
+        if self._exclude_st:
+            if (
+                record["type"] == SecurityType.STOCK.value
+                and record["alias"].find("ST") != -1
+            ):
+                continue
+        if self._exclude_kcb:
+            if record["type"] == SecurityType.STOCK.value and record[
+                "code"
+            ].startswith("688"):
+                continue
+
+        # 退市暂不限定是否为股票
+        if self._include_exit is False:
+            d1 = convert_nptime_to_datetime(record["end"]).date()
+            if d1 < self.target_date:
+                continue
+
+        results.append(record["code"])
+
+    # 返回所有查询到的结果
+    return results
+
+
+
+ +
+ + + +
+ + + +

+exclude_cyb(self) + + +

+ +
+ +

从返回结果中排除创业板类型的股票

+ +
+ Source code in omicron/models/security.py +
def exclude_cyb(self) -> "Query":
+    """从返回结果中排除创业板类型的股票"""
+    self._exclude_cyb = True
+    self._only_cyb = False
+    return self
+
+
+
+ +
+ + + +
+ + + +

+exclude_kcb(self) + + +

+ +
+ +

从返回结果中排除科创板类型的股票

+ +
+ Source code in omicron/models/security.py +
def exclude_kcb(self) -> "Query":
+    """从返回结果中排除科创板类型的股票"""
+    self._exclude_kcb = True
+    self._only_kcb = False
+    return self
+
+
+
+ +
+ + + +
+ + + +

+exclude_st(self) + + +

+ +
+ +

从返回结果中排除ST类型的股票

+ +
+ Source code in omicron/models/security.py +
def exclude_st(self) -> "Query":
+    """从返回结果中排除ST类型的股票"""
+    self._exclude_st = True
+    self._only_st = False
+    return self
+
+
+
+ +
+ + + +
+ + + +

+include_exit(self) + + +

+ +
+ +

从返回结果中包含已退市的证券

+ +
+ Source code in omicron/models/security.py +
def include_exit(self) -> "Query":
+    """从返回结果中包含已退市的证券"""
+    self._include_exit = True
+    return self
+
+
+
+ +
+ + + +
+ + + +

+name_like(self, name) + + +

+ +
+ +

查找股票/证券名称中出现name的品种

+

注意这里的证券名称并不是其显示名。比如对中国平安000001.XSHE来说,它的名称是ZGPA,而不是“中国平安”。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestr

待查找的名字,比如"ZGPA"

required
+
+ Source code in omicron/models/security.py +
def name_like(self, name: str) -> "Query":
+    """查找股票/证券名称中出现`name`的品种
+
+    注意这里的证券名称并不是其显示名。比如对中国平安000001.XSHE来说,它的名称是ZGPA,而不是“中国平安”。
+
+    Args:
+        name: 待查找的名字,比如"ZGPA"
+
+    """
+    if name is None or len(name) == 0:
+        self._name_pattern = None
+    else:
+        self._name_pattern = name
+
+    return self
+
+
+
+ +
+ + + +
+ + + +

+only_cyb(self) + + +

+ +
+ +

返回结果中只包含创业板股票

+ +
+ Source code in omicron/models/security.py +
def only_cyb(self) -> "Query":
+    """返回结果中只包含创业板股票"""
+    self._only_cyb = True  # 高优先级
+    self._exclude_cyb = False
+    self._only_kcb = False
+    self._only_st = False
+    return self
+
+
+
+ +
+ + + +
+ + + +

+only_kcb(self) + + +

+ +
+ +

返回结果中只包含科创板股票

+ +
+ Source code in omicron/models/security.py +
def only_kcb(self) -> "Query":
+    """返回结果中只包含科创板股票"""
+    self._only_kcb = True  # 高优先级
+    self._exclude_kcb = False
+    self._only_cyb = False
+    self._only_st = False
+    return self
+
+
+
+ +
+ + + +
+ + + +

+only_st(self) + + +

+ +
+ +

返回结果中只包含ST类型的证券

+ +
+ Source code in omicron/models/security.py +
def only_st(self) -> "Query":
+    """返回结果中只包含ST类型的证券"""
+    self._only_st = True  # 高优先级
+    self._exclude_st = False
+    self._only_kcb = False
+    self._only_cyb = False
+    return self
+
+
+
+ +
+ + + +
+ + + +

+types(self, types) + + +

+ +
+ +

选择类型在types中的证券品种

+

如果不调用此方法,默认选择所有股票类型。 +如果调用此方法但不传入参数,默认选择指数+股票

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
typesList[str]

有效的类型包括: 对股票指数而言是('index', 'stock'),对基金而言则是('etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof')

required
+
+ Source code in omicron/models/security.py +
def types(self, types: List[str]) -> "Query":
+    """选择类型在`types`中的证券品种
+
+    如果不调用此方法,默认选择所有股票类型。
+    如果调用此方法但不传入参数,默认选择指数+股票
+    Args:
+        types: 有效的类型包括: 对股票指数而言是('index', 'stock'),对基金而言则是('etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof')
+    """
+    if types is None or isinstance(types, List) is False:
+        return self
+
+    if len(types) == 0:
+        self._type_pattern = ["index", "stock"]
+    else:
+        tmp = set(types)
+        self._type_pattern = list(tmp)
+
+    return self
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + +
+ + + +

+ +Security + + + +

+ +
+ + +
+ Source code in omicron/models/security.py +
class Security:
+    _securities = []
+    _securities_date = None
+    _security_types = set()
+    _stocks = []
+
+    @classmethod
+    async def init(cls):
+        """初始化Security.
+
+        一般而言,omicron的使用者无须调用此方法,它会在omicron初始化(通过`omicron.init`)时,被自动调用。
+
+        Raises:
+            DataNotReadyError: 如果omicron未初始化,或者cache中未加载最新证券列表,则抛出此异常。
+        """
+        # read all securities from redis, 7111 records now
+        # {'index', 'stock'}
+        # {'fjb', 'mmf', 'reits', 'fja', 'fjm'}
+        # {'etf', 'lof'}
+        if len(cls._securities) > 100:
+            return True
+
+        secs = await cls.load_securities()
+        if secs is None or len(secs) == 0:  # pragma: no cover
+            raise DataNotReadyError(
+                "No securities in cache, make sure you have called omicron.init() first."
+            )
+
+        print("init securities done")
+        return True
+
+    @classmethod
+    async def load_securities(cls):
+        """加载所有证券的信息,并缓存到内存中
+
+        一般而言,omicron的使用者无须调用此方法,它会在omicron初始化(通过`omicron.init`)时,被自动调用。
+        """
+        secs = await cache.security.lrange("security:all", 0, -1)
+        if len(secs) != 0:
+            # using np.datetime64[s]
+            _securities = np.array(
+                [tuple(x.split(",")) for x in secs], dtype=security_info_dtype
+            )
+
+            # 更新证券类型列表
+            cls._securities = _securities
+            cls._security_types = set(_securities["type"])
+            cls._stocks = _securities[
+                (_securities["type"] == "stock") | (_securities["type"] == "index")
+            ]
+            logger.info(
+                "%d securities loaded, types: %s", len(_securities), cls._security_types
+            )
+
+            date_in_cache = await cache.security.get("security:latest_date")
+            if date_in_cache is not None:
+                cls._securities_date = arrow.get(date_in_cache).date()
+            else:
+                cls._securities_date = datetime.date.today()
+
+            return _securities
+        else:  # pragma: no cover
+            return None
+
+    @classmethod
+    async def get_security_types(cls):
+        if cls._security_types:
+            return list(cls._security_types)
+        else:
+            return None
+
+    @classmethod
+    def get_stock(cls, code) -> NDArray[security_info_dtype]:
+        """根据`code`来查找对应的股票(含指数)对象信息。
+
+        如果您只有股票代码,想知道该代码对应的股票名称、别名(显示名)、上市日期等信息,就可以使用此方法来获取相关信息。
+
+        返回类型为`security_info_dtype`的numpy数组,但仅包含一个元素。您可以象字典一样存取它,比如
+        ```python
+            item = Security.get_stock("000001.XSHE")
+            print(item["alias"])
+        ```
+        显示为"平安银行"
+
+        Args:
+            code: 待查询的股票/指数代码
+
+        Returns:
+            类型为`security_info_dtype`的numpy数组,但仅包含一个元素
+        """
+        if len(cls._securities) == 0:
+            return None
+
+        tmp = cls._securities[cls._securities["code"] == code]
+        if len(tmp) > 0:
+            if tmp["type"] in ["stock", "index"]:
+                return tmp[0]
+
+        return None
+
+    @classmethod
+    def fuzzy_match_ex(cls, query: str) -> Dict[str, Tuple]:
+        # fixme: 此方法与Stock.fuzzy_match重复,并且进行了类型限制,使得其不适合放在Security里,以及作为一个通用方法
+
+        query = query.upper()
+        if re.match(r"\d+", query):
+            return {
+                sec["code"]: sec.tolist()
+                for sec in cls._securities
+                if sec["code"].find(query) != -1 and sec["type"] == "stock"
+            }
+        elif re.match(r"[A-Z]+", query):
+            return {
+                sec["code"]: sec.tolist()
+                for sec in cls._securities
+                if sec["name"].startswith(query) and sec["type"] == "stock"
+            }
+        else:
+            return {
+                sec["code"]: sec.tolist()
+                for sec in cls._securities
+                if sec["alias"].find(query) != -1 and sec["type"] == "stock"
+            }
+
+    @classmethod
+    async def info(cls, code, date=None):
+        _obj = await cls.query_security_via_date(code, date)
+        if _obj is None:
+            return None
+
+        # "_time", "code", "type", "alias", "end", "ipo", "name"
+        d1 = convert_nptime_to_datetime(_obj["ipo"]).date()
+        d2 = convert_nptime_to_datetime(_obj["end"]).date()
+        return {
+            "type": _obj["type"],
+            "display_name": _obj["alias"],
+            "alias": _obj["alias"],
+            "end": d2,
+            "start": d1,
+            "name": _obj["name"],
+        }
+
+    @classmethod
+    async def name(cls, code, date=None):
+        _security = await cls.query_security_via_date(code, date)
+        if _security is None:
+            return None
+        return _security["name"]
+
+    @classmethod
+    async def alias(cls, code, date=None):
+        return await cls.display_name(code, date)
+
+    @classmethod
+    async def display_name(cls, code, date=None):
+        _security = await cls.query_security_via_date(code, date)
+        if _security is None:
+            return None
+        return _security["alias"]
+
+    @classmethod
+    async def start_date(cls, code, date=None):
+        _security = await cls.query_security_via_date(code, date)
+        if _security is None:
+            return None
+        return convert_nptime_to_datetime(_security["ipo"]).date()
+
+    @classmethod
+    async def end_date(cls, code, date=None):
+        _security = await cls.query_security_via_date(code, date)
+        if _security is None:
+            return None
+        return convert_nptime_to_datetime(_security["end"]).date()
+
+    @classmethod
+    async def security_type(cls, code, date=None) -> SecurityType:
+        _security = await cls.query_security_via_date(code, date)
+        if _security is None:
+            return None
+        return _security["type"]
+
+    @classmethod
+    async def query_security_via_date(cls, code: str, date: datetime.date = None):
+        if date is None:  # 从内存中查找,如果缓存中的数据已更新,重新加载到内存
+            date_in_cache = await cache.security.get("security:latest_date")
+            if date_in_cache is not None:
+                date = arrow.get(date_in_cache).date()
+                if date > cls._securities_date:
+                    await cls.load_securities()
+            results = cls._securities[cls._securities["code"] == code]
+        else:  # 从influxdb查找
+            date = tf.day_shift(date, 0)
+            results = await cls.load_securities_from_db(date, code)
+
+        if results is not None and len(results) > 0:
+            return results[0]
+        else:
+            return None
+
+    @classmethod
+    def select(cls, date: datetime.date = None) -> Query:
+        if date is None:
+            return Query(target_date=None)
+        else:
+            return Query(target_date=date)
+
+    @classmethod
+    async def update_secs_cache(cls, dt: datetime.date, securities: List[Tuple]):
+        """更新证券列表到缓存数据库中
+
+        Args:
+            dt: 证券列表归属的日期
+            securities: 证券列表, 元素为元组,分别为代码、别名、名称、IPO日期、退市日和证券类型
+        """
+        # stock: {'index', 'stock'}
+        # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'}
+        # {'etf', 'lof'}
+        key = "security:all"
+        pipeline = cache.security.pipeline()
+        pipeline.delete(key)
+        for code, alias, name, start, end, _type in securities:
+            pipeline.rpush(key, f"{code},{alias},{name},{start}," f"{end},{_type}")
+        await pipeline.execute()
+        logger.info("all securities saved to cache %s, %d secs", key, len(securities))
+
+        # update latest date info
+        await cache.security.set("security:latest_date", dt.strftime("%Y-%m-%d"))
+
+    @classmethod
+    async def save_securities(cls, securities: List[str], dt: datetime.date):
+        """保存指定的证券信息到缓存中,并且存入influxdb,定时job调用本接口
+
+        Args:
+            securities: 证券代码列表。
+        """
+        # stock: {'index', 'stock'}
+        # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'}
+        # {'etf', 'lof'}
+        if dt is None or len(securities) == 0:
+            return
+
+        measurement = "security_list"
+        client = get_influx_client()
+
+        # code, alias, name, start, end, type
+        security_list = np.array(
+            [
+                (dt, x[0], f"{x[0]},{x[1]},{x[2]},{x[3]},{x[4]},{x[5]}")
+                for x in securities
+            ],
+            dtype=security_db_dtype,
+        )
+        await client.save(
+            security_list, measurement, time_key="frame", tag_keys=["code"]
+        )
+
+    @classmethod
+    async def load_securities_from_db(
+        cls, target_date: datetime.date, code: str = None
+    ):
+        if target_date is None:
+            return None
+
+        client = get_influx_client()
+        measurement = "security_list"
+
+        flux = (
+            Flux()
+            .measurement(measurement)
+            .range(target_date, target_date)
+            .bucket(client._bucket)
+            .fields(["info"])
+        )
+        if code is not None and len(code) > 0:
+            flux.tags({"code": code})
+
+        data = await client.query(flux)
+        if len(data) == 2:  # \r\n
+            return None
+
+        ds = DataframeDeserializer(
+            sort_values="_time",
+            usecols=["_time", "code", "info"],
+            time_col="_time",
+            engine="c",
+        )
+        actual = ds(data)
+        secs = actual.to_records(index=False)
+
+        if len(secs) != 0:
+            # "_time", "code", "code, alias, name, start, end, type"
+            _securities = np.array(
+                [tuple(x["info"].split(",")) for x in secs], dtype=security_info_dtype
+            )
+            return _securities
+        else:
+            return None
+
+    @classmethod
+    async def get_datescope_from_db(cls):
+        # fixme: 函数名无法反映用途,需要增加文档注释,说明该函数的作用,或者不应该出现在此类中?
+        client = get_influx_client()
+        measurement = "security_list"
+
+        date1 = arrow.get("2005-01-01").date()
+        date2 = arrow.now().naive.date()
+
+        flux = (
+            Flux()
+            .measurement(measurement)
+            .range(date1, date2)
+            .bucket(client._bucket)
+            .tags({"code": "000001.XSHE"})
+        )
+
+        data = await client.query(flux)
+        if len(data) == 2:  # \r\n
+            return None, None
+
+        ds = DataframeDeserializer(
+            sort_values="_time", usecols=["_time"], time_col="_time", engine="c"
+        )
+        actual = ds(data)
+        secs = actual.to_records(index=False)
+
+        if len(secs) != 0:
+            d1 = convert_nptime_to_datetime(secs[0]["_time"])
+            d2 = convert_nptime_to_datetime(secs[len(secs) - 1]["_time"])
+            return d1.date(), d2.date()
+        else:
+            return None, None
+
+    @classmethod
+    async def _notify_special_bonusnote(cls, code, note, cancel_date):
+        # fixme: 这个函数应该出现在omega中?
+        default_cancel_date = datetime.date(2099, 1, 1)  # 默认无取消公告
+        # report this special event to notify user
+        if cancel_date != default_cancel_date:
+            ding("security %s, bonus_cancel_pub_date %s" % (code, cancel_date))
+
+        if note.find("流通") != -1:  # 检查是否有“流通股”文字
+            ding("security %s, special xrxd note: %s" % (code, note))
+
+    @classmethod
+    async def save_xrxd_reports(cls, reports: List[str], dt: datetime.date):
+        # fixme: 此函数应该属于omega?
+
+        """保存1年内的分红送股信息,并且存入influxdb,定时job调用本接口
+
+        Args:
+            reports: 分红送股公告
+        """
+        # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5),
+        # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10)
+
+        if len(reports) == 0 or dt is None:
+            return
+
+        # read reports from db and convert to dict map
+        reports_in_db = {}
+        dt_start = dt - datetime.timedelta(days=366)  # 往前回溯366天
+        dt_end = dt + datetime.timedelta(days=366)  # 往后延长366天
+        existing_records = await cls._load_xrxd_from_db(None, dt_start, dt_end)
+        for record in existing_records:
+            code = record[0]
+            if code not in reports_in_db:
+                reports_in_db[code] = [record]
+            else:
+                reports_in_db[code].append(record)
+
+        records = []  # 准备写入db
+
+        for x in reports:
+            code = x[0]
+            note = x[2]
+            cancel_date = x[10]
+
+            existing_items = reports_in_db.get(code, None)
+            if existing_items is None:  # 新记录
+                record = (
+                    x[1],
+                    x[0],
+                    f"{x[0]}|{x[1]}|{x[2]}|{x[3]}|{x[4]}|{x[5]}|{x[6]}|{x[7]}|{x[8]}|{x[9]}|{x[10]}",
+                )
+                records.append(record)
+                await cls._notify_special_bonusnote(code, note, cancel_date)
+            else:
+                new_record = True
+                for item in existing_items:
+                    existing_date = convert_nptime_to_datetime(item[1]).date()
+                    if existing_date == x[1]:  # 如果xr_date相同,不更新
+                        new_record = False
+                        continue
+                if new_record:
+                    record = (
+                        x[1],
+                        x[0],
+                        f"{x[0]}|{x[1]}|{x[2]}|{x[3]}|{x[4]}|{x[5]}|{x[6]}|{x[7]}|{x[8]}|{x[9]}|{x[10]}",
+                    )
+                    records.append(record)
+                    await cls._notify_special_bonusnote(code, note, cancel_date)
+
+        logger.info("save_xrxd_reports, %d records to be saved", len(records))
+        if len(records) == 0:
+            return
+
+        measurement = "security_xrxd_reports"
+        client = get_influx_client()
+        # a_xr_date(_time), code(tag), info
+        report_list = np.array(records, dtype=security_db_dtype)
+        await client.save(report_list, measurement, time_key="frame", tag_keys=["code"])
+
+    @classmethod
+    async def _load_xrxd_from_db(
+        cls, code, dt_start: datetime.date, dt_end: datetime.date
+    ):
+        if dt_start is None or dt_end is None:
+            return []
+
+        client = get_influx_client()
+        measurement = "security_xrxd_reports"
+
+        flux = (
+            Flux()
+            .measurement(measurement)
+            .range(dt_start, dt_end)
+            .bucket(client._bucket)
+            .fields(["info"])
+        )
+        if code is not None and len(code) > 0:
+            flux.tags({"code": code})
+
+        data = await client.query(flux)
+        if len(data) == 2:  # \r\n
+            return []
+
+        ds = DataframeDeserializer(
+            sort_values="_time",
+            usecols=["_time", "code", "info"],
+            time_col="_time",
+            engine="c",
+        )
+        actual = ds(data)
+        secs = actual.to_records(index=False)
+
+        if len(secs) != 0:
+            _reports = np.array(
+                [tuple(x["info"].split("|")) for x in secs], dtype=xrxd_info_dtype
+            )
+            return _reports
+        else:
+            return []
+
+    @classmethod
+    async def get_xrxd_info(cls, dt: datetime.date, code: str = None):
+        if dt is None:
+            return None
+
+        # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5),
+        # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10)
+        reports = await cls._load_xrxd_from_db(code, dt, dt)
+        if len(reports) == 0:
+            return None
+
+        readable_reports = []
+        for report in reports:
+            xr_date = convert_nptime_to_datetime(report[1]).date()
+            readable_reports.append(
+                {
+                    "code": report[0],
+                    "xr_date": xr_date,
+                    "bonus": report[3],
+                    "dividend": report[4],
+                    "transfer": report[5],
+                    "bonusnote": report[2],
+                }
+            )
+
+        return readable_reports
+
+
+ + + +
+ + + + + + + + + + + + + + + +
+ + + +

+get_stock(code) + + + classmethod + + +

+ +
+ +

根据code来查找对应的股票(含指数)对象信息。

+

如果您只有股票代码,想知道该代码对应的股票名称、别名(显示名)、上市日期等信息,就可以使用此方法来获取相关信息。

+

返回类型为security_info_dtype的numpy数组,但仅包含一个元素。您可以象字典一样存取它,比如 +

1
+2
    item = Security.get_stock("000001.XSHE")
+    print(item["alias"])
+
+显示为"平安银行"

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
code

待查询的股票/指数代码

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
numpy.ndarray[Any, numpy.dtype[[('code', 'O'), ('alias', 'O'), ('name', 'O'), ('ipo', 'datetime64[s]'), ('end', 'datetime64[s]'), ('type', 'O')]]]

类型为security_info_dtype的numpy数组,但仅包含一个元素

+
+ Source code in omicron/models/security.py +
@classmethod
+def get_stock(cls, code) -> NDArray[security_info_dtype]:
+    """根据`code`来查找对应的股票(含指数)对象信息。
+
+    如果您只有股票代码,想知道该代码对应的股票名称、别名(显示名)、上市日期等信息,就可以使用此方法来获取相关信息。
+
+    返回类型为`security_info_dtype`的numpy数组,但仅包含一个元素。您可以象字典一样存取它,比如
+    ```python
+        item = Security.get_stock("000001.XSHE")
+        print(item["alias"])
+    ```
+    显示为"平安银行"
+
+    Args:
+        code: 待查询的股票/指数代码
+
+    Returns:
+        类型为`security_info_dtype`的numpy数组,但仅包含一个元素
+    """
+    if len(cls._securities) == 0:
+        return None
+
+    tmp = cls._securities[cls._securities["code"] == code]
+    if len(tmp) > 0:
+        if tmp["type"] in ["stock", "index"]:
+            return tmp[0]
+
+    return None
+
+
+
+ +
+ + + + + +
+ + + +

+init() + + + async + classmethod + + +

+ +
+ +

初始化Security.

+

一般而言,omicron的使用者无须调用此方法,它会在omicron初始化(通过omicron.init)时,被自动调用。

+ +

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
DataNotReadyError

如果omicron未初始化,或者cache中未加载最新证券列表,则抛出此异常。

+
+ Source code in omicron/models/security.py +
@classmethod
+async def init(cls):
+    """初始化Security.
+
+    一般而言,omicron的使用者无须调用此方法,它会在omicron初始化(通过`omicron.init`)时,被自动调用。
+
+    Raises:
+        DataNotReadyError: 如果omicron未初始化,或者cache中未加载最新证券列表,则抛出此异常。
+    """
+    # read all securities from redis, 7111 records now
+    # {'index', 'stock'}
+    # {'fjb', 'mmf', 'reits', 'fja', 'fjm'}
+    # {'etf', 'lof'}
+    if len(cls._securities) > 100:
+        return True
+
+    secs = await cls.load_securities()
+    if secs is None or len(secs) == 0:  # pragma: no cover
+        raise DataNotReadyError(
+            "No securities in cache, make sure you have called omicron.init() first."
+        )
+
+    print("init securities done")
+    return True
+
+
+
+ +
+ + + +
+ + + +

+load_securities() + + + async + classmethod + + +

+ +
+ +

加载所有证券的信息,并缓存到内存中

+

一般而言,omicron的使用者无须调用此方法,它会在omicron初始化(通过omicron.init)时,被自动调用。

+ +
+ Source code in omicron/models/security.py +
@classmethod
+async def load_securities(cls):
+    """加载所有证券的信息,并缓存到内存中
+
+    一般而言,omicron的使用者无须调用此方法,它会在omicron初始化(通过`omicron.init`)时,被自动调用。
+    """
+    secs = await cache.security.lrange("security:all", 0, -1)
+    if len(secs) != 0:
+        # using np.datetime64[s]
+        _securities = np.array(
+            [tuple(x.split(",")) for x in secs], dtype=security_info_dtype
+        )
+
+        # 更新证券类型列表
+        cls._securities = _securities
+        cls._security_types = set(_securities["type"])
+        cls._stocks = _securities[
+            (_securities["type"] == "stock") | (_securities["type"] == "index")
+        ]
+        logger.info(
+            "%d securities loaded, types: %s", len(_securities), cls._security_types
+        )
+
+        date_in_cache = await cache.security.get("security:latest_date")
+        if date_in_cache is not None:
+            cls._securities_date = arrow.get(date_in_cache).date()
+        else:
+            cls._securities_date = datetime.date.today()
+
+        return _securities
+    else:  # pragma: no cover
+        return None
+
+
+
+ +
+ + + + + + +
+ + + +

+save_securities(securities, dt) + + + async + classmethod + + +

+ +
+ +

保存指定的证券信息到缓存中,并且存入influxdb,定时job调用本接口

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
securitiesList[str]

证券代码列表。

required
+
+ Source code in omicron/models/security.py +
@classmethod
+async def save_securities(cls, securities: List[str], dt: datetime.date):
+    """保存指定的证券信息到缓存中,并且存入influxdb,定时job调用本接口
+
+    Args:
+        securities: 证券代码列表。
+    """
+    # stock: {'index', 'stock'}
+    # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'}
+    # {'etf', 'lof'}
+    if dt is None or len(securities) == 0:
+        return
+
+    measurement = "security_list"
+    client = get_influx_client()
+
+    # code, alias, name, start, end, type
+    security_list = np.array(
+        [
+            (dt, x[0], f"{x[0]},{x[1]},{x[2]},{x[3]},{x[4]},{x[5]}")
+            for x in securities
+        ],
+        dtype=security_db_dtype,
+    )
+    await client.save(
+        security_list, measurement, time_key="frame", tag_keys=["code"]
+    )
+
+
+
+ +
+ + + +
+ + + +

+save_xrxd_reports(reports, dt) + + + async + classmethod + + +

+ +
+ +

保存1年内的分红送股信息,并且存入influxdb,定时job调用本接口

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
reportsList[str]

分红送股公告

required
+
+ Source code in omicron/models/security.py +
@classmethod
+async def save_xrxd_reports(cls, reports: List[str], dt: datetime.date):
+    # fixme: 此函数应该属于omega?
+
+    """保存1年内的分红送股信息,并且存入influxdb,定时job调用本接口
+
+    Args:
+        reports: 分红送股公告
+    """
+    # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5),
+    # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10)
+
+    if len(reports) == 0 or dt is None:
+        return
+
+    # read reports from db and convert to dict map
+    reports_in_db = {}
+    dt_start = dt - datetime.timedelta(days=366)  # 往前回溯366天
+    dt_end = dt + datetime.timedelta(days=366)  # 往后延长366天
+    existing_records = await cls._load_xrxd_from_db(None, dt_start, dt_end)
+    for record in existing_records:
+        code = record[0]
+        if code not in reports_in_db:
+            reports_in_db[code] = [record]
+        else:
+            reports_in_db[code].append(record)
+
+    records = []  # 准备写入db
+
+    for x in reports:
+        code = x[0]
+        note = x[2]
+        cancel_date = x[10]
+
+        existing_items = reports_in_db.get(code, None)
+        if existing_items is None:  # 新记录
+            record = (
+                x[1],
+                x[0],
+                f"{x[0]}|{x[1]}|{x[2]}|{x[3]}|{x[4]}|{x[5]}|{x[6]}|{x[7]}|{x[8]}|{x[9]}|{x[10]}",
+            )
+            records.append(record)
+            await cls._notify_special_bonusnote(code, note, cancel_date)
+        else:
+            new_record = True
+            for item in existing_items:
+                existing_date = convert_nptime_to_datetime(item[1]).date()
+                if existing_date == x[1]:  # 如果xr_date相同,不更新
+                    new_record = False
+                    continue
+            if new_record:
+                record = (
+                    x[1],
+                    x[0],
+                    f"{x[0]}|{x[1]}|{x[2]}|{x[3]}|{x[4]}|{x[5]}|{x[6]}|{x[7]}|{x[8]}|{x[9]}|{x[10]}",
+                )
+                records.append(record)
+                await cls._notify_special_bonusnote(code, note, cancel_date)
+
+    logger.info("save_xrxd_reports, %d records to be saved", len(records))
+    if len(records) == 0:
+        return
+
+    measurement = "security_xrxd_reports"
+    client = get_influx_client()
+    # a_xr_date(_time), code(tag), info
+    report_list = np.array(records, dtype=security_db_dtype)
+    await client.save(report_list, measurement, time_key="frame", tag_keys=["code"])
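
For reference, the inline comment in the source above fixes the positional layout of each report. A hedged sketch of one record in that layout; the values are illustrative, and whether dates arrive as datetime.date or strings depends on the caller (typically omega).

```python
# Hedged sketch of one xr/xd report row as consumed by save_xrxd_reports().
# Positions follow the source comment: code(0) ... bonus_cancel_pub_date(10).
import datetime

report = [
    "000001.XSHE",               # 0  code
    datetime.date(2023, 6, 14),  # 1  a_xr_date
    "10派2.85元(含税)",           # 2  board_plan_bonusnote
    2.85,                        # 3  bonus_ratio_rmb
    0.0,                         # 4  dividend_ratio
    0.0,                         # 5  transfer_ratio
    2.85,                        # 6  at_bonus_ratio_rmb
    datetime.date(2023, 3, 9),   # 7  report_date
    "实施方案",                   # 8  plan_progress
    "10派2.85元(含税)",           # 9  implementation_bonusnote
    datetime.date(2099, 1, 1),   # 10 bonus_cancel_pub_date
]

# inside an async context:
#     await Security.save_xrxd_reports([report], dt=datetime.date(2023, 6, 14))
```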
+
+
+

update_secs_cache(dt, securities) (async classmethod)

Update the securities list in the cache database.

Parameters:

Name         Type          Description                                                                                                    Default
dt           date          The date the securities list belongs to.                                                                       required
securities   List[Tuple]   The securities list; each element is a tuple of code, alias, name, IPO date, delisting date and security type.   required
+ Source code in omicron/models/security.py +
@classmethod
+async def update_secs_cache(cls, dt: datetime.date, securities: List[Tuple]):
+    """更新证券列表到缓存数据库中
+
+    Args:
+        dt: 证券列表归属的日期
+        securities: 证券列表, 元素为元组,分别为代码、别名、名称、IPO日期、退市日和证券类型
+    """
+    # stock: {'index', 'stock'}
+    # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'}
+    # {'etf', 'lof'}
+    key = "security:all"
+    pipeline = cache.security.pipeline()
+    pipeline.delete(key)
+    for code, alias, name, start, end, _type in securities:
+        pipeline.rpush(key, f"{code},{alias},{name},{start}," f"{end},{_type}")
+    await pipeline.execute()
+    logger.info("all securities saved to cache %s, %d secs", key, len(securities))
+
+    # update latest date info
+    await cache.security.set("security:latest_date", dt.strftime("%Y-%m-%d"))
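
What the method above writes to redis can be read off the code directly: one comma-joined string per security pushed onto the security:all list, plus security:latest_date stored as a YYYY-MM-DD string. A small illustration:

```python
# Illustration of the exact strings update_secs_cache() stores in redis.
import datetime

dt = datetime.date(2023, 1, 3)
securities = [("000001.XSHE", "平安银行", "PAYH", "1991-04-03", "2200-01-01", "stock")]

rows = [
    f"{code},{alias},{name},{start},{end},{_type}"
    for code, alias, name, start, end, _type in securities
]
print(rows[0])                  # value rpush'ed onto "security:all"
print(dt.strftime("%Y-%m-%d"))  # value set at "security:latest_date"
```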
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
\ No newline at end of file
diff --git a/2.0.0/api/stock/index.html b/2.0.0/api/stock/index.html
new file mode 100644
index 00000000..c0a496e8
--- /dev/null
+++ b/2.0.0/api/stock/index.html
@@ -0,0 +1,4705 @@
+ stock - Omicron

Stock (Security)

+ +
+ +

A Stock object aggregates information about a single security (stocks and indexes; other instrument types are not included), such as quote data (OHLC, etc.), market-capitalization data, and the concept boards it belongs to.
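
A short usage sketch based only on the properties and helpers shown in the source below; it assumes omicron.init() has already loaded the securities cache, and the printed values are illustrative.

```python
# Hedged usage sketch for the Stock class (requires a populated securities cache).
from omicron.models.stock import Stock

stock = Stock("000001.XSHE")
print(stock)                                 # e.g. "平安银行[000001.XSHE]"
print(stock.ipo_date, stock.end_date, stock.security_type)
print(stock.days_since_ipo())                # trading days since listing (from 2005-01-04 at the earliest)

print(Stock.format_code("600000"))           # "600000.XSHG"
print(Stock.simplify_code("000001.XSHE"))    # "000001"
print(list(Stock.fuzzy_match("平安").keys())[:3])
```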

+ +
+ Source code in omicron/models/stock.py +
class Stock(Security):
+    """
+    Stock对象用于归集某支证券(股票和指数,不包括其它投资品种)的相关信息,比如行情数据(OHLC等)、市值数据、所属概念分类等。
+    """
+
+    _is_cache_empty = True
+
+    def __init__(self, code: str):
+        self._code = code
+        self._stock = self.get_stock(code)
+        assert self._stock, "系统中不存在该code"
+        (_, self._display_name, self._name, ipo, end, _type) = self._stock
+        self._start_date = convert_nptime_to_datetime(ipo).date()
+        self._end_date = convert_nptime_to_datetime(end).date()
+        self._type = SecurityType(_type)
+
+    @classmethod
+    def choose_listed(cls, dt: datetime.date, types: List[str] = ["stock", "index"]):
+        cond = np.array([False] * len(cls._stocks))
+        dt = datetime.datetime.combine(dt, datetime.time())
+
+        for type_ in types:
+            cond |= cls._stocks["type"] == type_
+        result = cls._stocks[cond]
+        result = result[result["end"] > dt]
+        result = result[result["ipo"] <= dt]
+        # result = np.array(result, dtype=cls.stock_info_dtype)
+        return result["code"].tolist()
+
+    @classmethod
+    def fuzzy_match(cls, query: str) -> Dict[str, Tuple]:
+        """对股票/指数进行模糊匹配查找
+
+        query可以是股票/指数代码,也可以是字母(按name查找),也可以是汉字(按显示名查找)
+
+        Args:
+            query (str): 查询字符串
+
+        Returns:
+            Dict[str, Tuple]: 查询结果,其中Tuple为(code, display_name, name, start, end, type)
+        """
+        query = query.upper()
+        if re.match(r"\d+", query):
+            return {
+                sec["code"]: sec.tolist()
+                for sec in cls._stocks
+                if sec["code"].startswith(query)
+            }
+        elif re.match(r"[A-Z]+", query):
+            return {
+                sec["code"]: sec.tolist()
+                for sec in cls._stocks
+                if sec["name"].startswith(query)
+            }
+        else:
+            return {
+                sec["code"]: sec.tolist()
+                for sec in cls._stocks
+                if sec["alias"].find(query) != -1
+            }
+
+    def __str__(self):
+        return f"{self.display_name}[{self.code}]"
+
+    @property
+    def ipo_date(self) -> datetime.date:
+        return self._start_date
+
+    @property
+    def display_name(self) -> str:
+        return self._display_name
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def end_date(self) -> datetime.date:
+        return self._end_date
+
+    @property
+    def code(self) -> str:
+        return self._code
+
+    @property
+    def sim_code(self) -> str:
+        return re.sub(r"\.XSH[EG]", "", self.code)
+
+    @property
+    def security_type(self) -> SecurityType:
+        """返回证券类型
+
+        Returns:
+            SecurityType: [description]
+        """
+        return self._type
+
+    @staticmethod
+    def simplify_code(code) -> str:
+        return re.sub(r"\.XSH[EG]", "", code)
+
+    @staticmethod
+    def format_code(code) -> str:
+        """新三板和北交所的股票, 暂不支持, 默认返回None
+        上证A股: 600、601、603、605
+        深证A股: 000、001
+        中小板:  002、003
+        创业板:  300/301
+        科创板:  688
+        新三板:  82、83、87、88、430、420、400
+        北交所:  43、83、87、88
+        """
+        if not code or len(code) != 6:
+            return None
+
+        prefix = code[0]
+        if prefix in ("0", "3"):
+            return f"{code}.XSHE"
+        elif prefix == "6":
+            return f"{code}.XSHG"
+        else:
+            return None
+
+    def days_since_ipo(self) -> int:
+        """获取上市以来经过了多少个交易日
+
+        由于受交易日历限制(2005年1月4日之前的交易日历没有),对于在之前上市的品种,都返回从2005年1月4日起的日期。
+
+        Returns:
+            int: [description]
+        """
+        epoch_start = arrow.get("2005-01-04").date()
+        ipo_day = self.ipo_date if self.ipo_date > epoch_start else epoch_start
+        return tf.count_day_frames(ipo_day, arrow.now().date())
+
+    @staticmethod
+    def qfq(bars: BarsArray) -> BarsArray:
+        """对行情数据执行前复权操作"""
+        # todo: 这里可以优化
+        if bars.size == 0:
+            return bars
+
+        last = bars[-1]["factor"]
+        for field in ["open", "high", "low", "close", "volume"]:
+            bars[field] = bars[field] * (bars["factor"] / last)
+
+        return bars
+
+    @classmethod
+    async def batch_get_min_level_bars_in_range(
+        cls,
+        codes: List[str],
+        frame_type: FrameType,
+        start: Frame,
+        end: Frame,
+        fq: bool = True,
+    ) -> Generator[Dict[str, BarsArray], None, None]:
+        """获取多支股票(指数)在[start, end)时间段内的行情数据
+
+        如果要获取的行情数据是分钟级别(即1m, 5m, 15m, 30m和60m),使用本接口。
+
+        停牌数据处理请见[get_bars][omicron.models.stock.Stock.get_bars]。
+
+        本函数返回一个迭代器,使用方法示例:
+        ```
+        async for code, bars in Stock.batch_get_min_level_bars_in_range(...):
+            print(code, bars)
+        ```
+
+        如果`end`不在`frame_type`所属的边界点上,那么,如果`end`大于等于当前缓存未收盘数据时间,则将包含未收盘数据;否则,返回的记录将截止到`tf.floor(end, frame_type)`。
+
+        Args:
+            codes: 股票/指数代码列表
+            frame_type: 帧类型
+            start: 起始时间
+            end: 结束时间。如果未指明,则取当前时间。
+            fq: 是否进行复权,如果是,则进行前复权。Defaults to True.
+
+        Returns:
+            Generator[Dict[str, BarsArray], None, None]: 迭代器,每次返回一个字典,其中key为代码,value为行情数据
+        """
+        closed_end = tf.floor(end, frame_type)
+        n = tf.count_frames(start, closed_end, frame_type)
+        max_query_size = min(cfg.influxdb.max_query_size, INFLUXDB_MAX_QUERY_SIZE)
+        batch_size = max(1, max_query_size // n)
+        ff = tf.first_min_frame(datetime.datetime.now(), frame_type)
+
+        for i in range(0, len(codes), batch_size):
+            batch_codes = codes[i : i + batch_size]
+
+            if end < ff:
+                part1 = await cls._batch_get_persisted_bars_in_range(
+                    batch_codes, frame_type, start, end
+                )
+                part2 = pd.DataFrame([], columns=bars_dtype_with_code.names)
+            elif start >= ff:
+                part1 = pd.DataFrame([], columns=bars_dtype_with_code.names)
+                n = tf.count_frames(start, closed_end, frame_type) + 1
+                cached = await cls._batch_get_cached_bars_n(
+                    frame_type, n, end, batch_codes
+                )
+                cached = cached[cached["frame"] >= start]
+                part2 = pd.DataFrame(cached, columns=bars_dtype_with_code.names)
+            else:
+                part1 = await cls._batch_get_persisted_bars_in_range(
+                    batch_codes, frame_type, start, ff
+                )
+                n = tf.count_frames(start, closed_end, frame_type) + 1
+                cached = await cls._batch_get_cached_bars_n(
+                    frame_type, n, end, batch_codes
+                )
+                part2 = pd.DataFrame(cached, columns=bars_dtype_with_code.names)
+
+            df = pd.concat([part1, part2])
+
+            for code in batch_codes:
+                filtered = df[df["code"] == code][bars_cols]
+                bars = filtered.to_records(index=False).astype(bars_dtype)
+                if fq:
+                    bars = cls.qfq(bars)
+
+                yield code, bars
+
+    @classmethod
+    async def batch_get_day_level_bars_in_range(
+        cls,
+        codes: List[str],
+        frame_type: FrameType,
+        start: Frame,
+        end: Frame,
+        fq: bool = True,
+    ) -> Generator[Dict[str, BarsArray], None, None]:
+        """获取多支股票(指数)在[start, end)时间段内的行情数据
+
+        如果要获取的行情数据是日线级别(即1d, 1w, 1M),使用本接口。
+
+        停牌数据处理请见[get_bars][omicron.models.stock.Stock.get_bars]。
+
+        本函数返回一个迭代器,使用方法示例:
+        ```
+        async for code, bars in Stock.batch_get_day_level_bars_in_range(...):
+            print(code, bars)
+        ```
+
+        如果`end`不在`frame_type`所属的边界点上,那么,如果`end`大于等于当前缓存未收盘数据时间,则将包含未收盘数据;否则,返回的记录将截止到`tf.floor(end, frame_type)`。
+
+        Args:
+            codes: 代码列表
+            frame_type: 帧类型
+            start: 起始时间
+            end: 结束时间
+            fq: 是否进行复权,如果是,则进行前复权。Defaults to True.
+
+        Returns:
+            Generator[Dict[str, BarsArray], None, None]: 迭代器,每次返回一个字典,其中key为代码,value为行情数据
+        """
+        today = datetime.datetime.now().date()
+        # 日线,end不等于最后交易日,此时已无缓存
+        if frame_type == FrameType.DAY and end == tf.floor(today, frame_type):
+            from_cache = True
+        elif frame_type != FrameType.DAY and start > tf.floor(today, frame_type):
+            from_cache = True
+        else:
+            from_cache = False
+
+        n = tf.count_frames(start, end, frame_type)
+        max_query_size = min(cfg.influxdb.max_query_size, INFLUXDB_MAX_QUERY_SIZE)
+        batch_size = max(max_query_size // n, 1)
+
+        for i in range(0, len(codes), batch_size):
+            batch_codes = codes[i : i + batch_size]
+            persisted = await cls._batch_get_persisted_bars_in_range(
+                batch_codes, frame_type, start, end
+            )
+
+            if from_cache:
+                cached = await cls._batch_get_cached_bars_n(
+                    frame_type, 1, end, batch_codes
+                )
+                cached = pd.DataFrame(cached, columns=bars_dtype_with_code.names)
+
+                df = pd.concat([persisted, cached])
+            else:
+                df = persisted
+
+            for code in batch_codes:
+                filtered = df[df["code"] == code][bars_cols]
+                bars = filtered.to_records(index=False).astype(bars_dtype)
+                if fq:
+                    bars = cls.qfq(bars)
+
+                yield code, bars
+
+    @classmethod
+    async def get_bars_in_range(
+        cls,
+        code: str,
+        frame_type: FrameType,
+        start: Frame,
+        end: Frame = None,
+        fq=True,
+        unclosed=True,
+    ) -> BarsArray:
+        """获取指定证券(`code`)在[`start`, `end`]期间帧类型为`frame_type`的行情数据。
+
+        Args:
+            code : 证券代码
+            frame_type : 行情数据的帧类型
+            start : 起始时间
+            end : 结束时间,如果为None,则表明取到当前时间。
+            fq : 是否对行情数据执行前复权操作
+            unclosed : 是否包含未收盘的数据
+        """
+        now = datetime.datetime.now()
+
+        if frame_type in tf.day_level_frames:
+            end = end or now.date()
+            if unclosed and tf.day_shift(end, 0) == now.date():
+                part2 = await cls._get_cached_bars_n(code, 1, frame_type)
+            else:
+                part2 = np.array([], dtype=bars_dtype)
+
+            # get rest from persisted
+            part1 = await cls._get_persisted_bars_in_range(code, frame_type, start, end)
+            bars = np.concatenate((part1, part2))
+        else:
+            end = end or now
+            closed_end = tf.floor(end, frame_type)
+            ff_min1 = tf.first_min_frame(now, FrameType.MIN1)
+            if tf.day_shift(end, 0) < now.date() or end < ff_min1:
+                part1 = await cls._get_persisted_bars_in_range(
+                    code, frame_type, start, end
+                )
+                part2 = np.array([], dtype=bars_dtype)
+            elif start >= ff_min1:  # all in cache
+                part1 = np.array([], dtype=bars_dtype)
+                n = tf.count_frames(start, closed_end, frame_type) + 1
+                part2 = await cls._get_cached_bars_n(code, n, frame_type, end)
+                part2 = part2[part2["frame"] >= start]
+            else:  # in both cache and persisted
+                ff = tf.first_min_frame(now, frame_type)
+                part1 = await cls._get_persisted_bars_in_range(
+                    code, frame_type, start, ff
+                )
+                n = tf.count_frames(ff, closed_end, frame_type) + 1
+                part2 = await cls._get_cached_bars_n(code, n, frame_type, end)
+
+            if not unclosed:
+                part2 = part2[part2["frame"] <= closed_end]
+            bars = np.concatenate((part1, part2))
+
+        if fq:
+            return cls.qfq(bars)
+        else:
+            return bars
+
+    @classmethod
+    async def get_bars(
+        cls,
+        code: str,
+        n: int,
+        frame_type: FrameType,
+        end: Frame = None,
+        fq=True,
+        unclosed=True,
+    ) -> BarsArray:
+        """获取到`end`为止的`n`个行情数据。
+
+        返回的数据是按照时间顺序递增排序的。在遇到停牌的情况时,该时段数据将被跳过,因此返回的记录可能不是交易日连续的,并且可能不足`n`个。
+
+        如果系统当前没有到指定时间`end`的数据,将尽最大努力返回数据。调用者可以通过判断最后一条数据的时间是否等于`end`来判断是否获取到了全部数据。
+
+        Args:
+            code: 证券代码
+            n: 记录数
+            frame_type: 帧类型
+            end: 截止时间,如果未指明,则取当前时间
+            fq: 是否对返回记录进行复权。如果为`True`的话,则进行前复权。Defaults to True.
+            unclosed: 是否包含最新未收盘的数据? Defaults to True.
+
+        Returns:
+            返回dtype为`coretypes.bars_dtype`的一维numpy数组。
+        """
+        now = datetime.datetime.now()
+        try:
+            cached = np.array([], dtype=bars_dtype)
+
+            if frame_type in tf.day_level_frames:
+                if end is None:
+                    end = now.date()
+                elif type(end) == datetime.datetime:
+                    end = end.date()
+                n0 = n
+                if unclosed:
+                    cached = await cls._get_cached_bars_n(code, 1, frame_type)
+                    if cached.size > 0:
+                        # 如果缓存的未收盘日期 > end,则该缓存不是需要的
+                        if cached[0]["frame"].item().date() > end:
+                            cached = np.array([], dtype=bars_dtype)
+                        else:
+                            n0 = n - 1
+            else:
+                end = end or now
+                closed_frame = tf.floor(end, frame_type)
+
+                # fetch one more bar, in case we should discard unclosed bar
+                cached = await cls._get_cached_bars_n(code, n + 1, frame_type, end)
+                if not unclosed:
+                    cached = cached[cached["frame"] <= closed_frame]
+
+                # n bars we need fetch from persisted db
+                n0 = n - cached.size
+            if n0 > 0:
+                if cached.size > 0:
+                    end0 = cached[0]["frame"].item()
+                else:
+                    end0 = end
+
+                bars = await cls._get_persisted_bars_n(code, frame_type, n0, end0)
+                merged = np.concatenate((bars, cached))
+                bars = merged[-n:]
+            else:
+                bars = cached[-n:]
+
+            if fq:
+                bars = cls.qfq(bars)
+            return bars
+        except Exception as e:
+            logger.exception(e)
+            logger.warning(
+                "failed to get bars for %s, %s, %s, %s", code, n, frame_type, end
+            )
+            raise
+
+    @classmethod
+    async def _get_persisted_bars_in_range(
+        cls, code: str, frame_type: FrameType, start: Frame, end: Frame = None
+    ) -> BarsArray:
+        """从持久化数据库中获取介于[`start`, `end`]间的行情记录
+
+        如果`start`到`end`区间某支股票停牌,则会返回空数组。
+
+        Args:
+            code: 证券代码
+            frame_type: 帧类型
+            start: 起始时间
+            end: 结束时间,如果未指明,则取当前时间
+
+        Returns:
+            返回dtype为`coretypes.bars_dtype`的一维numpy数组。
+        """
+        end = end or datetime.datetime.now()
+
+        keep_cols = ["_time"] + list(bars_cols[1:])
+
+        measurement = cls._measurement_name(frame_type)
+        flux = (
+            Flux()
+            .bucket(cfg.influxdb.bucket_name)
+            .range(start, end)
+            .measurement(measurement)
+            .fields(keep_cols)
+            .tags({"code": code})
+        )
+
+        serializer = DataframeDeserializer(
+            encoding="utf-8",
+            names=[
+                "_",
+                "table",
+                "result",
+                "frame",
+                "code",
+                "amount",
+                "close",
+                "factor",
+                "high",
+                "low",
+                "open",
+                "volume",
+            ],
+            engine="c",
+            skiprows=0,
+            header=0,
+            usecols=bars_cols,
+            parse_dates=["frame"],
+        )
+
+        client = get_influx_client()
+        result = await client.query(flux, serializer)
+        return result.to_records(index=False).astype(bars_dtype)
+
+    @classmethod
+    async def _get_persisted_bars_n(
+        cls, code: str, frame_type: FrameType, n: int, end: Frame = None
+    ) -> BarsArray:
+        """从持久化数据库中获取截止到`end`的`n`条行情记录
+
+        如果`end`未指定,则取当前时间。
+
+        基于influxdb查询的特性,在查询前,必须先根据`end`和`n`计算出起始时间,但如果在此期间某些股票有停牌,则无法返回的数据将小于`n`。而如果起始时间设置得足够早,虽然能满足返回数据条数的要求,但会带来性能上的损失。因此,我们在计算起始时间时,不是使用`n`来计算,而是使用了`min(n * 2, n + 20)`来计算起始时间,这样多数情况下,能够保证返回数据的条数为`n`条。
+
+        返回的数据按`frame`进行升序排列。
+
+        Args:
+            code: 证券代码
+            frame_type: 帧类型
+            n: 返回结果数量
+            end: 结束时间,如果未指明,则取当前时间
+
+        Returns:
+            返回dtype为`bars_dtype`的numpy数组
+        """
+        # check is needed since tags accept List as well
+        assert isinstance(code, str), "`code` must be a string"
+
+        end = end or datetime.datetime.now()
+        closed_end = tf.floor(end, frame_type)
+        start = tf.shift(closed_end, -min(2 * n, n + 20), frame_type)
+
+        keep_cols = ["_time"] + list(bars_cols[1:])
+
+        measurement = cls._measurement_name(frame_type)
+        flux = (
+            Flux()
+            .bucket(cfg.influxdb.bucket_name)
+            .range(start, end)
+            .measurement(measurement)
+            .fields(keep_cols)
+            .tags({"code": code})
+            .latest(n)
+        )
+
+        serializer = DataframeDeserializer(
+            encoding="utf-8",
+            names=[
+                "_",
+                "table",
+                "result",
+                "frame",
+                "code",
+                "amount",
+                "close",
+                "factor",
+                "high",
+                "low",
+                "open",
+                "volume",
+            ],
+            engine="c",
+            skiprows=0,
+            header=0,
+            usecols=bars_cols,
+            parse_dates=["frame"],
+        )
+
+        client = get_influx_client()
+        result = await client.query(flux, serializer)
+        return result.to_records(index=False).astype(bars_dtype)
+
+    @classmethod
+    async def _batch_get_persisted_bars_n(
+        cls, codes: List[str], frame_type: FrameType, n: int, end: Frame = None
+    ) -> pd.DataFrame:
+        """从持久化存储中获取`codes`指定的一批股票截止`end`时的`n`条记录。
+
+        返回的数据按`frame`进行升序排列。如果不存在满足指定条件的查询结果,将返回空的DataFrame。
+
+        基于influxdb查询的特性,在查询前,必须先根据`end`和`n`计算出起始时间,但如果在此期间某些股票有停牌,则无法返回的数据将小于`n`。如果起始时间设置的足够早,虽然能满足返回数据条数的要求,但会带来性能上的损失。因此,我们在计算起始时间时,不是使用`n`来计算,而是使用了`min(n * 2, n + 20)`来计算起始时间,这样多数情况下,能够保证返回数据的条数为`n`条。
+
+        Args:
+            codes: 证券代码列表。
+            frame_type: 帧类型
+            n: 返回结果数量
+            end: 结束时间,如果未指定,则使用当前时间
+
+        Returns:
+            DataFrame, columns为`code`, `frame`, `open`, `high`, `low`, `close`, `volume`, `amount`, `factor`
+
+        """
+        max_query_size = min(cfg.influxdb.max_query_size, INFLUXDB_MAX_QUERY_SIZE)
+
+        if len(codes) * min(n + 20, 2 * n) > max_query_size:
+            raise BadParameterError(
+                f"codes的数量和n的乘积超过了influxdb的最大查询数量限制{max_query_size}"
+            )
+
+        end = end or datetime.datetime.now()
+        close_end = tf.floor(end, frame_type)
+        begin = tf.shift(close_end, -1 * min(n + 20, n * 2), frame_type)
+
+        # influxdb的查询结果格式类似于CSV,其列顺序为_, result_alias, table_seq, _time, tags, fields,其中tags和fields都是升序排列
+        keep_cols = ["code"] + list(bars_cols)
+        names = ["_", "result", "table", "frame", "code"]
+
+        # influxdb will return fields in the order of name ascending parallel
+        names.extend(sorted(bars_cols[1:]))
+
+        measurement = cls._measurement_name(frame_type)
+        flux = (
+            Flux()
+            .bucket(cfg.influxdb.bucket_name)
+            .range(begin, end)
+            .measurement(measurement)
+            .fields(keep_cols)
+            .latest(n)
+        )
+
+        if codes is not None:
+            assert isinstance(codes, list), "`codes` must be a list or None"
+            flux.tags({"code": codes})
+
+        deserializer = DataframeDeserializer(
+            names=names,
+            usecols=keep_cols,
+            encoding="utf-8",
+            time_col="frame",
+            engine="c",
+        )
+
+        client = get_influx_client()
+        return await client.query(flux, deserializer)
+
+    @classmethod
+    async def _batch_get_persisted_bars_in_range(
+        cls, codes: List[str], frame_type: FrameType, begin: Frame, end: Frame = None
+    ) -> pd.DataFrame:
+        """从持久化存储中获取`codes`指定的一批股票在`begin`和`end`之间的记录。
+
+        返回的数据将按`frame`进行升序排列。
+        注意,返回的数据有可能不是等长的,因为有的股票可能停牌。
+
+        Args:
+            codes: 证券代码列表。
+            frame_type: 帧类型
+            begin: 开始时间
+            end: 结束时间
+
+        Returns:
+            DataFrame, columns为`code`, `frame`, `open`, `high`, `low`, `close`, `volume`, `amount`, `factor`
+
+        """
+        end = end or datetime.datetime.now()
+
+        n = tf.count_frames(begin, end, frame_type)
+        max_query_size = min(cfg.influxdb.max_query_size, INFLUXDB_MAX_QUERY_SIZE)
+        if len(codes) * n > max_query_size:
+            raise BadParameterError(
+                f"asked records is {len(codes) * n}, which is too large than {max_query_size}"
+            )
+
+        # influxdb的查询结果格式类似于CSV,其列顺序为_, result_alias, table_seq, _time, tags, fields,其中tags和fields都是升序排列
+        keep_cols = ["code"] + list(bars_cols)
+        names = ["_", "result", "table", "frame", "code"]
+
+        # influxdb will return fields in the order of name ascending parallel
+        names.extend(sorted(bars_cols[1:]))
+
+        measurement = cls._measurement_name(frame_type)
+        flux = (
+            Flux()
+            .bucket(cfg.influxdb.bucket_name)
+            .range(begin, end)
+            .measurement(measurement)
+            .fields(keep_cols)
+        )
+
+        flux.tags({"code": codes})
+
+        deserializer = DataframeDeserializer(
+            names=names,
+            usecols=keep_cols,
+            encoding="utf-8",
+            time_col="frame",
+            engine="c",
+        )
+
+        client = get_influx_client()
+        df = await client.query(flux, deserializer)
+        return df
+
+    @classmethod
+    async def batch_cache_bars(cls, frame_type: FrameType, bars: Dict[str, BarsArray]):
+        """缓存已收盘的分钟线和日线
+
+        当缓存日线时,仅限于当日收盘后的第一次同步时调用。
+
+        Args:
+            frame_type: 帧类型
+            bars: 行情数据,其key为股票代码,其value为dtype为`bars_dtype`的一维numpy数组。
+
+        Raises:
+            RedisError: 如果在执行过程中发生错误,则抛出以此异常为基类的各种异常,具体参考aioredis相关文档。
+        """
+        if frame_type == FrameType.DAY:
+            await cls.batch_cache_unclosed_bars(frame_type, bars)
+            return
+
+        pl = cache.security.pipeline()
+        for code, bars in bars.items():
+            key = f"bars:{frame_type.value}:{code}"
+            for bar in bars:
+                frame = tf.time2int(bar["frame"].item())
+                val = [*bar]
+                val[0] = frame
+                pl.hset(key, frame, ",".join(map(str, val)))
+        await pl.execute()
+
+    @classmethod
+    async def batch_cache_unclosed_bars(
+        cls, frame_type: FrameType, bars: Dict[str, BarsArray]
+    ):  # pragma: no cover
+        """缓存未收盘的5、15、30、60分钟线及日线、周线、月线
+
+        Args:
+            frame_type: 帧类型
+            bars: 行情数据,其key为股票代码,其value为dtype为`bars_dtype`的一维numpy数组。bars不能为None,或者empty。
+
+        Raise:
+            RedisError: 如果在执行过程中发生错误,则抛出以此异常为基类的各种异常,具体参考aioredis相关文档。
+        """
+        pl = cache.security.pipeline()
+        key = f"bars:{frame_type.value}:unclosed"
+
+        convert = tf.time2int if frame_type in tf.minute_level_frames else tf.date2int
+
+        for code, bar in bars.items():
+            val = [*bar[0]]
+            val[0] = convert(bar["frame"][0].item())  # 时间转换
+            pl.hset(key, code, ",".join(map(str, val)))
+
+        await pl.execute()
+
+    @classmethod
+    async def reset_cache(cls):
+        """清除缓存的行情数据"""
+        try:
+            for ft in itertools.chain(tf.minute_level_frames, tf.day_level_frames):
+                keys = await cache.security.keys(f"bars:{ft.value}:*")
+                if keys:
+                    await cache.security.delete(*keys)
+        finally:
+            cls._is_cache_empty = True
+
+    @classmethod
+    def _deserialize_cached_bars(cls, raw: List[str], ft: FrameType) -> BarsArray:
+        """从redis中反序列化缓存的数据
+
+        如果`raw`空数组或者元素为`None`,则返回空数组。
+
+        Args:
+            raw: redis中的缓存数据
+            ft: 帧类型
+            sort: 是否需要重新排序,缺省为False
+
+        Returns:
+            BarsArray: 行情数据
+        """
+        fix_date = False
+        if ft in tf.minute_level_frames:
+            convert = tf.int2time
+        else:
+            convert = tf.int2date
+            fix_date = True
+        recs = []
+        # it's possible to treat raw as csv and use pandas to parse, however, the performance is 10 times worse than this method
+        for raw_rec in raw:
+            if raw_rec is None:
+                continue
+            f, o, h, l, c, v, m, fac = raw_rec.split(",")
+            if fix_date:
+                f = f[:8]
+            recs.append(
+                (
+                    convert(f),
+                    float(o),
+                    float(h),
+                    float(l),
+                    float(c),
+                    float(v),
+                    float(m),
+                    float(fac),
+                )
+            )
+
+        return np.array(recs, dtype=bars_dtype)
+
+    @classmethod
+    async def _batch_get_cached_bars_n(
+        cls, frame_type: FrameType, n: int, end: Frame = None, codes: List[str] = None
+    ) -> BarsPanel:
+        """批量获取在cache中截止`end`的`n`个bars。
+
+        如果`end`不在`frame_type`所属的边界点上,那么,如果`end`大于等于当前缓存未收盘数据时间,则将包含未收盘数据;否则,返回的记录将截止到`tf.floor(end, frame_type)`。
+
+        Args:
+            frame_type: 时间帧类型
+            n: 返回记录条数
+            codes: 证券代码列表
+            end: 截止时间, 如果为None
+
+        Returns:
+            BarsPanel: 行情数据
+        """
+        # 调用者自己保证end在缓存中
+        cols = list(bars_dtype_with_code.names)
+        if frame_type in tf.day_level_frames:
+            key = f"bars:{frame_type.value}:unclosed"
+            if codes is None:
+                recs = await cache.security.hgetall(key)
+                codes = list(recs.keys())
+                recs = recs.values()
+            else:
+                recs = await cache.security.hmget(key, *codes)
+
+            barss = cls._deserialize_cached_bars(recs, frame_type)
+            if barss.size > 0:
+                if len(barss) != len(codes):
+                    # issue 39, 如果某支票当天停牌,则缓存中将不会有它的记录,此时需要移除其代码
+                    codes = [
+                        codes[i] for i, item in enumerate(recs) if item is not None
+                    ]
+                barss = numpy_append_fields(barss, "code", codes, [("code", "O")])
+                return barss[cols].astype(bars_dtype_with_code)
+            else:
+                return np.array([], dtype=bars_dtype_with_code)
+        else:
+            end = end or datetime.datetime.now()
+            close_end = tf.floor(end, frame_type)
+            all_bars = []
+            if codes is None:
+                keys = await cache.security.keys(
+                    f"bars:{frame_type.value}:*[^unclosed]"
+                )
+                codes = [key.split(":")[-1] for key in keys]
+            else:
+                keys = [f"bars:{frame_type.value}:{code}" for code in codes]
+
+            if frame_type != FrameType.MIN1:
+                unclosed = await cache.security.hgetall(
+                    f"bars:{frame_type.value}:unclosed"
+                )
+            else:
+                unclosed = {}
+
+            pl = cache.security.pipeline()
+            frames = tf.get_frames_by_count(close_end, n, frame_type)
+            for key in keys:
+                pl.hmget(key, *frames)
+
+            all_closed = await pl.execute()
+            for code, raw in zip(codes, all_closed):
+                raw.append(unclosed.get(code))
+                barss = cls._deserialize_cached_bars(raw, frame_type)
+                barss = numpy_append_fields(
+                    barss, "code", [code] * len(barss), [("code", "O")]
+                )
+                barss = barss[cols].astype(bars_dtype_with_code)
+                all_bars.append(barss[barss["frame"] <= end][-n:])
+
+            try:
+                return np.concatenate(all_bars)
+            except ValueError as e:
+                logger.exception(e)
+                return np.array([], dtype=bars_dtype_with_code)
+
+    @classmethod
+    async def _get_cached_bars_n(
+        cls, code: str, n: int, frame_type: FrameType, end: Frame = None
+    ) -> BarsArray:
+        """从缓存中获取指定代码的行情数据
+
+        存取逻辑是,从`end`指定的时间向前取`n`条记录。`end`不应该大于当前系统时间,并且根据`end`和`n`计算出来的起始时间应该在缓存中存在。否则,两种情况下,返回记录数都将小于`n`。
+
+        如果`end`不处于`frame_type`所属的边界结束位置,且小于当前已缓存的未收盘bar时间,则会返回前一个已收盘的数据,否则,返回的记录中还将包含未收盘的数据。
+
+        args:
+            code: 证券代码,比如000001.XSHE
+            n: 返回记录条数
+            frame_type: 帧类型
+            end: 结束帧,如果为None,则取当前时间
+
+        returns:
+            元素类型为`coretypes.bars_dtype`的一维numpy数组。如果没有数据,则返回空ndarray。
+        """
+        # 50 times faster than arrow.now().floor('second')
+        end = end or datetime.datetime.now().replace(second=0, microsecond=0)
+
+        if frame_type in tf.minute_level_frames:
+            cache_start = tf.first_min_frame(end.date(), frame_type)
+            closed = tf.floor(end, frame_type)
+
+            frames = (tf.get_frames(cache_start, closed, frame_type))[-n:]
+            if len(frames) == 0:
+                recs = np.empty(shape=(0,), dtype=bars_dtype)
+            else:
+                key = f"bars:{frame_type.value}:{code}"
+                recs = await cache.security.hmget(key, *frames)
+                recs = cls._deserialize_cached_bars(recs, frame_type)
+
+            if closed < end:
+                # for unclosed
+                key = f"bars:{frame_type.value}:unclosed"
+                unclosed = await cache.security.hget(key, code)
+                unclosed = cls._deserialize_cached_bars([unclosed], frame_type)
+
+                if len(unclosed) == 0:
+                    return recs[-n:]
+
+                if end < unclosed[0]["frame"].item():
+                    # 如果unclosed为9:36, 调用者要求取9:29的5m数据,则取到的unclosed不合要求,抛弃。似乎没有更好的方法检测end与unclosed的关系
+                    return recs[-n:]
+                else:
+                    bars = np.concatenate((recs, unclosed))
+                    return bars[-n:]
+            else:
+                return recs[-n:]
+        else:  # 日线及以上级别,仅在缓存中存在未收盘数据
+            key = f"bars:{frame_type.value}:unclosed"
+            rec = await cache.security.hget(key, code)
+            return cls._deserialize_cached_bars([rec], frame_type)
+
+    @classmethod
+    async def cache_bars(cls, code: str, frame_type: FrameType, bars: BarsArray):
+        """将当期已收盘的行情数据缓存
+
+        Note:
+            当前只缓存1分钟数据。其它分钟数据,都在调用时,通过resample临时合成。
+
+        行情数据缓存在以`bars:{frame_type.value}:{code}`为key, {frame}为field的hashmap中。
+
+        Args:
+            code: the full qualified code of a security or index
+            frame_type: frame type of the bars
+            bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype`
+
+        Raises:
+            RedisError: if redis operation failed, see documentation of aioredis
+
+        """
+        # 转换时间为int
+        convert = tf.time2int if frame_type in tf.minute_level_frames else tf.date2int
+
+        key = f"bars:{frame_type.value}:{code}"
+        pl = cache.security.pipeline()
+        for bar in bars:
+            val = [*bar]
+            val[0] = convert(bar["frame"].item())
+            pl.hset(key, val[0], ",".join(map(str, val)))
+
+        await pl.execute()
+
+    @classmethod
+    async def cache_unclosed_bars(
+        cls, code: str, frame_type: FrameType, bars: BarsArray
+    ):  # pragma: no cover
+        """将未结束的行情数据缓存
+
+        未结束的行情数据缓存在以`bars:{frame_type.value}:unclosed`为key, {code}为field的hashmap中。
+
+        尽管`bars`被声明为BarsArray,但实际上应该只包含一个元素。
+
+        Args:
+            code: the full qualified code of a security or index
+            frame_type: frame type of the bars
+            bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype`
+
+        Raises:
+            RedisError: if redis operation failed, see documentation of aioredis
+
+        """
+        converter = tf.time2int if frame_type in tf.minute_level_frames else tf.date2int
+
+        assert len(bars) == 1, "unclosed bars should only have one record"
+
+        key = f"bars:{frame_type.value}:unclosed"
+        bar = bars[0]
+        val = [*bar]
+        val[0] = converter(bar["frame"].item())
+        await cache.security.hset(key, code, ",".join(map(str, val)))
+
+    @classmethod
+    async def persist_bars(
+        cls,
+        frame_type: FrameType,
+        bars: Union[Dict[str, BarsArray], BarsArray, pd.DataFrame],
+    ):
+        """将行情数据持久化
+
+        如果`bars`类型为Dict,则key为`code`,value为`bars`。如果其类型为BarsArray或者pd.DataFrame,则`bars`各列字段应该为`coretypes.bars_dtype` + ("code", "O")构成。
+
+        Args:
+            frame_type: the frame type of the bars
+            bars: the bars to be persisted
+
+        Raises:
+            InfluxDBWriteError: if influxdb write failed
+        """
+        client = get_influx_client()
+
+        measurement = cls._measurement_name(frame_type)
+        logger.info("persisting bars to influxdb: %s, %d secs", measurement, len(bars))
+
+        if isinstance(bars, dict):
+            for code, value in bars.items():
+                await client.save(
+                    value, measurement, global_tags={"code": code}, time_key="frame"
+                )
+        else:
+            await client.save(bars, measurement, tag_keys=["code"], time_key="frame")
+
+    @classmethod
+    def resample(
+        cls, bars: BarsArray, from_frame: FrameType, to_frame: FrameType
+    ) -> BarsArray:
+        """将原来为`from_frame`的行情数据转换为`to_frame`的行情数据
+
+        如果`to_frame`为日线或者分钟级别线,则`from_frame`必须为分钟线;如果`to_frame`为周以上级别线,则`from_frame`必须为日线。其它级别之间的转换不支持。
+
+        如果`from_frame`为1分钟线,则必须从9:31起。
+
+        Args:
+            bars (BarsArray): 行情数据
+            from_frame (FrameType): 转换前的FrameType
+            to_frame (FrameType): 转换后的FrameType
+
+        Returns:
+            BarsArray: 转换后的行情数据
+        """
+        if from_frame == FrameType.MIN1:
+            return cls._resample_from_min1(bars, to_frame)
+        elif from_frame == FrameType.DAY:  # pragma: no cover
+            return cls._resample_from_day(bars, to_frame)
+        else:  # pragma: no cover
+            raise TypeError(f"unsupported from_frame: {from_frame}")
+
+    @classmethod
+    def _measurement_name(cls, frame_type):
+        return f"stock_bars_{frame_type.value}"
+
+    @classmethod
+    def _resample_from_min1(cls, bars: BarsArray, to_frame: FrameType) -> BarsArray:
+        """将`bars`从1分钟线转换为`to_frame`的行情数据
+
+        重采样后的数据只包含frame, open, high, low, close, volume, amount, factor,无论传入数据是否还有别的字段,它们都将被丢弃。
+
+        resampling 240根分钟线到5分钟大约需要100微秒。
+
+        TODO: 如果`bars`中包含nan怎么处理?
+        """
+        if bars[0]["frame"].item().minute != 31:
+            raise ValueError("resampling from 1min must start from 9:31")
+
+        if to_frame not in (
+            FrameType.MIN5,
+            FrameType.MIN15,
+            FrameType.MIN30,
+            FrameType.MIN60,
+            FrameType.DAY,
+        ):
+            raise ValueError(f"unsupported to_frame: {to_frame}")
+
+        bins_len = {
+            FrameType.MIN5: 5,
+            FrameType.MIN15: 15,
+            FrameType.MIN30: 30,
+            FrameType.MIN60: 60,
+            FrameType.DAY: 240,
+        }[to_frame]
+
+        bins = len(bars) // bins_len
+        npart1 = bins * bins_len
+
+        part1 = bars[:npart1].reshape((-1, bins_len))
+        part2 = bars[npart1:]
+
+        open_pos = np.arange(bins) * bins_len
+        close_pos = np.arange(1, bins + 1) * bins_len - 1
+        if len(bars) > bins_len * bins:
+            close_pos = np.append(close_pos, len(bars) - 1)
+            resampled = np.empty((bins + 1,), dtype=bars_dtype)
+        else:
+            resampled = np.empty((bins,), dtype=bars_dtype)
+
+        resampled[:bins]["open"] = bars[open_pos]["open"]
+
+        resampled[:bins]["high"] = np.max(part1["high"], axis=1)
+        resampled[:bins]["low"] = np.min(part1["low"], axis=1)
+
+        resampled[:bins]["volume"] = np.sum(part1["volume"], axis=1)
+        resampled[:bins]["amount"] = np.sum(part1["amount"], axis=1)
+
+        if len(part2):
+            resampled[-1]["open"] = part2["open"][0]
+            resampled[-1]["high"] = np.max(part2["high"])
+            resampled[-1]["low"] = np.min(part2["low"])
+
+            resampled[-1]["volume"] = np.sum(part2["volume"])
+            resampled[-1]["amount"] = np.sum(part2["amount"])
+
+        cols = ["frame", "close", "factor"]
+        resampled[cols] = bars[close_pos][cols]
+
+        if to_frame == FrameType.DAY:
+            resampled["frame"] = bars[-1]["frame"].item().date()
+
+        return resampled
+
+    @classmethod
+    def _resample_from_day(cls, bars: BarsArray, to_frame: FrameType) -> BarsArray:
+        """将`bars`从日线转换成`to_frame`的行情数据
+
+        Args:
+            bars (BarsArray): [description]
+            to_frame (FrameType): [description]
+
+        Returns:
+            转换后的行情数据
+        """
+        rules = {
+            "frame": "last",
+            "open": "first",
+            "high": "max",
+            "low": "min",
+            "close": "last",
+            "volume": "sum",
+            "amount": "sum",
+            "factor": "last",
+        }
+
+        if to_frame == FrameType.WEEK:
+            freq = "W-Fri"
+        elif to_frame == FrameType.MONTH:
+            freq = "M"
+        elif to_frame == FrameType.QUARTER:
+            freq = "Q"
+        elif to_frame == FrameType.YEAR:
+            freq = "A"
+        else:
+            raise ValueError(f"unsupported to_frame: {to_frame}")
+
+        df = pd.DataFrame(bars)
+        df.index = pd.to_datetime(bars["frame"])
+        df = df.resample(freq).agg(rules)
+        bars = np.array(df.to_records(index=False), dtype=bars_dtype)
+
+        # filter out data like (None, nan, ...)
+        return bars[np.isfinite(bars["close"])]
+
+    @classmethod
+    async def _get_price_limit_in_cache(
+        cls, code: str, begin: datetime.date, end: datetime.date
+    ):
+        date_str = await cache._security_.get(TRADE_PRICE_LIMITS_DATE)
+        if date_str:
+            date_in_cache = arrow.get(date_str).date()
+            if date_in_cache < begin or date_in_cache > end:
+                return None
+        else:
+            return None
+
+        dtype = [("frame", "O"), ("high_limit", "f4"), ("low_limit", "f4")]
+        hp = await cache._security_.hget(TRADE_PRICE_LIMITS, f"{code}.high_limit")
+        lp = await cache._security_.hget(TRADE_PRICE_LIMITS, f"{code}.low_limit")
+        if hp is None or lp is None:
+            return None
+        else:
+            return np.array([(date_in_cache, hp, lp)], dtype=dtype)
+
+    @classmethod
+    async def get_trade_price_limits(
+        cls, code: str, begin: Frame, end: Frame
+    ) -> BarsArray:
+        """从influxdb和cache中获取个股在[begin, end]之间的涨跌停价。
+
+        涨跌停价只有日线数据才有,因此,FrameType固定为FrameType.DAY,
+        当天的数据存放于redis,如果查询日期包含当天(交易日),从cache中读取并追加到结果中
+
+        Args:
+            code : 个股代码
+            begin : 开始日期
+            end : 结束日期
+
+        Returns:
+            dtype为[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]的numpy数组
+        """
+        cols = ["_time", "high_limit", "low_limit"]
+        dtype = [("frame", "O"), ("high_limit", "f4"), ("low_limit", "f4")]
+
+        if isinstance(begin, datetime.datetime):
+            begin = begin.date()  # 强制转换为date
+        if isinstance(end, datetime.datetime):
+            end = end.date()  # 强制转换为date
+
+        data_in_cache = await cls._get_price_limit_in_cache(code, begin, end)
+
+        client = get_influx_client()
+        measurement = cls._measurement_name(FrameType.DAY)
+        flux = (
+            Flux()
+            .bucket(client._bucket)
+            .measurement(measurement)
+            .range(begin, end)
+            .tags({"code": code})
+            .fields(cols)
+            .sort("_time")
+        )
+
+        ds = NumpyDeserializer(
+            dtype,
+            use_cols=cols,
+            converters={"_time": lambda x: ciso8601.parse_datetime(x).date()},
+            # since we ask parse date in convertors, so we have to disable parse_date
+            parse_date=None,
+        )
+
+        result = await client.query(flux, ds)
+        if data_in_cache:
+            result = np.concatenate([result, data_in_cache])
+        return result
+
+    @classmethod
+    async def reset_price_limits_cache(cls, cache_only: bool, dt: datetime.date = None):
+        if cache_only is False:
+            date_str = await cache._security_.get(TRADE_PRICE_LIMITS_DATE)
+            if not date_str:
+                return  # skip clear action if date not found in cache
+            date_in_cache = arrow.get(date_str).date()
+            if dt is None or date_in_cache != dt:  # 更新的时间和cache的时间相同,则清除cache
+                return  # skip clear action
+
+        await cache._security_.delete(TRADE_PRICE_LIMITS)
+        await cache._security_.delete(TRADE_PRICE_LIMITS_DATE)
+
+    @classmethod
+    async def save_trade_price_limits(
+        cls, price_limits: LimitPriceOnlyBarsArray, to_cache: bool
+    ):
+        """保存涨跌停价
+
+        Args:
+            price_limits: 要保存的涨跌停价格数据。
+            to_cache: 是保存到缓存中,还是保存到持久化存储中
+        """
+        if len(price_limits) == 0:
+            return
+
+        if to_cache:  # 每个交易日上午9点更新两次
+            pl = cache._security_.pipeline()
+            for row in price_limits:
+                # .item convert np.float64 to python float
+                pl.hset(
+                    TRADE_PRICE_LIMITS,
+                    f"{row['code']}.high_limit",
+                    row["high_limit"].item(),
+                )
+                pl.hset(
+                    TRADE_PRICE_LIMITS,
+                    f"{row['code']}.low_limit",
+                    row["low_limit"].item(),
+                )
+
+            dt = price_limits[-1]["frame"]
+            pl.set(TRADE_PRICE_LIMITS_DATE, dt.strftime("%Y-%m-%d"))
+            await pl.execute()
+        else:
+            # to influxdb, 每个交易日的第二天早上2点保存
+            client = get_influx_client()
+            await client.save(
+                price_limits,
+                cls._measurement_name(FrameType.DAY),
+                tag_keys="code",
+                time_key="frame",
+            )
+
+    @classmethod
+    async def trade_price_limit_flags(
+        cls, code: str, start: datetime.date, end: datetime.date
+    ) -> Tuple[List[bool]]:
+        """获取个股在[start, end]之间的涨跌停标志
+
+        !!!Note
+            本函数返回的序列在股票有停牌的情况下,将不能与[start, end]一一对应。
+
+        Args:
+            code: 个股代码
+            start: 开始日期
+            end: 结束日期
+
+        Returns:
+            涨跌停标志列表(buy, sell)
+        """
+        cols = ["_time", "close", "high_limit", "low_limit"]
+        client = get_influx_client()
+        measurement = cls._measurement_name(FrameType.DAY)
+        flux = (
+            Flux()
+            .bucket(client._bucket)
+            .measurement(measurement)
+            .range(start, end)
+            .tags({"code": code})
+            .fields(cols)
+            .sort("_time")
+        )
+
+        dtype = [
+            ("frame", "O"),
+            ("close", "f4"),
+            ("high_limit", "f4"),
+            ("low_limit", "f4"),
+        ]
+        ds = NumpyDeserializer(
+            dtype,
+            use_cols=["_time", "close", "high_limit", "low_limit"],
+            converters={"_time": lambda x: ciso8601.parse_datetime(x).date()},
+            # since we ask parse date in convertors, so we have to disable parse_date
+            parse_date=None,
+        )
+
+        result = await client.query(flux, ds)
+        if result.size == 0:
+            return np.array([], dtype=dtype)
+
+        return (
+            array_price_equal(result["close"], result["high_limit"]),
+            array_price_equal(result["close"], result["low_limit"]),
+        )
+
+    @classmethod
+    async def trade_price_limit_flags_ex(
+        cls, code: str, start: datetime.date, end: datetime.date
+    ) -> Dict[datetime.date, Tuple[bool, bool]]:
+        """获取股票`code`在`[start, end]`区间的涨跌停标志
+
+        !!!Note
+            如果end为当天,注意在未收盘之前,这个涨跌停标志都是不稳定的
+
+        Args:
+            code: 股票代码
+            start: 起始日期
+            end: 结束日期
+
+        Returns:
+            以日期为key,(涨停,跌停)为值的dict
+        """
+        limit_prices = await cls.get_trade_price_limits(code, start, end)
+        bars = await Stock.get_bars_in_range(
+            code, FrameType.DAY, start=start, end=end, fq=False
+        )
+
+        close = bars["close"]
+
+        results = {}
+
+        # aligned = True
+        for i in range(len(bars)):
+            if bars[i]["frame"].item().date() != limit_prices[i]["frame"]:
+                # aligned = False
+                logger.warning("数据同步错误,涨跌停价格与收盘价时间不一致: %s, %s", code, bars[i]["frame"])
+                break
+
+            results[limit_prices[i]["frame"]] = (
+                price_equal(limit_prices[i]["high_limit"], close[i]),
+                price_equal(limit_prices[i]["low_limit"], close[i]),
+            )
+
+        # if not aligned:
+        #     bars = bars[i:]
+        #     limit_prices = limit_prices[i:]
+
+        #     for frame in bars["frame"]:
+        #         frame = frame.item().date()
+        #         close = bars[bars["frame"].item().date() == frame]["close"].item()
+        #         high = limit_prices[limit_prices["frame"] == frame]["high_limit"].item()
+        #         low = limit_prices[limit_prices["frame"] == frame]["low_limit"].item()
+        #         results[frame] = (
+        #             price_equal(high, close),
+        #             price_equal(low, close)
+        #         )
+
+        return results
+
+    @classmethod
+    async def get_latest_price(cls, codes: Iterable[str]) -> List[str]:
+        """获取多支股票的最新价格(交易日当天),暂不包括指数
+
+        价格数据每5秒更新一次,接受多只股票查询,返回最后缓存的价格
+
+        Args:
+            codes: 代码列表
+
+        Returns:
+            返回一个List,价格是字符形式的浮点数。
+        """
+        if not codes:
+            return []
+
+        _raw_code_list = []
+        for code_str in codes:
+            code, _ = code_str.split(".")
+            _raw_code_list.append(code)
+
+        _converted_data = []
+        raw_data = await cache.feature.hmget(TRADE_LATEST_PRICE, *_raw_code_list)
+        for _data in raw_data:
+            if _data is None:
+                _converted_data.append(_data)
+            else:
+                _converted_data.append(float(_data))
+        return _converted_data
+
+

security_type: SecurityType (property, readonly)

Return the security type.

Returns:

Type           Description
SecurityType   [description]
+ +

batch_cache_bars(frame_type, bars) (async classmethod)

Cache closed minute-level and daily bars.

When caching daily bars, this should only be called on the first sync after the market closes for the day.

Parameters:

Name         Type                   Description                                                                                    Default
frame_type   FrameType              Frame type.                                                                                    required
bars         Dict[str, BarsArray]   Bar data: the key is the security code, the value is a 1-D numpy array of dtype bars_dtype.    required

Exceptions:

Type         Description
RedisError   If an error occurs during execution, exceptions derived from this base class are raised; see the aioredis documentation for details.
+
+ Source code in omicron/models/stock.py +
@classmethod
+async def batch_cache_bars(cls, frame_type: FrameType, bars: Dict[str, BarsArray]):
+    """缓存已收盘的分钟线和日线
+
+    当缓存日线时,仅限于当日收盘后的第一次同步时调用。
+
+    Args:
+        frame_type: 帧类型
+        bars: 行情数据,其key为股票代码,其value为dtype为`bars_dtype`的一维numpy数组。
+
+    Raises:
+        RedisError: 如果在执行过程中发生错误,则抛出以此异常为基类的各种异常,具体参考aioredis相关文档。
+    """
+    if frame_type == FrameType.DAY:
+        await cls.batch_cache_unclosed_bars(frame_type, bars)
+        return
+
+    pl = cache.security.pipeline()
+    for code, bars in bars.items():
+        key = f"bars:{frame_type.value}:{code}"
+        for bar in bars:
+            frame = tf.time2int(bar["frame"].item())
+            val = [*bar]
+            val[0] = frame
+            pl.hset(key, frame, ",".join(map(str, val)))
+    await pl.execute()
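
A small illustration of the redis layout this method writes for closed minute bars. The integer frame encoding produced by tf.time2int is assumed here to be the YYYYMMDDHHMM form; treat key and value shapes as read off the code, the encoding as an assumption.

```python
# Illustration (not a runnable redis call): layout written by batch_cache_bars()
# for 30-minute bars of one code. The integer frame format is an assumption.
key = "bars:30m:000001.XSHE"          # f"bars:{frame_type.value}:{code}"
frame = 202301031030                  # tf.time2int(bar["frame"]) -- assumed encoding
bar = (frame, 13.5, 13.8, 13.4, 13.7, 1_200_000.0, 16_440_000.0, 1.23)

field = frame
value = ",".join(map(str, bar))       # "202301031030,13.5,13.8,...,1.23"
print(key, field, value)              # pl.hset(key, field, value)
```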
+
+
+

batch_cache_unclosed_bars(frame_type, bars) (async classmethod)

Cache unclosed 5-, 15-, 30- and 60-minute bars as well as unclosed daily, weekly and monthly bars.

Parameters:

Name         Type                   Description                                                                                                                   Default
frame_type   FrameType              Frame type.                                                                                                                   required
bars         Dict[str, BarsArray]   Bar data: the key is the security code, the value is a 1-D numpy array of dtype bars_dtype. bars must not be None or empty.   required

Exceptions:

Type         Description
RedisError   If an error occurs during execution, exceptions derived from this base class are raised; see the aioredis documentation for details.
+
+ Source code in omicron/models/stock.py +
@classmethod
+async def batch_cache_unclosed_bars(
+    cls, frame_type: FrameType, bars: Dict[str, BarsArray]
+):  # pragma: no cover
+    """缓存未收盘的5、15、30、60分钟线及日线、周线、月线
+
+    Args:
+        frame_type: 帧类型
+        bars: 行情数据,其key为股票代码,其value为dtype为`bars_dtype`的一维numpy数组。bars不能为None,或者empty。
+
+    Raise:
+        RedisError: 如果在执行过程中发生错误,则抛出以此异常为基类的各种异常,具体参考aioredis相关文档。
+    """
+    pl = cache.security.pipeline()
+    key = f"bars:{frame_type.value}:unclosed"
+
+    convert = tf.time2int if frame_type in tf.minute_level_frames else tf.date2int
+
+    for code, bar in bars.items():
+        val = [*bar[0]]
+        val[0] = convert(bar["frame"][0].item())  # 时间转换
+        pl.hset(key, code, ",".join(map(str, val)))
+
+    await pl.execute()
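
In contrast to batch_cache_bars, all codes share a single hash here, with the code as field and one serialized bar as value. A minimal sketch of that serialization, using the bars_dtype layout shown in the parameter table above; the integer date encoding and the "1d" frame value are assumptions.

```python
# Minimal sketch of the value built for one unclosed bar. All codes go into the
# single hash f"bars:{frame_type.value}:unclosed", field = code, value = CSV row.
import numpy as np

bars_dtype = np.dtype([
    ("frame", "datetime64[s]"), ("open", "f4"), ("high", "f4"), ("low", "f4"),
    ("close", "f4"), ("volume", "f8"), ("amount", "f8"), ("factor", "f4"),
])

bar = np.array(
    [("2023-01-03T00:00:00", 13.5, 13.8, 13.4, 13.7, 1.2e6, 1.6e7, 1.23)],
    dtype=bars_dtype,
)

val = [*bar[0]]
val[0] = 20230103  # tf.date2int(...) stand-in for a daily frame (assumed encoding)
print("bars:1d:unclosed", "000001.XSHE", ",".join(map(str, val)))
```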
+
+
+

batch_get_day_level_bars_in_range(codes, frame_type, start, end, fq=True) (classmethod)

Fetch bars for multiple stocks (or indexes) within the time range [start, end).

Use this interface when the bars to fetch are day-level frames (1d, 1w, 1M).

For how trading suspensions are handled, see get_bars.

This function returns an async iterator; example usage:

    async for code, bars in Stock.batch_get_day_level_bars_in_range(...):
        print(code, bars)

If end does not fall on a frame boundary of frame_type, then: if end is greater than or equal to the time of the currently cached unclosed bar, the unclosed bar is included in the result; otherwise the returned records stop at tf.floor(end, frame_type).

Parameters:

Name         Type                                       Description                                                                           Default
codes        List[str]                                  List of codes.                                                                        required
frame_type   FrameType                                  Frame type.                                                                           required
start        Union[datetime.date, datetime.datetime]    Start time.                                                                           required
end          Union[datetime.date, datetime.datetime]    End time.                                                                             required
fq           bool                                       Whether to adjust prices; if True, forward adjustment is applied. Defaults to True.   True

Returns:

Type                                           Description
Generator[Dict[str, BarsArray], None, None]    An iterator; each iteration returns a dict whose key is the code and whose value is the bar data.
+ Source code in omicron/models/stock.py +
@classmethod
+async def batch_get_day_level_bars_in_range(
+    cls,
+    codes: List[str],
+    frame_type: FrameType,
+    start: Frame,
+    end: Frame,
+    fq: bool = True,
+) -> Generator[Dict[str, BarsArray], None, None]:
+    """获取多支股票(指数)在[start, end)时间段内的行情数据
+
+    如果要获取的行情数据是日线级别(即1d, 1w, 1M),使用本接口。
+
+    停牌数据处理请见[get_bars][omicron.models.stock.Stock.get_bars]。
+
+    本函数返回一个迭代器,使用方法示例:
+    ```
+    async for code, bars in Stock.batch_get_day_level_bars_in_range(...):
+        print(code, bars)
+    ```
+
+    如果`end`不在`frame_type`所属的边界点上,那么,如果`end`大于等于当前缓存未收盘数据时间,则将包含未收盘数据;否则,返回的记录将截止到`tf.floor(end, frame_type)`。
+
+    Args:
+        codes: 代码列表
+        frame_type: 帧类型
+        start: 起始时间
+        end: 结束时间
+        fq: 是否进行复权,如果是,则进行前复权。Defaults to True.
+
+    Returns:
+        Generator[Dict[str, BarsArray], None, None]: 迭代器,每次返回一个字典,其中key为代码,value为行情数据
+    """
+    today = datetime.datetime.now().date()
+    # 日线,end不等于最后交易日,此时已无缓存
+    if frame_type == FrameType.DAY and end == tf.floor(today, frame_type):
+        from_cache = True
+    elif frame_type != FrameType.DAY and start > tf.floor(today, frame_type):
+        from_cache = True
+    else:
+        from_cache = False
+
+    n = tf.count_frames(start, end, frame_type)
+    max_query_size = min(cfg.influxdb.max_query_size, INFLUXDB_MAX_QUERY_SIZE)
+    batch_size = max(max_query_size // n, 1)
+
+    for i in range(0, len(codes), batch_size):
+        batch_codes = codes[i : i + batch_size]
+        persisted = await cls._batch_get_persisted_bars_in_range(
+            batch_codes, frame_type, start, end
+        )
+
+        if from_cache:
+            cached = await cls._batch_get_cached_bars_n(
+                frame_type, 1, end, batch_codes
+            )
+            cached = pd.DataFrame(cached, columns=bars_dtype_with_code.names)
+
+            df = pd.concat([persisted, cached])
+        else:
+            df = persisted
+
+        for code in batch_codes:
+            filtered = df[df["code"] == code][bars_cols]
+            bars = filtered.to_records(index=False).astype(bars_dtype)
+            if fq:
+                bars = cls.qfq(bars)
+
+            yield code, bars
+
+
+
+ +
+ + + +
+ + + +

+batch_get_min_level_bars_in_range(codes, frame_type, start, end, fq=True) + + + classmethod + + +

+ +
+ +

获取多支股票(指数)在[start, end)时间段内的行情数据

+

如果要获取的行情数据是分钟级别(即1m, 5m, 15m, 30m和60m),使用本接口。

+

停牌数据处理请见get_bars

+

本函数返回一个迭代器,使用方法示例:

```
async for code, bars in Stock.batch_get_min_level_bars_in_range(...):
    print(code, bars)
```

+

如果end不在frame_type所属的边界点上,那么,如果end大于等于当前缓存未收盘数据时间,则将包含未收盘数据;否则,返回的记录将截止到tf.floor(end, frame_type)

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codesList[str]

股票/指数代码列表

required
frame_typeFrameType

帧类型

required
startUnion[datetime.date, datetime.datetime]

起始时间

required
endUnion[datetime.date, datetime.datetime]

结束时间。如果未指明,则取当前时间。

required
fqbool

是否进行复权,如果是,则进行前复权。Defaults to True.

True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Generator[Dict[str, BarsArray], None, None]

迭代器,每次返回一个字典,其中key为代码,value为行情数据

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def batch_get_min_level_bars_in_range(
+    cls,
+    codes: List[str],
+    frame_type: FrameType,
+    start: Frame,
+    end: Frame,
+    fq: bool = True,
+) -> Generator[Dict[str, BarsArray], None, None]:
+    """获取多支股票(指数)在[start, end)时间段内的行情数据
+
+    如果要获取的行情数据是分钟级别(即1m, 5m, 15m, 30m和60m),使用本接口。
+
+    停牌数据处理请见[get_bars][omicron.models.stock.Stock.get_bars]。
+
+    本函数返回一个迭代器,使用方法示例:
+    ```
+    async for code, bars in Stock.batch_get_min_level_bars_in_range(...):
+        print(code, bars)
+    ```
+
+    如果`end`不在`frame_type`所属的边界点上,那么,如果`end`大于等于当前缓存未收盘数据时间,则将包含未收盘数据;否则,返回的记录将截止到`tf.floor(end, frame_type)`。
+
+    Args:
+        codes: 股票/指数代码列表
+        frame_type: 帧类型
+        start: 起始时间
+        end: 结束时间。如果未指明,则取当前时间。
+        fq: 是否进行复权,如果是,则进行前复权。Defaults to True.
+
+    Returns:
+        Generator[Dict[str, BarsArray], None, None]: 迭代器,每次返回一个字典,其中key为代码,value为行情数据
+    """
+    closed_end = tf.floor(end, frame_type)
+    n = tf.count_frames(start, closed_end, frame_type)
+    max_query_size = min(cfg.influxdb.max_query_size, INFLUXDB_MAX_QUERY_SIZE)
+    batch_size = max(1, max_query_size // n)
+    ff = tf.first_min_frame(datetime.datetime.now(), frame_type)
+
+    for i in range(0, len(codes), batch_size):
+        batch_codes = codes[i : i + batch_size]
+
+        if end < ff:
+            part1 = await cls._batch_get_persisted_bars_in_range(
+                batch_codes, frame_type, start, end
+            )
+            part2 = pd.DataFrame([], columns=bars_dtype_with_code.names)
+        elif start >= ff:
+            part1 = pd.DataFrame([], columns=bars_dtype_with_code.names)
+            n = tf.count_frames(start, closed_end, frame_type) + 1
+            cached = await cls._batch_get_cached_bars_n(
+                frame_type, n, end, batch_codes
+            )
+            cached = cached[cached["frame"] >= start]
+            part2 = pd.DataFrame(cached, columns=bars_dtype_with_code.names)
+        else:
+            part1 = await cls._batch_get_persisted_bars_in_range(
+                batch_codes, frame_type, start, ff
+            )
+            n = tf.count_frames(start, closed_end, frame_type) + 1
+            cached = await cls._batch_get_cached_bars_n(
+                frame_type, n, end, batch_codes
+            )
+            part2 = pd.DataFrame(cached, columns=bars_dtype_with_code.names)
+
+        df = pd.concat([part1, part2])
+
+        for code in batch_codes:
+            filtered = df[df["code"] == code][bars_cols]
+            bars = filtered.to_records(index=False).astype(bars_dtype)
+            if fq:
+                bars = cls.qfq(bars)
+
+            yield code, bars
+
+
+
+ +
+ + + +
+ + + +

+cache_bars(code, frame_type, bars) + + + async + classmethod + + +

+ +
+ +

将当期已收盘的行情数据缓存

+
+

Note

+

当前只缓存1分钟数据。其它分钟数据,都在调用时,通过resample临时合成。

+
+

行情数据缓存在以bars:{frame_type.value}:{code}为key, {frame}为field的hashmap中。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codestr

the full qualified code of a security or index

required
frame_typeFrameType

frame type of the bars

required
barsnumpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]

the bars to cache, which is a numpy array of dtype coretypes.bars_dtype

required
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
RedisError

if redis operation failed, see documentation of aioredis

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def cache_bars(cls, code: str, frame_type: FrameType, bars: BarsArray):
+    """将当期已收盘的行情数据缓存
+
+    Note:
+        当前只缓存1分钟数据。其它分钟数据,都在调用时,通过resample临时合成。
+
+    行情数据缓存在以`bars:{frame_type.value}:{code}`为key, {frame}为field的hashmap中。
+
+    Args:
+        code: the full qualified code of a security or index
+        frame_type: frame type of the bars
+        bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype`
+
+    Raises:
+        RedisError: if redis operation failed, see documentation of aioredis
+
+    """
+    # 转换时间为int
+    convert = tf.time2int if frame_type in tf.minute_level_frames else tf.date2int
+
+    key = f"bars:{frame_type.value}:{code}"
+    pl = cache.security.pipeline()
+    for bar in bars:
+        val = [*bar]
+        val[0] = convert(bar["frame"].item())
+        pl.hset(key, val[0], ",".join(map(str, val)))
+
+    await pl.execute()
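
缓存后的存储形态大致如下(示意,字段值为虚构,field 的具体格式以 `tf.time2int`/`tf.date2int` 的实际输出为准):

```
# key:   bars:1m:000001.XSHE
# field: 202304171031                  # 由 tf.time2int 转换得到的帧时间
# value: "202304171031,10.0,10.1,9.9,10.05,120000.0,1212000.0,1.0"
#
# 调用方式(假设 bars 为 dtype=bars_dtype 的已收盘1分钟线):
await Stock.cache_bars("000001.XSHE", FrameType.MIN1, bars)
```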
+
+
+
+ +
+ + + +
+ + + +

+cache_unclosed_bars(code, frame_type, bars) + + + async + classmethod + + +

+ +
+ +

将未结束的行情数据缓存

+

未结束的行情数据缓存在以bars:{frame_type.value}:unclosed为key, {code}为field的hashmap中。

+

尽管bars被声明为BarsArray,但实际上应该只包含一个元素。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codestr

the full qualified code of a security or index

required
frame_typeFrameType

frame type of the bars

required
barsnumpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]

the bars to cache, which is a numpy array of dtype coretypes.bars_dtype

required
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
RedisError

if redis operation failed, see documentation of aioredis

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def cache_unclosed_bars(
+    cls, code: str, frame_type: FrameType, bars: BarsArray
+):  # pragma: no cover
+    """将未结束的行情数据缓存
+
+    未结束的行情数据缓存在以`bars:{frame_type.value}:unclosed`为key, {code}为field的hashmap中。
+
+    尽管`bars`被声明为BarsArray,但实际上应该只包含一个元素。
+
+    Args:
+        code: the full qualified code of a security or index
+        frame_type: frame type of the bars
+        bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype`
+
+    Raises:
+        RedisError: if redis operation failed, see documentation of aioredis
+
+    """
+    converter = tf.time2int if frame_type in tf.minute_level_frames else tf.date2int
+
+    assert len(bars) == 1, "unclosed bars should only have one record"
+
+    key = f"bars:{frame_type.value}:unclosed"
+    bar = bars[0]
+    val = [*bar]
+    val[0] = converter(bar["frame"].item())
+    await cache.security.hset(key, code, ",".join(map(str, val)))
+
+
+
+ +
+ + + + +
+ + + +

+days_since_ipo(self) + + +

+ +
+ +

获取上市以来经过了多少个交易日

+

由于受交易日历限制(2005年1月4日之前的交易日历没有),对于在之前上市的品种,都返回从2005年1月4日起的日期。

+ +

Returns:

- int: 上市以来经过的交易日数
+ Source code in omicron/models/stock.py +
def days_since_ipo(self) -> int:
+    """获取上市以来经过了多少个交易日
+
+    由于受交易日历限制(2005年1月4日之前的交易日历没有),对于在之前上市的品种,都返回从2005年1月4日起的日期。
+
+    Returns:
+        int: [description]
+    """
+    epoch_start = arrow.get("2005-01-04").date()
+    ipo_day = self.ipo_date if self.ipo_date > epoch_start else epoch_start
+    return tf.count_day_frames(ipo_day, arrow.now().date())
+
+
+
+ +
+ + + +
+ + + +

+format_code(code) + + + staticmethod + + +

+ +
+ +

新三板和北交所的股票暂不支持,默认返回None。

- 上证A股: 600、601、603、605
- 深证A股: 000、001
- 中小板: 002、003
- 创业板: 300/301
- 科创板: 688
- 新三板: 82、83、87、88、430、420、400
- 北交所: 43、83、87、88

+ +
+ Source code in omicron/models/stock.py +
@staticmethod
+def format_code(code) -> str:
+    """新三板和北交所的股票, 暂不支持, 默认返回None
+    上证A股: 600、601、603、605
+    深证A股: 000、001
+    中小板:  002、003
+    创业板:  300/301
+    科创板:  688
+    新三板:  82、83、87、88、430、420、400
+    北交所:  43、83、87、88
+    """
+    if not code or len(code) != 6:
+        return None
+
+    prefix = code[0]
+    if prefix in ("0", "3"):
+        return f"{code}.XSHE"
+    elif prefix == "6":
+        return f"{code}.XSHG"
+    else:
+        return None
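
按上述前缀规则,其行为大致如下(代码仅作演示):

```
Stock.format_code("600000")  # -> "600000.XSHG",上证A股
Stock.format_code("000001")  # -> "000001.XSHE",深证A股
Stock.format_code("300750")  # -> "300750.XSHE",创业板
Stock.format_code("688111")  # -> "688111.XSHG",科创板
Stock.format_code("830799")  # -> None,新三板/北交所暂不支持
Stock.format_code("60000")   # -> None,非6位代码
```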
+
+
+
+ +
+ + + +
+ + + +

+fuzzy_match(query) + + + classmethod + + +

+ +
+ +

对股票/指数进行模糊匹配查找

+

query可以是股票/指数代码,也可以是字母(按name查找),也可以是汉字(按显示名查找)

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
querystr

查询字符串

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, Tuple]

查询结果,其中Tuple为(code, display_name, name, start, end, type)

+
+ Source code in omicron/models/stock.py +
@classmethod
+def fuzzy_match(cls, query: str) -> Dict[str, Tuple]:
+    """对股票/指数进行模糊匹配查找
+
+    query可以是股票/指数代码,也可以是字母(按name查找),也可以是汉字(按显示名查找)
+
+    Args:
+        query (str): 查询字符串
+
+    Returns:
+        Dict[str, Tuple]: 查询结果,其中Tuple为(code, display_name, name, start, end, type)
+    """
+    query = query.upper()
+    if re.match(r"\d+", query):
+        return {
+            sec["code"]: sec.tolist()
+            for sec in cls._stocks
+            if sec["code"].startswith(query)
+        }
+    elif re.match(r"[A-Z]+", query):
+        return {
+            sec["code"]: sec.tolist()
+            for sec in cls._stocks
+            if sec["name"].startswith(query)
+        }
+    else:
+        return {
+            sec["code"]: sec.tolist()
+            for sec in cls._stocks
+            if sec["alias"].find(query) != -1
+        }
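
一个示意用法(返回内容为虚构,仅演示三种查询方式):

```
Stock.fuzzy_match("60000")   # 按代码前缀查找,如 {"600000.XSHG": [...], ...}
Stock.fuzzy_match("PAYH")    # 按证券名(拼音缩写,即name)前缀查找
Stock.fuzzy_match("平安")     # 按中文显示名(display_name)查找
```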
+
+
+
+ +
+ + + +
+ + + +

+get_bars(code, n, frame_type, end=None, fq=True, unclosed=True) + + + async + classmethod + + +

+ +
+ +

获取到end为止的n个行情数据。

+

返回的数据是按照时间顺序递增排序的。在遇到停牌的情况时,该时段数据将被跳过,因此返回的记录可能不是交易日连续的,并且可能不足n个。

+

如果系统当前没有到指定时间end的数据,将尽最大努力返回数据。调用者可以通过判断最后一条数据的时间是否等于end来判断是否获取到了全部数据。

+ +

Parameters:

- code (str, required): 证券代码
- n (int, required): 记录数
- frame_type (FrameType, required): 帧类型
- end (Union[datetime.date, datetime.datetime], 默认 None): 截止时间,如果未指明,则取当前时间
- fq (默认 True): 是否对返回记录进行复权。如果为True,则进行前复权。
- unclosed (默认 True): 是否包含最新未收盘的数据。

Returns:

- BarsArray: 返回dtype为`coretypes.bars_dtype`的一维numpy数组。
+ Source code in omicron/models/stock.py +
@classmethod
+async def get_bars(
+    cls,
+    code: str,
+    n: int,
+    frame_type: FrameType,
+    end: Frame = None,
+    fq=True,
+    unclosed=True,
+) -> BarsArray:
+    """获取到`end`为止的`n`个行情数据。
+
+    返回的数据是按照时间顺序递增排序的。在遇到停牌的情况时,该时段数据将被跳过,因此返回的记录可能不是交易日连续的,并且可能不足`n`个。
+
+    如果系统当前没有到指定时间`end`的数据,将尽最大努力返回数据。调用者可以通过判断最后一条数据的时间是否等于`end`来判断是否获取到了全部数据。
+
+    Args:
+        code: 证券代码
+        n: 记录数
+        frame_type: 帧类型
+        end: 截止时间,如果未指明,则取当前时间
+        fq: 是否对返回记录进行复权。如果为`True`的话,则进行前复权。Defaults to True.
+        unclosed: 是否包含最新未收盘的数据? Defaults to True.
+
+    Returns:
+        返回dtype为`coretypes.bars_dtype`的一维numpy数组。
+    """
+    now = datetime.datetime.now()
+    try:
+        cached = np.array([], dtype=bars_dtype)
+
+        if frame_type in tf.day_level_frames:
+            if end is None:
+                end = now.date()
+            elif type(end) == datetime.datetime:
+                end = end.date()
+            n0 = n
+            if unclosed:
+                cached = await cls._get_cached_bars_n(code, 1, frame_type)
+                if cached.size > 0:
+                    # 如果缓存的未收盘日期 > end,则该缓存不是需要的
+                    if cached[0]["frame"].item().date() > end:
+                        cached = np.array([], dtype=bars_dtype)
+                    else:
+                        n0 = n - 1
+        else:
+            end = end or now
+            closed_frame = tf.floor(end, frame_type)
+
+            # fetch one more bar, in case we should discard unclosed bar
+            cached = await cls._get_cached_bars_n(code, n + 1, frame_type, end)
+            if not unclosed:
+                cached = cached[cached["frame"] <= closed_frame]
+
+            # n bars we need fetch from persisted db
+            n0 = n - cached.size
+        if n0 > 0:
+            if cached.size > 0:
+                end0 = cached[0]["frame"].item()
+            else:
+                end0 = end
+
+            bars = await cls._get_persisted_bars_n(code, frame_type, n0, end0)
+            merged = np.concatenate((bars, cached))
+            bars = merged[-n:]
+        else:
+            bars = cached[-n:]
+
+        if fq:
+            bars = cls.qfq(bars)
+        return bars
+    except Exception as e:
+        logger.exception(e)
+        logger.warning(
+            "failed to get bars for %s, %s, %s, %s", code, n, frame_type, end
+        )
+        raise
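
一个示意性的调用示例(假设 omicron 已完成 init):

```
from coretypes import FrameType
from omicron.models.stock import Stock

async def demo():
    # 取最近30根日线(默认前复权,包含未收盘数据)
    bars = await Stock.get_bars("000001.XSHE", 30, FrameType.DAY)
    if bars.size > 0:
        ma5 = bars["close"][-5:].mean()   # 最新的5日均线值
        print(bars[-1]["frame"], ma5)
```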
+
+
+
+ +
+ + + +
+ + + +

+get_bars_in_range(code, frame_type, start, end=None, fq=True, unclosed=True) + + + async + classmethod + + +

+ +
+ +

获取指定证券(code)在[start, end]期间帧类型为frame_type的行情数据。

+ +

Parameters:

- code (required): 证券代码
- frame_type (required): 行情数据的帧类型
- start (required): 起始时间
- end (默认 None): 结束时间,如果为None,则表明取到当前时间。
- fq (默认 True): 是否对行情数据执行前复权操作
- unclosed (默认 True): 是否包含未收盘的数据
+ Source code in omicron/models/stock.py +
@classmethod
+async def get_bars_in_range(
+    cls,
+    code: str,
+    frame_type: FrameType,
+    start: Frame,
+    end: Frame = None,
+    fq=True,
+    unclosed=True,
+) -> BarsArray:
+    """获取指定证券(`code`)在[`start`, `end`]期间帧类型为`frame_type`的行情数据。
+
+    Args:
+        code : 证券代码
+        frame_type : 行情数据的帧类型
+        start : 起始时间
+        end : 结束时间,如果为None,则表明取到当前时间。
+        fq : 是否对行情数据执行前复权操作
+        unclosed : 是否包含未收盘的数据
+    """
+    now = datetime.datetime.now()
+
+    if frame_type in tf.day_level_frames:
+        end = end or now.date()
+        if unclosed and tf.day_shift(end, 0) == now.date():
+            part2 = await cls._get_cached_bars_n(code, 1, frame_type)
+        else:
+            part2 = np.array([], dtype=bars_dtype)
+
+        # get rest from persisted
+        part1 = await cls._get_persisted_bars_in_range(code, frame_type, start, end)
+        bars = np.concatenate((part1, part2))
+    else:
+        end = end or now
+        closed_end = tf.floor(end, frame_type)
+        ff_min1 = tf.first_min_frame(now, FrameType.MIN1)
+        if tf.day_shift(end, 0) < now.date() or end < ff_min1:
+            part1 = await cls._get_persisted_bars_in_range(
+                code, frame_type, start, end
+            )
+            part2 = np.array([], dtype=bars_dtype)
+        elif start >= ff_min1:  # all in cache
+            part1 = np.array([], dtype=bars_dtype)
+            n = tf.count_frames(start, closed_end, frame_type) + 1
+            part2 = await cls._get_cached_bars_n(code, n, frame_type, end)
+            part2 = part2[part2["frame"] >= start]
+        else:  # in both cache and persisted
+            ff = tf.first_min_frame(now, frame_type)
+            part1 = await cls._get_persisted_bars_in_range(
+                code, frame_type, start, ff
+            )
+            n = tf.count_frames(ff, closed_end, frame_type) + 1
+            part2 = await cls._get_cached_bars_n(code, n, frame_type, end)
+
+        if not unclosed:
+            part2 = part2[part2["frame"] <= closed_end]
+        bars = np.concatenate((part1, part2))
+
+    if fq:
+        return cls.qfq(bars)
+    else:
+        return bars
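
一个示意用法(日期仅作演示):

```
import datetime
from coretypes import FrameType
from omicron.models.stock import Stock

async def demo():
    start = datetime.date(2023, 3, 1)
    end = datetime.date(2023, 3, 31)
    # 取区间内的日线,不含未收盘数据,且不复权
    bars = await Stock.get_bars_in_range(
        "000001.XSHE", FrameType.DAY, start, end, fq=False, unclosed=False
    )
```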
+
+
+
+ +
+ + + +
+ + + +

+get_latest_price(codes) + + + async + classmethod + + +

+ +
+ +

获取多支股票的最新价格(交易日当天),暂不包括指数

+

价格数据每5秒更新一次,接受多只股票查询,返回最后缓存的价格

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codesIterable[str]

代码列表

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[str]

返回一个List,价格是字符形式的浮点数。

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def get_latest_price(cls, codes: Iterable[str]) -> List[str]:
+    """获取多支股票的最新价格(交易日当天),暂不包括指数
+
+    价格数据每5秒更新一次,接受多只股票查询,返回最后缓存的价格
+
+    Args:
+        codes: 代码列表
+
+    Returns:
+        返回一个List,价格是字符形式的浮点数。
+    """
+    if not codes:
+        return []
+
+    _raw_code_list = []
+    for code_str in codes:
+        code, _ = code_str.split(".")
+        _raw_code_list.append(code)
+
+    _converted_data = []
+    raw_data = await cache.feature.hmget(TRADE_LATEST_PRICE, *_raw_code_list)
+    for _data in raw_data:
+        if _data is None:
+            _converted_data.append(_data)
+        else:
+            _converted_data.append(float(_data))
+    return _converted_data
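
示意用法(数值为虚构;从上面的实现可以看到,返回值已被转换为 float,无缓存价格的品种对应位置为 None):

```
prices = await Stock.get_latest_price(["000001.XSHE", "600000.XSHG"])
# 形如 [10.23, 7.56],顺序与传入的代码列表一一对应
```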
+
+
+
+ +
+ + + +
+ + + +

+get_trade_price_limits(code, begin, end) + + + async + classmethod + + +

+ +
+ +

从influxdb和cache中获取个股在[begin, end]之间的涨跌停价。

+

涨跌停价只有日线数据才有,因此,FrameType固定为FrameType.DAY,当天的数据存放于redis,如果查询日期包含当天(交易日),从cache中读取并追加到结果中。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
code

个股代码

required
begin

开始日期

required
end

结束日期

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]

dtype为[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]的numpy数组

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def get_trade_price_limits(
+    cls, code: str, begin: Frame, end: Frame
+) -> BarsArray:
+    """从influxdb和cache中获取个股在[begin, end]之间的涨跌停价。
+
+    涨跌停价只有日线数据才有,因此,FrameType固定为FrameType.DAY,
+    当天的数据存放于redis,如果查询日期包含当天(交易日),从cache中读取并追加到结果中
+
+    Args:
+        code : 个股代码
+        begin : 开始日期
+        end : 结束日期
+
+    Returns:
+        dtype为[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]的numpy数组
+    """
+    cols = ["_time", "high_limit", "low_limit"]
+    dtype = [("frame", "O"), ("high_limit", "f4"), ("low_limit", "f4")]
+
+    if isinstance(begin, datetime.datetime):
+        begin = begin.date()  # 强制转换为date
+    if isinstance(end, datetime.datetime):
+        end = end.date()  # 强制转换为date
+
+    data_in_cache = await cls._get_price_limit_in_cache(code, begin, end)
+
+    client = get_influx_client()
+    measurement = cls._measurement_name(FrameType.DAY)
+    flux = (
+        Flux()
+        .bucket(client._bucket)
+        .measurement(measurement)
+        .range(begin, end)
+        .tags({"code": code})
+        .fields(cols)
+        .sort("_time")
+    )
+
+    ds = NumpyDeserializer(
+        dtype,
+        use_cols=cols,
+        converters={"_time": lambda x: ciso8601.parse_datetime(x).date()},
+        # since we ask parse date in convertors, so we have to disable parse_date
+        parse_date=None,
+    )
+
+    result = await client.query(flux, ds)
+    if data_in_cache:
+        result = np.concatenate([result, data_in_cache])
+    return result
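
示意用法(日期仅作演示):

```
import datetime
from omicron.models.stock import Stock

async def demo():
    begin = datetime.date(2023, 3, 1)
    end = datetime.date(2023, 3, 10)
    limits = await Stock.get_trade_price_limits("000001.XSHE", begin, end)
    for row in limits:
        print(row["frame"], row["high_limit"], row["low_limit"])
```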
+
+
+
+ +
+ + + +
+ + + +

+persist_bars(frame_type, bars) + + + async + classmethod + + +

+ +
+ +

将行情数据持久化

+

如果bars类型为Dict,则key为code,value为bars。如果其类型为BarsArray或者pd.DataFrame,则bars各列字段应该为coretypes.bars_dtype + ("code", "O")构成。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frame_typeFrameType

the frame type of the bars

required
barsUnion[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]], numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]], pandas.core.frame.DataFrame]

the bars to be persisted

required
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
InfluxDBWriteError

if influxdb write failed

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def persist_bars(
+    cls,
+    frame_type: FrameType,
+    bars: Union[Dict[str, BarsArray], BarsArray, pd.DataFrame],
+):
+    """将行情数据持久化
+
+    如果`bars`类型为Dict,则key为`code`,value为`bars`。如果其类型为BarsArray或者pd.DataFrame,则`bars`各列字段应该为`coretypes.bars_dtype` + ("code", "O")构成。
+
+    Args:
+        frame_type: the frame type of the bars
+        bars: the bars to be persisted
+
+    Raises:
+        InfluxDBWriteError: if influxdb write failed
+    """
+    client = get_influx_client()
+
+    measurement = cls._measurement_name(frame_type)
+    logger.info("persisting bars to influxdb: %s, %d secs", measurement, len(bars))
+
+    if isinstance(bars, dict):
+        for code, value in bars.items():
+            await client.save(
+                value, measurement, global_tags={"code": code}, time_key="frame"
+            )
+    else:
+        await client.save(bars, measurement, tag_keys=["code"], time_key="frame")
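
两种传参形式的示意(假设 `bars`、`bars_with_code` 已按说明构造好,均为虚构变量名):

```
# 1. Dict[str, BarsArray]:key 为代码,value 为 bars_dtype 数组
await Stock.persist_bars(FrameType.DAY, {"000001.XSHE": bars})

# 2. 结构化数组或 DataFrame:字段为 coretypes.bars_dtype 加上 ("code", "O")
await Stock.persist_bars(FrameType.DAY, bars_with_code)
```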
+
+
+
+ +
+ + + +
+ + + +

+qfq(bars) + + + staticmethod + + +

+ +
+ +

对行情数据执行前复权操作

+ +
+ Source code in omicron/models/stock.py +
@staticmethod
+def qfq(bars: BarsArray) -> BarsArray:
+    """对行情数据执行前复权操作"""
+    # todo: 这里可以优化
+    if bars.size == 0:
+        return bars
+
+    last = bars[-1]["factor"]
+    for field in ["open", "high", "low", "close", "volume"]:
+        bars[field] = bars[field] * (bars["factor"] / last)
+
+    return bars
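
前复权的换算逻辑是:以最后一根bar的复权因子为基准,把此前各期的价格(以及volume)按 factor / last_factor 的比例缩放。下面用一组虚构数值说明:

```
# 假设某股票 10 送 10:除权前 factor=1.0、收盘价 20.0,除权后 factor=2.0、收盘价 10.0
# 前复权后,除权前那根 bar 的收盘价变为 20.0 * (1.0 / 2.0) = 10.0
# 这样价格序列在除权日前后可比;注意 volume 也会按同一比例缩放
bars = Stock.qfq(bars)
```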
+
+
+
+ +
+ + + +
+ + + +

+resample(bars, from_frame, to_frame) + + + classmethod + + +

+ +
+ +

将原来为from_frame的行情数据转换为to_frame的行情数据

+

如果to_frame为日线或者分钟级别线,则from_frame必须为分钟线;如果to_frame为周以上级别线,则from_frame必须为日线。其它级别之间的转换不支持。

+

如果from_frame为1分钟线,则必须从9:31起。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
barsBarsArray

行情数据

required
from_frameFrameType

转换前的FrameType

required
to_frameFrameType

转换后的FrameType

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
BarsArray

转换后的行情数据

+
+ Source code in omicron/models/stock.py +
@classmethod
+def resample(
+    cls, bars: BarsArray, from_frame: FrameType, to_frame: FrameType
+) -> BarsArray:
+    """将原来为`from_frame`的行情数据转换为`to_frame`的行情数据
+
+    如果`to_frame`为日线或者分钟级别线,则`from_frame`必须为分钟线;如果`to_frame`为周以上级别线,则`from_frame`必须为日线。其它级别之间的转换不支持。
+
+    如果`from_frame`为1分钟线,则必须从9:31起。
+
+    Args:
+        bars (BarsArray): 行情数据
+        from_frame (FrameType): 转换前的FrameType
+        to_frame (FrameType): 转换后的FrameType
+
+    Returns:
+        BarsArray: 转换后的行情数据
+    """
+    if from_frame == FrameType.MIN1:
+        return cls._resample_from_min1(bars, to_frame)
+    elif from_frame == FrameType.DAY:  # pragma: no cover
+        return cls._resample_from_day(bars, to_frame)
+    else:  # pragma: no cover
+        raise TypeError(f"unsupported from_frame: {from_frame}")
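
示意用法(将自 9:31 起的 1 分钟线合成为 5 分钟线):

```
from coretypes import FrameType
from omicron.models.stock import Stock

async def demo():
    # 取某个交易日全天的240根1分钟线(合成要求数据从9:31起)
    bars_1m = await Stock.get_bars("000001.XSHE", 240, FrameType.MIN1)
    bars_5m = Stock.resample(bars_1m, FrameType.MIN1, FrameType.MIN5)
```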
+
+
+
+ +
+ + + +
+ + + +

+reset_cache() + + + async + classmethod + + +

+ +
+ +

清除缓存的行情数据

+ +
+ Source code in omicron/models/stock.py +
@classmethod
+async def reset_cache(cls):
+    """清除缓存的行情数据"""
+    try:
+        for ft in itertools.chain(tf.minute_level_frames, tf.day_level_frames):
+            keys = await cache.security.keys(f"bars:{ft.value}:*")
+            if keys:
+                await cache.security.delete(*keys)
+    finally:
+        cls._is_cache_empty = True
+
+
+
+ +
+ + + + +
+ + + +

+save_trade_price_limits(price_limits, to_cache) + + + async + classmethod + + +

+ +
+ +

保存涨跌停价

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
price_limitsnumpy.ndarray[Any, numpy.dtype[dtype([('frame', 'O'), ('code', 'O'), ('high_limit', '<f4'), ('low_limit', '<f4')])]]

要保存的涨跌停价格数据。

required
to_cachebool

是保存到缓存中,还是保存到持久化存储中

required
+
+ Source code in omicron/models/stock.py +
@classmethod
+async def save_trade_price_limits(
+    cls, price_limits: LimitPriceOnlyBarsArray, to_cache: bool
+):
+    """保存涨跌停价
+
+    Args:
+        price_limits: 要保存的涨跌停价格数据。
+        to_cache: 是保存到缓存中,还是保存到持久化存储中
+    """
+    if len(price_limits) == 0:
+        return
+
+    if to_cache:  # 每个交易日上午9点更新两次
+        pl = cache._security_.pipeline()
+        for row in price_limits:
+            # .item convert np.float64 to python float
+            pl.hset(
+                TRADE_PRICE_LIMITS,
+                f"{row['code']}.high_limit",
+                row["high_limit"].item(),
+            )
+            pl.hset(
+                TRADE_PRICE_LIMITS,
+                f"{row['code']}.low_limit",
+                row["low_limit"].item(),
+            )
+
+        dt = price_limits[-1]["frame"]
+        pl.set(TRADE_PRICE_LIMITS_DATE, dt.strftime("%Y-%m-%d"))
+        await pl.execute()
+    else:
+        # to influxdb, 每个交易日的第二天早上2点保存
+        client = get_influx_client()
+        await client.save(
+            price_limits,
+            cls._measurement_name(FrameType.DAY),
+            tag_keys="code",
+            time_key="frame",
+        )
+
+
+
+ +
+ + + + +
+ + + +

+trade_price_limit_flags(code, start, end) + + + async + classmethod + + +

+ +
+ +

获取个股在[start, end]之间的涨跌停标志

+
+

Note

+

本函数返回的序列在股票有停牌的情况下,将不能与[start, end]一一对应。

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codestr

个股代码

required
startdate

开始日期

required
enddate

结束日期

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[List[bool]]

涨跌停标志列表(buy, sell)

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def trade_price_limit_flags(
+    cls, code: str, start: datetime.date, end: datetime.date
+) -> Tuple[List[bool]]:
+    """获取个股在[start, end]之间的涨跌停标志
+
+    !!!Note
+        本函数返回的序列在股票有停牌的情况下,将不能与[start, end]一一对应。
+
+    Args:
+        code: 个股代码
+        start: 开始日期
+        end: 结束日期
+
+    Returns:
+        涨跌停标志列表(buy, sell)
+    """
+    cols = ["_time", "close", "high_limit", "low_limit"]
+    client = get_influx_client()
+    measurement = cls._measurement_name(FrameType.DAY)
+    flux = (
+        Flux()
+        .bucket(client._bucket)
+        .measurement(measurement)
+        .range(start, end)
+        .tags({"code": code})
+        .fields(cols)
+        .sort("_time")
+    )
+
+    dtype = [
+        ("frame", "O"),
+        ("close", "f4"),
+        ("high_limit", "f4"),
+        ("low_limit", "f4"),
+    ]
+    ds = NumpyDeserializer(
+        dtype,
+        use_cols=["_time", "close", "high_limit", "low_limit"],
+        converters={"_time": lambda x: ciso8601.parse_datetime(x).date()},
+        # since we ask parse date in convertors, so we have to disable parse_date
+        parse_date=None,
+    )
+
+    result = await client.query(flux, ds)
+    if result.size == 0:
+        return np.array([], dtype=dtype)
+
+    return (
+        array_price_equal(result["close"], result["high_limit"]),
+        array_price_equal(result["close"], result["low_limit"]),
+    )
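
示意用法:

```
import datetime

async def demo():
    buy_flags, sell_flags = await Stock.trade_price_limit_flags(
        "000001.XSHE", datetime.date(2023, 3, 1), datetime.date(2023, 3, 10)
    )
    # buy_flags[i] 为 True 表示该日收盘触及涨停,sell_flags[i] 表示触及跌停
    # 注意:停牌日没有对应记录,序列长度可能小于区间内的交易日数
```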
+
+
+
+ +
+ + + +
+ + + +

+trade_price_limit_flags_ex(code, start, end) + + + async + classmethod + + +

+ +
+ +

获取股票code[start, end]区间的涨跌停标志

+
+

Note

+

如果end为当天,注意在未收盘之前,这个涨跌停标志都是不稳定的

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
codestr

股票代码

required
startdate

起始日期

required
enddate

结束日期

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[datetime.date, Tuple[bool, bool]]

以日期为key,(涨停,跌停)为值的dict

+
+ Source code in omicron/models/stock.py +
@classmethod
+async def trade_price_limit_flags_ex(
+    cls, code: str, start: datetime.date, end: datetime.date
+) -> Dict[datetime.date, Tuple[bool, bool]]:
+    """获取股票`code`在`[start, end]`区间的涨跌停标志
+
+    !!!Note
+        如果end为当天,注意在未收盘之前,这个涨跌停标志都是不稳定的
+
+    Args:
+        code: 股票代码
+        start: 起始日期
+        end: 结束日期
+
+    Returns:
+        以日期为key,(涨停,跌停)为值的dict
+    """
+    limit_prices = await cls.get_trade_price_limits(code, start, end)
+    bars = await Stock.get_bars_in_range(
+        code, FrameType.DAY, start=start, end=end, fq=False
+    )
+
+    close = bars["close"]
+
+    results = {}
+
+    # aligned = True
+    for i in range(len(bars)):
+        if bars[i]["frame"].item().date() != limit_prices[i]["frame"]:
+            # aligned = False
+            logger.warning("数据同步错误,涨跌停价格与收盘价时间不一致: %s, %s", code, bars[i]["frame"])
+            break
+
+        results[limit_prices[i]["frame"]] = (
+            price_equal(limit_prices[i]["high_limit"], close[i]),
+            price_equal(limit_prices[i]["low_limit"], close[i]),
+        )
+
+    # if not aligned:
+    #     bars = bars[i:]
+    #     limit_prices = limit_prices[i:]
+
+    #     for frame in bars["frame"]:
+    #         frame = frame.item().date()
+    #         close = bars[bars["frame"].item().date() == frame]["close"].item()
+    #         high = limit_prices[limit_prices["frame"] == frame]["high_limit"].item()
+    #         low = limit_prices[limit_prices["frame"] == frame]["low_limit"].item()
+    #         results[frame] = (
+    #             price_equal(high, close),
+    #             price_equal(low, close)
+    #         )
+
+    return results
+
+
+
+ +
\ No newline at end of file diff --git a/2.0.0/api/strategy/index.html b/2.0.0/api/strategy/index.html new file mode 100644 index 00000000..7706cf7f --- /dev/null +++ b/2.0.0/api/strategy/index.html @@ -0,0 +1,3116 @@ + 策略框架 - Omicron

策略框架

+ +
+ + +
+ + + + +
+ + + + + + + + + + +
+ + + +

+ base + + + +

+ +
+ + + + +
+ + + + + + + + +
+ + + +

+ +BacktestState + + + + dataclass + + +

+ +
+ +

BacktestState(start: Union[datetime.date, datetime.datetime], end: Union[datetime.date, datetime.datetime], barss: Union[NoneType, Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]]], cursor: int, warmup_peroid: int, baseline: str = '399300.XSHE')

+ +
+ Source code in omicron/strategy/base.py +
class BacktestState(object):
+    start: Frame
+    end: Frame
+    barss: Union[None, Dict[str, BarsArray]]
+    cursor: int
+    warmup_peroid: int
+    baseline: str = "399300.XSHE"
+
+
+ + + +
+ + + + + + + + + + + + + + + + + + + + +
+ +
+ +
+ + + +
+ + + +

+ +BaseStrategy + + + +

+ +
+ + +
+ Source code in omicron/strategy/base.py +
class BaseStrategy:
+    def __init__(
+        self,
+        url: str,
+        account: Optional[str] = None,
+        token: Optional[str] = None,
+        name: Optional[str] = None,
+        ver: Optional[str] = None,
+        is_backtest: bool = True,
+        start: Optional[Frame] = None,
+        end: Optional[Frame] = None,
+        frame_type: Optional[FrameType] = None,
+        warmup_period: int = 0,
+    ):
+        """构造函数
+
+        Args:
+            url: 实盘/回测服务器的地址。
+            start: 回测起始日期。回测模式下必须传入。
+            end: 回测结束日期。回测模式下必须传入。
+            account: 实盘/回测账号。实盘模式下必须传入。在回测模式下,如果未传入,将以策略名+随机字符构建账号。
+            token: 实盘/回测时用的token。实盘模式下必须传入。在回测模式下,如果未传入,将自动生成。
+            is_backtest: 是否为回测模式。
+            name: 策略名。如果不传入,则使用类名字小写
+            ver: 策略版本号。如果不传入,则默认为0.1.
+            start: 如果是回测模式,则需要提供回测起始时间
+            end: 如果是回测模式,则需要提供回测结束时间
+            frame_type: 如果是回测模式,则需要提供回测时使用的主周期
+            warmup_period: 策略执行时需要的最小bar数(以frame_type)计。
+        """
+        self.ver = ver or "0.1"
+        self.name = name or self.__class__.__name__.lower() + f"_v{self.ver}"
+
+        self.token = token or uuid.uuid4().hex
+        self.account = account or f"smallcap-{self.token[-4:]}"
+
+        self.url = url
+        self.bills = None
+        self.metrics = None
+
+        # used by both live and backtest
+        self.warmup_period = warmup_period
+        self.is_backtest = is_backtest
+        if is_backtest:
+            if start is None or end is None or frame_type is None:
+                raise ValueError("start, end and frame_type must be presented.")
+
+            self.bs = BacktestState(start, end, None, 0, warmup_period)
+            self._frame_type = frame_type
+            self.broker = TraderClient(
+                url,
+                self.account,
+                self.token,
+                is_backtest=True,
+                start=self.bs.start,
+                end=self.bs.end,
+            )
+        else:
+            if account is None or token is None:
+                raise ValueError("account and token must be presented.")
+
+            self.broker = TraderClient(url, self.account, self.token, is_backtest=False)
+
+    async def _cache_bars_for_backtest(self, portfolio: List[str], n: int):
+        if portfolio is None or len(portfolio) == 0:
+            return
+
+        count = tf.count_frames(self.bs.start, self.bs.end, self._frame_type)
+        tasks = [
+            Stock.get_bars(code, count + n, self._frame_type, self.bs.end, fq=False)
+            for code in portfolio
+        ]
+
+        results = await gather(*tasks)
+        self.bs.barss = {k: v for (k, v) in zip(portfolio, results)}
+
+    def _next(self):
+        if self.bs.barss is None:
+            return None
+
+        self.bs.cursor += 1
+        return {
+            k: Stock.qfq(v[self.bs.cursor - self.bs.warmup_peroid : self.bs.cursor])
+            for (k, v) in self.bs.barss.items()
+        }
+
+    async def peek(self, code: str, n: int):
+        """允许策略偷看未来数据
+
+        可用以因子检验场景。要求数据本身已缓存。否则请用Stock.get_bars等方法获取。
+        """
+        if self.bs is None or self.bs.barss is None:
+            raise ValueError("data is not cached")
+
+        if code in self.bs.barss:
+            if self.bs.cursor + n + 1 < len(self.bs.barss[code]):
+                return Stock.qfq(
+                    self.bs.barss[code][self.bs.cursor : self.bs.cursor + n]
+                )
+
+        else:
+            raise ValueError("data is not cached")
+
+    async def backtest(self, stop_on_error: bool = True, **kwargs):
+        """执行回测
+
+        Args:
+            stop_on_error: 如果为True,则发生异常时,将停止回测。否则忽略错误,继续执行。
+        Keyword Args:
+            prefetch_stocks Dict[str, BarsArray]: 代码列表。在该列表中的品种,将在回测之前自动预取行情数据,并在调用predict时,传入截止到当前frame的,长度为n的行情数据。行情周期由构造时的frame_type指定。预取数据长度由`self.warmup_period`决定
+        """
+        prefetch_stocks: List[str] = kwargs.get("prefetch_stocks")  # type: ignore
+        await self._cache_bars_for_backtest(prefetch_stocks, self.warmup_period)
+        self.bs.cursor = self.warmup_period
+
+        intra_day = self._frame_type in tf.minute_level_frames
+        converter = tf.int2time if intra_day else tf.int2date
+
+        await self.before_start()
+
+        # 最后一周期不做预测,留出来执行上一周期的信号
+        end_ = tf.shift(self.bs.end, -1, self._frame_type)
+        for i, frame in enumerate(
+            tf.get_frames(self.bs.start, end_, self._frame_type)  # type: ignore
+        ):
+            barss = self._next()
+            day_barss = barss if self._frame_type == FrameType.DAY else None
+            frame_ = converter(frame)
+
+            prev_frame = tf.shift(frame_, -1, self._frame_type)
+            next_frame = tf.shift(frame_, 1, self._frame_type)
+
+            # new trading day start
+            if (not intra_day and prev_frame < frame_) or (
+                intra_day and prev_frame.date() < frame_.date()
+            ):
+                await self.before_trade(frame_, day_barss)
+
+            logger.debug("%sth iteration", i, date=frame_)
+            try:
+                await self.predict(
+                    frame_, self._frame_type, i, barss=barss, **kwargs  # type: ignore
+                )
+            except Exception as e:
+                if isinstance(e, TradeError):
+                    logger.warning("call stack is:\n%s", e.stack)
+                else:
+                    logger.exception(e)
+                if stop_on_error:
+                    raise e
+
+            # trading day ends
+            if (not intra_day and next_frame > frame_) or (
+                intra_day and next_frame.date() > frame_.date()
+            ):
+                await self.after_trade(frame_, day_barss)
+
+        self.broker.stop_backtest()
+
+        await self.after_stop()
+        self.bills = self.broker.bills()
+        baseline = kwargs.get("baseline", "399300.XSHE")
+        self.metrics = self.broker.metrics(baseline=baseline)
+        self.bs.baseline = baseline
+
+    @property
+    def cash(self):
+        """返回当前可用现金"""
+        return self.broker.available_money
+
+    def positions(self, dt: Optional[datetime.date] = None):
+        """返回当前持仓"""
+        return self.broker.positions(dt)
+
+    def available_shares(self, sec: str, dt: Optional[Frame] = None):
+        """返回给定股票在`dt`日的可售股数
+
+        Args:
+            sec: 证券代码
+            dt: 日期,在实盘中无意义,只能返回最新数据;在回测时,必须指定日期,且返回指定日期下的持仓。
+        """
+        return self.broker.available_shares(sec, dt)
+
+    async def buy(
+        self,
+        sec: str,
+        price: Optional[float] = None,
+        vol: Optional[int] = None,
+        money: Optional[float] = None,
+        order_time: Optional[datetime.datetime] = None,
+    ) -> Dict:
+        """买入股票
+
+        Args:
+            sec: 证券代码
+            price: 委买价。如果为None,则自动转市价买入。
+            vol: 委买股数。请自行保证为100的整数。如果为None, 则money必须传入。
+            money: 委买金额。如果同时传入了vol,则此参数自动忽略
+            order_time: 仅在回测模式下需要提供。实盘模式下,此参数自动被忽略
+        Returns:
+            见traderclient中的`buy`方法。
+        """
+        logger.debug(
+            "buy order: %s, %s, %s, %s",
+            sec,
+            f"{price:.2f}" if price is not None else None,
+            f"{vol:.0f}" if vol is not None else None,
+            f"{money:.0f}" if money is not None else None,
+            date=order_time,
+        )
+
+        if vol is None:
+            if money is None:
+                raise ValueError("parameter `mnoey` must be presented!")
+
+            return await self.broker.buy_by_money(
+                sec, money, price, order_time=order_time
+            )
+        elif price is None:
+            return self.broker.market_buy(sec, vol, order_time=order_time)
+        else:
+            return self.broker.buy(sec, price, vol, order_time=order_time)
+
+    async def sell(
+        self,
+        sec: str,
+        price: Optional[float] = None,
+        vol: Optional[float] = None,
+        percent: Optional[float] = None,
+        order_time: Optional[datetime.datetime] = None,
+    ) -> Union[List, Dict]:
+        """卖出股票
+
+        Args:
+            sec: 证券代码
+            price: 委卖价,如果未提供,则转为市价单
+            vol: 委卖股数。如果为None,则percent必须传入
+            percent: 卖出一定比例的持仓,取值介于0与1之间。如果与vol同时提供,此参数将被忽略。请自行保证按比例换算后的卖出数据是符合要求的(比如不为100的倍数,但有些情况下这是允许的,所以程序这里无法帮你判断)
+            order_time: 仅在回测模式下需要提供。实盘模式下,此参数自动被忽略
+
+        Returns:
+            Union[List, Dict]: 成交返回,详见traderclient中的`buy`方法,trade server只返回一个委托单信息
+        """
+        logger.debug(
+            "sell order: %s, %s, %s, %s",
+            sec,
+            f"{price:.2f}" if price is not None else None,
+            f"{vol:.0f}" if vol is not None else None,
+            f"{percent:.2%}" if percent is not None else None,
+            date=order_time,
+        )
+
+        if vol is None and percent is None:
+            raise ValueError("either vol or percent must be presented")
+
+        if vol is None:
+            if price is None:
+                price = await self.broker._get_market_sell_price(
+                    sec, order_time=order_time
+                )
+            # there's no market_sell_percent API in traderclient
+            return self.broker.sell_percent(sec, price, percent, order_time=order_time)  # type: ignore
+        else:
+            if price is None:
+                return self.broker.market_sell(sec, vol, order_time=order_time)
+            else:
+                return self.broker.sell(sec, price, vol, order_time=order_time)
+
+    async def filter_paused_stock(self, buylist: List[str], dt: datetime.date):
+        secs = await Security.select(dt).eval()
+        in_trading = jq.get_price(
+            secs, fields=["paused"], start_date=dt, end_date=dt, skip_paused=True
+        )["code"].to_numpy()
+
+        return np.intersect1d(buylist, in_trading)
+
+    async def before_start(self):
+        """策略启动前的准备工作。
+
+        在一次回测中,它会在backtest中、进入循环之前调用。如果策略需要根据过去的数据来计算一些自适应参数,可以在此方法中实现。
+        """
+        if self.bs is not None:
+            logger.info(
+                "BEFORE_START: %s<%s - %s>",
+                self.name,
+                self.bs.start,
+                self.bs.end,
+                date=self.bs.start,
+            )
+        else:
+            logger.info("BEFORE_START: %s", self.name)
+
+    async def before_trade(self, date: datetime.date, barss: Optional[Dict[str, BarsArray]]=None):
+        """每日开盘前的准备工作
+
+        Args:
+            date: 日期。在回测中为回测当日日期,在实盘中为系统日期
+            barss: 如果主周期为日线,且支持预取,则会将预取的barss传入
+        """
+        logger.debug("BEFORE_TRADE: %s", self.name, date=date)
+
+    async def after_trade(self, date: Frame, barss: Optional[Dict[str, BarsArray]]=None):
+        """每日收盘后的收尾工作
+
+        Args:
+            date: 日期。在回测中为回测当日日期,在实盘中为系统日期
+            barss: 如果主周期为日线,且支持预取,则会将预取的barss传入
+        """
+        logger.debug("AFTER_TRADE: %s", self.name, date=date)
+
+    async def after_stop(self):
+        if self.bs is not None:
+            logger.info(
+                "STOP %s<%s - %s>",
+                self.name,
+                self.bs.start,
+                self.bs.end,
+                date=self.bs.end,
+            )
+        else:
+            logger.info("STOP %s", self.name)
+
+    async def predict(
+        self,
+        frame: Frame,
+        frame_type: FrameType,
+        i: int,
+        barss: Optional[Dict[str, BarsArray]] = None,
+        **kwargs,
+    ):
+        """策略评估函数。在此函数中实现交易信号检测和处理。
+
+        Args:
+            frame: 当前时间帧
+            frame_type: 处理的数据主周期
+            i: 当前时间离回测起始的单位数
+            barss: 如果调用`backtest`时传入了`portfolio`及参数,则`backtest`将会在回测之前,预取从[start - warmup_period * frame_type, end]间的portfolio行情数据,并在每次调用`predict`方法时,通过`barss`参数,将[start - warmup_period * frame_type, start + i * frame_type]间的数据传给`predict`方法。传入的数据已进行前复权。
+
+        Keyword Args: 在`backtest`方法中的传入的kwargs参数将被透传到此方法中。
+        """
+        raise NotImplementedError
+
+    @deprecated("2.0.0", details="use `make_report` instead")
+    async def plot_metrics(
+        self, indicator: Union[pd.DataFrame, List[Tuple], None] = None
+    ):
+        return await self.make_report(indicator)
+
+    async def make_report(
+        self, indicator: Union[pd.DataFrame, List[Tuple], None] = None
+    ):
+        """策略回测报告
+
+        Args:
+            indicator: 回测时使用的指标。如果存在,将叠加到策略回测图上。它应该是一个以日期为索引,指标列名为"value"的DataFrame
+        """
+        if self.bills is None or self.metrics is None:
+            raise ValueError("Please run `start_backtest` first.")
+
+        if isinstance(indicator, list):
+            assert len(indicator[0]) == 2
+            indicator = pd.DataFrame(indicator, columns=["date", "value"])
+            indicator.set_index("date", inplace=True)
+
+        mg = MetricsGraph(
+            self.bills,
+            self.metrics,
+            indicator=indicator,
+            baseline_code=self.bs.baseline,
+        )
+        await mg.plot()
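
下面给出一个最小化的策略子类与回测调用示意。其中服务器地址、证券代码与各项参数均为虚构,仅用于演示 `predict`/`buy`/`sell` 的基本配合方式;实际可用的地址与参数请以 traderclient 及回测服务的文档为准。

```
import datetime
from coretypes import FrameType
from omicron.strategy.base import BaseStrategy

class MaCrossStrategy(BaseStrategy):
    """示意:5日/10日均线交叉策略"""

    async def predict(self, frame, frame_type, i, barss=None, **kwargs):
        if not barss:
            return
        for code, bars in barss.items():
            if len(bars) < 10:
                continue
            ma5 = bars["close"][-5:].mean()
            ma10 = bars["close"][-10:].mean()
            if ma5 > ma10 and self.available_shares(code, frame) == 0:
                # 回测模式下需要提供 order_time;日线回测时此处直接使用当前 frame
                await self.buy(code, money=self.cash * 0.2, order_time=frame)
            elif ma5 < ma10 and self.available_shares(code, frame) > 0:
                await self.sell(code, percent=1.0, order_time=frame)

async def run():
    s = MaCrossStrategy(
        "http://localhost:7080/api/trade/v0.5",   # 虚构的回测服务地址
        is_backtest=True,
        start=datetime.date(2023, 1, 4),
        end=datetime.date(2023, 3, 31),
        frame_type=FrameType.DAY,
        warmup_period=10,
    )
    await s.backtest(prefetch_stocks=["000001.XSHE", "600000.XSHG"])
    await s.make_report()
```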
+
+
+ + + +
+ + + + + + +
+ + + +

+cash + + + property + readonly + + +

+ +
+ +

返回当前可用现金

+
+ +
+ + + + + + +
+ + + +

+__init__(self, url, account=None, token=None, name=None, ver=None, is_backtest=True, start=None, end=None, frame_type=None, warmup_period=0) + + + special + + +

+ +
+ +

构造函数

+ +

Parameters:

- url (str, required): 实盘/回测服务器的地址。
- account (Optional[str], 默认 None): 实盘/回测账号。实盘模式下必须传入。在回测模式下,如果未传入,将以策略名+随机字符构建账号。
- token (Optional[str], 默认 None): 实盘/回测时用的token。实盘模式下必须传入。在回测模式下,如果未传入,将自动生成。
- name (Optional[str], 默认 None): 策略名。如果不传入,则使用类名小写。
- ver (Optional[str], 默认 None): 策略版本号。如果不传入,则默认为0.1。
- is_backtest (bool, 默认 True): 是否为回测模式。
- start (Union[datetime.date, datetime.datetime], 默认 None): 回测起始日期。回测模式下必须传入。
- end (Union[datetime.date, datetime.datetime], 默认 None): 回测结束日期。回测模式下必须传入。
- frame_type (Optional[FrameType], 默认 None): 回测时使用的主周期。回测模式下必须传入。
- warmup_period (int, 默认 0): 策略执行时需要的最小bar数(以frame_type计)。
+ Source code in omicron/strategy/base.py +
def __init__(
+    self,
+    url: str,
+    account: Optional[str] = None,
+    token: Optional[str] = None,
+    name: Optional[str] = None,
+    ver: Optional[str] = None,
+    is_backtest: bool = True,
+    start: Optional[Frame] = None,
+    end: Optional[Frame] = None,
+    frame_type: Optional[FrameType] = None,
+    warmup_period: int = 0,
+):
+    """构造函数
+
+    Args:
+        url: 实盘/回测服务器的地址。
+        start: 回测起始日期。回测模式下必须传入。
+        end: 回测结束日期。回测模式下必须传入。
+        account: 实盘/回测账号。实盘模式下必须传入。在回测模式下,如果未传入,将以策略名+随机字符构建账号。
+        token: 实盘/回测时用的token。实盘模式下必须传入。在回测模式下,如果未传入,将自动生成。
+        is_backtest: 是否为回测模式。
+        name: 策略名。如果不传入,则使用类名字小写
+        ver: 策略版本号。如果不传入,则默认为0.1.
+        start: 如果是回测模式,则需要提供回测起始时间
+        end: 如果是回测模式,则需要提供回测结束时间
+        frame_type: 如果是回测模式,则需要提供回测时使用的主周期
+        warmup_period: 策略执行时需要的最小bar数(以frame_type)计。
+    """
+    self.ver = ver or "0.1"
+    self.name = name or self.__class__.__name__.lower() + f"_v{self.ver}"
+
+    self.token = token or uuid.uuid4().hex
+    self.account = account or f"smallcap-{self.token[-4:]}"
+
+    self.url = url
+    self.bills = None
+    self.metrics = None
+
+    # used by both live and backtest
+    self.warmup_period = warmup_period
+    self.is_backtest = is_backtest
+    if is_backtest:
+        if start is None or end is None or frame_type is None:
+            raise ValueError("start, end and frame_type must be presented.")
+
+        self.bs = BacktestState(start, end, None, 0, warmup_period)
+        self._frame_type = frame_type
+        self.broker = TraderClient(
+            url,
+            self.account,
+            self.token,
+            is_backtest=True,
+            start=self.bs.start,
+            end=self.bs.end,
+        )
+    else:
+        if account is None or token is None:
+            raise ValueError("account and token must be presented.")
+
+        self.broker = TraderClient(url, self.account, self.token, is_backtest=False)
+
+
+
+ +
+ + + + +
+ + + +

+after_trade(self, date, barss=None) + + + async + + +

+ +
+ +

每日收盘后的收尾工作

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dateUnion[datetime.date, datetime.datetime]

日期。在回测中为回测当日日期,在实盘中为系统日期

required
barssOptional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]]]

如果主周期为日线,且支持预取,则会将预取的barss传入

None
+
+ Source code in omicron/strategy/base.py +
async def after_trade(self, date: Frame, barss: Optional[Dict[str, BarsArray]]=None):
+    """每日收盘后的收尾工作
+
+    Args:
+        date: 日期。在回测中为回测当日日期,在实盘中为系统日期
+        barss: 如果主周期为日线,且支持预取,则会将预取的barss传入
+    """
+    logger.debug("AFTER_TRADE: %s", self.name, date=date)
+
+
+
+ +
+ + + +
+ + + +

+available_shares(self, sec, dt=None) + + +

+ +
+ +

返回给定股票在dt日的可售股数

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
secstr

证券代码

required
dtUnion[datetime.date, datetime.datetime]

日期,在实盘中无意义,只能返回最新数据;在回测时,必须指定日期,且返回指定日期下的持仓。

None
+
+ Source code in omicron/strategy/base.py +
def available_shares(self, sec: str, dt: Optional[Frame] = None):
+    """返回给定股票在`dt`日的可售股数
+
+    Args:
+        sec: 证券代码
+        dt: 日期,在实盘中无意义,只能返回最新数据;在回测时,必须指定日期,且返回指定日期下的持仓。
+    """
+    return self.broker.available_shares(sec, dt)
+
+
+
+ +
+ + + +
+ + + +

+backtest(self, stop_on_error=True, **kwargs) + + + async + + +

+ +
+ +

执行回测

+ +

Parameters:

- stop_on_error (bool, 默认 True): 如果为True,则发生异常时,将停止回测。否则忽略错误,继续执行。

Keyword arguments:

- prefetch_stocks (List[str]): 代码列表。在该列表中的品种,将在回测之前自动预取行情数据,并在调用predict时,传入截止到当前frame的、长度为n的行情数据。行情周期由构造时的frame_type指定。预取数据长度由`self.warmup_period`决定。
+ Source code in omicron/strategy/base.py +
async def backtest(self, stop_on_error: bool = True, **kwargs):
+    """执行回测
+
+    Args:
+        stop_on_error: 如果为True,则发生异常时,将停止回测。否则忽略错误,继续执行。
+    Keyword Args:
+        prefetch_stocks Dict[str, BarsArray]: 代码列表。在该列表中的品种,将在回测之前自动预取行情数据,并在调用predict时,传入截止到当前frame的,长度为n的行情数据。行情周期由构造时的frame_type指定。预取数据长度由`self.warmup_period`决定
+    """
+    prefetch_stocks: List[str] = kwargs.get("prefetch_stocks")  # type: ignore
+    await self._cache_bars_for_backtest(prefetch_stocks, self.warmup_period)
+    self.bs.cursor = self.warmup_period
+
+    intra_day = self._frame_type in tf.minute_level_frames
+    converter = tf.int2time if intra_day else tf.int2date
+
+    await self.before_start()
+
+    # 最后一周期不做预测,留出来执行上一周期的信号
+    end_ = tf.shift(self.bs.end, -1, self._frame_type)
+    for i, frame in enumerate(
+        tf.get_frames(self.bs.start, end_, self._frame_type)  # type: ignore
+    ):
+        barss = self._next()
+        day_barss = barss if self._frame_type == FrameType.DAY else None
+        frame_ = converter(frame)
+
+        prev_frame = tf.shift(frame_, -1, self._frame_type)
+        next_frame = tf.shift(frame_, 1, self._frame_type)
+
+        # new trading day start
+        if (not intra_day and prev_frame < frame_) or (
+            intra_day and prev_frame.date() < frame_.date()
+        ):
+            await self.before_trade(frame_, day_barss)
+
+        logger.debug("%sth iteration", i, date=frame_)
+        try:
+            await self.predict(
+                frame_, self._frame_type, i, barss=barss, **kwargs  # type: ignore
+            )
+        except Exception as e:
+            if isinstance(e, TradeError):
+                logger.warning("call stack is:\n%s", e.stack)
+            else:
+                logger.exception(e)
+            if stop_on_error:
+                raise e
+
+        # trading day ends
+        if (not intra_day and next_frame > frame_) or (
+            intra_day and next_frame.date() > frame_.date()
+        ):
+            await self.after_trade(frame_, day_barss)
+
+    self.broker.stop_backtest()
+
+    await self.after_stop()
+    self.bills = self.broker.bills()
+    baseline = kwargs.get("baseline", "399300.XSHE")
+    self.metrics = self.broker.metrics(baseline=baseline)
+    self.bs.baseline = baseline
+
+
+
+ +
+ + + +
+ + + +

+before_start(self) + + + async + + +

+ +
+ +

策略启动前的准备工作。

+

在一次回测中,它会在backtest中、进入循环之前调用。如果策略需要根据过去的数据来计算一些自适应参数,可以在此方法中实现。

+ +
+ Source code in omicron/strategy/base.py +
async def before_start(self):
+    """策略启动前的准备工作。
+
+    在一次回测中,它会在backtest中、进入循环之前调用。如果策略需要根据过去的数据来计算一些自适应参数,可以在此方法中实现。
+    """
+    if self.bs is not None:
+        logger.info(
+            "BEFORE_START: %s<%s - %s>",
+            self.name,
+            self.bs.start,
+            self.bs.end,
+            date=self.bs.start,
+        )
+    else:
+        logger.info("BEFORE_START: %s", self.name)
+
+
+
+ +
+ + + +
+ + + +

+before_trade(self, date, barss=None) + + + async + + +

+ +
+ +

每日开盘前的准备工作

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datedate

日期。在回测中为回测当日日期,在实盘中为系统日期

required
barssOptional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]]]

如果主周期为日线,且支持预取,则会将预取的barss传入

None
+
+ Source code in omicron/strategy/base.py +
async def before_trade(self, date: datetime.date, barss: Optional[Dict[str, BarsArray]]=None):
+    """每日开盘前的准备工作
+
+    Args:
+        date: 日期。在回测中为回测当日日期,在实盘中为系统日期
+        barss: 如果主周期为日线,且支持预取,则会将预取的barss传入
+    """
+    logger.debug("BEFORE_TRADE: %s", self.name, date=date)
+
+
+
+ +
+ + + +
+ + + +

+buy(self, sec, price=None, vol=None, money=None, order_time=None) + + + async + + +

+ +
+ +

买入股票

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
secstr

证券代码

required
priceOptional[float]

委买价。如果为None,则自动转市价买入。

None
volOptional[int]

委买股数。请自行保证为100的整数。如果为None, 则money必须传入。

None
moneyOptional[float]

委买金额。如果同时传入了vol,则此参数自动忽略

None
order_timeOptional[datetime.datetime]

仅在回测模式下需要提供。实盘模式下,此参数自动被忽略

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict

见traderclient中的buy方法。

+
+ Source code in omicron/strategy/base.py +
async def buy(
+    self,
+    sec: str,
+    price: Optional[float] = None,
+    vol: Optional[int] = None,
+    money: Optional[float] = None,
+    order_time: Optional[datetime.datetime] = None,
+) -> Dict:
+    """买入股票
+
+    Args:
+        sec: 证券代码
+        price: 委买价。如果为None,则自动转市价买入。
+        vol: 委买股数。请自行保证为100的整数。如果为None, 则money必须传入。
+        money: 委买金额。如果同时传入了vol,则此参数自动忽略
+        order_time: 仅在回测模式下需要提供。实盘模式下,此参数自动被忽略
+    Returns:
+        见traderclient中的`buy`方法。
+    """
+    logger.debug(
+        "buy order: %s, %s, %s, %s",
+        sec,
+        f"{price:.2f}" if price is not None else None,
+        f"{vol:.0f}" if vol is not None else None,
+        f"{money:.0f}" if money is not None else None,
+        date=order_time,
+    )
+
+    if vol is None:
+        if money is None:
+            raise ValueError("parameter `mnoey` must be presented!")
+
+        return await self.broker.buy_by_money(
+            sec, money, price, order_time=order_time
+        )
+    elif price is None:
+        return self.broker.market_buy(sec, vol, order_time=order_time)
+    else:
+        return self.broker.buy(sec, price, vol, order_time=order_time)
+
+
+
+ +
+ + + + +
+ + + +

+make_report(self, indicator=None) + + + async + + +

+ +
+ +

策略回测报告

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indicatorUnion[pandas.core.frame.DataFrame, List[Tuple]]

回测时使用的指标。如果存在,将叠加到策略回测图上。它应该是一个以日期为索引,指标列名为"value"的DataFrame

None
+
+ Source code in omicron/strategy/base.py +
async def make_report(
+    self, indicator: Union[pd.DataFrame, List[Tuple], None] = None
+):
+    """策略回测报告
+
+    Args:
+        indicator: 回测时使用的指标。如果存在,将叠加到策略回测图上。它应该是一个以日期为索引,指标列名为"value"的DataFrame
+    """
+    if self.bills is None or self.metrics is None:
+        raise ValueError("Please run `start_backtest` first.")
+
+    if isinstance(indicator, list):
+        assert len(indicator[0]) == 2
+        indicator = pd.DataFrame(indicator, columns=["date", "value"])
+        indicator.set_index("date", inplace=True)
+
+    mg = MetricsGraph(
+        self.bills,
+        self.metrics,
+        indicator=indicator,
+        baseline_code=self.bs.baseline,
+    )
+    await mg.plot()
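+
A short sketch of how the `indicator` argument might be supplied after a backtest, either as a list of `(date, value)` tuples or as an equivalent DataFrame indexed by date. The variable names and dates are illustrative assumptions.

```python
# Illustrative sketch -- assumes the strategy has already been backtested,
# so that bills and metrics are populated.
import datetime
import pandas as pd

signals = [
    (datetime.date(2023, 1, 4), 1),
    (datetime.date(2023, 1, 5), -1),
]  # hypothetical (date, value) pairs, e.g. collected during predict()

# either pass the list directly ...
await strategy.make_report(indicator=signals)

# ... or a DataFrame indexed by date with a "value" column
df = pd.DataFrame(signals, columns=["date", "value"]).set_index("date")
await strategy.make_report(indicator=df)
```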
+
+
+
+ +
+ + + +
+ + + +

+peek(self, code, n) + + + async + + +

+ +
+ +

允许策略偷看未来数据

+

可用以因子检验场景。要求数据本身已缓存。否则请用Stock.get_bars等方法获取。

+ +
+ Source code in omicron/strategy/base.py +
async def peek(self, code: str, n: int):
+    """允许策略偷看未来数据
+
+    可用以因子检验场景。要求数据本身已缓存。否则请用Stock.get_bars等方法获取。
+    """
+    if self.bs is None or self.bs.barss is None:
+        raise ValueError("data is not cached")
+
+    if code in self.bs.barss:
+        if self.bs.cursor + n + 1 < len(self.bs.barss[code]):
+            return Stock.qfq(
+                self.bs.barss[code][self.bs.cursor : self.bs.cursor + n]
+            )
+
+    else:
+        raise ValueError("data is not cached")
+
+
+
+ +
+ + + +
+ + + +

+plot_metrics(self, indicator=None) + + + async + + +

+ +
+ +

.. deprecated:: 2.0.0 use make_report instead

+ +
+ Source code in omicron/strategy/base.py +
@deprecated("2.0.0", details="use `make_report` instead")
+async def plot_metrics(
+    self, indicator: Union[pd.DataFrame, List[Tuple], None] = None
+):
+    return await self.make_report(indicator)
+
+
+
+ +
+ + + +
+ + + +

+positions(self, dt=None) + + +

+ +
+ +

返回当前持仓

+ +
+ Source code in omicron/strategy/base.py +
def positions(self, dt: Optional[datetime.date] = None):
+    """返回当前持仓"""
+    return self.broker.positions(dt)
+
+
+
+ +
+ + + +
+ + + +

+predict(self, frame, frame_type, i, barss=None, **kwargs) + + + async + + +

+ +
+ +

策略评估函数。在此函数中实现交易信号检测和处理。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frameUnion[datetime.date, datetime.datetime]

当前时间帧

required
frame_typeFrameType

处理的数据主周期

required
iint

当前时间离回测起始的单位数

required
barssOptional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')])]]]]

如果调用backtest时传入了portfolio及参数,则backtest将会在回测之前,预取从[start - warmup_period * frame_type, end]间的portfolio行情数据,并在每次调用predict方法时,通过barss参数,将[start - warmup_period * frame_type, start + i * frame_type]间的数据传给predict方法。传入的数据已进行前复权。

None

Keyword Args: 在backtest方法中的传入的kwargs参数将被透传到此方法中。

+ +
+ Source code in omicron/strategy/base.py +
async def predict(
+    self,
+    frame: Frame,
+    frame_type: FrameType,
+    i: int,
+    barss: Optional[Dict[str, BarsArray]] = None,
+    **kwargs,
+):
+    """策略评估函数。在此函数中实现交易信号检测和处理。
+
+    Args:
+        frame: 当前时间帧
+        frame_type: 处理的数据主周期
+        i: 当前时间离回测起始的单位数
+        barss: 如果调用`backtest`时传入了`portfolio`及参数,则`backtest`将会在回测之前,预取从[start - warmup_period * frame_type, end]间的portfolio行情数据,并在每次调用`predict`方法时,通过`barss`参数,将[start - warmup_period * frame_type, start + i * frame_type]间的数据传给`predict`方法。传入的数据已进行前复权。
+
+    Keyword Args: 在`backtest`方法中的传入的kwargs参数将被透传到此方法中。
+    """
+    raise NotImplementedError
+
+
+
+ +
+ + + +
+ + + +

+sell(self, sec, price=None, vol=None, percent=None, order_time=None) + + + async + + +

+ +
+ +

卖出股票

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
secstr

证券代码

required
priceOptional[float]

委卖价,如果未提供,则转为市价单

None
volOptional[float]

委卖股数。如果为None,则percent必须传入

None
percentOptional[float]

卖出一定比例的持仓,取值介于0与1之间。如果与vol同时提供,此参数将被忽略。请自行保证按比例换算后的卖出数据是符合要求的(比如不为100的倍数,但有些情况下这是允许的,所以程序这里无法帮你判断)

None
order_timeOptional[datetime.datetime]

仅在回测模式下需要提供。实盘模式下,此参数自动被忽略

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Union[List, Dict]

成交返回,详见traderclient中的buy方法,trade server只返回一个委托单信息

+
+ Source code in omicron/strategy/base.py +
async def sell(
+    self,
+    sec: str,
+    price: Optional[float] = None,
+    vol: Optional[float] = None,
+    percent: Optional[float] = None,
+    order_time: Optional[datetime.datetime] = None,
+) -> Union[List, Dict]:
+    """卖出股票
+
+    Args:
+        sec: 证券代码
+        price: 委卖价,如果未提供,则转为市价单
+        vol: 委卖股数。如果为None,则percent必须传入
+        percent: 卖出一定比例的持仓,取值介于0与1之间。如果与vol同时提供,此参数将被忽略。请自行保证按比例换算后的卖出数据是符合要求的(比如不为100的倍数,但有些情况下这是允许的,所以程序这里无法帮你判断)
+        order_time: 仅在回测模式下需要提供。实盘模式下,此参数自动被忽略
+
+    Returns:
+        Union[List, Dict]: 成交返回,详见traderclient中的`buy`方法,trade server只返回一个委托单信息
+    """
+    logger.debug(
+        "sell order: %s, %s, %s, %s",
+        sec,
+        f"{price:.2f}" if price is not None else None,
+        f"{vol:.0f}" if vol is not None else None,
+        f"{percent:.2%}" if percent is not None else None,
+        date=order_time,
+    )
+
+    if vol is None and percent is None:
+        raise ValueError("either vol or percent must be presented")
+
+    if vol is None:
+        if price is None:
+            price = await self.broker._get_market_sell_price(
+                sec, order_time=order_time
+            )
+        # there's no market_sell_percent API in traderclient
+        return self.broker.sell_percent(sec, price, percent, order_time=order_time)  # type: ignore
+    else:
+        if price is None:
+            return self.broker.market_sell(sec, vol, order_time=order_time)
+        else:
+            return self.broker.sell(sec, price, vol, order_time=order_time)
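+
A minimal usage sketch of `sell` from inside a strategy; the security code, price and order time are illustrative assumptions.

```python
# Illustrative sketch only.
# Close the whole position at market price (vol omitted, percent given, no price):
await self.sell(
    "000001.XSHE",
    percent=1.0,
    order_time=tf.combine_time(frame, 14, 55),
)

# Sell a fixed number of shares with a limit price:
await self.sell(
    "000001.XSHE",
    price=10.5,
    vol=200,
    order_time=tf.combine_time(frame, 14, 55),
)
```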
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + +
+ + + +

+ sma + + + +

+ +
+ + + + +
+ + + + + + + + +
+ + + +

+ +SMAStrategy (BaseStrategy) + + + + +

+ +
+ + +
+ Source code in omicron/strategy/sma.py +
class SMAStrategy(BaseStrategy):
+    def __init__(self, sec: str, n_short: int = 5, n_long: int = 10, *args, **kwargs):
+        self._sec = sec
+        self._n_short = n_short
+        self._n_long = n_long
+
+        self.indicators = []
+
+        super().__init__(*args, **kwargs)
+
+    async def before_start(self):
+        date = self.bs.end if self.bs is not None else None
+        logger.info("before_start, cash is %s", self.cash, date=date)
+
+    async def before_trade(self, date: datetime.date):
+        logger.info(
+            "before_trade, cash is %s, portfolio is %s",
+            self.cash,
+            self.positions(date),
+            date=date,
+        )
+
+    async def after_trade(self, date: datetime.date):
+        logger.info(
+            "after_trade, cash is %s, portfolio is %s",
+            self.cash,
+            self.positions(date),
+            date=date,
+        )
+
+    async def after_stop(self):
+        date = self.bs.end if self.bs is not None else None
+        logger.info(
+            "after_stop, cash is %s, portfolio is %s",
+            self.cash,
+            self.positions(date),
+            date=date,
+        )
+
+    async def predict(
+        self, frame: Frame, frame_type: FrameType, i: int, barss, **kwargs
+    ):
+        if barss is None:
+            raise ValueError("please specify `prefetch_stocks`")
+
+        bars: Union[BarsArray, None] = barss.get(self._sec)
+        if bars is None:
+            raise ValueError(f"{self._sec} not found in `prefetch_stocks`")
+
+        ma_short = np.mean(bars["close"][-self._n_short :])
+        ma_long = np.mean(bars["close"][-self._n_long :])
+
+        if ma_short > ma_long:
+            self.indicators.append((frame, 1))
+            if self.cash >= 100 * bars["close"][-1]:
+                await self.buy(
+                    self._sec,
+                    money=self.cash,
+                    order_time=tf.combine_time(frame, 14, 55),
+                )
+        elif ma_short < ma_long:
+            self.indicators.append((frame, -1))
+            if self.available_shares(self._sec, frame) > 0:
+                await self.sell(
+                    self._sec, percent=1.0, order_time=tf.combine_time(frame, 14, 55)
+                )
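+
A sketch of how this strategy might be driven in a backtest. The constructor and backtest entry point belong to `BaseStrategy` and are documented elsewhere on this page; the keyword names used below (`start`, `end`, `frame_type`, `portfolio`, `warmup_period`) are assumptions inferred from the `predict`/`make_report` descriptions above, not verified signatures.

```python
# Illustrative sketch only. BaseStrategy's constructor and backtest signature are
# defined in omicron/strategy/base.py; the keyword names below are assumptions
# based on the parameter descriptions quoted in this document.
import datetime
from coretypes import FrameType   # assumed import path for FrameType

sma = SMAStrategy("600000.XSHG", n_short=5, n_long=10)  # plus any broker/account args

await sma.backtest(
    start=datetime.date(2023, 1, 4),
    end=datetime.date(2023, 6, 30),
    frame_type=FrameType.DAY,
    portfolio=["600000.XSHG"],   # enables prefetch, delivered to predict via `barss`
    warmup_period=10,            # at least n_long bars for the moving averages
)

await sma.make_report(indicator=sma.indicators)   # (frame, ±1) pairs collected in predict
```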
+
+
+ + + +
+ + + + + + + + + + + +
+ + + +

+after_trade(self, date) + + + async + + +

+ +
+ +

每日收盘后的收尾工作

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datedate

日期。在回测中为回测当日日期,在实盘中为系统日期

required
barss

如果主周期为日线,且支持预取,则会将预取的barss传入

required
+
+ Source code in omicron/strategy/sma.py +
async def after_trade(self, date: datetime.date):
+    logger.info(
+        "after_trade, cash is %s, portfolio is %s",
+        self.cash,
+        self.positions(date),
+        date=date,
+    )
+
+
+
+ +
+ + + +
+ + + +

+before_start(self) + + + async + + +

+ +
+ +

策略启动前的准备工作。

+

在一次回测中,它会在backtest中、进入循环之前调用。如果策略需要根据过去的数据来计算一些自适应参数,可以在此方法中实现。

+ +
+ Source code in omicron/strategy/sma.py +
async def before_start(self):
+    date = self.bs.end if self.bs is not None else None
+    logger.info("before_start, cash is %s", self.cash, date=date)
+
+
+
+ +
+ + + +
+ + + +

+before_trade(self, date) + + + async + + +

+ +
+ +

每日开盘前的准备工作

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datedate

日期。在回测中为回测当日日期,在实盘中为系统日期

required
barss

如果主周期为日线,且支持预取,则会将预取的barss传入

required
+
+ Source code in omicron/strategy/sma.py +
async def before_trade(self, date: datetime.date):
+    logger.info(
+        "before_trade, cash is %s, portfolio is %s",
+        self.cash,
+        self.positions(date),
+        date=date,
+    )
+
+
+
+ +
+ + + +
+ + + +

+predict(self, frame, frame_type, i, barss, **kwargs) + + + async + + +

+ +
+ +

策略评估函数。在此函数中实现交易信号检测和处理。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frameUnion[datetime.date, datetime.datetime]

当前时间帧

required
frame_typeFrameType

处理的数据主周期

required
iint

当前时间离回测起始的单位数

required
barss

如果调用backtest时传入了portfolio及参数,则backtest将会在回测之前,预取从[start - warmup_period * frame_type, end]间的portfolio行情数据,并在每次调用predict方法时,通过barss参数,将[start - warmup_period * frame_type, start + i * frame_type]间的数据传给predict方法。传入的数据已进行前复权。

required

Keyword Args: 在backtest方法中的传入的kwargs参数将被透传到此方法中。

+ +
+ Source code in omicron/strategy/sma.py +
async def predict(
+    self, frame: Frame, frame_type: FrameType, i: int, barss, **kwargs
+):
+    if barss is None:
+        raise ValueError("please specify `prefetch_stocks`")
+
+    bars: Union[BarsArray, None] = barss.get(self._sec)
+    if bars is None:
+        raise ValueError(f"{self._sec} not found in `prefetch_stocks`")
+
+    ma_short = np.mean(bars["close"][-self._n_short :])
+    ma_long = np.mean(bars["close"][-self._n_long :])
+
+    if ma_short > ma_long:
+        self.indicators.append((frame, 1))
+        if self.cash >= 100 * bars["close"][-1]:
+            await self.buy(
+                self._sec,
+                money=self.cash,
+                order_time=tf.combine_time(frame, 14, 55),
+            )
+    elif ma_short < ma_long:
+        self.indicators.append((frame, -1))
+        if self.available_shares(self._sec, frame) > 0:
+            await self.sell(
+                self._sec, percent=1.0, order_time=tf.combine_time(frame, 14, 55)
+            )
+
+
+
\ No newline at end of file
diff --git a/2.0.0/api/talib/index.html b/2.0.0/api/talib/index.html
new file mode 100644
index 00000000..ce48575e
--- /dev/null
+++ b/2.0.0/api/talib/index.html
@@ -0,0 +1,4045 @@
+ talib - Omicron

+ core + + + +

+ +
+ + + + +
+ + + + + + + + +
+ + + +

+angle(ts, threshold=0.01, loss_func='re') + + +

+ +
+ +

求时间序列ts拟合直线相对于x轴的夹角的余弦值

+

本函数可以用来判断时间序列的增长趋势。当angle处于[-1, 0]时,越靠近0,下降越快;当angle处于[0, 1]时,越接近0,上升越快。

+

如果ts无法很好地拟合为直线,则返回[float, None]

+ +

Examples:

+
>>> ts = np.array([ i for i in range(5)])
+>>> round(angle(ts)[1], 3) # degree: 45, rad: pi/4
+0.707
+
+
>>> ts = np.array([ np.sqrt(3) / 3 * i for i in range(10)])
+>>> round(angle(ts)[1],3) # degree: 30, rad: pi/6
+0.866
+
+
>>> ts = np.array([ -np.sqrt(3) / 3 * i for i in range(7)])
+>>> round(angle(ts)[1], 3) # degree: 150, rad: 5*pi/6
+-0.866
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsrequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[float, float]

返回 (error, cosine(theta)),即拟合误差和夹角余弦值。

+
+ Source code in omicron/talib/core.py +
def angle(ts, threshold=0.01, loss_func="re") -> Tuple[float, float]:
+    """求时间序列`ts`拟合直线相对于`x`轴的夹角的余弦值
+
+    本函数可以用来判断时间序列的增长趋势。当`angle`处于[-1, 0]时,越靠近0,下降越快;当`angle`
+    处于[0, 1]时,越接近0,上升越快。
+
+    如果`ts`无法很好地拟合为直线,则返回[float, None]
+
+    Examples:
+
+        >>> ts = np.array([ i for i in range(5)])
+        >>> round(angle(ts)[1], 3) # degree: 45, rad: pi/4
+        0.707
+
+        >>> ts = np.array([ np.sqrt(3) / 3 * i for i in range(10)])
+        >>> round(angle(ts)[1],3) # degree: 30, rad: pi/6
+        0.866
+
+        >>> ts = np.array([ -np.sqrt(3) / 3 * i for i in range(7)])
+        >>> round(angle(ts)[1], 3) # degree: 150, rad: 5*pi/6
+        -0.866
+
+    Args:
+        ts:
+
+    Returns:
+        返回 (error, cosine(theta)),即拟合误差和夹角余弦值。
+
+    """
+    err, (a, b) = polyfit(ts, deg=1, loss_func=loss_func)
+    if err > threshold:
+        return (err, None)
+
+    v = np.array([1, a + b])
+    vx = np.array([1, 0])
+
+    return err, copysign(np.dot(v, vx) / (norm(v) * norm(vx)), a)
+
+
+
+ +
+ + + +
+ + + +

+clustering(numbers, n) + + +

+ +
+ +

将数组numbers划分为n个簇

+

返回值为一个List, 每一个元素为一个列表,分别为簇的起始点和长度。

+ +

Examples:

+
>>> numbers = np.array([1,1,1,2,4,6,8,7,4,5,6])
+>>> clustering(numbers, 2)
+[(0, 4), (4, 7)]
+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[Tuple[int, int]]

划分后的簇列表。

+
+ Source code in omicron/talib/core.py +
def clustering(numbers: np.ndarray, n: int) -> List[Tuple[int, int]]:
+    """将数组`numbers`划分为`n`个簇
+
+    返回值为一个List, 每一个元素为一个列表,分别为簇的起始点和长度。
+
+    Examples:
+        >>> numbers = np.array([1,1,1,2,4,6,8,7,4,5,6])
+        >>> clustering(numbers, 2)
+        [(0, 4), (4, 7)]
+
+    Returns:
+        划分后的簇列表。
+    """
+    result = ckwrap.cksegs(numbers, n)
+
+    clusters = []
+    for pos, size in zip(result.centers, result.sizes):
+        clusters.append((int(pos - size // 2 - 1), int(size)))
+
+    return clusters
+
+
+
+ +
+ + + +
+ + + +

+exp_moving_average(values, window) + + +

+ +
+ +

Numpy implementation of EMA

+ +
+ Source code in omicron/talib/core.py +
def exp_moving_average(values, window):
+    """Numpy implementation of EMA"""
+    weights = np.exp(np.linspace(-1.0, 0.0, window))
+    weights /= weights.sum()
+    a = np.convolve(values, weights, mode="full")[: len(values)]
+    a[:window] = a[window]
+
+    return a
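+
A small usage sketch on synthetic data; the series itself is illustrative.

```python
import numpy as np

# smooth a synthetic random-walk series with a 10-point exponential window;
# the output has the same length as the input.
values = 100 + np.cumsum(np.random.randn(250) * 0.5)
ema10 = exp_moving_average(values, 10)
```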
+
+
+
+ +
+ + + +
+ + + +

+mean_absolute_error(y, y_hat) + + +

+ +
+ +

返回预测序列相对于真值序列的平均绝对值差

+

两个序列应该具有相同的长度。如果存在nan,则nan的值不计入平均值。

+ +

Examples:

+
>>> y = np.arange(5)
+>>> y_hat = np.arange(5)
+>>> y_hat[4] = 0
+>>> mean_absolute_error(y, y)
+0.0
+
+
>>> mean_absolute_error(y, y_hat)
+0.8
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ynp.array

真值序列

required
y_hat<built-in function array>

比较序列

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
float

平均绝对值差

+
+ Source code in omicron/talib/core.py +
def mean_absolute_error(y: np.array, y_hat: np.array) -> float:
+    """返回预测序列相对于真值序列的平均绝对值差
+
+    两个序列应该具有相同的长度。如果存在nan,则nan的值不计入平均值。
+
+    Examples:
+
+        >>> y = np.arange(5)
+        >>> y_hat = np.arange(5)
+        >>> y_hat[4] = 0
+        >>> mean_absolute_error(y, y)
+        0.0
+
+        >>> mean_absolute_error(y, y_hat)
+        0.8
+
+    Args:
+        y (np.array): 真值序列
+        y_hat: 比较序列
+
+    Returns:
+        float: 平均绝对值差
+    """
+    return nanmean(np.abs(y - y_hat))
+
+
+
+ +
+ + + +
+ + + +

+moving_average(ts, win, padding=True) + + +

+ +
+ +

生成ts序列的移动平均值

+ +

Examples:

+
>>> ts = np.arange(7)
+>>> moving_average(ts, 5)
+array([nan, nan, nan, nan,  2.,  3.,  4.])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsSequence

the input array

required
winint

the window size

required
padding

if True, then the return will be equal length as input, padding with np.NaN at the beginning

True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
ndarray

The moving mean of the input array along the specified axis. The output has the same shape as the input.

+
+ Source code in omicron/talib/core.py +
def moving_average(ts: Sequence, win: int, padding=True) -> np.ndarray:
+    """生成ts序列的移动平均值
+
+    Examples:
+
+        >>> ts = np.arange(7)
+        >>> moving_average(ts, 5)
+        array([nan, nan, nan, nan,  2.,  3.,  4.])
+
+    Args:
+        ts (Sequence): the input array
+        win (int): the window size
+        padding: if True, then the return will be equal length as input, padding with np.NaN at the beginning
+
+    Returns:
+        The moving mean of the input array along the specified axis. The output has the same shape as the input.
+    """
+    ma = move_mean(ts, win)
+    if padding:
+        return ma
+    else:
+        return ma[win - 1 :]
+
+
+
+ +
+ + + +
+ + + +

+normalize(X, scaler='maxabs') + + +

+ +
+ +

对数据进行规范化处理。

+

如果scaler为maxabs,则X的各元素被压缩到[-1,1]之间
如果scaler为unit_vector,则将X的各元素压缩到单位范数
如果scaler为minmax,则X的各元素被压缩到[0,1]之间
如果scaler为standard,则X的各元素被压缩到单位方差之间,且均值为零。

+

参考 sklearn

+ +

Examples:

+
>>> X = [[ 1., -1.,  2.],
+... [ 2.,  0.,  0.],
+... [ 0.,  1., -1.]]
+
+
>>> expected = [[ 0.4082, -0.4082,  0.8165],
+... [ 1.,  0.,  0.],
+... [ 0.,  0.7071, -0.7071]]
+
+
>>> X_hat = normalize(X, scaler='unit_vector')
+>>> np.testing.assert_array_almost_equal(expected, X_hat, decimal=4)
+
+
>>> expected = [[0.5, -1., 1.],
+... [1., 0., 0.],
+... [0., 1., -0.5]]
+
+
>>> X_hat = normalize(X, scaler='maxabs')
+>>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 2)
+
+
>>> expected = [[0.5       , 0.        , 1.        ],
+... [1.        , 0.5       , 0.33333333],
+... [0.        , 1.        , 0.        ]]
+>>> X_hat = normalize(X, scaler='minmax')
+>>> np.testing.assert_array_almost_equal(expected, X_hat, decimal= 3)
+
+
>>> X = [[0, 0],
+... [0, 0],
+... [1, 1],
+... [1, 1]]
+>>> expected = [[-1., -1.],
+... [-1., -1.],
+... [ 1., 1.],
+... [ 1.,  1.]]
+>>> X_hat = normalize(X, scaler='standard')
+>>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 3)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
X2D arrayrequired
scalerstr

[description]. Defaults to 'maxabs_scale'.

'maxabs'
+
+ Source code in omicron/talib/core.py +
def normalize(X, scaler="maxabs"):
+    """对数据进行规范化处理。
+
+    如果scaler为maxabs,则X的各元素被压缩到[-1,1]之间
+    如果scaler为unit_vector,则将X的各元素压缩到单位范数
+    如果scaler为minmax,则X的各元素被压缩到[0,1]之间
+    如果scaler为standard,则X的各元素被压缩到单位方差之间,且均值为零。
+
+    参考 [sklearn]
+
+    [sklearn]: https://scikit-learn.org/stable/auto_examples/preprocessing/plot_all_scaling.html#results
+
+    Examples:
+
+        >>> X = [[ 1., -1.,  2.],
+        ... [ 2.,  0.,  0.],
+        ... [ 0.,  1., -1.]]
+
+        >>> expected = [[ 0.4082, -0.4082,  0.8165],
+        ... [ 1.,  0.,  0.],
+        ... [ 0.,  0.7071, -0.7071]]
+
+        >>> X_hat = normalize(X, scaler='unit_vector')
+        >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal=4)
+
+        >>> expected = [[0.5, -1., 1.],
+        ... [1., 0., 0.],
+        ... [0., 1., -0.5]]
+
+        >>> X_hat = normalize(X, scaler='maxabs')
+        >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 2)
+
+        >>> expected = [[0.5       , 0.        , 1.        ],
+        ... [1.        , 0.5       , 0.33333333],
+        ... [0.        , 1.        , 0.        ]]
+        >>> X_hat = normalize(X, scaler='minmax')
+        >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal= 3)
+
+        >>> X = [[0, 0],
+        ... [0, 0],
+        ... [1, 1],
+        ... [1, 1]]
+        >>> expected = [[-1., -1.],
+        ... [-1., -1.],
+        ... [ 1., 1.],
+        ... [ 1.,  1.]]
+        >>> X_hat = normalize(X, scaler='standard')
+        >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 3)
+
+    Args:
+        X (2D array):
+        scaler (str, optional): [description]. Defaults to 'maxabs_scale'.
+    """
+    if scaler == "maxabs":
+        return MaxAbsScaler().fit_transform(X)
+    elif scaler == "unit_vector":
+        return sklearn.preprocessing.normalize(X, norm="l2")
+    elif scaler == "minmax":
+        return minmax_scale(X)
+    elif scaler == "standard":
+        return StandardScaler().fit_transform(X)
+
+
+
+ +
+ + + +
+ + + +

+pct_error(y, y_hat) + + +

+ +
+ +

相对于序列算术均值的误差值

+ +

Examples:

+
>>> y = np.arange(5)
+>>> y_hat = np.arange(5)
+>>> y_hat[4] = 0
+>>> pct_error(y, y_hat)
+0.4
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ynp.array

[description]

required
y_hatnp.array

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
float

[description]

+
+ Source code in omicron/talib/core.py +
def pct_error(y: np.array, y_hat: np.array) -> float:
+    """相对于序列算术均值的误差值
+
+    Examples:
+        >>> y = np.arange(5)
+        >>> y_hat = np.arange(5)
+        >>> y_hat[4] = 0
+        >>> pct_error(y, y_hat)
+        0.4
+
+    Args:
+        y (np.array): [description]
+        y_hat (np.array): [description]
+
+    Returns:
+        float: [description]
+    """
+    mae = mean_absolute_error(y, y_hat)
+    return mae / nanmean(np.abs(y))
+
+
+
+ +
+ + + +
+ + + +

+polyfit(ts, deg=2, loss_func='re') + + +

+ +
+ +

对给定的时间序列进行直线/二次曲线拟合。

+

二次曲线可以拟合到发生反转的行情,如圆弧底、圆弧顶;也可以拟合到上述趋势中的单边走势,即其中一段曲线。对于如长期均线,在一段时间内走势可能呈现为一条直线,故也可用此函数进行直线拟合。

+

为便于在不同品种、不同的时间之间对误差、系数进行比较,请事先对ts进行归一化。如果遇到无法拟合的情况(异常),将返回一个非常大的误差,并将其它项置为np.nan

+ +

Examples:

+
>>> ts = [i for i in range(5)]
+>>> err, (a, b) = polyfit(ts, deg=1)
+>>> print(round(err, 3), round(a, 1))
+0.0 1.0
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsSequence

待拟合的时间序列

required
degint

如果要进行直线拟合,取1;二次曲线拟合取2. Defaults to 2

2
loss_funcstr

误差计算方法,取值为mae, rmse,msere。Defaults to re (relative_error)

're'
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
[Tuple]

如果为直线拟合,返回误差,(a,b)(一次项系数和常数)。如果为二次曲线拟合,返回误差, (a,b,c)(二次项、一次项和常量), (vert_x, vert_y)(顶点处的index,顶点值)

+
+ Source code in omicron/talib/core.py +
def polyfit(ts: Sequence, deg: int = 2, loss_func="re") -> Tuple:
+    """对给定的时间序列进行直线/二次曲线拟合。
+
+    二次曲线可以拟合到发生反转的行情,如圆弧底、圆弧顶;也可以拟合到上述趋势中的单边走势,即其中一段曲线。对于如长期均线,在一段时间内走势可能呈现为一条直线,故也可用此函数进行直线拟合。
+
+    为便于在不同品种、不同的时间之间对误差、系数进行比较,请事先对ts进行归一化。
+    如果遇到无法拟合的情况(异常),将返回一个非常大的误差,并将其它项置为np.nan
+
+    Examples:
+        >>> ts = [i for i in range(5)]
+        >>> err, (a, b) = polyfit(ts, deg=1)
+        >>> print(round(err, 3), round(a, 1))
+        0.0 1.0
+
+    Args:
+        ts (Sequence): 待拟合的时间序列
+        deg (int): 如果要进行直线拟合,取1;二次曲线拟合取2. Defaults to 2
+        loss_func (str): 误差计算方法,取值为`mae`, `rmse`,`mse` 或`re`。Defaults to `re` (relative_error)
+    Returns:
+        [Tuple]: 如果为直线拟合,返回误差,(a,b)(一次项系数和常数)。如果为二次曲线拟合,返回
+        误差, (a,b,c)(二次项、一次项和常量), (vert_x, vert_y)(顶点处的index,顶点值)
+    """
+    if deg not in (1, 2):
+        raise ValueError("deg must be 1 or 2")
+
+    try:
+        if any(np.isnan(ts)):
+            raise ValueError("ts contains nan")
+
+        x = np.array(list(range(len(ts))))
+
+        z = np.polyfit(x, ts, deg=deg)
+
+        p = np.poly1d(z)
+        ts_hat = np.array([p(xi) for xi in x])
+
+        if loss_func == "mse":
+            error = np.mean(np.square(ts - ts_hat))
+        elif loss_func == "rmse":
+            error = np.sqrt(np.mean(np.square(ts - ts_hat)))
+        elif loss_func == "mae":
+            error = mean_absolute_error(ts, ts_hat)
+        else:  # defaults to relative error
+            error = pct_error(ts, ts_hat)
+
+        if deg == 2:
+            a, b, c = z[0], z[1], z[2]
+            axis_x = -b / (2 * a)
+            if a != 0:
+                axis_y = (4 * a * c - b * b) / (4 * a)
+            else:
+                axis_y = None
+            return error, z, (axis_x, axis_y)
+        elif deg == 1:
+            return error, z
+    except Exception:
+        error = 1e9
+        if deg == 1:
+            return error, (np.nan, np.nan)
+        else:
+            return error, (np.nan, np.nan, np.nan), (np.nan, np.nan)
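+
The docstring only demonstrates the straight-line case; below is a quick sketch of the deg=2 form on synthetic, noise-free data, showing the extra vertex term in the return value.

```python
import numpy as np

# a clean parabola with its vertex at index 3, value 0
ts = np.array([(i - 3) ** 2 for i in range(7)], dtype=float)

err, (a, b, c), (vert_x, vert_y) = polyfit(ts, deg=2)
# err is ~0 for a perfect fit; a ~ 1.0, vert_x ~ 3.0, vert_y ~ 0.0
```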
+
+
+
+ +
+ + + +
+ + + +

+slope(ts, loss_func='re') + + +

+ +
+ +

求ts表示的直线(如果能拟合成直线的话)的斜率

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
loss_funcstr

[description]. Defaults to 're'.

're'
+
+ Source code in omicron/talib/core.py +
def slope(ts: np.array, loss_func="re"):
+    """求ts表示的直线(如果能拟合成直线的话)的斜率
+
+    Args:
+        ts (np.array): [description]
+        loss_func (str, optional): [description]. Defaults to 're'.
+    """
+    err, (a, b) = polyfit(ts, deg=1, loss_func=loss_func)
+
+    return err, a
+
+
+
+ +
+ + + +
+ + + +

+smooth(ts, win, poly_order=1, mode='interp') + + +

+ +
+ +

平滑序列ts,使用窗口大小为win的平滑模型,默认使用线性模型

+

提供本函数主要基于这样的考虑: omicron的使用者可能并不熟悉信号处理的概念,这里相当于提供了相关功能的一个入口。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
winint

[description]

required
poly_orderint

[description]. Defaults to 1.

1
+
+ Source code in omicron/talib/core.py +
def smooth(ts: np.array, win: int, poly_order=1, mode="interp"):
+    """平滑序列ts,使用窗口大小为win的平滑模型,默认使用线性模型
+
+    提供本函数主要基于这样的考虑: omicron的使用者可能并不熟悉信号处理的概念,这里相当于提供了相关功能的一个入口。
+
+    Args:
+        ts (np.array): [description]
+        win (int): [description]
+        poly_order (int, optional): [description]. Defaults to 1.
+    """
+    return savgol_filter(ts, win, poly_order, mode=mode)
+
+
+
+ +
+ + + +
+ + + +

+weighted_moving_average(ts, win) + + +

+ +
+ +

计算加权移动平均

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.array

[description]

required
winint

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.array

[description]

+
+ Source code in omicron/talib/core.py +
def weighted_moving_average(ts: np.array, win: int) -> np.array:
+    """计算加权移动平均
+
+    Args:
+        ts (np.array): [description]
+        win (int): [description]
+
+    Returns:
+        np.array: [description]
+    """
+    w = [2 * (i + 1) / (win * (win + 1)) for i in range(win)]
+
+    return np.convolve(ts, w, "valid")
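+
A minimal usage sketch; note that the output is `len(ts) - win + 1` long, and that `np.convolve` applies the weight vector reversed across each window.

```python
import numpy as np

ts = np.arange(10, dtype=float)
wma5 = weighted_moving_average(ts, 5)  # 6 values; weights 2(i+1)/(win*(win+1)) sum to 1
```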
+
+
+
+ +
+ + + + + + +
+ +
+ +
+ + + +
+ + + +

+ morph + + + +

+ +
+ +

形态检测相关方法

+ + + +
+ + + + + + + + +
+ + + +

+ +BreakoutFlag (IntEnum) + + + + +

+ +
+ +

An enumeration.

+ +
+ Source code in omicron/talib/morph.py +
class BreakoutFlag(IntEnum):
+    UP = 1
+    DOWN = -1
+    NONE = 0
+
+
+ + + +
+ + + + + + + + + + + + + + +
+ +
+ +
+ + + +
+ + + +

+ +CrossFlag (IntEnum) + + + + +

+ +
+ +

An enumeration.

+ +
+ Source code in omicron/talib/morph.py +
class CrossFlag(IntEnum):
+    UPCROSS = 1
+    DOWNCROSS = -1
+    NONE = 0
+
+
+ + + +
+ + + + + + + + + + + + + + +
+ +
+ +
+ + + + +
+ + + +

+breakout(ts, upthres=0.01, downthres=-0.01, confirm=1) + + +

+ +
+ +

检测时间序列是否突破了压力线(整理线)

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.ndarray

时间序列

required
upthresfloat

请参考peaks_and_valleys

0.01
downthresfloat

请参考peaks_and_valleys

-0.01
confirmint

经过多少个bars后,才确认突破。默认为1

1
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
BreakoutFlag

如果上向突破压力线,返回1,如果向下突破压力线,返回-1,否则返回0

+
+ Source code in omicron/talib/morph.py +
def breakout(
+    ts: np.ndarray, upthres: float = 0.01, downthres: float = -0.01, confirm: int = 1
+) -> BreakoutFlag:
+    """检测时间序列是否突破了压力线(整理线)
+
+    Args:
+        ts (np.ndarray): 时间序列
+        upthres (float, optional): 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+        downthres (float, optional): 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+        confirm (int, optional): 经过多少个bars后,才确认突破。默认为1
+
+    Returns:
+        如果上向突破压力线,返回1,如果向下突破压力线,返回-1,否则返回0
+    """
+    support, resist, _ = support_resist_lines(ts[:-confirm], upthres, downthres)
+
+    x0 = len(ts) - confirm - 1
+    x = list(range(len(ts) - confirm, len(ts)))
+
+    if resist is not None:
+        if np.all(ts[x] > resist(x)) and ts[x0] <= resist(x0):
+            return BreakoutFlag.UP
+
+    if support is not None:
+        if np.all(ts[x] < support(x)) and ts[x0] >= support(x0):
+            return BreakoutFlag.DOWN
+
+    return BreakoutFlag.NONE
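+
A usage sketch on a synthetic random walk; whether an up/down breakout is reported depends on the generated series, and the 3% thresholds mirror the `support_resist_lines` example later on this page.

```python
import numpy as np

# Illustrative only: the result depends on the random series generated here.
np.random.seed(1978)
close = np.cumprod(1 + np.random.randn(100) * 0.01)

flag = breakout(close, upthres=0.03, downthres=-0.03, confirm=2)
if flag == BreakoutFlag.UP:
    print("broke above the resistance line")
elif flag == BreakoutFlag.DOWN:
    print("broke below the support line")
```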
+
+
+
+ +
+ + + +
+ + + +

+cross(f, g) + + +

+ +
+ +

判断序列f是否与g相交。如果两个序列有且仅有一个交点,则返回1表明f上交g;-1表明f下交g

+

本方法可用以判断两条均线是否相交。

+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
CrossFlag

(flag, index), 其中flag取值为:
0 无效
-1 f向下交叉g
1 f向上交叉g

+
+ Source code in omicron/talib/morph.py +
def cross(f: np.ndarray, g: np.ndarray) -> CrossFlag:
+    """判断序列f是否与g相交。如果两个序列有且仅有一个交点,则返回1表明f上交g;-1表明f下交g
+
+    本方法可用以判断两条均线是否相交。
+
+    returns:
+        (flag, index), 其中flag取值为:
+        0 无效
+        -1 f向下交叉g
+        1 f向上交叉g
+    """
+    indices = np.argwhere(np.diff(np.sign(f - g))).flatten()
+
+    if len(indices) == 0:
+        return CrossFlag.NONE, 0
+
+    # 如果存在一个或者多个交点,取最后一个
+    idx = indices[-1]
+
+    if f[idx] < g[idx]:
+        return CrossFlag.UPCROSS, idx
+    elif f[idx] > g[idx]:
+        return CrossFlag.DOWNCROSS, idx
+    else:
+        return CrossFlag(np.sign(g[idx - 1] - f[idx - 1])), idx
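+
A short sketch that checks whether a fast moving average has just crossed a slow one, using `moving_average` from this module; `close` is assumed to be a sufficiently long price series.

```python
# `close` is assumed: a float array with at least ~20 points.
fast = moving_average(close, 5)[-10:]
slow = moving_average(close, 10)[-10:]

flag, idx = cross(fast, slow)
if flag == CrossFlag.UPCROSS:
    print(f"fast MA crossed above slow MA at offset {idx} of the window")
```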
+
+
+
+ +
+ + + +
+ + + +

+energy_hump(bars, thresh=2) + + +

+ +
+ +

检测bars中是否存在两波以上量能剧烈增加的情形(能量驼峰),返回最后一波距现在的位置及区间长度。

+

注意如果最后一个能量驼峰距现在过远(比如超过10个bar),可能意味着资金已经逃离,能量已经耗尽。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bars[('frame', '<M8[s]'), ('open', '<f4'), ('high', '<f4'), ('low', '<f4'), ('close', '<f4'), ('volume', '<f8'), ('amount', '<f8'), ('factor', '<f4')]

行情数据

required
thresh

最后一波量必须大于20天均量的倍数。

2
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Optional[Tuple[int, int]]

如果不存在能量驼峰的情形,则返回None,否则返回最后一个驼峰离现在的距离及区间长度。

+
+ Source code in omicron/talib/morph.py +
def energy_hump(bars: bars_dtype, thresh=2) -> Optional[Tuple[int, int]]:
+    """检测`bars`中是否存在两波以上量能剧烈增加的情形(能量驼峰),返回最后一波距现在的位置及区间长度。
+
+    注意如果最后一个能量驼峰距现在过远(比如超过10个bar),可能意味着资金已经逃离,能量已经耗尽。
+
+    Args:
+        bars: 行情数据
+        thresh: 最后一波量必须大于20天均量的倍数。
+    Returns:
+        如果不存在能量驼峰的情形,则返回None,否则返回最后一个驼峰离现在的距离及区间长度。
+    """
+    vol = bars["volume"]
+
+    std = np.std(vol[1:] / vol[:-1])
+    pvs = peak_valley_pivots(vol, std, 0)
+
+    frames = bars["frame"]
+
+    pvs[0] = 0
+    pvs[-1] = -1
+    peaks = np.argwhere(pvs == 1)
+
+    mn = np.mean(vol[peaks])
+
+    # 顶点不能缩量到尖峰均值以下
+    real_peaks = np.intersect1d(np.argwhere(vol > mn), peaks)
+
+    if len(real_peaks) < 2:
+        return None
+
+    logger.debug("found %s peaks at %s", len(real_peaks), frames[real_peaks])
+    lp = real_peaks[-1]
+    ma = moving_average(vol, 20)[lp]
+    if vol[lp] < ma * thresh:
+        logger.debug(
+            "vol of last peak[%s] is less than mean_vol(20) * thresh[%s]",
+            vol[lp],
+            ma * thresh,
+        )
+        return None
+
+    return len(bars) - real_peaks[-1], real_peaks[-1] - real_peaks[0]
+
+
+
+ +
+ + + +
+ + + +

+inverse_vcross(f, g) + + +

+ +
+ +

判断序列f是否与序列g存在^型相交。即存在两个交点,第一个交点为向上相交,第二个交点为向下相交。可用于判断见顶特征等场合。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
fnp.array

[description]

required
gnp.array

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple

[description]

+
+ Source code in omicron/talib/morph.py +
def inverse_vcross(f: np.array, g: np.array) -> Tuple:
+    """判断序列f是否与序列g存在^型相交。即存在两个交点,第一个交点为向上相交,第二个交点为向下
+    相交。可用于判断见顶特征等场合。
+
+    Args:
+        f (np.array): [description]
+        g (np.array): [description]
+
+    Returns:
+        Tuple: [description]
+    """
+    indices = np.argwhere(np.diff(np.sign(f - g))).flatten()
+    if len(indices) == 2:
+        idx0, idx1 = indices
+        if f[idx0] < g[idx0] and f[idx1] > g[idx1]:
+            return True, (idx0, idx1)
+
+    return False, (None, None)
+
+
+
+ +
+ + + +
+ + + +

+peaks_and_valleys(ts, up_thresh=None, down_thresh=None) + + +

+ +
+ +

寻找ts中的波峰和波谷,返回数组指示在该位置上是否为波峰或波谷。如果为1,则为波峰;如果为-1,则为波谷。

+

本函数直接使用了zigzag中的peak_valley_pivots. 有很多方法可以实现本功能,比如scipy.signals.find_peaks_cwt, peak_valley_pivots等。本函数更适合金融时间序列,并且使用了cython加速。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.ndarray

时间序列

required
up_threshfloat

波峰的阈值,如果为None,则使用ts变化率的二倍标准差

None
down_threshfloat

波谷的阈值,如果为None,则使用ts变化率的二倍标准差乘以-1

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
np.ndarray

返回数组指示在该位置上是否为波峰或波谷。

+
+ Source code in omicron/talib/morph.py +
def peaks_and_valleys(
+    ts: np.ndarray,
+    up_thresh: Optional[float] = None,
+    down_thresh: Optional[float] = None,
+) -> np.ndarray:
+    """寻找ts中的波峰和波谷,返回数组指示在该位置上是否为波峰或波谷。如果为1,则为波峰;如果为-1,则为波谷。
+
+    本函数直接使用了zigzag中的peak_valley_pivots. 有很多方法可以实现本功能,比如scipy.signals.find_peaks_cwt, peak_valley_pivots等。本函数更适合金融时间序列,并且使用了cython加速。
+
+    Args:
+        ts (np.ndarray): 时间序列
+        up_thresh (float): 波峰的阈值,如果为None,则使用ts变化率的二倍标准差
+        down_thresh (float): 波谷的阈值,如果为None,则使用ts变化率的二倍标准差乘以-1
+
+    Returns:
+        np.ndarray: 返回数组指示在该位置上是否为波峰或波谷。
+    """
+    if ts.dtype != np.float64:
+        ts = ts.astype(np.float64)
+
+    if any([up_thresh is None, down_thresh is None]):
+        change_rate = ts[1:] / ts[:-1] - 1
+        std = np.std(change_rate)
+        up_thresh = up_thresh or 2 * std
+        down_thresh = down_thresh or -2 * std
+
+    return peak_valley_pivots(ts, up_thresh, down_thresh)
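+
A usage sketch with explicit thresholds on synthetic data; the pivot positions depend on the generated series.

```python
import numpy as np

np.random.seed(1978)
close = np.cumprod(1 + np.random.randn(120) * 0.01)

pivots = peaks_and_valleys(close, 0.03, -0.03)   # 1 = peak, -1 = valley, 0 = neither
peak_pos = np.argwhere(pivots == 1).flatten()
valley_pos = np.argwhere(pivots == -1).flatten()
```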
+
+
+
+ +
+ + + +
+ + + +

+plateaus(numbers, min_size, fall_in_range_ratio=0.97) + + +

+ +
+ +

统计数组numbers中的可能存在的平台整理。

+

如果一个数组中存在着子数组,使得其元素与均值的距离落在三个标准差以内的比例超过fall_in_range_ratio的,则认为该子数组满足平台整理。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
numbersndarray

输入数组

required
min_sizeint

平台的最小长度

required
fall_in_range_ratiofloat

超过fall_in_range_ratio比例的元素落在均值的三个标准差以内,就认为该子数组构成一个平台

0.97
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[Tuple]

平台的起始位置和长度的数组

+
+ Source code in omicron/talib/morph.py +
def plateaus(
+    numbers: np.ndarray, min_size: int, fall_in_range_ratio: float = 0.97
+) -> List[Tuple]:
+    """统计数组`numbers`中的可能存在的平台整理。
+
+    如果一个数组中存在着子数组,使得其元素与均值的距离落在三个标准差以内的比例超过`fall_in_range_ratio`的,则认为该子数组满足平台整理。
+
+    Args:
+        numbers: 输入数组
+        min_size: 平台的最小长度
+        fall_in_range_ratio: 超过`fall_in_range_ratio`比例的元素落在均值的三个标准差以内,就认为该子数组构成一个平台
+
+    Returns:
+        平台的起始位置和长度的数组
+    """
+    if numbers.size <= min_size:
+        n = 1
+    else:
+        n = numbers.size // min_size
+
+    clusters = clustering(numbers, n)
+
+    plats = []
+    for (start, length) in clusters:
+        if length < min_size:
+            continue
+
+        y = numbers[start : start + length]
+        mean = np.mean(y)
+        std = np.std(y)
+
+        inrange = len(y[np.abs(y - mean) < 3 * std])
+        ratio = inrange / length
+
+        if ratio >= fall_in_range_ratio:
+            plats.append((start, length))
+
+    return plats
+
+
+
+ +
+ + + +
+ + + +

+rsi_bottom_distance(close, thresh=None) + + +

+ +
+ +

根据给定的收盘价,计算最后一个数据到上一个发出rsi低水平的距离,如果从上一个最低点rsi到最后一个数据并未发出低水平信号,返回最后一个数据到上一个发出最低点rsi的距离。

+

其中close的长度一般不小于60。返回值为距离整数,不满足条件则返回None。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.array

具有时间序列的收盘价

required
threshTuple[float, float])

None适用所有股票,不必更改,也可自行设置。

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

返回最后一个数据到上一个发出rsi低水平的距离。 +如果从上一个最低点rsi到最后一个数据并未发出低水平信号, +返回最后一个数据到上一个发出最低点rsi的距离。 +除此之外,返回None。

+
+ Source code in omicron/talib/morph.py +
def rsi_bottom_distance(close: np.array, thresh: Tuple[float, float] = None) -> int:
+    """根据给定的收盘价,计算最后一个数据到上一个发出rsi低水平的距离,
+    如果从上一个最低点rsi到最后一个数据并未发出低水平信号,
+    返回最后一个数据到上一个发出最低点rsi的距离。
+
+    其中close的长度一般不小于60。
+    返回值为距离整数,不满足条件则返回None。
+
+    Args:
+        close (np.array): 具有时间序列的收盘价
+        thresh (Tuple[float, float]) : None适用所有股票,不必更改,也可自行设置。
+
+    Returns:
+        返回最后一个数据到上一个发出rsi低水平的距离。
+        如果从上一个最低点rsi到最后一个数据并未发出低水平信号,
+        返回最后一个数据到上一个发出最低点rsi的距离。
+        除此之外,返回None。"""
+
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    rsi = ta.RSI(close, 6)
+
+    watermarks = rsi_watermarks(close, thresh)
+    if watermarks is not None:
+        low_watermark, _, _ = watermarks
+        pivots = peak_valley_pivots(close, thresh[0], thresh[1])
+        pivots[0], pivots[-1] = 0, 0
+
+        # 谷值RSI<30
+        valley_rsi_index = np.where((rsi < 30) & (pivots == -1))[0]
+
+        # RSI低水平的最大值:低水平*1.01
+        low_rsi_index = np.where(rsi <= low_watermark * 1.01)[0]
+
+        if len(valley_rsi_index) > 0:
+            distance = len(rsi) - 1 - valley_rsi_index[-1]
+            if len(low_rsi_index) > 0:
+                if low_rsi_index[-1] >= valley_rsi_index[-1]:
+                    distance = len(rsi) - 1 - low_rsi_index[-1]
+            return distance
+
+
+
+ +
+ + + +
+ + + +

+rsi_bottom_divergent(close, thresh=None, rsi_limit=30) + + +

+ +
+ +

寻找最近满足条件的rsi底背离。

+

返回最后一个数据到最近底背离发生点的距离;没有满足条件的底背离,返回None。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.array

时间序列收盘价

required
threshTuple[float, float]

请参考peaks_and_valleys

None
rsi_limitfloat

RSI发生底背离时的阈值, 默认值30(20效果更佳,但是检测出来数量太少),即只过滤RSI6<30的局部最低收盘价。

30
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

返回int类型的整数,表示最后一个数据到最近底背离发生点的距离;没有满足条件的底背离,返回None。

+
+ Source code in omicron/talib/morph.py +
def rsi_bottom_divergent(
+    close: np.array, thresh: Tuple[float, float] = None, rsi_limit: float = 30
+) -> int:
+    """寻找最近满足条件的rsi底背离。
+
+    返回最后一个数据到最近底背离发生点的距离;没有满足条件的底背离,返回None。
+
+    Args:
+        close (np.array): 时间序列收盘价
+        thresh (Tuple[float, float]): 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+        rsi_limit (float, optional): RSI发生底背离时的阈值, 默认值30(20效果更佳,但是检测出来数量太少),即只过滤RSI6<30的局部最低收盘价。
+
+    Returns:
+        返回int类型的整数,表示最后一个数据到最近底背离发生点的距离;没有满足条件的底背离,返回None。
+    """
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+    rsi = ta.RSI(close, 6)
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    pivots = peak_valley_pivots(close, thresh[0], thresh[1])
+    pivots[0], pivots[-1] = 0, 0
+
+    length = len(close)
+    valley_index = np.where((pivots == -1) & (rsi <= rsi_limit))[0]
+
+    if len(valley_index) >= 2:
+        if (close[valley_index[-1]] < close[valley_index[-2]]) and (
+            rsi[valley_index[-1]] > rsi[valley_index[-2]]
+        ):
+            bottom_dev_distance = length - 1 - valley_index[-1]
+
+            return bottom_dev_distance
+
+
+
+ +
+ + + +
+ + + +

+rsi_predict_price(close, thresh=None) + + +

+ +
+ +

给定一段行情,根据最近的两个RSI的极小值和极大值预测下一个周期可能达到的最低价格和最高价格。

+

其原理是,以预测最近的两个最高价和最低价,求出其相对应的RSI值,求出最高价和最低价RSI的均值, +若只有一个则取最近的一个。再由RSI公式,反推价格。此时返回值为(None, float),即只有最高价,没有最低价。反之亦然。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.ndarray

具有时间序列的收盘价

required
threshTuple[float, float])

请参考peaks_and_valleys

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[float, float]

返回数组[predicted_low_price, predicted_high_price], 数组第一个值为利用达到之前最低收盘价的RSI预测的最低价。 +第二个值为利用达到之前最高收盘价的RSI预测的最高价。

+
+ Source code in omicron/talib/morph.py +
def rsi_predict_price(
+    close: np.ndarray, thresh: Tuple[float, float] = None
+) -> Tuple[float, float]:
+    """给定一段行情,根据最近的两个RSI的极小值和极大值预测下一个周期可能达到的最低价格和最高价格。
+
+    其原理是,以预测最近的两个最高价和最低价,求出其相对应的RSI值,求出最高价和最低价RSI的均值,
+    若只有一个则取最近的一个。再由RSI公式,反推价格。此时返回值为(None, float),即只有最高价,没有最低价。反之亦然。
+
+    Args:
+        close (np.ndarray): 具有时间序列的收盘价
+        thresh (Tuple[float, float]) : 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+
+    Returns:
+        返回数组[predicted_low_price, predicted_high_price], 数组第一个值为利用达到之前最低收盘价的RSI预测的最低价。
+        第二个值为利用达到之前最高收盘价的RSI预测的最高价。
+    """
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+
+    valley_rsi, peak_rsi, _ = rsi_watermarks(close, thresh=thresh)
+    pivot = peak_valley_pivots(close, thresh[0], thresh[1])
+    pivot[0], pivot[-1] = 0, 0  # 掐头去尾
+
+    price_change = pd.Series(close).diff(1).values
+    ave_price_change = (abs(price_change)[-6:].mean()) * 5
+    ave_price_raise = (np.maximum(price_change, 0)[-6:].mean()) * 5
+
+    if valley_rsi is not None:
+        predicted_low_change = (ave_price_change) - ave_price_raise / (
+            0.01 * valley_rsi
+        )
+        if predicted_low_change > 0:
+            predicted_low_change = 0
+        predicted_low_price = close[-1] + predicted_low_change
+    else:
+        predicted_low_price = None
+
+    if peak_rsi is not None:
+        predicted_high_change = (ave_price_raise - ave_price_change) / (
+            0.01 * peak_rsi - 1
+        ) - ave_price_change
+        if predicted_high_change < 0:
+            predicted_high_change = 0
+        predicted_high_price = close[-1] + predicted_high_change
+    else:
+        predicted_high_price = None
+
+    return predicted_low_price, predicted_high_price
+
+
+
+ +
+ + + +
+ + + +

+rsi_top_distance(close, thresh=None) + + +

+ +
+ +

根据给定的收盘价,计算最后一个数据到上一个发出rsi高水平的距离,如果从上一个最高点rsi到最后一个数据并未发出高水平信号,返回最后一个数据到上一个发出最高点rsi的距离。

+

其中close的长度一般不小于60。返回值为距离整数,不满足条件则返回None。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.array

具有时间序列的收盘价

required
threshTuple[float, float])

None适用所有股票,不必更改,也可自行设置。

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

返回最后一个数据到上一个发出rsi高水平的距离。 +如果从上一个最高点rsi到最后一个数据并未发出高水平信号, +返回最后一个数据到上一个发出最高点rsi的距离。 +除此之外,返回None。

+
+ Source code in omicron/talib/morph.py +
def rsi_top_distance(close: np.array, thresh: Tuple[float, float] = None) -> int:
+    """根据给定的收盘价,计算最后一个数据到上一个发出rsi高水平的距离,
+    如果从上一个最高点rsi到最后一个数据并未发出高水平信号,
+    返回最后一个数据到上一个发出最高点rsi的距离。
+
+    其中close的长度一般不小于60。
+    返回值为距离整数,不满足条件则返回None。
+
+    Args:
+        close (np.array): 具有时间序列的收盘价
+        thresh (Tuple[float, float]) : None适用所有股票,不必更改,也可自行设置。
+
+    Returns:
+        返回最后一个数据到上一个发出rsi高水平的距离。
+        如果从上一个最高点rsi到最后一个数据并未发出高水平信号,
+        返回最后一个数据到上一个发出最高点rsi的距离。
+        除此之外,返回None。"""
+
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    rsi = ta.RSI(close, 6)
+
+    watermarks = rsi_watermarks(close, thresh)
+    if watermarks is not None:
+        _, high_watermark, _ = watermarks
+        pivots = peak_valley_pivots(close, thresh[0], thresh[1])
+        pivots[0], pivots[-1] = 0, 0
+
+        # 峰值RSI>70
+        peak_rsi_index = np.where((rsi > 70) & (pivots == 1))[0]
+
+        # RSI高水平的最小值:高水平*0.99
+        high_rsi_index = np.where(rsi >= high_watermark * 0.99)[0]
+
+        if len(peak_rsi_index) > 0:
+            distance = len(rsi) - 1 - peak_rsi_index[-1]
+            if len(high_rsi_index) > 0:
+                if high_rsi_index[-1] >= peak_rsi_index[-1]:
+                    distance = len(rsi) - 1 - high_rsi_index[-1]
+            return distance
+
+
+
+ +
+ + + +
+ + + +

+rsi_top_divergent(close, thresh=None, rsi_limit=70) + + +

+ +
+ +

寻找最近满足条件的rsi顶背离。

+

返回最后一个数据到最近顶背离发生点的距离;没有满足条件的顶背离,返回None。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.array

时间序列收盘价

required
threshTuple[float, float]

请参考peaks_and_valleys

None
rsi_limitfloat

RSI发生顶背离时的阈值, 默认值70(80效果更佳,但是检测出来数量太少),即只过滤RSI6>70的局部最高收盘价。

70
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[int, int]

返回int类型的整数,表示最后一个数据到最近顶背离发生点的距离;没有满足条件的顶背离,返回None。

+
+ Source code in omicron/talib/morph.py +
def rsi_top_divergent(
+    close: np.array, thresh: Tuple[float, float] = None, rsi_limit: float = 70
+) -> Tuple[int, int]:
+    """寻找最近满足条件的rsi顶背离。
+
+    返回最后一个数据到最近顶背离发生点的距离;没有满足条件的顶背离,返回None。
+
+    Args:
+        close (np.array): 时间序列收盘价
+        thresh (Tuple[float, float]): 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+        rsi_limit (float, optional): RSI发生顶背离时的阈值, 默认值70(80效果更佳,但是检测出来数量太少),即只过滤RSI6>70的局部最高收盘价。
+
+    Returns:
+        返回int类型的整数,表示最后一个数据到最近顶背离发生点的距离;没有满足条件的顶背离,返回None。
+    """
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+    rsi = ta.RSI(close, 6)
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    pivots = peak_valley_pivots(close, thresh[0], thresh[1])
+    pivots[0], pivots[-1] = 0, 0
+
+    length = len(close)
+    peak_index = np.where((pivots == 1) & (rsi >= rsi_limit))[0]
+
+    if len(peak_index) >= 2:
+        if (close[peak_index[-1]] > close[peak_index[-2]]) and (
+            rsi[peak_index[-1]] < rsi[peak_index[-2]]
+        ):
+            top_dev_distance = length - 1 - peak_index[-1]
+
+            return top_dev_distance
+
+
+
+ +
+ + + +
+ + + +

+rsi_watermarks(close, thresh=None) + + +

+ +
+ +

给定一段行情数据和用以检测顶和底的阈值,返回该段行情中,谷和峰处RSI均值,最后一个RSI6值。

+

其中close的长度一般不小于60,不大于120。返回值中,一个为low_wartermark(谷底处RSI值), +一个为high_wartermark(高峰处RSI值),一个为RSI6的最后一个值,用以对比前两个警戒值。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.array

具有时间序列的收盘价

required
threshTuple[float, float])

None适用所有股票,不必更改,也可自行设置。

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[float, float, float]

返回数组[low_watermark, high_watermark, rsi[-1]], 第一个为最近两个最低收盘价的RSI均值, 第二个为最近两个最高收盘价的RSI均值。 +若传入收盘价只有一个最值,只返回一个。没有最值,则返回None, 第三个为实际的最后RSI6的值。

+
+ Source code in omicron/talib/morph.py +
def rsi_watermarks(
+    close: np.array, thresh: Tuple[float, float] = None
+) -> Tuple[float, float, float]:
+    """给定一段行情数据和用以检测顶和底的阈值,返回该段行情中,谷和峰处RSI均值,最后一个RSI6值。
+
+    其中close的长度一般不小于60,不大于120。返回值中,一个为low_wartermark(谷底处RSI值),
+    一个为high_wartermark(高峰处RSI值),一个为RSI6的最后一个值,用以对比前两个警戒值。
+
+    Args:
+        close (np.array): 具有时间序列的收盘价
+        thresh (Tuple[float, float]) : None适用所有股票,不必更改,也可自行设置。
+
+    Returns:
+        返回数组[low_watermark, high_watermark, rsi[-1]], 第一个为最近两个最低收盘价的RSI均值, 第二个为最近两个最高收盘价的RSI均值。
+        若传入收盘价只有一个最值,只返回一个。没有最值,则返回None, 第三个为实际的最后RSI6的值。
+    """
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+    rsi = ta.RSI(close, 6)
+
+    pivots = peak_valley_pivots(close, thresh[0], thresh[1])
+    pivots[0], pivots[-1] = 0, 0  # 掐头去尾
+
+    # 峰值RSI>70; 谷处的RSI<30;
+    peaks_rsi_index = np.where((rsi > 70) & (pivots == 1))[0]
+    valleys_rsi_index = np.where((rsi < 30) & (pivots == -1))[0]
+
+    if len(peaks_rsi_index) == 0:
+        high_watermark = None
+    elif len(peaks_rsi_index) == 1:
+        high_watermark = rsi[peaks_rsi_index[0]]
+    else:  # 有两个以上的峰,通过最近的两个峰均值来确定走势
+        high_watermark = np.nanmean(rsi[peaks_rsi_index[-2:]])
+
+    if len(valleys_rsi_index) == 0:
+        low_watermark = None
+    elif len(valleys_rsi_index) == 1:
+        low_watermark = rsi[valleys_rsi_index[0]]
+    else:  # 有两个以上的峰,通过最近的两个峰来确定走势
+        low_watermark = np.nanmean(rsi[valleys_rsi_index[-2:]])
+
+    return low_watermark, high_watermark, rsi[-1]
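+
A hedged usage sketch: it requires the TA-Lib binding used by this module and at least 60 closes; `closes` is assumed to be such an array and is not defined here.

```python
# `closes` is assumed: a float64 array of 60~120 recent closing prices.
low_wm, high_wm, last_rsi = rsi_watermarks(closes)

if high_wm is not None and last_rsi >= high_wm * 0.99:
    print("RSI(6) is back near its recent peak watermark")
if low_wm is not None and last_rsi <= low_wm * 1.01:
    print("RSI(6) is back near its recent valley watermark")
```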
+
+
+
+ +
+ + + +
+ + + +

+support_resist_lines(ts, upthres=None, downthres=None) + + +

+ +
+ +

计算时间序列的支撑线和阻力线

+

使用最近的两个高点连接成阻力线,两个低点连接成支撑线。

+ +

Examples:

+

    def show_support_resist_lines(ts):
+        import plotly.graph_objects as go
+
+        fig = go.Figure()
+
+        support, resist, x_start = support_resist_lines(ts, 0.03, -0.03)
+        fig.add_trace(go.Scatter(x=np.arange(len(ts)), y=ts))
+
+        x = np.arange(len(ts))[x_start:]
+        fig.add_trace(go.Line(x=x, y = support(x)))
+        fig.add_trace(go.Line(x=x, y = resist(x)))
+
+        fig.show()
+
+    np.random.seed(1978)
+    X = np.cumprod(1 + np.random.randn(100) * 0.01)
+    show_support_resist_lines(X)
+
+the above code will show this ![](https://images.jieyu.ai/images/202204/support_resist.png)

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tsnp.ndarray

时间序列

required
upthresfloat

请参考peaks_and_valleys

None
downthresfloat

请参考peaks_and_valleys

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[Callable, Callable, numpy.ndarray]

返回支撑线和阻力线的计算函数及起始点坐标,如果没有支撑线或阻力线,则返回None

+
+ Source code in omicron/talib/morph.py +
def support_resist_lines(
+    ts: np.ndarray, upthres: float = None, downthres: float = None
+) -> Tuple[Callable, Callable, np.ndarray]:
+    """计算时间序列的支撑线和阻力线
+
+    使用最近的两个高点连接成阻力线,两个低点连接成支撑线。
+
+    Examples:
+        ```python
+            def show_support_resist_lines(ts):
+                import plotly.graph_objects as go
+
+                fig = go.Figure()
+
+                support, resist, x_start = support_resist_lines(ts, 0.03, -0.03)
+                fig.add_trace(go.Scatter(x=np.arange(len(ts)), y=ts))
+
+                x = np.arange(len(ts))[x_start:]
+                fig.add_trace(go.Line(x=x, y = support(x)))
+                fig.add_trace(go.Line(x=x, y = resist(x)))
+
+                fig.show()
+
+            np.random.seed(1978)
+            X = np.cumprod(1 + np.random.randn(100) * 0.01)
+            show_support_resist_lines(X)
+        ```
+        the above code will show this ![](https://images.jieyu.ai/images/202204/support_resist.png)
+
+    Args:
+        ts (np.ndarray): 时间序列
+        upthres (float, optional): 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+        downthres (float, optional): 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+
+    Returns:
+        返回支撑线和阻力线的计算函数及起始点坐标,如果没有支撑线或阻力线,则返回None
+    """
+    if ts.dtype != np.float64:
+        ts = ts.astype(np.float64)
+
+    pivots = peaks_and_valleys(ts, upthres, downthres)
+    pivots[0] = 0
+    pivots[-1] = 0
+
+    arg_max = np.argwhere(pivots == 1).flatten()
+    arg_min = np.argwhere(pivots == -1).flatten()
+
+    resist = None
+    support = None
+
+    if len(arg_max) >= 2:
+        arg_max = arg_max[-2:]
+        y = ts[arg_max]
+        coeff = np.polyfit(arg_max, y, deg=1)
+
+        resist = np.poly1d(coeff)
+
+    if len(arg_min) >= 2:
+        arg_min = arg_min[-2:]
+        y = ts[arg_min]
+        coeff = np.polyfit(arg_min, y, deg=1)
+
+        support = np.poly1d(coeff)
+
+    return support, resist, np.min([*arg_min, *arg_max])
+
+
+
+ +
+ + + +
+ + + +

+valley_detect(close, thresh=(0.05, -0.02)) + + +

+ +
+ +

给定一段行情数据和用以检测近期已发生反转的最低点,返回该段行情中,最低点到最后一个数据的距离和收益率数组, +如果给定行情中未找到满足参数的最低点,则返回两个空值数组。

+

其中bars的长度一般不小于60,不大于120。此函数采用了zigzag中的谷峰检测方法,其中参数默认(0.05,-0.02), +此参数对所有股票数据都适用。若满足参数,返回值中,距离为大于0的整数,收益率是0~1的小数。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
closenp.ndarray

具有时间序列的收盘价

required
threshTuple[float, float])

请参考peaks_and_valleys

(0.05, -0.02)
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

返回该段行情中,最低点到最后一个数据的距离和收益率数组, +如果给定行情中未找到满足参数的最低点,则返回两个空值数组。

+
+ Source code in omicron/talib/morph.py +
def valley_detect(
+    close: np.ndarray, thresh: Tuple[float, float] = (0.05, -0.02)
+) -> int:
+    """给定一段行情数据和用以检测近期已发生反转的最低点,返回该段行情中,最低点到最后一个数据的距离和收益率数组,
+    如果给定行情中未找到满足参数的最低点,则返回两个空值数组。
+
+    其中bars的长度一般不小于60,不大于120。此函数采用了zigzag中的谷峰检测方法,其中参数默认(0.05,-0.02),
+    此参数对所有股票数据都适用。若满足参数,返回值中,距离为大于0的整数,收益率是0~1的小数。
+
+    Args:
+        close (np.ndarray): 具有时间序列的收盘价
+        thresh (Tuple[float, float]) : 请参考[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys]
+
+    Returns:
+        返回该段行情中,最低点到最后一个数据的距离和收益率数组,
+        如果给定行情中未找到满足参数的最低点,则返回两个空值数组。
+    """
+
+    assert len(close) >= 60, "must provide an array with at least 60 length!"
+
+    if close.dtype != np.float64:
+        close = close.astype(np.float64)
+
+    if thresh is None:
+        std = np.std(close[-59:] / close[-60:-1] - 1)
+        thresh = (2 * std, -2 * std)
+
+    pivots = peak_valley_pivots(close, thresh[0], thresh[1])
+    flags = pivots[pivots != 0]
+    increased = None
+    lowest_distance = None
+    if (flags[-2] == -1) and (flags[-1] == 1):
+        length = len(pivots)
+        valley_index = np.where(pivots == -1)[0]
+        increased = (close[-1] - close[valley_index[-1]]) / close[valley_index[-1]]
+        lowest_distance = int(length - 1 - valley_index[-1])
+
+    return lowest_distance, increased
+
+
+
+ +
+ + + +
+ + + +

+vcross(f, g) + + +

+ +
+ +

判断序列f是否与g存在类型v型的相交。即存在两个交点,第一个交点为向下相交,第二个交点为向上相交。一般反映为洗盘拉升的特征。

+ +

Examples:

+
>>> f = np.array([ 3 * i ** 2 - 20 * i +  2 for i in range(10)])
+>>> g = np.array([ i - 5 for i in range(10)])
+>>> flag, indices = vcross(f, g)
+>>> assert flag is True
+>>> assert indices[0] == 0
+>>> assert indices[1] == 6
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
f<built-in function array>

first sequence

required
g<built-in function array>

the second sequence

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple

(flag, indices), 其中flag取值为True时,存在vcross,indices为交点的索引。

+
+ Source code in omicron/talib/morph.py +
def vcross(f: np.array, g: np.array) -> Tuple:
+    """判断序列f是否与g存在类型v型的相交。即存在两个交点,第一个交点为向下相交,第二个交点为向上
+    相交。一般反映为洗盘拉升的特征。
+
+    Examples:
+
+        >>> f = np.array([ 3 * i ** 2 - 20 * i +  2 for i in range(10)])
+        >>> g = np.array([ i - 5 for i in range(10)])
+        >>> flag, indices = vcross(f, g)
+        >>> assert flag is True
+        >>> assert indices[0] == 0
+        >>> assert indices[1] == 6
+
+    Args:
+        f: first sequence
+        g: the second sequence
+
+    Returns:
+        (flag, indices), 其中flag取值为True时,存在vcross,indices为交点的索引。
+    """
+    indices = np.argwhere(np.diff(np.sign(f - g))).flatten()
+    if len(indices) == 2:
+        idx0, idx1 = indices
+        if f[idx0] > g[idx0] and f[idx1] < g[idx1]:
+            return True, (idx0, idx1)
+
+    return False, (None, None)
\ No newline at end of file
diff --git a/2.0.0/api/timeframe/index.html b/2.0.0/api/timeframe/index.html
new file mode 100644
index 00000000..61d14440
--- /dev/null
+++ b/2.0.0/api/timeframe/index.html
@@ -0,0 +1,6087 @@
+ timeframe - Omicron

timeframe

+ +
+ + +
+ + + + +
+ + + + + + + + + + +
+ + + +

+ +TimeFrame + + + +

+ +
+ + +
+ Source code in omicron/models/timeframe.py +
class TimeFrame:
+    minute_level_frames = [
+        FrameType.MIN1,
+        FrameType.MIN5,
+        FrameType.MIN15,
+        FrameType.MIN30,
+        FrameType.MIN60,
+    ]
+    day_level_frames = [
+        FrameType.DAY,
+        FrameType.WEEK,
+        FrameType.MONTH,
+        FrameType.QUARTER,
+        FrameType.YEAR,
+    ]
+
+    ticks = {
+        FrameType.MIN1: [i for i in itertools.chain(range(571, 691), range(781, 901))],
+        FrameType.MIN5: [
+            i for i in itertools.chain(range(575, 695, 5), range(785, 905, 5))
+        ],
+        FrameType.MIN15: [
+            i for i in itertools.chain(range(585, 705, 15), range(795, 915, 15))
+        ],
+        FrameType.MIN30: [
+            int(s[:2]) * 60 + int(s[2:])
+            for s in ["1000", "1030", "1100", "1130", "1330", "1400", "1430", "1500"]
+        ],
+        FrameType.MIN60: [
+            int(s[:2]) * 60 + int(s[2:]) for s in ["1030", "1130", "1400", "1500"]
+        ],
+    }
+    day_frames = None
+    week_frames = None
+    month_frames = None
+    quarter_frames = None
+    year_frames = None
+
+    @classmethod
+    def service_degrade(cls):
+        """当cache中不存在日历时,启用随omicron版本一起发行时自带的日历。
+
+        注意:随omicron版本一起发行时自带的日历很可能不是最新的,并且可能包含错误。比如,存在这样的情况,在本版本的omicron发行时,日历更新到了2021年12月31日,在这之前的日历都是准确的,但在此之后的日历,则有可能出现错误。因此,只应该在特殊的情况下(比如测试)调用此方法,以获得一个降级的服务。
+        """
+        _dir = os.path.dirname(__file__)
+        file = os.path.join(_dir, "..", "config", "calendar.json")
+        with open(file, "r") as f:
+            data = json.load(f)
+            for k, v in data.items():
+                setattr(cls, k, np.array(v))
+
+    @classmethod
+    async def _load_calendar(cls):
+        """从数据缓存中加载更新日历"""
+        from omicron import cache
+
+        names = [
+            "day_frames",
+            "week_frames",
+            "month_frames",
+            "quarter_frames",
+            "year_frames",
+        ]
+        for name, frame_type in zip(names, cls.day_level_frames):
+            key = f"calendar:{frame_type.value}"
+            result = await cache.security.lrange(key, 0, -1)
+            if result is not None and len(result):
+                frames = [int(x) for x in result]
+                setattr(cls, name, np.array(frames))
+            else:  # pragma: no cover
+                raise DataNotReadyError(f"calendar data is not ready: {name} missed")
+
+    @classmethod
+    async def init(cls):
+        """初始化日历"""
+        await cls._load_calendar()
+
+    @classmethod
+    def int2time(cls, tm: int) -> datetime.datetime:
+        """将整数表示的时间转换为`datetime`类型表示
+
+        examples:
+            >>> TimeFrame.int2time(202005011500)
+            datetime.datetime(2020, 5, 1, 15, 0)
+
+        Args:
+            tm: time in YYYYMMDDHHmm format
+
+        Returns:
+            转换后的时间
+        """
+        s = str(tm)
+        # it's 8 times faster than arrow.get()
+        return datetime.datetime(
+            int(s[:4]), int(s[4:6]), int(s[6:8]), int(s[8:10]), int(s[10:12])
+        )
+
+    @classmethod
+    def time2int(cls, tm: Union[datetime.datetime, Arrow]) -> int:
+        """将时间类型转换为整数类型
+
+        tm可以是Arrow类型,也可以是datetime.datetime或者任何其它类型,只要它有year,month...等
+        属性
+        Examples:
+            >>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15))
+            202005011500
+
+        Args:
+            tm:
+
+        Returns:
+            转换后的整数,比如202005011500
+        """
+        return int(f"{tm.year:04}{tm.month:02}{tm.day:02}{tm.hour:02}{tm.minute:02}")
+
+    @classmethod
+    def date2int(cls, d: Union[datetime.datetime, datetime.date, Arrow]) -> int:
+        """将日期转换为整数表示
+
+        在zillionare中,如果要对时间和日期进行持久化操作,我们一般将其转换为int类型
+
+        Examples:
+            >>> TimeFrame.date2int(datetime.date(2020,5,1))
+            20200501
+
+        Args:
+            d: date
+
+        Returns:
+            日期的整数表示,比如20220211
+        """
+        return int(f"{d.year:04}{d.month:02}{d.day:02}")
+
+    @classmethod
+    def int2date(cls, d: Union[int, str]) -> datetime.date:
+        """将数字表示的日期转换成为日期格式
+
+        Examples:
+            >>> TimeFrame.int2date(20200501)
+            datetime.date(2020, 5, 1)
+
+        Args:
+            d: YYYYMMDD表示的日期
+
+        Returns:
+            转换后的日期
+        """
+        s = str(d)
+        # it's 8 times faster than arrow.get
+        return datetime.date(int(s[:4]), int(s[4:6]), int(s[6:]))
+
+    @classmethod
+    def day_shift(cls, start: datetime.date, offset: int) -> datetime.date:
+        """对指定日期进行前后移位操作
+
+        如果 n == 0,则返回d对应的交易日(如果是非交易日,则返回刚结束的一个交易日)
+        如果 n > 0,则返回d对应的交易日后第 n 个交易日
+        如果 n < 0,则返回d对应的交易日前第 n 个交易日
+
+        Examples:
+            >>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219]
+            >>> TimeFrame.day_shift(datetime.date(2019,12,13), 0)
+            datetime.date(2019, 12, 13)
+
+            >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0)
+            datetime.date(2019, 12, 13)
+
+            >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1)
+            datetime.date(2019, 12, 16)
+
+            >>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1)
+            datetime.date(2019, 12, 16)
+
+        Args:
+            start: the origin day
+            offset: days to shift, can be negative
+
+        Returns:
+            移位后的日期
+        """
+        # accelerated from 0.12 to 0.07, per 10000 loop, type conversion time included
+        start = cls.date2int(start)
+
+        return cls.int2date(ext.shift(cls.day_frames, start, offset))
+
+    @classmethod
+    def week_shift(cls, start: datetime.date, offset: int) -> datetime.date:
+        """对指定日期按周线帧进行前后移位操作
+
+        参考 [omicron.models.timeframe.TimeFrame.day_shift][]
+        Examples:
+            >>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123,20200207, 20200214])
+            >>> moment = arrow.get('2020-1-21').date()
+            >>> TimeFrame.week_shift(moment, 1)
+            datetime.date(2020, 1, 23)
+
+            >>> TimeFrame.week_shift(moment, 0)
+            datetime.date(2020, 1, 17)
+
+            >>> TimeFrame.week_shift(moment, -1)
+            datetime.date(2020, 1, 10)
+
+        Returns:
+            移位后的日期
+        """
+        start = cls.date2int(start)
+        return cls.int2date(ext.shift(cls.week_frames, start, offset))
+
+    @classmethod
+    def month_shift(cls, start: datetime.date, offset: int) -> datetime.date:
+        """求`start`所在的月移位后的frame
+
+        本函数首先将`start`对齐,然后进行移位。
+        Examples:
+            >>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430])
+            >>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0)
+            datetime.date(2015, 1, 30)
+
+            >>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0)
+            datetime.date(2015, 2, 27)
+
+            >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0)
+            datetime.date(2015, 2, 27)
+
+            >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1)
+            datetime.date(2015, 3, 31)
+
+        Returns:
+            移位后的日期
+        """
+        start = cls.date2int(start)
+        return cls.int2date(ext.shift(cls.month_frames, start, offset))
+
+    @classmethod
+    def get_ticks(cls, frame_type: FrameType) -> Union[List, np.array]:
+        """取月线、周线、日线及各分钟线对应的frame
+
+        对分钟线,返回值仅包含时间,不包含日期(均为整数表示)
+
+        Examples:
+            >>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331])
+            >>> TimeFrame.get_ticks(FrameType.MONTH)[:3]
+            array([20050131, 20050228, 20050331])
+
+        Args:
+            frame_type : [description]
+
+        Raises:
+            ValueError: [description]
+
+        Returns:
+            月线、周线、日线及各分钟线对应的frame
+        """
+        if frame_type in cls.minute_level_frames:
+            return cls.ticks[frame_type]
+
+        if frame_type == FrameType.DAY:
+            return cls.day_frames
+        elif frame_type == FrameType.WEEK:
+            return cls.week_frames
+        elif frame_type == FrameType.MONTH:
+            return cls.month_frames
+        else:  # pragma: no cover
+            raise ValueError(f"{frame_type} not supported!")
+
+    @classmethod
+    def shift(
+        cls,
+        moment: Union[Arrow, datetime.date, datetime.datetime],
+        n: int,
+        frame_type: FrameType,
+    ) -> Union[datetime.date, datetime.datetime]:
+        """将指定的moment移动N个`frame_type`位置。
+
+        当N为负数时,意味着向前移动;当N为正数时,意味着向后移动。如果n为零,意味着移动到最接近
+        的一个已结束的frame。
+
+        如果moment没有对齐到frame_type对应的时间,将首先进行对齐。
+
+        See also:
+
+        - [day_shift][omicron.models.timeframe.TimeFrame.day_shift]
+        - [week_shift][omicron.models.timeframe.TimeFrame.week_shift]
+        - [month_shift][omicron.models.timeframe.TimeFrame.month_shift]
+
+        Examples:
+            >>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY)
+            datetime.date(2020, 1, 6)
+
+            >>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30)
+            datetime.datetime(2020, 1, 6, 11, 30)
+
+
+        Args:
+            moment:
+            n:
+            frame_type:
+
+        Returns:
+            移位后的Frame
+        """
+        if frame_type == FrameType.DAY:
+            return cls.day_shift(moment, n)
+
+        elif frame_type == FrameType.WEEK:
+            return cls.week_shift(moment, n)
+        elif frame_type == FrameType.MONTH:
+            return cls.month_shift(moment, n)
+        elif frame_type in [
+            FrameType.MIN1,
+            FrameType.MIN5,
+            FrameType.MIN15,
+            FrameType.MIN30,
+            FrameType.MIN60,
+        ]:
+            tm = moment.hour * 60 + moment.minute
+
+            new_tick_pos = cls.ticks[frame_type].index(tm) + n
+            days = new_tick_pos // len(cls.ticks[frame_type])
+            min_part = new_tick_pos % len(cls.ticks[frame_type])
+
+            date_part = cls.day_shift(moment.date(), days)
+            minutes = cls.ticks[frame_type][min_part]
+            h, m = minutes // 60, minutes % 60
+            return datetime.datetime(
+                date_part.year,
+                date_part.month,
+                date_part.day,
+                h,
+                m,
+                tzinfo=moment.tzinfo,
+            )
+        else:  # pragma: no cover
+            raise ValueError(f"{frame_type} is not supported.")
+
+    @classmethod
+    def count_day_frames(
+        cls, start: Union[datetime.date, Arrow], end: Union[datetime.date, Arrow]
+    ) -> int:
+        """calc trade days between start and end in close-to-close way.
+
+        if start == end, this will return 1. Both start/end will be aligned to open
+        trade day before calculation.
+
+        Examples:
+            >>> start = datetime.date(2019, 12, 21)
+            >>> end = datetime.date(2019, 12, 21)
+            >>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225]
+            >>> TimeFrame.count_day_frames(start, end)
+            1
+
+            >>> # non-trade days are removed
+            >>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205]
+            >>> start = datetime.date(2020, 1, 23)
+            >>> end = datetime.date(2020, 2, 4)
+            >>> TimeFrame.count_day_frames(start, end)
+            3
+
+        args:
+            start:
+            end:
+        returns:
+            count of days
+        """
+        start = cls.date2int(start)
+        end = cls.date2int(end)
+        return int(ext.count_between(cls.day_frames, start, end))
+
+    @classmethod
+    def count_week_frames(cls, start: datetime.date, end: datetime.date) -> int:
+        """
+        calc trade weeks between start and end in close-to-close way. Both start and
+        end will be aligned to open trade day before calculation. After that, if start
+         == end, this will return 1
+
+        for examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+        args:
+            start:
+            end:
+        returns:
+            count of weeks
+        """
+        start = cls.date2int(start)
+        end = cls.date2int(end)
+        return int(ext.count_between(cls.week_frames, start, end))
+
+    @classmethod
+    def count_month_frames(cls, start: datetime.date, end: datetime.date) -> int:
+        """calc trade months between start and end date in close-to-close way
+        Both start and end will be aligned to open trade day before calculation. After
+        that, if start == end, this will return 1.
+
+        For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+
+        Args:
+            start:
+            end:
+
+        Returns:
+            months between start and end
+        """
+        start = cls.date2int(start)
+        end = cls.date2int(end)
+
+        return int(ext.count_between(cls.month_frames, start, end))
+
+    @classmethod
+    def count_quarter_frames(cls, start: datetime.date, end: datetime.date) -> int:
+        """calc trade quarters between start and end date in close-to-close way
+        Both start and end will be aligned to open trade day before calculation. After
+        that, if start == end, this will return 1.
+
+        For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+
+        Args:
+            start (datetime.date): [description]
+            end (datetime.date): [description]
+
+        Returns:
+            quarters between start and end
+        """
+        start = cls.date2int(start)
+        end = cls.date2int(end)
+
+        return int(ext.count_between(cls.quarter_frames, start, end))
+
+    @classmethod
+    def count_year_frames(cls, start: datetime.date, end: datetime.date) -> int:
+        """calc trade years between start and end date in close-to-close way
+        Both start and end will be aligned to open trade day before calculation. After
+        that, if start == end, this will return 1.
+
+        For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+
+        Args:
+            start (datetime.date): [description]
+            end (datetime.date): [description]
+
+        Returns:
+            years between start and end
+        """
+        start = cls.date2int(start)
+        end = cls.date2int(end)
+
+        return int(ext.count_between(cls.year_frames, start, end))
+
+    @classmethod
+    def count_frames(
+        cls,
+        start: Union[datetime.date, datetime.datetime, Arrow],
+        end: Union[datetime.date, datetime.datetime, Arrow],
+        frame_type,
+    ) -> int:
+        """计算start与end之间有多少个周期为frame_type的frames
+
+        See also:
+
+        - [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+        - [count_week_frames][omicron.models.timeframe.TimeFrame.count_week_frames]
+        - [count_month_frames][omicron.models.timeframe.TimeFrame.count_month_frames]
+
+        Args:
+            start : start frame
+            end : end frame
+            frame_type : the type of frame
+
+        Raises:
+            ValueError: 如果frame_type不支持,则会抛出此异常。
+
+        Returns:
+            从start到end的帧数
+        """
+        if frame_type == FrameType.DAY:
+            return cls.count_day_frames(start, end)
+        elif frame_type == FrameType.WEEK:
+            return cls.count_week_frames(start, end)
+        elif frame_type == FrameType.MONTH:
+            return cls.count_month_frames(start, end)
+        elif frame_type == FrameType.QUARTER:
+            return cls.count_quarter_frames(start, end)
+        elif frame_type == FrameType.YEAR:
+            return cls.count_year_frames(start, end)
+        elif frame_type in [
+            FrameType.MIN1,
+            FrameType.MIN5,
+            FrameType.MIN15,
+            FrameType.MIN30,
+            FrameType.MIN60,
+        ]:
+            tm_start = start.hour * 60 + start.minute
+            tm_end = end.hour * 60 + end.minute
+            days = cls.count_day_frames(start.date(), end.date()) - 1
+
+            tm_start_pos = cls.ticks[frame_type].index(tm_start)
+            tm_end_pos = cls.ticks[frame_type].index(tm_end)
+
+            min_bars = tm_end_pos - tm_start_pos + 1
+
+            return days * len(cls.ticks[frame_type]) + min_bars
+        else:  # pragma: no cover
+            raise ValueError(f"{frame_type} is not supported yet")
+
+    @classmethod
+    def is_trade_day(cls, dt: Union[datetime.date, datetime.datetime, Arrow]) -> bool:
+        """判断`dt`是否为交易日
+
+        Examples:
+            >>> TimeFrame.is_trade_day(arrow.get('2020-1-1'))
+            False
+
+        Args:
+            dt :
+
+        Returns:
+            bool
+        """
+        return cls.date2int(dt) in cls.day_frames
+
+    @classmethod
+    def is_open_time(cls, tm: Union[datetime.datetime, Arrow] = None) -> bool:
+        """判断`tm`指定的时间是否处在交易时间段。
+
+        交易时间段是指集合竞价时间段之外的开盘时间
+
+        Examples:
+            >>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])
+            >>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive)
+            False
+            >>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive)
+            True
+
+        Args:
+            tm : [description]. Defaults to None.
+
+        Returns:
+            bool
+        """
+        tm = tm or arrow.now()
+
+        if not cls.is_trade_day(tm):
+            return False
+
+        tick = tm.hour * 60 + tm.minute
+        return tick in cls.ticks[FrameType.MIN1]
+
+    @classmethod
+    def is_opening_call_auction_time(
+        cls, tm: Union[Arrow, datetime.datetime] = None
+    ) -> bool:
+        """判断`tm`指定的时间是否为开盘集合竞价时间
+
+        Args:
+            tm : [description]. Defaults to None.
+
+        Returns:
+            bool
+        """
+        if tm is None:
+            tm = cls.now()
+
+        if not cls.is_trade_day(tm):
+            return False
+
+        minutes = tm.hour * 60 + tm.minute
+        return 9 * 60 + 15 < minutes <= 9 * 60 + 25
+
+    @classmethod
+    def is_closing_call_auction_time(
+        cls, tm: Union[datetime.datetime, Arrow] = None
+    ) -> bool:
+        """判断`tm`指定的时间是否为收盘集合竞价时间
+
+        Fixme:
+            此处实现有误,收盘集合竞价时间应该还包含上午收盘时间
+
+        Args:
+            tm : [description]. Defaults to None.
+
+        Returns:
+            bool
+        """
+        tm = tm or cls.now()
+
+        if not cls.is_trade_day(tm):
+            return False
+
+        minutes = tm.hour * 60 + tm.minute
+        return 15 * 60 - 3 <= minutes < 15 * 60
+
+    @classmethod
+    def floor(cls, moment: Frame, frame_type: FrameType) -> Frame:
+        """求`moment`在指定的`frame_type`中的下界
+
+        比如,如果`moment`为10:37,则当`frame_type`为30分钟时,对应的下界为10:30
+
+        Examples:
+            >>> # 如果moment为日期,则当成已收盘处理
+            >>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111])
+            >>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY)
+            datetime.date(2005, 1, 7)
+
+            >>> # moment指定的时间还未收盘,floor到上一个交易日
+            >>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY)
+            datetime.date(2005, 1, 6)
+
+            >>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK)
+            datetime.date(2005, 1, 7)
+
+            >>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH)
+            datetime.date(2005, 1, 31)
+
+            >>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30)
+            datetime.datetime(2005, 1, 5, 14, 30)
+
+            >>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1)
+            datetime.datetime(2005, 1, 5, 14, 59)
+
+            >>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1)
+            datetime.datetime(2005, 1, 5, 14, 59)
+
+        Args:
+            moment:
+            frame_type:
+
+        Returns:
+            `moment`在指定的`frame_type`中的下界
+        """
+        if frame_type in cls.minute_level_frames:
+            tm, day_offset = cls.minute_frames_floor(
+                cls.ticks[frame_type], moment.hour * 60 + moment.minute
+            )
+            h, m = tm // 60, tm % 60
+            if cls.day_shift(moment, 0) < moment.date() or day_offset == -1:
+                h = 15
+                m = 0
+                new_day = cls.day_shift(moment, day_offset)
+            else:
+                new_day = moment.date()
+            return datetime.datetime(new_day.year, new_day.month, new_day.day, h, m)
+
+        if type(moment) == datetime.date:
+            moment = datetime.datetime(moment.year, moment.month, moment.day, 15)
+
+        # 如果是交易日,但还未收盘
+        if (
+            cls.date2int(moment) in cls.day_frames
+            and moment.hour * 60 + moment.minute < 900
+        ):
+            moment = cls.day_shift(moment, -1)
+
+        day = cls.date2int(moment)
+        if frame_type == FrameType.DAY:
+            arr = cls.day_frames
+        elif frame_type == FrameType.WEEK:
+            arr = cls.week_frames
+        elif frame_type == FrameType.MONTH:
+            arr = cls.month_frames
+        else:  # pragma: no cover
+            raise ValueError(f"frame type {frame_type} not supported.")
+
+        floored = ext.floor(arr, day)
+        return cls.int2date(floored)
+
+    @classmethod
+    def last_min_frame(
+        cls, day: Union[str, Arrow, datetime.date], frame_type: FrameType
+    ) -> Union[datetime.date, datetime.datetime]:
+        """获取`day`日周期为`frame_type`的结束frame。
+
+        Example:
+            >>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30)
+            datetime.datetime(2020, 1, 3, 15, 0)
+
+        Args:
+            day:
+            frame_type:
+
+        Returns:
+            `day`日周期为`frame_type`的结束frame
+        """
+        if isinstance(day, str):
+            day = cls.date2int(arrow.get(day).date())
+        elif isinstance(day, arrow.Arrow) or isinstance(day, datetime.datetime):
+            day = cls.date2int(day.date())
+        elif isinstance(day, datetime.date):
+            day = cls.date2int(day)
+        else:
+            raise TypeError(f"{type(day)} is not supported.")
+
+        if frame_type in cls.minute_level_frames:
+            last_close_day = cls.day_frames[cls.day_frames <= day][-1]
+            day = cls.int2date(last_close_day)
+            return datetime.datetime(day.year, day.month, day.day, hour=15, minute=0)
+        else:  # pragma: no cover
+            raise ValueError(f"{frame_type} not supported")
+
+    @classmethod
+    def frame_len(cls, frame_type: FrameType) -> int:
+        """返回以分钟为单位的frame长度。
+
+        对日线以上级别没有意义,但会返回240
+
+        Examples:
+            >>> TimeFrame.frame_len(FrameType.MIN5)
+            5
+
+        Args:
+            frame_type:
+
+        Returns:
+            返回以分钟为单位的frame长度。
+
+        """
+
+        if frame_type == FrameType.MIN1:
+            return 1
+        elif frame_type == FrameType.MIN5:
+            return 5
+        elif frame_type == FrameType.MIN15:
+            return 15
+        elif frame_type == FrameType.MIN30:
+            return 30
+        elif frame_type == FrameType.MIN60:
+            return 60
+        else:
+            return 240
+
+    @classmethod
+    def first_min_frame(
+        cls, day: Union[str, Arrow, Frame], frame_type: FrameType
+    ) -> Union[datetime.date, datetime.datetime]:
+        """获取指定日期类型为`frame_type`的`frame`。
+
+        Examples:
+            >>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103])
+            >>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1)
+            datetime.datetime(2019, 12, 31, 9, 31)
+
+        Args:
+            day: which day?
+            frame_type: which frame_type?
+
+        Returns:
+            `day`当日的第一帧
+        """
+        day = cls.date2int(arrow.get(day).date())
+
+        if frame_type == FrameType.MIN1:
+            floor_day = cls.day_frames[cls.day_frames <= day][-1]
+            day = cls.int2date(floor_day)
+            return datetime.datetime(day.year, day.month, day.day, hour=9, minute=31)
+        elif frame_type == FrameType.MIN5:
+            floor_day = cls.day_frames[cls.day_frames <= day][-1]
+            day = cls.int2date(floor_day)
+            return datetime.datetime(day.year, day.month, day.day, hour=9, minute=35)
+        elif frame_type == FrameType.MIN15:
+            floor_day = cls.day_frames[cls.day_frames <= day][-1]
+            day = cls.int2date(floor_day)
+            return datetime.datetime(day.year, day.month, day.day, hour=9, minute=45)
+        elif frame_type == FrameType.MIN30:
+            floor_day = cls.day_frames[cls.day_frames <= day][-1]
+            day = cls.int2date(floor_day)
+            return datetime.datetime(day.year, day.month, day.day, hour=10)
+        elif frame_type == FrameType.MIN60:
+            floor_day = cls.day_frames[cls.day_frames <= day][-1]
+            day = cls.int2date(floor_day)
+            return datetime.datetime(day.year, day.month, day.day, hour=10, minute=30)
+        else:  # pragma: no cover
+            raise ValueError(f"{frame_type} not supported")
+
+    @classmethod
+    def get_frames(cls, start: Frame, end: Frame, frame_type: FrameType) -> List[int]:
+        """取[start, end]间所有类型为frame_type的frames
+
+        调用本函数前,请先通过`floor`或者`ceiling`将时间帧对齐到`frame_type`的边界值
+
+        Example:
+            >>> start = arrow.get('2020-1-13 10:00').naive
+            >>> end = arrow.get('2020-1-13 13:30').naive
+            >>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113,20200114, 20200115, 20200116])
+            >>> TimeFrame.get_frames(start, end, FrameType.MIN30)
+            [202001131000, 202001131030, 202001131100, 202001131130, 202001131330]
+
+        Args:
+            start:
+            end:
+            frame_type:
+
+        Returns:
+            frame list
+        """
+        n = cls.count_frames(start, end, frame_type)
+        return cls.get_frames_by_count(end, n, frame_type)
+
+    @classmethod
+    def get_frames_by_count(
+        cls, end: Arrow, n: int, frame_type: FrameType
+    ) -> List[int]:
+        """取以end为结束点,周期为frame_type的n个frame
+
+        调用前请将`end`对齐到`frame_type`的边界
+
+        Examples:
+            >>> end = arrow.get('2020-1-6 14:30').naive
+            >>> TimeFrame.day_frames = np.array([20200102, 20200103,20200106, 20200107, 20200108, 20200109])
+            >>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30)
+            [202001061400, 202001061430]
+
+        Args:
+            end:
+            n:
+            frame_type:
+
+        Returns:
+            frame list
+        """
+
+        if frame_type == FrameType.DAY:
+            end = cls.date2int(end)
+            pos = np.searchsorted(cls.day_frames, end, side="right")
+            return cls.day_frames[max(0, pos - n) : pos].tolist()
+        elif frame_type == FrameType.WEEK:
+            end = cls.date2int(end)
+            pos = np.searchsorted(cls.week_frames, end, side="right")
+            return cls.week_frames[max(0, pos - n) : pos].tolist()
+        elif frame_type == FrameType.MONTH:
+            end = cls.date2int(end)
+            pos = np.searchsorted(cls.month_frames, end, side="right")
+            return cls.month_frames[max(0, pos - n) : pos].tolist()
+        elif frame_type in {
+            FrameType.MIN1,
+            FrameType.MIN5,
+            FrameType.MIN15,
+            FrameType.MIN30,
+            FrameType.MIN60,
+        }:
+            n_days = n // len(cls.ticks[frame_type]) + 2
+            ticks = cls.ticks[frame_type] * n_days
+
+            days = cls.get_frames_by_count(end, n_days, FrameType.DAY)
+            days = np.repeat(days, len(cls.ticks[frame_type]))
+
+            ticks = [
+                day.item() * 10000 + int(tm / 60) * 100 + tm % 60
+                for day, tm in zip(days, ticks)
+            ]
+
+            # list index is much faster than ext.index_sorted when the arr is small
+            pos = ticks.index(cls.time2int(end)) + 1
+
+            return ticks[max(0, pos - n) : pos]
+        else:  # pragma: no cover
+            raise ValueError(f"{frame_type} not support yet")
+
+    @classmethod
+    def ceiling(cls, moment: Frame, frame_type: FrameType) -> Frame:
+        """求`moment`所在类型为`frame_type`周期的上界
+
+        比如`moment`为14:59分,如果`frame_type`为30分钟,则它的上界应该为15:00
+
+        Example:
+            >>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107]
+            >>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY)
+            datetime.date(2005, 1, 7)
+
+            >>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128]
+            >>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK)
+            datetime.date(2005, 1, 7)
+
+            >>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK)
+            datetime.date(2005, 1, 7)
+
+            >>> TimeFrame.month_frames = [20050131, 20050228]
+            >>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH)
+            datetime.date(2005, 1, 31)
+
+            >>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30)
+            datetime.datetime(2005, 1, 5, 15, 0)
+
+            >>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1)
+            datetime.datetime(2005, 1, 5, 14, 59)
+
+            >>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1)
+            datetime.datetime(2005, 1, 5, 14, 59)
+
+        Args:
+            moment (datetime.datetime): [description]
+            frame_type (FrameType): [description]
+
+        Returns:
+            `moment`所在类型为`frame_type`周期的上界
+        """
+        if frame_type in cls.day_level_frames and type(moment) == datetime.datetime:
+            moment = moment.date()
+
+        floor = cls.floor(moment, frame_type)
+        if floor == moment:
+            return moment
+        elif floor > moment:
+            return floor
+        else:
+            return cls.shift(floor, 1, frame_type)
+
+    @classmethod
+    def combine_time(
+        cls,
+        date: datetime.date,
+        hour: int,
+        minute: int = 0,
+        second: int = 0,
+        microsecond: int = 0,
+    ) -> datetime.datetime:
+        """用`date`指定的日期与`hour`, `minute`, `second`等参数一起合成新的时间
+
+        Examples:
+            >>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30)
+            datetime.datetime(2020, 1, 1, 14, 30)
+
+        Args:
+            date : [description]
+            hour : [description]
+            minute : [description]. Defaults to 0.
+            second : [description]. Defaults to 0.
+            microsecond : [description]. Defaults to 0.
+
+        Returns:
+            合成后的时间
+        """
+        return datetime.datetime(
+            date.year, date.month, date.day, hour, minute, second, microsecond
+        )
+
+    @classmethod
+    def replace_date(
+        cls, dtm: datetime.datetime, dt: datetime.date
+    ) -> datetime.datetime:
+        """将`dtm`变量的日期更换为`dt`指定的日期
+
+        Example:
+            >>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1,1))
+            datetime.datetime(2019, 1, 1, 13, 49)
+
+        Args:
+            dtm (datetime.datetime): [description]
+            dt (datetime.date): [description]
+
+        Returns:
+            变换后的时间
+        """
+        return datetime.datetime(
+            dt.year, dt.month, dt.day, dtm.hour, dtm.minute, dtm.second, dtm.microsecond
+        )
+
+    @classmethod
+    def resample_frames(
+        cls, trade_days: Iterable[datetime.date], frame_type: FrameType
+    ) -> List[int]:
+        """将从行情服务器获取的交易日历重采样,生成周帧和月线帧
+
+        Args:
+            trade_days (Iterable): [description]
+            frame_type (FrameType): [description]
+
+        Returns:
+            List[int]: 重采样后的日期列表,日期用整数表示
+        """
+        if frame_type == FrameType.WEEK:
+            weeks = []
+            last = trade_days[0]
+            for cur in trade_days:
+                if cur.weekday() < last.weekday() or (cur - last).days >= 7:
+                    weeks.append(last)
+                last = cur
+
+            if weeks[-1] < last:
+                weeks.append(last)
+
+            return weeks
+        elif frame_type == FrameType.MONTH:
+            months = []
+            last = trade_days[0]
+            for cur in trade_days:
+                if cur.day < last.day:
+                    months.append(last)
+                last = cur
+            months.append(last)
+
+            return months
+        elif frame_type == FrameType.QUARTER:
+            quarters = []
+            last = trade_days[0]
+            for cur in trade_days:
+                if last.month % 3 == 0:
+                    if cur.month > last.month or cur.year > last.year:
+                        quarters.append(last)
+                last = cur
+            quarters.append(last)
+
+            return quarters
+        elif frame_type == FrameType.YEAR:
+            years = []
+            last = trade_days[0]
+            for cur in trade_days:
+                if cur.year > last.year:
+                    years.append(last)
+                last = cur
+            years.append(last)
+
+            return years
+        else:  # pragma: no cover
+            raise ValueError(f"Unsupported FrameType: {frame_type}")
+
+    @classmethod
+    def minute_frames_floor(cls, ticks, moment) -> Tuple[int, int]:
+        """
+        对于分钟级的frame,返回它们与frame刻度向下对齐后的frame及日期进位。如果需要对齐到上一个交易
+        日,则进位为-1,否则为0.
+
+        Examples:
+            >>> ticks = [600, 630, 660, 690, 810, 840, 870, 900]
+            >>> TimeFrame.minute_frames_floor(ticks, 545)
+            (900, -1)
+            >>> TimeFrame.minute_frames_floor(ticks, 600)
+            (600, 0)
+            >>> TimeFrame.minute_frames_floor(ticks, 605)
+            (600, 0)
+            >>> TimeFrame.minute_frames_floor(ticks, 899)
+            (870, 0)
+            >>> TimeFrame.minute_frames_floor(ticks, 900)
+            (900, 0)
+            >>> TimeFrame.minute_frames_floor(ticks, 905)
+            (900, 0)
+
+        Args:
+            ticks (np.array or list): frames刻度
+            moment (int): 整数表示的分钟数,比如900表示15:00
+
+        Returns:
+            tuple, the first is the new moment, the second is carry-on
+        """
+        if moment < ticks[0]:
+            return ticks[-1], -1
+        # 'right' 相当于 ticks <= m
+        index = np.searchsorted(ticks, moment, side="right")
+        return ticks[index - 1], 0
+
+    @classmethod
+    async def save_calendar(cls, trade_days):
+        # avoid circular import
+        from omicron import cache
+
+        for ft in [FrameType.WEEK, FrameType.MONTH, FrameType.QUARTER, FrameType.YEAR]:
+            days = cls.resample_frames(trade_days, ft)
+            frames = [cls.date2int(x) for x in days]
+
+            key = f"calendar:{ft.value}"
+            pl = cache.security.pipeline()
+            pl.delete(key)
+            pl.rpush(key, *frames)
+            await pl.execute()
+
+        frames = [cls.date2int(x) for x in trade_days]
+        key = f"calendar:{FrameType.DAY.value}"
+        pl = cache.security.pipeline()
+        pl.delete(key)
+        pl.rpush(key, *frames)
+        await pl.execute()
+
+    @classmethod
+    async def remove_calendar(cls):
+        # avoid circular import
+        from omicron import cache
+
+        for ft in cls.day_level_frames:
+            key = f"calendar:{ft.value}"
+            await cache.security.delete(key)
+
+    @classmethod
+    def is_bar_closed(cls, frame: Frame, ft: FrameType) -> bool:
+        """判断`frame`所代表的bar是否已经收盘(结束)
+
+        如果是日线,frame不为当天,则认为已收盘;或者当前时间在收盘时间之后,也认为已收盘。
+        如果是其它周期,则只有当frame正好在边界上,才认为是已收盘。这里有一个假设:我们不会在其它周期上,判断未来的某个frame是否已经收盘。
+
+        Args:
+            frame : bar所处的时间,必须小于当前时间
+            ft: bar所代表的帧类型
+
+        Returns:
+            bool: 是否已经收盘
+        """
+        floor = cls.floor(frame, ft)
+
+        now = arrow.now()
+        if ft == FrameType.DAY:
+            return floor < now.date() or now.hour >= 15
+        else:
+            return floor == frame
+
+    @classmethod
+    def get_frame_scope(cls, frame: Frame, ft: FrameType) -> Tuple[Frame, Frame]:
+        # todo: 函数的通用性不足,似乎应该放在具体的业务类中。如果是通用型的函数,参数不应该局限于周和月。
+        """对于给定的时间,取所在周的第一天和最后一天,所在月的第一天和最后一天
+
+        Args:
+            frame : 指定的日期,date对象
+            ft: 帧类型,支持WEEK和MONTH
+
+        Returns:
+            Tuple[Frame, Frame]: 周或者月的首末日期(date对象)
+
+        """
+        if frame is None:
+            raise ValueError("frame cannot be None")
+        if ft not in (FrameType.WEEK, FrameType.MONTH):
+            raise ValueError(f"FrameType only supports WEEK and MONTH: {ft}")
+
+        if isinstance(frame, datetime.datetime):
+            frame = frame.date()
+
+        if frame < CALENDAR_START:
+            raise ValueError(f"cannot be earlier than {CALENDAR_START}: {frame}")
+
+        # datetime.date(2021, 10, 8),这是个特殊的日期
+        if ft == FrameType.WEEK:
+            if frame < datetime.date(2005, 1, 10):
+                return datetime.date(2005, 1, 4), datetime.date(2005, 1, 7)
+
+            if not cls.is_trade_day(frame):  # 非交易日的情况,直接回退一天
+                week_day = cls.day_shift(frame, 0)
+            else:
+                week_day = frame
+
+            w1 = TimeFrame.floor(week_day, FrameType.WEEK)
+            if w1 == week_day:  # 本周的最后一个交易日
+                week_end = w1
+            else:
+                week_end = TimeFrame.week_shift(week_day, 1)
+
+            w0 = TimeFrame.week_shift(week_end, -1)
+            week_start = TimeFrame.day_shift(w0, 1)
+            return week_start, week_end
+
+        if ft == FrameType.MONTH:
+            if frame <= datetime.date(2005, 1, 31):
+                return datetime.date(2005, 1, 4), datetime.date(2005, 1, 31)
+
+            month_start = frame.replace(day=1)
+            if not cls.is_trade_day(month_start):  # 非交易日的情况,直接加1
+                month_start = cls.day_shift(month_start, 1)
+
+            month_end = TimeFrame.month_shift(month_start, 1)
+            return month_start, month_end
+
+    @classmethod
+    def get_previous_trade_day(cls, now: datetime.date):
+        """获取上一个交易日
+
+        如果当天是周六或者周日,返回周五(交易日),如果当天是周一,返回周五,如果当天是周五,返回周四
+
+        Args:
+            now : 指定的日期,date对象
+
+        Returns:
+            datetime.date: 上一个交易日
+
+        """
+        if now == datetime.date(2005, 1, 4):
+            return now
+
+        if TimeFrame.is_trade_day(now):
+            pre_trade_day = TimeFrame.day_shift(now, -1)
+        else:
+            pre_trade_day = TimeFrame.day_shift(now, 0)
+        return pre_trade_day
+
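Before the per-method reference below, here is a minimal, self-contained usage sketch. Instead of loading the real calendar from cache via `init()`, it assigns a tiny hand-written `day_frames` array exactly as the doctests above do; the `FrameType` import path is an assumption and may differ in your installation.

```python
import datetime

import numpy as np
from coretypes import FrameType  # assumed import path for FrameType

from omicron.models.timeframe import TimeFrame

# a hand-written, partial trade calendar, just like the doctests use
TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])

day = datetime.date(2020, 1, 4)     # a Saturday, not a trade day
print(TimeFrame.day_shift(day, 0))  # 2020-01-03, the last closed trade day
print(TimeFrame.day_shift(day, 2))  # 2020-01-07
print(TimeFrame.count_day_frames(datetime.date(2020, 1, 3), datetime.date(2020, 1, 7)))  # 3
print(TimeFrame.floor(datetime.datetime(2020, 1, 6, 11, 7), FrameType.MIN30))
# 2020-01-06 11:00 -- the last finished 30-minute bar before 11:07
```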
+

ceiling(moment, frame_type) (classmethod)

+ +
+ +

moment所在类型为frame_type周期的上界

+

比如moment为14:59分,如果frame_type为30分钟,则它的上界应该为15:00

+ +

Examples:

+
>>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107]
+>>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY)
+datetime.date(2005, 1, 7)
+
+
>>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128]
+>>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK)
+datetime.date(2005, 1, 7)
+
+
>>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK)
+datetime.date(2005, 1, 7)
+
+
>>> TimeFrame.month_frames = [20050131, 20050228]
+>>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH)
+datetime.date(2005, 1, 31)
+
+
>>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30)
+datetime.datetime(2005, 1, 5, 15, 0)
+
+
>>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1)
+datetime.datetime(2005, 1, 5, 14, 59)
+
+
>>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1)
+datetime.datetime(2005, 1, 5, 14, 59)
+
+ +

Parameters:

Name       | Type              | Description   | Default
moment     | datetime.datetime | [description] | required
frame_type | FrameType         | [description] | required
+

Returns:

Type  | Description
Frame | moment所在类型为frame_type周期的上界

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def ceiling(cls, moment: Frame, frame_type: FrameType) -> Frame:
+    """求`moment`所在类型为`frame_type`周期的上界
+
+    比如`moment`为14:59分,如果`frame_type`为30分钟,则它的上界应该为15:00
+
+    Example:
+        >>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107]
+        >>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY)
+        datetime.date(2005, 1, 7)
+
+        >>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128]
+        >>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK)
+        datetime.date(2005, 1, 7)
+
+        >>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK)
+        datetime.date(2005, 1, 7)
+
+        >>> TimeFrame.month_frames = [20050131, 20050228]
+        >>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH)
+        datetime.date(2005, 1, 31)
+
+        >>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30)
+        datetime.datetime(2005, 1, 5, 15, 0)
+
+        >>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1)
+        datetime.datetime(2005, 1, 5, 14, 59)
+
+        >>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1)
+        datetime.datetime(2005, 1, 5, 14, 59)
+
+    Args:
+        moment (datetime.datetime): [description]
+        frame_type (FrameType): [description]
+
+    Returns:
+        `moment`所在类型为`frame_type`周期的上界
+    """
+    if frame_type in cls.day_level_frames and type(moment) == datetime.datetime:
+        moment = moment.date()
+
+    floor = cls.floor(moment, frame_type)
+    if floor == moment:
+        return moment
+    elif floor > moment:
+        return floor
+    else:
+        return cls.shift(floor, 1, frame_type)
+
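`floor` and `ceiling` are natural companions for snapping an arbitrary timestamp onto bar boundaries. A small sketch, reusing a hand-written calendar as in the doctests (the `FrameType` import path is assumed):

```python
import datetime

import numpy as np
from coretypes import FrameType  # assumed import path

from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107])

moment = datetime.datetime(2005, 1, 5, 10, 37)
lo = TimeFrame.floor(moment, FrameType.MIN30)    # 2005-01-05 10:30, last finished 30m bar
hi = TimeFrame.ceiling(moment, FrameType.MIN30)  # 2005-01-05 11:00, bar currently forming
print(lo, hi)
```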
+
+
+ +
+ + + +
+ + + +

combine_time(date, hour, minute=0, second=0, microsecond=0) (classmethod)

+ +
+ +

date指定的日期与hour, minute, second等参数一起合成新的时间

+ +

Examples:

+
>>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30)
+datetime.datetime(2020, 1, 1, 14, 30)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
date

[description]

required
hour

[description]

required
minute

[description]. Defaults to 0.

0
second

[description]. Defaults to 0.

0
microsecond

[description]. Defaults to 0.

0
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.datetime

合成后的时间

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def combine_time(
+    cls,
+    date: datetime.date,
+    hour: int,
+    minute: int = 0,
+    second: int = 0,
+    microsecond: int = 0,
+) -> datetime.datetime:
+    """用`date`指定的日期与`hour`, `minute`, `second`等参数一起合成新的时间
+
+    Examples:
+        >>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30)
+        datetime.datetime(2020, 1, 1, 14, 30)
+
+    Args:
+        date : [description]
+        hour : [description]
+        minute : [description]. Defaults to 0.
+        second : [description]. Defaults to 0.
+        microsecond : [description]. Defaults to 0.
+
+    Returns:
+        合成后的时间
+    """
+    return datetime.datetime(
+        date.year, date.month, date.day, hour, minute, second, microsecond
+    )
+
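A typical use is building a session timestamp from a calendar date, e.g. the 15:00 close of the trade day preceding an arbitrary date. A sketch with a hand-written calendar (illustrative dates only):

```python
import datetime

import numpy as np

from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20200121, 20200122, 20200123, 20200203])

prev = TimeFrame.day_shift(datetime.date(2020, 1, 25), 0)  # 2020-01-23
close_at = TimeFrame.combine_time(prev, 15)                # 2020-01-23 15:00:00
print(close_at)
```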
+
+
+ +
+ + + +
+ + + +

count_day_frames(start, end) (classmethod)

+ +
+ +

calc trade days between start and end in close-to-close way.

+

if start == end, this will return 1. Both start/end will be aligned to open trade day before calculation.

+ +

Examples:

+
>>> start = datetime.date(2019, 12, 21)
+>>> end = datetime.date(2019, 12, 21)
+>>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225]
+>>> TimeFrame.count_day_frames(start, end)
+1
+
+
>>> # non-trade days are removed
+>>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205]
+>>> start = datetime.date(2020, 1, 23)
+>>> end = datetime.date(2020, 2, 4)
+>>> TimeFrame.count_day_frames(start, end)
+3
+
+ +

Parameters:

Name  | Type                        | Description | Default
start | Union[datetime.date, Arrow] |             | required
end   | Union[datetime.date, Arrow] |             | required
+

Returns:

Type | Description
int  | count of days

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def count_day_frames(
+    cls, start: Union[datetime.date, Arrow], end: Union[datetime.date, Arrow]
+) -> int:
+    """calc trade days between start and end in close-to-close way.
+
+    if start == end, this will return 1. Both start/end will be aligned to open
+    trade day before calculation.
+
+    Examples:
+        >>> start = datetime.date(2019, 12, 21)
+        >>> end = datetime.date(2019, 12, 21)
+        >>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225]
+        >>> TimeFrame.count_day_frames(start, end)
+        1
+
+        >>> # non-trade days are removed
+        >>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205]
+        >>> start = datetime.date(2020, 1, 23)
+        >>> end = datetime.date(2020, 2, 4)
+        >>> TimeFrame.count_day_frames(start, end)
+        3
+
+    args:
+        start:
+        end:
+    returns:
+        count of days
+    """
+    start = cls.date2int(start)
+    end = cls.date2int(end)
+    return int(ext.count_between(cls.day_frames, start, end))
+
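Because the count is close-to-close, it includes both aligned endpoints, so it is always one more than the number of `day_shift` steps between them. A quick sketch (hand-written calendar as in the doctests):

```python
import datetime

import numpy as np

from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20200121, 20200122, 20200123, 20200203, 20200204])

start, end = datetime.date(2020, 1, 22), datetime.date(2020, 2, 4)
n = TimeFrame.count_day_frames(start, end)       # 4: 0122, 0123, 0203, 0204
assert TimeFrame.day_shift(start, n - 1) == end  # n - 1 shifts from start land on end
```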
+
+
+ +
+ + + +
+ + + +

count_frames(start, end, frame_type) (classmethod)

+ +
+ +

计算start与end之间有多少个周期为frame_type的frames

+

See also:

+ + +

Parameters:

Name       | Type | Description       | Default
start      |      | start frame       | required
end        |      | end frame         | required
frame_type |      | the type of frame | required
+

Exceptions:

Type       | Description
ValueError | 如果frame_type不支持,则会抛出此异常。

+

Returns:

Type | Description
int  | 从start到end的帧数

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def count_frames(
+    cls,
+    start: Union[datetime.date, datetime.datetime, Arrow],
+    end: Union[datetime.date, datetime.datetime, Arrow],
+    frame_type,
+) -> int:
+    """计算start与end之间有多少个周期为frame_type的frames
+
+    See also:
+
+    - [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+    - [count_week_frames][omicron.models.timeframe.TimeFrame.count_week_frames]
+    - [count_month_frames][omicron.models.timeframe.TimeFrame.count_month_frames]
+
+    Args:
+        start : start frame
+        end : end frame
+        frame_type : the type of frame
+
+    Raises:
+        ValueError: 如果frame_type不支持,则会抛出此异常。
+
+    Returns:
+        从start到end的帧数
+    """
+    if frame_type == FrameType.DAY:
+        return cls.count_day_frames(start, end)
+    elif frame_type == FrameType.WEEK:
+        return cls.count_week_frames(start, end)
+    elif frame_type == FrameType.MONTH:
+        return cls.count_month_frames(start, end)
+    elif frame_type == FrameType.QUARTER:
+        return cls.count_quarter_frames(start, end)
+    elif frame_type == FrameType.YEAR:
+        return cls.count_year_frames(start, end)
+    elif frame_type in [
+        FrameType.MIN1,
+        FrameType.MIN5,
+        FrameType.MIN15,
+        FrameType.MIN30,
+        FrameType.MIN60,
+    ]:
+        tm_start = start.hour * 60 + start.minute
+        tm_end = end.hour * 60 + end.minute
+        days = cls.count_day_frames(start.date(), end.date()) - 1
+
+        tm_start_pos = cls.ticks[frame_type].index(tm_start)
+        tm_end_pos = cls.ticks[frame_type].index(tm_end)
+
+        min_bars = tm_end_pos - tm_start_pos + 1
+
+        return days * len(cls.ticks[frame_type]) + min_bars
+    else:  # pragma: no cover
+        raise ValueError(f"{frame_type} is not supported yet")
+
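For minute-level frame types the dispatcher combines the day count with the endpoints' positions in the intraday tick table, so both endpoints must already sit on a valid tick for that frame type (e.g. 10:00 or 11:00 for MIN30). A worked sketch with a hand-written calendar and an assumed `FrameType` import path:

```python
import datetime

import numpy as np
from coretypes import FrameType  # assumed import path

from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107])

start = datetime.datetime(2020, 1, 3, 10, 0)
end = datetime.datetime(2020, 1, 6, 11, 0)
# 8 MIN30 bars on 2020-01-03 from 10:00 onward, plus 3 bars on 2020-01-06 up to 11:00
print(TimeFrame.count_frames(start, end, FrameType.MIN30))  # 11
```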
+
+
+ +
+ + + +
+ + + +

count_month_frames(start, end) (classmethod)

+ +
+ +

calc trade months between start and end date in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will return 1.

+

For examples, please refer to count_day_frames

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startdatetime.daterequired
enddatetime.daterequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

months between start and end

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def count_month_frames(cls, start: datetime.date, end: datetime.date) -> int:
+    """calc trade months between start and end date in close-to-close way
+    Both start and end will be aligned to open trade day before calculation. After
+    that, if start == end, this will return 1.
+
+    For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+
+    Args:
+        start:
+        end:
+
+    Returns:
+        months between start and end
+    """
+    start = cls.date2int(start)
+    end = cls.date2int(end)
+
+    return int(ext.count_between(cls.month_frames, start, end))
+
+
+
+ +
+ + + +
+ + + +

count_quarter_frames(start, end) (classmethod)

+ +
+ +

calc trade quarters between start and end date in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will return 1.

+

For examples, please refer to count_day_frames

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startdatetime.date

[description]

required
enddatetime.date

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

quarters between start and end

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def count_quarter_frames(cls, start: datetime.date, end: datetime.date) -> int:
+    """calc trade quarters between start and end date in close-to-close way
+    Both start and end will be aligned to open trade day before calculation. After
+    that, if start == end, this will return 1.
+
+    For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+
+    Args:
+        start (datetime.date): [description]
+        end (datetime.date): [description]
+
+    Returns:
+        quarters between start and end
+    """
+    start = cls.date2int(start)
+    end = cls.date2int(end)
+
+    return int(ext.count_between(cls.quarter_frames, start, end))
+
+
+
+ +
+ + + +
+ + + +

count_week_frames(start, end) (classmethod)

+ +
+ +

calc trade weeks between start and end in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will return 1.

+

for examples, please refer to count_day_frames

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startdatetime.daterequired
enddatetime.daterequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

count of weeks

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def count_week_frames(cls, start: datetime.date, end: datetime.date) -> int:
+    """
+    calc trade weeks between start and end in close-to-close way. Both start and
+    end will be aligned to open trade day before calculation. After that, if start
+     == end, this will return 1
+
+    for examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+    args:
+        start:
+        end:
+    returns:
+        count of weeks
+    """
+    start = cls.date2int(start)
+    end = cls.date2int(end)
+    return int(ext.count_between(cls.week_frames, start, end))
+
+
+
+ +
+ + + +
+ + + +

count_year_frames(start, end) (classmethod)

+ +
+ +

calc trade years between start and end date in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will return 1.

+

For examples, please refer to count_day_frames

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startdatetime.date

[description]

required
enddatetime.date

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

years between start and end

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def count_year_frames(cls, start: datetime.date, end: datetime.date) -> int:
+    """calc trade years between start and end date in close-to-close way
+    Both start and end will be aligned to open trade day before calculation. After
+    that, if start == end, this will return 1.
+
+    For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames]
+
+    Args:
+        start (datetime.date): [description]
+        end (datetime.date): [description]
+
+    Returns:
+        years between start and end
+    """
+    start = cls.date2int(start)
+    end = cls.date2int(end)
+
+    return int(ext.count_between(cls.year_frames, start, end))
+
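`count_week_frames`, `count_month_frames`, `count_quarter_frames` and `count_year_frames` all share the same close-to-close convention; they only differ in which calendar array they consult. A combined sketch with hand-written, purely illustrative calendars (not the real exchange calendar):

```python
import datetime

import numpy as np

from omicron.models.timeframe import TimeFrame

TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123])
TimeFrame.month_frames = np.array([20200123, 20200228, 20200331])

print(TimeFrame.count_week_frames(datetime.date(2020, 1, 10), datetime.date(2020, 1, 17)))   # 2
print(TimeFrame.count_month_frames(datetime.date(2020, 1, 23), datetime.date(2020, 3, 31)))  # 3
```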
+
+
+ +
+ + + +
+ + + +

date2int(d) (classmethod)

+ +
+ +

将日期转换为整数表示

+

在zillionare中,如果要对时间和日期进行持久化操作,我们一般将其转换为int类型

+ +

Examples:

+
>>> TimeFrame.date2int(datetime.date(2020,5,1))
+20200501
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dUnion[datetime.datetime, datetime.date, Arrow]

date

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

日期的整数表示,比如20220211

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def date2int(cls, d: Union[datetime.datetime, datetime.date, Arrow]) -> int:
+    """将日期转换为整数表示
+
+    在zillionare中,如果要对时间和日期进行持久化操作,我们一般将其转换为int类型
+
+    Examples:
+        >>> TimeFrame.date2int(datetime.date(2020,5,1))
+        20200501
+
+    Args:
+        d: date
+
+    Returns:
+        日期的整数表示,比如20220211
+    """
+    return int(f"{d.year:04}{d.month:02}{d.day:02}")
+
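`date2int` and `int2date` are inverses, which is what makes the integer form convenient as a persistence key. A tiny sketch:

```python
import datetime

from omicron.models.timeframe import TimeFrame

key = TimeFrame.date2int(datetime.date(2022, 2, 11))  # 20220211
assert TimeFrame.int2date(key) == datetime.date(2022, 2, 11)
```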
+
+
+ +
+ + + +
+ + + +

day_shift(start, offset) (classmethod)

+ +
+ +

对指定日期进行前后移位操作

+

如果 n == 0,则返回d对应的交易日(如果是非交易日,则返回刚结束的一个交易日)
如果 n > 0,则返回d对应的交易日后第 n 个交易日
如果 n < 0,则返回d对应的交易日前第 n 个交易日

+ +

Examples:

+
>>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219]
+>>> TimeFrame.day_shift(datetime.date(2019,12,13), 0)
+datetime.date(2019, 12, 13)
+
+
>>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0)
+datetime.date(2019, 12, 13)
+
+
>>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1)
+datetime.date(2019, 12, 16)
+
+
>>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1)
+datetime.date(2019, 12, 16)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startdatetime.date

the origin day

required
offsetint

days to shift, can be negative

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.date

移位后的日期

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def day_shift(cls, start: datetime.date, offset: int) -> datetime.date:
+    """对指定日期进行前后移位操作
+
+    如果 n == 0,则返回d对应的交易日(如果是非交易日,则返回刚结束的一个交易日)
+    如果 n > 0,则返回d对应的交易日后第 n 个交易日
+    如果 n < 0,则返回d对应的交易日前第 n 个交易日
+
+    Examples:
+        >>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219]
+        >>> TimeFrame.day_shift(datetime.date(2019,12,13), 0)
+        datetime.date(2019, 12, 13)
+
+        >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0)
+        datetime.date(2019, 12, 13)
+
+        >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1)
+        datetime.date(2019, 12, 16)
+
+        >>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1)
+        datetime.date(2019, 12, 16)
+
+    Args:
+        start: the origin day
+        offset: days to shift, can be negative
+
+    Returns:
+        移位后的日期
+    """
+    # accelerated from 0.12 to 0.07, per 10000 loop, type conversion time included
+    start = cls.date2int(start)
+
+    return cls.int2date(ext.shift(cls.day_frames, start, offset))
+
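A common pattern is deriving the start of an N-bar daily window from its end; `day_shift` with a negative offset does exactly that. A sketch with a hand-written calendar:

```python
import datetime

import numpy as np

from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20191212, 20191213, 20191216, 20191217, 20191218, 20191219])

end = datetime.date(2019, 12, 19)
start = TimeFrame.day_shift(end, -4)  # 2019-12-13: first bar of a 5-bar window ending at `end`
print(start, end)
```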
+
+
+ +
+ + + +
+ + + +

first_min_frame(day, frame_type) (classmethod)

+ +
+ +

获取指定日期内第一个类型为frame_type的frame。

+ +

Examples:

+
>>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103])
+>>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1)
+datetime.datetime(2019, 12, 31, 9, 31)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dayUnion[str, Arrow, Frame]

which day?

required
frame_typeFrameType

which frame_type?

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Union[datetime.date, datetime.datetime]

day当日的第一帧

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def first_min_frame(
+    cls, day: Union[str, Arrow, Frame], frame_type: FrameType
+) -> Union[datetime.date, datetime.datetime]:
+    """获取指定日期类型为`frame_type`的`frame`。
+
+    Examples:
+        >>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103])
+        >>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1)
+        datetime.datetime(2019, 12, 31, 9, 31)
+
+    Args:
+        day: which day?
+        frame_type: which frame_type?
+
+    Returns:
+        `day`当日的第一帧
+    """
+    day = cls.date2int(arrow.get(day).date())
+
+    if frame_type == FrameType.MIN1:
+        floor_day = cls.day_frames[cls.day_frames <= day][-1]
+        day = cls.int2date(floor_day)
+        return datetime.datetime(day.year, day.month, day.day, hour=9, minute=31)
+    elif frame_type == FrameType.MIN5:
+        floor_day = cls.day_frames[cls.day_frames <= day][-1]
+        day = cls.int2date(floor_day)
+        return datetime.datetime(day.year, day.month, day.day, hour=9, minute=35)
+    elif frame_type == FrameType.MIN15:
+        floor_day = cls.day_frames[cls.day_frames <= day][-1]
+        day = cls.int2date(floor_day)
+        return datetime.datetime(day.year, day.month, day.day, hour=9, minute=45)
+    elif frame_type == FrameType.MIN30:
+        floor_day = cls.day_frames[cls.day_frames <= day][-1]
+        day = cls.int2date(floor_day)
+        return datetime.datetime(day.year, day.month, day.day, hour=10)
+    elif frame_type == FrameType.MIN60:
+        floor_day = cls.day_frames[cls.day_frames <= day][-1]
+        day = cls.int2date(floor_day)
+        return datetime.datetime(day.year, day.month, day.day, hour=10, minute=30)
+    else:  # pragma: no cover
+        raise ValueError(f"{frame_type} not supported")
+
+
+
+ +
+ + + +
+ + + +

+floor(moment, frame_type) + + + classmethod + + +

+ +
+ +

moment在指定的frame_type中的下界

+

比如,如果moment为10:37,则当frame_type为30分钟时,对应的下界为10:30

+ +

Examples:

+
>>> # 如果moment为日期,则当成已收盘处理
+>>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111])
+>>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY)
+datetime.date(2005, 1, 7)
+
+
>>> # moment指定的时间还未收盘,floor到上一个交易日
+>>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY)
+datetime.date(2005, 1, 6)
+
+
>>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK)
+datetime.date(2005, 1, 7)
+
+
>>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH)
+datetime.date(2005, 1, 31)
+
+
>>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30)
+datetime.datetime(2005, 1, 5, 14, 30)
+
+
>>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1)
+datetime.datetime(2005, 1, 5, 14, 59)
+
+
>>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1)
+datetime.datetime(2005, 1, 5, 14, 59)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
momentFramerequired
frame_typeFrameTyperequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Frame

moment在指定的frame_type中的下界

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def floor(cls, moment: Frame, frame_type: FrameType) -> Frame:
+    """求`moment`在指定的`frame_type`中的下界
+
+    比如,如果`moment`为10:37,则当`frame_type`为30分钟时,对应的下界为10:30
+
+    Examples:
+        >>> # 如果moment为日期,则当成已收盘处理
+        >>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111])
+        >>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY)
+        datetime.date(2005, 1, 7)
+
+        >>> # moment指定的时间还未收盘,floor到上一个交易日
+        >>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY)
+        datetime.date(2005, 1, 6)
+
+        >>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK)
+        datetime.date(2005, 1, 7)
+
+        >>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH)
+        datetime.date(2005, 1, 31)
+
+        >>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30)
+        datetime.datetime(2005, 1, 5, 14, 30)
+
+        >>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1)
+        datetime.datetime(2005, 1, 5, 14, 59)
+
+        >>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1)
+        datetime.datetime(2005, 1, 5, 14, 59)
+
+    Args:
+        moment:
+        frame_type:
+
+    Returns:
+        `moment`在指定的`frame_type`中的下界
+    """
+    if frame_type in cls.minute_level_frames:
+        tm, day_offset = cls.minute_frames_floor(
+            cls.ticks[frame_type], moment.hour * 60 + moment.minute
+        )
+        h, m = tm // 60, tm % 60
+        if cls.day_shift(moment, 0) < moment.date() or day_offset == -1:
+            h = 15
+            m = 0
+            new_day = cls.day_shift(moment, day_offset)
+        else:
+            new_day = moment.date()
+        return datetime.datetime(new_day.year, new_day.month, new_day.day, h, m)
+
+    if type(moment) == datetime.date:
+        moment = datetime.datetime(moment.year, moment.month, moment.day, 15)
+
+    # 如果是交易日,但还未收盘
+    if (
+        cls.date2int(moment) in cls.day_frames
+        and moment.hour * 60 + moment.minute < 900
+    ):
+        moment = cls.day_shift(moment, -1)
+
+    day = cls.date2int(moment)
+    if frame_type == FrameType.DAY:
+        arr = cls.day_frames
+    elif frame_type == FrameType.WEEK:
+        arr = cls.week_frames
+    elif frame_type == FrameType.MONTH:
+        arr = cls.month_frames
+    else:  # pragma: no cover
+        raise ValueError(f"frame type {frame_type} not supported.")
+
+    floored = ext.floor(arr, day)
+    return cls.int2date(floored)
+
+
+
+ +
+ + + +
+ + + +

+frame_len(frame_type) + + + classmethod + + +

+ +
+ +

返回以分钟为单位的frame长度。

+

对日线以上级别没有意义,但会返回240

+ +

Examples:

+
>>> TimeFrame.frame_len(FrameType.MIN5)
+5
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frame_typeFrameTyperequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

返回以分钟为单位的frame长度。

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def frame_len(cls, frame_type: FrameType) -> int:
+    """返回以分钟为单位的frame长度。
+
+    对日线以上级别没有意义,但会返回240
+
+    Examples:
+        >>> TimeFrame.frame_len(FrameType.MIN5)
+        5
+
+    Args:
+        frame_type:
+
+    Returns:
+        返回以分钟为单位的frame长度。
+
+    """
+
+    if frame_type == FrameType.MIN1:
+        return 1
+    elif frame_type == FrameType.MIN5:
+        return 5
+    elif frame_type == FrameType.MIN15:
+        return 15
+    elif frame_type == FrameType.MIN30:
+        return 30
+    elif frame_type == FrameType.MIN60:
+        return 60
+    else:
+        return 240
+
+
+
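Since a trading day spans 240 minutes, `frame_len` can be used to work out how many bars of a given minute-level frame type fit into one day. A small illustrative sketch; the `coretypes` import follows the type annotations shown on this page:

```python
from coretypes import FrameType

from omicron.models.timeframe import TimeFrame

bars_per_day = 240 // TimeFrame.frame_len(FrameType.MIN5)      # 48 five-minute bars per day
minutes_for_two_days = 2 * TimeFrame.frame_len(FrameType.DAY)  # 480, since DAY maps to 240
```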
+ +
+ + + +
+ + + +

+get_frame_scope(frame, ft) + + + classmethod + + +

+ +
+ +

对于给定的时间,取所在周的第一天和最后一天,所在月的第一天和最后一天

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frame

指定的日期,date对象

required
ftFrameType

帧类型,支持WEEK和MONTH

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[Frame, Frame]

周或者月的首末日期(date对象)

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def get_frame_scope(cls, frame: Frame, ft: FrameType) -> Tuple[Frame, Frame]:
+    # todo: 函数的通用性不足,似乎应该放在具体的业务类中。如果是通用型的函数,参数不应该局限于周和月。
+    """对于给定的时间,取所在周的第一天和最后一天,所在月的第一天和最后一天
+
+    Args:
+        frame : 指定的日期,date对象
+        ft: 帧类型,支持WEEK和MONTH
+
+    Returns:
+        Tuple[Frame, Frame]: 周或者月的首末日期(date对象)
+
+    """
+    if frame is None:
+        raise ValueError("frame cannot be None")
+    if ft not in (FrameType.WEEK, FrameType.MONTH):
+        raise ValueError(f"FrameType only supports WEEK and MONTH: {ft}")
+
+    if isinstance(frame, datetime.datetime):
+        frame = frame.date()
+
+    if frame < CALENDAR_START:
+        raise ValueError(f"cannot be earlier than {CALENDAR_START}: {frame}")
+
+    # datetime.date(2021, 10, 8),这是个特殊的日期
+    if ft == FrameType.WEEK:
+        if frame < datetime.date(2005, 1, 10):
+            return datetime.date(2005, 1, 4), datetime.date(2005, 1, 7)
+
+        if not cls.is_trade_day(frame):  # 非交易日的情况,直接回退一天
+            week_day = cls.day_shift(frame, 0)
+        else:
+            week_day = frame
+
+        w1 = TimeFrame.floor(week_day, FrameType.WEEK)
+        if w1 == week_day:  # 本周的最后一个交易日
+            week_end = w1
+        else:
+            week_end = TimeFrame.week_shift(week_day, 1)
+
+        w0 = TimeFrame.week_shift(week_end, -1)
+        week_start = TimeFrame.day_shift(w0, 1)
+        return week_start, week_end
+
+    if ft == FrameType.MONTH:
+        if frame <= datetime.date(2005, 1, 31):
+            return datetime.date(2005, 1, 4), datetime.date(2005, 1, 31)
+
+        month_start = frame.replace(day=1)
+        if not cls.is_trade_day(month_start):  # 非交易日的情况,直接加1
+            month_start = cls.day_shift(month_start, 1)
+
+        month_end = TimeFrame.month_shift(month_start, 1)
+        return month_start, month_end
+
+
+
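No doctest is given above for `get_frame_scope`; the following sketch only illustrates the call pattern. It assumes the trading calendar has already been loaded (via `init()` or `service_degrade()`), and the exact dates returned depend on that calendar:

```python
import datetime

from coretypes import FrameType

from omicron.models.timeframe import TimeFrame

# first and last trade day of the week containing 2022-02-09
week_start, week_end = TimeFrame.get_frame_scope(datetime.date(2022, 2, 9), FrameType.WEEK)

# first and last trade day of the month containing 2022-02-09
month_start, month_end = TimeFrame.get_frame_scope(datetime.date(2022, 2, 9), FrameType.MONTH)
```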
+ +
+ + + +
+ + + +

+get_frames(start, end, frame_type) + + + classmethod + + +

+ +
+ +

取[start, end]间所有类型为frame_type的frames

+

调用本函数前,请先通过floor或者ceiling将时间帧对齐到frame_type的边界值

+ +

Examples:

+
>>> start = arrow.get('2020-1-13 10:00').naive
+>>> end = arrow.get('2020-1-13 13:30').naive
+>>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113,20200114, 20200115, 20200116])
+>>> TimeFrame.get_frames(start, end, FrameType.MIN30)
+[202001131000, 202001131030, 202001131100, 202001131130, 202001131330]
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
startFramerequired
endFramerequired
frame_typeFrameTyperequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[int]

frame list

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def get_frames(cls, start: Frame, end: Frame, frame_type: FrameType) -> List[int]:
+    """取[start, end]间所有类型为frame_type的frames
+
+    调用本函数前,请先通过`floor`或者`ceiling`将时间帧对齐到`frame_type`的边界值
+
+    Example:
+        >>> start = arrow.get('2020-1-13 10:00').naive
+        >>> end = arrow.get('2020-1-13 13:30').naive
+        >>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113,20200114, 20200115, 20200116])
+        >>> TimeFrame.get_frames(start, end, FrameType.MIN30)
+        [202001131000, 202001131030, 202001131100, 202001131130, 202001131330]
+
+    Args:
+        start:
+        end:
+        frame_type:
+
+    Returns:
+        frame list
+    """
+    n = cls.count_frames(start, end, frame_type)
+    return cls.get_frames_by_count(end, n, frame_type)
+
+
+
+ +
+ + + +
+ + + +

+get_frames_by_count(end, n, frame_type) + + + classmethod + + +

+ +
+ +

取以end为结束点,周期为frame_type的n个frame

+

调用前请将end对齐到frame_type的边界

+ +

Examples:

+
>>> end = arrow.get('2020-1-6 14:30').naive
+>>> TimeFrame.day_frames = np.array([20200102, 20200103,20200106, 20200107, 20200108, 20200109])
+>>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30)
+[202001061400, 202001061430]
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
endArrowrequired
nintrequired
frame_typeFrameTyperequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[int]

frame list

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def get_frames_by_count(
+    cls, end: Arrow, n: int, frame_type: FrameType
+) -> List[int]:
+    """取以end为结束点,周期为frame_type的n个frame
+
+    调用前请将`end`对齐到`frame_type`的边界
+
+    Examples:
+        >>> end = arrow.get('2020-1-6 14:30').naive
+        >>> TimeFrame.day_frames = np.array([20200102, 20200103,20200106, 20200107, 20200108, 20200109])
+        >>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30)
+        [202001061400, 202001061430]
+
+    Args:
+        end:
+        n:
+        frame_type:
+
+    Returns:
+        frame list
+    """
+
+    if frame_type == FrameType.DAY:
+        end = cls.date2int(end)
+        pos = np.searchsorted(cls.day_frames, end, side="right")
+        return cls.day_frames[max(0, pos - n) : pos].tolist()
+    elif frame_type == FrameType.WEEK:
+        end = cls.date2int(end)
+        pos = np.searchsorted(cls.week_frames, end, side="right")
+        return cls.week_frames[max(0, pos - n) : pos].tolist()
+    elif frame_type == FrameType.MONTH:
+        end = cls.date2int(end)
+        pos = np.searchsorted(cls.month_frames, end, side="right")
+        return cls.month_frames[max(0, pos - n) : pos].tolist()
+    elif frame_type in {
+        FrameType.MIN1,
+        FrameType.MIN5,
+        FrameType.MIN15,
+        FrameType.MIN30,
+        FrameType.MIN60,
+    }:
+        n_days = n // len(cls.ticks[frame_type]) + 2
+        ticks = cls.ticks[frame_type] * n_days
+
+        days = cls.get_frames_by_count(end, n_days, FrameType.DAY)
+        days = np.repeat(days, len(cls.ticks[frame_type]))
+
+        ticks = [
+            day.item() * 10000 + int(tm / 60) * 100 + tm % 60
+            for day, tm in zip(days, ticks)
+        ]
+
+        # list index is much faster than ext.index_sorted when the arr is small
+        pos = ticks.index(cls.time2int(end)) + 1
+
+        return ticks[max(0, pos - n) : pos]
+    else:  # pragma: no cover
+        raise ValueError(f"{frame_type} not support yet")
+
+
+
+ +
+ + + +
+ + + +

+get_previous_trade_day(now) + + + classmethod + + +

+ +
+ +

获取上一个交易日

+

如果当天是周六或者周日,返回周五(交易日),如果当天是周一,返回周五,如果当天是周五,返回周四

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
now

指定的日期,date对象

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.date

上一个交易日

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def get_previous_trade_day(cls, now: datetime.date):
+    """获取上一个交易日
+
+    如果当天是周六或者周日,返回周五(交易日),如果当天是周一,返回周五,如果当天是周五,返回周四
+
+    Args:
+        now : 指定的日期,date对象
+
+    Returns:
+        datetime.date: 上一个交易日
+
+    """
+    if now == datetime.date(2005, 1, 4):
+        return now
+
+    if TimeFrame.is_trade_day(now):
+        pre_trade_day = TimeFrame.day_shift(now, -1)
+    else:
+        pre_trade_day = TimeFrame.day_shift(now, 0)
+    return pre_trade_day
+
+
+
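A short sketch, following the convention used elsewhere on this page of overriding `day_frames` so the result is deterministic:

```python
import datetime

import numpy as np

from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])

TimeFrame.get_previous_trade_day(datetime.date(2020, 1, 6))  # Monday   -> datetime.date(2020, 1, 3)
TimeFrame.get_previous_trade_day(datetime.date(2020, 1, 4))  # Saturday -> datetime.date(2020, 1, 3)
```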
+ +
+ + + +
+ + + +

+get_ticks(frame_type) + + + classmethod + + +

+ +
+ +

取月线、周线、日线及各分钟线对应的frame

+

对分钟线,返回值仅包含时间,不包含日期(均为整数表示)

+ +

Examples:

+
>>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331])
+>>> TimeFrame.get_ticks(FrameType.MONTH)[:3]
+array([20050131, 20050228, 20050331])
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frame_type

[description]

required
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
ValueError

[description]

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Union[List, np.array]

月线、周线、日线及各分钟线对应的frame

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def get_ticks(cls, frame_type: FrameType) -> Union[List, np.array]:
+    """取月线、周线、日线及各分钟线对应的frame
+
+    对分钟线,返回值仅包含时间,不包含日期(均为整数表示)
+
+    Examples:
+        >>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331])
+        >>> TimeFrame.get_ticks(FrameType.MONTH)[:3]
+        array([20050131, 20050228, 20050331])
+
+    Args:
+        frame_type : [description]
+
+    Raises:
+        ValueError: [description]
+
+    Returns:
+        月线、周线、日线及各分钟线对应的frame
+    """
+    if frame_type in cls.minute_level_frames:
+        return cls.ticks[frame_type]
+
+    if frame_type == FrameType.DAY:
+        return cls.day_frames
+    elif frame_type == FrameType.WEEK:
+        return cls.week_frames
+    elif frame_type == FrameType.MONTH:
+        return cls.month_frames
+    else:  # pragma: no cover
+        raise ValueError(f"{frame_type} not supported!")
+
+
+
+ +
+ + + +
+ + + +

+init() + + + async + classmethod + + +

+ +
+ +

初始化日历

+ +
+ Source code in omicron/models/timeframe.py +
@classmethod
+async def init(cls):
+    """初始化日历"""
+    await cls._load_calendar()
+
+
+
+ +
+ + + +
+ + + +

+int2date(d) + + + classmethod + + +

+ +
+ +

将数字表示的日期转换成为日期格式

+ +

Examples:

+
>>> TimeFrame.int2date(20200501)
+datetime.date(2020, 5, 1)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dUnion[int, str]

YYYYMMDD表示的日期

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.date

转换后的日期

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def int2date(cls, d: Union[int, str]) -> datetime.date:
+    """将数字表示的日期转换成为日期格式
+
+    Examples:
+        >>> TimeFrame.int2date(20200501)
+        datetime.date(2020, 5, 1)
+
+    Args:
+        d: YYYYMMDD表示的日期
+
+    Returns:
+        转换后的日期
+    """
+    s = str(d)
+    # it's 8 times faster than arrow.get
+    return datetime.date(int(s[:4]), int(s[4:6]), int(s[6:]))
+
+
+
+ +
+ + + +
+ + + +

+int2time(tm) + + + classmethod + + +

+ +
+ +

将整数表示的时间转换为datetime类型表示

+ +

Examples:

+
>>> TimeFrame.int2time(202005011500)
+datetime.datetime(2020, 5, 1, 15, 0)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tmint

time in YYYYMMDDHHmm format

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.datetime

转换后的时间

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def int2time(cls, tm: int) -> datetime.datetime:
+    """将整数表示的时间转换为`datetime`类型表示
+
+    Examples:
+        >>> TimeFrame.int2time(202005011500)
+        datetime.datetime(2020, 5, 1, 15, 0)
+
+    Args:
+        tm: time in YYYYMMDDHHmm format
+
+    Returns:
+        转换后的时间
+    """
+    s = str(tm)
+    # it's 8 times faster than arrow.get()
+    return datetime.datetime(
+        int(s[:4]), int(s[4:6]), int(s[6:8]), int(s[8:10]), int(s[10:12])
+    )
+
+
+
+ +
+ + + +
+ + + +

+is_bar_closed(frame, ft) + + + classmethod + + +

+ +
+ +

判断frame所代表的bar是否已经收盘(结束)

+

如果是日线,frame不为当天,则认为已收盘;或者当前时间在收盘时间之后,也认为已收盘。 +如果是其它周期,则只有当frame正好在边界上,才认为是已收盘。这里有一个假设:我们不会在其它周期上,判断未来的某个frame是否已经收盘。

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frame

bar所处的时间,必须小于当前时间

required
ftFrameType

bar所代表的帧类型

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

是否已经收盘

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def is_bar_closed(cls, frame: Frame, ft: FrameType) -> bool:
+    """判断`frame`所代表的bar是否已经收盘(结束)
+
+    如果是日线,frame不为当天,则认为已收盘;或者当前时间在收盘时间之后,也认为已收盘。
+    如果是其它周期,则只有当frame正好在边界上,才认为是已收盘。这里有一个假设:我们不会在其它周期上,判断未来的某个frame是否已经收盘。
+
+    Args:
+        frame : bar所处的时间,必须小于当前时间
+        ft: bar所代表的帧类型
+
+    Returns:
+        bool: 是否已经收盘
+    """
+    floor = cls.floor(frame, ft)
+
+    now = arrow.now()
+    if ft == FrameType.DAY:
+        return floor < now.date() or now.hour >= 15
+    else:
+        return floor == frame
+
+
+
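No example is given above. The sketch below assumes the calendar is loaded and that 2022-01-05 is a trading day; for minute-level frames the check reduces to whether the moment sits exactly on a frame boundary:

```python
import datetime

from coretypes import FrameType

from omicron.models.timeframe import TimeFrame

# 10:00 is a MIN30 frame boundary, so that bar is considered closed
TimeFrame.is_bar_closed(datetime.datetime(2022, 1, 5, 10, 0), FrameType.MIN30)   # True

# 10:15 lies inside the 10:00-10:30 bar, which has not closed yet
TimeFrame.is_bar_closed(datetime.datetime(2022, 1, 5, 10, 15), FrameType.MIN30)  # False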
+ +
+ + + +
+ + + +

+is_closing_call_auction_time(tm=None) + + + classmethod + + +

+ +
+ +

判断tm指定的时间是否为收盘集合竞价时间

+
+

Fixme

+

此处实现有误,收盘集合竞价时间应该还包含上午收盘时间

+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tm

[description]. Defaults to None.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

bool

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def is_closing_call_auction_time(
+    cls, tm: Union[datetime.datetime, Arrow] = None
+) -> bool:
+    """判断`tm`指定的时间是否为收盘集合竞价时间
+
+    Fixme:
+        此处实现有误,收盘集合竞价时间应该还包含上午收盘时间
+
+    Args:
+        tm : [description]. Defaults to None.
+
+    Returns:
+        bool
+    """
+    tm = tm or cls.now()
+
+    if not cls.is_trade_day(tm):
+        return False
+
+    minutes = tm.hour * 60 + tm.minute
+    return 15 * 60 - 3 <= minutes < 15 * 60
+
+
+
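A small sketch, assuming 2022-01-05 is a trading day in the loaded calendar; per the implementation above, the window is the last three minutes before 15:00:

```python
import datetime

from omicron.models.timeframe import TimeFrame

TimeFrame.is_closing_call_auction_time(datetime.datetime(2022, 1, 5, 14, 58))  # True
TimeFrame.is_closing_call_auction_time(datetime.datetime(2022, 1, 5, 14, 30))  # False
```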
+ +
+ + + +
+ + + +

+is_open_time(tm=None) + + + classmethod + + +

+ +
+ +

判断tm指定的时间是否处在交易时间段。

+

交易时间段是指集合竞价时间段之外的开盘时间

+ +

Examples:

+
>>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])
+>>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive)
+False
+>>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive)
+True
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tm

[description]. Defaults to None.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

bool

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def is_open_time(cls, tm: Union[datetime.datetime, Arrow] = None) -> bool:
+    """判断`tm`指定的时间是否处在交易时间段。
+
+    交易时间段是指集合竞价时间段之外的开盘时间
+
+    Examples:
+        >>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])
+        >>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive)
+        False
+        >>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive)
+        True
+
+    Args:
+        tm : [description]. Defaults to None.
+
+    Returns:
+        bool
+    """
+    tm = tm or arrow.now()
+
+    if not cls.is_trade_day(tm):
+        return False
+
+    tick = tm.hour * 60 + tm.minute
+    return tick in cls.ticks[FrameType.MIN1]
+
+
+
+ +
+ + + +
+ + + +

+is_opening_call_auction_time(tm=None) + + + classmethod + + +

+ +
+ +

判断tm指定的时间是否为开盘集合竞价时间

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tm

[description]. Defaults to None.

None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

bool

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def is_opening_call_auction_time(
+    cls, tm: Union[Arrow, datetime.datetime] = None
+) -> bool:
+    """判断`tm`指定的时间是否为开盘集合竞价时间
+
+    Args:
+        tm : [description]. Defaults to None.
+
+    Returns:
+        bool
+    """
+    if tm is None:
+        tm = cls.now()
+
+    if not cls.is_trade_day(tm):
+        return False
+
+    minutes = tm.hour * 60 + tm.minute
+    return 9 * 60 + 15 < minutes <= 9 * 60 + 25
+
+
+
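Analogously, the opening call auction window implemented above is 09:15 (exclusive) to 09:25 (inclusive) on trading days. A sketch under the same calendar assumption:

```python
import datetime

from omicron.models.timeframe import TimeFrame

TimeFrame.is_opening_call_auction_time(datetime.datetime(2022, 1, 5, 9, 20))  # True
TimeFrame.is_opening_call_auction_time(datetime.datetime(2022, 1, 5, 9, 40))  # False
```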
+ +
+ + + +
+ + + +

+is_trade_day(dt) + + + classmethod + + +

+ +
+ +

判断dt是否为交易日

+ +

Examples:

+
>>> TimeFrame.is_trade_day(arrow.get('2020-1-1'))
+False
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dtrequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bool

bool

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def is_trade_day(cls, dt: Union[datetime.date, datetime.datetime, Arrow]) -> bool:
+    """判断`dt`是否为交易日
+
+    Examples:
+        >>> TimeFrame.is_trade_day(arrow.get('2020-1-1'))
+        False
+
+    Args:
+        dt :
+
+    Returns:
+        bool
+    """
+    return cls.date2int(dt) in cls.day_frames
+
+
+
+ +
+ + + +
+ + + +

+last_min_frame(day, frame_type) + + + classmethod + + +

+ +
+ +

获取day日周期为frame_type的结束frame。

+ +

Examples:

+
>>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30)
+datetime.datetime(2020, 1, 3, 15, 0)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dayUnion[str, Arrow, datetime.date]required
frame_typeFrameTyperequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Union[datetime.date, datetime.datetime]

day日周期为frame_type的结束frame

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def last_min_frame(
+    cls, day: Union[str, Arrow, datetime.date], frame_type: FrameType
+) -> Union[datetime.date, datetime.datetime]:
+    """获取`day`日周期为`frame_type`的结束frame。
+
+    Example:
+        >>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30)
+        datetime.datetime(2020, 1, 3, 15, 0)
+
+    Args:
+        day:
+        frame_type:
+
+    Returns:
+        `day`日周期为`frame_type`的结束frame
+    """
+    if isinstance(day, str):
+        day = cls.date2int(arrow.get(day).date())
+    elif isinstance(day, arrow.Arrow) or isinstance(day, datetime.datetime):
+        day = cls.date2int(day.date())
+    elif isinstance(day, datetime.date):
+        day = cls.date2int(day)
+    else:
+        raise TypeError(f"{type(day)} is not supported.")
+
+    if frame_type in cls.minute_level_frames:
+        last_close_day = cls.day_frames[cls.day_frames <= day][-1]
+        day = cls.int2date(last_close_day)
+        return datetime.datetime(day.year, day.month, day.day, hour=15, minute=0)
+    else:  # pragma: no cover
+        raise ValueError(f"{frame_type} not supported")
+
+
+
+ +
+ + + +
+ + + +

+minute_frames_floor(ticks, moment) + + + classmethod + + +

+ +
+ +

对于分钟级的frame,返回它们与frame刻度向下对齐后的frame及日期进位。如果需要对齐到上一个交易 +日,则进位为-1,否则为0.

+ +

Examples:

+
>>> ticks = [600, 630, 660, 690, 810, 840, 870, 900]
+>>> TimeFrame.minute_frames_floor(ticks, 545)
+(900, -1)
+>>> TimeFrame.minute_frames_floor(ticks, 600)
+(600, 0)
+>>> TimeFrame.minute_frames_floor(ticks, 605)
+(600, 0)
+>>> TimeFrame.minute_frames_floor(ticks, 899)
+(870, 0)
+>>> TimeFrame.minute_frames_floor(ticks, 900)
+(900, 0)
+>>> TimeFrame.minute_frames_floor(ticks, 905)
+(900, 0)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
ticksnp.array or list

frames刻度

required
momentint

整数表示的分钟数,比如900表示15:00

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Tuple[int, int]

tuple, the first is the new moment, the second is carry-on

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def minute_frames_floor(cls, ticks, moment) -> Tuple[int, int]:
+    """
+    对于分钟级的frame,返回它们与frame刻度向下对齐后的frame及日期进位。如果需要对齐到上一个交易
+    日,则进位为-1,否则为0.
+
+    Examples:
+        >>> ticks = [600, 630, 660, 690, 810, 840, 870, 900]
+        >>> TimeFrame.minute_frames_floor(ticks, 545)
+        (900, -1)
+        >>> TimeFrame.minute_frames_floor(ticks, 600)
+        (600, 0)
+        >>> TimeFrame.minute_frames_floor(ticks, 605)
+        (600, 0)
+        >>> TimeFrame.minute_frames_floor(ticks, 899)
+        (870, 0)
+        >>> TimeFrame.minute_frames_floor(ticks, 900)
+        (900, 0)
+        >>> TimeFrame.minute_frames_floor(ticks, 905)
+        (900, 0)
+
+    Args:
+        ticks (np.array or list): frames刻度
+        moment (int): 整数表示的分钟数,比如900表示15:00
+
+    Returns:
+        tuple, the first is the new moment, the second is carry-on
+    """
+    if moment < ticks[0]:
+        return ticks[-1], -1
+    # ’right' 相当于 ticks <= m
+    index = np.searchsorted(ticks, moment, side="right")
+    return ticks[index - 1], 0
+
+
+
+ +
+ + + +
+ + + +

+month_shift(start, offset) + + + classmethod + + +

+ +
+ +

start所在的月移位后的frame

+

本函数首先将start对齐,然后进行移位。

+ +

Examples:

+
>>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430])
+>>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0)
+datetime.date(2015, 1, 30)
+
+
>>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0)
+datetime.date(2015, 2, 27)
+
+
>>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0)
+datetime.date(2015, 2, 27)
+
+
>>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1)
+datetime.date(2015, 3, 31)
+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.date

移位后的日期

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def month_shift(cls, start: datetime.date, offset: int) -> datetime.date:
+    """求`start`所在的月移位后的frame
+
+    本函数首先将`start`对齐,然后进行移位。
+    Examples:
+        >>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430])
+        >>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0)
+        datetime.date(2015, 1, 30)
+
+        >>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0)
+        datetime.date(2015, 2, 27)
+
+        >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0)
+        datetime.date(2015, 2, 27)
+
+        >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1)
+        datetime.date(2015, 3, 31)
+
+    Returns:
+        移位后的日期
+    """
+    start = cls.date2int(start)
+    return cls.int2date(ext.shift(cls.month_frames, start, offset))
+
+
+
+ +
+ + + + +
+ + + +

+replace_date(dtm, dt) + + + classmethod + + +

+ +
+ +

dtm变量的日期更换为dt指定的日期

+ +

Examples:

+
>>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1,1))
+datetime.datetime(2019, 1, 1, 13, 49)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
dtmdatetime.datetime

[description]

required
dtdatetime.date

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.datetime

变换后的时间

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def replace_date(
+    cls, dtm: datetime.datetime, dt: datetime.date
+) -> datetime.datetime:
+    """将`dtm`变量的日期更换为`dt`指定的日期
+
+    Example:
+        >>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1,1))
+        datetime.datetime(2019, 1, 1, 13, 49)
+
+    Args:
+        dtm (datetime.datetime): [description]
+        dt (datetime.date): [description]
+
+    Returns:
+        变换后的时间
+    """
+    return datetime.datetime(
+        dt.year, dt.month, dt.day, dtm.hour, dtm.minute, dtm.second, dtm.microsecond
+    )
+
+
+
+ +
+ + + +
+ + + +

+resample_frames(trade_days, frame_type) + + + classmethod + + +

+ +
+ +

将从行情服务器获取的交易日历重采样,生成周帧和月线帧

+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
trade_daysIterable

[description]

required
frame_typeFrameType

[description]

required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
List[int]

重采样后的日期列表,日期用整数表示

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def resample_frames(
+    cls, trade_days: Iterable[datetime.date], frame_type: FrameType
+) -> List[int]:
+    """将从行情服务器获取的交易日历重采样,生成周帧和月线帧
+
+    Args:
+        trade_days (Iterable): [description]
+        frame_type (FrameType): [description]
+
+    Returns:
+        List[int]: 重采样后的日期列表,日期用整数表示
+    """
+    if frame_type == FrameType.WEEK:
+        weeks = []
+        last = trade_days[0]
+        for cur in trade_days:
+            if cur.weekday() < last.weekday() or (cur - last).days >= 7:
+                weeks.append(last)
+            last = cur
+
+        if weeks[-1] < last:
+            weeks.append(last)
+
+        return weeks
+    elif frame_type == FrameType.MONTH:
+        months = []
+        last = trade_days[0]
+        for cur in trade_days:
+            if cur.day < last.day:
+                months.append(last)
+            last = cur
+        months.append(last)
+
+        return months
+    elif frame_type == FrameType.QUARTER:
+        quarters = []
+        last = trade_days[0]
+        for cur in trade_days:
+            if last.month % 3 == 0:
+                if cur.month > last.month or cur.year > last.year:
+                    quarters.append(last)
+            last = cur
+        quarters.append(last)
+
+        return quarters
+    elif frame_type == FrameType.YEAR:
+        years = []
+        last = trade_days[0]
+        for cur in trade_days:
+            if cur.year > last.year:
+                years.append(last)
+            last = cur
+        years.append(last)
+
+        return years
+    else:  # pragma: no cover
+        raise ValueError(f"Unsupported FrameType: {frame_type}")
+
+
+
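`resample_frames` has no doctest above. The sketch below resamples a short daily calendar into week frames; note that, despite the `List[int]` annotation, the elements returned are the same objects found in `trade_days` (here `datetime.date`):

```python
import datetime

from coretypes import FrameType

from omicron.models.timeframe import TimeFrame

trade_days = [
    datetime.date(2020, 1, 2), datetime.date(2020, 1, 3),   # first (short) week
    datetime.date(2020, 1, 6), datetime.date(2020, 1, 7),
    datetime.date(2020, 1, 8), datetime.date(2020, 1, 9),
    datetime.date(2020, 1, 10),                             # second week
]

TimeFrame.resample_frames(trade_days, FrameType.WEEK)
# -> [datetime.date(2020, 1, 3), datetime.date(2020, 1, 10)]
```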
+ +
+ + + + +
+ + + +

+service_degrade() + + + classmethod + + +

+ +
+ +

当cache中不存在日历时,启用随omicron版本一起发行时自带的日历。

+

注意:随omicron版本一起发行时自带的日历很可能不是最新的,并且可能包含错误。比如,存在这样的情况,在本版本的omicron发行时,日历更新到了2021年12月31日,在这之前的日历都是准确的,但在此之后的日历,则有可能出现错误。因此,只应该在特殊的情况下(比如测试)调用此方法,以获得一个降级的服务。

+ +
+ Source code in omicron/models/timeframe.py +
@classmethod
+def service_degrade(cls):
+    """当cache中不存在日历时,启用随omicron版本一起发行时自带的日历。
+
+    注意:随omicron版本一起发行时自带的日历很可能不是最新的,并且可能包含错误。比如,存在这样的情况,在本版本的omicron发行时,日历更新到了2021年12月31日,在这之前的日历都是准确的,但在此之后的日历,则有可能出现错误。因此,只应该在特殊的情况下(比如测试)调用此方法,以获得一个降级的服务。
+    """
+    _dir = os.path.dirname(__file__)
+    file = os.path.join(_dir, "..", "config", "calendar.json")
+    with open(file, "r") as f:
+        data = json.load(f)
+        for k, v in data.items():
+            setattr(cls, k, np.array(v))
+
+
+
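A sketch of the degraded-service path, for example in unit tests where no cache is available. The coverage of the resulting calendar depends on the calendar.json shipped with the installed omicron version:

```python
import datetime

from omicron.models.timeframe import TimeFrame

# fall back to the calendar bundled with the omicron package (tests / offline use only)
TimeFrame.service_degrade()

TimeFrame.is_trade_day(datetime.date(2021, 1, 4))  # True, provided the bundled calendar covers this date
```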
+ +
+ + + +
+ + + +

+shift(moment, n, frame_type) + + + classmethod + + +

+ +
+ +

将指定的moment移动N个frame_type位置。

+

当N为负数时,意味着向前移动;当N为正数时,意味着向后移动。如果n为零,意味着移动到最接近 +的一个已结束的frame。

+

如果moment没有对齐到frame_type对应的时间,将首先进行对齐。

+

See also:

+ + +

Examples:

+
>>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY)
+datetime.date(2020, 1, 6)
+
+
>>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30)
+datetime.datetime(2020, 1, 6, 11, 30)
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
momentUnion[Arrow, datetime.date, datetime.datetime]required
nintrequired
frame_typeFrameTyperequired
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Union[datetime.date, datetime.datetime]

移位后的Frame

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def shift(
+    cls,
+    moment: Union[Arrow, datetime.date, datetime.datetime],
+    n: int,
+    frame_type: FrameType,
+) -> Union[datetime.date, datetime.datetime]:
+    """将指定的moment移动N个`frame_type`位置。
+
+    当N为负数时,意味着向前移动;当N为正数时,意味着向后移动。如果n为零,意味着移动到最接近
+    的一个已结束的frame。
+
+    如果moment没有对齐到frame_type对应的时间,将首先进行对齐。
+
+    See also:
+
+    - [day_shift][omicron.models.timeframe.TimeFrame.day_shift]
+    - [week_shift][omicron.models.timeframe.TimeFrame.week_shift]
+    - [month_shift][omicron.models.timeframe.TimeFrame.month_shift]
+
+    Examples:
+        >>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY)
+        datetime.date(2020, 1, 6)
+
+        >>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30)
+        datetime.datetime(2020, 1, 6, 11, 30)
+
+
+    Args:
+        moment:
+        n:
+        frame_type:
+
+    Returns:
+        移位后的Frame
+    """
+    if frame_type == FrameType.DAY:
+        return cls.day_shift(moment, n)
+
+    elif frame_type == FrameType.WEEK:
+        return cls.week_shift(moment, n)
+    elif frame_type == FrameType.MONTH:
+        return cls.month_shift(moment, n)
+    elif frame_type in [
+        FrameType.MIN1,
+        FrameType.MIN5,
+        FrameType.MIN15,
+        FrameType.MIN30,
+        FrameType.MIN60,
+    ]:
+        tm = moment.hour * 60 + moment.minute
+
+        new_tick_pos = cls.ticks[frame_type].index(tm) + n
+        days = new_tick_pos // len(cls.ticks[frame_type])
+        min_part = new_tick_pos % len(cls.ticks[frame_type])
+
+        date_part = cls.day_shift(moment.date(), days)
+        minutes = cls.ticks[frame_type][min_part]
+        h, m = minutes // 60, minutes % 60
+        return datetime.datetime(
+            date_part.year,
+            date_part.month,
+            date_part.day,
+            h,
+            m,
+            tzinfo=moment.tzinfo,
+        )
+    else:  # pragma: no cover
+        raise ValueError(f"{frame_type} is not supported.")
+
+
+
+ +
+ + + +
+ + + +

+time2int(tm) + + + classmethod + + +

+ +
+ +

将时间类型转换为整数类型

+

tm可以是Arrow类型,也可以是datetime.datetime或者任何其它类型,只要它有year,month...等 +属性

+ +

Examples:

+
>>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15))
+202005011500
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tmUnion[datetime.datetime, Arrow]required
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
int

转换后的整数,比如202005011500

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def time2int(cls, tm: Union[datetime.datetime, Arrow]) -> int:
+    """将时间类型转换为整数类型
+
+    tm可以是Arrow类型,也可以是datetime.datetime或者任何其它类型,只要它有year,month...等
+    属性
+    Examples:
+        >>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15))
+        202005011500
+
+    Args:
+        tm:
+
+    Returns:
+        转换后的整数,比如202005011500
+    """
+    return int(f"{tm.year:04}{tm.month:02}{tm.day:02}{tm.hour:02}{tm.minute:02}")
+
+
+
+ +
+ + + +
+ + + +

+week_shift(start, offset) + + + classmethod + + +

+ +
+ +

对指定日期按周线帧进行前后移位操作

+

参考 omicron.models.timeframe.TimeFrame.day_shift

+ +

Examples:

+
>>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123,20200207, 20200214])
+>>> moment = arrow.get('2020-1-21').date()
+>>> TimeFrame.week_shift(moment, 1)
+datetime.date(2020, 1, 23)
+
+
>>> TimeFrame.week_shift(moment, 0)
+datetime.date(2020, 1, 17)
+
+
>>> TimeFrame.week_shift(moment, -1)
+datetime.date(2020, 1, 10)
+
+ +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
datetime.date

移位后的日期

+
+ Source code in omicron/models/timeframe.py +
@classmethod
+def week_shift(cls, start: datetime.date, offset: int) -> datetime.date:
+    """对指定日期按周线帧进行前后移位操作
+
+    参考 [omicron.models.timeframe.TimeFrame.day_shift][]
+    Examples:
+        >>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123,20200207, 20200214])
+        >>> moment = arrow.get('2020-1-21').date()
+        >>> TimeFrame.week_shift(moment, 1)
+        datetime.date(2020, 1, 23)
+
+        >>> TimeFrame.week_shift(moment, 0)
+        datetime.date(2020, 1, 17)
+
+        >>> TimeFrame.week_shift(moment, -1)
+        datetime.date(2020, 1, 10)
+
+    Returns:
+        移位后的日期
+    """
+    start = cls.date2int(start)
+    return cls.int2date(ext.shift(cls.week_frames, start, offset))
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + + + + + + + + +
+ +
+ +
+ +


+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/api/triggers/index.html b/2.0.0/api/triggers/index.html new file mode 100644 index 00000000..6e9fdfa8 --- /dev/null +++ b/2.0.0/api/triggers/index.html @@ -0,0 +1,1229 @@ + + + + + + + + + + + + + + + + Triggers - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + +
+ + +
+ +

在apscheduler.triggers的基础上提供了FrameTrigger和TradeTimeIntervalTrigger,使得它们只在交易日(或者基于交易日+延时)时激发。

+ + + +
+ + + + + + + + +
+ + + +

+ +FrameTrigger (BaseTrigger) + + + + +

+ +
+ +

A cron like trigger fires on each valid Frame

+ +
+ Source code in omicron/core/triggers.py +
class FrameTrigger(BaseTrigger):
+    """
+    A cron like trigger fires on each valid Frame
+    """
+
+    def __init__(self, frame_type: Union[str, FrameType], jitter: str = None):
+        """构造函数
+
+        jitter的格式用正则式表达为`r"([-]?)(\\d+)([mshd])"`,其中第一组为符号,'-'表示提前;
+        第二组为数字,第三组为单位,可以为`m`(分钟), `s`(秒), `h`(小时),`d`(天)。
+
+        下面的示例构造了一个只在交易日,每30分钟触发一次,每次提前15秒触发的trigger。即它的触发时
+        间是每个交易日的09:29:45, 09:59:45, ...
+
+        Examples:
+            >>> FrameTrigger(FrameType.MIN30, '-15s') # doctest: +ELLIPSIS
+            <omicron.core.triggers.FrameTrigger object at 0x...>
+
+        Args:
+            frame_type:
+            jitter: 单位秒。其中offset必须在一个FrameType的长度以内
+        """
+        self.frame_type = FrameType(frame_type)
+        if jitter is None:
+            _jitter = 0
+        else:
+            matched = re.match(r"([-]?)(\d+)([mshd])", jitter)
+            if matched is None:  # pragma: no cover
+                raise ValueError(
+                    "malformed. jitter should be [-](number)(unit), "
+                    "for example, -30m, or 30s"
+                )
+            sign, num, unit = matched.groups()
+            num = int(num)
+            if unit.lower() == "m":
+                _jitter = 60 * num
+            elif unit.lower() == "s":
+                _jitter = num
+            elif unit.lower() == "h":
+                _jitter = 3600 * num
+            elif unit.lower() == "d":
+                _jitter = 3600 * 24 * num
+            else:  # pragma: no cover
+                raise ValueError("bad time unit. only s,h,m,d is acceptable")
+
+            if sign == "-":
+                _jitter = -_jitter
+
+        self.jitter = datetime.timedelta(seconds=_jitter)
+        if (
+            frame_type == FrameType.MIN1
+            and abs(_jitter) >= 60
+            or frame_type == FrameType.MIN5
+            and abs(_jitter) >= 300
+            or frame_type == FrameType.MIN15
+            and abs(_jitter) >= 900
+            or frame_type == FrameType.MIN30
+            and abs(_jitter) >= 1800
+            or frame_type == FrameType.MIN60
+            and abs(_jitter) >= 3600
+            or frame_type == FrameType.DAY
+            and abs(_jitter) >= 24 * 3600
+            # it's still not allowed if offset > week, month, etc. Would anybody
+            # really specify an offset longer than that?
+        ):
+            raise ValueError("offset must be less than frame length")
+
+    def __str__(self):
+        return f"{self.__class__.__name__}:{self.frame_type.value}:{self.jitter}"
+
+    def get_next_fire_time(
+        self,
+        previous_fire_time: Union[datetime.date, datetime.datetime],
+        now: Union[datetime.date, datetime.datetime],
+    ):
+        """"""
+        ft = self.frame_type
+
+        # `now` is timezone aware, while ceiling isn't
+        now = now.replace(tzinfo=None)
+        next_tick = now
+        next_frame = TimeFrame.ceiling(now, ft)
+        while next_tick <= now:
+            if ft in TimeFrame.day_level_frames:
+                next_tick = TimeFrame.combine_time(next_frame, 15) + self.jitter
+            else:
+                next_tick = next_frame + self.jitter
+
+            if next_tick > now:
+                tz = tzlocal.get_localzone()
+                return next_tick.astimezone(tz)
+            else:
+                next_frame = TimeFrame.shift(next_frame, 1, ft)
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+__init__(self, frame_type, jitter=None) + + + special + + +

+ +
+ +

构造函数

+

jitter的格式用正则式表达为r"([-]?)(\d+)([mshd])",其中第一组为符号,'-'表示提前; +第二组为数字,第三组为单位,可以为m(分钟), s(秒), h(小时),d(天)。

+

下面的示例构造了一个只在交易日、每30分钟触发一次、每次提前15秒触发的trigger。即它的触发时间是每个交易日的09:29:45, 09:59:45, ...

+ +

Examples:

+
>>> FrameTrigger(FrameType.MIN30, '-15s')
+<omicron.core.triggers.FrameTrigger object at 0x...>
+
+ +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
frame_typeUnion[str, coretypes.types.FrameType]required
jitterstr

单位秒。其中offset必须在一个FrameType的长度以内

None
+
+ Source code in omicron/core/triggers.py +
def __init__(self, frame_type: Union[str, FrameType], jitter: str = None):
+    """构造函数
+
+    jitter的格式用正则式表达为`r"([-]?)(\\d+)([mshd])"`,其中第一组为符号,'-'表示提前;
+    第二组为数字,第三组为单位,可以为`m`(分钟), `s`(秒), `h`(小时),`d`(天)。
+
+    下面的示例构造了一个只在交易日,每30分钟触发一次,每次提前15秒触发的trigger。即它的触发时
+    间是每个交易日的09:29:45, 09:59:45, ...
+
+    Examples:
+        >>> FrameTrigger(FrameType.MIN30, '-15s') # doctest: +ELLIPSIS
+        <omicron.core.triggers.FrameTrigger object at 0x...>
+
+    Args:
+        frame_type:
+        jitter: 单位秒。其中offset必须在一个FrameType的长度以内
+    """
+    self.frame_type = FrameType(frame_type)
+    if jitter is None:
+        _jitter = 0
+    else:
+        matched = re.match(r"([-]?)(\d+)([mshd])", jitter)
+        if matched is None:  # pragma: no cover
+            raise ValueError(
+                "malformed. jitter should be [-](number)(unit), "
+                "for example, -30m, or 30s"
+            )
+        sign, num, unit = matched.groups()
+        num = int(num)
+        if unit.lower() == "m":
+            _jitter = 60 * num
+        elif unit.lower() == "s":
+            _jitter = num
+        elif unit.lower() == "h":
+            _jitter = 3600 * num
+        elif unit.lower() == "d":
+            _jitter = 3600 * 24 * num
+        else:  # pragma: no cover
+            raise ValueError("bad time unit. only s,h,m,d is acceptable")
+
+        if sign == "-":
+            _jitter = -_jitter
+
+    self.jitter = datetime.timedelta(seconds=_jitter)
+    if (
+        frame_type == FrameType.MIN1
+        and abs(_jitter) >= 60
+        or frame_type == FrameType.MIN5
+        and abs(_jitter) >= 300
+        or frame_type == FrameType.MIN15
+        and abs(_jitter) >= 900
+        or frame_type == FrameType.MIN30
+        and abs(_jitter) >= 1800
+        or frame_type == FrameType.MIN60
+        and abs(_jitter) >= 3600
+        or frame_type == FrameType.DAY
+        and abs(_jitter) >= 24 * 3600
+        # it's still not allowed if offset > week, month, etc. Would anybody
+        # really specify an offset longer than that?
+    ):
+        raise ValueError("offset must be less than frame length")
+
+
+
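`FrameTrigger` subclasses APScheduler's `BaseTrigger` (see the source above), so it can be passed directly to a scheduler's `add_job`. A minimal sketch; the job body and the Asia/Shanghai timezone are assumptions for illustration only:

```python
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from coretypes import FrameType

from omicron.core.triggers import FrameTrigger


async def on_min30_bar():
    ...  # e.g. fetch the latest 30-minute bars and run a strategy


scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
# fire 15 seconds before each 30-minute frame closes, on trading days only
scheduler.add_job(on_min30_bar, FrameTrigger(FrameType.MIN30, "-15s"))
scheduler.start()
```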
+ +
+ + + + + + + +
+ +
+ +
+ + + +
+ + + +

+ +TradeTimeIntervalTrigger (BaseTrigger) + + + + +

+ +
+ +

只在交易时间触发的固定间隔的trigger

+ +
+ Source code in omicron/core/triggers.py +
class TradeTimeIntervalTrigger(BaseTrigger):
+    """只在交易时间触发的固定间隔的trigger"""
+
+    def __init__(self, interval: str):
+        """构造函数
+
+        interval的格式用正则表达式表示为 `r"(\\d+)([mshd])"` 。其中第一组为数字,第二组为单位。有效的
+        `interval`如 1h,表示每1小时触发一次,则该触发器将在交易日的10:30, 11:30, 14:00和
+        15:00各触发一次
+
+        Args:
+            interval : [description]
+
+        Raises:
+            ValueError: [description]
+        """
+        matched = re.match(r"(\d+)([mshd])", interval)
+        if matched is None:
+            raise ValueError(f"malform interval {interval}")
+
+        interval, unit = matched.groups()
+        interval = int(interval)
+        unit = unit.lower()
+        if unit == "s":
+            self.interval = datetime.timedelta(seconds=interval)
+        elif unit == "m":
+            self.interval = datetime.timedelta(minutes=interval)
+        elif unit == "h":
+            self.interval = datetime.timedelta(hours=interval)
+        elif unit == "d":
+            self.interval = datetime.timedelta(days=interval)
+        else:
+            self.interval = datetime.timedelta(seconds=interval)
+
+    def __str__(self):
+        return f"{self.__class__.__name__}:{self.interval.seconds}"
+
+    def get_next_fire_time(
+        self,
+        previous_fire_time: Optional[datetime.datetime],
+        now: Optional[datetime.datetime],
+    ):
+        """"""
+        if previous_fire_time is not None:
+            fire_time = previous_fire_time + self.interval
+        else:
+            fire_time = now
+
+        if TimeFrame.date2int(fire_time.date()) not in TimeFrame.day_frames:
+            ft = TimeFrame.day_shift(now, 1)
+            fire_time = datetime.datetime(
+                ft.year, ft.month, ft.day, 9, 30, tzinfo=fire_time.tzinfo
+            )
+            return fire_time
+
+        minutes = fire_time.hour * 60 + fire_time.minute
+
+        if minutes < 570:
+            fire_time = fire_time.replace(hour=9, minute=30, second=0, microsecond=0)
+        elif 690 < minutes < 780:
+            fire_time = fire_time.replace(hour=13, minute=0, second=0, microsecond=0)
+        elif minutes > 900:
+            ft = TimeFrame.day_shift(fire_time, 1)
+            fire_time = datetime.datetime(
+                ft.year, ft.month, ft.day, 9, 30, tzinfo=fire_time.tzinfo
+            )
+
+        return fire_time
+
+
+ + + +
+ + + + + + + + + +
+ + + +

+__init__(self, interval) + + + special + + +

+ +
+ +

构造函数

+

interval的格式用正则表达式表示为 r"(\d+)([mshd])" 。其中第一组为数字,第二组为单位。有效的interval如 1h,表示每1小时触发一次,则该触发器将在交易日的10:30, 11:30, 14:00和15:00各触发一次

+ +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
interval

[description]

required
+

Exceptions:

+ + + + + + + + + + + + + +
TypeDescription
ValueError

[description]

+
+ Source code in omicron/core/triggers.py +
def __init__(self, interval: str):
+    """构造函数
+
+    interval的格式用正则表达式表示为 `r"(\\d+)([mshd])"` 。其中第一组为数字,第二组为单位。有效的
+    `interval`如 1h,表示每1小时触发一次,则该触发器将在交易日的10:30, 11:30, 14:00和
+    15:00各触发一次
+
+    Args:
+        interval : [description]
+
+    Raises:
+        ValueError: [description]
+    """
+    matched = re.match(r"(\d+)([mshd])", interval)
+    if matched is None:
+        raise ValueError(f"malform interval {interval}")
+
+    interval, unit = matched.groups()
+    interval = int(interval)
+    unit = unit.lower()
+    if unit == "s":
+        self.interval = datetime.timedelta(seconds=interval)
+    elif unit == "m":
+        self.interval = datetime.timedelta(minutes=interval)
+    elif unit == "h":
+        self.interval = datetime.timedelta(hours=interval)
+    elif unit == "d":
+        self.interval = datetime.timedelta(days=interval)
+    else:
+        self.interval = datetime.timedelta(seconds=interval)
+
+
+
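Correspondingly, `TradeTimeIntervalTrigger` runs a job at a fixed interval but, per `get_next_fire_time` above, pushes fire times that fall outside the trading session to the next session. A sketch under the same APScheduler assumptions; `poll_quotes` is a hypothetical job:

```python
from apscheduler.schedulers.asyncio import AsyncIOScheduler

from omicron.core.triggers import TradeTimeIntervalTrigger


async def poll_quotes():
    ...  # hypothetical job: refresh real-time quotes


scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
# run every 10 seconds, but only while the market is in a trading session
scheduler.add_job(poll_quotes, TradeTimeIntervalTrigger("10s"))
scheduler.start()
```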
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + + +
+ +
+ +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/assets/_mkdocstrings.css b/2.0.0/assets/_mkdocstrings.css new file mode 100644 index 00000000..b2cceef2 --- /dev/null +++ b/2.0.0/assets/_mkdocstrings.css @@ -0,0 +1,16 @@ + +/* Don't capitalize names. */ +h5.doc-heading { + text-transform: none !important; +} + +/* Avoid breaking parameters name, etc. in table cells. */ +.doc-contents td code { + word-break: normal !important; +} + +/* For pieces of Markdown rendered in table cells. */ +.doc-contents td p { + margin-top: 0 !important; + margin-bottom: 0 !important; +} diff --git a/2.0.0/assets/images/favicon.png b/2.0.0/assets/images/favicon.png new file mode 100644 index 00000000..1cf13b9f Binary files /dev/null and b/2.0.0/assets/images/favicon.png differ diff --git a/2.0.0/assets/javascripts/bundle.5a2dcb6a.min.js b/2.0.0/assets/javascripts/bundle.5a2dcb6a.min.js new file mode 100644 index 00000000..6f9720b6 --- /dev/null +++ b/2.0.0/assets/javascripts/bundle.5a2dcb6a.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var aa=Object.create;var wr=Object.defineProperty;var sa=Object.getOwnPropertyDescriptor;var ca=Object.getOwnPropertyNames,kt=Object.getOwnPropertySymbols,fa=Object.getPrototypeOf,Er=Object.prototype.hasOwnProperty,fn=Object.prototype.propertyIsEnumerable;var cn=(e,t,r)=>t in e?wr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,H=(e,t)=>{for(var r in t||(t={}))Er.call(t,r)&&cn(e,r,t[r]);if(kt)for(var r of kt(t))fn.call(t,r)&&cn(e,r,t[r]);return e};var un=(e,t)=>{var r={};for(var n in e)Er.call(e,n)&&t.indexOf(n)<0&&(r[n]=e[n]);if(e!=null&&kt)for(var n of kt(e))t.indexOf(n)<0&&fn.call(e,n)&&(r[n]=e[n]);return r};var yt=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var ua=(e,t,r,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of ca(t))!Er.call(e,o)&&o!==r&&wr(e,o,{get:()=>t[o],enumerable:!(n=sa(t,o))||n.enumerable});return e};var Ye=(e,t,r)=>(r=e!=null?aa(fa(e)):{},ua(t||!e||!e.__esModule?wr(r,"default",{value:e,enumerable:!0}):r,e));var ln=yt((Sr,pn)=>{(function(e,t){typeof Sr=="object"&&typeof pn!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(Sr,function(){"use strict";function e(r){var n=!0,o=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(_){return!!(_&&_!==document&&_.nodeName!=="HTML"&&_.nodeName!=="BODY"&&"classList"in _&&"contains"in _.classList)}function c(_){var We=_.type,Fe=_.tagName;return!!(Fe==="INPUT"&&s[We]&&!_.readOnly||Fe==="TEXTAREA"&&!_.readOnly||_.isContentEditable)}function f(_){_.classList.contains("focus-visible")||(_.classList.add("focus-visible"),_.setAttribute("data-focus-visible-added",""))}function u(_){!_.hasAttribute("data-focus-visible-added")||(_.classList.remove("focus-visible"),_.removeAttribute("data-focus-visible-added"))}function p(_){_.metaKey||_.altKey||_.ctrlKey||(a(r.activeElement)&&f(r.activeElement),n=!0)}function l(_){n=!1}function d(_){!a(_.target)||(n||c(_.target))&&f(_.target)}function h(_){!a(_.target)||(_.target.classList.contains("focus-visible")||_.target.hasAttribute("data-focus-visible-added"))&&(o=!0,window.clearTimeout(i),i=window.setTimeout(function(){o=!1},100),u(_.target))}function b(_){document.visibilityState==="hidden"&&(o&&(n=!0),U())}function 
U(){document.addEventListener("mousemove",W),document.addEventListener("mousedown",W),document.addEventListener("mouseup",W),document.addEventListener("pointermove",W),document.addEventListener("pointerdown",W),document.addEventListener("pointerup",W),document.addEventListener("touchmove",W),document.addEventListener("touchstart",W),document.addEventListener("touchend",W)}function G(){document.removeEventListener("mousemove",W),document.removeEventListener("mousedown",W),document.removeEventListener("mouseup",W),document.removeEventListener("pointermove",W),document.removeEventListener("pointerdown",W),document.removeEventListener("pointerup",W),document.removeEventListener("touchmove",W),document.removeEventListener("touchstart",W),document.removeEventListener("touchend",W)}function W(_){_.target.nodeName&&_.target.nodeName.toLowerCase()==="html"||(n=!1,G())}document.addEventListener("keydown",p,!0),document.addEventListener("mousedown",l,!0),document.addEventListener("pointerdown",l,!0),document.addEventListener("touchstart",l,!0),document.addEventListener("visibilitychange",b,!0),U(),r.addEventListener("focus",d,!0),r.addEventListener("blur",h,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var mn=yt(Or=>{(function(e){var t=function(){try{return!!Symbol.iterator}catch(f){return!1}},r=t(),n=function(f){var u={next:function(){var p=f.shift();return{done:p===void 0,value:p}}};return r&&(u[Symbol.iterator]=function(){return u}),u},o=function(f){return encodeURIComponent(f).replace(/%20/g,"+")},i=function(f){return decodeURIComponent(String(f).replace(/\+/g," "))},s=function(){var f=function(p){Object.defineProperty(this,"_entries",{writable:!0,value:{}});var l=typeof p;if(l!=="undefined")if(l==="string")p!==""&&this._fromString(p);else if(p instanceof f){var d=this;p.forEach(function(G,W){d.append(W,G)})}else if(p!==null&&l==="object")if(Object.prototype.toString.call(p)==="[object Array]")for(var h=0;hd[0]?1:0}),f._entries&&(f._entries={});for(var p=0;p1?i(d[1]):"")}})})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Or);(function(e){var t=function(){try{var o=new e.URL("b","http://a");return o.pathname="c d",o.href==="http://a/c%20d"&&o.searchParams}catch(i){return!1}},r=function(){var o=e.URL,i=function(c,f){typeof c!="string"&&(c=String(c)),f&&typeof f!="string"&&(f=String(f));var u=document,p;if(f&&(e.location===void 0||f!==e.location.href)){f=f.toLowerCase(),u=document.implementation.createHTMLDocument(""),p=u.createElement("base"),p.href=f,u.head.appendChild(p);try{if(p.href.indexOf(f)!==0)throw new Error(p.href)}catch(_){throw new Error("URL unable to set base "+f+" due to "+_)}}var l=u.createElement("a");l.href=c,p&&(u.body.appendChild(l),l.href=l.href);var d=u.createElement("input");if(d.type="url",d.value=c,l.protocol===":"||!/:/.test(l.href)||!d.checkValidity()&&!f)throw new TypeError("Invalid URL");Object.defineProperty(this,"_anchorElement",{value:l});var h=new 
e.URLSearchParams(this.search),b=!0,U=!0,G=this;["append","delete","set"].forEach(function(_){var We=h[_];h[_]=function(){We.apply(h,arguments),b&&(U=!1,G.search=h.toString(),U=!0)}}),Object.defineProperty(this,"searchParams",{value:h,enumerable:!0});var W=void 0;Object.defineProperty(this,"_updateSearchParams",{enumerable:!1,configurable:!1,writable:!1,value:function(){this.search!==W&&(W=this.search,U&&(b=!1,this.searchParams._fromString(this.search),b=!0))}})},s=i.prototype,a=function(c){Object.defineProperty(s,c,{get:function(){return this._anchorElement[c]},set:function(f){this._anchorElement[c]=f},enumerable:!0})};["hash","host","hostname","port","protocol"].forEach(function(c){a(c)}),Object.defineProperty(s,"search",{get:function(){return this._anchorElement.search},set:function(c){this._anchorElement.search=c,this._updateSearchParams()},enumerable:!0}),Object.defineProperties(s,{toString:{get:function(){var c=this;return function(){return c.href}}},href:{get:function(){return this._anchorElement.href.replace(/\?$/,"")},set:function(c){this._anchorElement.href=c,this._updateSearchParams()},enumerable:!0},pathname:{get:function(){return this._anchorElement.pathname.replace(/(^\/?)/,"/")},set:function(c){this._anchorElement.pathname=c},enumerable:!0},origin:{get:function(){var c={"http:":80,"https:":443,"ftp:":21}[this._anchorElement.protocol],f=this._anchorElement.port!=c&&this._anchorElement.port!=="";return this._anchorElement.protocol+"//"+this._anchorElement.hostname+(f?":"+this._anchorElement.port:"")},enumerable:!0},password:{get:function(){return""},set:function(c){},enumerable:!0},username:{get:function(){return""},set:function(c){},enumerable:!0}}),i.createObjectURL=function(c){return o.createObjectURL.apply(o,arguments)},i.revokeObjectURL=function(c){return o.revokeObjectURL.apply(o,arguments)},e.URL=i};if(t()||r(),e.location!==void 0&&!("origin"in e.location)){var n=function(){return e.location.protocol+"//"+e.location.hostname+(e.location.port?":"+e.location.port:"")};try{Object.defineProperty(e.location,"origin",{get:n,enumerable:!0})}catch(o){setInterval(function(){e.location.origin=n()},100)}}})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Or)});var Pn=yt((Ks,$t)=>{/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */var dn,hn,bn,vn,gn,yn,xn,wn,En,Ht,_r,Sn,On,_n,rt,Tn,Mn,Ln,An,Cn,Rn,kn,Hn,Pt;(function(e){var t=typeof global=="object"?global:typeof self=="object"?self:typeof this=="object"?this:{};typeof define=="function"&&define.amd?define("tslib",["exports"],function(n){e(r(t,r(n)))}):typeof $t=="object"&&typeof $t.exports=="object"?e(r(t,r($t.exports))):e(r(t));function r(n,o){return n!==t&&(typeof Object.create=="function"?Object.defineProperty(n,"__esModule",{value:!0}):n.__esModule=!0),function(i,s){return n[i]=o?o(i,s):s}}})(function(e){var t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(n,o){n.__proto__=o}||function(n,o){for(var i in o)Object.prototype.hasOwnProperty.call(o,i)&&(n[i]=o[i])};dn=function(n,o){if(typeof o!="function"&&o!==null)throw new TypeError("Class extends value "+String(o)+" is not a constructor or null");t(n,o);function i(){this.constructor=n}n.prototype=o===null?Object.create(o):(i.prototype=o.prototype,new i)},hn=Object.assign||function(n){for(var o,i=1,s=arguments.length;i=0;u--)(f=n[u])&&(c=(a<3?f(c):a>3?f(o,i,c):f(o,i))||c);return a>3&&c&&Object.defineProperty(o,i,c),c},gn=function(n,o){return function(i,s){o(i,s,n)}},yn=function(n,o){if(typeof Reflect=="object"&&typeof Reflect.metadata=="function")return Reflect.metadata(n,o)},xn=function(n,o,i,s){function a(c){return c instanceof i?c:new i(function(f){f(c)})}return new(i||(i=Promise))(function(c,f){function u(d){try{l(s.next(d))}catch(h){f(h)}}function p(d){try{l(s.throw(d))}catch(h){f(h)}}function l(d){d.done?c(d.value):a(d.value).then(u,p)}l((s=s.apply(n,o||[])).next())})},wn=function(n,o){var i={label:0,sent:function(){if(c[0]&1)throw c[1];return c[1]},trys:[],ops:[]},s,a,c,f;return f={next:u(0),throw:u(1),return:u(2)},typeof Symbol=="function"&&(f[Symbol.iterator]=function(){return this}),f;function u(l){return function(d){return p([l,d])}}function p(l){if(s)throw new TypeError("Generator is already executing.");for(;i;)try{if(s=1,a&&(c=l[0]&2?a.return:l[0]?a.throw||((c=a.return)&&c.call(a),0):a.next)&&!(c=c.call(a,l[1])).done)return c;switch(a=0,c&&(l=[l[0]&2,c.value]),l[0]){case 0:case 1:c=l;break;case 4:return i.label++,{value:l[1],done:!1};case 5:i.label++,a=l[1],l=[0];continue;case 7:l=i.ops.pop(),i.trys.pop();continue;default:if(c=i.trys,!(c=c.length>0&&c[c.length-1])&&(l[0]===6||l[0]===2)){i=0;continue}if(l[0]===3&&(!c||l[1]>c[0]&&l[1]=n.length&&(n=void 0),{value:n&&n[s++],done:!n}}};throw new TypeError(o?"Object is not iterable.":"Symbol.iterator is not defined.")},_r=function(n,o){var i=typeof Symbol=="function"&&n[Symbol.iterator];if(!i)return n;var s=i.call(n),a,c=[],f;try{for(;(o===void 0||o-- >0)&&!(a=s.next()).done;)c.push(a.value)}catch(u){f={error:u}}finally{try{a&&!a.done&&(i=s.return)&&i.call(s)}finally{if(f)throw f.error}}return c},Sn=function(){for(var n=[],o=0;o1||u(b,U)})})}function u(b,U){try{p(s[b](U))}catch(G){h(c[0][3],G)}}function p(b){b.value instanceof rt?Promise.resolve(b.value.v).then(l,d):h(c[0][2],b)}function l(b){u("next",b)}function d(b){u("throw",b)}function h(b,U){b(U),c.shift(),c.length&&u(c[0][0],c[0][1])}},Mn=function(n){var o,i;return o={},s("next"),s("throw",function(a){throw a}),s("return"),o[Symbol.iterator]=function(){return this},o;function s(a,c){o[a]=n[a]?function(f){return(i=!i)?{value:rt(n[a](f)),done:a==="return"}:c?c(f):f}:c}},Ln=function(n){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var o=n[Symbol.asyncIterator],i;return 
o?o.call(n):(n=typeof Ht=="function"?Ht(n):n[Symbol.iterator](),i={},s("next"),s("throw"),s("return"),i[Symbol.asyncIterator]=function(){return this},i);function s(c){i[c]=n[c]&&function(f){return new Promise(function(u,p){f=n[c](f),a(u,p,f.done,f.value)})}}function a(c,f,u,p){Promise.resolve(p).then(function(l){c({value:l,done:u})},f)}},An=function(n,o){return Object.defineProperty?Object.defineProperty(n,"raw",{value:o}):n.raw=o,n};var r=Object.create?function(n,o){Object.defineProperty(n,"default",{enumerable:!0,value:o})}:function(n,o){n.default=o};Cn=function(n){if(n&&n.__esModule)return n;var o={};if(n!=null)for(var i in n)i!=="default"&&Object.prototype.hasOwnProperty.call(n,i)&&Pt(o,n,i);return r(o,n),o},Rn=function(n){return n&&n.__esModule?n:{default:n}},kn=function(n,o,i,s){if(i==="a"&&!s)throw new TypeError("Private accessor was defined without a getter");if(typeof o=="function"?n!==o||!s:!o.has(n))throw new TypeError("Cannot read private member from an object whose class did not declare it");return i==="m"?s:i==="a"?s.call(n):s?s.value:o.get(n)},Hn=function(n,o,i,s,a){if(s==="m")throw new TypeError("Private method is not writable");if(s==="a"&&!a)throw new TypeError("Private accessor was defined without a setter");if(typeof o=="function"?n!==o||!a:!o.has(n))throw new TypeError("Cannot write private member to an object whose class did not declare it");return s==="a"?a.call(n,i):a?a.value=i:o.set(n,i),i},e("__extends",dn),e("__assign",hn),e("__rest",bn),e("__decorate",vn),e("__param",gn),e("__metadata",yn),e("__awaiter",xn),e("__generator",wn),e("__exportStar",En),e("__createBinding",Pt),e("__values",Ht),e("__read",_r),e("__spread",Sn),e("__spreadArrays",On),e("__spreadArray",_n),e("__await",rt),e("__asyncGenerator",Tn),e("__asyncDelegator",Mn),e("__asyncValues",Ln),e("__makeTemplateObject",An),e("__importStar",Cn),e("__importDefault",Rn),e("__classPrivateFieldGet",kn),e("__classPrivateFieldSet",Hn)})});var Br=yt((At,Yr)=>{/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof At=="object"&&typeof Yr=="object"?Yr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof At=="object"?At.ClipboardJS=r():t.ClipboardJS=r()})(At,function(){return function(){var e={686:function(n,o,i){"use strict";i.d(o,{default:function(){return ia}});var s=i(279),a=i.n(s),c=i(370),f=i.n(c),u=i(817),p=i.n(u);function l(j){try{return document.execCommand(j)}catch(T){return!1}}var d=function(T){var O=p()(T);return l("cut"),O},h=d;function b(j){var T=document.documentElement.getAttribute("dir")==="rtl",O=document.createElement("textarea");O.style.fontSize="12pt",O.style.border="0",O.style.padding="0",O.style.margin="0",O.style.position="absolute",O.style[T?"right":"left"]="-9999px";var k=window.pageYOffset||document.documentElement.scrollTop;return O.style.top="".concat(k,"px"),O.setAttribute("readonly",""),O.value=j,O}var U=function(T,O){var k=b(T);O.container.appendChild(k);var $=p()(k);return l("copy"),k.remove(),$},G=function(T){var O=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},k="";return typeof T=="string"?k=U(T,O):T instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(T==null?void 0:T.type)?k=U(T.value,O):(k=p()(T),l("copy")),k},W=G;function _(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?_=function(O){return typeof O}:_=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},_(j)}var We=function(){var T=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},O=T.action,k=O===void 0?"copy":O,$=T.container,q=T.target,Te=T.text;if(k!=="copy"&&k!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(q!==void 0)if(q&&_(q)==="object"&&q.nodeType===1){if(k==="copy"&&q.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. Please use "readonly" instead of "disabled" attribute');if(k==="cut"&&(q.hasAttribute("readonly")||q.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. 
You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(Te)return W(Te,{container:$});if(q)return k==="cut"?h(q):W(q,{container:$})},Fe=We;function Pe(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Pe=function(O){return typeof O}:Pe=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},Pe(j)}function Ji(j,T){if(!(j instanceof T))throw new TypeError("Cannot call a class as a function")}function sn(j,T){for(var O=0;O0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof $.action=="function"?$.action:this.defaultAction,this.target=typeof $.target=="function"?$.target:this.defaultTarget,this.text=typeof $.text=="function"?$.text:this.defaultText,this.container=Pe($.container)==="object"?$.container:document.body}},{key:"listenClick",value:function($){var q=this;this.listener=f()($,"click",function(Te){return q.onClick(Te)})}},{key:"onClick",value:function($){var q=$.delegateTarget||$.currentTarget,Te=this.action(q)||"copy",Rt=Fe({action:Te,container:this.container,target:this.target(q),text:this.text(q)});this.emit(Rt?"success":"error",{action:Te,text:Rt,trigger:q,clearSelection:function(){q&&q.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function($){return xr("action",$)}},{key:"defaultTarget",value:function($){var q=xr("target",$);if(q)return document.querySelector(q)}},{key:"defaultText",value:function($){return xr("text",$)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function($){var q=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return W($,q)}},{key:"cut",value:function($){return h($)}},{key:"isSupported",value:function(){var $=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],q=typeof $=="string"?[$]:$,Te=!!document.queryCommandSupported;return q.forEach(function(Rt){Te=Te&&!!document.queryCommandSupported(Rt)}),Te}}]),O}(a()),ia=oa},828:function(n){var o=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==o;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}n.exports=s},438:function(n,o,i){var s=i(828);function a(u,p,l,d,h){var b=f.apply(this,arguments);return u.addEventListener(l,b,h),{destroy:function(){u.removeEventListener(l,b,h)}}}function c(u,p,l,d,h){return typeof u.addEventListener=="function"?a.apply(null,arguments):typeof l=="function"?a.bind(null,document).apply(null,arguments):(typeof u=="string"&&(u=document.querySelectorAll(u)),Array.prototype.map.call(u,function(b){return a(b,p,l,d,h)}))}function f(u,p,l,d){return function(h){h.delegateTarget=s(h.target,p),h.delegateTarget&&d.call(u,h)}}n.exports=c},879:function(n,o){o.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},o.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||o.node(i[0]))},o.string=function(i){return typeof i=="string"||i instanceof String},o.fn=function(i){var s=Object.prototype.toString.call(i);return s==="[object Function]"}},370:function(n,o,i){var s=i(879),a=i(438);function c(l,d,h){if(!l&&!d&&!h)throw new Error("Missing required 
arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(h))throw new TypeError("Third argument must be a Function");if(s.node(l))return f(l,d,h);if(s.nodeList(l))return u(l,d,h);if(s.string(l))return p(l,d,h);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function f(l,d,h){return l.addEventListener(d,h),{destroy:function(){l.removeEventListener(d,h)}}}function u(l,d,h){return Array.prototype.forEach.call(l,function(b){b.addEventListener(d,h)}),{destroy:function(){Array.prototype.forEach.call(l,function(b){b.removeEventListener(d,h)})}}}function p(l,d,h){return a(document.body,l,d,h)}n.exports=c},817:function(n){function o(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),f=document.createRange();f.selectNodeContents(i),c.removeAllRanges(),c.addRange(f),s=c.toString()}return s}n.exports=o},279:function(n){function o(){}o.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function f(){c.off(i,f),s.apply(a,arguments)}return f._=s,this.on(i,f,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,f=a.length;for(c;c{"use strict";/*! + * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Ms=/["'&<>]/;Si.exports=Ls;function Ls(e){var t=""+e,r=Ms.exec(t);if(!r)return t;var n,o="",i=0,s=0;for(i=r.index;i0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var n=this,o=this,i=o.hasError,s=o.isStopped,a=o.observers;return i||s?Tr:(this.currentObservers=null,a.push(r),new $e(function(){n.currentObservers=null,Ue(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var n=this,o=n.hasError,i=n.thrownError,s=n.isStopped;o?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new F;return r.source=this,r},t.create=function(r,n){return new Qn(r,n)},t}(F);var Qn=function(e){ne(t,e);function t(r,n){var o=e.call(this)||this;return o.destination=r,o.source=n,o}return t.prototype.next=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.next)===null||o===void 0||o.call(n,r)},t.prototype.error=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.error)===null||o===void 0||o.call(n,r)},t.prototype.complete=function(){var r,n;(n=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||n===void 0||n.call(r)},t.prototype._subscribe=function(r){var n,o;return(o=(n=this.source)===null||n===void 0?void 0:n.subscribe(r))!==null&&o!==void 0?o:Tr},t}(E);var wt={now:function(){return(wt.delegate||Date).now()},delegate:void 0};var Et=function(e){ne(t,e);function t(r,n,o){r===void 0&&(r=1/0),n===void 0&&(n=1/0),o===void 0&&(o=wt);var i=e.call(this)||this;return 
i._bufferSize=r,i._windowTime=n,i._timestampProvider=o,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=n===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,n),i}return t.prototype.next=function(r){var n=this,o=n.isStopped,i=n._buffer,s=n._infiniteTimeWindow,a=n._timestampProvider,c=n._windowTime;o||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var n=this._innerSubscribe(r),o=this,i=o._infiniteTimeWindow,s=o._buffer,a=s.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,n,o):(r.actions.push(this),r._scheduled||(r._scheduled=at.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,n,o){var i;if(o===void 0&&(o=0),o!=null?o>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,n,o);var s=r.actions;n!=null&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==n&&(at.cancelAnimationFrame(n),r._scheduled=void 0)},t}(zt);var Gn=function(e){ne(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var n=this._scheduled;this._scheduled=void 0;var o=this.actions,i;r=r||o.shift();do if(i=r.execute(r.state,r.delay))break;while((r=o[0])&&r.id===n&&o.shift());if(this._active=!1,i){for(;(r=o[0])&&r.id===n&&o.shift();)r.unsubscribe();throw i}},t}(Nt);var xe=new Gn(Bn);var R=new F(function(e){return e.complete()});function qt(e){return e&&L(e.schedule)}function Hr(e){return e[e.length-1]}function Ve(e){return L(Hr(e))?e.pop():void 0}function Ee(e){return qt(Hr(e))?e.pop():void 0}function Kt(e,t){return typeof Hr(e)=="number"?e.pop():t}var st=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Qt(e){return L(e==null?void 0:e.then)}function Yt(e){return L(e[it])}function Bt(e){return Symbol.asyncIterator&&L(e==null?void 0:e[Symbol.asyncIterator])}function Gt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function ya(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Jt=ya();function Xt(e){return L(e==null?void 0:e[Jt])}function Zt(e){return jn(this,arguments,function(){var r,n,o,i;return It(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,jt(r.read())];case 3:return n=s.sent(),o=n.value,i=n.done,i?[4,jt(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,jt(o)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function er(e){return L(e==null?void 0:e.getReader)}function z(e){if(e instanceof F)return e;if(e!=null){if(Yt(e))return xa(e);if(st(e))return wa(e);if(Qt(e))return Ea(e);if(Bt(e))return Jn(e);if(Xt(e))return Sa(e);if(er(e))return Oa(e)}throw Gt(e)}function xa(e){return new F(function(t){var r=e[it]();if(L(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function wa(e){return new F(function(t){for(var r=0;r=2,!0))}function ie(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new E}:t,n=e.resetOnError,o=n===void 0?!0:n,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(f){var u,p,l,d=0,h=!1,b=!1,U=function(){p==null||p.unsubscribe(),p=void 0},G=function(){U(),u=l=void 0,h=b=!1},W=function(){var _=u;G(),_==null||_.unsubscribe()};return g(function(_,We){d++,!b&&!h&&U();var Fe=l=l!=null?l:r();We.add(function(){d--,d===0&&!b&&!h&&(p=Dr(W,c))}),Fe.subscribe(We),!u&&d>0&&(u=new Ge({next:function(Pe){return Fe.next(Pe)},error:function(Pe){b=!0,U(),p=Dr(G,o,Pe),Fe.error(Pe)},complete:function(){h=!0,U(),p=Dr(G,s),Fe.complete()}}),z(_).subscribe(u))})(f)}}function Dr(e,t){for(var r=[],n=2;ne.next(document)),e}function Q(e,t=document){return Array.from(t.querySelectorAll(e))}function K(e,t=document){let r=pe(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function pe(e,t=document){return t.querySelector(e)||void 0}function Ie(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}function nr(e){return A(v(document.body,"focusin"),v(document.body,"focusout")).pipe(Re(1),m(()=>{let t=Ie();return typeof t!="undefined"?e.contains(t):!1}),N(e===Ie()),B())}function qe(e){return{x:e.offsetLeft,y:e.offsetTop}}function yo(e){return A(v(window,"load"),v(window,"resize")).pipe(Ae(0,xe),m(()=>qe(e)),N(qe(e)))}function or(e){return{x:e.scrollLeft,y:e.scrollTop}}function pt(e){return A(v(e,"scroll"),v(window,"resize")).pipe(Ae(0,xe),m(()=>or(e)),N(or(e)))}var wo=function(){if(typeof Map!="undefined")return Map;function e(t,r){var n=-1;return t.some(function(o,i){return o[0]===r?(n=i,!0):!1}),n}return function(){function t(){this.__entries__=[]}return Object.defineProperty(t.prototype,"size",{get:function(){return this.__entries__.length},enumerable:!0,configurable:!0}),t.prototype.get=function(r){var n=e(this.__entries__,r),o=this.__entries__[n];return o&&o[1]},t.prototype.set=function(r,n){var o=e(this.__entries__,r);~o?this.__entries__[o][1]=n:this.__entries__.push([r,n])},t.prototype.delete=function(r){var 
n=this.__entries__,o=e(n,r);~o&&n.splice(o,1)},t.prototype.has=function(r){return!!~e(this.__entries__,r)},t.prototype.clear=function(){this.__entries__.splice(0)},t.prototype.forEach=function(r,n){n===void 0&&(n=null);for(var o=0,i=this.__entries__;o0},e.prototype.connect_=function(){!qr||this.connected_||(document.addEventListener("transitionend",this.onTransitionEnd_),window.addEventListener("resize",this.refresh),Ka?(this.mutationsObserver_=new MutationObserver(this.refresh),this.mutationsObserver_.observe(document,{attributes:!0,childList:!0,characterData:!0,subtree:!0})):(document.addEventListener("DOMSubtreeModified",this.refresh),this.mutationEventsAdded_=!0),this.connected_=!0)},e.prototype.disconnect_=function(){!qr||!this.connected_||(document.removeEventListener("transitionend",this.onTransitionEnd_),window.removeEventListener("resize",this.refresh),this.mutationsObserver_&&this.mutationsObserver_.disconnect(),this.mutationEventsAdded_&&document.removeEventListener("DOMSubtreeModified",this.refresh),this.mutationsObserver_=null,this.mutationEventsAdded_=!1,this.connected_=!1)},e.prototype.onTransitionEnd_=function(t){var r=t.propertyName,n=r===void 0?"":r,o=qa.some(function(i){return!!~n.indexOf(i)});o&&this.refresh()},e.getInstance=function(){return this.instance_||(this.instance_=new e),this.instance_},e.instance_=null,e}(),Eo=function(e,t){for(var r=0,n=Object.keys(t);r0},e}(),Oo=typeof WeakMap!="undefined"?new WeakMap:new wo,_o=function(){function e(t){if(!(this instanceof e))throw new TypeError("Cannot call a class as a function.");if(!arguments.length)throw new TypeError("1 argument required, but only 0 present.");var r=Qa.getInstance(),n=new ns(t,r,this);Oo.set(this,n)}return e}();["observe","unobserve","disconnect"].forEach(function(e){_o.prototype[e]=function(){var t;return(t=Oo.get(this))[e].apply(t,arguments)}});var os=function(){return typeof ir.ResizeObserver!="undefined"?ir.ResizeObserver:_o}(),To=os;var Mo=new E,is=P(()=>I(new To(e=>{for(let t of e)Mo.next(t)}))).pipe(S(e=>A(Se,I(e)).pipe(C(()=>e.disconnect()))),X(1));function he(e){return{width:e.offsetWidth,height:e.offsetHeight}}function ve(e){return is.pipe(w(t=>t.observe(e)),S(t=>Mo.pipe(x(({target:r})=>r===e),C(()=>t.unobserve(e)),m(()=>he(e)))),N(he(e)))}function mt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function cr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var Lo=new E,as=P(()=>I(new IntersectionObserver(e=>{for(let t of e)Lo.next(t)},{threshold:0}))).pipe(S(e=>A(Se,I(e)).pipe(C(()=>e.disconnect()))),X(1));function fr(e){return as.pipe(w(t=>t.observe(e)),S(t=>Lo.pipe(x(({target:r})=>r===e),C(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function Ao(e,t=16){return pt(e).pipe(m(({y:r})=>{let n=he(e),o=mt(e);return r>=o.height-n.height-t}),B())}var ur={drawer:K("[data-md-toggle=drawer]"),search:K("[data-md-toggle=search]")};function Co(e){return ur[e].checked}function Ke(e,t){ur[e].checked!==t&&ur[e].click()}function dt(e){let t=ur[e];return v(t,"change").pipe(m(()=>t.checked),N(t.checked))}function ss(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function Ro(){return 
v(window,"keydown").pipe(x(e=>!(e.metaKey||e.ctrlKey)),m(e=>({mode:Co("search")?"search":"global",type:e.key,claim(){e.preventDefault(),e.stopPropagation()}})),x(({mode:e,type:t})=>{if(e==="global"){let r=Ie();if(typeof r!="undefined")return!ss(r,t)}return!0}),ie())}function Oe(){return new URL(location.href)}function pr(e){location.href=e.href}function ko(){return new E}function Ho(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)Ho(e,r)}function M(e,t,...r){let n=document.createElement(e);if(t)for(let o of Object.keys(t))typeof t[o]!="undefined"&&(typeof t[o]!="boolean"?n.setAttribute(o,t[o]):n.setAttribute(o,""));for(let o of r)Ho(n,o);return n}function Po(e,t){let r=t;if(e.length>r){for(;e[r]!==" "&&--r>0;);return`${e.substring(0,r)}...`}return e}function lr(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function $o(){return location.hash.substring(1)}function Io(e){let t=M("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function cs(){return v(window,"hashchange").pipe(m($o),N($o()),x(e=>e.length>0),X(1))}function jo(){return cs().pipe(m(e=>pe(`[id="${e}"]`)),x(e=>typeof e!="undefined"))}function Kr(e){let t=matchMedia(e);return rr(r=>t.addListener(()=>r(t.matches))).pipe(N(t.matches))}function Fo(){let e=matchMedia("print");return A(v(window,"beforeprint").pipe(m(()=>!0)),v(window,"afterprint").pipe(m(()=>!1))).pipe(N(e.matches))}function Qr(e,t){return e.pipe(S(r=>r?t():R))}function mr(e,t={credentials:"same-origin"}){return ue(fetch(`${e}`,t)).pipe(ce(()=>R),S(r=>r.status!==200?Ot(()=>new Error(r.statusText)):I(r)))}function je(e,t){return mr(e,t).pipe(S(r=>r.json()),X(1))}function Uo(e,t){let r=new DOMParser;return mr(e,t).pipe(S(n=>n.text()),m(n=>r.parseFromString(n,"text/xml")),X(1))}function Do(e){let t=M("script",{src:e});return P(()=>(document.head.appendChild(t),A(v(t,"load"),v(t,"error").pipe(S(()=>Ot(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),C(()=>document.head.removeChild(t)),oe(1))))}function Wo(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function Vo(){return A(v(window,"scroll",{passive:!0}),v(window,"resize",{passive:!0})).pipe(m(Wo),N(Wo()))}function zo(){return{width:innerWidth,height:innerHeight}}function No(){return v(window,"resize",{passive:!0}).pipe(m(zo),N(zo()))}function qo(){return Y([Vo(),No()]).pipe(m(([e,t])=>({offset:e,size:t})),X(1))}function dr(e,{viewport$:t,header$:r}){let n=t.pipe(J("size")),o=Y([n,r]).pipe(m(()=>qe(e)));return Y([r,t,o]).pipe(m(([{height:i},{offset:s,size:a},{x:c,y:f}])=>({offset:{x:s.x-c,y:s.y-f+i},size:a})))}function Ko(e,{tx$:t}){let r=v(e,"message").pipe(m(({data:n})=>n));return t.pipe(Lt(()=>r,{leading:!0,trailing:!0}),w(n=>e.postMessage(n)),S(()=>r),ie())}var fs=K("#__config"),ht=JSON.parse(fs.textContent);ht.base=`${new URL(ht.base,Oe())}`;function le(){return ht}function Z(e){return ht.features.includes(e)}function re(e,t){return typeof t!="undefined"?ht.translations[e].replace("#",t.toString()):ht.translations[e]}function _e(e,t=document){return K(`[data-md-component=${e}]`,t)}function te(e,t=document){return Q(`[data-md-component=${e}]`,t)}function us(e){let t=K(".md-typeset > :first-child",e);return v(t,"click",{once:!0}).pipe(m(()=>K(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function Qo(e){return!Z("announce.dismiss")||!e.childElementCount?R:P(()=>{let t=new E;return 
t.pipe(N({hash:__md_get("__announce")})).subscribe(({hash:r})=>{var n;r&&r===((n=__md_get("__announce"))!=null?n:r)&&(e.hidden=!0,__md_set("__announce",r))}),us(e).pipe(w(r=>t.next(r)),C(()=>t.complete()),m(r=>H({ref:e},r)))})}function ps(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function Yo(e,t){let r=new E;return r.subscribe(({hidden:n})=>{e.hidden=n}),ps(e,t).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))}var ii=Ye(Br());function Gr(e){return M("div",{class:"md-tooltip",id:e},M("div",{class:"md-tooltip__inner md-typeset"}))}function Bo(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return M("aside",{class:"md-annotation",tabIndex:0},Gr(t),M("a",{href:r,class:"md-annotation__index",tabIndex:-1},M("span",{"data-md-annotation-id":e})))}else return M("aside",{class:"md-annotation",tabIndex:0},Gr(t),M("span",{class:"md-annotation__index",tabIndex:-1},M("span",{"data-md-annotation-id":e})))}function Go(e){return M("button",{class:"md-clipboard md-icon",title:re("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function Jr(e,t){let r=t&2,n=t&1,o=Object.keys(e.terms).filter(a=>!e.terms[a]).reduce((a,c)=>[...a,M("del",null,c)," "],[]).slice(0,-1),i=new URL(e.location);Z("search.highlight")&&i.searchParams.set("h",Object.entries(e.terms).filter(([,a])=>a).reduce((a,[c])=>`${a} ${c}`.trim(),""));let{tags:s}=le();return M("a",{href:`${i}`,class:"md-search-result__link",tabIndex:-1},M("article",{class:["md-search-result__article",...r?["md-search-result__article--document"]:[]].join(" "),"data-md-score":e.score.toFixed(2)},r>0&&M("div",{class:"md-search-result__icon md-icon"}),M("h1",{class:"md-search-result__title"},e.title),n>0&&e.text.length>0&&M("p",{class:"md-search-result__teaser"},Po(e.text,320)),e.tags&&M("div",{class:"md-typeset"},e.tags.map(a=>{let c=a.replace(/<[^>]+>/g,""),f=s?c in s?`md-tag-icon md-tag-icon--${s[c]}`:"md-tag-icon":"";return M("span",{class:`md-tag ${f}`},a)})),n>0&&o.length>0&&M("p",{class:"md-search-result__terms"},re("search.result.term.missing"),": ",...o)))}function Jo(e){let t=e[0].score,r=[...e],n=r.findIndex(f=>!f.location.includes("#")),[o]=r.splice(n,1),i=r.findIndex(f=>f.scoreJr(f,1)),...a.length?[M("details",{class:"md-search-result__more"},M("summary",{tabIndex:-1},a.length>0&&a.length===1?re("search.result.more.one"):re("search.result.more.other",a.length)),...a.map(f=>Jr(f,1)))]:[]];return M("li",{class:"md-search-result__item"},c)}function Xo(e){return M("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>M("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?lr(r):r)))}function Xr(e){let t=`tabbed-control tabbed-control--${e}`;return M("div",{class:t,hidden:!0},M("button",{class:"tabbed-button",tabIndex:-1}))}function Zo(e){return M("div",{class:"md-typeset__scrollwrap"},M("div",{class:"md-typeset__table"},e))}function ls(e){let t=le(),r=new URL(`../${e.version}/`,t.base);return M("li",{class:"md-version__item"},M("a",{href:`${r}`,class:"md-version__link"},e.title))}function ei(e,t){return M("div",{class:"md-version"},M("button",{class:"md-version__current","aria-label":re("select.version.title")},t.title),M("ul",{class:"md-version__list"},e.map(ls)))}function ms(e,t){let r=P(()=>Y([yo(e),pt(t)])).pipe(m(([{x:n,y:o},i])=>{let{width:s,height:a}=he(e);return{x:n-i.x+s/2,y:o-i.y+a/2}}));return nr(e).pipe(S(n=>r.pipe(m(o=>({active:n,offset:o})),oe(+!n||1/0))))}function ti(e,t,{target$:r}){let[n,o]=Array.from(e.children);return P(()=>{let i=new E,s=i.pipe(de(1));return 
i.subscribe({next({offset:a}){e.style.setProperty("--md-tooltip-x",`${a.x}px`),e.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),fr(e).pipe(ee(s)).subscribe(a=>{e.toggleAttribute("data-md-visible",a)}),A(i.pipe(x(({active:a})=>a)),i.pipe(Re(250),x(({active:a})=>!a))).subscribe({next({active:a}){a?e.prepend(n):n.remove()},complete(){e.prepend(n)}}),i.pipe(Ae(16,xe)).subscribe(({active:a})=>{n.classList.toggle("md-tooltip--active",a)}),i.pipe(zr(125,xe),x(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?e.style.setProperty("--md-tooltip-0",`${-a}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),v(o,"click").pipe(ee(s),x(a=>!(a.metaKey||a.ctrlKey))).subscribe(a=>a.preventDefault()),v(o,"mousedown").pipe(ee(s),ae(i)).subscribe(([a,{active:c}])=>{var f;if(a.button!==0||a.metaKey||a.ctrlKey)a.preventDefault();else if(c){a.preventDefault();let u=e.parentElement.closest(".md-annotation");u instanceof HTMLElement?u.focus():(f=Ie())==null||f.blur()}}),r.pipe(ee(s),x(a=>a===n),ke(125)).subscribe(()=>e.focus()),ms(e,t).pipe(w(a=>i.next(a)),C(()=>i.complete()),m(a=>H({ref:e},a)))})}function ds(e){let t=[];for(let r of Q(".c, .c1, .cm",e)){let n=[],o=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=o.nextNode();i;i=o.nextNode())n.push(i);for(let i of n){let s;for(;s=/(\(\d+\))(!)?/.exec(i.textContent);){let[,a,c]=s;if(typeof c=="undefined"){let f=i.splitText(s.index);i=f.splitText(a.length),t.push(f)}else{i.textContent=a,t.push(i);break}}}}return t}function ri(e,t){t.append(...Array.from(e.childNodes))}function ni(e,t,{target$:r,print$:n}){let o=t.closest("[id]"),i=o==null?void 0:o.id,s=new Map;for(let a of ds(t)){let[,c]=a.textContent.match(/\((\d+)\)/);pe(`li:nth-child(${c})`,e)&&(s.set(c,Bo(c,i)),a.replaceWith(s.get(c)))}return s.size===0?R:P(()=>{let a=new E,c=[];for(let[f,u]of s)c.push([K(".md-typeset",u),K(`li:nth-child(${f})`,e)]);return n.pipe(ee(a.pipe(de(1)))).subscribe(f=>{e.hidden=!f;for(let[u,p]of c)f?ri(u,p):ri(p,u)}),A(...[...s].map(([,f])=>ti(f,t,{target$:r}))).pipe(C(()=>a.complete()),ie())})}var hs=0;function ai(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return ai(t)}}function oi(e){return ve(e).pipe(m(({width:t})=>({scrollable:mt(e).width>t})),J("scrollable"))}function si(e,t){let{matches:r}=matchMedia("(hover)"),n=P(()=>{let o=new E;if(o.subscribe(({scrollable:s})=>{s&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")}),ii.default.isSupported()){let s=e.closest("pre");s.id=`__code_${++hs}`,s.insertBefore(Go(s.id),e)}let i=e.closest(".highlight");if(i instanceof HTMLElement){let s=ai(i);if(typeof s!="undefined"&&(i.classList.contains("annotate")||Z("content.code.annotate"))){let a=ni(s,e,t);return oi(e).pipe(w(c=>o.next(c)),C(()=>o.complete()),m(c=>H({ref:e},c)),et(ve(i).pipe(m(({width:c,height:f})=>c&&f),B(),S(c=>c?a:R))))}}return oi(e).pipe(w(s=>o.next(s)),C(()=>o.complete()),m(s=>H({ref:e},s)))});return Z("content.lazy")?fr(e).pipe(x(o=>o),oe(1),S(()=>n)):n}var ci=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label 
foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color)}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}defs #flowchart-circleEnd,defs #flowchart-circleStart,defs #flowchart-crossEnd,defs #flowchart-crossStart,defs #flowchart-pointEnd,defs #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs #classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs #statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs 
#ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}.actor,defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{stroke:var(--md-mermaid-node-fg-color)}text.actor>tspan{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-default-fg-color--lighter)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-edge-color)}.loopText>tspan,.messageText,.noteText>tspan{fill:var(--md-mermaid-edge-color);stroke:none;font-family:var(--md-mermaid-font-family)!important}.noteText>tspan{fill:#000}#arrowhead path{fill:var(--md-mermaid-edge-color);stroke:none}.loopLine{stroke:var(--md-mermaid-node-fg-color)}.labelBox,.loopLine{fill:var(--md-mermaid-node-bg-color)}.labelBox{stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-node-fg-color);font-family:var(--md-mermaid-font-family)}";var Zr,vs=0;function gs(){return typeof mermaid=="undefined"||mermaid instanceof Element?Do("https://unpkg.com/mermaid@9.1.7/dist/mermaid.min.js"):I(void 0)}function fi(e){return e.classList.remove("mermaid"),Zr||(Zr=gs().pipe(w(()=>mermaid.initialize({startOnLoad:!1,themeCSS:ci,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),X(1))),Zr.subscribe(()=>{e.classList.add("mermaid");let t=`__mermaid_${vs++}`,r=M("div",{class:"mermaid"});mermaid.mermaidAPI.render(t,e.textContent,n=>{let o=r.attachShadow({mode:"closed"});o.innerHTML=n,e.replaceWith(r)})}),Zr.pipe(m(()=>({ref:e})))}function ys(e,{target$:t,print$:r}){let n=!0;return A(t.pipe(m(o=>o.closest("details:not([open])")),x(o=>e===o),m(()=>({action:"open",reveal:!0}))),r.pipe(x(o=>o||!n),w(()=>n=e.open),m(o=>({action:o?"open":"close"}))))}function ui(e,t){return P(()=>{let r=new E;return r.subscribe(({action:n,reveal:o})=>{e.toggleAttribute("open",n==="open"),o&&e.scrollIntoView()}),ys(e,t).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))})}var pi=M("table");function li(e){return e.replaceWith(pi),pi.replaceWith(Zo(e)),I({ref:e})}function xs(e){let t=Q(":scope > input",e),r=t.find(n=>n.checked)||t[0];return A(...t.map(n=>v(n,"change").pipe(m(()=>K(`label[for="${n.id}"]`))))).pipe(N(K(`label[for="${r.id}"]`)),m(n=>({active:n})))}function mi(e,{viewport$:t}){let r=Xr("prev");e.append(r);let n=Xr("next");e.append(n);let o=K(".tabbed-labels",e);return P(()=>{let i=new E,s=i.pipe(de(1));return Y([i,ve(e)]).pipe(Ae(1,xe),ee(s)).subscribe({next([{active:a},c]){let f=qe(a),{width:u}=he(a);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let p=or(o);(f.xp.x+c.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),Y([pt(o),ve(o)]).pipe(ee(s)).subscribe(([a,c])=>{let f=mt(o);r.hidden=a.x<16,n.hidden=a.x>f.width-c.width-16}),A(v(r,"click").pipe(m(()=>-1)),v(n,"click").pipe(m(()=>1))).pipe(ee(s)).subscribe(a=>{let{width:c}=he(o);o.scrollBy({left:c*a,behavior:"smooth"})}),Z("content.tabs.link")&&i.pipe(He(1),ae(t)).subscribe(([{active:a},{offset:c}])=>{let f=a.innerText.trim();if(a.hasAttribute("data-md-switching"))a.removeAttribute("data-md-switching");else{let u=e.offsetTop-c.y;for(let l of Q("[data-tabs]"))for(let d of Q(":scope > input",l)){let 
h=K(`label[for="${d.id}"]`);if(h!==a&&h.innerText.trim()===f){h.setAttribute("data-md-switching",""),d.click();break}}window.scrollTo({top:e.offsetTop-u});let p=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...p])])}}),xs(e).pipe(w(a=>i.next(a)),C(()=>i.complete()),m(a=>H({ref:e},a)))}).pipe(Je(fe))}function di(e,{viewport$:t,target$:r,print$:n}){return A(...Q("pre:not(.mermaid) > code",e).map(o=>si(o,{target$:r,print$:n})),...Q("pre.mermaid",e).map(o=>fi(o)),...Q("table:not([class])",e).map(o=>li(o)),...Q("details",e).map(o=>ui(o,{target$:r,print$:n})),...Q("[data-tabs]",e).map(o=>mi(o,{viewport$:t})))}function ws(e,{alert$:t}){return t.pipe(S(r=>A(I(!0),I(!1).pipe(ke(2e3))).pipe(m(n=>({message:r,active:n})))))}function hi(e,t){let r=K(".md-typeset",e);return P(()=>{let n=new E;return n.subscribe(({message:o,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=o}),ws(e,t).pipe(w(o=>n.next(o)),C(()=>n.complete()),m(o=>H({ref:e},o)))})}function Es({viewport$:e}){if(!Z("header.autohide"))return I(!1);let t=e.pipe(m(({offset:{y:o}})=>o),Ce(2,1),m(([o,i])=>[oMath.abs(i-o.y)>100),m(([,[o]])=>o),B()),n=dt("search");return Y([e,n]).pipe(m(([{offset:o},i])=>o.y>400&&!i),B(),S(o=>o?r:I(!1)),N(!1))}function bi(e,t){return P(()=>Y([ve(e),Es(t)])).pipe(m(([{height:r},n])=>({height:r,hidden:n})),B((r,n)=>r.height===n.height&&r.hidden===n.hidden),X(1))}function vi(e,{header$:t,main$:r}){return P(()=>{let n=new E,o=n.pipe(de(1));return n.pipe(J("active"),Ze(t)).subscribe(([{active:i},{hidden:s}])=>{e.classList.toggle("md-header--shadow",i&&!s),e.hidden=s}),r.subscribe(n),t.pipe(ee(o),m(i=>H({ref:e},i)))})}function Ss(e,{viewport$:t,header$:r}){return dr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:n}})=>{let{height:o}=he(e);return{active:n>=o}}),J("active"))}function gi(e,t){return P(()=>{let r=new E;r.subscribe(({active:o})=>{e.classList.toggle("md-header__title--active",o)});let n=pe("article h1");return typeof n=="undefined"?R:Ss(n,t).pipe(w(o=>r.next(o)),C(()=>r.complete()),m(o=>H({ref:e},o)))})}function yi(e,{viewport$:t,header$:r}){let n=r.pipe(m(({height:i})=>i),B()),o=n.pipe(S(()=>ve(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),J("bottom"))));return Y([n,o,t]).pipe(m(([i,{top:s,bottom:a},{offset:{y:c},size:{height:f}}])=>(f=Math.max(0,f-Math.max(0,s-c,i)-Math.max(0,f+c-a)),{offset:s-i,height:f,active:s-i<=c})),B((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function Os(e){let t=__md_get("__palette")||{index:e.findIndex(r=>matchMedia(r.getAttribute("data-md-color-media")).matches)};return I(...e).pipe(se(r=>v(r,"change").pipe(m(()=>r))),N(e[Math.max(0,t.index)]),m(r=>({index:e.indexOf(r),color:{scheme:r.getAttribute("data-md-color-scheme"),primary:r.getAttribute("data-md-color-primary"),accent:r.getAttribute("data-md-color-accent")}})),X(1))}function xi(e){return P(()=>{let t=new E;t.subscribe(n=>{document.body.setAttribute("data-md-color-switching","");for(let[o,i]of Object.entries(n.color))document.body.setAttribute(`data-md-color-${o}`,i);for(let o=0;o{document.body.removeAttribute("data-md-color-switching")});let r=Q("input",e);return Os(r).pipe(w(n=>t.next(n)),C(()=>t.complete()),m(n=>H({ref:e},n)))})}var en=Ye(Br());function _s(e){e.setAttribute("data-md-copying","");let t=e.innerText;return e.removeAttribute("data-md-copying"),t}function wi({alert$:e}){en.default.isSupported()&&new F(t=>{new en.default("[data-clipboard-target], 
[data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||_s(K(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(w(t=>{t.trigger.focus()}),m(()=>re("clipboard.copied"))).subscribe(e)}function Ts(e){if(e.length<2)return[""];let[t,r]=[...e].sort((o,i)=>o.length-i.length).map(o=>o.replace(/[^/]+$/,"")),n=0;if(t===r)n=t.length;else for(;t.charCodeAt(n)===r.charCodeAt(n);)n++;return e.map(o=>o.replace(t.slice(0,n),""))}function hr(e){let t=__md_get("__sitemap",sessionStorage,e);if(t)return I(t);{let r=le();return Uo(new URL("sitemap.xml",e||r.base)).pipe(m(n=>Ts(Q("loc",n).map(o=>o.textContent))),ce(()=>R),De([]),w(n=>__md_set("__sitemap",n,sessionStorage,e)))}}function Ei({document$:e,location$:t,viewport$:r}){let n=le();if(location.protocol==="file:")return;"scrollRestoration"in history&&(history.scrollRestoration="manual",v(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}));let o=pe("link[rel=icon]");typeof o!="undefined"&&(o.href=o.href);let i=hr().pipe(m(f=>f.map(u=>`${new URL(u,n.base)}`)),S(f=>v(document.body,"click").pipe(x(u=>!u.metaKey&&!u.ctrlKey),S(u=>{if(u.target instanceof Element){let p=u.target.closest("a");if(p&&!p.target){let l=new URL(p.href);if(l.search="",l.hash="",l.pathname!==location.pathname&&f.includes(l.toString()))return u.preventDefault(),I({url:new URL(p.href)})}}return Se}))),ie()),s=v(window,"popstate").pipe(x(f=>f.state!==null),m(f=>({url:new URL(location.href),offset:f.state})),ie());A(i,s).pipe(B((f,u)=>f.url.href===u.url.href),m(({url:f})=>f)).subscribe(t);let a=t.pipe(J("pathname"),S(f=>mr(f.href).pipe(ce(()=>(pr(f),Se)))),ie());i.pipe(ut(a)).subscribe(({url:f})=>{history.pushState({},"",`${f}`)});let c=new DOMParser;a.pipe(S(f=>f.text()),m(f=>c.parseFromString(f,"text/html"))).subscribe(e),e.pipe(He(1)).subscribe(f=>{for(let u of["title","link[rel=canonical]","meta[name=author]","meta[name=description]","[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...Z("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let p=pe(u),l=pe(u,f);typeof p!="undefined"&&typeof l!="undefined"&&p.replaceWith(l)}}),e.pipe(He(1),m(()=>_e("container")),S(f=>Q("script",f)),Ir(f=>{let u=M("script");if(f.src){for(let p of f.getAttributeNames())u.setAttribute(p,f.getAttribute(p));return f.replaceWith(u),new F(p=>{u.onload=()=>p.complete()})}else return u.textContent=f.textContent,f.replaceWith(u),R})).subscribe(),A(i,s).pipe(ut(e)).subscribe(({url:f,offset:u})=>{f.hash&&!u?Io(f.hash):window.scrollTo(0,(u==null?void 0:u.y)||0)}),r.pipe(Mt(i),Re(250),J("offset")).subscribe(({offset:f})=>{history.replaceState(f,"")}),A(i,s).pipe(Ce(2,1),x(([f,u])=>f.url.pathname===u.url.pathname),m(([,f])=>f)).subscribe(({offset:f})=>{window.scrollTo(0,(f==null?void 0:f.y)||0)})}var As=Ye(tn());var Oi=Ye(tn());function rn(e,t){let r=new RegExp(e.separator,"img"),n=(o,i,s)=>`${i}${s}`;return o=>{o=o.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator})(${o.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(t?(0,Oi.default)(s):s).replace(i,n).replace(/<\/mark>(\s+)]*>/img,"$1")}}function _i(e){return e.split(/"([^"]+)"/g).map((t,r)=>r&1?t.replace(/^\b|^(?![^\x00-\x7F]|$)|\s+/g," +"):t).join("").replace(/"|(?:^|\s+)[*+\-:^~]+(?=\s+|$)/g,"").trim()}function bt(e){return e.type===1}function Ti(e){return e.type===2}function vt(e){return e.type===3}function 
Rs({config:e,docs:t}){e.lang.length===1&&e.lang[0]==="en"&&(e.lang=[re("search.config.lang")]),e.separator==="[\\s\\-]+"&&(e.separator=re("search.config.separator"));let n={pipeline:re("search.config.pipeline").split(/\s*,\s*/).filter(Boolean),suggestions:Z("search.suggest")};return{config:e,docs:t,options:n}}function Mi(e,t){let r=le(),n=new Worker(e),o=new E,i=Ko(n,{tx$:o}).pipe(m(s=>{if(vt(s))for(let a of s.data.items)for(let c of a)c.location=`${new URL(c.location,r.base)}`;return s}),ie());return ue(t).pipe(m(s=>({type:0,data:Rs(s)}))).subscribe(o.next.bind(o)),{tx$:o,rx$:i}}function Li({document$:e}){let t=le(),r=je(new URL("../versions.json",t.base)).pipe(ce(()=>R)),n=r.pipe(m(o=>{let[,i]=t.base.match(/([^/]+)\/?$/);return o.find(({version:s,aliases:a})=>s===i||a.includes(i))||o[0]}));r.pipe(m(o=>new Map(o.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),S(o=>v(document.body,"click").pipe(x(i=>!i.metaKey&&!i.ctrlKey),ae(n),S(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&o.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&o.get(c)===s?R:(i.preventDefault(),I(c))}}return R}),S(i=>{let{version:s}=o.get(i);return hr(new URL(i)).pipe(m(a=>{let f=Oe().href.replace(t.base,"");return a.includes(f.split("#")[0])?new URL(`../${s}/${f}`,t.base):new URL(i)}))})))).subscribe(o=>pr(o)),Y([r,n]).subscribe(([o,i])=>{K(".md-header__topic").appendChild(ei(o,i))}),e.pipe(S(()=>n)).subscribe(o=>{var s;let i=__md_get("__outdated",sessionStorage);if(i===null){let a=((s=t.version)==null?void 0:s.default)||"latest";i=!o.aliases.includes(a),__md_set("__outdated",i,sessionStorage)}if(i)for(let a of te("outdated"))a.hidden=!1})}function ks(e,{rx$:t}){let r=(__search==null?void 0:__search.transform)||_i,{searchParams:n}=Oe();n.has("q")&&Ke("search",!0);let o=t.pipe(x(bt),oe(1),m(()=>n.get("q")||""));dt("search").pipe(x(a=>!a),oe(1)).subscribe(()=>{let a=new URL(location.href);a.searchParams.delete("q"),history.replaceState({},"",`${a}`)}),o.subscribe(a=>{a&&(e.value=a,e.focus())});let i=nr(e),s=A(v(e,"keyup"),v(e,"focus").pipe(ke(1)),o).pipe(m(()=>r(e.value)),N(""),B());return Y([s,i]).pipe(m(([a,c])=>({value:a,focus:c})),X(1))}function Ai(e,{tx$:t,rx$:r}){let n=new E,o=n.pipe(de(1));return n.pipe(J("value"),m(({value:i})=>({type:2,data:i}))).subscribe(t.next.bind(t)),n.pipe(J("focus")).subscribe(({focus:i})=>{i?(Ke("search",i),e.placeholder=""):e.placeholder=re("search.placeholder")}),v(e.form,"reset").pipe(ee(o)).subscribe(()=>e.focus()),ks(e,{tx$:t,rx$:r}).pipe(w(i=>n.next(i)),C(()=>n.complete()),m(i=>H({ref:e},i)),ie())}function Ci(e,{rx$:t},{query$:r}){let n=new E,o=Ao(e.parentElement).pipe(x(Boolean)),i=K(":scope > :first-child",e),s=K(":scope > :last-child",e),a=t.pipe(x(bt),oe(1));return n.pipe(ae(r),Mt(a)).subscribe(([{items:f},{value:u}])=>{if(u)switch(f.length){case 0:i.textContent=re("search.result.none");break;case 1:i.textContent=re("search.result.one");break;default:i.textContent=re("search.result.other",lr(f.length))}else i.textContent=re("search.result.placeholder")}),n.pipe(w(()=>s.innerHTML=""),S(({items:f})=>A(I(...f.slice(0,10)),I(...f.slice(10)).pipe(Ce(4),Nr(o),S(([u])=>u))))).subscribe(f=>s.appendChild(Jo(f))),t.pipe(x(vt),m(({data:f})=>f)).pipe(w(f=>n.next(f)),C(()=>n.complete()),m(f=>H({ref:e},f)))}function Hs(e,{query$:t}){return t.pipe(m(({value:r})=>{let n=Oe();return n.hash="",n.searchParams.delete("h"),n.searchParams.set("q",r),{url:n}}))}function Ri(e,t){let r=new E;return 
r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),v(e,"click").subscribe(n=>n.preventDefault()),Hs(e,t).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))}function ki(e,{rx$:t},{keyboard$:r}){let n=new E,o=_e("search-query"),i=A(v(o,"keydown"),v(o,"focus")).pipe(Le(fe),m(()=>o.value),B());return n.pipe(Ze(i),m(([{suggestions:a},c])=>{let f=c.split(/([\s-]+)/);if((a==null?void 0:a.length)&&f[f.length-1]){let u=a[a.length-1];u.startsWith(f[f.length-1])&&(f[f.length-1]=u)}else f.length=0;return f})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(x(({mode:a})=>a==="search")).subscribe(a=>{switch(a.type){case"ArrowRight":e.innerText.length&&o.selectionStart===o.value.length&&(o.value=e.innerText);break}}),t.pipe(x(vt),m(({data:a})=>a)).pipe(w(a=>n.next(a)),C(()=>n.complete()),m(()=>({ref:e})))}function Hi(e,{index$:t,keyboard$:r}){let n=le();try{let o=(__search==null?void 0:__search.worker)||n.search,i=Mi(o,t),s=_e("search-query",e),a=_e("search-result",e),{tx$:c,rx$:f}=i;c.pipe(x(Ti),ut(f.pipe(x(bt))),oe(1)).subscribe(c.next.bind(c)),r.pipe(x(({mode:l})=>l==="search")).subscribe(l=>{let d=Ie();switch(l.type){case"Enter":if(d===s){let h=new Map;for(let b of Q(":first-child [href]",a)){let U=b.firstElementChild;h.set(b,parseFloat(U.getAttribute("data-md-score")))}if(h.size){let[[b]]=[...h].sort(([,U],[,G])=>G-U);b.click()}l.claim()}break;case"Escape":case"Tab":Ke("search",!1),s.blur();break;case"ArrowUp":case"ArrowDown":if(typeof d=="undefined")s.focus();else{let h=[s,...Q(":not(details) > [href], summary, details[open] [href]",a)],b=Math.max(0,(Math.max(0,h.indexOf(d))+h.length+(l.type==="ArrowUp"?-1:1))%h.length);h[b].focus()}l.claim();break;default:s!==Ie()&&s.focus()}}),r.pipe(x(({mode:l})=>l==="global")).subscribe(l=>{switch(l.type){case"f":case"s":case"/":s.focus(),s.select(),l.claim();break}});let u=Ai(s,i),p=Ci(a,i,{query$:u});return A(u,p).pipe(et(...te("search-share",e).map(l=>Ri(l,{query$:u})),...te("search-suggest",e).map(l=>ki(l,i,{keyboard$:r}))))}catch(o){return e.hidden=!0,Se}}function Pi(e,{index$:t,location$:r}){return Y([t,r.pipe(N(Oe()),x(n=>!!n.searchParams.get("h")))]).pipe(m(([n,o])=>rn(n.config,!0)(o.searchParams.get("h"))),m(n=>{var s;let o=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,f=n(c);f.length>c.length&&o.set(a,f)}for(let[a,c]of o){let{childNodes:f}=M("span",null,c);a.replaceWith(...Array.from(f))}return{ref:e,nodes:o}}))}function Ps(e,{viewport$:t,main$:r}){let n=e.parentElement,o=n.offsetTop-n.parentElement.offsetTop;return Y([r,t]).pipe(m(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(o,Math.max(0,a-i))-o,{height:s,locked:a>=i+o})),B((i,s)=>i.height===s.height&&i.locked===s.locked))}function nn(e,n){var o=n,{header$:t}=o,r=un(o,["header$"]);let i=K(".md-sidebar__scrollwrap",e),{y:s}=qe(i);return P(()=>{let a=new E;return a.pipe(Ae(0,xe),ae(t)).subscribe({next([{height:c},{height:f}]){i.style.height=`${c-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),a.pipe(Le(xe),oe(1)).subscribe(()=>{for(let c of Q(".md-nav__link--active[href]",e)){let f=cr(c);if(typeof f!="undefined"){let u=c.offsetTop-f.offsetTop,{height:p}=he(f);f.scrollTo({top:u-p/2})}}}),Ps(e,r).pipe(w(c=>a.next(c)),C(()=>a.complete()),m(c=>H({ref:e},c)))})}function $i(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return 
_t(je(`${r}/releases/latest`).pipe(ce(()=>R),m(n=>({version:n.tag_name})),De({})),je(r).pipe(ce(()=>R),m(n=>({stars:n.stargazers_count,forks:n.forks_count})),De({}))).pipe(m(([n,o])=>H(H({},n),o)))}else{let r=`https://api.github.com/users/${e}`;return je(r).pipe(m(n=>({repositories:n.public_repos})),De({}))}}function Ii(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return je(r).pipe(ce(()=>R),m(({star_count:n,forks_count:o})=>({stars:n,forks:o})),De({}))}function ji(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,n]=t;return $i(r,n)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,n]=t;return Ii(r,n)}return R}var $s;function Is(e){return $s||($s=P(()=>{let t=__md_get("__source",sessionStorage);if(t)return I(t);if(te("consent").length){let n=__md_get("__consent");if(!(n&&n.github))return R}return ji(e.href).pipe(w(n=>__md_set("__source",n,sessionStorage)))}).pipe(ce(()=>R),x(t=>Object.keys(t).length>0),m(t=>({facts:t})),X(1)))}function Fi(e){let t=K(":scope > :last-child",e);return P(()=>{let r=new E;return r.subscribe(({facts:n})=>{t.appendChild(Xo(n)),t.classList.add("md-source__repository--active")}),Is(e).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))})}function js(e,{viewport$:t,header$:r}){return ve(document.body).pipe(S(()=>dr(e,{header$:r,viewport$:t})),m(({offset:{y:n}})=>({hidden:n>=10})),J("hidden"))}function Ui(e,t){return P(()=>{let r=new E;return r.subscribe({next({hidden:n}){e.hidden=n},complete(){e.hidden=!1}}),(Z("navigation.tabs.sticky")?I({hidden:!1}):js(e,t)).pipe(w(n=>r.next(n)),C(()=>r.complete()),m(n=>H({ref:e},n)))})}function Fs(e,{viewport$:t,header$:r}){let n=new Map,o=Q("[href^=\\#]",e);for(let a of o){let c=decodeURIComponent(a.hash.substring(1)),f=pe(`[id="${c}"]`);typeof f!="undefined"&&n.set(a,f)}let i=r.pipe(J("height"),m(({height:a})=>{let c=_e("main"),f=K(":scope > :first-child",c);return a+.8*(f.offsetTop-c.offsetTop)}),ie());return ve(document.body).pipe(J("height"),S(a=>P(()=>{let c=[];return I([...n].reduce((f,[u,p])=>{for(;c.length&&n.get(c[c.length-1]).tagName>=p.tagName;)c.pop();let l=p.offsetTop;for(;!l&&p.parentElement;)p=p.parentElement,l=p.offsetTop;return f.set([...c=[...c,u]].reverse(),l)},new Map))}).pipe(m(c=>new Map([...c].sort(([,f],[,u])=>f-u))),Ze(i),S(([c,f])=>t.pipe(Ur(([u,p],{offset:{y:l},size:d})=>{let h=l+d.height>=Math.floor(a.height);for(;p.length;){let[,b]=p[0];if(b-f=l&&!h)p=[u.pop(),...p];else break}return[u,p]},[[],[...c]]),B((u,p)=>u[0]===p[0]&&u[1]===p[1])))))).pipe(m(([a,c])=>({prev:a.map(([f])=>f),next:c.map(([f])=>f)})),N({prev:[],next:[]}),Ce(2,1),m(([a,c])=>a.prev.length{let o=new E,i=o.pipe(de(1));if(o.subscribe(({prev:s,next:a})=>{for(let[c]of a)c.classList.remove("md-nav__link--passed"),c.classList.remove("md-nav__link--active");for(let[c,[f]]of s.entries())f.classList.add("md-nav__link--passed"),f.classList.toggle("md-nav__link--active",c===s.length-1)}),Z("toc.follow")){let s=A(t.pipe(Re(1),m(()=>{})),t.pipe(Re(250),m(()=>"smooth")));o.pipe(x(({prev:a})=>a.length>0),ae(s)).subscribe(([{prev:a},c])=>{let[f]=a[a.length-1];if(f.offsetHeight){let u=cr(f);if(typeof u!="undefined"){let p=f.offsetTop-u.offsetTop,{height:l}=he(u);u.scrollTo({top:p-l/2,behavior:c})}}})}return Z("navigation.tracking")&&t.pipe(ee(i),J("offset"),Re(250),He(1),ee(n.pipe(He(1))),Tt({delay:250}),ae(o)).subscribe(([,{prev:s}])=>{let a=Oe(),c=s[s.length-1];if(c&&c.length){let[f]=c,{hash:u}=new URL(f.href);a.hash!==u&&(a.hash=u,history.replaceState({},"",`${a}`))}else 
a.hash="",history.replaceState({},"",`${a}`)}),Fs(e,{viewport$:t,header$:r}).pipe(w(s=>o.next(s)),C(()=>o.complete()),m(s=>H({ref:e},s)))})}function Us(e,{viewport$:t,main$:r,target$:n}){let o=t.pipe(m(({offset:{y:s}})=>s),Ce(2,1),m(([s,a])=>s>a&&a>0),B()),i=r.pipe(m(({active:s})=>s));return Y([i,o]).pipe(m(([s,a])=>!(s&&a)),B(),ee(n.pipe(He(1))),Fr(!0),Tt({delay:250}),m(s=>({hidden:s})))}function Wi(e,{viewport$:t,header$:r,main$:n,target$:o}){let i=new E,s=i.pipe(de(1));return i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(ee(s),J("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),Us(e,{viewport$:t,main$:n,target$:o}).pipe(w(a=>i.next(a)),C(()=>i.complete()),m(a=>H({ref:e},a)))}function Vi({document$:e,tablet$:t}){e.pipe(S(()=>Q(".md-toggle--indeterminate, [data-md-state=indeterminate]")),w(r=>{r.indeterminate=!0,r.checked=!1}),se(r=>v(r,"change").pipe(Wr(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ae(t)).subscribe(([r,n])=>{r.classList.remove("md-toggle--indeterminate"),n&&(r.checked=!1)})}function Ds(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function zi({document$:e}){e.pipe(S(()=>Q("[data-md-scrollfix]")),w(t=>t.removeAttribute("data-md-scrollfix")),x(Ds),se(t=>v(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function Ni({viewport$:e,tablet$:t}){Y([dt("search"),t]).pipe(m(([r,n])=>r&&!n),S(r=>I(r).pipe(ke(r?400:100))),ae(e)).subscribe(([r,{offset:{y:n}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${n}px`;else{let o=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",o&&window.scrollTo(0,o)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let n=e[r];typeof n=="string"?n=document.createTextNode(n):n.parentNode&&n.parentNode.removeChild(n),r?t.insertBefore(this.previousSibling,n):t.replaceChild(n,this)}}}));document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var tt=go(),vr=ko(),gt=jo(),on=Ro(),we=qo(),gr=Kr("(min-width: 960px)"),Ki=Kr("(min-width: 1220px)"),Qi=Fo(),Yi=le(),Bi=document.forms.namedItem("search")?(__search==null?void 0:__search.index)||je(new URL("search/search_index.json",Yi.base)):Se,an=new E;wi({alert$:an});Z("navigation.instant")&&Ei({document$:tt,location$:vr,viewport$:we});var qi;((qi=Yi.version)==null?void 0:qi.provider)==="mike"&&Li({document$:tt});A(vr,gt).pipe(ke(125)).subscribe(()=>{Ke("drawer",!1),Ke("search",!1)});on.pipe(x(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=pe("[href][rel=prev]");typeof t!="undefined"&&t.click();break;case"n":case".":let r=pe("[href][rel=next]");typeof 
r!="undefined"&&r.click();break}});Vi({document$:tt,tablet$:gr});zi({document$:tt});Ni({viewport$:we,tablet$:gr});var Qe=bi(_e("header"),{viewport$:we}),br=tt.pipe(m(()=>_e("main")),S(e=>yi(e,{viewport$:we,header$:Qe})),X(1)),Ws=A(...te("consent").map(e=>Yo(e,{target$:gt})),...te("dialog").map(e=>hi(e,{alert$:an})),...te("header").map(e=>vi(e,{viewport$:we,header$:Qe,main$:br})),...te("palette").map(e=>xi(e)),...te("search").map(e=>Hi(e,{index$:Bi,keyboard$:on})),...te("source").map(e=>Fi(e))),Vs=P(()=>A(...te("announce").map(e=>Qo(e)),...te("content").map(e=>di(e,{viewport$:we,target$:gt,print$:Qi})),...te("content").map(e=>Z("search.highlight")?Pi(e,{index$:Bi,location$:vr}):R),...te("header-title").map(e=>gi(e,{viewport$:we,header$:Qe})),...te("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Qr(Ki,()=>nn(e,{viewport$:we,header$:Qe,main$:br})):Qr(gr,()=>nn(e,{viewport$:we,header$:Qe,main$:br}))),...te("tabs").map(e=>Ui(e,{viewport$:we,header$:Qe})),...te("toc").map(e=>Di(e,{viewport$:we,header$:Qe,target$:gt})),...te("top").map(e=>Wi(e,{viewport$:we,header$:Qe,main$:br,target$:gt})))),Gi=tt.pipe(S(()=>Vs),et(Ws),X(1));Gi.subscribe();window.document$=tt;window.location$=vr;window.target$=gt;window.keyboard$=on;window.viewport$=we;window.tablet$=gr;window.screen$=Ki;window.print$=Qi;window.alert$=an;window.component$=Gi;})(); +//# sourceMappingURL=bundle.5a2dcb6a.min.js.map + diff --git a/2.0.0/assets/javascripts/bundle.5a2dcb6a.min.js.map b/2.0.0/assets/javascripts/bundle.5a2dcb6a.min.js.map new file mode 100644 index 00000000..34e26a3a --- /dev/null +++ b/2.0.0/assets/javascripts/bundle.5a2dcb6a.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/url-polyfill/url-polyfill.js", "node_modules/rxjs/node_modules/tslib/tslib.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "node_modules/array-flat-polyfill/index.mjs", "src/assets/javascripts/bundle.ts", "node_modules/unfetch/polyfill/index.js", "node_modules/rxjs/node_modules/tslib/modules/index.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", 
"node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/concatMap.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", 
"node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/sample.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/assets/javascripts/browser/document/index.ts", "src/assets/javascripts/browser/element/_/index.ts", "src/assets/javascripts/browser/element/focus/index.ts", "src/assets/javascripts/browser/element/offset/_/index.ts", "src/assets/javascripts/browser/element/offset/content/index.ts", "node_modules/resize-observer-polyfill/dist/ResizeObserver.es.js", "src/assets/javascripts/browser/element/size/_/index.ts", "src/assets/javascripts/browser/element/size/content/index.ts", "src/assets/javascripts/browser/element/visibility/index.ts", "src/assets/javascripts/browser/toggle/index.ts", "src/assets/javascripts/browser/keyboard/index.ts", "src/assets/javascripts/browser/location/_/index.ts", "src/assets/javascripts/utilities/h/index.ts", "src/assets/javascripts/utilities/string/index.ts", "src/assets/javascripts/browser/location/hash/index.ts", "src/assets/javascripts/browser/media/index.ts", "src/assets/javascripts/browser/request/index.ts", "src/assets/javascripts/browser/script/index.ts", "src/assets/javascripts/browser/viewport/offset/index.ts", "src/assets/javascripts/browser/viewport/size/index.ts", "src/assets/javascripts/browser/viewport/_/index.ts", "src/assets/javascripts/browser/viewport/at/index.ts", "src/assets/javascripts/browser/worker/index.ts", "src/assets/javascripts/_/index.ts", "src/assets/javascripts/components/_/index.ts", "src/assets/javascripts/components/announce/index.ts", "src/assets/javascripts/components/consent/index.ts", "src/assets/javascripts/components/content/code/_/index.ts", "src/assets/javascripts/templates/tooltip/index.tsx", "src/assets/javascripts/templates/annotation/index.tsx", "src/assets/javascripts/templates/clipboard/index.tsx", "src/assets/javascripts/templates/search/index.tsx", "src/assets/javascripts/templates/source/index.tsx", "src/assets/javascripts/templates/tabbed/index.tsx", "src/assets/javascripts/templates/table/index.tsx", 
"src/assets/javascripts/templates/version/index.tsx", "src/assets/javascripts/components/content/annotation/_/index.ts", "src/assets/javascripts/components/content/annotation/list/index.ts", "src/assets/javascripts/components/content/code/mermaid/index.ts", "src/assets/javascripts/components/content/details/index.ts", "src/assets/javascripts/components/content/table/index.ts", "src/assets/javascripts/components/content/tabs/index.ts", "src/assets/javascripts/components/content/_/index.ts", "src/assets/javascripts/components/dialog/index.ts", "src/assets/javascripts/components/header/_/index.ts", "src/assets/javascripts/components/header/title/index.ts", "src/assets/javascripts/components/main/index.ts", "src/assets/javascripts/components/palette/index.ts", "src/assets/javascripts/integrations/clipboard/index.ts", "src/assets/javascripts/integrations/sitemap/index.ts", "src/assets/javascripts/integrations/instant/index.ts", "src/assets/javascripts/integrations/search/document/index.ts", "src/assets/javascripts/integrations/search/highlighter/index.ts", "src/assets/javascripts/integrations/search/query/transform/index.ts", "src/assets/javascripts/integrations/search/worker/message/index.ts", "src/assets/javascripts/integrations/search/worker/_/index.ts", "src/assets/javascripts/integrations/version/index.ts", "src/assets/javascripts/components/search/query/index.ts", "src/assets/javascripts/components/search/result/index.ts", "src/assets/javascripts/components/search/share/index.ts", "src/assets/javascripts/components/search/suggest/index.ts", "src/assets/javascripts/components/search/_/index.ts", "src/assets/javascripts/components/search/highlight/index.ts", "src/assets/javascripts/components/sidebar/index.ts", "src/assets/javascripts/components/source/facts/github/index.ts", "src/assets/javascripts/components/source/facts/gitlab/index.ts", "src/assets/javascripts/components/source/facts/_/index.ts", "src/assets/javascripts/components/source/_/index.ts", "src/assets/javascripts/components/tabs/index.ts", "src/assets/javascripts/components/toc/index.ts", "src/assets/javascripts/components/top/index.ts", "src/assets/javascripts/patches/indeterminate/index.ts", "src/assets/javascripts/patches/scrollfix/index.ts", "src/assets/javascripts/patches/scrolllock/index.ts", "src/assets/javascripts/polyfills/index.ts"], + "sourceRoot": "../../../..", + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? 
define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n 
document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. 
This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "(function(global) {\r\n /**\r\n * Polyfill URLSearchParams\r\n *\r\n * Inspired from : https://github.com/WebReflection/url-search-params/blob/master/src/url-search-params.js\r\n */\r\n\r\n var checkIfIteratorIsSupported = function() {\r\n try {\r\n return !!Symbol.iterator;\r\n } catch (error) {\r\n return false;\r\n }\r\n };\r\n\r\n\r\n var iteratorSupported = checkIfIteratorIsSupported();\r\n\r\n var createIterator = function(items) {\r\n var iterator = {\r\n next: function() {\r\n var value = items.shift();\r\n return { done: value === void 0, value: value };\r\n }\r\n };\r\n\r\n if (iteratorSupported) {\r\n iterator[Symbol.iterator] = function() {\r\n return iterator;\r\n };\r\n }\r\n\r\n return iterator;\r\n };\r\n\r\n /**\r\n * Search param name and values should be encoded according to https://url.spec.whatwg.org/#urlencoded-serializing\r\n * encodeURIComponent() produces the same result except encoding spaces as `%20` instead of `+`.\r\n */\r\n var serializeParam = function(value) {\r\n return encodeURIComponent(value).replace(/%20/g, '+');\r\n };\r\n\r\n var deserializeParam = function(value) {\r\n return decodeURIComponent(String(value).replace(/\\+/g, ' '));\r\n };\r\n\r\n var polyfillURLSearchParams = function() {\r\n\r\n var URLSearchParams = function(searchString) {\r\n Object.defineProperty(this, '_entries', { writable: true, value: {} });\r\n var typeofSearchString = typeof searchString;\r\n\r\n if (typeofSearchString === 'undefined') {\r\n // do nothing\r\n } else if (typeofSearchString === 'string') {\r\n if (searchString !== '') {\r\n this._fromString(searchString);\r\n }\r\n } else if (searchString instanceof URLSearchParams) {\r\n var _this = this;\r\n searchString.forEach(function(value, name) {\r\n _this.append(name, value);\r\n });\r\n } else if ((searchString !== null) && (typeofSearchString === 'object')) {\r\n if (Object.prototype.toString.call(searchString) === '[object Array]') {\r\n for (var i = 0; i < searchString.length; i++) {\r\n var entry = searchString[i];\r\n if ((Object.prototype.toString.call(entry) === '[object Array]') || (entry.length !== 2)) {\r\n this.append(entry[0], entry[1]);\r\n } else {\r\n throw new TypeError('Expected [string, any] as entry at index ' + i + ' of URLSearchParams\\'s input');\r\n }\r\n }\r\n } else {\r\n for (var key in searchString) {\r\n if (searchString.hasOwnProperty(key)) {\r\n this.append(key, searchString[key]);\r\n }\r\n }\r\n }\r\n } else {\r\n throw new TypeError('Unsupported input\\'s type for URLSearchParams');\r\n }\r\n };\r\n\r\n var proto = URLSearchParams.prototype;\r\n\r\n proto.append = function(name, value) 
{\r\n if (name in this._entries) {\r\n this._entries[name].push(String(value));\r\n } else {\r\n this._entries[name] = [String(value)];\r\n }\r\n };\r\n\r\n proto.delete = function(name) {\r\n delete this._entries[name];\r\n };\r\n\r\n proto.get = function(name) {\r\n return (name in this._entries) ? this._entries[name][0] : null;\r\n };\r\n\r\n proto.getAll = function(name) {\r\n return (name in this._entries) ? this._entries[name].slice(0) : [];\r\n };\r\n\r\n proto.has = function(name) {\r\n return (name in this._entries);\r\n };\r\n\r\n proto.set = function(name, value) {\r\n this._entries[name] = [String(value)];\r\n };\r\n\r\n proto.forEach = function(callback, thisArg) {\r\n var entries;\r\n for (var name in this._entries) {\r\n if (this._entries.hasOwnProperty(name)) {\r\n entries = this._entries[name];\r\n for (var i = 0; i < entries.length; i++) {\r\n callback.call(thisArg, entries[i], name, this);\r\n }\r\n }\r\n }\r\n };\r\n\r\n proto.keys = function() {\r\n var items = [];\r\n this.forEach(function(value, name) {\r\n items.push(name);\r\n });\r\n return createIterator(items);\r\n };\r\n\r\n proto.values = function() {\r\n var items = [];\r\n this.forEach(function(value) {\r\n items.push(value);\r\n });\r\n return createIterator(items);\r\n };\r\n\r\n proto.entries = function() {\r\n var items = [];\r\n this.forEach(function(value, name) {\r\n items.push([name, value]);\r\n });\r\n return createIterator(items);\r\n };\r\n\r\n if (iteratorSupported) {\r\n proto[Symbol.iterator] = proto.entries;\r\n }\r\n\r\n proto.toString = function() {\r\n var searchArray = [];\r\n this.forEach(function(value, name) {\r\n searchArray.push(serializeParam(name) + '=' + serializeParam(value));\r\n });\r\n return searchArray.join('&');\r\n };\r\n\r\n\r\n global.URLSearchParams = URLSearchParams;\r\n };\r\n\r\n var checkIfURLSearchParamsSupported = function() {\r\n try {\r\n var URLSearchParams = global.URLSearchParams;\r\n\r\n return (\r\n (new URLSearchParams('?a=1').toString() === 'a=1') &&\r\n (typeof URLSearchParams.prototype.set === 'function') &&\r\n (typeof URLSearchParams.prototype.entries === 'function')\r\n );\r\n } catch (e) {\r\n return false;\r\n }\r\n };\r\n\r\n if (!checkIfURLSearchParamsSupported()) {\r\n polyfillURLSearchParams();\r\n }\r\n\r\n var proto = global.URLSearchParams.prototype;\r\n\r\n if (typeof proto.sort !== 'function') {\r\n proto.sort = function() {\r\n var _this = this;\r\n var items = [];\r\n this.forEach(function(value, name) {\r\n items.push([name, value]);\r\n if (!_this._entries) {\r\n _this.delete(name);\r\n }\r\n });\r\n items.sort(function(a, b) {\r\n if (a[0] < b[0]) {\r\n return -1;\r\n } else if (a[0] > b[0]) {\r\n return +1;\r\n } else {\r\n return 0;\r\n }\r\n });\r\n if (_this._entries) { // force reset because IE keeps keys index\r\n _this._entries = {};\r\n }\r\n for (var i = 0; i < items.length; i++) {\r\n this.append(items[i][0], items[i][1]);\r\n }\r\n };\r\n }\r\n\r\n if (typeof proto._fromString !== 'function') {\r\n Object.defineProperty(proto, '_fromString', {\r\n enumerable: false,\r\n configurable: false,\r\n writable: false,\r\n value: function(searchString) {\r\n if (this._entries) {\r\n this._entries = {};\r\n } else {\r\n var keys = [];\r\n this.forEach(function(value, name) {\r\n keys.push(name);\r\n });\r\n for (var i = 0; i < keys.length; i++) {\r\n this.delete(keys[i]);\r\n }\r\n }\r\n\r\n searchString = searchString.replace(/^\\?/, '');\r\n var attributes = searchString.split('&');\r\n var attribute;\r\n for (var i = 0; i < 
attributes.length; i++) {\r\n attribute = attributes[i].split('=');\r\n this.append(\r\n deserializeParam(attribute[0]),\r\n (attribute.length > 1) ? deserializeParam(attribute[1]) : ''\r\n );\r\n }\r\n }\r\n });\r\n }\r\n\r\n // HTMLAnchorElement\r\n\r\n})(\r\n (typeof global !== 'undefined') ? global\r\n : ((typeof window !== 'undefined') ? window\r\n : ((typeof self !== 'undefined') ? self : this))\r\n);\r\n\r\n(function(global) {\r\n /**\r\n * Polyfill URL\r\n *\r\n * Inspired from : https://github.com/arv/DOM-URL-Polyfill/blob/master/src/url.js\r\n */\r\n\r\n var checkIfURLIsSupported = function() {\r\n try {\r\n var u = new global.URL('b', 'http://a');\r\n u.pathname = 'c d';\r\n return (u.href === 'http://a/c%20d') && u.searchParams;\r\n } catch (e) {\r\n return false;\r\n }\r\n };\r\n\r\n\r\n var polyfillURL = function() {\r\n var _URL = global.URL;\r\n\r\n var URL = function(url, base) {\r\n if (typeof url !== 'string') url = String(url);\r\n if (base && typeof base !== 'string') base = String(base);\r\n\r\n // Only create another document if the base is different from current location.\r\n var doc = document, baseElement;\r\n if (base && (global.location === void 0 || base !== global.location.href)) {\r\n base = base.toLowerCase();\r\n doc = document.implementation.createHTMLDocument('');\r\n baseElement = doc.createElement('base');\r\n baseElement.href = base;\r\n doc.head.appendChild(baseElement);\r\n try {\r\n if (baseElement.href.indexOf(base) !== 0) throw new Error(baseElement.href);\r\n } catch (err) {\r\n throw new Error('URL unable to set base ' + base + ' due to ' + err);\r\n }\r\n }\r\n\r\n var anchorElement = doc.createElement('a');\r\n anchorElement.href = url;\r\n if (baseElement) {\r\n doc.body.appendChild(anchorElement);\r\n anchorElement.href = anchorElement.href; // force href to refresh\r\n }\r\n\r\n var inputElement = doc.createElement('input');\r\n inputElement.type = 'url';\r\n inputElement.value = url;\r\n\r\n if (anchorElement.protocol === ':' || !/:/.test(anchorElement.href) || (!inputElement.checkValidity() && !base)) {\r\n throw new TypeError('Invalid URL');\r\n }\r\n\r\n Object.defineProperty(this, '_anchorElement', {\r\n value: anchorElement\r\n });\r\n\r\n\r\n // create a linked searchParams which reflect its changes on URL\r\n var searchParams = new global.URLSearchParams(this.search);\r\n var enableSearchUpdate = true;\r\n var enableSearchParamsUpdate = true;\r\n var _this = this;\r\n ['append', 'delete', 'set'].forEach(function(methodName) {\r\n var method = searchParams[methodName];\r\n searchParams[methodName] = function() {\r\n method.apply(searchParams, arguments);\r\n if (enableSearchUpdate) {\r\n enableSearchParamsUpdate = false;\r\n _this.search = searchParams.toString();\r\n enableSearchParamsUpdate = true;\r\n }\r\n };\r\n });\r\n\r\n Object.defineProperty(this, 'searchParams', {\r\n value: searchParams,\r\n enumerable: true\r\n });\r\n\r\n var search = void 0;\r\n Object.defineProperty(this, '_updateSearchParams', {\r\n enumerable: false,\r\n configurable: false,\r\n writable: false,\r\n value: function() {\r\n if (this.search !== search) {\r\n search = this.search;\r\n if (enableSearchParamsUpdate) {\r\n enableSearchUpdate = false;\r\n this.searchParams._fromString(this.search);\r\n enableSearchUpdate = true;\r\n }\r\n }\r\n }\r\n });\r\n };\r\n\r\n var proto = URL.prototype;\r\n\r\n var linkURLWithAnchorAttribute = function(attributeName) {\r\n Object.defineProperty(proto, attributeName, {\r\n get: function() {\r\n return 
this._anchorElement[attributeName];\r\n },\r\n set: function(value) {\r\n this._anchorElement[attributeName] = value;\r\n },\r\n enumerable: true\r\n });\r\n };\r\n\r\n ['hash', 'host', 'hostname', 'port', 'protocol']\r\n .forEach(function(attributeName) {\r\n linkURLWithAnchorAttribute(attributeName);\r\n });\r\n\r\n Object.defineProperty(proto, 'search', {\r\n get: function() {\r\n return this._anchorElement['search'];\r\n },\r\n set: function(value) {\r\n this._anchorElement['search'] = value;\r\n this._updateSearchParams();\r\n },\r\n enumerable: true\r\n });\r\n\r\n Object.defineProperties(proto, {\r\n\r\n 'toString': {\r\n get: function() {\r\n var _this = this;\r\n return function() {\r\n return _this.href;\r\n };\r\n }\r\n },\r\n\r\n 'href': {\r\n get: function() {\r\n return this._anchorElement.href.replace(/\\?$/, '');\r\n },\r\n set: function(value) {\r\n this._anchorElement.href = value;\r\n this._updateSearchParams();\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'pathname': {\r\n get: function() {\r\n return this._anchorElement.pathname.replace(/(^\\/?)/, '/');\r\n },\r\n set: function(value) {\r\n this._anchorElement.pathname = value;\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'origin': {\r\n get: function() {\r\n // get expected port from protocol\r\n var expectedPort = { 'http:': 80, 'https:': 443, 'ftp:': 21 }[this._anchorElement.protocol];\r\n // add port to origin if, expected port is different than actual port\r\n // and it is not empty f.e http://foo:8080\r\n // 8080 != 80 && 8080 != ''\r\n var addPortToOrigin = this._anchorElement.port != expectedPort &&\r\n this._anchorElement.port !== '';\r\n\r\n return this._anchorElement.protocol +\r\n '//' +\r\n this._anchorElement.hostname +\r\n (addPortToOrigin ? (':' + this._anchorElement.port) : '');\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'password': { // TODO\r\n get: function() {\r\n return '';\r\n },\r\n set: function(value) {\r\n },\r\n enumerable: true\r\n },\r\n\r\n 'username': { // TODO\r\n get: function() {\r\n return '';\r\n },\r\n set: function(value) {\r\n },\r\n enumerable: true\r\n },\r\n });\r\n\r\n URL.createObjectURL = function(blob) {\r\n return _URL.createObjectURL.apply(_URL, arguments);\r\n };\r\n\r\n URL.revokeObjectURL = function(url) {\r\n return _URL.revokeObjectURL.apply(_URL, arguments);\r\n };\r\n\r\n global.URL = URL;\r\n\r\n };\r\n\r\n if (!checkIfURLIsSupported()) {\r\n polyfillURL();\r\n }\r\n\r\n if ((global.location !== void 0) && !('origin' in global.location)) {\r\n var getOrigin = function() {\r\n return global.location.protocol + '//' + global.location.hostname + (global.location.port ? (':' + global.location.port) : '');\r\n };\r\n\r\n try {\r\n Object.defineProperty(global.location, 'origin', {\r\n get: getOrigin,\r\n enumerable: true\r\n });\r\n } catch (e) {\r\n setInterval(function() {\r\n global.location.origin = getOrigin();\r\n }, 100);\r\n }\r\n }\r\n\r\n})(\r\n (typeof global !== 'undefined') ? global\r\n : ((typeof window !== 'undefined') ? window\r\n : ((typeof self !== 'undefined') ? self : this))\r\n);\r\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? 
m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n 
fakeElement.style.fontSize = '12pt'; // Reset box model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 
'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && 
value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName 
=== 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) {\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n 
var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "Array.prototype.flat||Object.defineProperty(Array.prototype,\"flat\",{configurable:!0,value:function r(){var t=isNaN(arguments[0])?1:Number(arguments[0]);return t?Array.prototype.reduce.call(this,function(a,e){return Array.isArray(e)?a.push.apply(a,r.call(e,t-1)):a.push(e),a},[]):Array.prototype.slice.call(this)},writable:!0}),Array.prototype.flatMap||Object.defineProperty(Array.prototype,\"flatMap\",{configurable:!0,value:function(r){return Array.prototype.map.apply(this,arguments).flat()},writable:!0})\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"array-flat-polyfill\"\nimport \"focus-visible\"\nimport \"unfetch/polyfill\"\nimport \"url-polyfill\"\n\nimport {\n EMPTY,\n NEVER,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getOptionalElement,\n requestJSON,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountSearch,\n mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantLoading,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget()\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? 
__search?.index || requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up instant loading, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantLoading({ document$, location$, viewport$ })\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"[href][rel=prev]\")\n if (typeof prev !== \"undefined\")\n prev.click()\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"[href][rel=next]\")\n if (typeof next !== \"undefined\")\n next.click()\n break\n }\n })\n\n/* Set up patches */\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? 
at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, { viewport$, header$, target$ })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.component$ = component$ /* Component observable */\n", "self.fetch||(self.fetch=function(e,n){return n=n||{},new Promise(function(t,s){var r=new XMLHttpRequest,o=[],u=[],i={},a=function(){return{ok:2==(r.status/100|0),statusText:r.statusText,status:r.status,url:r.responseURL,text:function(){return Promise.resolve(r.responseText)},json:function(){return Promise.resolve(r.responseText).then(JSON.parse)},blob:function(){return Promise.resolve(new Blob([r.response]))},clone:a,headers:{keys:function(){return o},entries:function(){return u},get:function(e){return i[e.toLowerCase()]},has:function(e){return e.toLowerCase()in i}}}};for(var c in r.open(n.method||\"get\",e,!0),r.onload=function(){r.getAllResponseHeaders().replace(/^(.*?):[^\\S\\n]*([\\s\\S]*?)$/gm,function(e,n,t){o.push(n=n.toLowerCase()),u.push([n,t]),i[n]=i[n]?i[n]+\",\"+t:t}),t(a())},r.onerror=s,r.withCredentials=\"include\"==n.credentials,n.headers)r.setRequestHeader(c,n.headers[c]);r.send(n.body||null)})});\n", "import tslib from '../tslib.js';\r\nconst {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n} = tslib;\r\nexport {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n};\r\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 
null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n ReplaySubject,\n Subject,\n fromEvent\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch document\n *\n * Documents are implemented as subjects, so all downstream observables are\n * automatically updated when a new document is emitted.\n *\n * @returns Document subject\n */\nexport function watchDocument(): Subject {\n const document$ = new ReplaySubject(1)\n fromEvent(document, \"DOMContentLoaded\", { once: true })\n .subscribe(() => document$.next(document))\n\n /* Return document */\n return document$\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve all elements matching the query selector\n *\n * @template T - Element type\n *\n * @param selector - Query selector\n * @param node - Node of reference\n *\n * @returns Elements\n */\nexport function getElements(\n selector: T, node?: ParentNode\n): HTMLElementTagNameMap[T][]\n\nexport function getElements(\n selector: string, node?: ParentNode\n): T[]\n\nexport function getElements(\n selector: string, node: ParentNode = document\n): T[] {\n return Array.from(node.querySelectorAll(selector))\n}\n\n/**\n * Retrieve an element matching a query selector or throw a reference error\n *\n * Note that this function assumes that the element is present. If unsure if an\n * element is existent, use the `getOptionalElement` function instead.\n *\n * @template T - Element type\n *\n * @param selector - Query selector\n * @param node - Node of reference\n *\n * @returns Element\n */\nexport function getElement(\n selector: T, node?: ParentNode\n): HTMLElementTagNameMap[T]\n\nexport function getElement(\n selector: string, node?: ParentNode\n): T\n\nexport function getElement(\n selector: string, node: ParentNode = document\n): T {\n const el = getOptionalElement(selector, node)\n if (typeof el === \"undefined\")\n throw new ReferenceError(\n `Missing element: expected \"${selector}\" to be present`\n )\n\n /* Return element */\n return el\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Retrieve an optional element matching the query selector\n *\n * @template T - Element type\n *\n * @param selector - Query selector\n * @param node - Node of reference\n *\n * @returns Element or nothing\n */\nexport function getOptionalElement(\n selector: T, node?: ParentNode\n): HTMLElementTagNameMap[T] | undefined\n\nexport function getOptionalElement(\n selector: string, node?: ParentNode\n): T | undefined\n\nexport function getOptionalElement(\n selector: string, node: ParentNode = document\n): T | undefined {\n return node.querySelector(selector) || undefined\n}\n\n/**\n * Retrieve the currently active element\n *\n * @returns Element or nothing\n */\nexport function getActiveElement(): HTMLElement | undefined {\n return document.activeElement instanceof HTMLElement\n ? 
document.activeElement || undefined\n : undefined\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n debounceTime,\n distinctUntilChanged,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\nimport { getActiveElement } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch element focus\n *\n * Previously, this function used `focus` and `blur` events to determine whether\n * an element is focused, but this doesn't work if there are focusable elements\n * within the elements itself. A better solutions are `focusin` and `focusout`\n * events, which bubble up the tree and allow for more fine-grained control.\n *\n * `debounceTime` is necessary, because when a focus change happens inside an\n * element, the observable would first emit `false` and then `true` again.\n *\n * @param el - Element\n *\n * @returns Element focus observable\n */\nexport function watchElementFocus(\n el: HTMLElement\n): Observable {\n return merge(\n fromEvent(document.body, \"focusin\"),\n fromEvent(document.body, \"focusout\")\n )\n .pipe(\n debounceTime(1),\n map(() => {\n const active = getActiveElement()\n return typeof active !== \"undefined\"\n ? el.contains(active)\n : false\n }),\n startWith(el === getActiveElement()),\n distinctUntilChanged()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n animationFrameScheduler,\n auditTime,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Element offset\n */\nexport interface ElementOffset {\n x: number /* Horizontal offset */\n y: number /* Vertical offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element offset\n *\n * @param el - Element\n *\n * @returns Element offset\n */\nexport function getElementOffset(\n el: HTMLElement\n): ElementOffset {\n return {\n x: el.offsetLeft,\n y: el.offsetTop\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch element offset\n *\n * @param el - Element\n *\n * @returns Element offset observable\n */\nexport function watchElementOffset(\n el: HTMLElement\n): Observable {\n return merge(\n fromEvent(window, \"load\"),\n fromEvent(window, \"resize\")\n )\n .pipe(\n auditTime(0, animationFrameScheduler),\n map(() => getElementOffset(el)),\n startWith(getElementOffset(el))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n animationFrameScheduler,\n auditTime,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\nimport { ElementOffset } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element content offset (= scroll offset)\n *\n * @param el - Element\n *\n * @returns Element content offset\n */\nexport function getElementContentOffset(\n el: HTMLElement\n): ElementOffset {\n return {\n x: el.scrollLeft,\n y: el.scrollTop\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch element content offset\n *\n * @param el - Element\n *\n * @returns Element content offset observable\n */\nexport function watchElementContentOffset(\n el: HTMLElement\n): Observable {\n return merge(\n fromEvent(el, \"scroll\"),\n fromEvent(window, \"resize\")\n )\n .pipe(\n auditTime(0, animationFrameScheduler),\n map(() => getElementContentOffset(el)),\n startWith(getElementContentOffset(el))\n )\n}\n", "/**\r\n * A collection of shims that provide minimal functionality of the ES6 collections.\r\n *\r\n * These implementations are not meant to be used outside of the ResizeObserver\r\n * modules as they cover only a limited range of use cases.\r\n */\r\n/* eslint-disable require-jsdoc, valid-jsdoc */\r\nvar MapShim = (function () {\r\n if (typeof Map !== 'undefined') {\r\n return Map;\r\n }\r\n /**\r\n * Returns index in provided array that matches the specified key.\r\n *\r\n * @param {Array} arr\r\n * @param {*} key\r\n * @returns {number}\r\n */\r\n function getIndex(arr, key) {\r\n var result = -1;\r\n arr.some(function (entry, index) {\r\n if (entry[0] === key) {\r\n result = index;\r\n return true;\r\n }\r\n return false;\r\n });\r\n return result;\r\n }\r\n return /** @class */ (function () {\r\n function class_1() {\r\n this.__entries__ = [];\r\n }\r\n Object.defineProperty(class_1.prototype, \"size\", {\r\n /**\r\n * @returns {boolean}\r\n */\r\n get: function () {\r\n return this.__entries__.length;\r\n },\r\n enumerable: true,\r\n configurable: true\r\n });\r\n /**\r\n * @param {*} key\r\n * @returns {*}\r\n */\r\n class_1.prototype.get = function (key) {\r\n var index = getIndex(this.__entries__, key);\r\n var entry = this.__entries__[index];\r\n return entry && entry[1];\r\n };\r\n /**\r\n * @param {*} key\r\n * @param {*} value\r\n * @returns {void}\r\n */\r\n class_1.prototype.set = function (key, value) {\r\n var index = getIndex(this.__entries__, key);\r\n if (~index) {\r\n this.__entries__[index][1] = value;\r\n }\r\n else {\r\n this.__entries__.push([key, value]);\r\n }\r\n };\r\n /**\r\n * @param {*} key\r\n * @returns {void}\r\n */\r\n class_1.prototype.delete = function (key) {\r\n var entries = this.__entries__;\r\n var index = getIndex(entries, key);\r\n if (~index) {\r\n entries.splice(index, 1);\r\n }\r\n };\r\n /**\r\n * @param {*} key\r\n * @returns {void}\r\n */\r\n class_1.prototype.has = function (key) {\r\n return !!~getIndex(this.__entries__, key);\r\n };\r\n /**\r\n * @returns {void}\r\n */\r\n class_1.prototype.clear = function () {\r\n this.__entries__.splice(0);\r\n };\r\n 
/**\r\n * @param {Function} callback\r\n * @param {*} [ctx=null]\r\n * @returns {void}\r\n */\r\n class_1.prototype.forEach = function (callback, ctx) {\r\n if (ctx === void 0) { ctx = null; }\r\n for (var _i = 0, _a = this.__entries__; _i < _a.length; _i++) {\r\n var entry = _a[_i];\r\n callback.call(ctx, entry[1], entry[0]);\r\n }\r\n };\r\n return class_1;\r\n }());\r\n})();\n\n/**\r\n * Detects whether window and document objects are available in current environment.\r\n */\r\nvar isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined' && window.document === document;\n\n// Returns global object of a current environment.\r\nvar global$1 = (function () {\r\n if (typeof global !== 'undefined' && global.Math === Math) {\r\n return global;\r\n }\r\n if (typeof self !== 'undefined' && self.Math === Math) {\r\n return self;\r\n }\r\n if (typeof window !== 'undefined' && window.Math === Math) {\r\n return window;\r\n }\r\n // eslint-disable-next-line no-new-func\r\n return Function('return this')();\r\n})();\n\n/**\r\n * A shim for the requestAnimationFrame which falls back to the setTimeout if\r\n * first one is not supported.\r\n *\r\n * @returns {number} Requests' identifier.\r\n */\r\nvar requestAnimationFrame$1 = (function () {\r\n if (typeof requestAnimationFrame === 'function') {\r\n // It's required to use a bounded function because IE sometimes throws\r\n // an \"Invalid calling object\" error if rAF is invoked without the global\r\n // object on the left hand side.\r\n return requestAnimationFrame.bind(global$1);\r\n }\r\n return function (callback) { return setTimeout(function () { return callback(Date.now()); }, 1000 / 60); };\r\n})();\n\n// Defines minimum timeout before adding a trailing call.\r\nvar trailingTimeout = 2;\r\n/**\r\n * Creates a wrapper function which ensures that provided callback will be\r\n * invoked only once during the specified delay period.\r\n *\r\n * @param {Function} callback - Function to be invoked after the delay period.\r\n * @param {number} delay - Delay after which to invoke callback.\r\n * @returns {Function}\r\n */\r\nfunction throttle (callback, delay) {\r\n var leadingCall = false, trailingCall = false, lastCallTime = 0;\r\n /**\r\n * Invokes the original callback function and schedules new invocation if\r\n * the \"proxy\" was called during current request.\r\n *\r\n * @returns {void}\r\n */\r\n function resolvePending() {\r\n if (leadingCall) {\r\n leadingCall = false;\r\n callback();\r\n }\r\n if (trailingCall) {\r\n proxy();\r\n }\r\n }\r\n /**\r\n * Callback invoked after the specified delay. 
It will further postpone\r\n * invocation of the original function delegating it to the\r\n * requestAnimationFrame.\r\n *\r\n * @returns {void}\r\n */\r\n function timeoutCallback() {\r\n requestAnimationFrame$1(resolvePending);\r\n }\r\n /**\r\n * Schedules invocation of the original function.\r\n *\r\n * @returns {void}\r\n */\r\n function proxy() {\r\n var timeStamp = Date.now();\r\n if (leadingCall) {\r\n // Reject immediately following calls.\r\n if (timeStamp - lastCallTime < trailingTimeout) {\r\n return;\r\n }\r\n // Schedule new call to be in invoked when the pending one is resolved.\r\n // This is important for \"transitions\" which never actually start\r\n // immediately so there is a chance that we might miss one if change\r\n // happens amids the pending invocation.\r\n trailingCall = true;\r\n }\r\n else {\r\n leadingCall = true;\r\n trailingCall = false;\r\n setTimeout(timeoutCallback, delay);\r\n }\r\n lastCallTime = timeStamp;\r\n }\r\n return proxy;\r\n}\n\n// Minimum delay before invoking the update of observers.\r\nvar REFRESH_DELAY = 20;\r\n// A list of substrings of CSS properties used to find transition events that\r\n// might affect dimensions of observed elements.\r\nvar transitionKeys = ['top', 'right', 'bottom', 'left', 'width', 'height', 'size', 'weight'];\r\n// Check if MutationObserver is available.\r\nvar mutationObserverSupported = typeof MutationObserver !== 'undefined';\r\n/**\r\n * Singleton controller class which handles updates of ResizeObserver instances.\r\n */\r\nvar ResizeObserverController = /** @class */ (function () {\r\n /**\r\n * Creates a new instance of ResizeObserverController.\r\n *\r\n * @private\r\n */\r\n function ResizeObserverController() {\r\n /**\r\n * Indicates whether DOM listeners have been added.\r\n *\r\n * @private {boolean}\r\n */\r\n this.connected_ = false;\r\n /**\r\n * Tells that controller has subscribed for Mutation Events.\r\n *\r\n * @private {boolean}\r\n */\r\n this.mutationEventsAdded_ = false;\r\n /**\r\n * Keeps reference to the instance of MutationObserver.\r\n *\r\n * @private {MutationObserver}\r\n */\r\n this.mutationsObserver_ = null;\r\n /**\r\n * A list of connected observers.\r\n *\r\n * @private {Array}\r\n */\r\n this.observers_ = [];\r\n this.onTransitionEnd_ = this.onTransitionEnd_.bind(this);\r\n this.refresh = throttle(this.refresh.bind(this), REFRESH_DELAY);\r\n }\r\n /**\r\n * Adds observer to observers list.\r\n *\r\n * @param {ResizeObserverSPI} observer - Observer to be added.\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.addObserver = function (observer) {\r\n if (!~this.observers_.indexOf(observer)) {\r\n this.observers_.push(observer);\r\n }\r\n // Add listeners if they haven't been added yet.\r\n if (!this.connected_) {\r\n this.connect_();\r\n }\r\n };\r\n /**\r\n * Removes observer from observers list.\r\n *\r\n * @param {ResizeObserverSPI} observer - Observer to be removed.\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.removeObserver = function (observer) {\r\n var observers = this.observers_;\r\n var index = observers.indexOf(observer);\r\n // Remove observer if it's present in registry.\r\n if (~index) {\r\n observers.splice(index, 1);\r\n }\r\n // Remove listeners if controller has no connected observers.\r\n if (!observers.length && this.connected_) {\r\n this.disconnect_();\r\n }\r\n };\r\n /**\r\n * Invokes the update of observers. 
It will continue running updates insofar\r\n * it detects changes.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.refresh = function () {\r\n var changesDetected = this.updateObservers_();\r\n // Continue running updates if changes have been detected as there might\r\n // be future ones caused by CSS transitions.\r\n if (changesDetected) {\r\n this.refresh();\r\n }\r\n };\r\n /**\r\n * Updates every observer from observers list and notifies them of queued\r\n * entries.\r\n *\r\n * @private\r\n * @returns {boolean} Returns \"true\" if any observer has detected changes in\r\n * dimensions of it's elements.\r\n */\r\n ResizeObserverController.prototype.updateObservers_ = function () {\r\n // Collect observers that have active observations.\r\n var activeObservers = this.observers_.filter(function (observer) {\r\n return observer.gatherActive(), observer.hasActive();\r\n });\r\n // Deliver notifications in a separate cycle in order to avoid any\r\n // collisions between observers, e.g. when multiple instances of\r\n // ResizeObserver are tracking the same element and the callback of one\r\n // of them changes content dimensions of the observed target. Sometimes\r\n // this may result in notifications being blocked for the rest of observers.\r\n activeObservers.forEach(function (observer) { return observer.broadcastActive(); });\r\n return activeObservers.length > 0;\r\n };\r\n /**\r\n * Initializes DOM listeners.\r\n *\r\n * @private\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.connect_ = function () {\r\n // Do nothing if running in a non-browser environment or if listeners\r\n // have been already added.\r\n if (!isBrowser || this.connected_) {\r\n return;\r\n }\r\n // Subscription to the \"Transitionend\" event is used as a workaround for\r\n // delayed transitions. This way it's possible to capture at least the\r\n // final state of an element.\r\n document.addEventListener('transitionend', this.onTransitionEnd_);\r\n window.addEventListener('resize', this.refresh);\r\n if (mutationObserverSupported) {\r\n this.mutationsObserver_ = new MutationObserver(this.refresh);\r\n this.mutationsObserver_.observe(document, {\r\n attributes: true,\r\n childList: true,\r\n characterData: true,\r\n subtree: true\r\n });\r\n }\r\n else {\r\n document.addEventListener('DOMSubtreeModified', this.refresh);\r\n this.mutationEventsAdded_ = true;\r\n }\r\n this.connected_ = true;\r\n };\r\n /**\r\n * Removes DOM listeners.\r\n *\r\n * @private\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.disconnect_ = function () {\r\n // Do nothing if running in a non-browser environment or if listeners\r\n // have been already removed.\r\n if (!isBrowser || !this.connected_) {\r\n return;\r\n }\r\n document.removeEventListener('transitionend', this.onTransitionEnd_);\r\n window.removeEventListener('resize', this.refresh);\r\n if (this.mutationsObserver_) {\r\n this.mutationsObserver_.disconnect();\r\n }\r\n if (this.mutationEventsAdded_) {\r\n document.removeEventListener('DOMSubtreeModified', this.refresh);\r\n }\r\n this.mutationsObserver_ = null;\r\n this.mutationEventsAdded_ = false;\r\n this.connected_ = false;\r\n };\r\n /**\r\n * \"Transitionend\" event handler.\r\n *\r\n * @private\r\n * @param {TransitionEvent} event\r\n * @returns {void}\r\n */\r\n ResizeObserverController.prototype.onTransitionEnd_ = function (_a) {\r\n var _b = _a.propertyName, propertyName = _b === void 0 ? 
'' : _b;\r\n // Detect whether transition may affect dimensions of an element.\r\n var isReflowProperty = transitionKeys.some(function (key) {\r\n return !!~propertyName.indexOf(key);\r\n });\r\n if (isReflowProperty) {\r\n this.refresh();\r\n }\r\n };\r\n /**\r\n * Returns instance of the ResizeObserverController.\r\n *\r\n * @returns {ResizeObserverController}\r\n */\r\n ResizeObserverController.getInstance = function () {\r\n if (!this.instance_) {\r\n this.instance_ = new ResizeObserverController();\r\n }\r\n return this.instance_;\r\n };\r\n /**\r\n * Holds reference to the controller's instance.\r\n *\r\n * @private {ResizeObserverController}\r\n */\r\n ResizeObserverController.instance_ = null;\r\n return ResizeObserverController;\r\n}());\n\n/**\r\n * Defines non-writable/enumerable properties of the provided target object.\r\n *\r\n * @param {Object} target - Object for which to define properties.\r\n * @param {Object} props - Properties to be defined.\r\n * @returns {Object} Target object.\r\n */\r\nvar defineConfigurable = (function (target, props) {\r\n for (var _i = 0, _a = Object.keys(props); _i < _a.length; _i++) {\r\n var key = _a[_i];\r\n Object.defineProperty(target, key, {\r\n value: props[key],\r\n enumerable: false,\r\n writable: false,\r\n configurable: true\r\n });\r\n }\r\n return target;\r\n});\n\n/**\r\n * Returns the global object associated with provided element.\r\n *\r\n * @param {Object} target\r\n * @returns {Object}\r\n */\r\nvar getWindowOf = (function (target) {\r\n // Assume that the element is an instance of Node, which means that it\r\n // has the \"ownerDocument\" property from which we can retrieve a\r\n // corresponding global object.\r\n var ownerGlobal = target && target.ownerDocument && target.ownerDocument.defaultView;\r\n // Return the local global object if it's not possible extract one from\r\n // provided element.\r\n return ownerGlobal || global$1;\r\n});\n\n// Placeholder of an empty content rectangle.\r\nvar emptyRect = createRectInit(0, 0, 0, 0);\r\n/**\r\n * Converts provided string to a number.\r\n *\r\n * @param {number|string} value\r\n * @returns {number}\r\n */\r\nfunction toFloat(value) {\r\n return parseFloat(value) || 0;\r\n}\r\n/**\r\n * Extracts borders size from provided styles.\r\n *\r\n * @param {CSSStyleDeclaration} styles\r\n * @param {...string} positions - Borders positions (top, right, ...)\r\n * @returns {number}\r\n */\r\nfunction getBordersSize(styles) {\r\n var positions = [];\r\n for (var _i = 1; _i < arguments.length; _i++) {\r\n positions[_i - 1] = arguments[_i];\r\n }\r\n return positions.reduce(function (size, position) {\r\n var value = styles['border-' + position + '-width'];\r\n return size + toFloat(value);\r\n }, 0);\r\n}\r\n/**\r\n * Extracts paddings sizes from provided styles.\r\n *\r\n * @param {CSSStyleDeclaration} styles\r\n * @returns {Object} Paddings box.\r\n */\r\nfunction getPaddings(styles) {\r\n var positions = ['top', 'right', 'bottom', 'left'];\r\n var paddings = {};\r\n for (var _i = 0, positions_1 = positions; _i < positions_1.length; _i++) {\r\n var position = positions_1[_i];\r\n var value = styles['padding-' + position];\r\n paddings[position] = toFloat(value);\r\n }\r\n return paddings;\r\n}\r\n/**\r\n * Calculates content rectangle of provided SVG element.\r\n *\r\n * @param {SVGGraphicsElement} target - Element content rectangle of which needs\r\n * to be calculated.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction getSVGContentRect(target) {\r\n var bbox = target.getBBox();\r\n 
return createRectInit(0, 0, bbox.width, bbox.height);\r\n}\r\n/**\r\n * Calculates content rectangle of provided HTMLElement.\r\n *\r\n * @param {HTMLElement} target - Element for which to calculate the content rectangle.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction getHTMLElementContentRect(target) {\r\n // Client width & height properties can't be\r\n // used exclusively as they provide rounded values.\r\n var clientWidth = target.clientWidth, clientHeight = target.clientHeight;\r\n // By this condition we can catch all non-replaced inline, hidden and\r\n // detached elements. Though elements with width & height properties less\r\n // than 0.5 will be discarded as well.\r\n //\r\n // Without it we would need to implement separate methods for each of\r\n // those cases and it's not possible to perform a precise and performance\r\n // effective test for hidden elements. E.g. even jQuery's ':visible' filter\r\n // gives wrong results for elements with width & height less than 0.5.\r\n if (!clientWidth && !clientHeight) {\r\n return emptyRect;\r\n }\r\n var styles = getWindowOf(target).getComputedStyle(target);\r\n var paddings = getPaddings(styles);\r\n var horizPad = paddings.left + paddings.right;\r\n var vertPad = paddings.top + paddings.bottom;\r\n // Computed styles of width & height are being used because they are the\r\n // only dimensions available to JS that contain non-rounded values. It could\r\n // be possible to utilize the getBoundingClientRect if only it's data wasn't\r\n // affected by CSS transformations let alone paddings, borders and scroll bars.\r\n var width = toFloat(styles.width), height = toFloat(styles.height);\r\n // Width & height include paddings and borders when the 'border-box' box\r\n // model is applied (except for IE).\r\n if (styles.boxSizing === 'border-box') {\r\n // Following conditions are required to handle Internet Explorer which\r\n // doesn't include paddings and borders to computed CSS dimensions.\r\n //\r\n // We can say that if CSS dimensions + paddings are equal to the \"client\"\r\n // properties then it's either IE, and thus we don't need to subtract\r\n // anything, or an element merely doesn't have paddings/borders styles.\r\n if (Math.round(width + horizPad) !== clientWidth) {\r\n width -= getBordersSize(styles, 'left', 'right') + horizPad;\r\n }\r\n if (Math.round(height + vertPad) !== clientHeight) {\r\n height -= getBordersSize(styles, 'top', 'bottom') + vertPad;\r\n }\r\n }\r\n // Following steps can't be applied to the document's root element as its\r\n // client[Width/Height] properties represent viewport area of the window.\r\n // Besides, it's as well not necessary as the itself neither has\r\n // rendered scroll bars nor it can be clipped.\r\n if (!isDocumentElement(target)) {\r\n // In some browsers (only in Firefox, actually) CSS width & height\r\n // include scroll bars size which can be removed at this step as scroll\r\n // bars are the only difference between rounded dimensions + paddings\r\n // and \"client\" properties, though that is not always true in Chrome.\r\n var vertScrollbar = Math.round(width + horizPad) - clientWidth;\r\n var horizScrollbar = Math.round(height + vertPad) - clientHeight;\r\n // Chrome has a rather weird rounding of \"client\" properties.\r\n // E.g. for an element with content width of 314.2px it sometimes gives\r\n // the client width of 315px and for the width of 314.7px it may give\r\n // 314px. And it doesn't happen all the time. 
So just ignore this delta\r\n // as a non-relevant.\r\n if (Math.abs(vertScrollbar) !== 1) {\r\n width -= vertScrollbar;\r\n }\r\n if (Math.abs(horizScrollbar) !== 1) {\r\n height -= horizScrollbar;\r\n }\r\n }\r\n return createRectInit(paddings.left, paddings.top, width, height);\r\n}\r\n/**\r\n * Checks whether provided element is an instance of the SVGGraphicsElement.\r\n *\r\n * @param {Element} target - Element to be checked.\r\n * @returns {boolean}\r\n */\r\nvar isSVGGraphicsElement = (function () {\r\n // Some browsers, namely IE and Edge, don't have the SVGGraphicsElement\r\n // interface.\r\n if (typeof SVGGraphicsElement !== 'undefined') {\r\n return function (target) { return target instanceof getWindowOf(target).SVGGraphicsElement; };\r\n }\r\n // If it's so, then check that element is at least an instance of the\r\n // SVGElement and that it has the \"getBBox\" method.\r\n // eslint-disable-next-line no-extra-parens\r\n return function (target) { return (target instanceof getWindowOf(target).SVGElement &&\r\n typeof target.getBBox === 'function'); };\r\n})();\r\n/**\r\n * Checks whether provided element is a document element ().\r\n *\r\n * @param {Element} target - Element to be checked.\r\n * @returns {boolean}\r\n */\r\nfunction isDocumentElement(target) {\r\n return target === getWindowOf(target).document.documentElement;\r\n}\r\n/**\r\n * Calculates an appropriate content rectangle for provided html or svg element.\r\n *\r\n * @param {Element} target - Element content rectangle of which needs to be calculated.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction getContentRect(target) {\r\n if (!isBrowser) {\r\n return emptyRect;\r\n }\r\n if (isSVGGraphicsElement(target)) {\r\n return getSVGContentRect(target);\r\n }\r\n return getHTMLElementContentRect(target);\r\n}\r\n/**\r\n * Creates rectangle with an interface of the DOMRectReadOnly.\r\n * Spec: https://drafts.fxtf.org/geometry/#domrectreadonly\r\n *\r\n * @param {DOMRectInit} rectInit - Object with rectangle's x/y coordinates and dimensions.\r\n * @returns {DOMRectReadOnly}\r\n */\r\nfunction createReadOnlyRect(_a) {\r\n var x = _a.x, y = _a.y, width = _a.width, height = _a.height;\r\n // If DOMRectReadOnly is available use it as a prototype for the rectangle.\r\n var Constr = typeof DOMRectReadOnly !== 'undefined' ? 
DOMRectReadOnly : Object;\r\n var rect = Object.create(Constr.prototype);\r\n // Rectangle's properties are not writable and non-enumerable.\r\n defineConfigurable(rect, {\r\n x: x, y: y, width: width, height: height,\r\n top: y,\r\n right: x + width,\r\n bottom: height + y,\r\n left: x\r\n });\r\n return rect;\r\n}\r\n/**\r\n * Creates DOMRectInit object based on the provided dimensions and the x/y coordinates.\r\n * Spec: https://drafts.fxtf.org/geometry/#dictdef-domrectinit\r\n *\r\n * @param {number} x - X coordinate.\r\n * @param {number} y - Y coordinate.\r\n * @param {number} width - Rectangle's width.\r\n * @param {number} height - Rectangle's height.\r\n * @returns {DOMRectInit}\r\n */\r\nfunction createRectInit(x, y, width, height) {\r\n return { x: x, y: y, width: width, height: height };\r\n}\n\n/**\r\n * Class that is responsible for computations of the content rectangle of\r\n * provided DOM element and for keeping track of it's changes.\r\n */\r\nvar ResizeObservation = /** @class */ (function () {\r\n /**\r\n * Creates an instance of ResizeObservation.\r\n *\r\n * @param {Element} target - Element to be observed.\r\n */\r\n function ResizeObservation(target) {\r\n /**\r\n * Broadcasted width of content rectangle.\r\n *\r\n * @type {number}\r\n */\r\n this.broadcastWidth = 0;\r\n /**\r\n * Broadcasted height of content rectangle.\r\n *\r\n * @type {number}\r\n */\r\n this.broadcastHeight = 0;\r\n /**\r\n * Reference to the last observed content rectangle.\r\n *\r\n * @private {DOMRectInit}\r\n */\r\n this.contentRect_ = createRectInit(0, 0, 0, 0);\r\n this.target = target;\r\n }\r\n /**\r\n * Updates content rectangle and tells whether it's width or height properties\r\n * have changed since the last broadcast.\r\n *\r\n * @returns {boolean}\r\n */\r\n ResizeObservation.prototype.isActive = function () {\r\n var rect = getContentRect(this.target);\r\n this.contentRect_ = rect;\r\n return (rect.width !== this.broadcastWidth ||\r\n rect.height !== this.broadcastHeight);\r\n };\r\n /**\r\n * Updates 'broadcastWidth' and 'broadcastHeight' properties with a data\r\n * from the corresponding properties of the last observed content rectangle.\r\n *\r\n * @returns {DOMRectInit} Last observed content rectangle.\r\n */\r\n ResizeObservation.prototype.broadcastRect = function () {\r\n var rect = this.contentRect_;\r\n this.broadcastWidth = rect.width;\r\n this.broadcastHeight = rect.height;\r\n return rect;\r\n };\r\n return ResizeObservation;\r\n}());\n\nvar ResizeObserverEntry = /** @class */ (function () {\r\n /**\r\n * Creates an instance of ResizeObserverEntry.\r\n *\r\n * @param {Element} target - Element that is being observed.\r\n * @param {DOMRectInit} rectInit - Data of the element's content rectangle.\r\n */\r\n function ResizeObserverEntry(target, rectInit) {\r\n var contentRect = createReadOnlyRect(rectInit);\r\n // According to the specification following properties are not writable\r\n // and are also not enumerable in the native implementation.\r\n //\r\n // Property accessors are not being used as they'd require to define a\r\n // private WeakMap storage which may cause memory leaks in browsers that\r\n // don't support this type of collections.\r\n defineConfigurable(this, { target: target, contentRect: contentRect });\r\n }\r\n return ResizeObserverEntry;\r\n}());\n\nvar ResizeObserverSPI = /** @class */ (function () {\r\n /**\r\n * Creates a new instance of ResizeObserver.\r\n *\r\n * @param {ResizeObserverCallback} callback - Callback function that is invoked\r\n * 
when one of the observed elements changes it's content dimensions.\r\n * @param {ResizeObserverController} controller - Controller instance which\r\n * is responsible for the updates of observer.\r\n * @param {ResizeObserver} callbackCtx - Reference to the public\r\n * ResizeObserver instance which will be passed to callback function.\r\n */\r\n function ResizeObserverSPI(callback, controller, callbackCtx) {\r\n /**\r\n * Collection of resize observations that have detected changes in dimensions\r\n * of elements.\r\n *\r\n * @private {Array}\r\n */\r\n this.activeObservations_ = [];\r\n /**\r\n * Registry of the ResizeObservation instances.\r\n *\r\n * @private {Map}\r\n */\r\n this.observations_ = new MapShim();\r\n if (typeof callback !== 'function') {\r\n throw new TypeError('The callback provided as parameter 1 is not a function.');\r\n }\r\n this.callback_ = callback;\r\n this.controller_ = controller;\r\n this.callbackCtx_ = callbackCtx;\r\n }\r\n /**\r\n * Starts observing provided element.\r\n *\r\n * @param {Element} target - Element to be observed.\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.observe = function (target) {\r\n if (!arguments.length) {\r\n throw new TypeError('1 argument required, but only 0 present.');\r\n }\r\n // Do nothing if current environment doesn't have the Element interface.\r\n if (typeof Element === 'undefined' || !(Element instanceof Object)) {\r\n return;\r\n }\r\n if (!(target instanceof getWindowOf(target).Element)) {\r\n throw new TypeError('parameter 1 is not of type \"Element\".');\r\n }\r\n var observations = this.observations_;\r\n // Do nothing if element is already being observed.\r\n if (observations.has(target)) {\r\n return;\r\n }\r\n observations.set(target, new ResizeObservation(target));\r\n this.controller_.addObserver(this);\r\n // Force the update of observations.\r\n this.controller_.refresh();\r\n };\r\n /**\r\n * Stops observing provided element.\r\n *\r\n * @param {Element} target - Element to stop observing.\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.unobserve = function (target) {\r\n if (!arguments.length) {\r\n throw new TypeError('1 argument required, but only 0 present.');\r\n }\r\n // Do nothing if current environment doesn't have the Element interface.\r\n if (typeof Element === 'undefined' || !(Element instanceof Object)) {\r\n return;\r\n }\r\n if (!(target instanceof getWindowOf(target).Element)) {\r\n throw new TypeError('parameter 1 is not of type \"Element\".');\r\n }\r\n var observations = this.observations_;\r\n // Do nothing if element is not being observed.\r\n if (!observations.has(target)) {\r\n return;\r\n }\r\n observations.delete(target);\r\n if (!observations.size) {\r\n this.controller_.removeObserver(this);\r\n }\r\n };\r\n /**\r\n * Stops observing all elements.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.disconnect = function () {\r\n this.clearActive();\r\n this.observations_.clear();\r\n this.controller_.removeObserver(this);\r\n };\r\n /**\r\n * Collects observation instances the associated element of which has changed\r\n * it's content rectangle.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.gatherActive = function () {\r\n var _this = this;\r\n this.clearActive();\r\n this.observations_.forEach(function (observation) {\r\n if (observation.isActive()) {\r\n _this.activeObservations_.push(observation);\r\n }\r\n });\r\n };\r\n /**\r\n * Invokes initial callback function with a list of ResizeObserverEntry\r\n * instances 
collected from active resize observations.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.broadcastActive = function () {\r\n // Do nothing if observer doesn't have active observations.\r\n if (!this.hasActive()) {\r\n return;\r\n }\r\n var ctx = this.callbackCtx_;\r\n // Create ResizeObserverEntry instance for every active observation.\r\n var entries = this.activeObservations_.map(function (observation) {\r\n return new ResizeObserverEntry(observation.target, observation.broadcastRect());\r\n });\r\n this.callback_.call(ctx, entries, ctx);\r\n this.clearActive();\r\n };\r\n /**\r\n * Clears the collection of active observations.\r\n *\r\n * @returns {void}\r\n */\r\n ResizeObserverSPI.prototype.clearActive = function () {\r\n this.activeObservations_.splice(0);\r\n };\r\n /**\r\n * Tells whether observer has active observations.\r\n *\r\n * @returns {boolean}\r\n */\r\n ResizeObserverSPI.prototype.hasActive = function () {\r\n return this.activeObservations_.length > 0;\r\n };\r\n return ResizeObserverSPI;\r\n}());\n\n// Registry of internal observers. If WeakMap is not available use current shim\r\n// for the Map collection as it has all required methods and because WeakMap\r\n// can't be fully polyfilled anyway.\r\nvar observers = typeof WeakMap !== 'undefined' ? new WeakMap() : new MapShim();\r\n/**\r\n * ResizeObserver API. Encapsulates the ResizeObserver SPI implementation\r\n * exposing only those methods and properties that are defined in the spec.\r\n */\r\nvar ResizeObserver = /** @class */ (function () {\r\n /**\r\n * Creates a new instance of ResizeObserver.\r\n *\r\n * @param {ResizeObserverCallback} callback - Callback that is invoked when\r\n * dimensions of the observed elements change.\r\n */\r\n function ResizeObserver(callback) {\r\n if (!(this instanceof ResizeObserver)) {\r\n throw new TypeError('Cannot call a class as a function.');\r\n }\r\n if (!arguments.length) {\r\n throw new TypeError('1 argument required, but only 0 present.');\r\n }\r\n var controller = ResizeObserverController.getInstance();\r\n var observer = new ResizeObserverSPI(callback, controller, this);\r\n observers.set(this, observer);\r\n }\r\n return ResizeObserver;\r\n}());\r\n// Expose public methods of ResizeObserver.\r\n[\r\n 'observe',\r\n 'unobserve',\r\n 'disconnect'\r\n].forEach(function (method) {\r\n ResizeObserver.prototype[method] = function () {\r\n var _a;\r\n return (_a = observers.get(this))[method].apply(_a, arguments);\r\n };\r\n});\n\nvar index = (function () {\r\n // Export existing implementation if available.\r\n if (typeof global$1.ResizeObserver !== 'undefined') {\r\n return global$1.ResizeObserver;\r\n }\r\n return ResizeObserver;\r\n})();\n\nexport default index;\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 
MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport ResizeObserver from \"resize-observer-polyfill\"\nimport {\n NEVER,\n Observable,\n Subject,\n defer,\n filter,\n finalize,\n map,\n merge,\n of,\n shareReplay,\n startWith,\n switchMap,\n tap\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Element offset\n */\nexport interface ElementSize {\n width: number /* Element width */\n height: number /* Element height */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Resize observer entry subject\n */\nconst entry$ = new Subject()\n\n/**\n * Resize observer observable\n *\n * This observable will create a `ResizeObserver` on the first subscription\n * and will automatically terminate it when there are no more subscribers.\n * It's quite important to centralize observation in a single `ResizeObserver`,\n * as the performance difference can be quite dramatic, as the link shows.\n *\n * @see https://bit.ly/3iIYfEm - Google Groups on performance\n */\nconst observer$ = defer(() => of(\n new ResizeObserver(entries => {\n for (const entry of entries)\n entry$.next(entry)\n })\n))\n .pipe(\n switchMap(observer => merge(NEVER, of(observer))\n .pipe(\n finalize(() => observer.disconnect())\n )\n ),\n shareReplay(1)\n )\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element size\n *\n * @param el - Element\n *\n * @returns Element size\n */\nexport function getElementSize(\n el: HTMLElement\n): ElementSize {\n return {\n width: el.offsetWidth,\n height: el.offsetHeight\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch element size\n *\n * This function returns an observable that subscribes to a single internal\n * instance of `ResizeObserver` upon subscription, and emit resize events until\n * termination. Note that this function should not be called with the same\n * element twice, as the first unsubscription will terminate observation.\n *\n * Sadly, we can't use the `DOMRect` objects returned by the observer, because\n * we need the emitted values to be consistent with `getElementSize`, which will\n * return the used values (rounded) and not actual values (unrounded). Thus, we\n * use the `offset*` properties. 
See the linked GitHub issue.\n *\n * @see https://bit.ly/3m0k3he - GitHub issue\n *\n * @param el - Element\n *\n * @returns Element size observable\n */\nexport function watchElementSize(\n el: HTMLElement\n): Observable {\n return observer$\n .pipe(\n tap(observer => observer.observe(el)),\n switchMap(observer => entry$\n .pipe(\n filter(({ target }) => target === el),\n finalize(() => observer.unobserve(el)),\n map(() => getElementSize(el))\n )\n ),\n startWith(getElementSize(el))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { ElementSize } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve element content size (= scroll width and height)\n *\n * @param el - Element\n *\n * @returns Element content size\n */\nexport function getElementContentSize(\n el: HTMLElement\n): ElementSize {\n return {\n width: el.scrollWidth,\n height: el.scrollHeight\n }\n}\n\n/**\n * Retrieve the overflowing container of an element, if any\n *\n * @param el - Element\n *\n * @returns Overflowing container or nothing\n */\nexport function getElementContainer(\n el: HTMLElement\n): HTMLElement | undefined {\n let parent = el.parentElement\n while (parent)\n if (\n el.scrollWidth <= parent.scrollWidth &&\n el.scrollHeight <= parent.scrollHeight\n )\n parent = (el = parent).parentElement\n else\n break\n\n /* Return overflowing container */\n return parent ? el : undefined\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n NEVER,\n Observable,\n Subject,\n defer,\n distinctUntilChanged,\n filter,\n finalize,\n map,\n merge,\n of,\n shareReplay,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport {\n getElementContentSize,\n getElementSize,\n watchElementContentOffset\n} from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Intersection observer entry subject\n */\nconst entry$ = new Subject()\n\n/**\n * Intersection observer observable\n *\n * This observable will create an `IntersectionObserver` on first subscription\n * and will automatically terminate it when there are no more subscribers.\n *\n * @see https://bit.ly/3iIYfEm - Google Groups on performance\n */\nconst observer$ = defer(() => of(\n new IntersectionObserver(entries => {\n for (const entry of entries)\n entry$.next(entry)\n }, {\n threshold: 0\n })\n))\n .pipe(\n switchMap(observer => merge(NEVER, of(observer))\n .pipe(\n finalize(() => observer.disconnect())\n )\n ),\n shareReplay(1)\n )\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch element visibility\n *\n * @param el - Element\n *\n * @returns Element visibility observable\n */\nexport function watchElementVisibility(\n el: HTMLElement\n): Observable {\n return observer$\n .pipe(\n tap(observer => observer.observe(el)),\n switchMap(observer => entry$\n .pipe(\n filter(({ target }) => target === el),\n finalize(() => observer.unobserve(el)),\n map(({ isIntersecting }) => isIntersecting)\n )\n )\n )\n}\n\n/**\n * Watch element boundary\n *\n * This function returns an observable which emits whether the bottom content\n * boundary (= scroll offset) of an element is within a certain threshold.\n *\n * @param el - Element\n * @param threshold - Threshold\n *\n * @returns Element boundary observable\n */\nexport function watchElementBoundary(\n el: HTMLElement, threshold = 16\n): Observable {\n return watchElementContentOffset(el)\n .pipe(\n map(({ y }) => {\n const visible = getElementSize(el)\n const content = getElementContentSize(el)\n return y >= (\n content.height - visible.height - threshold\n )\n }),\n distinctUntilChanged()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n startWith\n} from \"rxjs\"\n\nimport { getElement } from \"../element\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Toggle\n */\nexport type Toggle =\n | \"drawer\" /* Toggle for drawer */\n | \"search\" /* Toggle for search */\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Toggle map\n */\nconst toggles: Record = {\n drawer: getElement(\"[data-md-toggle=drawer]\"),\n search: getElement(\"[data-md-toggle=search]\")\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve the value of a toggle\n *\n * @param name - Toggle\n *\n * @returns Toggle value\n */\nexport function getToggle(name: Toggle): boolean {\n return toggles[name].checked\n}\n\n/**\n * Set toggle\n *\n * Simulating a click event seems to be the most cross-browser compatible way\n * of changing the value while also emitting a `change` event. Before, Material\n * used `CustomEvent` to programmatically change the value of a toggle, but this\n * is a much simpler and cleaner solution which doesn't require a polyfill.\n *\n * @param name - Toggle\n * @param value - Toggle value\n */\nexport function setToggle(name: Toggle, value: boolean): void {\n if (toggles[name].checked !== value)\n toggles[name].click()\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch toggle\n *\n * @param name - Toggle\n *\n * @returns Toggle value observable\n */\nexport function watchToggle(name: Toggle): Observable {\n const el = toggles[name]\n return fromEvent(el, \"change\")\n .pipe(\n map(() => el.checked),\n startWith(el.checked)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n filter,\n fromEvent,\n map,\n share\n} from \"rxjs\"\n\nimport { getActiveElement } from \"../element\"\nimport { getToggle } from \"../toggle\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Keyboard mode\n */\nexport type KeyboardMode =\n | \"global\" /* Global */\n | \"search\" /* Search is open */\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Keyboard\n */\nexport interface Keyboard {\n mode: KeyboardMode /* Keyboard mode */\n type: string /* Key type */\n claim(): void /* Key claim */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Check whether an element may receive keyboard input\n *\n * @param el - Element\n * @param type - Key type\n *\n * @returns Test result\n */\nfunction isSusceptibleToKeyboard(\n el: HTMLElement, type: string\n): boolean {\n switch (el.constructor) {\n\n /* Input elements */\n case HTMLInputElement:\n /* @ts-expect-error - omit unnecessary type cast */\n if (el.type === \"radio\")\n return /^Arrow/.test(type)\n else\n return true\n\n /* Select element and textarea */\n case HTMLSelectElement:\n case HTMLTextAreaElement:\n return true\n\n /* Everything else */\n default:\n return el.isContentEditable\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch keyboard\n *\n * @returns Keyboard observable\n */\nexport function watchKeyboard(): Observable {\n return fromEvent(window, \"keydown\")\n .pipe(\n filter(ev => !(ev.metaKey || ev.ctrlKey)),\n map(ev => ({\n mode: getToggle(\"search\") ? \"search\" : \"global\",\n type: ev.key,\n claim() {\n ev.preventDefault()\n ev.stopPropagation()\n }\n } as Keyboard)),\n filter(({ mode, type }) => {\n if (mode === \"global\") {\n const active = getActiveElement()\n if (typeof active !== \"undefined\")\n return !isSusceptibleToKeyboard(active, type)\n }\n return true\n }),\n share()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Subject } from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve location\n *\n * This function returns a `URL` object (and not `Location`) to normalize the\n * typings across the application. Furthermore, locations need to be tracked\n * without setting them and `Location` is a singleton which represents the\n * current location.\n *\n * @returns URL\n */\nexport function getLocation(): URL {\n return new URL(location.href)\n}\n\n/**\n * Set location\n *\n * @param url - URL to change to\n */\nexport function setLocation(url: URL): void {\n location.href = url.href\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch location\n *\n * @returns Location subject\n */\nexport function watchLocation(): Subject {\n return new Subject()\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { JSX as JSXInternal } from \"preact\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * HTML attributes\n */\ntype Attributes =\n & JSXInternal.HTMLAttributes\n & JSXInternal.SVGAttributes\n & Record\n\n/**\n * Child element\n */\ntype Child =\n | HTMLElement\n | Text\n | string\n | number\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Append a child node to an element\n *\n * @param el - Element\n * @param child - Child node(s)\n */\nfunction appendChild(el: HTMLElement, child: Child | Child[]): void {\n\n /* Handle primitive types (including raw HTML) */\n if (typeof child === \"string\" || typeof child === \"number\") {\n el.innerHTML += child.toString()\n\n /* Handle nodes */\n } else if (child instanceof Node) {\n el.appendChild(child)\n\n /* Handle nested children */\n } else if (Array.isArray(child)) {\n for (const node of child)\n appendChild(el, node)\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * JSX factory\n *\n * @template T - Element type\n *\n * @param tag - HTML tag\n * @param attributes - HTML attributes\n * @param children - Child elements\n *\n * @returns Element\n */\nexport function h(\n tag: T, attributes?: Attributes | null, ...children: Child[]\n): HTMLElementTagNameMap[T]\n\nexport function h(\n tag: string, attributes?: Attributes | null, ...children: Child[]\n): T\n\nexport function h(\n tag: string, attributes?: Attributes | null, ...children: Child[]\n): T {\n const el = document.createElement(tag)\n\n /* Set attributes, if any */\n if (attributes)\n for (const attr of Object.keys(attributes)) {\n if (typeof attributes[attr] === \"undefined\")\n continue\n\n /* Set default attribute or boolean */\n if (typeof attributes[attr] !== \"boolean\")\n el.setAttribute(attr, attributes[attr])\n else\n el.setAttribute(attr, \"\")\n }\n\n /* Append child nodes */\n for (const child of children)\n appendChild(el, child)\n\n /* Return element */\n return el as T\n}\n\n/* ----------------------------------------------------------------------------\n * Namespace\n * ------------------------------------------------------------------------- */\n\nexport declare namespace h {\n namespace JSX {\n type Element = HTMLElement\n type IntrinsicElements = JSXInternal.IntrinsicElements\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be 
included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Truncate a string after the given number of characters\n *\n * This is not a very reasonable approach, since the summaries kind of suck.\n * It would be better to create something more intelligent, highlighting the\n * search occurrences and making a better summary out of it, but this note was\n * written three years ago, so who knows if we'll ever fix it.\n *\n * @param value - Value to be truncated\n * @param n - Number of characters\n *\n * @returns Truncated value\n */\nexport function truncate(value: string, n: number): string {\n let i = n\n if (value.length > i) {\n while (value[i] !== \" \" && --i > 0) { /* keep eating */ }\n return `${value.substring(0, i)}...`\n }\n return value\n}\n\n/**\n * Round a number for display with repository facts\n *\n * This is a reverse-engineered version of GitHub's weird rounding algorithm\n * for stars, forks and all other numbers. While all numbers below `1,000` are\n * returned as-is, bigger numbers are converted to fixed numbers:\n *\n * - `1,049` => `1k`\n * - `1,050` => `1.1k`\n * - `1,949` => `1.9k`\n * - `1,950` => `2k`\n *\n * @param value - Original value\n *\n * @returns Rounded value\n */\nexport function round(value: number): string {\n if (value > 999) {\n const digits = +((value - 950) % 1000 > 99)\n return `${((value + 0.000001) / 1000).toFixed(digits)}k`\n } else {\n return value.toString()\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n filter,\n fromEvent,\n map,\n shareReplay,\n startWith\n} from \"rxjs\"\n\nimport { getOptionalElement } from \"~/browser\"\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve location hash\n *\n * @returns Location hash\n */\nexport function getLocationHash(): string {\n return location.hash.substring(1)\n}\n\n/**\n * Set location hash\n *\n * Setting a new fragment identifier via `location.hash` will have no effect\n * if the value doesn't change. When a new fragment identifier is set, we want\n * the browser to target the respective element at all times, which is why we\n * use this dirty little trick.\n *\n * @param hash - Location hash\n */\nexport function setLocationHash(hash: string): void {\n const el = h(\"a\", { href: hash })\n el.addEventListener(\"click\", ev => ev.stopPropagation())\n el.click()\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch location hash\n *\n * @returns Location hash observable\n */\nexport function watchLocationHash(): Observable {\n return fromEvent(window, \"hashchange\")\n .pipe(\n map(getLocationHash),\n startWith(getLocationHash()),\n filter(hash => hash.length > 0),\n shareReplay(1)\n )\n}\n\n/**\n * Watch location target\n *\n * @returns Location target observable\n */\nexport function watchLocationTarget(): Observable {\n return watchLocationHash()\n .pipe(\n map(id => getOptionalElement(`[id=\"${id}\"]`)!),\n filter(el => typeof el !== \"undefined\")\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n fromEvent,\n fromEventPattern,\n map,\n merge,\n startWith,\n switchMap\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch media query\n *\n * Note that although `MediaQueryList.addListener` is deprecated we have to\n * use it, because it's the only way to ensure proper downward compatibility.\n *\n * @see https://bit.ly/3dUBH2m - GitHub issue\n *\n * @param query - Media query\n *\n * @returns Media observable\n */\nexport function watchMedia(query: string): Observable {\n const media = matchMedia(query)\n return fromEventPattern(next => (\n media.addListener(() => next(media.matches))\n ))\n .pipe(\n startWith(media.matches)\n )\n}\n\n/**\n * Watch print mode\n *\n * @returns Print observable\n */\nexport function watchPrint(): Observable {\n const media = matchMedia(\"print\")\n return merge(\n fromEvent(window, \"beforeprint\").pipe(map(() => true)),\n fromEvent(window, \"afterprint\").pipe(map(() => false))\n )\n .pipe(\n startWith(media.matches)\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Toggle an observable with a media observable\n *\n * @template T - Data type\n *\n * @param query$ - Media observable\n * @param factory - Observable factory\n *\n * @returns Toggled observable\n */\nexport function at(\n query$: Observable, factory: () => Observable\n): Observable {\n return query$\n .pipe(\n switchMap(active => active ? factory() : EMPTY)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n catchError,\n from,\n map,\n of,\n shareReplay,\n switchMap,\n throwError\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch the given URL\n *\n * If the request fails (e.g. 
when dispatched from `file://` locations), the\n * observable will complete without emitting a value.\n *\n * @param url - Request URL\n * @param options - Options\n *\n * @returns Response observable\n */\nexport function request(\n url: URL | string, options: RequestInit = { credentials: \"same-origin\" }\n): Observable {\n return from(fetch(`${url}`, options))\n .pipe(\n catchError(() => EMPTY),\n switchMap(res => res.status !== 200\n ? throwError(() => new Error(res.statusText))\n : of(res)\n )\n )\n}\n\n/**\n * Fetch JSON from the given URL\n *\n * @template T - Data type\n *\n * @param url - Request URL\n * @param options - Options\n *\n * @returns Data observable\n */\nexport function requestJSON(\n url: URL | string, options?: RequestInit\n): Observable {\n return request(url, options)\n .pipe(\n switchMap(res => res.json()),\n shareReplay(1)\n )\n}\n\n/**\n * Fetch XML from the given URL\n *\n * @param url - Request URL\n * @param options - Options\n *\n * @returns Data observable\n */\nexport function requestXML(\n url: URL | string, options?: RequestInit\n): Observable {\n const dom = new DOMParser()\n return request(url, options)\n .pipe(\n switchMap(res => res.text()),\n map(res => dom.parseFromString(res, \"text/xml\")),\n shareReplay(1)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n defer,\n finalize,\n fromEvent,\n map,\n merge,\n switchMap,\n take,\n throwError\n} from \"rxjs\"\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create and load a `script` element\n *\n * This function returns an observable that will emit when the script was\n * successfully loaded, or throw an error if it didn't.\n *\n * @param src - Script URL\n *\n * @returns Script observable\n */\nexport function watchScript(src: string): Observable {\n const script = h(\"script\", { src })\n return defer(() => {\n document.head.appendChild(script)\n return merge(\n fromEvent(script, \"load\"),\n fromEvent(script, \"error\")\n .pipe(\n switchMap(() => (\n throwError(() => new ReferenceError(`Invalid script: ${src}`))\n ))\n )\n )\n .pipe(\n map(() => undefined),\n finalize(() => document.head.removeChild(script)),\n take(1)\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n merge,\n startWith\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Viewport offset\n */\nexport interface ViewportOffset {\n x: number /* Horizontal offset */\n y: number /* Vertical offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve viewport offset\n *\n * On iOS Safari, viewport offset can be negative due to overflow scrolling.\n * As this may induce strange behaviors downstream, we'll just limit it to 0.\n *\n * @returns Viewport offset\n */\nexport function getViewportOffset(): ViewportOffset {\n return {\n x: Math.max(0, scrollX),\n y: Math.max(0, scrollY)\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport offset\n *\n * @returns Viewport offset observable\n */\nexport function watchViewportOffset(): Observable {\n return merge(\n fromEvent(window, \"scroll\", { passive: true }),\n fromEvent(window, \"resize\", { passive: true })\n )\n .pipe(\n map(getViewportOffset),\n startWith(getViewportOffset())\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n startWith\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Viewport size\n */\nexport interface ViewportSize {\n width: number /* Viewport width */\n height: number /* Viewport height */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve viewport size\n *\n * @returns Viewport size\n */\nexport function getViewportSize(): ViewportSize {\n return {\n width: innerWidth,\n height: innerHeight\n }\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport size\n *\n * @returns Viewport size observable\n */\nexport function watchViewportSize(): Observable {\n return fromEvent(window, \"resize\", { passive: true })\n .pipe(\n map(getViewportSize),\n startWith(getViewportSize())\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n map,\n shareReplay\n} from \"rxjs\"\n\nimport {\n ViewportOffset,\n watchViewportOffset\n} from \"../offset\"\nimport {\n ViewportSize,\n watchViewportSize\n} from \"../size\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Viewport\n */\nexport interface Viewport {\n offset: ViewportOffset /* Viewport offset */\n size: ViewportSize /* Viewport size */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport\n *\n * @returns Viewport observable\n */\nexport function watchViewport(): Observable {\n return combineLatest([\n watchViewportOffset(),\n watchViewportSize()\n ])\n .pipe(\n map(([offset, size]) => ({ offset, size })),\n shareReplay(1)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n distinctUntilKeyChanged,\n map\n} from \"rxjs\"\n\nimport { Header } from \"~/components\"\n\nimport { getElementOffset } from \"../../element\"\nimport { Viewport } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
/* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch viewport relative to element\n *\n * @param el - Element\n * @param options - Options\n *\n * @returns Viewport observable\n */\nexport function watchViewportAt(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n const size$ = viewport$\n .pipe(\n distinctUntilKeyChanged(\"size\")\n )\n\n /* Compute element offset */\n const offset$ = combineLatest([size$, header$])\n .pipe(\n map(() => getElementOffset(el))\n )\n\n /* Compute relative viewport, return hot observable */\n return combineLatest([header$, viewport$, offset$])\n .pipe(\n map(([{ height }, { offset, size }, { x, y }]) => ({\n offset: {\n x: offset.x - x,\n y: offset.y - y + height\n },\n size\n }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n fromEvent,\n map,\n share,\n switchMap,\n tap,\n throttle\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Worker message\n */\nexport interface WorkerMessage {\n type: unknown /* Message type */\n data?: unknown /* Message data */\n}\n\n/**\n * Worker handler\n *\n * @template T - Message type\n */\nexport interface WorkerHandler<\n T extends WorkerMessage\n> {\n tx$: Subject /* Message transmission subject */\n rx$: Observable /* Message receive observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n *\n * @template T - Worker message type\n */\ninterface WatchOptions {\n tx$: Observable /* Message transmission observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch a web worker\n *\n * This function returns an observable that sends all values emitted by the\n * message observable to the web worker. Web worker communication is expected\n * to be bidirectional (request-response) and synchronous. 
Messages that are\n * emitted during a pending request are throttled, the last one is emitted.\n *\n * @param worker - Web worker\n * @param options - Options\n *\n * @returns Worker message observable\n */\nexport function watchWorker(\n worker: Worker, { tx$ }: WatchOptions\n): Observable {\n\n /* Intercept messages from worker-like objects */\n const rx$ = fromEvent(worker, \"message\")\n .pipe(\n map(({ data }) => data as T)\n )\n\n /* Send and receive messages, return hot observable */\n return tx$\n .pipe(\n throttle(() => rx$, { leading: true, trailing: true }),\n tap(message => worker.postMessage(message)),\n switchMap(() => rx$),\n share()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { getElement, getLocation } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Feature flag\n */\nexport type Flag =\n | \"announce.dismiss\" /* Dismissable announcement bar */\n | \"content.code.annotate\" /* Code annotations */\n | \"content.lazy\" /* Lazy content elements */\n | \"content.tabs.link\" /* Link content tabs */\n | \"header.autohide\" /* Hide header */\n | \"navigation.expand\" /* Automatic expansion */\n | \"navigation.indexes\" /* Section pages */\n | \"navigation.instant\" /* Instant loading */\n | \"navigation.sections\" /* Section navigation */\n | \"navigation.tabs\" /* Tabs navigation */\n | \"navigation.tabs.sticky\" /* Tabs navigation (sticky) */\n | \"navigation.top\" /* Back-to-top button */\n | \"navigation.tracking\" /* Anchor tracking */\n | \"search.highlight\" /* Search highlighting */\n | \"search.share\" /* Search sharing */\n | \"search.suggest\" /* Search suggestions */\n | \"toc.follow\" /* Following table of contents */\n | \"toc.integrate\" /* Integrated table of contents */\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Translation\n */\nexport type Translation =\n | \"clipboard.copy\" /* Copy to clipboard */\n | \"clipboard.copied\" /* Copied to clipboard */\n | \"search.config.lang\" /* Search language */\n | \"search.config.pipeline\" /* Search pipeline */\n | \"search.config.separator\" /* Search separator */\n | \"search.placeholder\" /* Search */\n | \"search.result.placeholder\" /* Type to start searching */\n | \"search.result.none\" /* No matching 
documents */\n | \"search.result.one\" /* 1 matching document */\n | \"search.result.other\" /* # matching documents */\n | \"search.result.more.one\" /* 1 more on this page */\n | \"search.result.more.other\" /* # more on this page */\n | \"search.result.term.missing\" /* Missing */\n | \"select.version.title\" /* Version selector */\n\n/**\n * Translations\n */\nexport type Translations = Record\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Versioning\n */\nexport interface Versioning {\n provider: \"mike\" /* Version provider */\n default?: string /* Default version */\n}\n\n/**\n * Configuration\n */\nexport interface Config {\n base: string /* Base URL */\n features: Flag[] /* Feature flags */\n translations: Translations /* Translations */\n search: string /* Search worker URL */\n tags?: Record /* Tags mapping */\n version?: Versioning /* Versioning */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve global configuration and make base URL absolute\n */\nconst script = getElement(\"#__config\")\nconst config: Config = JSON.parse(script.textContent!)\nconfig.base = `${new URL(config.base, getLocation())}`\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve global configuration\n *\n * @returns Global configuration\n */\nexport function configuration(): Config {\n return config\n}\n\n/**\n * Check whether a feature flag is enabled\n *\n * @param flag - Feature flag\n *\n * @returns Test result\n */\nexport function feature(flag: Flag): boolean {\n return config.features.includes(flag)\n}\n\n/**\n * Retrieve the translation for the given key\n *\n * @param key - Key to be translated\n * @param value - Positional value, if any\n *\n * @returns Translation\n */\nexport function translation(\n key: Translation, value?: string | number\n): string {\n return typeof value !== \"undefined\"\n ? config.translations[key].replace(\"#\", value.toString())\n : config.translations[key]\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { getElement, getElements } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Component type\n */\nexport type ComponentType =\n | \"announce\" /* Announcement bar */\n | \"container\" /* Container */\n | \"consent\" /* Consent */\n | \"content\" /* Content */\n | \"dialog\" /* Dialog */\n | \"header\" /* Header */\n | \"header-title\" /* Header title */\n | \"header-topic\" /* Header topic */\n | \"main\" /* Main area */\n | \"outdated\" /* Version warning */\n | \"palette\" /* Color palette */\n | \"search\" /* Search */\n | \"search-query\" /* Search input */\n | \"search-result\" /* Search results */\n | \"search-share\" /* Search sharing */\n | \"search-suggest\" /* Search suggestions */\n | \"sidebar\" /* Sidebar */\n | \"skip\" /* Skip link */\n | \"source\" /* Repository information */\n | \"tabs\" /* Navigation tabs */\n | \"toc\" /* Table of contents */\n | \"top\" /* Back-to-top button */\n\n/**\n * Component\n *\n * @template T - Component type\n * @template U - Reference type\n */\nexport type Component<\n T extends {} = {},\n U extends HTMLElement = HTMLElement\n> =\n T & {\n ref: U /* Component reference */\n }\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Component type map\n */\ninterface ComponentTypeMap {\n \"announce\": HTMLElement /* Announcement bar */\n \"container\": HTMLElement /* Container */\n \"consent\": HTMLElement /* Consent */\n \"content\": HTMLElement /* Content */\n \"dialog\": HTMLElement /* Dialog */\n \"header\": HTMLElement /* Header */\n \"header-title\": HTMLElement /* Header title */\n \"header-topic\": HTMLElement /* Header topic */\n \"main\": HTMLElement /* Main area */\n \"outdated\": HTMLElement /* Version warning */\n \"palette\": HTMLElement /* Color palette */\n \"search\": HTMLElement /* Search */\n \"search-query\": HTMLInputElement /* Search input */\n \"search-result\": HTMLElement /* Search results */\n \"search-share\": HTMLAnchorElement /* Search sharing */\n \"search-suggest\": HTMLElement /* Search suggestions */\n \"sidebar\": HTMLElement /* Sidebar */\n \"skip\": HTMLAnchorElement /* Skip link */\n \"source\": HTMLAnchorElement /* Repository information */\n \"tabs\": HTMLElement /* Navigation tabs */\n \"toc\": HTMLElement /* Table of contents */\n \"top\": HTMLAnchorElement /* Back-to-top button */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Retrieve the element for a given component or throw a reference error\n *\n * @template T - Component type\n *\n * @param type - Component type\n * @param node - Node of reference\n *\n * @returns Element\n */\nexport function getComponentElement(\n type: T, node: ParentNode = document\n): ComponentTypeMap[T] {\n return getElement(`[data-md-component=${type}]`, node)\n}\n\n/**\n * Retrieve all elements for a given component\n *\n * @template T - Component type\n *\n 
* @param type - Component type\n * @param node - Node of reference\n *\n * @returns Elements\n */\nexport function getComponentElements(\n type: T, node: ParentNode = document\n): ComponentTypeMap[T][] {\n return getElements(`[data-md-component=${type}]`, node)\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n finalize,\n fromEvent,\n map,\n startWith,\n tap\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport { getElement } from \"~/browser\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Announcement bar\n */\nexport interface Announce {\n hash: number /* Content hash */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch announcement bar\n *\n * @param el - Announcement bar element\n *\n * @returns Announcement bar observable\n */\nexport function watchAnnounce(\n el: HTMLElement\n): Observable {\n const button = getElement(\".md-typeset > :first-child\", el)\n return fromEvent(button, \"click\", { once: true })\n .pipe(\n map(() => getElement(\".md-typeset\", el)),\n map(content => ({ hash: __md_hash(content.innerHTML) }))\n )\n}\n\n/**\n * Mount announcement bar\n *\n * @param el - Announcement bar element\n *\n * @returns Announcement bar component observable\n */\nexport function mountAnnounce(\n el: HTMLElement\n): Observable> {\n if (!feature(\"announce.dismiss\") || !el.childElementCount)\n return EMPTY\n\n /* Mount component on subscription */\n return defer(() => {\n const push$ = new Subject()\n push$\n .pipe(\n startWith({ hash: __md_get(\"__announce\") })\n )\n .subscribe(({ hash }) => {\n if (hash && hash === (__md_get(\"__announce\") ?? 
hash)) {\n el.hidden = true\n\n /* Persist preference in local storage */\n __md_set(\"__announce\", hash)\n }\n })\n\n /* Create and return component */\n return watchAnnounce(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n finalize,\n map,\n tap\n} from \"rxjs\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Consent\n */\nexport interface Consent {\n hidden: boolean /* Consent is hidden */\n}\n\n/**\n * Consent defaults\n */\nexport interface ConsentDefaults {\n analytics?: boolean /* Consent for Analytics */\n github?: boolean /* Consent for GitHub */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n target$: Observable /* Target observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch consent\n *\n * @param el - Consent element\n * @param options - Options\n *\n * @returns Consent observable\n */\nexport function watchConsent(\n el: HTMLElement, { target$ }: WatchOptions\n): Observable {\n return target$\n .pipe(\n map(target => ({ hidden: target !== el }))\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Mount consent\n *\n * @param el - Consent element\n * @param options - Options\n *\n * @returns Consent component observable\n */\nexport function mountConsent(\n el: HTMLElement, options: MountOptions\n): Observable> {\n const internal$ = new Subject()\n internal$.subscribe(({ hidden }) => {\n el.hidden = hidden\n })\n\n /* Create and return component */\n return watchConsent(el, options)\n .pipe(\n tap(state => internal$.next(state)),\n finalize(() => internal$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 
Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport ClipboardJS from \"clipboard\"\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n finalize,\n map,\n mergeWith,\n switchMap,\n take,\n tap\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n getElementContentSize,\n watchElementSize,\n watchElementVisibility\n} from \"~/browser\"\nimport { renderClipboardButton } from \"~/templates\"\n\nimport { Component } from \"../../../_\"\nimport {\n Annotation,\n mountAnnotationList\n} from \"../../annotation\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Code block\n */\nexport interface CodeBlock {\n scrollable: boolean /* Code block overflows */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Global sequence number for code blocks\n */\nlet sequence = 0\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Find candidate list element directly following a code block\n *\n * @param el - Code block element\n *\n * @returns List element or nothing\n */\nfunction findCandidateList(el: HTMLElement): HTMLElement | undefined {\n if (el.nextElementSibling) {\n const sibling = el.nextElementSibling as HTMLElement\n if (sibling.tagName === \"OL\")\n return sibling\n\n /* Skip empty paragraphs - see https://bit.ly/3r4ZJ2O */\n else if (sibling.tagName === \"P\" && !sibling.children.length)\n return findCandidateList(sibling)\n }\n\n /* Everything else */\n return undefined\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch code block\n *\n * This function monitors size changes of 
the viewport, as well as switches of\n * content tabs with embedded code blocks, as both may trigger overflow.\n *\n * @param el - Code block element\n *\n * @returns Code block observable\n */\nexport function watchCodeBlock(\n el: HTMLElement\n): Observable {\n return watchElementSize(el)\n .pipe(\n map(({ width }) => {\n const content = getElementContentSize(el)\n return {\n scrollable: content.width > width\n }\n }),\n distinctUntilKeyChanged(\"scrollable\")\n )\n}\n\n/**\n * Mount code block\n *\n * This function ensures that an overflowing code block is focusable through\n * keyboard, so it can be scrolled without a mouse to improve on accessibility.\n * Furthermore, if code annotations are enabled, they are mounted if and only\n * if the code block is currently visible, e.g., not in a hidden content tab.\n *\n * Note that code blocks may be mounted eagerly or lazily. If they're mounted\n * lazily (on first visibility), code annotation anchor links will not work,\n * as they are evaluated on initial page load, and code annotations in general\n * might feel a little bumpier.\n *\n * @param el - Code block element\n * @param options - Options\n *\n * @returns Code block and annotation component observable\n */\nexport function mountCodeBlock(\n el: HTMLElement, options: MountOptions\n): Observable> {\n const { matches: hover } = matchMedia(\"(hover)\")\n\n /* Defer mounting of code block - see https://bit.ly/3vHVoVD */\n const factory$ = defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ scrollable }) => {\n if (scrollable && hover)\n el.setAttribute(\"tabindex\", \"0\")\n else\n el.removeAttribute(\"tabindex\")\n })\n\n /* Render button for Clipboard.js integration */\n if (ClipboardJS.isSupported()) {\n const parent = el.closest(\"pre\")!\n parent.id = `__code_${++sequence}`\n parent.insertBefore(\n renderClipboardButton(parent.id),\n el\n )\n }\n\n /* Handle code annotations */\n const container = el.closest(\".highlight\")\n if (container instanceof HTMLElement) {\n const list = findCandidateList(container)\n\n /* Mount code annotations, if enabled */\n if (typeof list !== \"undefined\" && (\n container.classList.contains(\"annotate\") ||\n feature(\"content.code.annotate\")\n )) {\n const annotations$ = mountAnnotationList(list, el, options)\n\n /* Create and return component */\n return watchCodeBlock(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state })),\n mergeWith(\n watchElementSize(container)\n .pipe(\n map(({ width, height }) => width && height),\n distinctUntilChanged(),\n switchMap(active => active ? 
annotations$ : EMPTY)\n )\n )\n )\n }\n }\n\n /* Create and return component */\n return watchCodeBlock(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n\n /* Mount code block lazily */\n if (feature(\"content.lazy\"))\n return watchElementVisibility(el)\n .pipe(\n filter(visible => visible),\n take(1),\n switchMap(() => factory$)\n )\n\n /* Mount code block */\n return factory$\n}\n", "/*\n * Copyright (c) 2016-2021 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a tooltip\n *\n * @param id - Tooltip identifier\n *\n * @returns Element\n */\nexport function renderTooltip(id?: string): HTMLElement {\n return (\n
\n
\n
\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\nimport { renderTooltip } from \"../tooltip\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render an annotation\n *\n * @param id - Annotation identifier\n * @param prefix - Tooltip identifier prefix\n *\n * @returns Element\n */\nexport function renderAnnotation(\n id: string | number, prefix?: string\n): HTMLElement {\n prefix = prefix ? `${prefix}_annotation_${id}` : undefined\n\n /* Render tooltip with anchor, if given */\n if (prefix) {\n const anchor = prefix ? `#${prefix}` : undefined\n return (\n \n )\n } else {\n return (\n \n )\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { translation } from \"~/_\"\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a 'copy-to-clipboard' button\n *\n * @param id - Unique identifier\n *\n * @returns Element\n */\nexport function renderClipboardButton(id: string): HTMLElement {\n return (\n code`}\n >\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { ComponentChild } from \"preact\"\n\nimport { configuration, feature, translation } from \"~/_\"\nimport {\n SearchDocument,\n SearchMetadata,\n SearchResultItem\n} from \"~/integrations/search\"\nimport { h, truncate } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Render flag\n */\nconst enum Flag {\n TEASER = 1, /* Render teaser */\n PARENT = 2 /* Render as parent */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper function\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a search document\n *\n * @param document - Search document\n * @param flag - Render flags\n *\n * @returns Element\n */\nfunction renderSearchDocument(\n document: SearchDocument & SearchMetadata, flag: Flag\n): HTMLElement {\n const parent = flag & Flag.PARENT\n const teaser = flag & Flag.TEASER\n\n /* Render missing query terms */\n const missing = Object.keys(document.terms)\n .filter(key => !document.terms[key])\n .reduce((list, key) => [\n ...list, {key}, \" \"\n ], [])\n .slice(0, -1)\n\n /* Assemble query string for highlighting */\n const url = new URL(document.location)\n if (feature(\"search.highlight\"))\n url.searchParams.set(\"h\", Object.entries(document.terms)\n .filter(([, match]) => match)\n .reduce((highlight, [value]) => `${highlight} ${value}`.trim(), \"\")\n )\n\n /* Render article or section, depending on flags */\n const { tags } = configuration()\n return (\n \n 
\n {parent > 0 &&
}\n

{document.title}

\n {teaser > 0 && document.text.length > 0 &&\n

\n {truncate(document.text, 320)}\n

\n }\n {document.tags && (\n
\n {document.tags.map(tag => {\n const id = tag.replace(/<[^>]+>/g, \"\")\n const type = tags\n ? id in tags\n ? `md-tag-icon md-tag-icon--${tags[id]}`\n : \"md-tag-icon\"\n : \"\"\n return (\n {tag}\n )\n })}\n
\n )}\n {teaser > 0 && missing.length > 0 &&\n

\n {translation(\"search.result.term.missing\")}: {...missing}\n

\n }\n \n
\n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a search result\n *\n * @param result - Search result\n *\n * @returns Element\n */\nexport function renderSearchResultItem(\n result: SearchResultItem\n): HTMLElement {\n const threshold = result[0].score\n const docs = [...result]\n\n /* Find and extract parent article */\n const parent = docs.findIndex(doc => !doc.location.includes(\"#\"))\n const [article] = docs.splice(parent, 1)\n\n /* Determine last index above threshold */\n let index = docs.findIndex(doc => doc.score < threshold)\n if (index === -1)\n index = docs.length\n\n /* Partition sections */\n const best = docs.slice(0, index)\n const more = docs.slice(index)\n\n /* Render children */\n const children = [\n renderSearchDocument(article, Flag.PARENT | +(!parent && index === 0)),\n ...best.map(section => renderSearchDocument(section, Flag.TEASER)),\n ...more.length ? [\n
\n \n {more.length > 0 && more.length === 1\n ? translation(\"search.result.more.one\")\n : translation(\"search.result.more.other\", more.length)\n }\n \n {...more.map(section => renderSearchDocument(section, Flag.TEASER))}\n
\n ] : []\n ]\n\n /* Render search result */\n return (\n
  • \n {children}\n
  • \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { SourceFacts } from \"~/components\"\nimport { h, round } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render repository facts\n *\n * @param facts - Repository facts\n *\n * @returns Element\n */\nexport function renderSourceFacts(facts: SourceFacts): HTMLElement {\n return (\n
      \n {Object.entries(facts).map(([key, value]) => (\n
    • \n {typeof value === \"number\" ? round(value) : value}\n
    • \n ))}\n
    \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Tabbed control type\n */\ntype TabbedControlType =\n | \"prev\"\n | \"next\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render control for content tabs\n *\n * @param type - Control type\n *\n * @returns Element\n */\nexport function renderTabbedControl(\n type: TabbedControlType\n): HTMLElement {\n const classes = `tabbed-control tabbed-control--${type}`\n return (\n \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a table inside a wrapper to improve scrolling on mobile\n *\n * @param table - Table element\n *\n * @returns Element\n */\nexport function renderTable(table: HTMLElement): HTMLElement {\n return (\n
    \n
    \n {table}\n
    \n
    \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { configuration, translation } from \"~/_\"\nimport { h } from \"~/utilities\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Version\n */\nexport interface Version {\n version: string /* Version identifier */\n title: string /* Version title */\n aliases: string[] /* Version aliases */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a version\n *\n * @param version - Version\n *\n * @returns Element\n */\nfunction renderVersion(version: Version): HTMLElement {\n const config = configuration()\n\n /* Ensure trailing slash - see https://bit.ly/3rL5u3f */\n const url = new URL(`../${version.version}/`, config.base)\n return (\n
  • \n \n {version.title}\n \n
  • \n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Render a version selector\n *\n * @param versions - Versions\n * @param active - Active version\n *\n * @returns Element\n */\nexport function renderVersionSelector(\n versions: Version[], active: Version\n): HTMLElement {\n return (\n
    \n \n {active.title}\n \n
      \n {versions.map(renderVersion)}\n
    \n
    \n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n animationFrameScheduler,\n auditTime,\n combineLatest,\n debounceTime,\n defer,\n delay,\n filter,\n finalize,\n fromEvent,\n map,\n merge,\n switchMap,\n take,\n takeLast,\n takeUntil,\n tap,\n throttleTime,\n withLatestFrom\n} from \"rxjs\"\n\nimport {\n ElementOffset,\n getActiveElement,\n getElementSize,\n watchElementContentOffset,\n watchElementFocus,\n watchElementOffset,\n watchElementVisibility\n} from \"~/browser\"\n\nimport { Component } from \"../../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Annotation\n */\nexport interface Annotation {\n active: boolean /* Annotation is active */\n offset: ElementOffset /* Annotation offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch annotation\n *\n * @param el - Annotation element\n * @param container - Containing element\n *\n * @returns Annotation observable\n */\nexport function watchAnnotation(\n el: HTMLElement, container: HTMLElement\n): Observable {\n const offset$ = defer(() => combineLatest([\n watchElementOffset(el),\n watchElementContentOffset(container)\n ]))\n .pipe(\n map(([{ x, y }, scroll]): ElementOffset => {\n const { width, height } = getElementSize(el)\n return ({\n x: x - scroll.x + width / 2,\n y: y - scroll.y + height / 2\n })\n })\n )\n\n /* Actively watch annotation on focus */\n return watchElementFocus(el)\n .pipe(\n switchMap(active => offset$\n .pipe(\n map(offset => ({ active, offset })),\n take(+!active || Infinity)\n )\n )\n )\n}\n\n/**\n * Mount annotation\n *\n * @param el - Annotation element\n * @param container - Containing element\n * @param options - Options\n *\n * @returns Annotation component observable\n */\nexport function mountAnnotation(\n el: HTMLElement, container: HTMLElement, { target$ }: MountOptions\n): Observable> {\n 
const [tooltip, index] = Array.from(el.children)\n\n /* Mount component on subscription */\n return defer(() => {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n push$.subscribe({\n\n /* Handle emission */\n next({ offset }) {\n el.style.setProperty(\"--md-tooltip-x\", `${offset.x}px`)\n el.style.setProperty(\"--md-tooltip-y\", `${offset.y}px`)\n },\n\n /* Handle complete */\n complete() {\n el.style.removeProperty(\"--md-tooltip-x\")\n el.style.removeProperty(\"--md-tooltip-y\")\n }\n })\n\n /* Start animation only when annotation is visible */\n watchElementVisibility(el)\n .pipe(\n takeUntil(done$)\n )\n .subscribe(visible => {\n el.toggleAttribute(\"data-md-visible\", visible)\n })\n\n /* Toggle tooltip presence to mitigate empty lines when copying */\n merge(\n push$.pipe(filter(({ active }) => active)),\n push$.pipe(debounceTime(250), filter(({ active }) => !active))\n )\n .subscribe({\n\n /* Handle emission */\n next({ active }) {\n if (active)\n el.prepend(tooltip)\n else\n tooltip.remove()\n },\n\n /* Handle complete */\n complete() {\n el.prepend(tooltip)\n }\n })\n\n /* Toggle tooltip visibility */\n push$\n .pipe(\n auditTime(16, animationFrameScheduler)\n )\n .subscribe(({ active }) => {\n tooltip.classList.toggle(\"md-tooltip--active\", active)\n })\n\n /* Track relative origin of tooltip */\n push$\n .pipe(\n throttleTime(125, animationFrameScheduler),\n filter(() => !!el.offsetParent),\n map(() => el.offsetParent!.getBoundingClientRect()),\n map(({ x }) => x)\n )\n .subscribe({\n\n /* Handle emission */\n next(origin) {\n if (origin)\n el.style.setProperty(\"--md-tooltip-0\", `${-origin}px`)\n else\n el.style.removeProperty(\"--md-tooltip-0\")\n },\n\n /* Handle complete */\n complete() {\n el.style.removeProperty(\"--md-tooltip-0\")\n }\n })\n\n /* Allow to copy link without scrolling to anchor */\n fromEvent(index, \"click\")\n .pipe(\n takeUntil(done$),\n filter(ev => !(ev.metaKey || ev.ctrlKey))\n )\n .subscribe(ev => ev.preventDefault())\n\n /* Allow to open link in new tab or blur on close */\n fromEvent(index, \"mousedown\")\n .pipe(\n takeUntil(done$),\n withLatestFrom(push$)\n )\n .subscribe(([ev, { active }]) => {\n\n /* Open in new tab */\n if (ev.button !== 0 || ev.metaKey || ev.ctrlKey) {\n ev.preventDefault()\n\n /* Close annotation */\n } else if (active) {\n ev.preventDefault()\n\n /* Focus parent annotation, if any */\n const parent = el.parentElement!.closest(\".md-annotation\")\n if (parent instanceof HTMLElement)\n parent.focus()\n else\n getActiveElement()?.blur()\n }\n })\n\n /* Open and focus annotation on location target */\n target$\n .pipe(\n takeUntil(done$),\n filter(target => target === tooltip),\n delay(125)\n )\n .subscribe(() => el.focus())\n\n /* Create and return component */\n return watchAnnotation(el, container)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall 
be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n finalize,\n merge,\n share,\n takeLast,\n takeUntil\n} from \"rxjs\"\n\nimport {\n getElement,\n getElements,\n getOptionalElement\n} from \"~/browser\"\nimport { renderAnnotation } from \"~/templates\"\n\nimport { Component } from \"../../../_\"\nimport {\n Annotation,\n mountAnnotation\n} from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Find all annotation markers in the given code block\n *\n * @param container - Containing element\n *\n * @returns Annotation markers\n */\nfunction findAnnotationMarkers(container: HTMLElement): Text[] {\n const markers: Text[] = []\n for (const el of getElements(\".c, .c1, .cm\", container)) {\n const nodes: Text[] = []\n\n /* Find all text nodes in current element */\n const it = document.createNodeIterator(el, NodeFilter.SHOW_TEXT)\n for (let node = it.nextNode(); node; node = it.nextNode())\n nodes.push(node as Text)\n\n /* Find all markers in each text node */\n for (let text of nodes) {\n let match: RegExpExecArray | null\n\n /* Split text at marker and add to list */\n while ((match = /(\\(\\d+\\))(!)?/.exec(text.textContent!))) {\n const [, id, force] = match\n if (typeof force === \"undefined\") {\n const marker = text.splitText(match.index)\n text = marker.splitText(id.length)\n markers.push(marker)\n\n /* Replace entire text with marker */\n } else {\n text.textContent = id\n markers.push(text)\n break\n }\n }\n }\n }\n return markers\n}\n\n/**\n * Swap the child nodes of two elements\n *\n * @param source - Source element\n * @param target - Target element\n */\nfunction swap(source: HTMLElement, target: HTMLElement): void {\n target.append(...Array.from(source.childNodes))\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount annotation list\n *\n * This function analyzes the containing code block and checks for markers\n * referring to elements in the given annotation list. If no markers are found,\n * the list is left untouched. 
Otherwise, list elements are rendered as\n * annotations inside the code block.\n *\n * @param el - Annotation list element\n * @param container - Containing element\n * @param options - Options\n *\n * @returns Annotation component observable\n */\nexport function mountAnnotationList(\n el: HTMLElement, container: HTMLElement, { target$, print$ }: MountOptions\n): Observable> {\n\n /* Compute prefix for tooltip anchors */\n const parent = container.closest(\"[id]\")\n const prefix = parent?.id\n\n /* Find and replace all markers with empty annotations */\n const annotations = new Map()\n for (const marker of findAnnotationMarkers(container)) {\n const [, id] = marker.textContent!.match(/\\((\\d+)\\)/)!\n if (getOptionalElement(`li:nth-child(${id})`, el)) {\n annotations.set(id, renderAnnotation(id, prefix))\n marker.replaceWith(annotations.get(id)!)\n }\n }\n\n /* Keep list if there are no annotations to render */\n if (annotations.size === 0)\n return EMPTY\n\n /* Mount component on subscription */\n return defer(() => {\n const done$ = new Subject()\n\n /* Retrieve container pairs for swapping */\n const pairs: [HTMLElement, HTMLElement][] = []\n for (const [id, annotation] of annotations)\n pairs.push([\n getElement(\".md-typeset\", annotation),\n getElement(`li:nth-child(${id})`, el)\n ])\n\n /* Handle print mode - see https://bit.ly/3rgPdpt */\n print$\n .pipe(\n takeUntil(done$.pipe(takeLast(1)))\n )\n .subscribe(active => {\n el.hidden = !active\n\n /* Show annotations in code block or list (print) */\n for (const [inner, child] of pairs)\n if (!active)\n swap(child, inner)\n else\n swap(inner, child)\n })\n\n /* Create and return component */\n return merge(...[...annotations]\n .map(([, annotation]) => (\n mountAnnotation(annotation, container, { target$ })\n ))\n )\n .pipe(\n finalize(() => done$.complete()),\n share()\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n map,\n of,\n shareReplay,\n tap\n} from \"rxjs\"\n\nimport { watchScript } from \"~/browser\"\nimport { h } from \"~/utilities\"\n\nimport { Component } from \"../../../_\"\n\nimport themeCSS from \"./index.css\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mermaid diagram\n */\nexport interface Mermaid {}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Mermaid instance observable\n */\nlet mermaid$: Observable\n\n/**\n * Global sequence number for diagrams\n */\nlet sequence = 0\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch Mermaid script\n *\n * @returns Mermaid scripts observable\n */\nfunction fetchScripts(): Observable {\n return typeof mermaid === \"undefined\" || mermaid instanceof Element\n ? watchScript(\"https://unpkg.com/mermaid@9.1.7/dist/mermaid.min.js\")\n : of(undefined)\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount Mermaid diagram\n *\n * @param el - Code block element\n *\n * @returns Mermaid diagram component observable\n */\nexport function mountMermaid(\n el: HTMLElement\n): Observable> {\n el.classList.remove(\"mermaid\") // Hack: mitigate https://bit.ly/3CiN6Du\n mermaid$ ||= fetchScripts()\n .pipe(\n tap(() => mermaid.initialize({\n startOnLoad: false,\n themeCSS,\n sequence: {\n actorFontSize: \"16px\", // Hack: mitigate https://bit.ly/3y0NEi3\n messageFontSize: \"16px\",\n noteFontSize: \"16px\"\n }\n })),\n map(() => undefined),\n shareReplay(1)\n )\n\n /* Render diagram */\n mermaid$.subscribe(() => {\n el.classList.add(\"mermaid\") // Hack: mitigate https://bit.ly/3CiN6Du\n const id = `__mermaid_${sequence++}`\n const host = h(\"div\", { class: \"mermaid\" })\n mermaid.mermaidAPI.render(id, el.textContent, (svg: string) => {\n\n /* Create a shadow root and inject diagram */\n const shadow = host.attachShadow({ mode: \"closed\" })\n shadow.innerHTML = svg\n\n /* Replace code block with diagram */\n el.replaceWith(host)\n })\n })\n\n /* Create and return component */\n return mermaid$\n .pipe(\n map(() => ({ ref: el }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE 
IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n defer,\n filter,\n finalize,\n map,\n merge,\n tap\n} from \"rxjs\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Details\n */\nexport interface Details {\n action: \"open\" | \"close\" /* Details state */\n reveal?: boolean /* Details is revealed */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch details\n *\n * @param el - Details element\n * @param options - Options\n *\n * @returns Details observable\n */\nexport function watchDetails(\n el: HTMLDetailsElement, { target$, print$ }: WatchOptions\n): Observable
    {\n let open = true\n return merge(\n\n /* Open and focus details on location target */\n target$\n .pipe(\n map(target => target.closest(\"details:not([open])\")!),\n filter(details => el === details),\n map(() => ({\n action: \"open\", reveal: true\n }) as Details)\n ),\n\n /* Open details on print and close afterwards */\n print$\n .pipe(\n filter(active => active || !open),\n tap(() => open = el.open),\n map(active => ({\n action: active ? \"open\" : \"close\"\n }) as Details)\n )\n )\n}\n\n/**\n * Mount details\n *\n * This function ensures that `details` tags are opened on anchor jumps and\n * prior to printing, so the whole content of the page is visible.\n *\n * @param el - Details element\n * @param options - Options\n *\n * @returns Details component observable\n */\nexport function mountDetails(\n el: HTMLDetailsElement, options: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject
    ()\n push$.subscribe(({ action, reveal }) => {\n el.toggleAttribute(\"open\", action === \"open\")\n if (reveal)\n el.scrollIntoView()\n })\n\n /* Create and return component */\n return watchDetails(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Observable, of } from \"rxjs\"\n\nimport { renderTable } from \"~/templates\"\nimport { h } from \"~/utilities\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Data table\n */\nexport interface DataTable {}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Sentinel for replacement\n */\nconst sentinel = h(\"table\")\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount data table\n *\n * This function wraps a data table in another scrollable container, so it can\n * be smoothly scrolled on smaller screen sizes and won't break the layout.\n *\n * @param el - Data table element\n *\n * @returns Data table component observable\n */\nexport function mountDataTable(\n el: HTMLElement\n): Observable> {\n el.replaceWith(sentinel)\n sentinel.replaceWith(renderTable(el))\n\n /* Create and return component */\n return of({ ref: el })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * 
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n animationFrameScheduler,\n asyncScheduler,\n auditTime,\n combineLatest,\n defer,\n finalize,\n fromEvent,\n map,\n merge,\n skip,\n startWith,\n subscribeOn,\n takeLast,\n takeUntil,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n getElement,\n getElementContentOffset,\n getElementContentSize,\n getElementOffset,\n getElementSize,\n getElements,\n watchElementContentOffset,\n watchElementSize\n} from \"~/browser\"\nimport { renderTabbedControl } from \"~/templates\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Content tabs\n */\nexport interface ContentTabs {\n active: HTMLLabelElement /* Active tab label */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch content tabs\n *\n * @param el - Content tabs element\n *\n * @returns Content tabs observable\n */\nexport function watchContentTabs(\n el: HTMLElement\n): Observable {\n const inputs = getElements(\":scope > input\", el)\n const initial = inputs.find(input => input.checked) || inputs[0]\n return merge(...inputs.map(input => fromEvent(input, \"change\")\n .pipe(\n map(() => getElement(`label[for=\"${input.id}\"]`))\n )\n ))\n .pipe(\n startWith(getElement(`label[for=\"${initial.id}\"]`)),\n map(active => ({ active }))\n )\n}\n\n/**\n * Mount content tabs\n *\n * This function scrolls the active tab into view. While this functionality is\n * provided by browsers as part of `scrollInfoView`, browsers will always also\n * scroll the vertical axis, which we do not want. 
Thus, we decided to provide\n * this functionality ourselves.\n *\n * @param el - Content tabs element\n * @param options - Options\n *\n * @returns Content tabs component observable\n */\nexport function mountContentTabs(\n el: HTMLElement, { viewport$ }: MountOptions\n): Observable> {\n\n /* Render content tab previous button for pagination */\n const prev = renderTabbedControl(\"prev\")\n el.append(prev)\n\n /* Render content tab next button for pagination */\n const next = renderTabbedControl(\"next\")\n el.append(next)\n\n /* Mount component on subscription */\n const container = getElement(\".tabbed-labels\", el)\n return defer(() => {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n combineLatest([push$, watchElementSize(el)])\n .pipe(\n auditTime(1, animationFrameScheduler),\n takeUntil(done$)\n )\n .subscribe({\n\n /* Handle emission */\n next([{ active }, size]) {\n const offset = getElementOffset(active)\n const { width } = getElementSize(active)\n\n /* Set tab indicator offset and width */\n el.style.setProperty(\"--md-indicator-x\", `${offset.x}px`)\n el.style.setProperty(\"--md-indicator-width\", `${width}px`)\n\n /* Scroll container to active content tab */\n const content = getElementContentOffset(container)\n if (\n offset.x < content.x ||\n offset.x + width > content.x + size.width\n )\n container.scrollTo({\n left: Math.max(0, offset.x - 16),\n behavior: \"smooth\"\n })\n },\n\n /* Handle complete */\n complete() {\n el.style.removeProperty(\"--md-indicator-x\")\n el.style.removeProperty(\"--md-indicator-width\")\n }\n })\n\n /* Hide content tab buttons on borders */\n combineLatest([\n watchElementContentOffset(container),\n watchElementSize(container)\n ])\n .pipe(\n takeUntil(done$)\n )\n .subscribe(([offset, size]) => {\n const content = getElementContentSize(container)\n prev.hidden = offset.x < 16\n next.hidden = offset.x > content.width - size.width - 16\n })\n\n /* Paginate content tab container on click */\n merge(\n fromEvent(prev, \"click\").pipe(map(() => -1)),\n fromEvent(next, \"click\").pipe(map(() => +1))\n )\n .pipe(\n takeUntil(done$)\n )\n .subscribe(direction => {\n const { width } = getElementSize(container)\n container.scrollBy({\n left: width * direction,\n behavior: \"smooth\"\n })\n })\n\n /* Set up linking of content tabs, if enabled */\n if (feature(\"content.tabs.link\"))\n push$.pipe(\n skip(1),\n withLatestFrom(viewport$)\n )\n .subscribe(([{ active }, { offset }]) => {\n const tab = active.innerText.trim()\n if (active.hasAttribute(\"data-md-switching\")) {\n active.removeAttribute(\"data-md-switching\")\n\n /* Determine viewport offset of active tab */\n } else {\n const y = el.offsetTop - offset.y\n\n /* Passively activate other tabs */\n for (const set of getElements(\"[data-tabs]\"))\n for (const input of getElements(\n \":scope > input\", set\n )) {\n const label = getElement(`label[for=\"${input.id}\"]`)\n if (\n label !== active &&\n label.innerText.trim() === tab\n ) {\n label.setAttribute(\"data-md-switching\", \"\")\n input.click()\n break\n }\n }\n\n /* Bring active tab into view */\n window.scrollTo({\n top: el.offsetTop - y\n })\n\n /* Persist active tabs in local storage */\n const tabs = __md_get(\"__tabs\") || []\n __md_set(\"__tabs\", [...new Set([tab, ...tabs])])\n }\n })\n\n /* Create and return component */\n return watchContentTabs(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n .pipe(\n 
subscribeOn(asyncScheduler)\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Observable, merge } from \"rxjs\"\n\nimport { Viewport, getElements } from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { Annotation } from \"../annotation\"\nimport {\n CodeBlock,\n Mermaid,\n mountCodeBlock,\n mountMermaid\n} from \"../code\"\nimport {\n Details,\n mountDetails\n} from \"../details\"\nimport {\n DataTable,\n mountDataTable\n} from \"../table\"\nimport {\n ContentTabs,\n mountContentTabs\n} from \"../tabs\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Content\n */\nexport type Content =\n | Annotation\n | ContentTabs\n | CodeBlock\n | Mermaid\n | DataTable\n | Details\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n target$: Observable /* Location target observable */\n print$: Observable /* Media print observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount content\n *\n * This function mounts all components that are found in the content of the\n * actual article, including code blocks, data tables and details.\n *\n * @param el - Content element\n * @param options - Options\n *\n * @returns Content component observable\n */\nexport function mountContent(\n el: HTMLElement, { viewport$, target$, print$ }: MountOptions\n): Observable> {\n return merge(\n\n /* Code blocks */\n ...getElements(\"pre:not(.mermaid) > code\", el)\n .map(child => mountCodeBlock(child, { target$, print$ })),\n\n /* Mermaid diagrams */\n ...getElements(\"pre.mermaid\", el)\n .map(child => mountMermaid(child)),\n\n /* Data tables */\n ...getElements(\"table:not([class])\", el)\n .map(child => mountDataTable(child)),\n\n /* Details */\n ...getElements(\"details\", el)\n .map(child => mountDetails(child, { target$, print$ })),\n\n /* Content tabs */\n ...getElements(\"[data-tabs]\", el)\n .map(child => mountContentTabs(child, { viewport$ }))\n )\n}\n", "/*\n * Copyright (c) 
2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n defer,\n delay,\n finalize,\n map,\n merge,\n of,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport { getElement } from \"~/browser\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Dialog\n */\nexport interface Dialog {\n message: string /* Dialog message */\n active: boolean /* Dialog is active */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n alert$: Subject /* Alert subject */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n alert$: Subject /* Alert subject */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch dialog\n *\n * @param _el - Dialog element\n * @param options - Options\n *\n * @returns Dialog observable\n */\nexport function watchDialog(\n _el: HTMLElement, { alert$ }: WatchOptions\n): Observable {\n return alert$\n .pipe(\n switchMap(message => merge(\n of(true),\n of(false).pipe(delay(2000))\n )\n .pipe(\n map(active => ({ message, active }))\n )\n )\n )\n}\n\n/**\n * Mount dialog\n *\n * This function reveals the dialog in the right corner when a new alert is\n * emitted through the subject that is passed as part of the options.\n *\n * @param el - Dialog element\n * @param options - Options\n *\n * @returns Dialog component observable\n */\nexport function mountDialog(\n el: HTMLElement, options: MountOptions\n): Observable> {\n const inner = getElement(\".md-typeset\", el)\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ message, active }) => {\n el.classList.toggle(\"md-dialog--active\", active)\n inner.textContent = message\n })\n\n /* Create and return component */\n return watchDialog(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software 
and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n combineLatest,\n combineLatestWith,\n defer,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n map,\n of,\n shareReplay,\n startWith,\n switchMap,\n takeLast,\n takeUntil\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n watchElementSize,\n watchToggle\n} from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { Main } from \"../../main\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Header\n */\nexport interface Header {\n height: number /* Header visible height */\n hidden: boolean /* Header is hidden */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n main$: Observable
    /* Main area observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Compute whether the header is hidden\n *\n * If the user scrolls past a certain threshold, the header can be hidden when\n * scrolling down, and shown when scrolling up.\n *\n * @param options - Options\n *\n * @returns Toggle observable\n */\nfunction isHidden({ viewport$ }: WatchOptions): Observable {\n if (!feature(\"header.autohide\"))\n return of(false)\n\n /* Compute direction and turning point */\n const direction$ = viewport$\n .pipe(\n map(({ offset: { y } }) => y),\n bufferCount(2, 1),\n map(([a, b]) => [a < b, b] as const),\n distinctUntilKeyChanged(0)\n )\n\n /* Compute whether header should be hidden */\n const hidden$ = combineLatest([viewport$, direction$])\n .pipe(\n filter(([{ offset }, [, y]]) => Math.abs(y - offset.y) > 100),\n map(([, [direction]]) => direction),\n distinctUntilChanged()\n )\n\n /* Compute threshold for hiding */\n const search$ = watchToggle(\"search\")\n return combineLatest([viewport$, search$])\n .pipe(\n map(([{ offset }, search]) => offset.y > 400 && !search),\n distinctUntilChanged(),\n switchMap(active => active ? hidden$ : of(false)),\n startWith(false)\n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch header\n *\n * @param el - Header element\n * @param options - Options\n *\n * @returns Header observable\n */\nexport function watchHeader(\n el: HTMLElement, options: WatchOptions\n): Observable
    {\n return defer(() => combineLatest([\n watchElementSize(el),\n isHidden(options)\n ]))\n .pipe(\n map(([{ height }, hidden]) => ({\n height,\n hidden\n })),\n distinctUntilChanged((a, b) => (\n a.height === b.height &&\n a.hidden === b.hidden\n )),\n shareReplay(1)\n )\n}\n\n/**\n * Mount header\n *\n * This function manages the different states of the header, i.e. whether it's\n * hidden or rendered with a shadow. This depends heavily on the main area.\n *\n * @param el - Header element\n * @param options - Options\n *\n * @returns Header component observable\n */\nexport function mountHeader(\n el: HTMLElement, { header$, main$ }: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject
    ()\n const done$ = push$.pipe(takeLast(1))\n push$\n .pipe(\n distinctUntilKeyChanged(\"active\"),\n combineLatestWith(header$)\n )\n .subscribe(([{ active }, { hidden }]) => {\n el.classList.toggle(\"md-header--shadow\", active && !hidden)\n el.hidden = hidden\n })\n\n /* Link to main area */\n main$.subscribe(push$)\n\n /* Create and return component */\n return header$\n .pipe(\n takeUntil(done$),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n defer,\n distinctUntilKeyChanged,\n finalize,\n map,\n tap\n} from \"rxjs\"\n\nimport {\n Viewport,\n getElementSize,\n getOptionalElement,\n watchViewportAt\n} from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { Header } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Header\n */\nexport interface HeaderTitle {\n active: boolean /* Header title is active */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch header title\n *\n * @param el - Heading element\n * @param options - Options\n *\n * @returns Header title observable\n */\nexport function watchHeaderTitle(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n return watchViewportAt(el, { viewport$, header$ })\n .pipe(\n map(({ offset: { y } }) => {\n const { height } = getElementSize(el)\n return {\n active: y >= height\n }\n }),\n distinctUntilKeyChanged(\"active\")\n )\n}\n\n/**\n * Mount header title\n *\n * This function swaps the header title from the site title to the title of the\n * current page when the user scrolls past the first headline.\n *\n * @param el - Header title element\n * @param options - Options\n *\n * @returns Header title component observable\n */\nexport function mountHeaderTitle(\n el: HTMLElement, options: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ active }) => {\n el.classList.toggle(\"md-header__title--active\", active)\n })\n\n /* Obtain headline, if any */\n const heading = getOptionalElement(\"article h1\")\n if (typeof heading === \"undefined\")\n return EMPTY\n\n /* Create and return component */\n return watchHeaderTitle(heading, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n map,\n switchMap\n} from \"rxjs\"\n\nimport {\n Viewport,\n watchElementSize\n} from \"~/browser\"\n\nimport { Header } from \"../header\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Main area\n */\nexport interface Main {\n offset: number /* Main area top offset */\n height: number /* Main area visible height */\n active: boolean /* Main area is active */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch main area\n *\n * This function returns an observable that computes the visual parameters of\n * the main area which depends on the viewport vertical offset and height, as\n * well as the height of the header element, if the header is fixed.\n *\n * @param el - Main area element\n * @param options - Options\n *\n * @returns Main area observable\n */\nexport function watchMain(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable
    {\n\n /* Compute necessary adjustment for header */\n const adjust$ = header$\n .pipe(\n map(({ height }) => height),\n distinctUntilChanged()\n )\n\n /* Compute the main area's top and bottom borders */\n const border$ = adjust$\n .pipe(\n switchMap(() => watchElementSize(el)\n .pipe(\n map(({ height }) => ({\n top: el.offsetTop,\n bottom: el.offsetTop + height\n })),\n distinctUntilKeyChanged(\"bottom\")\n )\n )\n )\n\n /* Compute the main area's offset, visible height and if we scrolled past */\n return combineLatest([adjust$, border$, viewport$])\n .pipe(\n map(([header, { top, bottom }, { offset: { y }, size: { height } }]) => {\n height = Math.max(0, height\n - Math.max(0, top - y, header)\n - Math.max(0, height + y - bottom)\n )\n return {\n offset: top - header,\n height,\n active: top - header <= y\n }\n }),\n distinctUntilChanged((a, b) => (\n a.offset === b.offset &&\n a.height === b.height &&\n a.active === b.active\n ))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n asyncScheduler,\n defer,\n finalize,\n fromEvent,\n map,\n mergeMap,\n observeOn,\n of,\n shareReplay,\n startWith,\n tap\n} from \"rxjs\"\n\nimport { getElements } from \"~/browser\"\n\nimport { Component } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Palette colors\n */\nexport interface PaletteColor {\n scheme?: string /* Color scheme */\n primary?: string /* Primary color */\n accent?: string /* Accent color */\n}\n\n/**\n * Palette\n */\nexport interface Palette {\n index: number /* Palette index */\n color: PaletteColor /* Palette colors */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch color palette\n *\n * @param inputs - Color palette element\n *\n * @returns Color palette observable\n */\nexport function watchPalette(\n inputs: HTMLInputElement[]\n): Observable {\n const current = __md_get(\"__palette\") || {\n index: inputs.findIndex(input => matchMedia(\n input.getAttribute(\"data-md-color-media\")!\n ).matches)\n }\n\n /* Emit changes in color palette */\n return of(...inputs)\n .pipe(\n mergeMap(input => fromEvent(input, \"change\")\n .pipe(\n map(() => input)\n )\n ),\n startWith(inputs[Math.max(0, current.index)]),\n map(input => ({\n index: inputs.indexOf(input),\n color: {\n scheme: input.getAttribute(\"data-md-color-scheme\"),\n primary: input.getAttribute(\"data-md-color-primary\"),\n accent: input.getAttribute(\"data-md-color-accent\")\n }\n } as Palette)),\n shareReplay(1)\n )\n}\n\n/**\n * Mount color palette\n *\n * @param el - Color palette element\n *\n * @returns Color palette component observable\n */\nexport function mountPalette(\n el: HTMLElement\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(palette => {\n document.body.setAttribute(\"data-md-color-switching\", \"\")\n\n /* Set color palette */\n for (const [key, value] of Object.entries(palette.color))\n document.body.setAttribute(`data-md-color-${key}`, value)\n\n /* Toggle visibility */\n for (let index = 0; index < inputs.length; index++) {\n const label = inputs[index].nextElementSibling\n if (label instanceof HTMLElement)\n label.hidden = palette.index !== index\n }\n\n /* Persist preference in local storage */\n __md_set(\"__palette\", palette)\n })\n\n /* Revert transition durations after color switch */\n push$.pipe(observeOn(asyncScheduler))\n .subscribe(() => {\n document.body.removeAttribute(\"data-md-color-switching\")\n })\n\n /* Create and return component */\n const inputs = getElements(\"input\", el)\n return watchPalette(inputs)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without 
limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport ClipboardJS from \"clipboard\"\nimport {\n Observable,\n Subject,\n map,\n tap\n} from \"rxjs\"\n\nimport { translation } from \"~/_\"\nimport { getElement } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Setup options\n */\ninterface SetupOptions {\n alert$: Subject /* Alert subject */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Extract text to copy\n *\n * @param el - HTML element\n *\n * @returns Extracted text\n */\nfunction extract(el: HTMLElement): string {\n el.setAttribute(\"data-md-copying\", \"\")\n const text = el.innerText\n el.removeAttribute(\"data-md-copying\")\n return text\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up Clipboard.js integration\n *\n * @param options - Options\n */\nexport function setupClipboardJS(\n { alert$ }: SetupOptions\n): void {\n if (ClipboardJS.isSupported()) {\n new Observable(subscriber => {\n new ClipboardJS(\"[data-clipboard-target], [data-clipboard-text]\", {\n text: el => (\n el.getAttribute(\"data-clipboard-text\")! ||\n extract(getElement(\n el.getAttribute(\"data-clipboard-target\")!\n ))\n )\n })\n .on(\"success\", ev => subscriber.next(ev))\n })\n .pipe(\n tap(ev => {\n const trigger = ev.trigger as HTMLElement\n trigger.focus()\n }),\n map(() => translation(\"clipboard.copied\"))\n )\n .subscribe(alert$)\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n catchError,\n defaultIfEmpty,\n map,\n of,\n tap\n} from \"rxjs\"\n\nimport { configuration } from \"~/_\"\nimport { getElements, requestXML } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Sitemap, i.e. a list of URLs\n */\nexport type Sitemap = string[]\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Preprocess a list of URLs\n *\n * This function replaces the `site_url` in the sitemap with the actual base\n * URL, to allow instant loading to work in occasions like Netlify previews.\n *\n * @param urls - URLs\n *\n * @returns URL path parts\n */\nfunction preprocess(urls: Sitemap): Sitemap {\n if (urls.length < 2)\n return [\"\"]\n\n /* Take the first two URLs and remove everything after the last slash */\n const [root, next] = [...urls]\n .sort((a, b) => a.length - b.length)\n .map(url => url.replace(/[^/]+$/, \"\"))\n\n /* Compute common prefix */\n let index = 0\n if (root === next)\n index = root.length\n else\n while (root.charCodeAt(index) === next.charCodeAt(index))\n index++\n\n /* Remove common prefix and return in original order */\n return urls.map(url => url.replace(root.slice(0, index), \"\"))\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch the sitemap for the given base URL\n *\n * @param base - Base URL\n *\n * @returns Sitemap observable\n */\nexport function fetchSitemap(base?: URL): Observable {\n const cached = __md_get(\"__sitemap\", sessionStorage, base)\n if (cached) {\n return of(cached)\n } else {\n const config = configuration()\n return requestXML(new URL(\"sitemap.xml\", base || config.base))\n .pipe(\n map(sitemap => preprocess(getElements(\"loc\", sitemap)\n .map(node => node.textContent!)\n )),\n catchError(() => EMPTY), // @todo refactor instant loading\n defaultIfEmpty([]),\n tap(sitemap => __md_set(\"__sitemap\", sitemap, sessionStorage, base))\n )\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n bufferCount,\n catchError,\n concatMap,\n debounceTime,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n fromEvent,\n map,\n merge,\n of,\n sample,\n share,\n skip,\n skipUntil,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"~/_\"\nimport {\n Viewport,\n ViewportOffset,\n getElements,\n getOptionalElement,\n request,\n setLocation,\n setLocationHash\n} from \"~/browser\"\nimport { getComponentElement } from \"~/components\"\nimport { h } from \"~/utilities\"\n\nimport { fetchSitemap } from \"../sitemap\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * History state\n */\nexport interface HistoryState {\n url: URL /* State URL */\n offset?: ViewportOffset /* State viewport offset */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Setup options\n */\ninterface SetupOptions {\n document$: Subject /* Document subject */\n location$: Subject /* Location subject */\n viewport$: Observable /* Viewport observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up instant loading\n *\n * When fetching, theoretically, we could use `responseType: \"document\"`, but\n * since all MkDocs links are relative, we need to make sure that the current\n * location matches the document we just loaded. Otherwise any relative links\n * in the document could use the old location.\n *\n * This is the reason why we need to synchronize history events and the process\n * of fetching the document for navigation changes (except `popstate` events):\n *\n * 1. Fetch document via `XMLHTTPRequest`\n * 2. Set new location via `history.pushState`\n * 3. Parse and emit fetched document\n *\n * For `popstate` events, we must not use `history.pushState`, or the forward\n * history will be irreversibly overwritten. 
In case the request fails, the\n * location change is dispatched regularly.\n *\n * @param options - Options\n */\nexport function setupInstantLoading(\n { document$, location$, viewport$ }: SetupOptions\n): void {\n const config = configuration()\n if (location.protocol === \"file:\")\n return\n\n /* Disable automatic scroll restoration */\n if (\"scrollRestoration\" in history) {\n history.scrollRestoration = \"manual\"\n\n /* Hack: ensure that reloads restore viewport offset */\n fromEvent(window, \"beforeunload\")\n .subscribe(() => {\n history.scrollRestoration = \"auto\"\n })\n }\n\n /* Hack: ensure absolute favicon link to omit 404s when switching */\n const favicon = getOptionalElement(\"link[rel=icon]\")\n if (typeof favicon !== \"undefined\")\n favicon.href = favicon.href\n\n /* Intercept internal navigation */\n const push$ = fetchSitemap()\n .pipe(\n map(paths => paths.map(path => `${new URL(path, config.base)}`)),\n switchMap(urls => fromEvent(document.body, \"click\")\n .pipe(\n filter(ev => !ev.metaKey && !ev.ctrlKey),\n switchMap(ev => {\n if (ev.target instanceof Element) {\n const el = ev.target.closest(\"a\")\n if (el && !el.target) {\n const url = new URL(el.href)\n\n /* Canonicalize URL */\n url.search = \"\"\n url.hash = \"\"\n\n /* Check if URL should be intercepted */\n if (\n url.pathname !== location.pathname &&\n urls.includes(url.toString())\n ) {\n ev.preventDefault()\n return of({\n url: new URL(el.href)\n })\n }\n }\n }\n return NEVER\n })\n )\n ),\n share()\n )\n\n /* Intercept history back and forward */\n const pop$ = fromEvent(window, \"popstate\")\n .pipe(\n filter(ev => ev.state !== null),\n map(ev => ({\n url: new URL(location.href),\n offset: ev.state\n })),\n share()\n )\n\n /* Emit location change */\n merge(push$, pop$)\n .pipe(\n distinctUntilChanged((a, b) => a.url.href === b.url.href),\n map(({ url }) => url)\n )\n .subscribe(location$)\n\n /* Fetch document via `XMLHTTPRequest` */\n const response$ = location$\n .pipe(\n distinctUntilKeyChanged(\"pathname\"),\n switchMap(url => request(url.href)\n .pipe(\n catchError(() => {\n setLocation(url)\n return NEVER\n })\n )\n ),\n share()\n )\n\n /* Set new location via `history.pushState` */\n push$\n .pipe(\n sample(response$)\n )\n .subscribe(({ url }) => {\n history.pushState({}, \"\", `${url}`)\n })\n\n /* Parse and emit fetched document */\n const dom = new DOMParser()\n response$\n .pipe(\n switchMap(res => res.text()),\n map(res => dom.parseFromString(res, \"text/html\"))\n )\n .subscribe(document$)\n\n /* Replace meta tags and components */\n document$\n .pipe(\n skip(1)\n )\n .subscribe(replacement => {\n for (const selector of [\n\n /* Meta tags */\n \"title\",\n \"link[rel=canonical]\",\n \"meta[name=author]\",\n \"meta[name=description]\",\n\n /* Components */\n \"[data-md-component=announce]\",\n \"[data-md-component=container]\",\n \"[data-md-component=header-topic]\",\n \"[data-md-component=outdated]\",\n \"[data-md-component=logo]\",\n \"[data-md-component=skip]\",\n ...feature(\"navigation.tabs.sticky\")\n ? 
[\"[data-md-component=tabs]\"]\n : []\n ]) {\n const source = getOptionalElement(selector)\n const target = getOptionalElement(selector, replacement)\n if (\n typeof source !== \"undefined\" &&\n typeof target !== \"undefined\"\n ) {\n source.replaceWith(target)\n }\n }\n })\n\n /* Re-evaluate scripts */\n document$\n .pipe(\n skip(1),\n map(() => getComponentElement(\"container\")),\n switchMap(el => getElements(\"script\", el)),\n concatMap(el => {\n const script = h(\"script\")\n if (el.src) {\n for (const name of el.getAttributeNames())\n script.setAttribute(name, el.getAttribute(name)!)\n el.replaceWith(script)\n\n /* Complete when script is loaded */\n return new Observable(observer => {\n script.onload = () => observer.complete()\n })\n\n /* Complete immediately */\n } else {\n script.textContent = el.textContent\n el.replaceWith(script)\n return EMPTY\n }\n })\n )\n .subscribe()\n\n /* Emit history state change */\n merge(push$, pop$)\n .pipe(\n sample(document$)\n )\n .subscribe(({ url, offset }) => {\n if (url.hash && !offset) {\n setLocationHash(url.hash)\n } else {\n window.scrollTo(0, offset?.y || 0)\n }\n })\n\n /* Debounce update of viewport offset */\n viewport$\n .pipe(\n skipUntil(push$),\n debounceTime(250),\n distinctUntilKeyChanged(\"offset\")\n )\n .subscribe(({ offset }) => {\n history.replaceState(offset, \"\")\n })\n\n /* Set viewport offset from history */\n merge(push$, pop$)\n .pipe(\n bufferCount(2, 1),\n filter(([a, b]) => a.url.pathname === b.url.pathname),\n map(([, state]) => state)\n )\n .subscribe(({ offset }) => {\n window.scrollTo(0, offset?.y || 0)\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexDocument } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search document\n */\nexport interface SearchDocument extends SearchIndexDocument {\n parent?: SearchIndexDocument /* Parent article */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search document mapping\n */\nexport type SearchDocumentMap = Map\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search document mapping\n *\n * @param docs - Search index documents\n *\n * @returns Search document map\n */\nexport function setupSearchDocumentMap(\n docs: SearchIndexDocument[]\n): SearchDocumentMap {\n const documents = new Map()\n const parents = new Set()\n for (const doc of docs) {\n const [path, hash] = doc.location.split(\"#\")\n\n /* Extract location, title and tags */\n const location = doc.location\n const title = doc.title\n const tags = doc.tags\n\n /* Escape and cleanup text */\n const text = escapeHTML(doc.text)\n .replace(/\\s+(?=[,.:;!?])/g, \"\")\n .replace(/\\s+/g, \" \")\n\n /* Handle section */\n if (hash) {\n const parent = documents.get(path)!\n\n /* Ignore first section, override article */\n if (!parents.has(parent)) {\n parent.title = doc.title\n parent.text = text\n\n /* Remember that we processed the article */\n parents.add(parent)\n\n /* Add subsequent section */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n parent\n })\n }\n\n /* Add article */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n ...tags && { tags }\n })\n }\n }\n return documents\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexConfig } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search highlight function\n *\n * @param value - Value\n *\n * @returns Highlighted value\n */\nexport type SearchHighlightFn = (value: string) => string\n\n/**\n * Search highlight factory function\n *\n * @param query - Query value\n *\n * @returns Search highlight function\n */\nexport type SearchHighlightFactoryFn = (query: string) => SearchHighlightFn\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search highlighter\n *\n * @param config - Search index configuration\n * @param escape - Whether to escape HTML\n *\n * @returns Search highlight factory function\n */\nexport function setupSearchHighlighter(\n config: SearchIndexConfig, escape: boolean\n): SearchHighlightFactoryFn {\n const separator = new RegExp(config.separator, \"img\")\n const highlight = (_: unknown, data: string, term: string) => {\n return `${data}${term}`\n }\n\n /* Return factory function */\n return (query: string) => {\n query = query\n .replace(/[\\s*+\\-:~^]+/g, \" \")\n .trim()\n\n /* Create search term match expression */\n const match = new RegExp(`(^|${config.separator})(${\n query\n .replace(/[|\\\\{}()[\\]^$+*?.-]/g, \"\\\\$&\")\n .replace(separator, \"|\")\n })`, \"img\")\n\n /* Highlight string value */\n return value => (\n escape\n ? escapeHTML(value)\n : value\n )\n .replace(match, highlight)\n .replace(/<\\/mark>(\\s+)]*>/img, \"$1\")\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search transformation function\n *\n * @param value - Query value\n *\n * @returns Transformed query value\n */\nexport type SearchTransformFn = (value: string) => string\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Default transformation function\n *\n * 1. Search for terms in quotation marks and prepend a `+` modifier to denote\n * that the resulting document must contain all terms, converting the query\n * to an `AND` query (as opposed to the default `OR` behavior). While users\n * may expect terms enclosed in quotation marks to map to span queries, i.e.\n * for which order is important, Lunr.js doesn't support them, so the best\n * we can do is to convert the terms to an `AND` query.\n *\n * 2. Replace control characters which are not located at the beginning of the\n * query or preceded by white space, or are not followed by a non-whitespace\n * character or are at the end of the query string. Furthermore, filter\n * unmatched quotation marks.\n *\n * 3. Trim excess whitespace from left and right.\n *\n * @param query - Query value\n *\n * @returns Transformed query value\n */\nexport function defaultTransform(query: string): string {\n return query\n .split(/\"([^\"]+)\"/g) /* => 1 */\n .map((terms, index) => index & 1\n ? terms.replace(/^\\b|^(?![^\\x00-\\x7F]|$)|\\s+/g, \" +\")\n : terms\n )\n .join(\"\")\n .replace(/\"|(?:^|\\s+)[*+\\-:^~]+(?=\\s+|$)/g, \"\") /* => 2 */\n .trim() /* => 3 */\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A RTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { SearchIndex, SearchResult } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search message type\n */\nexport const enum SearchMessageType {\n SETUP, /* Search index setup */\n READY, /* Search index ready */\n QUERY, /* Search query */\n RESULT /* Search results */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Message containing the data necessary to setup the search index\n */\nexport interface SearchSetupMessage {\n type: SearchMessageType.SETUP /* Message type */\n data: SearchIndex /* Message data */\n}\n\n/**\n * Message indicating the search index is ready\n */\nexport interface SearchReadyMessage {\n type: SearchMessageType.READY /* Message type */\n}\n\n/**\n * Message containing a search query\n */\nexport interface SearchQueryMessage {\n type: SearchMessageType.QUERY /* Message type */\n data: string /* Message data */\n}\n\n/**\n * Message containing results for a search query\n */\nexport interface SearchResultMessage {\n type: SearchMessageType.RESULT /* Message type */\n data: SearchResult /* Message data */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Message exchanged with the search worker\n */\nexport type SearchMessage =\n | SearchSetupMessage\n | SearchReadyMessage\n | SearchQueryMessage\n | SearchResultMessage\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Type guard for search setup messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchSetupMessage(\n message: SearchMessage\n): message is SearchSetupMessage {\n return message.type === SearchMessageType.SETUP\n}\n\n/**\n * Type guard for search ready messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchReadyMessage(\n message: SearchMessage\n): message is SearchReadyMessage {\n return message.type === SearchMessageType.READY\n}\n\n/**\n * Type guard for search query messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchQueryMessage(\n message: SearchMessage\n): message is SearchQueryMessage {\n return message.type === SearchMessageType.QUERY\n}\n\n/**\n * Type guard for search result messages\n *\n * @param message - Search worker message\n *\n * @returns Test result\n */\nexport function isSearchResultMessage(\n message: SearchMessage\n): message is SearchResultMessage {\n return message.type === SearchMessageType.RESULT\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the 
Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A RTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n ObservableInput,\n Subject,\n from,\n map,\n share\n} from \"rxjs\"\n\nimport { configuration, feature, translation } from \"~/_\"\nimport { WorkerHandler, watchWorker } from \"~/browser\"\n\nimport { SearchIndex } from \"../../_\"\nimport {\n SearchOptions,\n SearchPipeline\n} from \"../../options\"\nimport {\n SearchMessage,\n SearchMessageType,\n SearchSetupMessage,\n isSearchResultMessage\n} from \"../message\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search worker\n */\nexport type SearchWorker = WorkerHandler\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up search index\n *\n * @param data - Search index\n *\n * @returns Search index\n */\nfunction setupSearchIndex({ config, docs }: SearchIndex): SearchIndex {\n\n /* Override default language with value from translation */\n if (config.lang.length === 1 && config.lang[0] === \"en\")\n config.lang = [\n translation(\"search.config.lang\")\n ]\n\n /* Override default separator with value from translation */\n if (config.separator === \"[\\\\s\\\\-]+\")\n config.separator = translation(\"search.config.separator\")\n\n /* Set pipeline from translation */\n const pipeline = translation(\"search.config.pipeline\")\n .split(/\\s*,\\s*/)\n .filter(Boolean) as SearchPipeline\n\n /* Determine search options */\n const options: SearchOptions = {\n pipeline,\n suggestions: feature(\"search.suggest\")\n }\n\n /* Return search index after defaulting */\n return { config, docs, options }\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up search worker\n *\n * This function creates a web worker to set up and query the search index,\n * which is done using Lunr.js. 
The index must be passed as an observable to\n * enable hacks like _localsearch_ via search index embedding as JSON.\n *\n * @param url - Worker URL\n * @param index - Search index observable input\n *\n * @returns Search worker\n */\nexport function setupSearchWorker(\n url: string, index: ObservableInput\n): SearchWorker {\n const config = configuration()\n const worker = new Worker(url)\n\n /* Create communication channels and resolve relative links */\n const tx$ = new Subject()\n const rx$ = watchWorker(worker, { tx$ })\n .pipe(\n map(message => {\n if (isSearchResultMessage(message)) {\n for (const result of message.data.items)\n for (const document of result)\n document.location = `${new URL(document.location, config.base)}`\n }\n return message\n }),\n share()\n )\n\n /* Set up search index */\n from(index)\n .pipe(\n map(data => ({\n type: SearchMessageType.SETUP,\n data: setupSearchIndex(data)\n } as SearchSetupMessage))\n )\n .subscribe(tx$.next.bind(tx$))\n\n /* Return search worker */\n return { tx$, rx$ }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Subject,\n catchError,\n combineLatest,\n filter,\n fromEvent,\n map,\n of,\n switchMap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { configuration } from \"~/_\"\nimport {\n getElement,\n getLocation,\n requestJSON,\n setLocation\n} from \"~/browser\"\nimport { getComponentElements } from \"~/components\"\nimport {\n Version,\n renderVersionSelector\n} from \"~/templates\"\n\nimport { fetchSitemap } from \"../sitemap\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Setup options\n */\ninterface SetupOptions {\n document$: Subject /* Document subject */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Set up version selector\n *\n * @param options - Options\n */\nexport function setupVersionSelector(\n { document$ }: SetupOptions\n): void {\n const config = configuration()\n const versions$ = requestJSON(\n new URL(\"../versions.json\", config.base)\n )\n .pipe(\n catchError(() => EMPTY) // @todo refactor instant loading\n )\n\n /* Determine current version */\n const current$ = versions$\n .pipe(\n map(versions => {\n const [, current] = config.base.match(/([^/]+)\\/?$/)!\n return versions.find(({ version, aliases }) => (\n version === current || aliases.includes(current)\n )) || versions[0]\n })\n )\n\n /* Intercept inter-version navigation */\n versions$\n .pipe(\n map(versions => new Map(versions.map(version => [\n `${new URL(`../${version.version}/`, config.base)}`,\n version\n ]))),\n switchMap(urls => fromEvent(document.body, \"click\")\n .pipe(\n filter(ev => !ev.metaKey && !ev.ctrlKey),\n withLatestFrom(current$),\n switchMap(([ev, current]) => {\n if (ev.target instanceof Element) {\n const el = ev.target.closest(\"a\")\n if (el && !el.target && urls.has(el.href)) {\n const url = el.href\n // This is a temporary hack to detect if a version inside the\n // version selector or on another part of the site was clicked.\n // If we're inside the version selector, we definitely want to\n // find the same page, as we might have different deployments\n // due to aliases. However, if we're outside the version\n // selector, we must abort here, because we might otherwise\n // interfere with instant loading. We need to refactor this\n // at some point together with instant loading.\n //\n // See https://github.com/squidfunk/mkdocs-material/issues/4012\n if (!ev.target.closest(\".md-version\")) {\n const version = urls.get(url)!\n if (version === current)\n return EMPTY\n }\n ev.preventDefault()\n return of(url)\n }\n }\n return EMPTY\n }),\n switchMap(url => {\n const { version } = urls.get(url)!\n return fetchSitemap(new URL(url))\n .pipe(\n map(sitemap => {\n const location = getLocation()\n const path = location.href.replace(config.base, \"\")\n return sitemap.includes(path.split(\"#\")[0])\n ? 
new URL(`../${version}/${path}`, config.base)\n : new URL(url)\n })\n )\n })\n )\n )\n )\n .subscribe(url => setLocation(url))\n\n /* Render version selector and warning */\n combineLatest([versions$, current$])\n .subscribe(([versions, current]) => {\n const topic = getElement(\".md-header__topic\")\n topic.appendChild(renderVersionSelector(versions, current))\n })\n\n /* Integrate outdated version banner with instant loading */\n document$.pipe(switchMap(() => current$))\n .subscribe(current => {\n\n /* Check if version state was already determined */\n let outdated = __md_get(\"__outdated\", sessionStorage)\n if (outdated === null) {\n const latest = config.version?.default || \"latest\"\n outdated = !current.aliases.includes(latest)\n\n /* Persist version state in session storage */\n __md_set(\"__outdated\", outdated, sessionStorage)\n }\n\n /* Unhide outdated version banner */\n if (outdated)\n for (const warning of getComponentElements(\"outdated\"))\n warning.hidden = false\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n combineLatest,\n delay,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n finalize,\n fromEvent,\n map,\n merge,\n share,\n shareReplay,\n startWith,\n take,\n takeLast,\n takeUntil,\n tap\n} from \"rxjs\"\n\nimport { translation } from \"~/_\"\nimport {\n getLocation,\n setToggle,\n watchElementFocus,\n watchToggle\n} from \"~/browser\"\nimport {\n SearchMessageType,\n SearchQueryMessage,\n SearchWorker,\n defaultTransform,\n isSearchReadyMessage\n} from \"~/integrations\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search query\n */\nexport interface SearchQuery {\n value: string /* Query value */\n focus: boolean /* Query focus */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch search query\n *\n * Note that the focus event which triggers re-reading the current query value\n * is delayed by `1ms` so the input's empty state is allowed to propagate.\n *\n * @param el - Search query element\n * @param worker - Search worker\n *\n * @returns Search query observable\n */\nexport function watchSearchQuery(\n el: HTMLInputElement, { rx$ }: SearchWorker\n): Observable {\n const fn = __search?.transform || defaultTransform\n\n /* Immediately show search dialog */\n const { searchParams } = getLocation()\n if (searchParams.has(\"q\"))\n setToggle(\"search\", true)\n\n /* Intercept query parameter (deep link) */\n const param$ = rx$\n .pipe(\n filter(isSearchReadyMessage),\n take(1),\n map(() => searchParams.get(\"q\") || \"\")\n )\n\n /* Remove query parameter when search is closed */\n watchToggle(\"search\")\n .pipe(\n filter(active => !active),\n take(1)\n )\n .subscribe(() => {\n const url = new URL(location.href)\n url.searchParams.delete(\"q\")\n history.replaceState({}, \"\", `${url}`)\n })\n\n /* Set query from parameter */\n param$.subscribe(value => { // TODO: not ideal - find a better way\n if (value) {\n el.value = value\n el.focus()\n }\n })\n\n /* Intercept focus and input events */\n const focus$ = watchElementFocus(el)\n const value$ = merge(\n fromEvent(el, \"keyup\"),\n fromEvent(el, \"focus\").pipe(delay(1)),\n param$\n )\n .pipe(\n map(() => fn(el.value)),\n startWith(\"\"),\n distinctUntilChanged(),\n )\n\n /* Combine into single observable */\n return combineLatest([value$, focus$])\n .pipe(\n map(([value, focus]) => ({ value, focus })),\n shareReplay(1)\n )\n}\n\n/**\n * Mount search query\n *\n * @param el - Search query element\n * @param worker - Search worker\n *\n * @returns Search query component observable\n */\nexport function mountSearchQuery(\n el: HTMLInputElement, { tx$, rx$ }: SearchWorker\n): Observable> {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n\n /* Handle value changes */\n push$\n .pipe(\n distinctUntilKeyChanged(\"value\"),\n map(({ value }): SearchQueryMessage => ({\n type: SearchMessageType.QUERY,\n data: value\n }))\n )\n .subscribe(tx$.next.bind(tx$))\n\n /* Handle 
focus changes */\n push$\n .pipe(\n distinctUntilKeyChanged(\"focus\")\n )\n .subscribe(({ focus }) => {\n if (focus) {\n setToggle(\"search\", focus)\n el.placeholder = \"\"\n } else {\n el.placeholder = translation(\"search.placeholder\")\n }\n })\n\n /* Handle reset */\n fromEvent(el.form!, \"reset\")\n .pipe(\n takeUntil(done$)\n )\n .subscribe(() => el.focus())\n\n /* Create and return component */\n return watchSearchQuery(el, { tx$, rx$ })\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state })),\n share()\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n filter,\n finalize,\n map,\n merge,\n of,\n skipUntil,\n switchMap,\n take,\n tap,\n withLatestFrom,\n zipWith\n} from \"rxjs\"\n\nimport { translation } from \"~/_\"\nimport {\n getElement,\n watchElementBoundary\n} from \"~/browser\"\nimport {\n SearchResult,\n SearchWorker,\n isSearchReadyMessage,\n isSearchResultMessage\n} from \"~/integrations\"\nimport { renderSearchResultItem } from \"~/templates\"\nimport { round } from \"~/utilities\"\n\nimport { Component } from \"../../_\"\nimport { SearchQuery } from \"../query\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n query$: Observable /* Search query observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search result list\n *\n * This function performs a lazy rendering of the search results, depending on\n * the vertical offset of the search result container.\n *\n * @param el - Search result list element\n * @param worker - Search worker\n * @param options - Options\n *\n * @returns Search result list component observable\n */\nexport function mountSearchResult(\n el: HTMLElement, { rx$ }: SearchWorker, { query$ }: MountOptions\n): Observable> {\n const push$ = new Subject()\n const boundary$ = watchElementBoundary(el.parentElement!)\n .pipe(\n filter(Boolean)\n )\n\n /* Retrieve nested components */\n const meta = getElement(\":scope > :first-child\", el)\n const list = getElement(\":scope > :last-child\", el)\n\n /* Wait until 
search is ready */\n const ready$ = rx$\n .pipe(\n filter(isSearchReadyMessage),\n take(1)\n )\n\n /* Update search result metadata */\n push$\n .pipe(\n withLatestFrom(query$),\n skipUntil(ready$)\n )\n .subscribe(([{ items }, { value }]) => {\n if (value) {\n switch (items.length) {\n\n /* No results */\n case 0:\n meta.textContent = translation(\"search.result.none\")\n break\n\n /* One result */\n case 1:\n meta.textContent = translation(\"search.result.one\")\n break\n\n /* Multiple result */\n default:\n meta.textContent = translation(\n \"search.result.other\",\n round(items.length)\n )\n }\n } else {\n meta.textContent = translation(\"search.result.placeholder\")\n }\n })\n\n /* Update search result list */\n push$\n .pipe(\n tap(() => list.innerHTML = \"\"),\n switchMap(({ items }) => merge(\n of(...items.slice(0, 10)),\n of(...items.slice(10))\n .pipe(\n bufferCount(4),\n zipWith(boundary$),\n switchMap(([chunk]) => chunk)\n )\n ))\n )\n .subscribe(result => list.appendChild(\n renderSearchResultItem(result)\n ))\n\n /* Filter search result message */\n const result$ = rx$\n .pipe(\n filter(isSearchResultMessage),\n map(({ data }) => data)\n )\n\n /* Create and return component */\n return result$\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n finalize,\n fromEvent,\n map,\n tap\n} from \"rxjs\"\n\nimport { getLocation } from \"~/browser\"\n\nimport { Component } from \"../../_\"\nimport { SearchQuery } from \"../query\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search sharing\n */\nexport interface SearchShare {\n url: URL /* Deep link for sharing */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n query$: Observable /* Search query observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n query$: Observable /* Search query observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search sharing\n *\n * @param _el - Search sharing element\n * @param options - Options\n *\n * @returns Search sharing observable\n */\nexport function watchSearchShare(\n _el: HTMLElement, { query$ }: WatchOptions\n): Observable {\n return query$\n .pipe(\n map(({ value }) => {\n const url = getLocation()\n url.hash = \"\"\n url.searchParams.delete(\"h\")\n url.searchParams.set(\"q\", value)\n return { url }\n })\n )\n}\n\n/**\n * Mount search sharing\n *\n * @param el - Search sharing element\n * @param options - Options\n *\n * @returns Search sharing component observable\n */\nexport function mountSearchShare(\n el: HTMLAnchorElement, options: MountOptions\n): Observable> {\n const push$ = new Subject()\n push$.subscribe(({ url }) => {\n el.setAttribute(\"data-clipboard-text\", el.href)\n el.href = `${url}`\n })\n\n /* Prevent following of link */\n fromEvent(el, \"click\")\n .subscribe(ev => ev.preventDefault())\n\n /* Create and return component */\n return watchSearchShare(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n asyncScheduler,\n combineLatestWith,\n distinctUntilChanged,\n filter,\n finalize,\n fromEvent,\n map,\n merge,\n observeOn,\n tap\n} from \"rxjs\"\n\nimport { Keyboard } from \"~/browser\"\nimport {\n SearchResult,\n SearchWorker,\n isSearchResultMessage\n} from \"~/integrations\"\n\nimport { Component, getComponentElement } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search suggestions\n */\nexport interface SearchSuggest {}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n keyboard$: Observable /* Keyboard observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search suggestions\n *\n * This function will perform a lazy rendering of the search results, depending\n * on the vertical offset of the search result container.\n *\n * @param el - Search result list element\n * @param worker - Search worker\n * @param options - Options\n *\n * @returns Search result list component observable\n */\nexport function mountSearchSuggest(\n el: HTMLElement, { rx$ }: SearchWorker, { keyboard$ }: MountOptions\n): Observable> {\n const push$ = new Subject()\n\n /* Retrieve query component and track all changes */\n const query = getComponentElement(\"search-query\")\n const query$ = merge(\n fromEvent(query, \"keydown\"),\n fromEvent(query, \"focus\")\n )\n .pipe(\n observeOn(asyncScheduler),\n map(() => query.value),\n distinctUntilChanged(),\n )\n\n /* Update search suggestions */\n push$\n .pipe(\n combineLatestWith(query$),\n map(([{ suggestions }, value]) => {\n const words = value.split(/([\\s-]+)/)\n if (suggestions?.length && words[words.length - 1]) {\n const last = suggestions[suggestions.length - 1]\n if (last.startsWith(words[words.length - 1]))\n words[words.length - 1] = last\n } else {\n words.length = 0\n }\n return words\n })\n )\n .subscribe(words => el.innerHTML = words\n .join(\"\")\n .replace(/\\s/g, \" \")\n )\n\n /* Set up search keyboard handlers */\n keyboard$\n .pipe(\n filter(({ mode }) => mode === \"search\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Right arrow: accept current suggestion */\n case \"ArrowRight\":\n if (\n el.innerText.length &&\n query.selectionStart === query.value.length\n )\n query.value = el.innerText\n break\n }\n })\n\n /* Filter search result message */\n const result$ = rx$\n .pipe(\n filter(isSearchResultMessage),\n map(({ data }) => data)\n )\n\n /* Create and return component */\n return result$\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(() => ({ ref: el }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal 
in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n NEVER,\n Observable,\n ObservableInput,\n filter,\n merge,\n mergeWith,\n sample,\n take\n} from \"rxjs\"\n\nimport { configuration } from \"~/_\"\nimport {\n Keyboard,\n getActiveElement,\n getElements,\n setToggle\n} from \"~/browser\"\nimport {\n SearchIndex,\n SearchResult,\n isSearchQueryMessage,\n isSearchReadyMessage,\n setupSearchWorker\n} from \"~/integrations\"\n\nimport {\n Component,\n getComponentElement,\n getComponentElements\n} from \"../../_\"\nimport {\n SearchQuery,\n mountSearchQuery\n} from \"../query\"\nimport { mountSearchResult } from \"../result\"\nimport {\n SearchShare,\n mountSearchShare\n} from \"../share\"\nimport {\n SearchSuggest,\n mountSearchSuggest\n} from \"../suggest\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search\n */\nexport type Search =\n | SearchQuery\n | SearchResult\n | SearchShare\n | SearchSuggest\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n index$: ObservableInput /* Search index observable */\n keyboard$: Observable /* Keyboard observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search\n *\n * This function sets up the search functionality, including the underlying\n * web worker and all keyboard bindings.\n *\n * @param el - Search element\n * @param options - Options\n *\n * @returns Search component observable\n */\nexport function mountSearch(\n el: HTMLElement, { index$, keyboard$ }: MountOptions\n): Observable> {\n const config = configuration()\n try {\n const url = __search?.worker || config.search\n const worker = setupSearchWorker(url, index$)\n\n /* Retrieve query and result components */\n const query = getComponentElement(\"search-query\", el)\n const result = getComponentElement(\"search-result\", el)\n\n /* Re-emit query when search is ready */\n const { tx$, rx$ } = worker\n tx$\n .pipe(\n filter(isSearchQueryMessage),\n sample(rx$.pipe(filter(isSearchReadyMessage))),\n take(1)\n )\n .subscribe(tx$.next.bind(tx$))\n\n /* Set up search keyboard handlers */\n keyboard$\n .pipe(\n filter(({ mode }) => mode === \"search\")\n )\n .subscribe(key => {\n const active = getActiveElement()\n switch (key.type) {\n\n /* 
Enter: go to first (best) result */\n case \"Enter\":\n if (active === query) {\n const anchors = new Map()\n for (const anchor of getElements(\n \":first-child [href]\", result\n )) {\n const article = anchor.firstElementChild!\n anchors.set(anchor, parseFloat(\n article.getAttribute(\"data-md-score\")!\n ))\n }\n\n /* Go to result with highest score, if any */\n if (anchors.size) {\n const [[best]] = [...anchors].sort(([, a], [, b]) => b - a)\n best.click()\n }\n\n /* Otherwise omit form submission */\n key.claim()\n }\n break\n\n /* Escape or Tab: close search */\n case \"Escape\":\n case \"Tab\":\n setToggle(\"search\", false)\n query.blur()\n break\n\n /* Vertical arrows: select previous or next search result */\n case \"ArrowUp\":\n case \"ArrowDown\":\n if (typeof active === \"undefined\") {\n query.focus()\n } else {\n const els = [query, ...getElements(\n \":not(details) > [href], summary, details[open] [href]\",\n result\n )]\n const i = Math.max(0, (\n Math.max(0, els.indexOf(active)) + els.length + (\n key.type === \"ArrowUp\" ? -1 : +1\n )\n ) % els.length)\n els[i].focus()\n }\n\n /* Prevent scrolling of page */\n key.claim()\n break\n\n /* All other keys: hand to search query */\n default:\n if (query !== getActiveElement())\n query.focus()\n }\n })\n\n /* Set up global keyboard handlers */\n keyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\"),\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Open search and select query */\n case \"f\":\n case \"s\":\n case \"/\":\n query.focus()\n query.select()\n\n /* Prevent scrolling of page */\n key.claim()\n break\n }\n })\n\n /* Create and return component */\n const query$ = mountSearchQuery(query, worker)\n const result$ = mountSearchResult(result, worker, { query$ })\n return merge(query$, result$)\n .pipe(\n mergeWith(\n\n /* Search sharing */\n ...getComponentElements(\"search-share\", el)\n .map(child => mountSearchShare(child, { query$ })),\n\n /* Search suggestions */\n ...getComponentElements(\"search-suggest\", el)\n .map(child => mountSearchSuggest(child, worker, { keyboard$ }))\n )\n )\n\n /* Gracefully handle broken search */\n } catch (err) {\n el.hidden = true\n return NEVER\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n ObservableInput,\n combineLatest,\n filter,\n map,\n startWith\n} from \"rxjs\"\n\nimport { getLocation } from \"~/browser\"\nimport {\n SearchIndex,\n setupSearchHighlighter\n} from \"~/integrations\"\nimport { h } from \"~/utilities\"\n\nimport { Component } from \"../../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search highlighting\n */\nexport interface SearchHighlight {\n nodes: Map /* Map of replacements */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount options\n */\ninterface MountOptions {\n index$: ObservableInput /* Search index observable */\n location$: Observable /* Location observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Mount search highlighting\n *\n * @param el - Content element\n * @param options - Options\n *\n * @returns Search highlighting component observable\n */\nexport function mountSearchHiglight(\n el: HTMLElement, { index$, location$ }: MountOptions\n): Observable> {\n return combineLatest([\n index$,\n location$\n .pipe(\n startWith(getLocation()),\n filter(url => !!url.searchParams.get(\"h\"))\n )\n ])\n .pipe(\n map(([index, url]) => setupSearchHighlighter(index.config, true)(\n url.searchParams.get(\"h\")!\n )),\n map(fn => {\n const nodes = new Map()\n\n /* Traverse text nodes and collect matches */\n const it = document.createNodeIterator(el, NodeFilter.SHOW_TEXT)\n for (let node = it.nextNode(); node; node = it.nextNode()) {\n if (node.parentElement?.offsetHeight) {\n const original = node.textContent!\n const replaced = fn(original)\n if (replaced.length > original.length)\n nodes.set(node as ChildNode, replaced)\n }\n }\n\n /* Replace original nodes with matches */\n for (const [node, text] of nodes) {\n const { childNodes } = h(\"span\", null, text)\n node.replaceWith(...Array.from(childNodes))\n }\n\n /* Return component */\n return { ref: el, nodes }\n })\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n animationFrameScheduler,\n auditTime,\n combineLatest,\n defer,\n distinctUntilChanged,\n finalize,\n map,\n observeOn,\n take,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport {\n Viewport,\n getElement,\n getElementContainer,\n getElementOffset,\n getElementSize,\n getElements\n} from \"~/browser\"\n\nimport { Component } from \"../_\"\nimport { Header } from \"../header\"\nimport { Main } from \"../main\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Sidebar\n */\nexport interface Sidebar {\n height: number /* Sidebar height */\n locked: boolean /* Sidebar is locked */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n main$: Observable
<Main>    /* Main area observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable<Viewport> /* Viewport observable */\n header$: Observable
<Header>    /* Header observable */\n main$: Observable<Main>
    /* Main area observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch sidebar\n *\n * This function returns an observable that computes the visual parameters of\n * the sidebar which depends on the vertical viewport offset, as well as the\n * height of the main area. When the page is scrolled beyond the header, the\n * sidebar is locked and fills the remaining space.\n *\n * @param el - Sidebar element\n * @param options - Options\n *\n * @returns Sidebar observable\n */\nexport function watchSidebar(\n el: HTMLElement, { viewport$, main$ }: WatchOptions\n): Observable {\n const parent = el.parentElement!\n const adjust =\n parent.offsetTop -\n parent.parentElement!.offsetTop\n\n /* Compute the sidebar's available height and if it should be locked */\n return combineLatest([main$, viewport$])\n .pipe(\n map(([{ offset, height }, { offset: { y } }]) => {\n height = height\n + Math.min(adjust, Math.max(0, y - offset))\n - adjust\n return {\n height,\n locked: y >= offset + adjust\n }\n }),\n distinctUntilChanged((a, b) => (\n a.height === b.height &&\n a.locked === b.locked\n ))\n )\n}\n\n/**\n * Mount sidebar\n *\n * This function doesn't set the height of the actual sidebar, but of its first\n * child \u2013 the `.md-sidebar__scrollwrap` element in order to mitigiate jittery\n * sidebars when the footer is scrolled into view. At some point we switched\n * from `absolute` / `fixed` positioning to `sticky` positioning, significantly\n * reducing jitter in some browsers (respectively Firefox and Safari) when\n * scrolling from the top. However, top-aligned sticky positioning means that\n * the sidebar snaps to the bottom when the end of the container is reached.\n * This is what leads to the mentioned jitter, as the sidebar's height may be\n * updated too slowly.\n *\n * This behaviour can be mitigiated by setting the height of the sidebar to `0`\n * while preserving the padding, and the height on its first element.\n *\n * @param el - Sidebar element\n * @param options - Options\n *\n * @returns Sidebar component observable\n */\nexport function mountSidebar(\n el: HTMLElement, { header$, ...options }: MountOptions\n): Observable> {\n const inner = getElement(\".md-sidebar__scrollwrap\", el)\n const { y } = getElementOffset(inner)\n return defer(() => {\n const push$ = new Subject()\n push$\n .pipe(\n auditTime(0, animationFrameScheduler),\n withLatestFrom(header$)\n )\n .subscribe({\n\n /* Handle emission */\n next([{ height }, { height: offset }]) {\n inner.style.height = `${height - 2 * y}px`\n el.style.top = `${offset}px`\n },\n\n /* Handle complete */\n complete() {\n inner.style.height = \"\"\n el.style.top = \"\"\n }\n })\n\n /* Bring active item into view on initial load */\n push$\n .pipe(\n observeOn(animationFrameScheduler),\n take(1)\n )\n .subscribe(() => {\n for (const item of getElements(\".md-nav__link--active[href]\", el)) {\n const container = getElementContainer(item)\n if (typeof container !== \"undefined\") {\n const offset = item.offsetTop - container.offsetTop\n const { height } = getElementSize(container)\n container.scrollTo({\n top: offset - height / 2\n })\n }\n }\n })\n\n /* Create and return component */\n return watchSidebar(el, options)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 
Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { Repo, User } from \"github-types\"\nimport {\n EMPTY,\n Observable,\n catchError,\n defaultIfEmpty,\n map,\n zip\n} from \"rxjs\"\n\nimport { requestJSON } from \"~/browser\"\n\nimport { SourceFacts } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * GitHub release (partial)\n */\ninterface Release {\n tag_name: string /* Tag name */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch GitHub repository facts\n *\n * @param user - GitHub user or organization\n * @param repo - GitHub repository\n *\n * @returns Repository facts observable\n */\nexport function fetchSourceFactsFromGitHub(\n user: string, repo?: string\n): Observable {\n if (typeof repo !== \"undefined\") {\n const url = `https://api.github.com/repos/${user}/${repo}`\n return zip(\n\n /* Fetch version */\n requestJSON(`${url}/releases/latest`)\n .pipe(\n catchError(() => EMPTY), // @todo refactor instant loading\n map(release => ({\n version: release.tag_name\n })),\n defaultIfEmpty({})\n ),\n\n /* Fetch stars and forks */\n requestJSON(url)\n .pipe(\n catchError(() => EMPTY), // @todo refactor instant loading\n map(info => ({\n stars: info.stargazers_count,\n forks: info.forks_count\n })),\n defaultIfEmpty({})\n )\n )\n .pipe(\n map(([release, info]) => ({ ...release, ...info }))\n )\n\n /* User or organization */\n } else {\n const url = `https://api.github.com/users/${user}`\n return requestJSON(url)\n .pipe(\n map(info => ({\n repositories: info.public_repos\n })),\n defaultIfEmpty({})\n )\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial 
portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { ProjectSchema } from \"gitlab\"\nimport {\n EMPTY,\n Observable,\n catchError,\n defaultIfEmpty,\n map\n} from \"rxjs\"\n\nimport { requestJSON } from \"~/browser\"\n\nimport { SourceFacts } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch GitLab repository facts\n *\n * @param base - GitLab base\n * @param project - GitLab project\n *\n * @returns Repository facts observable\n */\nexport function fetchSourceFactsFromGitLab(\n base: string, project: string\n): Observable {\n const url = `https://${base}/api/v4/projects/${encodeURIComponent(project)}`\n return requestJSON(url)\n .pipe(\n catchError(() => EMPTY), // @todo refactor instant loading\n map(({ star_count, forks_count }) => ({\n stars: star_count,\n forks: forks_count\n })),\n defaultIfEmpty({})\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
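/*
 * A small worked example (not part of the bundled sources): GitLab addresses
 * a project by its URL-encoded `namespace/project` path, which is why the
 * function above wraps it in `encodeURIComponent`. Values are hypothetical.
 */
const project = "some-group/some-project"
const apiUrl = `https://gitlab.example.com/api/v4/projects/${encodeURIComponent(project)}`
/* apiUrl === "https://gitlab.example.com/api/v4/projects/some-group%2Fsome-project" */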
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport { EMPTY, Observable } from \"rxjs\"\n\nimport { fetchSourceFactsFromGitHub } from \"../github\"\nimport { fetchSourceFactsFromGitLab } from \"../gitlab\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Repository facts for repositories\n */\nexport interface RepositoryFacts {\n stars?: number /* Number of stars */\n forks?: number /* Number of forks */\n version?: string /* Latest version */\n}\n\n/**\n * Repository facts for organizations\n */\nexport interface OrganizationFacts {\n repositories?: number /* Number of repositories */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Repository facts\n */\nexport type SourceFacts =\n | RepositoryFacts\n | OrganizationFacts\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch repository facts\n *\n * @param url - Repository URL\n *\n * @returns Repository facts observable\n */\nexport function fetchSourceFacts(\n url: string\n): Observable {\n\n /* Try to match GitHub repository */\n let match = url.match(/^.+github\\.com\\/([^/]+)\\/?([^/]+)?/i)\n if (match) {\n const [, user, repo] = match\n return fetchSourceFactsFromGitHub(user, repo)\n }\n\n /* Try to match GitLab repository */\n match = url.match(/^.+?([^/]*gitlab[^/]+)\\/(.+?)\\/?$/i)\n if (match) {\n const [, base, slug] = match\n return fetchSourceFactsFromGitLab(base, slug)\n }\n\n /* Fallback */\n return EMPTY\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
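/*
 * A small worked example (not part of the bundled sources) of how the two
 * regular expressions in `fetchSourceFacts` split a repository URL into
 * provider-specific parts. Both URLs are hypothetical inputs.
 */
const github = "https://github.com/some-user/some-repo"
  .match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i)!
/* github[1] === "some-user", github[2] === "some-repo" */

const gitlab = "https://gitlab.example.com/some-group/some-project/"
  .match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i)!
/* gitlab[1] === "gitlab.example.com", gitlab[2] === "some-group/some-project" */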
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n EMPTY,\n Observable,\n Subject,\n catchError,\n defer,\n filter,\n finalize,\n map,\n of,\n shareReplay,\n tap\n} from \"rxjs\"\n\nimport { getElement } from \"~/browser\"\nimport { ConsentDefaults } from \"~/components/consent\"\nimport { renderSourceFacts } from \"~/templates\"\n\nimport {\n Component,\n getComponentElements\n} from \"../../_\"\nimport {\n SourceFacts,\n fetchSourceFacts\n} from \"../facts\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Repository information\n */\nexport interface Source {\n facts: SourceFacts /* Repository facts */\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Repository information observable\n */\nlet fetch$: Observable\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch repository information\n *\n * This function tries to read the repository facts from session storage, and\n * if unsuccessful, fetches them from the underlying provider.\n *\n * @param el - Repository information element\n *\n * @returns Repository information observable\n */\nexport function watchSource(\n el: HTMLAnchorElement\n): Observable {\n return fetch$ ||= defer(() => {\n const cached = __md_get(\"__source\", sessionStorage)\n if (cached) {\n return of(cached)\n } else {\n\n /* Check if consent is configured and was given */\n const els = getComponentElements(\"consent\")\n if (els.length) {\n const consent = __md_get(\"__consent\")\n if (!(consent && consent.github))\n return EMPTY\n }\n\n /* Fetch repository facts */\n return fetchSourceFacts(el.href)\n .pipe(\n tap(facts => __md_set(\"__source\", facts, sessionStorage))\n )\n }\n })\n .pipe(\n catchError(() => EMPTY),\n filter(facts => Object.keys(facts).length > 0),\n map(facts => ({ facts })),\n shareReplay(1)\n )\n}\n\n/**\n * Mount repository information\n *\n * @param el - Repository information element\n *\n * @returns Repository information component observable\n */\nexport function mountSource(\n el: HTMLAnchorElement\n): Observable> {\n const inner = getElement(\":scope > :last-child\", el)\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe(({ facts }) => {\n inner.appendChild(renderSourceFacts(facts))\n inner.classList.add(\"md-source__repository--active\")\n })\n\n /* Create and return component */\n return watchSource(el)\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the 
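/*
 * A minimal sketch (not part of the bundled sources) of the caching idea in
 * `watchSource`, expressed with plain promises: read the facts from session
 * storage if present, otherwise fetch and store them. `key` and `load` are
 * hypothetical names; the bundled code uses its own `__md_get`/`__md_set`
 * helpers and RxJS instead.
 */
async function cachedFactsSketch<T>(key: string, load: () => Promise<T>): Promise<T> {
  const hit = sessionStorage.getItem(key)
  if (hit !== null)
    return JSON.parse(hit) as T
  const facts = await load()
  sessionStorage.setItem(key, JSON.stringify(facts))
  return facts
}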
Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n defer,\n distinctUntilKeyChanged,\n finalize,\n map,\n of,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n watchElementSize,\n watchViewportAt\n} from \"~/browser\"\n\nimport { Component } from \"../_\"\nimport { Header } from \"../header\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Navigation tabs\n */\nexport interface Tabs {\n hidden: boolean /* Navigation tabs are hidden */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch navigation tabs\n *\n * @param el - Navigation tabs element\n * @param options - Options\n *\n * @returns Navigation tabs observable\n */\nexport function watchTabs(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n return watchElementSize(document.body)\n .pipe(\n switchMap(() => watchViewportAt(el, { header$, viewport$ })),\n map(({ offset: { y } }) => {\n return {\n hidden: y >= 10\n }\n }),\n distinctUntilKeyChanged(\"hidden\")\n )\n}\n\n/**\n * Mount navigation tabs\n *\n * This function hides the navigation tabs when scrolling past the threshold\n * and makes them reappear in a nice CSS animation when scrolling back up.\n *\n * @param el - Navigation tabs element\n * @param options - Options\n *\n * @returns Navigation tabs component observable\n */\nexport function mountTabs(\n el: HTMLElement, options: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n push$.subscribe({\n\n /* Handle emission */\n next({ hidden }) {\n el.hidden = hidden\n },\n\n /* Handle complete */\n complete() {\n el.hidden = false\n }\n })\n\n /* Create and return component */\n return (\n feature(\"navigation.tabs.sticky\")\n ? of({ hidden: false })\n : watchTabs(el, options)\n )\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
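/*
 * A minimal sketch (not part of the bundled sources) of the visibility rule
 * in `watchTabs`: the tabs are hidden once the viewport has scrolled 10px
 * past them. `relativeY$` is a hypothetical observable standing in for the
 * offset produced by `watchViewportAt`.
 */
import { Observable, distinctUntilKeyChanged, map } from "rxjs"

function watchTabsSketch(relativeY$: Observable<number>): Observable<{ hidden: boolean }> {
  return relativeY$
    .pipe(
      map(y => ({ hidden: y >= 10 })),
      distinctUntilKeyChanged("hidden")
    )
}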
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n combineLatestWith,\n debounceTime,\n defer,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n filter,\n finalize,\n map,\n merge,\n of,\n repeat,\n scan,\n share,\n skip,\n startWith,\n switchMap,\n takeLast,\n takeUntil,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { feature } from \"~/_\"\nimport {\n Viewport,\n getElement,\n getElementContainer,\n getElementSize,\n getElements,\n getLocation,\n getOptionalElement,\n watchElementSize\n} from \"~/browser\"\n\nimport {\n Component,\n getComponentElement\n} from \"../_\"\nimport { Header } from \"../header\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Table of contents\n */\nexport interface TableOfContents {\n prev: HTMLAnchorElement[][] /* Anchors (previous) */\n next: HTMLAnchorElement[][] /* Anchors (next) */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n target$: Observable /* Location target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch table of contents\n *\n * This is effectively a scroll spy implementation which will account for the\n * fixed header and automatically re-calculate anchor offsets when the viewport\n * is resized. The returned observable will only emit if the table of contents\n * needs to be repainted.\n *\n * This implementation tracks an anchor element's entire path starting from its\n * level up to the top-most anchor element, e.g. `[h3, h2, h1]`. Although the\n * Material theme currently doesn't make use of this information, it enables\n * the styling of the entire hierarchy through customization.\n *\n * Note that the current anchor is the last item of the `prev` anchor list.\n *\n * @param el - Table of contents element\n * @param options - Options\n *\n * @returns Table of contents observable\n */\nexport function watchTableOfContents(\n el: HTMLElement, { viewport$, header$ }: WatchOptions\n): Observable {\n const table = new Map()\n\n /* Compute anchor-to-target mapping */\n const anchors = getElements(\"[href^=\\\\#]\", el)\n for (const anchor of anchors) {\n const id = decodeURIComponent(anchor.hash.substring(1))\n const target = getOptionalElement(`[id=\"${id}\"]`)\n if (typeof target !== \"undefined\")\n table.set(anchor, target)\n }\n\n /* Compute necessary adjustment for header */\n const adjust$ = header$\n .pipe(\n distinctUntilKeyChanged(\"height\"),\n map(({ height }) => {\n const main = getComponentElement(\"main\")\n const grid = getElement(\":scope > :first-child\", main)\n return height + 0.8 * (\n grid.offsetTop -\n main.offsetTop\n )\n }),\n share()\n )\n\n /* Compute partition of previous and next anchors */\n const partition$ = watchElementSize(document.body)\n .pipe(\n distinctUntilKeyChanged(\"height\"),\n\n /* Build index to map anchor paths to vertical offsets */\n switchMap(body => defer(() => {\n let path: HTMLAnchorElement[] = []\n return of([...table].reduce((index, [anchor, target]) => {\n while (path.length) {\n const last = table.get(path[path.length - 1])!\n if (last.tagName >= target.tagName) {\n path.pop()\n } else {\n break\n }\n }\n\n /* If the current anchor is hidden, continue with its parent */\n let offset = target.offsetTop\n while (!offset && target.parentElement) {\n target = target.parentElement\n offset = target.offsetTop\n }\n\n /* Map reversed anchor path to vertical offset */\n return index.set(\n [...path = [...path, anchor]].reverse(),\n offset\n )\n }, new Map()))\n })\n .pipe(\n\n /* Sort index by vertical offset (see https://bit.ly/30z6QSO) */\n map(index => new Map([...index].sort(([, a], [, b]) => a - b))),\n combineLatestWith(adjust$),\n\n /* Re-compute partition when viewport offset changes */\n switchMap(([index, adjust]) => viewport$\n .pipe(\n scan(([prev, next], { offset: { y }, size }) => {\n const last = y + size.height >= Math.floor(body.height)\n\n /* Look forward */\n while (next.length) {\n const [, offset] = next[0]\n if (offset - adjust < y || last) {\n prev = [...prev, next.shift()!]\n } else {\n break\n }\n }\n\n /* Look backward */\n while (prev.length) {\n const [, offset] = prev[prev.length - 1]\n if (offset - adjust >= y && !last) {\n next = [prev.pop()!, ...next]\n } else {\n break\n }\n }\n\n /* Return partition */\n return [prev, next]\n }, [[], 
[...index]]),\n distinctUntilChanged((a, b) => (\n a[0] === b[0] &&\n a[1] === b[1]\n ))\n )\n )\n )\n )\n )\n\n /* Compute and return anchor list migrations */\n return partition$\n .pipe(\n map(([prev, next]) => ({\n prev: prev.map(([path]) => path),\n next: next.map(([path]) => path)\n })),\n\n /* Extract anchor list migrations */\n startWith({ prev: [], next: [] }),\n bufferCount(2, 1),\n map(([a, b]) => {\n\n /* Moving down */\n if (a.prev.length < b.prev.length) {\n return {\n prev: b.prev.slice(Math.max(0, a.prev.length - 1), b.prev.length),\n next: []\n }\n\n /* Moving up */\n } else {\n return {\n prev: b.prev.slice(-1),\n next: b.next.slice(0, b.next.length - a.next.length)\n }\n }\n })\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Mount table of contents\n *\n * @param el - Table of contents element\n * @param options - Options\n *\n * @returns Table of contents component observable\n */\nexport function mountTableOfContents(\n el: HTMLElement, { viewport$, header$, target$ }: MountOptions\n): Observable> {\n return defer(() => {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n push$.subscribe(({ prev, next }) => {\n\n /* Look forward */\n for (const [anchor] of next) {\n anchor.classList.remove(\"md-nav__link--passed\")\n anchor.classList.remove(\"md-nav__link--active\")\n }\n\n /* Look backward */\n for (const [index, [anchor]] of prev.entries()) {\n anchor.classList.add(\"md-nav__link--passed\")\n anchor.classList.toggle(\n \"md-nav__link--active\",\n index === prev.length - 1\n )\n }\n })\n\n /* Set up following, if enabled */\n if (feature(\"toc.follow\")) {\n\n /* Toggle smooth scrolling only for anchor clicks */\n const smooth$ = merge(\n viewport$.pipe(debounceTime(1), map(() => undefined)),\n viewport$.pipe(debounceTime(250), map(() => \"smooth\" as const))\n )\n\n /* Bring active anchor into view */\n push$\n .pipe(\n filter(({ prev }) => prev.length > 0),\n withLatestFrom(smooth$)\n )\n .subscribe(([{ prev }, behavior]) => {\n const [anchor] = prev[prev.length - 1]\n if (anchor.offsetHeight) {\n\n /* Retrieve overflowing container and scroll */\n const container = getElementContainer(anchor)\n if (typeof container !== \"undefined\") {\n const offset = anchor.offsetTop - container.offsetTop\n const { height } = getElementSize(container)\n container.scrollTo({\n top: offset - height / 2,\n behavior\n })\n }\n }\n })\n }\n\n /* Set up anchor tracking, if enabled */\n if (feature(\"navigation.tracking\"))\n viewport$\n .pipe(\n takeUntil(done$),\n distinctUntilKeyChanged(\"offset\"),\n debounceTime(250),\n skip(1),\n takeUntil(target$.pipe(skip(1))),\n repeat({ delay: 250 }),\n withLatestFrom(push$)\n )\n .subscribe(([, { prev }]) => {\n const url = getLocation()\n\n /* Set hash fragment to active anchor */\n const anchor = prev[prev.length - 1]\n if (anchor && anchor.length) {\n const [active] = anchor\n const { hash } = new URL(active.href)\n if (url.hash !== hash) {\n url.hash = hash\n history.replaceState({}, \"\", `${url}`)\n }\n\n /* Reset anchor when at the top */\n } else {\n url.hash = \"\"\n history.replaceState({}, \"\", `${url}`)\n }\n })\n\n /* Create and return component */\n return watchTableOfContents(el, { viewport$, header$ })\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person 
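/*
 * A minimal sketch (not part of the bundled sources) of the partition at the
 * heart of the scroll spy above: given the index of anchors sorted by their
 * vertical offset, split it into the anchors already scrolled past ("prev")
 * and those still ahead ("next"). Names and the plain-array shape are
 * assumptions; the bundled code maintains the partition incrementally with
 * `scan` and a header adjustment.
 */
type EntrySketch = [anchors: string[], offset: number]

function partitionSketch(
  index: EntrySketch[], y: number, adjust: number
): [EntrySketch[], EntrySketch[]] {
  const prev = index.filter(([, offset]) => offset - adjust < y)
  const next = index.filter(([, offset]) => offset - adjust >= y)
  return [prev, next]
}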
obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n Subject,\n bufferCount,\n combineLatest,\n distinctUntilChanged,\n distinctUntilKeyChanged,\n endWith,\n finalize,\n map,\n repeat,\n skip,\n takeLast,\n takeUntil,\n tap\n} from \"rxjs\"\n\nimport { Viewport } from \"~/browser\"\n\nimport { Component } from \"../_\"\nimport { Header } from \"../header\"\nimport { Main } from \"../main\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Back-to-top button\n */\nexport interface BackToTop {\n hidden: boolean /* Back-to-top button is hidden */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch options\n */\ninterface WatchOptions {\n viewport$: Observable /* Viewport observable */\n main$: Observable
    /* Main area observable */\n target$: Observable /* Location target observable */\n}\n\n/**\n * Mount options\n */\ninterface MountOptions {\n viewport$: Observable /* Viewport observable */\n header$: Observable
    /* Header observable */\n main$: Observable
    /* Main area observable */\n target$: Observable /* Location target observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Watch back-to-top\n *\n * @param _el - Back-to-top element\n * @param options - Options\n *\n * @returns Back-to-top observable\n */\nexport function watchBackToTop(\n _el: HTMLElement, { viewport$, main$, target$ }: WatchOptions\n): Observable {\n\n /* Compute direction */\n const direction$ = viewport$\n .pipe(\n map(({ offset: { y } }) => y),\n bufferCount(2, 1),\n map(([a, b]) => a > b && b > 0),\n distinctUntilChanged()\n )\n\n /* Compute whether main area is active */\n const active$ = main$\n .pipe(\n map(({ active }) => active)\n )\n\n /* Compute threshold for hiding */\n return combineLatest([active$, direction$])\n .pipe(\n map(([active, direction]) => !(active && direction)),\n distinctUntilChanged(),\n takeUntil(target$.pipe(skip(1))),\n endWith(true),\n repeat({ delay: 250 }),\n map(hidden => ({ hidden }))\n )\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Mount back-to-top\n *\n * @param el - Back-to-top element\n * @param options - Options\n *\n * @returns Back-to-top component observable\n */\nexport function mountBackToTop(\n el: HTMLElement, { viewport$, header$, main$, target$ }: MountOptions\n): Observable> {\n const push$ = new Subject()\n const done$ = push$.pipe(takeLast(1))\n push$.subscribe({\n\n /* Handle emission */\n next({ hidden }) {\n el.hidden = hidden\n if (hidden) {\n el.setAttribute(\"tabindex\", \"-1\")\n el.blur()\n } else {\n el.removeAttribute(\"tabindex\")\n }\n },\n\n /* Handle complete */\n complete() {\n el.style.top = \"\"\n el.hidden = true\n el.removeAttribute(\"tabindex\")\n }\n })\n\n /* Watch header height */\n header$\n .pipe(\n takeUntil(done$),\n distinctUntilKeyChanged(\"height\")\n )\n .subscribe(({ height }) => {\n el.style.top = `${height + 16}px`\n })\n\n /* Create and return component */\n return watchBackToTop(el, { viewport$, main$, target$ })\n .pipe(\n tap(state => push$.next(state)),\n finalize(() => push$.complete()),\n map(state => ({ ref: el, ...state }))\n )\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
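/*
 * A minimal sketch (not part of the bundled sources) of the visibility rule
 * in `watchBackToTop`: the button is shown only while the main area is
 * active and the last scroll step moved upwards, which is what the
 * `bufferCount(2, 1)` pairing above detects. Parameter names are assumptions.
 */
function backToTopHiddenSketch(active: boolean, prevY: number, currY: number): boolean {
  const scrollingUp = prevY > currY && currY > 0
  return !(active && scrollingUp)
}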
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n fromEvent,\n map,\n mergeMap,\n switchMap,\n takeWhile,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\nimport { getElements } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch options\n */\ninterface PatchOptions {\n document$: Observable /* Document observable */\n tablet$: Observable /* Media tablet observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch indeterminate checkboxes\n *\n * This function replaces the indeterminate \"pseudo state\" with the actual\n * indeterminate state, which is used to keep navigation always expanded.\n *\n * @param options - Options\n */\nexport function patchIndeterminate(\n { document$, tablet$ }: PatchOptions\n): void {\n document$\n .pipe(\n switchMap(() => getElements(\n // @todo `data-md-state` is deprecated and removed in v9\n \".md-toggle--indeterminate, [data-md-state=indeterminate]\"\n )),\n tap(el => {\n el.indeterminate = true\n el.checked = false\n }),\n mergeMap(el => fromEvent(el, \"change\")\n .pipe(\n takeWhile(() => el.classList.contains(\"md-toggle--indeterminate\")),\n map(() => el)\n )\n ),\n withLatestFrom(tablet$)\n )\n .subscribe(([el, tablet]) => {\n el.classList.remove(\"md-toggle--indeterminate\")\n if (tablet)\n el.checked = false\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
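/*
 * A rough sketch (not part of the bundled sources) of the DOM effect of the
 * patch above, without the RxJS plumbing: mark matching checkboxes as truly
 * indeterminate, and drop the marker class on the first change. `isTablet`
 * is a hypothetical flag standing in for the `tablet$` observable.
 */
function patchIndeterminateSketch(isTablet: boolean): void {
  for (const el of document.querySelectorAll<HTMLInputElement>(".md-toggle--indeterminate")) {
    el.indeterminate = true
    el.checked = false
    el.addEventListener("change", () => {
      el.classList.remove("md-toggle--indeterminate")
      if (isTablet)
        el.checked = false
    }, { once: true })
  }
}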
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n filter,\n fromEvent,\n map,\n mergeMap,\n switchMap,\n tap\n} from \"rxjs\"\n\nimport { getElements } from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch options\n */\ninterface PatchOptions {\n document$: Observable /* Document observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Check whether the given device is an Apple device\n *\n * @returns Test result\n */\nfunction isAppleDevice(): boolean {\n return /(iPad|iPhone|iPod)/.test(navigator.userAgent)\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch all elements with `data-md-scrollfix` attributes\n *\n * This is a year-old patch which ensures that overflow scrolling works at the\n * top and bottom of containers on iOS by ensuring a `1px` scroll offset upon\n * the start of a touch event.\n *\n * @see https://bit.ly/2SCtAOO - Original source\n *\n * @param options - Options\n */\nexport function patchScrollfix(\n { document$ }: PatchOptions\n): void {\n document$\n .pipe(\n switchMap(() => getElements(\"[data-md-scrollfix]\")),\n tap(el => el.removeAttribute(\"data-md-scrollfix\")),\n filter(isAppleDevice),\n mergeMap(el => fromEvent(el, \"touchstart\")\n .pipe(\n map(() => el)\n )\n )\n )\n .subscribe(el => {\n const top = el.scrollTop\n\n /* We're at the top of the container */\n if (top === 0) {\n el.scrollTop = 1\n\n /* We're at the bottom of the container */\n } else if (top + el.offsetHeight === el.scrollHeight) {\n el.scrollTop = top - 1\n }\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
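/*
 * A minimal sketch (not part of the bundled sources) of the 1px nudge that
 * keeps iOS momentum scrolling alive at the very top and bottom of an
 * overflowing container, as applied on every `touchstart` above.
 */
function nudgeScrollOffsetSketch(el: HTMLElement): void {
  const top = el.scrollTop
  if (top === 0)
    el.scrollTop = 1                      /* at the top: push down one pixel */
  else if (top + el.offsetHeight === el.scrollHeight)
    el.scrollTop = top - 1                /* at the bottom: pull up one pixel */
}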
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n Observable,\n combineLatest,\n delay,\n map,\n of,\n switchMap,\n withLatestFrom\n} from \"rxjs\"\n\nimport {\n Viewport,\n watchToggle\n} from \"~/browser\"\n\n/* ----------------------------------------------------------------------------\n * Helper types\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch options\n */\ninterface PatchOptions {\n viewport$: Observable /* Viewport observable */\n tablet$: Observable /* Media tablet observable */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Patch the document body to lock when search is open\n *\n * For mobile and tablet viewports, the search is rendered full screen, which\n * leads to scroll leaking when at the top or bottom of the search result. This\n * function locks the body when the search is in full screen mode, and restores\n * the scroll position when leaving.\n *\n * @param options - Options\n */\nexport function patchScrolllock(\n { viewport$, tablet$ }: PatchOptions\n): void {\n combineLatest([watchToggle(\"search\"), tablet$])\n .pipe(\n map(([active, tablet]) => active && !tablet),\n switchMap(active => of(active)\n .pipe(\n delay(active ? 400 : 100)\n )\n ),\n withLatestFrom(viewport$)\n )\n .subscribe(([active, { offset: { y }}]) => {\n if (active) {\n document.body.setAttribute(\"data-md-scrolllock\", \"\")\n document.body.style.top = `-${y}px`\n } else {\n const value = -1 * parseInt(document.body.style.top, 10)\n document.body.removeAttribute(\"data-md-scrolllock\")\n document.body.style.top = \"\"\n if (value)\n window.scrollTo(0, value)\n }\n })\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
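/*
 * A minimal sketch (not part of the bundled sources) of the body lock used
 * while full-screen search is open: freeze the body at the current scroll
 * offset, then restore the position when search closes. Function names are
 * assumptions; the bundled code drives this from the subscription above.
 */
function lockBodySketch(y: number): void {
  document.body.setAttribute("data-md-scrolllock", "")
  document.body.style.top = `-${y}px`
}

function unlockBodySketch(): void {
  const y = -1 * parseInt(document.body.style.top, 10)
  document.body.removeAttribute("data-md-scrolllock")
  document.body.style.top = ""
  if (y)
    window.scrollTo(0, y)
}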
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Polyfills\n * ------------------------------------------------------------------------- */\n\n/* Polyfill `Object.entries` */\nif (!Object.entries)\n Object.entries = function (obj: object) {\n const data: [string, string][] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push([key, obj[key]])\n\n /* Return entries */\n return data\n }\n\n/* Polyfill `Object.values` */\nif (!Object.values)\n Object.values = function (obj: object) {\n const data: string[] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push(obj[key])\n\n /* Return values */\n return data\n }\n\n/* ------------------------------------------------------------------------- */\n\n/* Polyfills for `Element` */\nif (typeof Element !== \"undefined\") {\n\n /* Polyfill `Element.scrollTo` */\n if (!Element.prototype.scrollTo)\n Element.prototype.scrollTo = function (\n x?: ScrollToOptions | number, y?: number\n ): void {\n if (typeof x === \"object\") {\n this.scrollLeft = x.left!\n this.scrollTop = x.top!\n } else {\n this.scrollLeft = x!\n this.scrollTop = y!\n }\n }\n\n /* Polyfill `Element.replaceWith` */\n if (!Element.prototype.replaceWith)\n Element.prototype.replaceWith = function (\n ...nodes: Array\n ): void {\n const parent = this.parentNode\n if (parent) {\n if (nodes.length === 0)\n parent.removeChild(this)\n\n /* Replace children and create text nodes */\n for (let i = nodes.length - 1; i >= 0; i--) {\n let node = nodes[i]\n if (typeof node === \"string\")\n node = document.createTextNode(node)\n else if (node.parentNode)\n node.parentNode.removeChild(node)\n\n /* Replace child or insert before previous sibling */\n if (!i)\n parent.replaceChild(node, this)\n else\n parent.insertBefore(this.previousSibling!, node)\n }\n }\n }\n}\n"], + "mappings": 
"6+BAAA,IAAAA,GAAAC,GAAA,CAAAC,GAAAC,KAAA,EAAC,SAAUC,EAAQC,EAAS,CAC1B,OAAOH,IAAY,UAAY,OAAOC,IAAW,YAAcE,EAAQ,EACvE,OAAO,QAAW,YAAc,OAAO,IAAM,OAAOA,CAAO,EAC1DA,EAAQ,CACX,GAAEH,GAAO,UAAY,CAAE,aASrB,SAASI,EAA0BC,EAAO,CACxC,IAAIC,EAAmB,GACnBC,EAA0B,GAC1BC,EAAiC,KAEjCC,EAAsB,CACxB,KAAM,GACN,OAAQ,GACR,IAAK,GACL,IAAK,GACL,MAAO,GACP,SAAU,GACV,OAAQ,GACR,KAAM,GACN,MAAO,GACP,KAAM,GACN,KAAM,GACN,SAAU,GACV,iBAAkB,EACpB,EAOA,SAASC,EAAmBC,EAAI,CAC9B,MACE,GAAAA,GACAA,IAAO,UACPA,EAAG,WAAa,QAChBA,EAAG,WAAa,QAChB,cAAeA,GACf,aAAcA,EAAG,UAKrB,CASA,SAASC,EAA8BD,EAAI,CACzC,IAAIE,GAAOF,EAAG,KACVG,GAAUH,EAAG,QAUjB,MARI,GAAAG,KAAY,SAAWL,EAAoBI,KAAS,CAACF,EAAG,UAIxDG,KAAY,YAAc,CAACH,EAAG,UAI9BA,EAAG,kBAKT,CAOA,SAASI,EAAqBJ,EAAI,CAC5BA,EAAG,UAAU,SAAS,eAAe,IAGzCA,EAAG,UAAU,IAAI,eAAe,EAChCA,EAAG,aAAa,2BAA4B,EAAE,EAChD,CAOA,SAASK,EAAwBL,EAAI,CAC/B,CAACA,EAAG,aAAa,0BAA0B,IAG/CA,EAAG,UAAU,OAAO,eAAe,EACnCA,EAAG,gBAAgB,0BAA0B,EAC/C,CAUA,SAASM,EAAUC,EAAG,CAChBA,EAAE,SAAWA,EAAE,QAAUA,EAAE,UAI3BR,EAAmBL,EAAM,aAAa,GACxCU,EAAqBV,EAAM,aAAa,EAG1CC,EAAmB,GACrB,CAUA,SAASa,EAAcD,EAAG,CACxBZ,EAAmB,EACrB,CASA,SAASc,EAAQF,EAAG,CAEd,CAACR,EAAmBQ,EAAE,MAAM,IAI5BZ,GAAoBM,EAA8BM,EAAE,MAAM,IAC5DH,EAAqBG,EAAE,MAAM,CAEjC,CAMA,SAASG,EAAOH,EAAG,CACb,CAACR,EAAmBQ,EAAE,MAAM,IAK9BA,EAAE,OAAO,UAAU,SAAS,eAAe,GAC3CA,EAAE,OAAO,aAAa,0BAA0B,KAMhDX,EAA0B,GAC1B,OAAO,aAAaC,CAA8B,EAClDA,EAAiC,OAAO,WAAW,UAAW,CAC5DD,EAA0B,EAC5B,EAAG,GAAG,EACNS,EAAwBE,EAAE,MAAM,EAEpC,CAOA,SAASI,EAAmBJ,EAAG,CACzB,SAAS,kBAAoB,WAK3BX,IACFD,EAAmB,IAErBiB,EAA+B,EAEnC,CAQA,SAASA,GAAiC,CACxC,SAAS,iBAAiB,YAAaC,CAAoB,EAC3D,SAAS,iBAAiB,YAAaA,CAAoB,EAC3D,SAAS,iBAAiB,UAAWA,CAAoB,EACzD,SAAS,iBAAiB,cAAeA,CAAoB,EAC7D,SAAS,iBAAiB,cAAeA,CAAoB,EAC7D,SAAS,iBAAiB,YAAaA,CAAoB,EAC3D,SAAS,iBAAiB,YAAaA,CAAoB,EAC3D,SAAS,iBAAiB,aAAcA,CAAoB,EAC5D,SAAS,iBAAiB,WAAYA,CAAoB,CAC5D,CAEA,SAASC,GAAoC,CAC3C,SAAS,oBAAoB,YAAaD,CAAoB,EAC9D,SAAS,oBAAoB,YAAaA,CAAoB,EAC9D,SAAS,oBAAoB,UAAWA,CAAoB,EAC5D,SAAS,oBAAoB,cAAeA,CAAoB,EAChE,SAAS,oBAAoB,cAAeA,CAAoB,EAChE,SAAS,oBAAoB,YAAaA,CAAoB,EAC9D,SAAS,oBAAoB,YAAaA,CAAoB,EAC9D,SAAS,oBAAoB,aAAcA,CAAoB,EAC/D,SAAS,oBAAoB,WAAYA,CAAoB,CAC/D,CASA,SAASA,EAAqBN,EAAG,CAG3BA,EAAE,OAAO,UAAYA,EAAE,OAAO,SAAS,YAAY,IAAM,SAI7DZ,EAAmB,GACnBmB,EAAkC,EACpC,CAKA,SAAS,iBAAiB,UAAWR,EAAW,EAAI,EACpD,SAAS,iBAAiB,YAAaE,EAAe,EAAI,EAC1D,SAAS,iBAAiB,cAAeA,EAAe,EAAI,EAC5D,SAAS,iBAAiB,aAAcA,EAAe,EAAI,EAC3D,SAAS,iBAAiB,mBAAoBG,EAAoB,EAAI,EAEtEC,EAA+B,EAM/BlB,EAAM,iBAAiB,QAASe,EAAS,EAAI,EAC7Cf,EAAM,iBAAiB,OAAQgB,EAAQ,EAAI,EAOvChB,EAAM,WAAa,KAAK,wBAA0BA,EAAM,KAI1DA,EAAM,KAAK,aAAa,wBAAyB,EAAE,EAC1CA,EAAM,WAAa,KAAK,gBACjC,SAAS,gBAAgB,UAAU,IAAI,kBAAkB,EACzD,SAAS,gBAAgB,aAAa,wBAAyB,EAAE,EAErE,CAKA,GAAI,OAAO,QAAW,aAAe,OAAO,UAAa,YAAa,CAIpE,OAAO,0BAA4BD,EAInC,IAAIsB,EAEJ,GAAI,CACFA,EAAQ,IAAI,YAAY,8BAA8B,CACxD,OAASC,EAAP,CAEAD,EAAQ,SAAS,YAAY,aAAa,EAC1CA,EAAM,gBAAgB,+BAAgC,GAAO,GAAO,CAAC,CAAC,CACxE,CAEA,OAAO,cAAcA,CAAK,CAC5B,CAEI,OAAO,UAAa,aAGtBtB,EAA0B,QAAQ,CAGtC,CAAE,ICvTF,IAAAwB,GAAAC,GAAAC,IAAA,EAAC,SAASC,EAAQ,CAOhB,IAAIC,EAA6B,UAAW,CAC1C,GAAI,CACF,MAAO,CAAC,CAAC,OAAO,QAClB,OAASC,EAAP,CACA,MAAO,EACT,CACF,EAGIC,EAAoBF,EAA2B,EAE/CG,EAAiB,SAASC,EAAO,CACnC,IAAIC,EAAW,CACb,KAAM,UAAW,CACf,IAAIC,EAAQF,EAAM,MAAM,EACxB,MAAO,CAAE,KAAME,IAAU,OAAQ,MAAOA,CAAM,CAChD,CACF,EAEA,OAAIJ,IACFG,EAAS,OAAO,UAAY,UAAW,CACrC,OAAOA,CACT,GAGKA,CACT,EAMIE,EAAiB,SAASD,EAAO,CACnC,OAAO,mBAAmBA,CAAK,EAAE,QAAQ,OAAQ,GAAG,CACtD,EAEIE,EAAmB,SAASF,EAAO,CACrC,OAAO,mBAAmB,OAAOA,CAAK,EAAE,QAAQ,MAAO,GAAG,CAAC,CAC7D,EAEIG,EAA0B,UAAW,CAEvC,IAAIC,EAAkB,SAASC,EAAc,CAC3C,OAAO,eAAe,KAAM,WAAY,CAAE,SAAU,GAAM,MAAO,CAAC,CAAE,CAAC,EACrE,IAAIC,EAAqB,OAAOD,EAEhC,GAAIC,IAA
uB,YAEpB,GAAIA,IAAuB,SAC5BD,IAAiB,IACnB,KAAK,YAAYA,CAAY,UAEtBA,aAAwBD,EAAiB,CAClD,IAAIG,EAAQ,KACZF,EAAa,QAAQ,SAASL,EAAOQ,EAAM,CACzCD,EAAM,OAAOC,EAAMR,CAAK,CAC1B,CAAC,CACH,SAAYK,IAAiB,MAAUC,IAAuB,SAC5D,GAAI,OAAO,UAAU,SAAS,KAAKD,CAAY,IAAM,iBACnD,QAASI,EAAI,EAAGA,EAAIJ,EAAa,OAAQI,IAAK,CAC5C,IAAIC,EAAQL,EAAaI,GACzB,GAAK,OAAO,UAAU,SAAS,KAAKC,CAAK,IAAM,kBAAsBA,EAAM,SAAW,EACpF,KAAK,OAAOA,EAAM,GAAIA,EAAM,EAAE,MAE9B,OAAM,IAAI,UAAU,4CAA8CD,EAAI,6BAA8B,CAExG,KAEA,SAASE,KAAON,EACVA,EAAa,eAAeM,CAAG,GACjC,KAAK,OAAOA,EAAKN,EAAaM,EAAI,MAKxC,OAAM,IAAI,UAAU,8CAA+C,CAEvE,EAEIC,EAAQR,EAAgB,UAE5BQ,EAAM,OAAS,SAASJ,EAAMR,EAAO,CAC/BQ,KAAQ,KAAK,SACf,KAAK,SAASA,GAAM,KAAK,OAAOR,CAAK,CAAC,EAEtC,KAAK,SAASQ,GAAQ,CAAC,OAAOR,CAAK,CAAC,CAExC,EAEAY,EAAM,OAAS,SAASJ,EAAM,CAC5B,OAAO,KAAK,SAASA,EACvB,EAEAI,EAAM,IAAM,SAASJ,EAAM,CACzB,OAAQA,KAAQ,KAAK,SAAY,KAAK,SAASA,GAAM,GAAK,IAC5D,EAEAI,EAAM,OAAS,SAASJ,EAAM,CAC5B,OAAQA,KAAQ,KAAK,SAAY,KAAK,SAASA,GAAM,MAAM,CAAC,EAAI,CAAC,CACnE,EAEAI,EAAM,IAAM,SAASJ,EAAM,CACzB,OAAQA,KAAQ,KAAK,QACvB,EAEAI,EAAM,IAAM,SAASJ,EAAMR,EAAO,CAChC,KAAK,SAASQ,GAAQ,CAAC,OAAOR,CAAK,CAAC,CACtC,EAEAY,EAAM,QAAU,SAASC,EAAUC,EAAS,CAC1C,IAAIC,EACJ,QAASP,KAAQ,KAAK,SACpB,GAAI,KAAK,SAAS,eAAeA,CAAI,EAAG,CACtCO,EAAU,KAAK,SAASP,GACxB,QAASC,EAAI,EAAGA,EAAIM,EAAQ,OAAQN,IAClCI,EAAS,KAAKC,EAASC,EAAQN,GAAID,EAAM,IAAI,CAEjD,CAEJ,EAEAI,EAAM,KAAO,UAAW,CACtB,IAAId,EAAQ,CAAC,EACb,YAAK,QAAQ,SAASE,EAAOQ,EAAM,CACjCV,EAAM,KAAKU,CAAI,CACjB,CAAC,EACMX,EAAeC,CAAK,CAC7B,EAEAc,EAAM,OAAS,UAAW,CACxB,IAAId,EAAQ,CAAC,EACb,YAAK,QAAQ,SAASE,EAAO,CAC3BF,EAAM,KAAKE,CAAK,CAClB,CAAC,EACMH,EAAeC,CAAK,CAC7B,EAEAc,EAAM,QAAU,UAAW,CACzB,IAAId,EAAQ,CAAC,EACb,YAAK,QAAQ,SAASE,EAAOQ,EAAM,CACjCV,EAAM,KAAK,CAACU,EAAMR,CAAK,CAAC,CAC1B,CAAC,EACMH,EAAeC,CAAK,CAC7B,EAEIF,IACFgB,EAAM,OAAO,UAAYA,EAAM,SAGjCA,EAAM,SAAW,UAAW,CAC1B,IAAII,EAAc,CAAC,EACnB,YAAK,QAAQ,SAAShB,EAAOQ,EAAM,CACjCQ,EAAY,KAAKf,EAAeO,CAAI,EAAI,IAAMP,EAAeD,CAAK,CAAC,CACrE,CAAC,EACMgB,EAAY,KAAK,GAAG,CAC7B,EAGAvB,EAAO,gBAAkBW,CAC3B,EAEIa,EAAkC,UAAW,CAC/C,GAAI,CACF,IAAIb,EAAkBX,EAAO,gBAE7B,OACG,IAAIW,EAAgB,MAAM,EAAE,SAAS,IAAM,OAC3C,OAAOA,EAAgB,UAAU,KAAQ,YACzC,OAAOA,EAAgB,UAAU,SAAY,UAElD,OAASc,EAAP,CACA,MAAO,EACT,CACF,EAEKD,EAAgC,GACnCd,EAAwB,EAG1B,IAAIS,EAAQnB,EAAO,gBAAgB,UAE/B,OAAOmB,EAAM,MAAS,aACxBA,EAAM,KAAO,UAAW,CACtB,IAAIL,EAAQ,KACRT,EAAQ,CAAC,EACb,KAAK,QAAQ,SAASE,EAAOQ,EAAM,CACjCV,EAAM,KAAK,CAACU,EAAMR,CAAK,CAAC,EACnBO,EAAM,UACTA,EAAM,OAAOC,CAAI,CAErB,CAAC,EACDV,EAAM,KAAK,SAASqB,EAAGC,EAAG,CACxB,OAAID,EAAE,GAAKC,EAAE,GACJ,GACED,EAAE,GAAKC,EAAE,GACX,EAEA,CAEX,CAAC,EACGb,EAAM,WACRA,EAAM,SAAW,CAAC,GAEpB,QAASE,EAAI,EAAGA,EAAIX,EAAM,OAAQW,IAChC,KAAK,OAAOX,EAAMW,GAAG,GAAIX,EAAMW,GAAG,EAAE,CAExC,GAGE,OAAOG,EAAM,aAAgB,YAC/B,OAAO,eAAeA,EAAO,cAAe,CAC1C,WAAY,GACZ,aAAc,GACd,SAAU,GACV,MAAO,SAASP,EAAc,CAC5B,GAAI,KAAK,SACP,KAAK,SAAW,CAAC,MACZ,CACL,IAAIgB,EAAO,CAAC,EACZ,KAAK,QAAQ,SAASrB,EAAOQ,EAAM,CACjCa,EAAK,KAAKb,CAAI,CAChB,CAAC,EACD,QAASC,EAAI,EAAGA,EAAIY,EAAK,OAAQZ,IAC/B,KAAK,OAAOY,EAAKZ,EAAE,CAEvB,CAEAJ,EAAeA,EAAa,QAAQ,MAAO,EAAE,EAG7C,QAFIiB,EAAajB,EAAa,MAAM,GAAG,EACnCkB,EACKd,EAAI,EAAGA,EAAIa,EAAW,OAAQb,IACrCc,EAAYD,EAAWb,GAAG,MAAM,GAAG,EACnC,KAAK,OACHP,EAAiBqB,EAAU,EAAE,EAC5BA,EAAU,OAAS,EAAKrB,EAAiBqB,EAAU,EAAE,EAAI,EAC5D,CAEJ,CACF,CAAC,CAKL,GACG,OAAO,QAAW,YAAe,OAC5B,OAAO,QAAW,YAAe,OACjC,OAAO,MAAS,YAAe,KAAO/B,EAC9C,GAEC,SAASC,EAAQ,CAOhB,IAAI+B,EAAwB,UAAW,CACrC,GAAI,CACF,IAAIC,EAAI,IAAIhC,EAAO,IAAI,IAAK,UAAU,EACtC,OAAAgC,EAAE,SAAW,MACLA,EAAE,OAAS,kBAAqBA,EAAE,YAC5C,OAASP,EAAP,CACA,MAAO,EACT,CACF,EAGIQ,EAAc,UAAW,CAC3B,IAAIC,EAAOlC,EAAO,IAEdmC,EAAM,SAASC,EAAKC,EAAM,CACxB,OAAOD,GAAQ,WAAUA,EAAM,OAAOA,CAAG,GACzCC,GA
AQ,OAAOA,GAAS,WAAUA,EAAO,OAAOA,CAAI,GAGxD,IAAIC,EAAM,SAAUC,EACpB,GAAIF,IAASrC,EAAO,WAAa,QAAUqC,IAASrC,EAAO,SAAS,MAAO,CACzEqC,EAAOA,EAAK,YAAY,EACxBC,EAAM,SAAS,eAAe,mBAAmB,EAAE,EACnDC,EAAcD,EAAI,cAAc,MAAM,EACtCC,EAAY,KAAOF,EACnBC,EAAI,KAAK,YAAYC,CAAW,EAChC,GAAI,CACF,GAAIA,EAAY,KAAK,QAAQF,CAAI,IAAM,EAAG,MAAM,IAAI,MAAME,EAAY,IAAI,CAC5E,OAASC,EAAP,CACA,MAAM,IAAI,MAAM,0BAA4BH,EAAO,WAAaG,CAAG,CACrE,CACF,CAEA,IAAIC,EAAgBH,EAAI,cAAc,GAAG,EACzCG,EAAc,KAAOL,EACjBG,IACFD,EAAI,KAAK,YAAYG,CAAa,EAClCA,EAAc,KAAOA,EAAc,MAGrC,IAAIC,EAAeJ,EAAI,cAAc,OAAO,EAI5C,GAHAI,EAAa,KAAO,MACpBA,EAAa,MAAQN,EAEjBK,EAAc,WAAa,KAAO,CAAC,IAAI,KAAKA,EAAc,IAAI,GAAM,CAACC,EAAa,cAAc,GAAK,CAACL,EACxG,MAAM,IAAI,UAAU,aAAa,EAGnC,OAAO,eAAe,KAAM,iBAAkB,CAC5C,MAAOI,CACT,CAAC,EAID,IAAIE,EAAe,IAAI3C,EAAO,gBAAgB,KAAK,MAAM,EACrD4C,EAAqB,GACrBC,EAA2B,GAC3B/B,EAAQ,KACZ,CAAC,SAAU,SAAU,KAAK,EAAE,QAAQ,SAASgC,EAAY,CACvD,IAAIC,GAASJ,EAAaG,GAC1BH,EAAaG,GAAc,UAAW,CACpCC,GAAO,MAAMJ,EAAc,SAAS,EAChCC,IACFC,EAA2B,GAC3B/B,EAAM,OAAS6B,EAAa,SAAS,EACrCE,EAA2B,GAE/B,CACF,CAAC,EAED,OAAO,eAAe,KAAM,eAAgB,CAC1C,MAAOF,EACP,WAAY,EACd,CAAC,EAED,IAAIK,EAAS,OACb,OAAO,eAAe,KAAM,sBAAuB,CACjD,WAAY,GACZ,aAAc,GACd,SAAU,GACV,MAAO,UAAW,CACZ,KAAK,SAAWA,IAClBA,EAAS,KAAK,OACVH,IACFD,EAAqB,GACrB,KAAK,aAAa,YAAY,KAAK,MAAM,EACzCA,EAAqB,IAG3B,CACF,CAAC,CACH,EAEIzB,EAAQgB,EAAI,UAEZc,EAA6B,SAASC,EAAe,CACvD,OAAO,eAAe/B,EAAO+B,EAAe,CAC1C,IAAK,UAAW,CACd,OAAO,KAAK,eAAeA,EAC7B,EACA,IAAK,SAAS3C,EAAO,CACnB,KAAK,eAAe2C,GAAiB3C,CACvC,EACA,WAAY,EACd,CAAC,CACH,EAEA,CAAC,OAAQ,OAAQ,WAAY,OAAQ,UAAU,EAC5C,QAAQ,SAAS2C,EAAe,CAC/BD,EAA2BC,CAAa,CAC1C,CAAC,EAEH,OAAO,eAAe/B,EAAO,SAAU,CACrC,IAAK,UAAW,CACd,OAAO,KAAK,eAAe,MAC7B,EACA,IAAK,SAASZ,EAAO,CACnB,KAAK,eAAe,OAAYA,EAChC,KAAK,oBAAoB,CAC3B,EACA,WAAY,EACd,CAAC,EAED,OAAO,iBAAiBY,EAAO,CAE7B,SAAY,CACV,IAAK,UAAW,CACd,IAAIL,EAAQ,KACZ,OAAO,UAAW,CAChB,OAAOA,EAAM,IACf,CACF,CACF,EAEA,KAAQ,CACN,IAAK,UAAW,CACd,OAAO,KAAK,eAAe,KAAK,QAAQ,MAAO,EAAE,CACnD,EACA,IAAK,SAASP,EAAO,CACnB,KAAK,eAAe,KAAOA,EAC3B,KAAK,oBAAoB,CAC3B,EACA,WAAY,EACd,EAEA,SAAY,CACV,IAAK,UAAW,CACd,OAAO,KAAK,eAAe,SAAS,QAAQ,SAAU,GAAG,CAC3D,EACA,IAAK,SAASA,EAAO,CACnB,KAAK,eAAe,SAAWA,CACjC,EACA,WAAY,EACd,EAEA,OAAU,CACR,IAAK,UAAW,CAEd,IAAI4C,EAAe,CAAE,QAAS,GAAI,SAAU,IAAK,OAAQ,EAAG,EAAE,KAAK,eAAe,UAI9EC,EAAkB,KAAK,eAAe,MAAQD,GAChD,KAAK,eAAe,OAAS,GAE/B,OAAO,KAAK,eAAe,SACzB,KACA,KAAK,eAAe,UACnBC,EAAmB,IAAM,KAAK,eAAe,KAAQ,GAC1D,EACA,WAAY,EACd,EAEA,SAAY,CACV,IAAK,UAAW,CACd,MAAO,EACT,EACA,IAAK,SAAS7C,EAAO,CACrB,EACA,WAAY,EACd,EAEA,SAAY,CACV,IAAK,UAAW,CACd,MAAO,EACT,EACA,IAAK,SAASA,EAAO,CACrB,EACA,WAAY,EACd,CACF,CAAC,EAED4B,EAAI,gBAAkB,SAASkB,EAAM,CACnC,OAAOnB,EAAK,gBAAgB,MAAMA,EAAM,SAAS,CACnD,EAEAC,EAAI,gBAAkB,SAASC,EAAK,CAClC,OAAOF,EAAK,gBAAgB,MAAMA,EAAM,SAAS,CACnD,EAEAlC,EAAO,IAAMmC,CAEf,EAMA,GAJKJ,EAAsB,GACzBE,EAAY,EAGTjC,EAAO,WAAa,QAAW,EAAE,WAAYA,EAAO,UAAW,CAClE,IAAIsD,EAAY,UAAW,CACzB,OAAOtD,EAAO,SAAS,SAAW,KAAOA,EAAO,SAAS,UAAYA,EAAO,SAAS,KAAQ,IAAMA,EAAO,SAAS,KAAQ,GAC7H,EAEA,GAAI,CACF,OAAO,eAAeA,EAAO,SAAU,SAAU,CAC/C,IAAKsD,EACL,WAAY,EACd,CAAC,CACH,OAAS7B,EAAP,CACA,YAAY,UAAW,CACrBzB,EAAO,SAAS,OAASsD,EAAU,CACrC,EAAG,GAAG,CACR,CACF,CAEF,GACG,OAAO,QAAW,YAAe,OAC5B,OAAO,QAAW,YAAe,OACjC,OAAO,MAAS,YAAe,KAAOvD,EAC9C,IC5eA,IAAAwD,GAAAC,GAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gFAeA,IAAIC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,IACH,SAAUC,EAAS,CAChB,IAAIC,EAAO,OAAO,QAAW,SAAW,OAAS,OAAO,MAAS,SAAW,KAAO,OAAO,MAAS,SAAW,KAAO,CAAC,EAClH,OAAO,QAAW,YAAc,OAAO,IACvC,OAAO,QAAS,CAAC,SAAS,EAAG,SA
AU3B,EAAS,CAAE0B,EAAQE,EAAeD,EAAMC,EAAe5B,CAAO,CAAC,CAAC,CAAG,CAAC,EAEtG,OAAOC,IAAW,UAAY,OAAOA,GAAO,SAAY,SAC7DyB,EAAQE,EAAeD,EAAMC,EAAe3B,GAAO,OAAO,CAAC,CAAC,EAG5DyB,EAAQE,EAAeD,CAAI,CAAC,EAEhC,SAASC,EAAe5B,EAAS6B,EAAU,CACvC,OAAI7B,IAAY2B,IACR,OAAO,OAAO,QAAW,WACzB,OAAO,eAAe3B,EAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAG5DA,EAAQ,WAAa,IAGtB,SAAU8B,EAAIC,EAAG,CAAE,OAAO/B,EAAQ8B,GAAMD,EAAWA,EAASC,EAAIC,CAAC,EAAIA,CAAG,CACnF,CACJ,GACC,SAAUC,EAAU,CACjB,IAAIC,EAAgB,OAAO,gBACtB,CAAE,UAAW,CAAC,CAAE,YAAa,OAAS,SAAUC,EAAGC,EAAG,CAAED,EAAE,UAAYC,CAAG,GAC1E,SAAUD,EAAGC,EAAG,CAAE,QAASC,KAAKD,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGC,CAAC,IAAGF,EAAEE,GAAKD,EAAEC,GAAI,EAEpGlC,GAAY,SAAUgC,EAAGC,EAAG,CACxB,GAAI,OAAOA,GAAM,YAAcA,IAAM,KACjC,MAAM,IAAI,UAAU,uBAAyB,OAAOA,CAAC,EAAI,+BAA+B,EAC5FF,EAAcC,EAAGC,CAAC,EAClB,SAASE,GAAK,CAAE,KAAK,YAAcH,CAAG,CACtCA,EAAE,UAAYC,IAAM,KAAO,OAAO,OAAOA,CAAC,GAAKE,EAAG,UAAYF,EAAE,UAAW,IAAIE,EACnF,EAEAlC,GAAW,OAAO,QAAU,SAAUmC,EAAG,CACrC,QAASC,EAAG,EAAI,EAAGC,EAAI,UAAU,OAAQ,EAAIA,EAAG,IAAK,CACjDD,EAAI,UAAU,GACd,QAASH,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,IAAGE,EAAEF,GAAKG,EAAEH,GAC9E,CACA,OAAOE,CACX,EAEAlC,GAAS,SAAUmC,EAAGE,EAAG,CACrB,IAAIH,EAAI,CAAC,EACT,QAASF,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,GAAKK,EAAE,QAAQL,CAAC,EAAI,IAC9EE,EAAEF,GAAKG,EAAEH,IACb,GAAIG,GAAK,MAAQ,OAAO,OAAO,uBAA0B,WACrD,QAASG,EAAI,EAAGN,EAAI,OAAO,sBAAsBG,CAAC,EAAGG,EAAIN,EAAE,OAAQM,IAC3DD,EAAE,QAAQL,EAAEM,EAAE,EAAI,GAAK,OAAO,UAAU,qBAAqB,KAAKH,EAAGH,EAAEM,EAAE,IACzEJ,EAAEF,EAAEM,IAAMH,EAAEH,EAAEM,KAE1B,OAAOJ,CACX,EAEAjC,GAAa,SAAUsC,EAAYC,EAAQC,EAAKC,EAAM,CAClD,IAAIC,EAAI,UAAU,OAAQC,EAAID,EAAI,EAAIH,EAASE,IAAS,KAAOA,EAAO,OAAO,yBAAyBF,EAAQC,CAAG,EAAIC,EAAMZ,EAC3H,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAYc,EAAI,QAAQ,SAASL,EAAYC,EAAQC,EAAKC,CAAI,MACxH,SAASJ,EAAIC,EAAW,OAAS,EAAGD,GAAK,EAAGA,KAASR,EAAIS,EAAWD,MAAIM,GAAKD,EAAI,EAAIb,EAAEc,CAAC,EAAID,EAAI,EAAIb,EAAEU,EAAQC,EAAKG,CAAC,EAAId,EAAEU,EAAQC,CAAG,IAAMG,GAChJ,OAAOD,EAAI,GAAKC,GAAK,OAAO,eAAeJ,EAAQC,EAAKG,CAAC,EAAGA,CAChE,EAEA1C,GAAU,SAAU2C,EAAYC,EAAW,CACvC,OAAO,SAAUN,EAAQC,EAAK,CAAEK,EAAUN,EAAQC,EAAKI,CAAU,CAAG,CACxE,EAEA1C,GAAa,SAAU4C,EAAaC,EAAe,CAC/C,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAY,OAAO,QAAQ,SAASD,EAAaC,CAAa,CACjI,EAEA5C,GAAY,SAAU6C,EAASC,EAAYC,EAAGC,EAAW,CACrD,SAASC,EAAMC,EAAO,CAAE,OAAOA,aAAiBH,EAAIG,EAAQ,IAAIH,EAAE,SAAUI,EAAS,CAAEA,EAAQD,CAAK,CAAG,CAAC,CAAG,CAC3G,OAAO,IAAKH,IAAMA,EAAI,UAAU,SAAUI,EAASC,EAAQ,CACvD,SAASC,EAAUH,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,KAAKE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC1F,SAASsB,EAASL,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,MAASE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC7F,SAASqB,EAAKE,EAAQ,CAAEA,EAAO,KAAOL,EAAQK,EAAO,KAAK,EAAIP,EAAMO,EAAO,KAAK,EAAE,KAAKH,EAAWE,CAAQ,CAAG,CAC7GD,GAAMN,EAAYA,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,GAAG,KAAK,CAAC,CACxE,CAAC,CACL,EAEA7C,GAAc,SAAU4C,EAASY,EAAM,CACnC,IAAIC,EAAI,CAAE,MAAO,EAAG,KAAM,UAAW,CAAE,GAAI5B,EAAE,GAAK,EAAG,MAAMA,EAAE,GAAI,OAAOA,EAAE,EAAI,EAAG,KAAM,CAAC,EAAG,IAAK,CAAC,CAAE,EAAG6B,EAAGC,EAAG9B,EAAG+B,EAC/G,OAAOA,EAAI,CAAE,KAAMC,EAAK,CAAC,EAAG,MAASA,EAAK,CAAC,EAAG,OAAUA,EAAK,CAAC,CAAE,EAAG,OAAO,QAAW,aAAeD,EAAE,OAAO,UAAY,UAAW,CAAE,OAAO,IAAM,GAAIA,EACvJ,SAASC,EAAK9B,EAAG,CAAE,OAAO,SAAUT,EAAG,CAAE,OAAO+B,EAAK,CAACtB,EAAGT,CAAC,CAAC,CAAG,CAAG,CACjE,SAAS+B,EAAKS,EAAI,CACd,GAAIJ,EAAG,MAAM,IAAI,UAAU,iCAAiC,EAC5D,KAAOD,GAAG,GAAI,CACV,GAAIC,EAAI,EAAGC,IAAM9B,EAAIiC,EAAG,GAAK,EAAIH,EAAE,OAAYG,EAAG,GAAKH,EAAE,SAAc9B,EAAI8B,EAAE,SAAc9B,EAAE,KAAK8B,CAAC,EAAG,GAAKA,EAAE,OAAS,EAAE9B,EAAIA,EAAE,KAAK8B,EAAGG,EAAG,EAAE,GAAG,KAAM,OAAOjC,EAE3J,OADI8B,EAAI,EAAG9B,IAAGiC,EAAK,CAACA,EA
AG,GAAK,EAAGjC,EAAE,KAAK,GAC9BiC,EAAG,GAAI,CACX,IAAK,GAAG,IAAK,GAAGjC,EAAIiC,EAAI,MACxB,IAAK,GAAG,OAAAL,EAAE,QAAgB,CAAE,MAAOK,EAAG,GAAI,KAAM,EAAM,EACtD,IAAK,GAAGL,EAAE,QAASE,EAAIG,EAAG,GAAIA,EAAK,CAAC,CAAC,EAAG,SACxC,IAAK,GAAGA,EAAKL,EAAE,IAAI,IAAI,EAAGA,EAAE,KAAK,IAAI,EAAG,SACxC,QACI,GAAM5B,EAAI4B,EAAE,KAAM,EAAA5B,EAAIA,EAAE,OAAS,GAAKA,EAAEA,EAAE,OAAS,MAAQiC,EAAG,KAAO,GAAKA,EAAG,KAAO,GAAI,CAAEL,EAAI,EAAG,QAAU,CAC3G,GAAIK,EAAG,KAAO,IAAM,CAACjC,GAAMiC,EAAG,GAAKjC,EAAE,IAAMiC,EAAG,GAAKjC,EAAE,IAAM,CAAE4B,EAAE,MAAQK,EAAG,GAAI,KAAO,CACrF,GAAIA,EAAG,KAAO,GAAKL,EAAE,MAAQ5B,EAAE,GAAI,CAAE4B,EAAE,MAAQ5B,EAAE,GAAIA,EAAIiC,EAAI,KAAO,CACpE,GAAIjC,GAAK4B,EAAE,MAAQ5B,EAAE,GAAI,CAAE4B,EAAE,MAAQ5B,EAAE,GAAI4B,EAAE,IAAI,KAAKK,CAAE,EAAG,KAAO,CAC9DjC,EAAE,IAAI4B,EAAE,IAAI,IAAI,EACpBA,EAAE,KAAK,IAAI,EAAG,QACtB,CACAK,EAAKN,EAAK,KAAKZ,EAASa,CAAC,CAC7B,OAASzB,EAAP,CAAY8B,EAAK,CAAC,EAAG9B,CAAC,EAAG2B,EAAI,CAAG,QAAE,CAAUD,EAAI7B,EAAI,CAAG,CACzD,GAAIiC,EAAG,GAAK,EAAG,MAAMA,EAAG,GAAI,MAAO,CAAE,MAAOA,EAAG,GAAKA,EAAG,GAAK,OAAQ,KAAM,EAAK,CACnF,CACJ,EAEA7D,GAAe,SAAS8D,EAAG,EAAG,CAC1B,QAASpC,KAAKoC,EAAOpC,IAAM,WAAa,CAAC,OAAO,UAAU,eAAe,KAAK,EAAGA,CAAC,GAAGX,GAAgB,EAAG+C,EAAGpC,CAAC,CAChH,EAEAX,GAAkB,OAAO,OAAU,SAASgD,EAAGD,EAAGE,EAAGC,EAAI,CACjDA,IAAO,SAAWA,EAAKD,GAC3B,OAAO,eAAeD,EAAGE,EAAI,CAAE,WAAY,GAAM,IAAK,UAAW,CAAE,OAAOH,EAAEE,EAAI,CAAE,CAAC,CACvF,EAAM,SAASD,EAAGD,EAAGE,EAAGC,EAAI,CACpBA,IAAO,SAAWA,EAAKD,GAC3BD,EAAEE,GAAMH,EAAEE,EACd,EAEA/D,GAAW,SAAU8D,EAAG,CACpB,IAAIlC,EAAI,OAAO,QAAW,YAAc,OAAO,SAAUiC,EAAIjC,GAAKkC,EAAElC,GAAIG,EAAI,EAC5E,GAAI8B,EAAG,OAAOA,EAAE,KAAKC,CAAC,EACtB,GAAIA,GAAK,OAAOA,EAAE,QAAW,SAAU,MAAO,CAC1C,KAAM,UAAY,CACd,OAAIA,GAAK/B,GAAK+B,EAAE,SAAQA,EAAI,QACrB,CAAE,MAAOA,GAAKA,EAAE/B,KAAM,KAAM,CAAC+B,CAAE,CAC1C,CACJ,EACA,MAAM,IAAI,UAAUlC,EAAI,0BAA4B,iCAAiC,CACzF,EAEA3B,GAAS,SAAU6D,EAAGjC,EAAG,CACrB,IAAIgC,EAAI,OAAO,QAAW,YAAcC,EAAE,OAAO,UACjD,GAAI,CAACD,EAAG,OAAOC,EACf,IAAI/B,EAAI8B,EAAE,KAAKC,CAAC,EAAGzB,EAAG4B,EAAK,CAAC,EAAGnC,EAC/B,GAAI,CACA,MAAQD,IAAM,QAAUA,KAAM,IAAM,EAAEQ,EAAIN,EAAE,KAAK,GAAG,MAAMkC,EAAG,KAAK5B,EAAE,KAAK,CAC7E,OACO6B,EAAP,CAAgBpC,EAAI,CAAE,MAAOoC,CAAM,CAAG,QACtC,CACI,GAAI,CACI7B,GAAK,CAACA,EAAE,OAASwB,EAAI9B,EAAE,SAAY8B,EAAE,KAAK9B,CAAC,CACnD,QACA,CAAU,GAAID,EAAG,MAAMA,EAAE,KAAO,CACpC,CACA,OAAOmC,CACX,EAGA/D,GAAW,UAAY,CACnB,QAAS+D,EAAK,CAAC,EAAGlC,EAAI,EAAGA,EAAI,UAAU,OAAQA,IAC3CkC,EAAKA,EAAG,OAAOhE,GAAO,UAAU8B,EAAE,CAAC,EACvC,OAAOkC,CACX,EAGA9D,GAAiB,UAAY,CACzB,QAASyB,EAAI,EAAGG,EAAI,EAAGoC,EAAK,UAAU,OAAQpC,EAAIoC,EAAIpC,IAAKH,GAAK,UAAUG,GAAG,OAC7E,QAASM,EAAI,MAAMT,CAAC,EAAGmC,EAAI,EAAGhC,EAAI,EAAGA,EAAIoC,EAAIpC,IACzC,QAASqC,EAAI,UAAUrC,GAAIsC,EAAI,EAAGC,EAAKF,EAAE,OAAQC,EAAIC,EAAID,IAAKN,IAC1D1B,EAAE0B,GAAKK,EAAEC,GACjB,OAAOhC,CACX,EAEAjC,GAAgB,SAAUmE,EAAIC,EAAMC,EAAM,CACtC,GAAIA,GAAQ,UAAU,SAAW,EAAG,QAAS1C,EAAI,EAAG2C,EAAIF,EAAK,OAAQP,EAAIlC,EAAI2C,EAAG3C,KACxEkC,GAAM,EAAElC,KAAKyC,MACRP,IAAIA,EAAK,MAAM,UAAU,MAAM,KAAKO,EAAM,EAAGzC,CAAC,GACnDkC,EAAGlC,GAAKyC,EAAKzC,IAGrB,OAAOwC,EAAG,OAAON,GAAM,MAAM,UAAU,MAAM,KAAKO,CAAI,CAAC,CAC3D,EAEAnE,GAAU,SAAUe,EAAG,CACnB,OAAO,gBAAgBf,IAAW,KAAK,EAAIe,EAAG,MAAQ,IAAIf,GAAQe,CAAC,CACvE,EAEAd,GAAmB,SAAUoC,EAASC,EAAYE,EAAW,CACzD,GAAI,CAAC,OAAO,cAAe,MAAM,IAAI,UAAU,sCAAsC,EACrF,IAAIa,EAAIb,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,EAAGZ,EAAG4C,EAAI,CAAC,EAC5D,OAAO5C,EAAI,CAAC,EAAG4B,EAAK,MAAM,EAAGA,EAAK,OAAO,EAAGA,EAAK,QAAQ,EAAG5B,EAAE,OAAO,eAAiB,UAAY,CAAE,OAAO,IAAM,EAAGA,EACpH,SAAS4B,EAAK9B,EAAG,CAAM6B,EAAE7B,KAAIE,EAAEF,GAAK,SAAUT,EAAG,CAAE,OAAO,IAAI,QAAQ,SAAUgD,EAAG5C,EAAG,CAAEmD,EAAE,KAAK,CAAC9C,EAAGT,EAAGgD,EAAG5C,CAAC,CAAC,EAAI,GAAKoD,EAAO/C,EAAGT,CAAC,C
AAG,CAAC,CAAG,EAAG,CACzI,SAASwD,EAAO/C,EAAGT,EAAG,CAAE,GAAI,CAAE+B,EAAKO,EAAE7B,GAAGT,CAAC,CAAC,CAAG,OAASU,EAAP,CAAY+C,EAAOF,EAAE,GAAG,GAAI7C,CAAC,CAAG,CAAE,CACjF,SAASqB,EAAKd,EAAG,CAAEA,EAAE,iBAAiBhC,GAAU,QAAQ,QAAQgC,EAAE,MAAM,CAAC,EAAE,KAAKyC,EAAS7B,CAAM,EAAI4B,EAAOF,EAAE,GAAG,GAAItC,CAAC,CAAI,CACxH,SAASyC,EAAQ/B,EAAO,CAAE6B,EAAO,OAAQ7B,CAAK,CAAG,CACjD,SAASE,EAAOF,EAAO,CAAE6B,EAAO,QAAS7B,CAAK,CAAG,CACjD,SAAS8B,EAAOrB,EAAGpC,EAAG,CAAMoC,EAAEpC,CAAC,EAAGuD,EAAE,MAAM,EAAGA,EAAE,QAAQC,EAAOD,EAAE,GAAG,GAAIA,EAAE,GAAG,EAAE,CAAG,CACrF,EAEApE,GAAmB,SAAUuD,EAAG,CAC5B,IAAI/B,EAAGN,EACP,OAAOM,EAAI,CAAC,EAAG4B,EAAK,MAAM,EAAGA,EAAK,QAAS,SAAU7B,EAAG,CAAE,MAAMA,CAAG,CAAC,EAAG6B,EAAK,QAAQ,EAAG5B,EAAE,OAAO,UAAY,UAAY,CAAE,OAAO,IAAM,EAAGA,EAC1I,SAAS4B,EAAK9B,EAAG2B,EAAG,CAAEzB,EAAEF,GAAKiC,EAAEjC,GAAK,SAAUT,EAAG,CAAE,OAAQK,EAAI,CAACA,GAAK,CAAE,MAAOpB,GAAQyD,EAAEjC,GAAGT,CAAC,CAAC,EAAG,KAAMS,IAAM,QAAS,EAAI2B,EAAIA,EAAEpC,CAAC,EAAIA,CAAG,EAAIoC,CAAG,CAClJ,EAEAhD,GAAgB,SAAUsD,EAAG,CACzB,GAAI,CAAC,OAAO,cAAe,MAAM,IAAI,UAAU,sCAAsC,EACrF,IAAID,EAAIC,EAAE,OAAO,eAAgB,EACjC,OAAOD,EAAIA,EAAE,KAAKC,CAAC,GAAKA,EAAI,OAAO9D,IAAa,WAAaA,GAAS8D,CAAC,EAAIA,EAAE,OAAO,UAAU,EAAG,EAAI,CAAC,EAAGH,EAAK,MAAM,EAAGA,EAAK,OAAO,EAAGA,EAAK,QAAQ,EAAG,EAAE,OAAO,eAAiB,UAAY,CAAE,OAAO,IAAM,EAAG,GAC9M,SAASA,EAAK9B,EAAG,CAAE,EAAEA,GAAKiC,EAAEjC,IAAM,SAAUT,EAAG,CAAE,OAAO,IAAI,QAAQ,SAAU4B,EAASC,EAAQ,CAAE7B,EAAI0C,EAAEjC,GAAGT,CAAC,EAAGyD,EAAO7B,EAASC,EAAQ7B,EAAE,KAAMA,EAAE,KAAK,CAAG,CAAC,CAAG,CAAG,CAC/J,SAASyD,EAAO7B,EAASC,EAAQ1B,EAAGH,EAAG,CAAE,QAAQ,QAAQA,CAAC,EAAE,KAAK,SAASA,EAAG,CAAE4B,EAAQ,CAAE,MAAO5B,EAAG,KAAMG,CAAE,CAAC,CAAG,EAAG0B,CAAM,CAAG,CAC/H,EAEAxC,GAAuB,SAAUsE,EAAQC,EAAK,CAC1C,OAAI,OAAO,eAAkB,OAAO,eAAeD,EAAQ,MAAO,CAAE,MAAOC,CAAI,CAAC,EAAYD,EAAO,IAAMC,EAClGD,CACX,EAEA,IAAIE,EAAqB,OAAO,OAAU,SAASnB,EAAG1C,EAAG,CACrD,OAAO,eAAe0C,EAAG,UAAW,CAAE,WAAY,GAAM,MAAO1C,CAAE,CAAC,CACtE,EAAK,SAAS0C,EAAG1C,EAAG,CAChB0C,EAAE,QAAa1C,CACnB,EAEAV,GAAe,SAAUwE,EAAK,CAC1B,GAAIA,GAAOA,EAAI,WAAY,OAAOA,EAClC,IAAI7B,EAAS,CAAC,EACd,GAAI6B,GAAO,KAAM,QAASnB,KAAKmB,EAASnB,IAAM,WAAa,OAAO,UAAU,eAAe,KAAKmB,EAAKnB,CAAC,GAAGjD,GAAgBuC,EAAQ6B,EAAKnB,CAAC,EACvI,OAAAkB,EAAmB5B,EAAQ6B,CAAG,EACvB7B,CACX,EAEA1C,GAAkB,SAAUuE,EAAK,CAC7B,OAAQA,GAAOA,EAAI,WAAcA,EAAM,CAAE,QAAWA,CAAI,CAC5D,EAEAtE,GAAyB,SAAUuE,EAAUC,EAAOC,EAAM7B,EAAG,CACzD,GAAI6B,IAAS,KAAO,CAAC7B,EAAG,MAAM,IAAI,UAAU,+CAA+C,EAC3F,GAAI,OAAO4B,GAAU,WAAaD,IAAaC,GAAS,CAAC5B,EAAI,CAAC4B,EAAM,IAAID,CAAQ,EAAG,MAAM,IAAI,UAAU,0EAA0E,EACjL,OAAOE,IAAS,IAAM7B,EAAI6B,IAAS,IAAM7B,EAAE,KAAK2B,CAAQ,EAAI3B,EAAIA,EAAE,MAAQ4B,EAAM,IAAID,CAAQ,CAChG,EAEAtE,GAAyB,SAAUsE,EAAUC,EAAOrC,EAAOsC,EAAM7B,EAAG,CAChE,GAAI6B,IAAS,IAAK,MAAM,IAAI,UAAU,gCAAgC,EACtE,GAAIA,IAAS,KAAO,CAAC7B,EAAG,MAAM,IAAI,UAAU,+CAA+C,EAC3F,GAAI,OAAO4B,GAAU,WAAaD,IAAaC,GAAS,CAAC5B,EAAI,CAAC4B,EAAM,IAAID,CAAQ,EAAG,MAAM,IAAI,UAAU,yEAAyE,EAChL,OAAQE,IAAS,IAAM7B,EAAE,KAAK2B,EAAUpC,CAAK,EAAIS,EAAIA,EAAE,MAAQT,EAAQqC,EAAM,IAAID,EAAUpC,CAAK,EAAIA,CACxG,EAEA1B,EAAS,YAAa9B,EAAS,EAC/B8B,EAAS,WAAY7B,EAAQ,EAC7B6B,EAAS,SAAU5B,EAAM,EACzB4B,EAAS,aAAc3B,EAAU,EACjC2B,EAAS,UAAW1B,EAAO,EAC3B0B,EAAS,aAAczB,EAAU,EACjCyB,EAAS,YAAaxB,EAAS,EAC/BwB,EAAS,cAAevB,EAAW,EACnCuB,EAAS,eAAgBtB,EAAY,EACrCsB,EAAS,kBAAmBP,EAAe,EAC3CO,EAAS,WAAYrB,EAAQ,EAC7BqB,EAAS,SAAUpB,EAAM,EACzBoB,EAAS,WAAYnB,EAAQ,EAC7BmB,EAAS,iBAAkBlB,EAAc,EACzCkB,EAAS,gBAAiBjB,EAAa,EACvCiB,EAAS,UAAWhB,EAAO,EAC3BgB,EAAS,mBAAoBf,EAAgB,EAC7Ce,EAAS,mBAAoBd,EAAgB,EAC7Cc,EAAS,gBAAiBb,EAAa,EACvCa,EAAS,uBAAwBZ,EAAoB,EACrDY,EAAS,eAAgBX,EAAY,EACrCW,EAAS,kBAAmBV,EAAe,EAC3CU,EAAS,yBAA0BT,EAAsB,EACzDS,EAAS,yBAA0BR,EAAsB,CAC7D,CAAC,ICjTD,IAAAyE,GAAAC
,GAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMC,SAA0CC,EAAMC,EAAS,CACtD,OAAOH,IAAY,UAAY,OAAOC,IAAW,SACnDA,GAAO,QAAUE,EAAQ,EAClB,OAAO,QAAW,YAAc,OAAO,IAC9C,OAAO,CAAC,EAAGA,CAAO,EACX,OAAOH,IAAY,SAC1BA,GAAQ,YAAiBG,EAAQ,EAEjCD,EAAK,YAAiBC,EAAQ,CAChC,GAAGH,GAAM,UAAW,CACpB,OAAiB,UAAW,CAClB,IAAII,EAAuB,CAE/B,IACC,SAASC,EAAyBC,EAAqBC,EAAqB,CAEnF,aAGAA,EAAoB,EAAED,EAAqB,CACzC,QAAW,UAAW,CAAE,OAAqBE,EAAW,CAC1D,CAAC,EAGD,IAAIC,EAAeF,EAAoB,GAAG,EACtCG,EAAoCH,EAAoB,EAAEE,CAAY,EAEtEE,EAASJ,EAAoB,GAAG,EAChCK,EAA8BL,EAAoB,EAAEI,CAAM,EAE1DE,EAAaN,EAAoB,GAAG,EACpCO,EAA8BP,EAAoB,EAAEM,CAAU,EAOlE,SAASE,EAAQC,EAAM,CACrB,GAAI,CACF,OAAO,SAAS,YAAYA,CAAI,CAClC,OAASC,EAAP,CACA,MAAO,EACT,CACF,CAUA,IAAIC,EAAqB,SAA4BC,EAAQ,CAC3D,IAAIC,EAAeN,EAAe,EAAEK,CAAM,EAC1C,OAAAJ,EAAQ,KAAK,EACNK,CACT,EAEiCC,EAAeH,EAOhD,SAASI,EAAkBC,EAAO,CAChC,IAAIC,EAAQ,SAAS,gBAAgB,aAAa,KAAK,IAAM,MACzDC,EAAc,SAAS,cAAc,UAAU,EAEnDA,EAAY,MAAM,SAAW,OAE7BA,EAAY,MAAM,OAAS,IAC3BA,EAAY,MAAM,QAAU,IAC5BA,EAAY,MAAM,OAAS,IAE3BA,EAAY,MAAM,SAAW,WAC7BA,EAAY,MAAMD,EAAQ,QAAU,QAAU,UAE9C,IAAIE,EAAY,OAAO,aAAe,SAAS,gBAAgB,UAC/D,OAAAD,EAAY,MAAM,IAAM,GAAG,OAAOC,EAAW,IAAI,EACjDD,EAAY,aAAa,WAAY,EAAE,EACvCA,EAAY,MAAQF,EACbE,CACT,CAYA,IAAIE,EAAiB,SAAwBJ,EAAOK,EAAS,CAC3D,IAAIH,EAAcH,EAAkBC,CAAK,EACzCK,EAAQ,UAAU,YAAYH,CAAW,EACzC,IAAIL,EAAeN,EAAe,EAAEW,CAAW,EAC/C,OAAAV,EAAQ,MAAM,EACdU,EAAY,OAAO,EACZL,CACT,EASIS,EAAsB,SAA6BV,EAAQ,CAC7D,IAAIS,EAAU,UAAU,OAAS,GAAK,UAAU,KAAO,OAAY,UAAU,GAAK,CAChF,UAAW,SAAS,IACtB,EACIR,EAAe,GAEnB,OAAI,OAAOD,GAAW,SACpBC,EAAeO,EAAeR,EAAQS,CAAO,EACpCT,aAAkB,kBAAoB,CAAC,CAAC,OAAQ,SAAU,MAAO,MAAO,UAAU,EAAE,SAASA,GAAW,KAA4B,OAASA,EAAO,IAAI,EAEjKC,EAAeO,EAAeR,EAAO,MAAOS,CAAO,GAEnDR,EAAeN,EAAe,EAAEK,CAAM,EACtCJ,EAAQ,MAAM,GAGTK,CACT,EAEiCU,EAAgBD,EAEjD,SAASE,EAAQC,EAAK,CAA6B,OAAI,OAAO,QAAW,YAAc,OAAO,OAAO,UAAa,SAAYD,EAAU,SAAiBC,EAAK,CAAE,OAAO,OAAOA,CAAK,EAAYD,EAAU,SAAiBC,EAAK,CAAE,OAAOA,GAAO,OAAO,QAAW,YAAcA,EAAI,cAAgB,QAAUA,IAAQ,OAAO,UAAY,SAAW,OAAOA,CAAK,EAAYD,EAAQC,CAAG,CAAG,CAUzX,IAAIC,GAAyB,UAAkC,CAC7D,IAAIL,EAAU,UAAU,OAAS,GAAK,UAAU,KAAO,OAAY,UAAU,GAAK,CAAC,EAE/EM,EAAkBN,EAAQ,OAC1BO,EAASD,IAAoB,OAAS,OAASA,EAC/CE,EAAYR,EAAQ,UACpBT,EAASS,EAAQ,OACjBS,GAAOT,EAAQ,KAEnB,GAAIO,IAAW,QAAUA,IAAW,MAClC,MAAM,IAAI,MAAM,oDAAoD,EAItE,GAAIhB,IAAW,OACb,GAAIA,GAAUY,EAAQZ,CAAM,IAAM,UAAYA,EAAO,WAAa,EAAG,CACnE,GAAIgB,IAAW,QAAUhB,EAAO,aAAa,UAAU,EACrD,MAAM,IAAI,MAAM,mFAAmF,EAGrG,GAAIgB,IAAW,QAAUhB,EAAO,aAAa,UAAU,GAAKA,EAAO,aAAa,UAAU,GACxF,MAAM,IAAI,MAAM,uGAAwG,CAE5H,KACE,OAAM,IAAI,MAAM,6CAA6C,EAKjE,GAAIkB,GACF,OAAOP,EAAaO,GAAM,CACxB,UAAWD,CACb,CAAC,EAIH,GAAIjB,EACF,OAAOgB,IAAW,MAAQd,EAAYF,CAAM,EAAIW,EAAaX,EAAQ,CACnE,UAAWiB,CACb,CAAC,CAEL,EAEiCE,GAAmBL,GAEpD,SAASM,GAAiBP,EAAK,CAA6B,OAAI,OAAO,QAAW,YAAc,OAAO,OAAO,UAAa,SAAYO,GAAmB,SAAiBP,EAAK,CAAE,OAAO,OAAOA,CAAK,EAAYO,GAAmB,SAAiBP,EAAK,CAAE,OAAOA,GAAO,OAAO,QAAW,YAAcA,EAAI,cAAgB,QAAUA,IAAQ,OAAO,UAAY,SAAW,OAAOA,CAAK,EAAYO,GAAiBP,CAAG,CAAG,CAE7Z,SAASQ,GAAgBC,EAAUC,EAAa,CAAE,GAAI,EAAED,aAAoBC,GAAgB,MAAM,IAAI,UAAU,mCAAmC,CAAK,CAExJ,SAASC,GAAkBxB,EAAQyB,EAAO,CAAE,QAASC,EAAI,EAAGA,EAAID,EAAM,OAAQC,IAAK,CAAE,IAAIC,EAAaF,EAAMC,GAAIC,EAAW,WAAaA,EAAW,YAAc,GAAOA,EAAW,aAAe,GAAU,UAAWA,IAAYA,EAAW,SAAW,IAAM,OAAO,eAAe3B,EAAQ2B,EAAW,IAAKA,CAAU,CAAG,CAAE,CAE5T,SAASC,GAAaL,EAAaM,EAAYC,EAAa,CAAE,OAAID,GAAYL,GAAkBD,EAAY,UAAWM,CAAU,EAAOC,GAAaN,GAAkBD,EAAaO,CAAW,EAAUP,CAAa,CAEtN,SAASQ,GAAUC,EAAUC,EAAY,CAAE,GAAI,OAAOA,GAAe,YAAcA,IAAe,KAAQ,MAAM,IAAI,UAAU,oDAAoD,EAAKD,EAAS,UAAY,OAAO,OAAOC,GAAcA,EAAW,UAAW,CAAE,YAAa,CAAE,MAAOD,EAAU,SAAU,GAAM,aAAc,EAAK,CAAE,CAAC,EAAOC,GAAYC,GAAgBF,EAAUC,CAAU,CAAG,CAEhY,SAASC,GAAgBC,EAAGC,EAAG,CAAE,OAAAF,GAAkB,OAAO,gBAAkB,SAAyB
C,EAAGC,EAAG,CAAE,OAAAD,EAAE,UAAYC,EAAUD,CAAG,EAAUD,GAAgBC,EAAGC,CAAC,CAAG,CAEzK,SAASC,GAAaC,EAAS,CAAE,IAAIC,EAA4BC,GAA0B,EAAG,OAAO,UAAgC,CAAE,IAAIC,EAAQC,GAAgBJ,CAAO,EAAGK,EAAQ,GAAIJ,EAA2B,CAAE,IAAIK,EAAYF,GAAgB,IAAI,EAAE,YAAaC,EAAS,QAAQ,UAAUF,EAAO,UAAWG,CAAS,CAAG,MAASD,EAASF,EAAM,MAAM,KAAM,SAAS,EAAK,OAAOI,GAA2B,KAAMF,CAAM,CAAG,CAAG,CAExa,SAASE,GAA2BC,EAAMC,EAAM,CAAE,OAAIA,IAAS3B,GAAiB2B,CAAI,IAAM,UAAY,OAAOA,GAAS,YAAsBA,EAAeC,GAAuBF,CAAI,CAAG,CAEzL,SAASE,GAAuBF,EAAM,CAAE,GAAIA,IAAS,OAAU,MAAM,IAAI,eAAe,2DAA2D,EAAK,OAAOA,CAAM,CAErK,SAASN,IAA4B,CAA0E,GAApE,OAAO,SAAY,aAAe,CAAC,QAAQ,WAA6B,QAAQ,UAAU,KAAM,MAAO,GAAO,GAAI,OAAO,OAAU,WAAY,MAAO,GAAM,GAAI,CAAE,YAAK,UAAU,SAAS,KAAK,QAAQ,UAAU,KAAM,CAAC,EAAG,UAAY,CAAC,CAAC,CAAC,EAAU,EAAM,OAASS,EAAP,CAAY,MAAO,EAAO,CAAE,CAEnU,SAASP,GAAgBP,EAAG,CAAE,OAAAO,GAAkB,OAAO,eAAiB,OAAO,eAAiB,SAAyBP,EAAG,CAAE,OAAOA,EAAE,WAAa,OAAO,eAAeA,CAAC,CAAG,EAAUO,GAAgBP,CAAC,CAAG,CAa5M,SAASe,GAAkBC,EAAQC,EAAS,CAC1C,IAAIC,EAAY,kBAAkB,OAAOF,CAAM,EAE/C,GAAI,EAACC,EAAQ,aAAaC,CAAS,EAInC,OAAOD,EAAQ,aAAaC,CAAS,CACvC,CAOA,IAAIC,GAAyB,SAAUC,EAAU,CAC/CxB,GAAUuB,EAAWC,CAAQ,EAE7B,IAAIC,EAASnB,GAAaiB,CAAS,EAMnC,SAASA,EAAUG,EAAShD,EAAS,CACnC,IAAIiD,EAEJ,OAAArC,GAAgB,KAAMiC,CAAS,EAE/BI,EAAQF,EAAO,KAAK,IAAI,EAExBE,EAAM,eAAejD,CAAO,EAE5BiD,EAAM,YAAYD,CAAO,EAElBC,CACT,CAQA,OAAA9B,GAAa0B,EAAW,CAAC,CACvB,IAAK,iBACL,MAAO,UAA0B,CAC/B,IAAI7C,EAAU,UAAU,OAAS,GAAK,UAAU,KAAO,OAAY,UAAU,GAAK,CAAC,EACnF,KAAK,OAAS,OAAOA,EAAQ,QAAW,WAAaA,EAAQ,OAAS,KAAK,cAC3E,KAAK,OAAS,OAAOA,EAAQ,QAAW,WAAaA,EAAQ,OAAS,KAAK,cAC3E,KAAK,KAAO,OAAOA,EAAQ,MAAS,WAAaA,EAAQ,KAAO,KAAK,YACrE,KAAK,UAAYW,GAAiBX,EAAQ,SAAS,IAAM,SAAWA,EAAQ,UAAY,SAAS,IACnG,CAMF,EAAG,CACD,IAAK,cACL,MAAO,SAAqBgD,EAAS,CACnC,IAAIE,EAAS,KAEb,KAAK,SAAWlE,EAAe,EAAEgE,EAAS,QAAS,SAAUR,GAAG,CAC9D,OAAOU,EAAO,QAAQV,EAAC,CACzB,CAAC,CACH,CAMF,EAAG,CACD,IAAK,UACL,MAAO,SAAiBA,EAAG,CACzB,IAAIQ,EAAUR,EAAE,gBAAkBA,EAAE,cAChCjC,GAAS,KAAK,OAAOyC,CAAO,GAAK,OACjCvC,GAAOC,GAAgB,CACzB,OAAQH,GACR,UAAW,KAAK,UAChB,OAAQ,KAAK,OAAOyC,CAAO,EAC3B,KAAM,KAAK,KAAKA,CAAO,CACzB,CAAC,EAED,KAAK,KAAKvC,GAAO,UAAY,QAAS,CACpC,OAAQF,GACR,KAAME,GACN,QAASuC,EACT,eAAgB,UAA0B,CACpCA,GACFA,EAAQ,MAAM,EAGhB,OAAO,aAAa,EAAE,gBAAgB,CACxC,CACF,CAAC,CACH,CAMF,EAAG,CACD,IAAK,gBACL,MAAO,SAAuBA,EAAS,CACrC,OAAOP,GAAkB,SAAUO,CAAO,CAC5C,CAMF,EAAG,CACD,IAAK,gBACL,MAAO,SAAuBA,EAAS,CACrC,IAAIG,EAAWV,GAAkB,SAAUO,CAAO,EAElD,GAAIG,EACF,OAAO,SAAS,cAAcA,CAAQ,CAE1C,CAQF,EAAG,CACD,IAAK,cAML,MAAO,SAAqBH,EAAS,CACnC,OAAOP,GAAkB,OAAQO,CAAO,CAC1C,CAKF,EAAG,CACD,IAAK,UACL,MAAO,UAAmB,CACxB,KAAK,SAAS,QAAQ,CACxB,CACF,CAAC,EAAG,CAAC,CACH,IAAK,OACL,MAAO,SAAczD,EAAQ,CAC3B,IAAIS,EAAU,UAAU,OAAS,GAAK,UAAU,KAAO,OAAY,UAAU,GAAK,CAChF,UAAW,SAAS,IACtB,EACA,OAAOE,EAAaX,EAAQS,CAAO,CACrC,CAOF,EAAG,CACD,IAAK,MACL,MAAO,SAAaT,EAAQ,CAC1B,OAAOE,EAAYF,CAAM,CAC3B,CAOF,EAAG,CACD,IAAK,cACL,MAAO,UAAuB,CAC5B,IAAIgB,EAAS,UAAU,OAAS,GAAK,UAAU,KAAO,OAAY,UAAU,GAAK,CAAC,OAAQ,KAAK,EAC3F6C,EAAU,OAAO7C,GAAW,SAAW,CAACA,CAAM,EAAIA,EAClD8C,GAAU,CAAC,CAAC,SAAS,sBACzB,OAAAD,EAAQ,QAAQ,SAAU7C,GAAQ,CAChC8C,GAAUA,IAAW,CAAC,CAAC,SAAS,sBAAsB9C,EAAM,CAC9D,CAAC,EACM8C,EACT,CACF,CAAC,CAAC,EAEKR,CACT,EAAG/D,EAAqB,CAAE,EAEOF,GAAaiE,EAExC,EAEA,IACC,SAASxE,EAAQ,CAExB,IAAIiF,EAAqB,EAKzB,GAAI,OAAO,SAAY,aAAe,CAAC,QAAQ,UAAU,QAAS,CAC9D,IAAIC,EAAQ,QAAQ,UAEpBA,EAAM,QAAUA,EAAM,iBACNA,EAAM,oBACNA,EAAM,mBACNA,EAAM,kBACNA,EAAM,qBAC1B,CASA,SAASC,EAASb,EAASQ,EAAU,CACjC,KAAOR,GAAWA,EAAQ,WAAaW,GAAoB,CACvD,GAAI,OAAOX,EAAQ,SAAY,YAC3BA,EAAQ,QAAQQ,CAAQ,EAC1B,OAAOR,EAETA,EAAUA,EAAQ,UACtB,CACJ,CAEAtE,EAAO,QAAUmF,CAGX,EAEA,IACC,SAASnF,EAAQoF,EAA0B9E,EAAqB,CAEvE,IAAI6E,EAAU7E,EAAoB,GAAG,EAYrC,SAAS+E,EAAUf,EAASQ,EAAU/D,
EAAMuE,EAAUC,EAAY,CAC9D,IAAIC,EAAaC,EAAS,MAAM,KAAM,SAAS,EAE/C,OAAAnB,EAAQ,iBAAiBvD,EAAMyE,EAAYD,CAAU,EAE9C,CACH,QAAS,UAAW,CAChBjB,EAAQ,oBAAoBvD,EAAMyE,EAAYD,CAAU,CAC5D,CACJ,CACJ,CAYA,SAASG,EAASC,EAAUb,EAAU/D,EAAMuE,EAAUC,EAAY,CAE9D,OAAI,OAAOI,EAAS,kBAAqB,WAC9BN,EAAU,MAAM,KAAM,SAAS,EAItC,OAAOtE,GAAS,WAGTsE,EAAU,KAAK,KAAM,QAAQ,EAAE,MAAM,KAAM,SAAS,GAI3D,OAAOM,GAAa,WACpBA,EAAW,SAAS,iBAAiBA,CAAQ,GAI1C,MAAM,UAAU,IAAI,KAAKA,EAAU,SAAUrB,EAAS,CACzD,OAAOe,EAAUf,EAASQ,EAAU/D,EAAMuE,EAAUC,CAAU,CAClE,CAAC,EACL,CAWA,SAASE,EAASnB,EAASQ,EAAU/D,EAAMuE,EAAU,CACjD,OAAO,SAASnB,EAAG,CACfA,EAAE,eAAiBgB,EAAQhB,EAAE,OAAQW,CAAQ,EAEzCX,EAAE,gBACFmB,EAAS,KAAKhB,EAASH,CAAC,CAEhC,CACJ,CAEAnE,EAAO,QAAU0F,CAGX,EAEA,IACC,SAAStF,EAAyBL,EAAS,CAQlDA,EAAQ,KAAO,SAASuB,EAAO,CAC3B,OAAOA,IAAU,QACVA,aAAiB,aACjBA,EAAM,WAAa,CAC9B,EAQAvB,EAAQ,SAAW,SAASuB,EAAO,CAC/B,IAAIP,EAAO,OAAO,UAAU,SAAS,KAAKO,CAAK,EAE/C,OAAOA,IAAU,SACTP,IAAS,qBAAuBA,IAAS,4BACzC,WAAYO,IACZA,EAAM,SAAW,GAAKvB,EAAQ,KAAKuB,EAAM,EAAE,EACvD,EAQAvB,EAAQ,OAAS,SAASuB,EAAO,CAC7B,OAAO,OAAOA,GAAU,UACjBA,aAAiB,MAC5B,EAQAvB,EAAQ,GAAK,SAASuB,EAAO,CACzB,IAAIP,EAAO,OAAO,UAAU,SAAS,KAAKO,CAAK,EAE/C,OAAOP,IAAS,mBACpB,CAGM,EAEA,IACC,SAASf,EAAQoF,EAA0B9E,EAAqB,CAEvE,IAAIsF,EAAKtF,EAAoB,GAAG,EAC5BoF,EAAWpF,EAAoB,GAAG,EAWtC,SAASI,EAAOQ,EAAQH,EAAMuE,EAAU,CACpC,GAAI,CAACpE,GAAU,CAACH,GAAQ,CAACuE,EACrB,MAAM,IAAI,MAAM,4BAA4B,EAGhD,GAAI,CAACM,EAAG,OAAO7E,CAAI,EACf,MAAM,IAAI,UAAU,kCAAkC,EAG1D,GAAI,CAAC6E,EAAG,GAAGN,CAAQ,EACf,MAAM,IAAI,UAAU,mCAAmC,EAG3D,GAAIM,EAAG,KAAK1E,CAAM,EACd,OAAO2E,EAAW3E,EAAQH,EAAMuE,CAAQ,EAEvC,GAAIM,EAAG,SAAS1E,CAAM,EACvB,OAAO4E,EAAe5E,EAAQH,EAAMuE,CAAQ,EAE3C,GAAIM,EAAG,OAAO1E,CAAM,EACrB,OAAO6E,EAAe7E,EAAQH,EAAMuE,CAAQ,EAG5C,MAAM,IAAI,UAAU,2EAA2E,CAEvG,CAWA,SAASO,EAAWG,EAAMjF,EAAMuE,EAAU,CACtC,OAAAU,EAAK,iBAAiBjF,EAAMuE,CAAQ,EAE7B,CACH,QAAS,UAAW,CAChBU,EAAK,oBAAoBjF,EAAMuE,CAAQ,CAC3C,CACJ,CACJ,CAWA,SAASQ,EAAeG,EAAUlF,EAAMuE,EAAU,CAC9C,aAAM,UAAU,QAAQ,KAAKW,EAAU,SAASD,EAAM,CAClDA,EAAK,iBAAiBjF,EAAMuE,CAAQ,CACxC,CAAC,EAEM,CACH,QAAS,UAAW,CAChB,MAAM,UAAU,QAAQ,KAAKW,EAAU,SAASD,EAAM,CAClDA,EAAK,oBAAoBjF,EAAMuE,CAAQ,CAC3C,CAAC,CACL,CACJ,CACJ,CAWA,SAASS,EAAejB,EAAU/D,EAAMuE,EAAU,CAC9C,OAAOI,EAAS,SAAS,KAAMZ,EAAU/D,EAAMuE,CAAQ,CAC3D,CAEAtF,EAAO,QAAUU,CAGX,EAEA,IACC,SAASV,EAAQ,CAExB,SAASkG,EAAO5B,EAAS,CACrB,IAAInD,EAEJ,GAAImD,EAAQ,WAAa,SACrBA,EAAQ,MAAM,EAEdnD,EAAemD,EAAQ,cAElBA,EAAQ,WAAa,SAAWA,EAAQ,WAAa,WAAY,CACtE,IAAI6B,EAAa7B,EAAQ,aAAa,UAAU,EAE3C6B,GACD7B,EAAQ,aAAa,WAAY,EAAE,EAGvCA,EAAQ,OAAO,EACfA,EAAQ,kBAAkB,EAAGA,EAAQ,MAAM,MAAM,EAE5C6B,GACD7B,EAAQ,gBAAgB,UAAU,EAGtCnD,EAAemD,EAAQ,KAC3B,KACK,CACGA,EAAQ,aAAa,iBAAiB,GACtCA,EAAQ,MAAM,EAGlB,IAAI8B,EAAY,OAAO,aAAa,EAChCC,EAAQ,SAAS,YAAY,EAEjCA,EAAM,mBAAmB/B,CAAO,EAChC8B,EAAU,gBAAgB,EAC1BA,EAAU,SAASC,CAAK,EAExBlF,EAAeiF,EAAU,SAAS,CACtC,CAEA,OAAOjF,CACX,CAEAnB,EAAO,QAAUkG,CAGX,EAEA,IACC,SAASlG,EAAQ,CAExB,SAASsG,GAAK,CAGd,CAEAA,EAAE,UAAY,CACZ,GAAI,SAAUC,EAAMjB,EAAUkB,EAAK,CACjC,IAAIrC,EAAI,KAAK,IAAM,KAAK,EAAI,CAAC,GAE7B,OAACA,EAAEoC,KAAUpC,EAAEoC,GAAQ,CAAC,IAAI,KAAK,CAC/B,GAAIjB,EACJ,IAAKkB,CACP,CAAC,EAEM,IACT,EAEA,KAAM,SAAUD,EAAMjB,EAAUkB,EAAK,CACnC,IAAIxC,EAAO,KACX,SAASyB,GAAY,CACnBzB,EAAK,IAAIuC,EAAMd,CAAQ,EACvBH,EAAS,MAAMkB,EAAK,SAAS,CAC/B,CAEA,OAAAf,EAAS,EAAIH,EACN,KAAK,GAAGiB,EAAMd,EAAUe,CAAG,CACpC,EAEA,KAAM,SAAUD,EAAM,CACpB,IAAIE,EAAO,CAAC,EAAE,MAAM,KAAK,UAAW,CAAC,EACjCC,IAAW,KAAK,IAAM,KAAK,EAAI,CAAC,IAAIH,IAAS,CAAC,GAAG,MAAM,EACvD3D,EAAI,EACJ+D,EAAMD,EAAO,OAEjB,IAAK9D,EAAGA,EAAI+D,EAAK/D,IACf8D,EAAO9D,GAAG,GAAG,MAAM8D,EAAO9D,GAAG,IAAK6D,CAAI,EAGxC,OAAO,IACT,EAEA,IAAK,SAAUF,EAAMjB,EAAU,CAC7B,IAAInB,EAAI,KAAK,IAAM,KAAK,EAAI,CAAC,GACz
ByC,EAAOzC,EAAEoC,GACTM,EAAa,CAAC,EAElB,GAAID,GAAQtB,EACV,QAAS1C,EAAI,EAAG+D,EAAMC,EAAK,OAAQhE,EAAI+D,EAAK/D,IACtCgE,EAAKhE,GAAG,KAAO0C,GAAYsB,EAAKhE,GAAG,GAAG,IAAM0C,GAC9CuB,EAAW,KAAKD,EAAKhE,EAAE,EAQ7B,OAACiE,EAAW,OACR1C,EAAEoC,GAAQM,EACV,OAAO1C,EAAEoC,GAEN,IACT,CACF,EAEAvG,EAAO,QAAUsG,EACjBtG,EAAO,QAAQ,YAAcsG,CAGvB,CAEI,EAGIQ,EAA2B,CAAC,EAGhC,SAASxG,EAAoByG,EAAU,CAEtC,GAAGD,EAAyBC,GAC3B,OAAOD,EAAyBC,GAAU,QAG3C,IAAI/G,EAAS8G,EAAyBC,GAAY,CAGjD,QAAS,CAAC,CACX,EAGA,OAAA5G,EAAoB4G,GAAU/G,EAAQA,EAAO,QAASM,CAAmB,EAGlEN,EAAO,OACf,CAIA,OAAC,UAAW,CAEXM,EAAoB,EAAI,SAASN,EAAQ,CACxC,IAAIgH,EAAShH,GAAUA,EAAO,WAC7B,UAAW,CAAE,OAAOA,EAAO,OAAY,EACvC,UAAW,CAAE,OAAOA,CAAQ,EAC7B,OAAAM,EAAoB,EAAE0G,EAAQ,CAAE,EAAGA,CAAO,CAAC,EACpCA,CACR,CACD,EAAE,EAGD,UAAW,CAEX1G,EAAoB,EAAI,SAASP,EAASkH,EAAY,CACrD,QAAQC,KAAOD,EACX3G,EAAoB,EAAE2G,EAAYC,CAAG,GAAK,CAAC5G,EAAoB,EAAEP,EAASmH,CAAG,GAC/E,OAAO,eAAenH,EAASmH,EAAK,CAAE,WAAY,GAAM,IAAKD,EAAWC,EAAK,CAAC,CAGjF,CACD,EAAE,EAGD,UAAW,CACX5G,EAAoB,EAAI,SAASyB,EAAKoF,EAAM,CAAE,OAAO,OAAO,UAAU,eAAe,KAAKpF,EAAKoF,CAAI,CAAG,CACvG,EAAE,EAMK7G,EAAoB,GAAG,CAC/B,EAAG,EACX,OACD,CAAC,ICz3BD,IAAA8G,GAAAC,GAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeA,IAAIC,GAAkB,UAOtBD,GAAO,QAAUE,GAUjB,SAASA,GAAWC,EAAQ,CAC1B,IAAIC,EAAM,GAAKD,EACXE,EAAQJ,GAAgB,KAAKG,CAAG,EAEpC,GAAI,CAACC,EACH,OAAOD,EAGT,IAAIE,EACAC,EAAO,GACPC,EAAQ,EACRC,EAAY,EAEhB,IAAKD,EAAQH,EAAM,MAAOG,EAAQJ,EAAI,OAAQI,IAAS,CACrD,OAAQJ,EAAI,WAAWI,CAAK,EAAG,CAC7B,IAAK,IACHF,EAAS,SACT,MACF,IAAK,IACHA,EAAS,QACT,MACF,IAAK,IACHA,EAAS,QACT,MACF,IAAK,IACHA,EAAS,OACT,MACF,IAAK,IACHA,EAAS,OACT,MACF,QACE,QACJ,CAEIG,IAAcD,IAChBD,GAAQH,EAAI,UAAUK,EAAWD,CAAK,GAGxCC,EAAYD,EAAQ,EACpBD,GAAQD,CACV,CAEA,OAAOG,IAAcD,EACjBD,EAAOH,EAAI,UAAUK,EAAWD,CAAK,EACrCD,CACN,IC7EA,MAAM,UAAU,MAAM,OAAO,eAAe,MAAM,UAAU,OAAO,CAAC,aAAa,GAAG,MAAM,SAASG,GAAG,CAAC,IAAI,EAAE,MAAM,UAAU,EAAE,EAAE,EAAE,OAAO,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,UAAU,OAAO,KAAK,KAAK,SAASC,EAAEC,EAAE,CAAC,OAAO,MAAM,QAAQA,CAAC,EAAED,EAAE,KAAK,MAAMA,EAAED,EAAE,KAAKE,EAAE,EAAE,CAAC,CAAC,EAAED,EAAE,KAAKC,CAAC,EAAED,CAAC,EAAE,CAAC,CAAC,EAAE,MAAM,UAAU,MAAM,KAAK,IAAI,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,MAAM,UAAU,SAAS,OAAO,eAAe,MAAM,UAAU,UAAU,CAAC,aAAa,GAAG,MAAM,SAASD,EAAE,CAAC,OAAO,MAAM,UAAU,IAAI,MAAM,KAAK,SAAS,EAAE,KAAK,CAAC,EAAE,SAAS,EAAE,CAAC,ECuBxf,IAAAG,GAAO,SCvBP,KAAK,QAAQ,KAAK,MAAM,SAAS,EAAEC,EAAE,CAAC,OAAOA,EAAEA,GAAG,CAAC,EAAE,IAAI,QAAQ,SAASC,EAAEC,EAAE,CAAC,IAAIC,EAAE,IAAI,eAAeC,EAAE,CAAC,EAAEC,EAAE,CAAC,EAAEC,EAAE,CAAC,EAAEC,EAAE,UAAU,CAAC,MAAM,CAAC,IAAOJ,EAAE,OAAO,IAAI,IAAjB,EAAoB,WAAWA,EAAE,WAAW,OAAOA,EAAE,OAAO,IAAIA,EAAE,YAAY,KAAK,UAAU,CAAC,OAAO,QAAQ,QAAQA,EAAE,YAAY,CAAC,EAAE,KAAK,UAAU,CAAC,OAAO,QAAQ,QAAQA,EAAE,YAAY,EAAE,KAAK,KAAK,KAAK,CAAC,EAAE,KAAK,UAAU,CAAC,OAAO,QAAQ,QAAQ,IAAI,KAAK,CAACA,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,MAAMI,EAAE,QAAQ,CAAC,KAAK,UAAU,CAAC,OAAOH,CAAC,EAAE,QAAQ,UAAU,CAAC,OAAOC,CAAC,EAAE,IAAI,SAASG,EAAE,CAAC,OAAOF,EAAEE,EAAE,YAAY,EAAE,EAAE,IAAI,SAASA,EAAE,CAAC,OAAOA,EAAE,YAAY,IAAIF,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQG,KAAKN,EAAE,KAAKH,EAAE,QAAQ,MAAM,EAAE,EAAE,EAAEG,EAAE,OAAO,UAAU,CAACA,EAAE,sBAAsB,EAAE,QAAQ,+BAA+B,SAASK,EAAER,EAAEC,EAAE,CAACG,EAAE,KAAKJ,EAAEA,EAAE,YAAY,CAAC,EAAEK,EAAE,KAAK,CAACL,EAAEC,CAAC,CAAC,EAAEK,EAAEN,GAAGM,EAAEN,GAAGM,EAAEN,GAAG,IAAIC,EAAEA,CAAC,CAAC,EAAEA,EAAEM,EAAE,CAAC,CAAC,EAAEJ,EAAE,QAAQD,EAAEC,EAAE,gBAA2BH,EAAE,aAAb,UAAyBA,EAAE,QAAQG,EAAE,iBAAiBM,EAAET,EAAE,QAAQS,EAAE,EAAEN,EAAE,KAAKH,EAAE,MAAM,IAAI,CAAC,CAAC,CAAC,GDyBj5B,IAAAU,GAAO,SEzBP,IAAAC,GAAkB,WACZ,CACF,UAAAC,GACA,SAAAC,GACA,OAAAC,GACA,WAAAC,GACA,QAAAC,GACA,WAAAC,GACA,UAAAC,GACA,YAAAC,GACA,aAAAC,GACA,gBAAAC,
GACA,SAAAC,GACA,OAAAC,EACA,SAAAC,GACA,eAAAC,GACA,cAAAC,EACA,QAAAC,GACA,iBAAAC,GACA,iBAAAC,GACA,cAAAC,GACA,qBAAAC,GACA,aAAAC,GACA,gBAAAC,GACA,uBAAAC,GACA,uBAAAC,EACJ,EAAI,GAAAC,QCtBE,SAAUC,EAAWC,EAAU,CACnC,OAAO,OAAOA,GAAU,UAC1B,CCGM,SAAUC,GAAoBC,EAAgC,CAClE,IAAMC,EAAS,SAACC,EAAa,CAC3B,MAAM,KAAKA,CAAQ,EACnBA,EAAS,MAAQ,IAAI,MAAK,EAAG,KAC/B,EAEMC,EAAWH,EAAWC,CAAM,EAClC,OAAAE,EAAS,UAAY,OAAO,OAAO,MAAM,SAAS,EAClDA,EAAS,UAAU,YAAcA,EAC1BA,CACT,CCDO,IAAMC,GAA+CC,GAC1D,SAACC,EAAM,CACL,OAAA,SAA4CC,EAA0B,CACpED,EAAO,IAAI,EACX,KAAK,QAAUC,EACRA,EAAO,OAAM;EACxBA,EAAO,IAAI,SAACC,EAAKC,EAAC,CAAK,OAAGA,EAAI,EAAC,KAAKD,EAAI,SAAQ,CAAzB,CAA6B,EAAE,KAAK;GAAM,EACzD,GACJ,KAAK,KAAO,sBACZ,KAAK,OAASD,CAChB,CARA,CAQC,ECvBC,SAAUG,GAAaC,EAA6BC,EAAO,CAC/D,GAAID,EAAK,CACP,IAAME,EAAQF,EAAI,QAAQC,CAAI,EAC9B,GAAKC,GAASF,EAAI,OAAOE,EAAO,CAAC,EAErC,CCOA,IAAAC,GAAA,UAAA,CAyBE,SAAAA,EAAoBC,EAA4B,CAA5B,KAAA,gBAAAA,EAdb,KAAA,OAAS,GAER,KAAA,WAAmD,KAMnD,KAAA,YAAqD,IAMV,CAQnD,OAAAD,EAAA,UAAA,YAAA,UAAA,aACME,EAEJ,GAAI,CAAC,KAAK,OAAQ,CAChB,KAAK,OAAS,GAGN,IAAAC,EAAe,KAAI,WAC3B,GAAIA,EAEF,GADA,KAAK,WAAa,KACd,MAAM,QAAQA,CAAU,MAC1B,QAAqBC,EAAAC,GAAAF,CAAU,EAAAG,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAA5B,IAAMG,EAAMD,EAAA,MACfC,EAAO,OAAO,IAAI,yGAGpBJ,EAAW,OAAO,IAAI,EAIlB,IAAiBK,EAAqB,KAAI,gBAClD,GAAIC,EAAWD,CAAgB,EAC7B,GAAI,CACFA,EAAgB,QACTE,EAAP,CACAR,EAASQ,aAAaC,GAAsBD,EAAE,OAAS,CAACA,CAAC,EAIrD,IAAAE,EAAgB,KAAI,YAC5B,GAAIA,EAAa,CACf,KAAK,YAAc,SACnB,QAAwBC,EAAAR,GAAAO,CAAW,EAAAE,EAAAD,EAAA,KAAA,EAAA,CAAAC,EAAA,KAAAA,EAAAD,EAAA,KAAA,EAAE,CAAhC,IAAME,EAASD,EAAA,MAClB,GAAI,CACFE,GAAcD,CAAS,QAChBE,EAAP,CACAf,EAASA,GAAM,KAANA,EAAU,CAAA,EACfe,aAAeN,GACjBT,EAAMgB,EAAAA,EAAA,CAAA,EAAAC,EAAOjB,CAAM,CAAA,EAAAiB,EAAKF,EAAI,MAAM,CAAA,EAElCf,EAAO,KAAKe,CAAG,sGAMvB,GAAIf,EACF,MAAM,IAAIS,GAAoBT,CAAM,EAG1C,EAoBAF,EAAA,UAAA,IAAA,SAAIoB,EAAuB,OAGzB,GAAIA,GAAYA,IAAa,KAC3B,GAAI,KAAK,OAGPJ,GAAcI,CAAQ,MACjB,CACL,GAAIA,aAAoBpB,EAAc,CAGpC,GAAIoB,EAAS,QAAUA,EAAS,WAAW,IAAI,EAC7C,OAEFA,EAAS,WAAW,IAAI,GAEzB,KAAK,aAAcC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAAA,EAAI,CAAA,GAAI,KAAKD,CAAQ,EAG/D,EAOQpB,EAAA,UAAA,WAAR,SAAmBsB,EAAoB,CAC7B,IAAAnB,EAAe,KAAI,WAC3B,OAAOA,IAAemB,GAAW,MAAM,QAAQnB,CAAU,GAAKA,EAAW,SAASmB,CAAM,CAC1F,EASQtB,EAAA,UAAA,WAAR,SAAmBsB,EAAoB,CAC7B,IAAAnB,EAAe,KAAI,WAC3B,KAAK,WAAa,MAAM,QAAQA,CAAU,GAAKA,EAAW,KAAKmB,CAAM,EAAGnB,GAAcA,EAAa,CAACA,EAAYmB,CAAM,EAAIA,CAC5H,EAMQtB,EAAA,UAAA,cAAR,SAAsBsB,EAAoB,CAChC,IAAAnB,EAAe,KAAI,WACvBA,IAAemB,EACjB,KAAK,WAAa,KACT,MAAM,QAAQnB,CAAU,GACjCoB,GAAUpB,EAAYmB,CAAM,CAEhC,EAgBAtB,EAAA,UAAA,OAAA,SAAOoB,EAAsC,CACnC,IAAAR,EAAgB,KAAI,YAC5BA,GAAeW,GAAUX,EAAaQ,CAAQ,EAE1CA,aAAoBpB,GACtBoB,EAAS,cAAc,IAAI,CAE/B,EAlLcpB,EAAA,MAAS,UAAA,CACrB,IAAMwB,EAAQ,IAAIxB,EAClB,OAAAwB,EAAM,OAAS,GACRA,CACT,EAAE,EA+KJxB,GArLA,EAuLO,IAAMyB,GAAqBC,GAAa,MAEzC,SAAUC,GAAeC,EAAU,CACvC,OACEA,aAAiBF,IAChBE,GAAS,WAAYA,GAASC,EAAWD,EAAM,MAAM,GAAKC,EAAWD,EAAM,GAAG,GAAKC,EAAWD,EAAM,WAAW,CAEpH,CAEA,SAASE,GAAcC,EAAwC,CACzDF,EAAWE,CAAS,EACtBA,EAAS,EAETA,EAAU,YAAW,CAEzB,CChNO,IAAMC,GAAuB,CAClC,iBAAkB,KAClB,sBAAuB,KACvB,QAAS,OACT,sCAAuC,GACvC,yBAA0B,ICGrB,IAAMC,GAAmC,CAG9C,WAAA,SAAWC,EAAqBC,EAAgB,SAAEC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GACxC,IAAAC,EAAaL,GAAe,SACpC,OAAIK,GAAQ,MAARA,EAAU,WACLA,EAAS,WAAU,MAAnBA,EAAQC,EAAA,CAAYL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,EAE/C,WAAU,MAAA,OAAAG,EAAA,CAACL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,CAC7C,EACA,aAAA,SAAaK,EAAM,CACT,IAAAH,EAAaL,GAAe,SACpC,QAAQK,GAAQ,KAAA,OAARA,EAAU,eAAgB,cAAcG,CAAa,CAC/D,EACA,SAAU,QCjBN,SAAUC,GAAqBC,EAAQ,CAC3CC,GAAgB,WAAW,UAAA,CACjB,IAAAC,EAAqBC,GAAM,iB
ACnC,GAAID,EAEFA,EAAiBF,CAAG,MAGpB,OAAMA,CAEV,CAAC,CACH,CCtBM,SAAUI,IAAI,CAAK,CCMlB,IAAMC,GAAyB,UAAA,CAAM,OAAAC,GAAmB,IAAK,OAAW,MAAS,CAA5C,EAAsE,EAO5G,SAAUC,GAAkBC,EAAU,CAC1C,OAAOF,GAAmB,IAAK,OAAWE,CAAK,CACjD,CAOM,SAAUC,GAAoBC,EAAQ,CAC1C,OAAOJ,GAAmB,IAAKI,EAAO,MAAS,CACjD,CAQM,SAAUJ,GAAmBK,EAAuBD,EAAYF,EAAU,CAC9E,MAAO,CACL,KAAIG,EACJ,MAAKD,EACL,MAAKF,EAET,CCrCA,IAAII,GAAuD,KASrD,SAAUC,GAAaC,EAAc,CACzC,GAAIC,GAAO,sCAAuC,CAChD,IAAMC,EAAS,CAACJ,GAKhB,GAJII,IACFJ,GAAU,CAAE,YAAa,GAAO,MAAO,IAAI,GAE7CE,EAAE,EACEE,EAAQ,CACJ,IAAAC,EAAyBL,GAAvBM,EAAWD,EAAA,YAAEE,EAAKF,EAAA,MAE1B,GADAL,GAAU,KACNM,EACF,MAAMC,QAMVL,EAAE,CAEN,CAMM,SAAUM,GAAaC,EAAQ,CAC/BN,GAAO,uCAAyCH,KAClDA,GAAQ,YAAc,GACtBA,GAAQ,MAAQS,EAEpB,CCrBA,IAAAC,GAAA,SAAAC,EAAA,CAAmCC,GAAAF,EAAAC,CAAA,EA6BjC,SAAAD,EAAYG,EAA6C,CAAzD,IAAAC,EACEH,EAAA,KAAA,IAAA,GAAO,KATC,OAAAG,EAAA,UAAqB,GAUzBD,GACFC,EAAK,YAAcD,EAGfE,GAAeF,CAAW,GAC5BA,EAAY,IAAIC,CAAI,GAGtBA,EAAK,YAAcE,IAEvB,CAzBO,OAAAN,EAAA,OAAP,SAAiBO,EAAwBC,EAA2BC,EAAqB,CACvF,OAAO,IAAIC,GAAeH,EAAMC,EAAOC,CAAQ,CACjD,EAgCAT,EAAA,UAAA,KAAA,SAAKW,EAAS,CACR,KAAK,UACPC,GAA0BC,GAAiBF,CAAK,EAAG,IAAI,EAEvD,KAAK,MAAMA,CAAM,CAErB,EASAX,EAAA,UAAA,MAAA,SAAMc,EAAS,CACT,KAAK,UACPF,GAA0BG,GAAkBD,CAAG,EAAG,IAAI,GAEtD,KAAK,UAAY,GACjB,KAAK,OAAOA,CAAG,EAEnB,EAQAd,EAAA,UAAA,SAAA,UAAA,CACM,KAAK,UACPY,GAA0BI,GAAuB,IAAI,GAErD,KAAK,UAAY,GACjB,KAAK,UAAS,EAElB,EAEAhB,EAAA,UAAA,YAAA,UAAA,CACO,KAAK,SACR,KAAK,UAAY,GACjBC,EAAA,UAAM,YAAW,KAAA,IAAA,EACjB,KAAK,YAAc,KAEvB,EAEUD,EAAA,UAAA,MAAV,SAAgBW,EAAQ,CACtB,KAAK,YAAY,KAAKA,CAAK,CAC7B,EAEUX,EAAA,UAAA,OAAV,SAAiBc,EAAQ,CACvB,GAAI,CACF,KAAK,YAAY,MAAMA,CAAG,UAE1B,KAAK,YAAW,EAEpB,EAEUd,EAAA,UAAA,UAAV,UAAA,CACE,GAAI,CACF,KAAK,YAAY,SAAQ,UAEzB,KAAK,YAAW,EAEpB,EACFA,CAAA,EApHmCiB,EAAY,EA2H/C,IAAMC,GAAQ,SAAS,UAAU,KAEjC,SAASC,GAAyCC,EAAQC,EAAY,CACpE,OAAOH,GAAM,KAAKE,EAAIC,CAAO,CAC/B,CAMA,IAAAC,GAAA,UAAA,CACE,SAAAA,EAAoBC,EAAqC,CAArC,KAAA,gBAAAA,CAAwC,CAE5D,OAAAD,EAAA,UAAA,KAAA,SAAKE,EAAQ,CACH,IAAAD,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,KAClB,GAAI,CACFA,EAAgB,KAAKC,CAAK,QACnBC,EAAP,CACAC,GAAqBD,CAAK,EAGhC,EAEAH,EAAA,UAAA,MAAA,SAAMK,EAAQ,CACJ,IAAAJ,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,MAClB,GAAI,CACFA,EAAgB,MAAMI,CAAG,QAClBF,EAAP,CACAC,GAAqBD,CAAK,OAG5BC,GAAqBC,CAAG,CAE5B,EAEAL,EAAA,UAAA,SAAA,UAAA,CACU,IAAAC,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,SAClB,GAAI,CACFA,EAAgB,SAAQ,QACjBE,EAAP,CACAC,GAAqBD,CAAK,EAGhC,EACFH,CAAA,EArCA,EAuCAM,GAAA,SAAAC,EAAA,CAAuCC,GAAAF,EAAAC,CAAA,EACrC,SAAAD,EACEG,EACAN,EACAO,EAA8B,CAHhC,IAAAC,EAKEJ,EAAA,KAAA,IAAA,GAAO,KAEHN,EACJ,GAAIW,EAAWH,CAAc,GAAK,CAACA,EAGjCR,EAAkB,CAChB,KAAOQ,GAAc,KAAdA,EAAkB,OACzB,MAAON,GAAK,KAALA,EAAS,OAChB,SAAUO,GAAQ,KAARA,EAAY,YAEnB,CAEL,IAAIG,EACAF,GAAQG,GAAO,0BAIjBD,EAAU,OAAO,OAAOJ,CAAc,EACtCI,EAAQ,YAAc,UAAA,CAAM,OAAAF,EAAK,YAAW,CAAhB,EAC5BV,EAAkB,CAChB,KAAMQ,EAAe,MAAQZ,GAAKY,EAAe,KAAMI,CAAO,EAC9D,MAAOJ,EAAe,OAASZ,GAAKY,EAAe,MAAOI,CAAO,EACjE,SAAUJ,EAAe,UAAYZ,GAAKY,EAAe,SAAUI,CAAO,IAI5EZ,EAAkBQ,EAMtB,OAAAE,EAAK,YAAc,IAAIX,GAAiBC,CAAe,GACzD,CACF,OAAAK,CAAA,EAzCuCS,EAAU,EA2CjD,SAASC,GAAqBC,EAAU,CAClCC,GAAO,sCACTC,GAAaF,CAAK,EAIlBG,GAAqBH,CAAK,CAE9B,CAQA,SAASI,GAAoBC,EAAQ,CACnC,MAAMA,CACR,CAOA,SAASC,GAA0BC,EAA2CC,EAA2B,CAC/F,IAAAC,EAA0BR,GAAM,sBACxCQ,GAAyBC,GAAgB,WAAW,UAAA,CAAM,OAAAD,EAAsBF,EAAcC,CAAU,CAA9C,CAA+C,CAC3G,CAOO,IAAMG,GAA6D,CACxE,OAAQ,GACR,KAAMC,GACN,MAAOR,GACP,SAAUQ,ICjRL,IAAMC,GAA+B,UAAA,CAAM,OAAC,OAAO,QAAW,YAAc,OAAO,YAAe,cAAvD,EAAsE,ECyClH,SAAUC,GAAYC,EAAI,CAC9B,OAAOA,CACT,CCiCM,SAAUC,IAAI,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACnB,OAAOC,GAAcF,CAAG,CAC1B,CAGM,SAAUE,GAAoBF,EAA+B,CACjE,OAAIA,EAAI,SAAW,EACVG,GAGLH,EAAI,SA
AW,EACVA,EAAI,GAGN,SAAeI,EAAQ,CAC5B,OAAOJ,EAAI,OAAO,SAACK,EAAWC,EAAuB,CAAK,OAAAA,EAAGD,CAAI,CAAP,EAAUD,CAAY,CAClF,CACF,CC9EA,IAAAG,EAAA,UAAA,CAkBE,SAAAA,EAAYC,EAA6E,CACnFA,IACF,KAAK,WAAaA,EAEtB,CA4BA,OAAAD,EAAA,UAAA,KAAA,SAAQE,EAAyB,CAC/B,IAAMC,EAAa,IAAIH,EACvB,OAAAG,EAAW,OAAS,KACpBA,EAAW,SAAWD,EACfC,CACT,EA8IAH,EAAA,UAAA,UAAA,SACEI,EACAC,EACAC,EAA8B,CAHhC,IAAAC,EAAA,KAKQC,EAAaC,GAAaL,CAAc,EAAIA,EAAiB,IAAIM,GAAeN,EAAgBC,EAAOC,CAAQ,EAErH,OAAAK,GAAa,UAAA,CACL,IAAAC,EAAuBL,EAArBL,EAAQU,EAAA,SAAEC,EAAMD,EAAA,OACxBJ,EAAW,IACTN,EAGIA,EAAS,KAAKM,EAAYK,CAAM,EAChCA,EAIAN,EAAK,WAAWC,CAAU,EAG1BD,EAAK,cAAcC,CAAU,CAAC,CAEtC,CAAC,EAEMA,CACT,EAGUR,EAAA,UAAA,cAAV,SAAwBc,EAAmB,CACzC,GAAI,CACF,OAAO,KAAK,WAAWA,CAAI,QACpBC,EAAP,CAIAD,EAAK,MAAMC,CAAG,EAElB,EA6DAf,EAAA,UAAA,QAAA,SAAQgB,EAA0BC,EAAoC,CAAtE,IAAAV,EAAA,KACE,OAAAU,EAAcC,GAAeD,CAAW,EAEjC,IAAIA,EAAkB,SAACE,EAASC,EAAM,CAC3C,IAAMZ,EAAa,IAAIE,GAAkB,CACvC,KAAM,SAACW,EAAK,CACV,GAAI,CACFL,EAAKK,CAAK,QACHN,EAAP,CACAK,EAAOL,CAAG,EACVP,EAAW,YAAW,EAE1B,EACA,MAAOY,EACP,SAAUD,EACX,EACDZ,EAAK,UAAUC,CAAU,CAC3B,CAAC,CACH,EAGUR,EAAA,UAAA,WAAV,SAAqBQ,EAA2B,OAC9C,OAAOI,EAAA,KAAK,UAAM,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAUJ,CAAU,CAC1C,EAOAR,EAAA,UAACG,IAAD,UAAA,CACE,OAAO,IACT,EA4FAH,EAAA,UAAA,KAAA,UAAA,SAAKsB,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACH,OAAOC,GAAcF,CAAU,EAAE,IAAI,CACvC,EA6BAtB,EAAA,UAAA,UAAA,SAAUiB,EAAoC,CAA9C,IAAAV,EAAA,KACE,OAAAU,EAAcC,GAAeD,CAAW,EAEjC,IAAIA,EAAY,SAACE,EAASC,EAAM,CACrC,IAAIC,EACJd,EAAK,UACH,SAACkB,EAAI,CAAK,OAACJ,EAAQI,CAAT,EACV,SAACV,EAAQ,CAAK,OAAAK,EAAOL,CAAG,CAAV,EACd,UAAA,CAAM,OAAAI,EAAQE,CAAK,CAAb,CAAc,CAExB,CAAC,CACH,EA3aOrB,EAAA,OAAkC,SAAIC,EAAwD,CACnG,OAAO,IAAID,EAAcC,CAAS,CACpC,EA0aFD,GA/cA,EAwdA,SAAS0B,GAAeC,EAA+C,OACrE,OAAOC,EAAAD,GAAW,KAAXA,EAAeE,GAAO,WAAO,MAAAD,IAAA,OAAAA,EAAI,OAC1C,CAEA,SAASE,GAAcC,EAAU,CAC/B,OAAOA,GAASC,EAAWD,EAAM,IAAI,GAAKC,EAAWD,EAAM,KAAK,GAAKC,EAAWD,EAAM,QAAQ,CAChG,CAEA,SAASE,GAAgBF,EAAU,CACjC,OAAQA,GAASA,aAAiBG,IAAgBJ,GAAWC,CAAK,GAAKI,GAAeJ,CAAK,CAC7F,CC1eM,SAAUK,GAAQC,EAAW,CACjC,OAAOC,EAAWD,GAAM,KAAA,OAANA,EAAQ,IAAI,CAChC,CAMM,SAAUE,EACdC,EAAqF,CAErF,OAAO,SAACH,EAAqB,CAC3B,GAAID,GAAQC,CAAM,EAChB,OAAOA,EAAO,KAAK,SAA+BI,EAA2B,CAC3E,GAAI,CACF,OAAOD,EAAKC,EAAc,IAAI,QACvBC,EAAP,CACA,KAAK,MAAMA,CAAG,EAElB,CAAC,EAEH,MAAM,IAAI,UAAU,wCAAwC,CAC9D,CACF,CCjBM,SAAUC,EACdC,EACAC,EACAC,EACAC,EACAC,EAAuB,CAEvB,OAAO,IAAIC,GAAmBL,EAAaC,EAAQC,EAAYC,EAASC,CAAU,CACpF,CAMA,IAAAC,GAAA,SAAAC,EAAA,CAA2CC,GAAAF,EAAAC,CAAA,EAiBzC,SAAAD,EACEL,EACAC,EACAC,EACAC,EACQC,EACAI,EAAiC,CAN3C,IAAAC,EAoBEH,EAAA,KAAA,KAAMN,CAAW,GAAC,KAfV,OAAAS,EAAA,WAAAL,EACAK,EAAA,kBAAAD,EAeRC,EAAK,MAAQR,EACT,SAAuCS,EAAQ,CAC7C,GAAI,CACFT,EAAOS,CAAK,QACLC,EAAP,CACAX,EAAY,MAAMW,CAAG,EAEzB,EACAL,EAAA,UAAM,MACVG,EAAK,OAASN,EACV,SAAuCQ,EAAQ,CAC7C,GAAI,CACFR,EAAQQ,CAAG,QACJA,EAAP,CAEAX,EAAY,MAAMW,CAAG,UAGrB,KAAK,YAAW,EAEpB,EACAL,EAAA,UAAM,OACVG,EAAK,UAAYP,EACb,UAAA,CACE,GAAI,CACFA,EAAU,QACHS,EAAP,CAEAX,EAAY,MAAMW,CAAG,UAGrB,KAAK,YAAW,EAEpB,EACAL,EAAA,UAAM,WACZ,CAEA,OAAAD,EAAA,UAAA,YAAA,UAAA,OACE,GAAI,CAAC,KAAK,mBAAqB,KAAK,kBAAiB,EAAI,CAC/C,IAAAO,EAAW,KAAI,OACvBN,EAAA,UAAM,YAAW,KAAA,IAAA,EAEjB,CAACM,KAAUC,EAAA,KAAK,cAAU,MAAAA,IAAA,QAAAA,EAAA,KAAf,IAAI,GAEnB,EACFR,CAAA,EAnF2CS,EAAU,ECd9C,IAAMC,GAAiD,CAG5D,SAAA,SAASC,EAAQ,CACf,IAAIC,EAAU,sBACVC,EAAkD,qBAC9CC,EAAaJ,GAAsB,SACvCI,IACFF,EAAUE,EAAS,sBACnBD,EAASC,EAAS,sBAEpB,IAAMC,EAASH,EAAQ,SAACI,EAAS,CAI/BH,EAAS,OACTF,EAASK,CAAS,CACpB,CAAC,EACD,OAAO,IAAIC,GAAa,UAAA,CAAM,OAAAJ,GAAM,KAAA,OAANA,EAASE,CAAM,CAAf,CAAgB,CAChD,EACA,sBAAqB,UAAA,SAACG,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,G
AAA,UAAAA,GACZ,IAAAL,EAAaJ,GAAsB,SAC3C,QAAQI,GAAQ,KAAA,OAARA,EAAU,wBAAyB,uBAAsB,MAAA,OAAAM,EAAA,CAAA,EAAAC,EAAIH,CAAI,CAAA,CAAA,CAC3E,EACA,qBAAoB,UAAA,SAACA,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACX,IAAAL,EAAaJ,GAAsB,SAC3C,QAAQI,GAAQ,KAAA,OAARA,EAAU,uBAAwB,sBAAqB,MAAA,OAAAM,EAAA,CAAA,EAAAC,EAAIH,CAAI,CAAA,CAAA,CACzE,EACA,SAAU,QCrBL,IAAMI,GAAuDC,GAClE,SAACC,EAAM,CACL,OAAA,UAAoC,CAClCA,EAAO,IAAI,EACX,KAAK,KAAO,0BACZ,KAAK,QAAU,qBACjB,CAJA,CAIC,ECXL,IAAAC,EAAA,SAAAC,EAAA,CAAgCC,GAAAF,EAAAC,CAAA,EAwB9B,SAAAD,GAAA,CAAA,IAAAG,EAEEF,EAAA,KAAA,IAAA,GAAO,KAzBT,OAAAE,EAAA,OAAS,GAEDA,EAAA,iBAAyC,KAGjDA,EAAA,UAA2B,CAAA,EAE3BA,EAAA,UAAY,GAEZA,EAAA,SAAW,GAEXA,EAAA,YAAmB,MAenB,CAGA,OAAAH,EAAA,UAAA,KAAA,SAAQI,EAAwB,CAC9B,IAAMC,EAAU,IAAIC,GAAiB,KAAM,IAAI,EAC/C,OAAAD,EAAQ,SAAWD,EACZC,CACT,EAGUL,EAAA,UAAA,eAAV,UAAA,CACE,GAAI,KAAK,OACP,MAAM,IAAIO,EAEd,EAEAP,EAAA,UAAA,KAAA,SAAKQ,EAAQ,CAAb,IAAAL,EAAA,KACEM,GAAa,UAAA,SAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACdA,EAAK,mBACRA,EAAK,iBAAmB,MAAM,KAAKA,EAAK,SAAS,OAEnD,QAAuBO,EAAAC,GAAAR,EAAK,gBAAgB,EAAAS,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzC,IAAMG,EAAQD,EAAA,MACjBC,EAAS,KAAKL,CAAK,qGAGzB,CAAC,CACH,EAEAR,EAAA,UAAA,MAAA,SAAMc,EAAQ,CAAd,IAAAX,EAAA,KACEM,GAAa,UAAA,CAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACnBA,EAAK,SAAWA,EAAK,UAAY,GACjCA,EAAK,YAAcW,EAEnB,QADQC,EAAcZ,EAAI,UACnBY,EAAU,QACfA,EAAU,MAAK,EAAI,MAAMD,CAAG,EAGlC,CAAC,CACH,EAEAd,EAAA,UAAA,SAAA,UAAA,CAAA,IAAAG,EAAA,KACEM,GAAa,UAAA,CAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACnBA,EAAK,UAAY,GAEjB,QADQY,EAAcZ,EAAI,UACnBY,EAAU,QACfA,EAAU,MAAK,EAAI,SAAQ,EAGjC,CAAC,CACH,EAEAf,EAAA,UAAA,YAAA,UAAA,CACE,KAAK,UAAY,KAAK,OAAS,GAC/B,KAAK,UAAY,KAAK,iBAAmB,IAC3C,EAEA,OAAA,eAAIA,EAAA,UAAA,WAAQ,KAAZ,UAAA,OACE,QAAOgB,EAAA,KAAK,aAAS,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAS,CAClC,kCAGUhB,EAAA,UAAA,cAAV,SAAwBiB,EAAyB,CAC/C,YAAK,eAAc,EACZhB,EAAA,UAAM,cAAa,KAAA,KAACgB,CAAU,CACvC,EAGUjB,EAAA,UAAA,WAAV,SAAqBiB,EAAyB,CAC5C,YAAK,eAAc,EACnB,KAAK,wBAAwBA,CAAU,EAChC,KAAK,gBAAgBA,CAAU,CACxC,EAGUjB,EAAA,UAAA,gBAAV,SAA0BiB,EAA2B,CAArD,IAAAd,EAAA,KACQa,EAAqC,KAAnCE,EAAQF,EAAA,SAAEG,EAASH,EAAA,UAAED,EAASC,EAAA,UACtC,OAAIE,GAAYC,EACPC,IAET,KAAK,iBAAmB,KACxBL,EAAU,KAAKE,CAAU,EAClB,IAAII,GAAa,UAAA,CACtBlB,EAAK,iBAAmB,KACxBmB,GAAUP,EAAWE,CAAU,CACjC,CAAC,EACH,EAGUjB,EAAA,UAAA,wBAAV,SAAkCiB,EAA2B,CACrD,IAAAD,EAAuC,KAArCE,EAAQF,EAAA,SAAEO,EAAWP,EAAA,YAAEG,EAASH,EAAA,UACpCE,EACFD,EAAW,MAAMM,CAAW,EACnBJ,GACTF,EAAW,SAAQ,CAEvB,EAQAjB,EAAA,UAAA,aAAA,UAAA,CACE,IAAMwB,EAAkB,IAAIC,EAC5B,OAAAD,EAAW,OAAS,KACbA,CACT,EAxHOxB,EAAA,OAAkC,SAAI0B,EAA0BC,EAAqB,CAC1F,OAAO,IAAIrB,GAAoBoB,EAAaC,CAAM,CACpD,EAuHF3B,GA7IgCyB,CAAU,EAkJ1C,IAAAG,GAAA,SAAAC,EAAA,CAAyCC,GAAAF,EAAAC,CAAA,EACvC,SAAAD,EAESG,EACPC,EAAsB,CAHxB,IAAAC,EAKEJ,EAAA,KAAA,IAAA,GAAO,KAHA,OAAAI,EAAA,YAAAF,EAIPE,EAAK,OAASD,GAChB,CAEA,OAAAJ,EAAA,UAAA,KAAA,SAAKM,EAAQ,UACXC,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAI,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAGF,CAAK,CAChC,EAEAN,EAAA,UAAA,MAAA,SAAMS,EAAQ,UACZF,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,SAAK,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAGC,CAAG,CAC/B,EAEAT,EAAA,UAAA,SAAA,UAAA,UACEO,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,YAAQ,MAAAD,IAAA,QAAAA,EAAA,KAAAC,CAAA,CAC5B,EAGUR,EAAA,UAAA,WAAV,SAAqBU,EAAyB,SAC5C,OAAOH,GAAAC,EAAA,KAAK,UAAM,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAUE,CAAU,KAAC,MAAAH,IAAA,OAAAA,EAAII,EAC/C,EACFX,CAAA,EA1ByCY,CAAO,EC5JzC,IAAMC,GAA+C,CAC1D,IAAG,UAAA,CAGD,OAAQA,GAAsB,UAAY,MAAM,IAAG,CACrD,EACA,SAAU,QCwBZ,IAAAC,GAAA,SAAAC,EAAA,CAAsCC,GAAAF,EAAAC,CAAA,EAUpC,SAAAD,EACUG,EACAC,EACAC,EAA6D,CAF7DF,IAAA,SAAAA,
EAAA,KACAC,IAAA,SAAAA,EAAA,KACAC,IAAA,SAAAA,EAAAC,IAHV,IAAAC,EAKEN,EAAA,KAAA,IAAA,GAAO,KAJC,OAAAM,EAAA,YAAAJ,EACAI,EAAA,YAAAH,EACAG,EAAA,mBAAAF,EAZFE,EAAA,QAA0B,CAAA,EAC1BA,EAAA,oBAAsB,GAc5BA,EAAK,oBAAsBH,IAAgB,IAC3CG,EAAK,YAAc,KAAK,IAAI,EAAGJ,CAAW,EAC1CI,EAAK,YAAc,KAAK,IAAI,EAAGH,CAAW,GAC5C,CAEA,OAAAJ,EAAA,UAAA,KAAA,SAAKQ,EAAQ,CACL,IAAAC,EAA+E,KAA7EC,EAASD,EAAA,UAAEE,EAAOF,EAAA,QAAEG,EAAmBH,EAAA,oBAAEJ,EAAkBI,EAAA,mBAAEL,EAAWK,EAAA,YAC3EC,IACHC,EAAQ,KAAKH,CAAK,EAClB,CAACI,GAAuBD,EAAQ,KAAKN,EAAmB,IAAG,EAAKD,CAAW,GAE7E,KAAK,YAAW,EAChBH,EAAA,UAAM,KAAI,KAAA,KAACO,CAAK,CAClB,EAGUR,EAAA,UAAA,WAAV,SAAqBa,EAAyB,CAC5C,KAAK,eAAc,EACnB,KAAK,YAAW,EAQhB,QANMC,EAAe,KAAK,gBAAgBD,CAAU,EAE9CJ,EAAmC,KAAjCG,EAAmBH,EAAA,oBAAEE,EAAOF,EAAA,QAG9BM,EAAOJ,EAAQ,MAAK,EACjBK,EAAI,EAAGA,EAAID,EAAK,QAAU,CAACF,EAAW,OAAQG,GAAKJ,EAAsB,EAAI,EACpFC,EAAW,KAAKE,EAAKC,EAAO,EAG9B,YAAK,wBAAwBH,CAAU,EAEhCC,CACT,EAEQd,EAAA,UAAA,YAAR,UAAA,CACQ,IAAAS,EAAoE,KAAlEN,EAAWM,EAAA,YAAEJ,EAAkBI,EAAA,mBAAEE,EAAOF,EAAA,QAAEG,EAAmBH,EAAA,oBAK/DQ,GAAsBL,EAAsB,EAAI,GAAKT,EAK3D,GAJAA,EAAc,KAAYc,EAAqBN,EAAQ,QAAUA,EAAQ,OAAO,EAAGA,EAAQ,OAASM,CAAkB,EAIlH,CAACL,EAAqB,CAKxB,QAJMM,EAAMb,EAAmB,IAAG,EAC9Bc,EAAO,EAGFH,EAAI,EAAGA,EAAIL,EAAQ,QAAWA,EAAQK,IAAiBE,EAAKF,GAAK,EACxEG,EAAOH,EAETG,GAAQR,EAAQ,OAAO,EAAGQ,EAAO,CAAC,EAEtC,EACFnB,CAAA,EAzEsCoB,CAAO,EClB7C,IAAAC,GAAA,SAAAC,EAAA,CAA+BC,GAAAF,EAAAC,CAAA,EAC7B,SAAAD,EAAYG,EAAsBC,EAAmD,QACnFH,EAAA,KAAA,IAAA,GAAO,IACT,CAWO,OAAAD,EAAA,UAAA,SAAP,SAAgBK,EAAWC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAClB,IACT,EACFN,CAAA,EAjB+BO,EAAY,ECHpC,IAAMC,GAAqC,CAGhD,YAAA,SAAYC,EAAqBC,EAAgB,SAAEC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GACzC,IAAAC,EAAaL,GAAgB,SACrC,OAAIK,GAAQ,MAARA,EAAU,YACLA,EAAS,YAAW,MAApBA,EAAQC,EAAA,CAAaL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,EAEhD,YAAW,MAAA,OAAAG,EAAA,CAACL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,CAC9C,EACA,cAAA,SAAcK,EAAM,CACV,IAAAH,EAAaL,GAAgB,SACrC,QAAQK,GAAQ,KAAA,OAARA,EAAU,gBAAiB,eAAeG,CAAa,CACjE,EACA,SAAU,QCrBZ,IAAAC,GAAA,SAAAC,EAAA,CAAoCC,GAAAF,EAAAC,CAAA,EAOlC,SAAAD,EAAsBG,EAAqCC,EAAmD,CAA9G,IAAAC,EACEJ,EAAA,KAAA,KAAME,EAAWC,CAAI,GAAC,KADF,OAAAC,EAAA,UAAAF,EAAqCE,EAAA,KAAAD,EAFjDC,EAAA,QAAmB,IAI7B,CAEO,OAAAL,EAAA,UAAA,SAAP,SAAgBM,EAAWC,EAAiB,OAC1C,GADyBA,IAAA,SAAAA,EAAA,GACrB,KAAK,OACP,OAAO,KAIT,KAAK,MAAQD,EAEb,IAAME,EAAK,KAAK,GACVL,EAAY,KAAK,UAuBvB,OAAIK,GAAM,OACR,KAAK,GAAK,KAAK,eAAeL,EAAWK,EAAID,CAAK,GAKpD,KAAK,QAAU,GAEf,KAAK,MAAQA,EAEb,KAAK,IAAKE,EAAA,KAAK,MAAE,MAAAA,IAAA,OAAAA,EAAI,KAAK,eAAeN,EAAW,KAAK,GAAII,CAAK,EAE3D,IACT,EAEUP,EAAA,UAAA,eAAV,SAAyBG,EAA2BO,EAAmBH,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAC9DI,GAAiB,YAAYR,EAAU,MAAM,KAAKA,EAAW,IAAI,EAAGI,CAAK,CAClF,EAEUP,EAAA,UAAA,eAAV,SAAyBY,EAA4BJ,EAAkBD,EAAwB,CAE7F,GAFqEA,IAAA,SAAAA,EAAA,GAEjEA,GAAS,MAAQ,KAAK,QAAUA,GAAS,KAAK,UAAY,GAC5D,OAAOC,EAILA,GAAM,MACRG,GAAiB,cAAcH,CAAE,CAIrC,EAMOR,EAAA,UAAA,QAAP,SAAeM,EAAUC,EAAa,CACpC,GAAI,KAAK,OACP,OAAO,IAAI,MAAM,8BAA8B,EAGjD,KAAK,QAAU,GACf,IAAMM,EAAQ,KAAK,SAASP,EAAOC,CAAK,EACxC,GAAIM,EACF,OAAOA,EACE,KAAK,UAAY,IAAS,KAAK,IAAM,OAc9C,KAAK,GAAK,KAAK,eAAe,KAAK,UAAW,KAAK,GAAI,IAAI,EAE/D,EAEUb,EAAA,UAAA,SAAV,SAAmBM,EAAUQ,EAAc,CACzC,IAAIC,EAAmB,GACnBC,EACJ,GAAI,CACF,KAAK,KAAKV,CAAK,QACRW,EAAP,CACAF,EAAU,GAIVC,EAAaC,GAAQ,IAAI,MAAM,oCAAoC,EAErE,GAAIF,EACF,YAAK,YAAW,EACTC,CAEX,EAEAhB,EAAA,UAAA,YAAA,UAAA,CACE,GAAI,CAAC,KAAK,OAAQ,CACV,IAAAS,EAAoB,KAAlBD,EAAEC,EAAA,GAAEN,EAASM,EAAA,UACbS,EAAYf,EAAS,QAE7B,KAAK,KAAO,KAAK,MAAQ,KAAK,UAAY,KAC1C,KAAK,QAAU,GAEfgB,GAAUD,EAAS,IAAI,EACnBV,GAAM,OACR,KAAK,GAAK,KAAK,eAAeL,EAAWK,EAAI,IAAI,GAGnD,KAAK,MAAQ,KACbP,EAAA,UAAM,YAAW,KAAA,IAAA,EAErB,
EACFD,CAAA,EA9IoCoB,EAAM,ECgB1C,IAAAC,GAAA,UAAA,CAGE,SAAAA,EAAoBC,EAAoCC,EAAiC,CAAjCA,IAAA,SAAAA,EAAoBF,EAAU,KAAlE,KAAA,oBAAAC,EAClB,KAAK,IAAMC,CACb,CA6BO,OAAAF,EAAA,UAAA,SAAP,SAAmBG,EAAqDC,EAAmBC,EAAS,CAA5B,OAAAD,IAAA,SAAAA,EAAA,GAC/D,IAAI,KAAK,oBAAuB,KAAMD,CAAI,EAAE,SAASE,EAAOD,CAAK,CAC1E,EAnCcJ,EAAA,IAAoBM,GAAsB,IAoC1DN,GArCA,ECnBA,IAAAO,GAAA,SAAAC,EAAA,CAAoCC,GAAAF,EAAAC,CAAA,EAkBlC,SAAAD,EAAYG,EAAgCC,EAAiC,CAAjCA,IAAA,SAAAA,EAAoBC,GAAU,KAA1E,IAAAC,EACEL,EAAA,KAAA,KAAME,EAAiBC,CAAG,GAAC,KAlBtB,OAAAE,EAAA,QAAmC,CAAA,EAOnCA,EAAA,QAAmB,IAY1B,CAEO,OAAAN,EAAA,UAAA,MAAP,SAAaO,EAAwB,CAC3B,IAAAC,EAAY,KAAI,QAExB,GAAI,KAAK,QAAS,CAChBA,EAAQ,KAAKD,CAAM,EACnB,OAGF,IAAIE,EACJ,KAAK,QAAU,GAEf,EACE,IAAKA,EAAQF,EAAO,QAAQA,EAAO,MAAOA,EAAO,KAAK,EACpD,YAEMA,EAASC,EAAQ,MAAK,GAIhC,GAFA,KAAK,QAAU,GAEXC,EAAO,CACT,KAAQF,EAASC,EAAQ,MAAK,GAC5BD,EAAO,YAAW,EAEpB,MAAME,EAEV,EACFT,CAAA,EAhDoCK,EAAS,EC6CtC,IAAMK,GAAiB,IAAIC,GAAeC,EAAW,EAK/CC,GAAQH,GCjDrB,IAAAI,GAAA,SAAAC,EAAA,CAA6CC,GAAAF,EAAAC,CAAA,EAC3C,SAAAD,EAAsBG,EAA8CC,EAAmD,CAAvH,IAAAC,EACEJ,EAAA,KAAA,KAAME,EAAWC,CAAI,GAAC,KADF,OAAAC,EAAA,UAAAF,EAA8CE,EAAA,KAAAD,GAEpE,CAEU,OAAAJ,EAAA,UAAA,eAAV,SAAyBG,EAAoCG,EAAkBC,EAAiB,CAE9F,OAF6EA,IAAA,SAAAA,EAAA,GAEzEA,IAAU,MAAQA,EAAQ,EACrBN,EAAA,UAAM,eAAc,KAAA,KAACE,EAAWG,EAAIC,CAAK,GAGlDJ,EAAU,QAAQ,KAAK,IAAI,EAIpBA,EAAU,aAAeA,EAAU,WAAaK,GAAuB,sBAAsB,UAAA,CAAM,OAAAL,EAAU,MAAM,MAAS,CAAzB,CAA0B,GACtI,EAEUH,EAAA,UAAA,eAAV,SAAyBG,EAAoCG,EAAkBC,EAAiB,OAI9F,GAJ6EA,IAAA,SAAAA,EAAA,GAIzEA,GAAS,KAAOA,EAAQ,EAAI,KAAK,MAAQ,EAC3C,OAAON,EAAA,UAAM,eAAc,KAAA,KAACE,EAAWG,EAAIC,CAAK,EAK1C,IAAAE,EAAYN,EAAS,QACzBG,GAAM,QAAQI,EAAAD,EAAQA,EAAQ,OAAS,MAAE,MAAAC,IAAA,OAAA,OAAAA,EAAE,MAAOJ,IACpDE,GAAuB,qBAAqBF,CAAY,EACxDH,EAAU,WAAa,OAI3B,EACFH,CAAA,EApC6CW,EAAW,ECHxD,IAAAC,GAAA,SAAAC,EAAA,CAA6CC,GAAAF,EAAAC,CAAA,EAA7C,SAAAD,GAAA,+CAkCA,CAjCS,OAAAA,EAAA,UAAA,MAAP,SAAaG,EAAyB,CACpC,KAAK,QAAU,GAUf,IAAMC,EAAU,KAAK,WACrB,KAAK,WAAa,OAEV,IAAAC,EAAY,KAAI,QACpBC,EACJH,EAASA,GAAUE,EAAQ,MAAK,EAEhC,EACE,IAAKC,EAAQH,EAAO,QAAQA,EAAO,MAAOA,EAAO,KAAK,EACpD,aAEMA,EAASE,EAAQ,KAAOF,EAAO,KAAOC,GAAWC,EAAQ,MAAK,GAIxE,GAFA,KAAK,QAAU,GAEXC,EAAO,CACT,MAAQH,EAASE,EAAQ,KAAOF,EAAO,KAAOC,GAAWC,EAAQ,MAAK,GACpEF,EAAO,YAAW,EAEpB,MAAMG,EAEV,EACFN,CAAA,EAlC6CO,EAAc,ECgCpD,IAAMC,GAA0B,IAAIC,GAAwBC,EAAoB,EC8BhF,IAAMC,EAAQ,IAAIC,EAAkB,SAACC,EAAU,CAAK,OAAAA,EAAW,SAAQ,CAAnB,CAAqB,EC9D1E,SAAUC,GAAYC,EAAU,CACpC,OAAOA,GAASC,EAAWD,EAAM,QAAQ,CAC3C,CCDA,SAASE,GAAQC,EAAQ,CACvB,OAAOA,EAAIA,EAAI,OAAS,EAC1B,CAEM,SAAUC,GAAkBC,EAAW,CAC3C,OAAOC,EAAWJ,GAAKG,CAAI,CAAC,EAAIA,EAAK,IAAG,EAAK,MAC/C,CAEM,SAAUE,GAAaF,EAAW,CACtC,OAAOG,GAAYN,GAAKG,CAAI,CAAC,EAAIA,EAAK,IAAG,EAAK,MAChD,CAEM,SAAUI,GAAUJ,EAAaK,EAAoB,CACzD,OAAO,OAAOR,GAAKG,CAAI,GAAM,SAAWA,EAAK,IAAG,EAAMK,CACxD,CClBO,IAAMC,GAAe,SAAIC,EAAM,CAAwB,OAAAA,GAAK,OAAOA,EAAE,QAAW,UAAY,OAAOA,GAAM,UAAlD,ECMxD,SAAUC,GAAUC,EAAU,CAClC,OAAOC,EAAWD,GAAK,KAAA,OAALA,EAAO,IAAI,CAC/B,CCHM,SAAUE,GAAoBC,EAAU,CAC5C,OAAOC,EAAWD,EAAME,GAAkB,CAC5C,CCLM,SAAUC,GAAmBC,EAAQ,CACzC,OAAO,OAAO,eAAiBC,EAAWD,GAAG,KAAA,OAAHA,EAAM,OAAO,cAAc,CACvE,CCAM,SAAUE,GAAiCC,EAAU,CAEzD,OAAO,IAAI,UACT,iBACEA,IAAU,MAAQ,OAAOA,GAAU,SAAW,oBAAsB,IAAIA,EAAK,KAAG,0HACwC,CAE9H,CCXM,SAAUC,IAAiB,CAC/B,OAAI,OAAO,QAAW,YAAc,CAAC,OAAO,SACnC,aAGF,OAAO,QAChB,CAEO,IAAMC,GAAWD,GAAiB,ECJnC,SAAUE,GAAWC,EAAU,CACnC,OAAOC,EAAWD,GAAK,KAAA,OAALA,EAAQE,GAAgB,CAC5C,CCHM,SAAiBC,GAAsCC,EAAqC,mGAC1FC,EAASD,EAAe,UAAS,2DAGX,MAAA,CAAA,EAAAE,GAAMD,EAAO,KAAI,CAAE,CAAA,gBAArCE,EAAkBC,EAAA,KAAA,EAAhBC,EAAKF,EAAA,MAAEG,EAAIH,EAAA,KACfG,iBAAA,CAAA,EAAA,CAAA,SACF,MAAA,CAAA,EAAAF,EAAA,KAAA,CAAA,qBAEIC,CAAM,CAAA,SAAZ,MAAA,CAAA,EAAAD,EAAA,KA
AA,CAAA,SAAA,OAAAA,EAAA,KAAA,mCAGF,OAAAH,EAAO,YAAW,6BAIhB,SAAUM,GAAwBC,EAAQ,CAG9C,OAAOC,EAAWD,GAAG,KAAA,OAAHA,EAAK,SAAS,CAClC,CCPM,SAAUE,EAAaC,EAAyB,CACpD,GAAIA,aAAiBC,EACnB,OAAOD,EAET,GAAIA,GAAS,KAAM,CACjB,GAAIE,GAAoBF,CAAK,EAC3B,OAAOG,GAAsBH,CAAK,EAEpC,GAAII,GAAYJ,CAAK,EACnB,OAAOK,GAAcL,CAAK,EAE5B,GAAIM,GAAUN,CAAK,EACjB,OAAOO,GAAYP,CAAK,EAE1B,GAAIQ,GAAgBR,CAAK,EACvB,OAAOS,GAAkBT,CAAK,EAEhC,GAAIU,GAAWV,CAAK,EAClB,OAAOW,GAAaX,CAAK,EAE3B,GAAIY,GAAqBZ,CAAK,EAC5B,OAAOa,GAAuBb,CAAK,EAIvC,MAAMc,GAAiCd,CAAK,CAC9C,CAMM,SAAUG,GAAyBY,EAAQ,CAC/C,OAAO,IAAId,EAAW,SAACe,EAAyB,CAC9C,IAAMC,EAAMF,EAAIG,IAAkB,EAClC,GAAIC,EAAWF,EAAI,SAAS,EAC1B,OAAOA,EAAI,UAAUD,CAAU,EAGjC,MAAM,IAAI,UAAU,gEAAgE,CACtF,CAAC,CACH,CASM,SAAUX,GAAiBe,EAAmB,CAClD,OAAO,IAAInB,EAAW,SAACe,EAAyB,CAU9C,QAASK,EAAI,EAAGA,EAAID,EAAM,QAAU,CAACJ,EAAW,OAAQK,IACtDL,EAAW,KAAKI,EAAMC,EAAE,EAE1BL,EAAW,SAAQ,CACrB,CAAC,CACH,CAEM,SAAUT,GAAee,EAAuB,CACpD,OAAO,IAAIrB,EAAW,SAACe,EAAyB,CAC9CM,EACG,KACC,SAACC,EAAK,CACCP,EAAW,SACdA,EAAW,KAAKO,CAAK,EACrBP,EAAW,SAAQ,EAEvB,EACA,SAACQ,EAAQ,CAAK,OAAAR,EAAW,MAAMQ,CAAG,CAApB,CAAqB,EAEpC,KAAK,KAAMC,EAAoB,CACpC,CAAC,CACH,CAEM,SAAUd,GAAgBe,EAAqB,CACnD,OAAO,IAAIzB,EAAW,SAACe,EAAyB,aAC9C,QAAoBW,EAAAC,GAAAF,CAAQ,EAAAG,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzB,IAAMJ,EAAKM,EAAA,MAEd,GADAb,EAAW,KAAKO,CAAK,EACjBP,EAAW,OACb,yGAGJA,EAAW,SAAQ,CACrB,CAAC,CACH,CAEM,SAAUP,GAAqBqB,EAA+B,CAClE,OAAO,IAAI7B,EAAW,SAACe,EAAyB,CAC9Ce,GAAQD,EAAed,CAAU,EAAE,MAAM,SAACQ,EAAG,CAAK,OAAAR,EAAW,MAAMQ,CAAG,CAApB,CAAqB,CACzE,CAAC,CACH,CAEM,SAAUX,GAA0BmB,EAAqC,CAC7E,OAAOvB,GAAkBwB,GAAmCD,CAAc,CAAC,CAC7E,CAEA,SAAeD,GAAWD,EAAiCd,EAAyB,uIACxDkB,EAAAC,GAAAL,CAAa,gFAIrC,GAJeP,EAAKa,EAAA,MACpBpB,EAAW,KAAKO,CAAK,EAGjBP,EAAW,OACb,MAAA,CAAA,CAAA,6RAGJ,OAAAA,EAAW,SAAQ,WChHf,SAAUqB,GACdC,EACAC,EACAC,EACAC,EACAC,EAAc,CADdD,IAAA,SAAAA,EAAA,GACAC,IAAA,SAAAA,EAAA,IAEA,IAAMC,EAAuBJ,EAAU,SAAS,UAAA,CAC9CC,EAAI,EACAE,EACFJ,EAAmB,IAAI,KAAK,SAAS,KAAMG,CAAK,CAAC,EAEjD,KAAK,YAAW,CAEpB,EAAGA,CAAK,EAIR,GAFAH,EAAmB,IAAIK,CAAoB,EAEvC,CAACD,EAKH,OAAOC,CAEX,CCeM,SAAUC,GAAaC,EAA0BC,EAAS,CAAT,OAAAA,IAAA,SAAAA,EAAA,GAC9CC,EAAQ,SAACC,EAAQC,EAAU,CAChCD,EAAO,UACLE,EACED,EACA,SAACE,EAAK,CAAK,OAAAC,GAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,KAAKE,CAAK,CAArB,EAAwBL,CAAK,CAA1E,EACX,UAAA,CAAM,OAAAM,GAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,SAAQ,CAAnB,EAAuBH,CAAK,CAAzE,EACN,SAACO,EAAG,CAAK,OAAAD,GAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,MAAMI,CAAG,CAApB,EAAuBP,CAAK,CAAzE,CAA0E,CACpF,CAEL,CAAC,CACH,CCPM,SAAUQ,GAAeC,EAA0BC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAChDC,EAAQ,SAACC,EAAQC,EAAU,CAChCA,EAAW,IAAIJ,EAAU,SAAS,UAAA,CAAM,OAAAG,EAAO,UAAUC,CAAU,CAA3B,EAA8BH,CAAK,CAAC,CAC9E,CAAC,CACH,CC7DM,SAAUI,GAAsBC,EAA6BC,EAAwB,CACzF,OAAOC,EAAUF,CAAK,EAAE,KAAKG,GAAYF,CAAS,EAAGG,GAAUH,CAAS,CAAC,CAC3E,CCFM,SAAUI,GAAmBC,EAAuBC,EAAwB,CAChF,OAAOC,EAAUF,CAAK,EAAE,KAAKG,GAAYF,CAAS,EAAGG,GAAUH,CAAS,CAAC,CAC3E,CCJM,SAAUI,GAAiBC,EAAqBC,EAAwB,CAC5E,OAAO,IAAIC,EAAc,SAACC,EAAU,CAElC,IAAIC,EAAI,EAER,OAAOH,EAAU,SAAS,UAAA,CACpBG,IAAMJ,EAAM,OAGdG,EAAW,SAAQ,GAInBA,EAAW,KAAKH,EAAMI,IAAI,EAIrBD,EAAW,QACd,KAAK,SAAQ,EAGnB,CAAC,CACH,CAAC,CACH,CCfM,SAAUE,GAAoBC,EAAoBC,EAAwB,CAC9E,OAAO,IAAIC,EAAc,SAACC,EAAU,CAClC,IAAIC,EAKJ,OAAAC,GAAgBF,EAAYF,EAAW,UAAA,CAErCG,EAAYJ,EAAcI,IAAgB,EAE1CC,GACEF,EACAF,EACA,UAAA,OACMK,EACAC,EACJ,GAAI,CAEDC,EAAkBJ,EAAS,KAAI,EAA7BE,EAAKE,EAAA,MAAED,EAAIC,EAAA,WACPC,EAAP,CAEAN,EAAW,MAAMM,CAAG,EACpB,OAGEF,EAKFJ,EAAW,SAAQ,EAGnBA,EAAW,KAAKG,CAAK,CAEzB,EACA,EACA,EAAI,CAER,CAAC,EAMM,UAAA,CAAM,OAAAI,EAAWN,GAAQ,KAAA,OAARA,EAAU,MAAM,GAAKA,EAAS,OAAM,CAA/C,CACf,CAAC,CACH,CCvDM,SAAUO,GAAyBC,EAAyBC,EAAwB
,CACxF,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,yBAAyB,EAE3C,OAAO,IAAIE,EAAc,SAACC,EAAU,CAClCC,GAAgBD,EAAYF,EAAW,UAAA,CACrC,IAAMI,EAAWL,EAAM,OAAO,eAAc,EAC5CI,GACED,EACAF,EACA,UAAA,CACEI,EAAS,KAAI,EAAG,KAAK,SAACC,EAAM,CACtBA,EAAO,KAGTH,EAAW,SAAQ,EAEnBA,EAAW,KAAKG,EAAO,KAAK,CAEhC,CAAC,CACH,EACA,EACA,EAAI,CAER,CAAC,CACH,CAAC,CACH,CCzBM,SAAUC,GAA8BC,EAA8BC,EAAwB,CAClG,OAAOC,GAAsBC,GAAmCH,CAAK,EAAGC,CAAS,CACnF,CCoBM,SAAUG,GAAaC,EAA2BC,EAAwB,CAC9E,GAAID,GAAS,KAAM,CACjB,GAAIE,GAAoBF,CAAK,EAC3B,OAAOG,GAAmBH,EAAOC,CAAS,EAE5C,GAAIG,GAAYJ,CAAK,EACnB,OAAOK,GAAcL,EAAOC,CAAS,EAEvC,GAAIK,GAAUN,CAAK,EACjB,OAAOO,GAAgBP,EAAOC,CAAS,EAEzC,GAAIO,GAAgBR,CAAK,EACvB,OAAOS,GAAsBT,EAAOC,CAAS,EAE/C,GAAIS,GAAWV,CAAK,EAClB,OAAOW,GAAiBX,EAAOC,CAAS,EAE1C,GAAIW,GAAqBZ,CAAK,EAC5B,OAAOa,GAA2Bb,EAAOC,CAAS,EAGtD,MAAMa,GAAiCd,CAAK,CAC9C,CCoDM,SAAUe,GAAQC,EAA2BC,EAAyB,CAC1E,OAAOA,EAAYC,GAAUF,EAAOC,CAAS,EAAIE,EAAUH,CAAK,CAClE,CCxBM,SAAUI,GAAE,SAAIC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACpB,IAAMC,EAAYC,GAAaH,CAAI,EACnC,OAAOI,GAAKJ,EAAaE,CAAS,CACpC,CCsCM,SAAUG,GAAWC,EAA0BC,EAAyB,CAC5E,IAAMC,EAAeC,EAAWH,CAAmB,EAAIA,EAAsB,UAAA,CAAM,OAAAA,CAAA,EAC7EI,EAAO,SAACC,EAA6B,CAAK,OAAAA,EAAW,MAAMH,EAAY,CAAE,CAA/B,EAChD,OAAO,IAAII,EAAWL,EAAY,SAACI,EAAU,CAAK,OAAAJ,EAAU,SAASG,EAAa,EAAGC,CAAU,CAA7C,EAAiDD,CAAI,CACzG,CCrHM,SAAUG,GAAYC,EAAU,CACpC,OAAOA,aAAiB,MAAQ,CAAC,MAAMA,CAAY,CACrD,CCsCM,SAAUC,EAAUC,EAAyCC,EAAa,CAC9E,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAEhC,IAAIC,EAAQ,EAGZF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAQ,CAG5CH,EAAW,KAAKJ,EAAQ,KAAKC,EAASM,EAAOF,GAAO,CAAC,CACvD,CAAC,CAAC,CAEN,CAAC,CACH,CC1DQ,IAAAG,GAAY,MAAK,QAEzB,SAASC,GAAkBC,EAA6BC,EAAW,CAC/D,OAAOH,GAAQG,CAAI,EAAID,EAAE,MAAA,OAAAE,EAAA,CAAA,EAAAC,EAAIF,CAAI,CAAA,CAAA,EAAID,EAAGC,CAAI,CAChD,CAMM,SAAUG,GAAuBJ,EAA2B,CAC9D,OAAOK,EAAI,SAAAJ,EAAI,CAAI,OAAAF,GAAYC,EAAIC,CAAI,CAApB,CAAqB,CAC5C,CCfQ,IAAAK,GAAY,MAAK,QACjBC,GAA0D,OAAM,eAArCC,GAA+B,OAAM,UAAlBC,GAAY,OAAM,KAQlE,SAAUC,GAAqDC,EAAuB,CAC1F,GAAIA,EAAK,SAAW,EAAG,CACrB,IAAMC,EAAQD,EAAK,GACnB,GAAIL,GAAQM,CAAK,EACf,MAAO,CAAE,KAAMA,EAAO,KAAM,IAAI,EAElC,GAAIC,GAAOD,CAAK,EAAG,CACjB,IAAME,EAAOL,GAAQG,CAAK,EAC1B,MAAO,CACL,KAAME,EAAK,IAAI,SAACC,EAAG,CAAK,OAAAH,EAAMG,EAAN,CAAU,EAClC,KAAID,IAKV,MAAO,CAAE,KAAMH,EAAa,KAAM,IAAI,CACxC,CAEA,SAASE,GAAOG,EAAQ,CACtB,OAAOA,GAAO,OAAOA,GAAQ,UAAYT,GAAeS,CAAG,IAAMR,EACnE,CC7BM,SAAUS,GAAaC,EAAgBC,EAAa,CACxD,OAAOD,EAAK,OAAO,SAACE,EAAQC,EAAKC,EAAC,CAAK,OAAEF,EAAOC,GAAOF,EAAOG,GAAKF,CAA5B,EAAqC,CAAA,CAAS,CACvF,CCsMM,SAAUG,GAAa,SAAoCC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAC/D,IAAMC,EAAYC,GAAaH,CAAI,EAC7BI,EAAiBC,GAAkBL,CAAI,EAEvCM,EAA8BC,GAAqBP,CAAI,EAA/CQ,EAAWF,EAAA,KAAEG,EAAIH,EAAA,KAE/B,GAAIE,EAAY,SAAW,EAIzB,OAAOE,GAAK,CAAA,EAAIR,CAAgB,EAGlC,IAAMS,EAAS,IAAIC,EACjBC,GACEL,EACAN,EACAO,EAEI,SAACK,EAAM,CAAK,OAAAC,GAAaN,EAAMK,CAAM,CAAzB,EAEZE,EAAQ,CACb,EAGH,OAAOZ,EAAkBO,EAAO,KAAKM,GAAiBb,CAAc,CAAC,EAAsBO,CAC7F,CAEM,SAAUE,GACdL,EACAN,EACAgB,EAAiD,CAAjD,OAAAA,IAAA,SAAAA,EAAAF,IAEO,SAACG,EAA2B,CAGjCC,GACElB,EACA,UAAA,CAaE,QAZQmB,EAAWb,EAAW,OAExBM,EAAS,IAAI,MAAMO,CAAM,EAG3BC,EAASD,EAITE,EAAuBF,aAGlBG,EAAC,CACRJ,GACElB,EACA,UAAA,CACE,IAAMuB,EAASf,GAAKF,EAAYgB,GAAItB,CAAgB,EAChDwB,EAAgB,GACpBD,EAAO,UACLE,EACER,EACA,SAACS,EAAK,CAEJd,EAAOU,GAAKI,EACPF,IAEHA,EAAgB,GAChBH,KAEGA,GAGHJ,EAAW,KAAKD,EAAeJ,EAAO,MAAK,CAAE,CAAC,CAElD,EACA,UAAA,CACO,EAAEQ,GAGLH,EAAW,SAAQ,CAEvB,CAAC,CACF,CAEL,EACAA,CAAU,GAjCLK,EAAI,EAAGA,EAAIH,EAAQG,MAAnBA,CAAC,CAoCZ,EACAL,CAAU,CAEd,CACF,CAMA,SAASC,GAAclB,EAAsC2B,EAAqBC,EAA0B,CACtG5B,EACF6B,GAAgBD,EAAc5B,EAAW2B,CAAO,EAEhDA,EAAO,CAEX,CC3RM,SAAUG,GACdC,EACAC,EACAC,EACAC,E
ACAC,EACAC,EACAC,EACAC,EAAgC,CAGhC,IAAMC,EAAc,CAAA,EAEhBC,EAAS,EAETC,EAAQ,EAERC,EAAa,GAKXC,EAAgB,UAAA,CAIhBD,GAAc,CAACH,EAAO,QAAU,CAACC,GACnCR,EAAW,SAAQ,CAEvB,EAGMY,EAAY,SAACC,EAAQ,CAAK,OAACL,EAASN,EAAaY,EAAWD,CAAK,EAAIN,EAAO,KAAKM,CAAK,CAA5D,EAE1BC,EAAa,SAACD,EAAQ,CAI1BT,GAAUJ,EAAW,KAAKa,CAAY,EAItCL,IAKA,IAAIO,EAAgB,GAGpBC,EAAUf,EAAQY,EAAOJ,GAAO,CAAC,EAAE,UACjCQ,EACEjB,EACA,SAACkB,EAAU,CAGTf,GAAY,MAAZA,EAAee,CAAU,EAErBd,EAGFQ,EAAUM,CAAiB,EAG3BlB,EAAW,KAAKkB,CAAU,CAE9B,EACA,UAAA,CAGEH,EAAgB,EAClB,EAEA,OACA,UAAA,CAIE,GAAIA,EAKF,GAAI,CAIFP,IAKA,qBACE,IAAMW,EAAgBZ,EAAO,MAAK,EAI9BF,EACFe,GAAgBpB,EAAYK,EAAmB,UAAA,CAAM,OAAAS,EAAWK,CAAa,CAAxB,CAAyB,EAE9EL,EAAWK,CAAa,GARrBZ,EAAO,QAAUC,EAASN,OAYjCS,EAAa,QACNU,EAAP,CACArB,EAAW,MAAMqB,CAAG,EAG1B,CAAC,CACF,CAEL,EAGA,OAAAtB,EAAO,UACLkB,EAAyBjB,EAAYY,EAAW,UAAA,CAE9CF,EAAa,GACbC,EAAa,CACf,CAAC,CAAC,EAKG,UAAA,CACLL,GAAmB,MAAnBA,EAAmB,CACrB,CACF,CClEM,SAAUgB,GACdC,EACAC,EACAC,EAA6B,CAE7B,OAFAA,IAAA,SAAAA,EAAA,KAEIC,EAAWF,CAAc,EAEpBF,GAAS,SAACK,EAAGC,EAAC,CAAK,OAAAC,EAAI,SAACC,EAAQC,EAAU,CAAK,OAAAP,EAAeG,EAAGG,EAAGF,EAAGG,CAAE,CAA1B,CAA2B,EAAEC,EAAUT,EAAQI,EAAGC,CAAC,CAAC,CAAC,CAAjF,EAAoFH,CAAU,GAC/G,OAAOD,GAAmB,WACnCC,EAAaD,GAGRS,EAAQ,SAACC,EAAQC,EAAU,CAAK,OAAAC,GAAeF,EAAQC,EAAYZ,EAASE,CAAU,CAAtD,CAAuD,EAChG,CChCM,SAAUY,GAAyCC,EAA6B,CAA7B,OAAAA,IAAA,SAAAA,EAAA,KAChDC,GAASC,GAAUF,CAAU,CACtC,CCNM,SAAUG,IAAS,CACvB,OAAOC,GAAS,CAAC,CACnB,CCmDM,SAAUC,IAAM,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACrB,OAAOC,GAAS,EAAGC,GAAKH,EAAMI,GAAaJ,CAAI,CAAC,CAAC,CACnD,CC9DM,SAAUK,EAAsCC,EAA0B,CAC9E,OAAO,IAAIC,EAA+B,SAACC,EAAU,CACnDC,EAAUH,EAAiB,CAAE,EAAE,UAAUE,CAAU,CACrD,CAAC,CACH,CChDA,IAAME,GAA0B,CAAC,cAAe,gBAAgB,EAC1DC,GAAqB,CAAC,mBAAoB,qBAAqB,EAC/DC,GAAgB,CAAC,KAAM,KAAK,EA8N5B,SAAUC,EACdC,EACAC,EACAC,EACAC,EAAsC,CAMtC,GAJIC,EAAWF,CAAO,IACpBC,EAAiBD,EACjBA,EAAU,QAERC,EACF,OAAOJ,EAAaC,EAAQC,EAAWC,CAA+B,EAAE,KAAKG,GAAiBF,CAAc,CAAC,EAUzG,IAAAG,EAAAC,EAEJC,GAAcR,CAAM,EAChBH,GAAmB,IAAI,SAACY,EAAU,CAAK,OAAA,SAACC,EAAY,CAAK,OAAAV,EAAOS,GAAYR,EAAWS,EAASR,CAA+B,CAAtE,CAAlB,CAAyF,EAElIS,GAAwBX,CAAM,EAC5BJ,GAAwB,IAAIgB,GAAwBZ,EAAQC,CAAS,CAAC,EACtEY,GAA0Bb,CAAM,EAChCF,GAAc,IAAIc,GAAwBZ,EAAQC,CAAS,CAAC,EAC5D,CAAA,EAAE,CAAA,EATDa,EAAGR,EAAA,GAAES,EAAMT,EAAA,GAgBlB,GAAI,CAACQ,GACCE,GAAYhB,CAAM,EACpB,OAAOiB,GAAS,SAACC,EAAc,CAAK,OAAAnB,EAAUmB,EAAWjB,EAAWC,CAA+B,CAA/D,CAAgE,EAClGiB,EAAUnB,CAAM,CAAC,EAOvB,GAAI,CAACc,EACH,MAAM,IAAI,UAAU,sBAAsB,EAG5C,OAAO,IAAIM,EAAc,SAACC,EAAU,CAIlC,IAAMX,EAAU,UAAA,SAACY,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAAmB,OAAAF,EAAW,KAAK,EAAIC,EAAK,OAASA,EAAOA,EAAK,EAAE,CAAhD,EAEpC,OAAAR,EAAIJ,CAAO,EAEJ,UAAA,CAAM,OAAAK,EAAQL,CAAO,CAAf,CACf,CAAC,CACH,CASA,SAASE,GAAwBZ,EAAaC,EAAiB,CAC7D,OAAO,SAACQ,EAAkB,CAAK,OAAA,SAACC,EAAY,CAAK,OAAAV,EAAOS,GAAYR,EAAWS,CAAO,CAArC,CAAlB,CACjC,CAOA,SAASC,GAAwBX,EAAW,CAC1C,OAAOI,EAAWJ,EAAO,WAAW,GAAKI,EAAWJ,EAAO,cAAc,CAC3E,CAOA,SAASa,GAA0Bb,EAAW,CAC5C,OAAOI,EAAWJ,EAAO,EAAE,GAAKI,EAAWJ,EAAO,GAAG,CACvD,CAOA,SAASQ,GAAcR,EAAW,CAChC,OAAOI,EAAWJ,EAAO,gBAAgB,GAAKI,EAAWJ,EAAO,mBAAmB,CACrF,CC/LM,SAAUwB,GACdC,EACAC,EACAC,EAAsC,CAEtC,OAAIA,EACKH,GAAoBC,EAAYC,CAAa,EAAE,KAAKE,GAAiBD,CAAc,CAAC,EAGtF,IAAIE,EAAoB,SAACC,EAAU,CACxC,IAAMC,EAAU,UAAA,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAAc,OAAAH,EAAW,KAAKE,EAAE,SAAW,EAAIA,EAAE,GAAKA,CAAC,CAAzC,EACzBE,EAAWT,EAAWM,CAAO,EACnC,OAAOI,EAAWT,CAAa,EAAI,UAAA,CAAM,OAAAA,EAAcK,EAASG,CAAQ,CAA/B,EAAmC,MAC9E,CAAC,CACH,CCtBM,SAAUE,GACdC,EACAC,EACAC,EAAyC,CAFzCF,IAAA,SAAAA,EAAA,GAEAE,IAAA,SAAAA,EAAAC,IAIA,IAAIC,EAAmB,GAEvB,OAAIH,GA
AuB,OAIrBI,GAAYJ,CAAmB,EACjCC,EAAYD,EAIZG,EAAmBH,GAIhB,IAAIK,EAAW,SAACC,EAAU,CAI/B,IAAIC,EAAMC,GAAYT,CAAO,EAAI,CAACA,EAAUE,EAAW,IAAG,EAAKF,EAE3DQ,EAAM,IAERA,EAAM,GAIR,IAAIE,EAAI,EAGR,OAAOR,EAAU,SAAS,UAAA,CACnBK,EAAW,SAEdA,EAAW,KAAKG,GAAG,EAEf,GAAKN,EAGP,KAAK,SAAS,OAAWA,CAAgB,EAGzCG,EAAW,SAAQ,EAGzB,EAAGC,CAAG,CACR,CAAC,CACH,CChGM,SAAUG,GAAK,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACpB,IAAMC,EAAYC,GAAaH,CAAI,EAC7BI,EAAaC,GAAUL,EAAM,GAAQ,EACrCM,EAAUN,EAChB,OAAQM,EAAQ,OAGZA,EAAQ,SAAW,EAEnBC,EAAUD,EAAQ,EAAE,EAEpBE,GAASJ,CAAU,EAAEK,GAAKH,EAASJ,CAAS,CAAC,EAL7CQ,CAMN,CCjEO,IAAMC,GAAQ,IAAIC,EAAkBC,EAAI,ECpCvC,IAAAC,GAAY,MAAK,QAMnB,SAAUC,GAAkBC,EAAiB,CACjD,OAAOA,EAAK,SAAW,GAAKF,GAAQE,EAAK,EAAE,EAAIA,EAAK,GAAMA,CAC5D,CCoDM,SAAUC,EAAUC,EAAiDC,EAAa,CACtF,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAEhC,IAAIC,EAAQ,EAIZF,EAAO,UAILG,EAAyBF,EAAY,SAACG,EAAK,CAAK,OAAAP,EAAU,KAAKC,EAASM,EAAOF,GAAO,GAAKD,EAAW,KAAKG,CAAK,CAAhE,CAAiE,CAAC,CAEtH,CAAC,CACH,CCxBM,SAAUC,IAAG,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAClB,IAAMC,EAAiBC,GAAkBH,CAAI,EAEvCI,EAAUC,GAAeL,CAAI,EAEnC,OAAOI,EAAQ,OACX,IAAIE,EAAsB,SAACC,EAAU,CAGnC,IAAIC,EAAuBJ,EAAQ,IAAI,UAAA,CAAM,MAAA,CAAA,CAAA,CAAE,EAK3CK,EAAYL,EAAQ,IAAI,UAAA,CAAM,MAAA,EAAA,CAAK,EAGvCG,EAAW,IAAI,UAAA,CACbC,EAAUC,EAAY,IACxB,CAAC,EAKD,mBAASC,EAAW,CAClBC,EAAUP,EAAQM,EAAY,EAAE,UAC9BE,EACEL,EACA,SAACM,EAAK,CAKJ,GAJAL,EAAQE,GAAa,KAAKG,CAAK,EAI3BL,EAAQ,MAAM,SAACM,EAAM,CAAK,OAAAA,EAAO,MAAP,CAAa,EAAG,CAC5C,IAAMC,EAAcP,EAAQ,IAAI,SAACM,EAAM,CAAK,OAAAA,EAAO,MAAK,CAAZ,CAAe,EAE3DP,EAAW,KAAKL,EAAiBA,EAAc,MAAA,OAAAc,EAAA,CAAA,EAAAC,EAAIF,CAAM,CAAA,CAAA,EAAIA,CAAM,EAI/DP,EAAQ,KAAK,SAACM,EAAQI,EAAC,CAAK,MAAA,CAACJ,EAAO,QAAUL,EAAUS,EAA5B,CAA8B,GAC5DX,EAAW,SAAQ,EAGzB,EACA,UAAA,CAGEE,EAAUC,GAAe,GAIzB,CAACF,EAAQE,GAAa,QAAUH,EAAW,SAAQ,CACrD,CAAC,CACF,GA9BIG,EAAc,EAAG,CAACH,EAAW,QAAUG,EAAcN,EAAQ,OAAQM,MAArEA,CAAW,EAmCpB,OAAO,UAAA,CACLF,EAAUC,EAAY,IACxB,CACF,CAAC,EACDU,CACN,CC9DM,SAAUC,GAASC,EAAoD,CAC3E,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAW,GACXC,EAAsB,KACtBC,EAA6C,KAC7CC,EAAa,GAEXC,EAAc,UAAA,CAGlB,GAFAF,GAAkB,MAAlBA,EAAoB,YAAW,EAC/BA,EAAqB,KACjBF,EAAU,CACZA,EAAW,GACX,IAAMK,EAAQJ,EACdA,EAAY,KACZF,EAAW,KAAKM,CAAK,EAEvBF,GAAcJ,EAAW,SAAQ,CACnC,EAEMO,EAAkB,UAAA,CACtBJ,EAAqB,KACrBC,GAAcJ,EAAW,SAAQ,CACnC,EAEAD,EAAO,UACLS,EACER,EACA,SAACM,EAAK,CACJL,EAAW,GACXC,EAAYI,EACPH,GACHM,EAAUZ,EAAiBS,CAAK,CAAC,EAAE,UAChCH,EAAqBK,EAAyBR,EAAYK,EAAaE,CAAe,CAAE,CAG/F,EACA,UAAA,CACEH,EAAa,IACZ,CAACH,GAAY,CAACE,GAAsBA,EAAmB,SAAWH,EAAW,SAAQ,CACxF,CAAC,CACF,CAEL,CAAC,CACH,CC3CM,SAAUU,GAAaC,EAAkBC,EAAyC,CAAzC,OAAAA,IAAA,SAAAA,EAAAC,IACtCC,GAAM,UAAA,CAAM,OAAAC,GAAMJ,EAAUC,CAAS,CAAzB,CAA0B,CAC/C,CCEM,SAAUI,GAAeC,EAAoBC,EAAsC,CAAtC,OAAAA,IAAA,SAAAA,EAAA,MAGjDA,EAAmBA,GAAgB,KAAhBA,EAAoBD,EAEhCE,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAiB,CAAA,EACjBC,EAAQ,EAEZH,EAAO,UACLI,EACEH,EACA,SAACI,EAAK,aACAC,EAAuB,KAKvBH,IAAUL,IAAsB,GAClCI,EAAQ,KAAK,CAAA,CAAE,MAIjB,QAAqBK,EAAAC,GAAAN,CAAO,EAAAO,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzB,IAAMG,EAAMD,EAAA,MACfC,EAAO,KAAKL,CAAK,EAMbR,GAAca,EAAO,SACvBJ,EAASA,GAAM,KAANA,EAAU,CAAA,EACnBA,EAAO,KAAKI,CAAM,qGAItB,GAAIJ,MAIF,QAAqBK,EAAAH,GAAAF,CAAM,EAAAM,EAAAD,EAAA,KAAA,EAAA,CAAAC,EAAA,KAAAA,EAAAD,EAAA,KAAA,EAAE,CAAxB,IAAMD,EAAME,EAAA,MACfC,GAAUX,EAASQ,CAAM,EACzBT,EAAW,KAAKS,CAAM,oGAG5B,EACA,UAAA,aAGE,QAAqBI,EAAAN,GAAAN,CAAO,EAAAa,EAAAD,EAAA,KAAA,EAAA,CAAAC,EAAA,KAAAA,EAAAD,EAAA,KAAA,EAAE,CAAzB,IAAMJ,EAAMK,EAAA,MACfd,EAAW,KAAKS,CAAM,oGAExBT,EAAW,SAAQ,CACrB,EAEA,OACA,UAAA,CAEEC,EAAU,IACZ,CAAC,CACF,CAEL,CAAC,CACH,CCbM,SAAUc,GACdC,
EAAgD,CAEhD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAgC,KAChCC,EAAY,GACZC,EAEJF,EAAWF,EAAO,UAChBK,EAAyBJ,EAAY,OAAW,OAAW,SAACK,EAAG,CAC7DF,EAAgBG,EAAUT,EAASQ,EAAKT,GAAWC,CAAQ,EAAEE,CAAM,CAAC,CAAC,EACjEE,GACFA,EAAS,YAAW,EACpBA,EAAW,KACXE,EAAc,UAAUH,CAAU,GAIlCE,EAAY,EAEhB,CAAC,CAAC,EAGAA,IAMFD,EAAS,YAAW,EACpBA,EAAW,KACXE,EAAe,UAAUH,CAAU,EAEvC,CAAC,CACH,CC/HM,SAAUO,GACdC,EACAC,EACAC,EACAC,EACAC,EAAqC,CAErC,OAAO,SAACC,EAAuBC,EAA2B,CAIxD,IAAIC,EAAWL,EAIXM,EAAaP,EAEbQ,EAAQ,EAGZJ,EAAO,UACLK,EACEJ,EACA,SAACK,EAAK,CAEJ,IAAMC,EAAIH,IAEVD,EAAQD,EAEJP,EAAYQ,EAAOG,EAAOC,CAAC,GAIzBL,EAAW,GAAOI,GAGxBR,GAAcG,EAAW,KAAKE,CAAK,CACrC,EAGAJ,GACG,UAAA,CACCG,GAAYD,EAAW,KAAKE,CAAK,EACjCF,EAAW,SAAQ,CACrB,CAAE,CACL,CAEL,CACF,CCnCM,SAAUO,IAAa,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAClC,IAAMC,EAAiBC,GAAkBH,CAAI,EAC7C,OAAOE,EACHE,GAAKL,GAAa,MAAA,OAAAM,EAAA,CAAA,EAAAC,EAAKN,CAAoC,CAAA,CAAA,EAAGO,GAAiBL,CAAc,CAAC,EAC9FM,EAAQ,SAACC,EAAQC,EAAU,CACzBC,GAAiBN,EAAA,CAAEI,CAAM,EAAAH,EAAKM,GAAeZ,CAAI,CAAC,CAAA,CAAA,EAAGU,CAAU,CACjE,CAAC,CACP,CCUM,SAAUG,IAAiB,SAC/BC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAEA,OAAOC,GAAa,MAAA,OAAAC,EAAA,CAAA,EAAAC,EAAIJ,CAAY,CAAA,CAAA,CACtC,CC+BM,SAAUK,GACdC,EACAC,EAA6G,CAE7G,OAAOC,EAAWD,CAAc,EAAIE,GAASH,EAASC,EAAgB,CAAC,EAAIE,GAASH,EAAS,CAAC,CAChG,CCpBM,SAAUI,GAAgBC,EAAiBC,EAAyC,CAAzC,OAAAA,IAAA,SAAAA,EAAAC,IACxCC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAkC,KAClCC,EAAsB,KACtBC,EAA0B,KAExBC,EAAO,UAAA,CACX,GAAIH,EAAY,CAEdA,EAAW,YAAW,EACtBA,EAAa,KACb,IAAMI,EAAQH,EACdA,EAAY,KACZF,EAAW,KAAKK,CAAK,EAEzB,EACA,SAASC,GAAY,CAInB,IAAMC,EAAaJ,EAAYR,EACzBa,EAAMZ,EAAU,IAAG,EACzB,GAAIY,EAAMD,EAAY,CAEpBN,EAAa,KAAK,SAAS,OAAWM,EAAaC,CAAG,EACtDR,EAAW,IAAIC,CAAU,EACzB,OAGFG,EAAI,CACN,CAEAL,EAAO,UACLU,EACET,EACA,SAACK,EAAQ,CACPH,EAAYG,EACZF,EAAWP,EAAU,IAAG,EAGnBK,IACHA,EAAaL,EAAU,SAASU,EAAcX,CAAO,EACrDK,EAAW,IAAIC,CAAU,EAE7B,EACA,UAAA,CAGEG,EAAI,EACJJ,EAAW,SAAQ,CACrB,EAEA,OACA,UAAA,CAEEE,EAAYD,EAAa,IAC3B,CAAC,CACF,CAEL,CAAC,CACH,CCpFM,SAAUS,GAAqBC,EAAe,CAClD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAW,GACfF,EAAO,UACLG,EACEF,EACA,SAACG,EAAK,CACJF,EAAW,GACXD,EAAW,KAAKG,CAAK,CACvB,EACA,UAAA,CACOF,GACHD,EAAW,KAAKH,CAAa,EAE/BG,EAAW,SAAQ,CACrB,CAAC,CACF,CAEL,CAAC,CACH,CCXM,SAAUI,GAAQC,EAAa,CACnC,OAAOA,GAAS,EAEZ,UAAA,CAAM,OAAAC,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CACzB,IAAIC,EAAO,EACXF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAK,CAIrC,EAAEF,GAAQL,IACZI,EAAW,KAAKG,CAAK,EAIjBP,GAASK,GACXD,EAAW,SAAQ,EAGzB,CAAC,CAAC,CAEN,CAAC,CACP,CC9BM,SAAUI,IAAc,CAC5B,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCD,EAAO,UAAUE,EAAyBD,EAAYE,EAAI,CAAC,CAC7D,CAAC,CACH,CCCM,SAAUC,GAASC,EAAQ,CAC/B,OAAOC,EAAI,UAAA,CAAM,OAAAD,CAAA,CAAK,CACxB,CCyCM,SAAUE,GACdC,EACAC,EAAmC,CAEnC,OAAIA,EAEK,SAACC,EAAqB,CAC3B,OAAAC,GAAOF,EAAkB,KAAKG,GAAK,CAAC,EAAGC,GAAc,CAAE,EAAGH,EAAO,KAAKH,GAAUC,CAAqB,CAAC,CAAC,CAAvG,EAGGM,GAAS,SAACC,EAAOC,EAAK,CAAK,OAAAR,EAAsBO,EAAOC,CAAK,EAAE,KAAKJ,GAAK,CAAC,EAAGK,GAAMF,CAAK,CAAC,CAA9D,CAA+D,CACnG,CCtCM,SAAUG,GAASC,EAAoBC,EAAyC,CAAzCA,IAAA,SAAAA,EAAAC,IAC3C,IAAMC,EAAWC,GAAMJ,EAAKC,CAAS,EACrC,OAAOI,GAAU,UAAA,CAAM,OAAAF,CAAA,CAAQ,CACjC,CC0EM,SAAUG,EACdC,EACAC,EAA0D,CAA1D,OAAAA,IAAA,SAAAA,EAA+BC,IAK/BF,EAAaA,GAAU,KAAVA,EAAcG,GAEpBC,EAAQ,SAACC,EAAQC,EAAU,CAGhC,IAAIC,EAEAC,EAAQ,GAEZH,EAAO,UACLI,EAAyBH,EAAY,SAACI,EAAK,CAEzC,IAAMC,EAAaV,EAAYS,CAAK,GAKhCF,GAAS,CAACR,EAAYO,EAAaI,CAAU,KAM/CH,EAAQ,GACRD,EAAcI,EAGdL,EAAW,KAAKI,CAAK,EAEzB,CAAC,CAAC,CAEN,CAAC,CACH,CAEA,SAASP,GAAeS,EAAQC,EAAM,CACpC,OAAOD,IAAMC,CACf,CCjHM,SAAUC,EAA8CC,EAAQC,EAAuC,CAC3G,OAAOC,EAAqB,SAACC,EAAMC,EAAI,CAAK,OAAAH,EAAUA,EAAQE,EAAEH,GAAMI,
EAAEJ,EAAI,EAAIG,EAAEH,KAASI,EAAEJ,EAAjD,CAAqD,CACnG,CCLM,SAAUK,IAAO,SAAIC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACzB,OAAO,SAACC,EAAqB,CAAK,OAAAC,GAAOD,EAAQE,EAAE,MAAA,OAAAC,EAAA,CAAA,EAAAC,EAAIN,CAAM,CAAA,CAAA,CAAA,CAA3B,CACpC,CCHM,SAAUO,EAAYC,EAAoB,CAC9C,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAGhC,GAAI,CACFD,EAAO,UAAUC,CAAU,UAE3BA,EAAW,IAAIH,CAAQ,EAE3B,CAAC,CACH,CC9BM,SAAUI,GAAYC,EAAa,CACvC,OAAOA,GAAS,EACZ,UAAA,CAAM,OAAAC,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CAKzB,IAAIC,EAAc,CAAA,EAClBF,EAAO,UACLG,EACEF,EACA,SAACG,EAAK,CAEJF,EAAO,KAAKE,CAAK,EAGjBP,EAAQK,EAAO,QAAUA,EAAO,MAAK,CACvC,EACA,UAAA,aAGE,QAAoBG,EAAAC,GAAAJ,CAAM,EAAAK,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAvB,IAAMD,EAAKG,EAAA,MACdN,EAAW,KAAKG,CAAK,oGAEvBH,EAAW,SAAQ,CACrB,EAEA,OACA,UAAA,CAEEC,EAAS,IACX,CAAC,CACF,CAEL,CAAC,CACP,CC1DM,SAAUM,IAAK,SAAIC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACvB,IAAMC,EAAYC,GAAaH,CAAI,EAC7BI,EAAaC,GAAUL,EAAM,GAAQ,EAC3C,OAAAA,EAAOM,GAAeN,CAAI,EAEnBO,EAAQ,SAACC,EAAQC,EAAU,CAChCC,GAASN,CAAU,EAAEO,GAAIC,EAAA,CAAEJ,CAAM,EAAAK,EAAMb,CAA6B,CAAA,EAAGE,CAAS,CAAC,EAAE,UAAUO,CAAU,CACzG,CAAC,CACH,CCcM,SAAUK,IAAS,SACvBC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAEA,OAAOC,GAAK,MAAA,OAAAC,EAAA,CAAA,EAAAC,EAAIJ,CAAY,CAAA,CAAA,CAC9B,CCmEM,SAAUK,GAAUC,EAAqC,OACzDC,EAAQ,IACRC,EAEJ,OAAIF,GAAiB,OACf,OAAOA,GAAkB,UACxBG,EAA4BH,EAAa,MAAzCC,EAAKE,IAAA,OAAG,IAAQA,EAAED,EAAUF,EAAa,OAE5CC,EAAQD,GAILC,GAAS,EACZ,UAAA,CAAM,OAAAG,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CACzB,IAAIC,EAAQ,EACRC,EAEEC,EAAc,UAAA,CAGlB,GAFAD,GAAS,MAATA,EAAW,YAAW,EACtBA,EAAY,KACRP,GAAS,KAAM,CACjB,IAAMS,EAAW,OAAOT,GAAU,SAAWU,GAAMV,CAAK,EAAIW,EAAUX,EAAMM,CAAK,CAAC,EAC5EM,EAAqBC,EAAyBR,EAAY,UAAA,CAC9DO,EAAmB,YAAW,EAC9BE,EAAiB,CACnB,CAAC,EACDL,EAAS,UAAUG,CAAkB,OAErCE,EAAiB,CAErB,EAEMA,EAAoB,UAAA,CACxB,IAAIC,EAAY,GAChBR,EAAYH,EAAO,UACjBS,EAAyBR,EAAY,OAAW,UAAA,CAC1C,EAAEC,EAAQP,EACRQ,EACFC,EAAW,EAEXO,EAAY,GAGdV,EAAW,SAAQ,CAEvB,CAAC,CAAC,EAGAU,GACFP,EAAW,CAEf,EAEAM,EAAiB,CACnB,CAAC,CACP,CC7HM,SAAUE,GAAUC,EAAyB,CACjD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAW,GACXC,EAAsB,KAC1BH,EAAO,UACLI,EAAyBH,EAAY,SAACI,EAAK,CACzCH,EAAW,GACXC,EAAYE,CACd,CAAC,CAAC,EAEJP,EAAS,UACPM,EACEH,EACA,UAAA,CACE,GAAIC,EAAU,CACZA,EAAW,GACX,IAAMG,EAAQF,EACdA,EAAY,KACZF,EAAW,KAAKI,CAAK,EAEzB,EACAC,EAAI,CACL,CAEL,CAAC,CACH,CCgBM,SAAUC,GAAcC,EAA6DC,EAAQ,CAMjG,OAAOC,EAAQC,GAAcH,EAAaC,EAAW,UAAU,QAAU,EAAG,EAAI,CAAC,CACnF,CCgDM,SAAUG,GAASC,EAA4B,CAA5BA,IAAA,SAAAA,EAAA,CAAA,GACf,IAAAC,EAAgHD,EAAO,UAAvHE,EAASD,IAAA,OAAG,UAAA,CAAM,OAAA,IAAIE,CAAJ,EAAgBF,EAAEG,EAA4EJ,EAAO,aAAnFK,EAAYD,IAAA,OAAG,GAAIA,EAAEE,EAAuDN,EAAO,gBAA9DO,EAAeD,IAAA,OAAG,GAAIA,EAAEE,EAA+BR,EAAO,oBAAtCS,EAAmBD,IAAA,OAAG,GAAIA,EAUnH,OAAO,SAACE,EAAa,CACnB,IAAIC,EACAC,EACAC,EACAC,EAAW,EACXC,EAAe,GACfC,EAAa,GAEXC,EAAc,UAAA,CAClBL,GAAe,MAAfA,EAAiB,YAAW,EAC5BA,EAAkB,MACpB,EAGMM,EAAQ,UAAA,CACZD,EAAW,EACXN,EAAaE,EAAU,OACvBE,EAAeC,EAAa,EAC9B,EACMG,EAAsB,UAAA,CAG1B,IAAMC,EAAOT,EACbO,EAAK,EACLE,GAAI,MAAJA,EAAM,YAAW,CACnB,EAEA,OAAOC,EAAc,SAACC,EAAQC,GAAU,CACtCT,IACI,CAACE,GAAc,CAACD,GAClBE,EAAW,EAOb,IAAMO,GAAQX,EAAUA,GAAO,KAAPA,EAAWX,EAAS,EAO5CqB,GAAW,IAAI,UAAA,CACbT,IAKIA,IAAa,GAAK,CAACE,GAAc,CAACD,IACpCH,EAAkBa,GAAYN,EAAqBV,CAAmB,EAE1E,CAAC,EAIDe,GAAK,UAAUD,EAAU,EAGvB,CAACZ,GAIDG,EAAW,IAOXH,EAAa,IAAIe,GAAe,CAC9B,KAAM,SAACC,GAAK,CAAK,OAAAH,GAAK,KAAKG,EAAK,CAAf,EACjB,MAAO,SAACC,GAAG,CACTZ,EAAa,GACbC,EAAW,EACXL,EAAkBa,GAAYP,EAAOb,EAAcuB,EAAG,EACtDJ,GAAK,MAAMI,EAAG,CAChB,EACA,SAAU,UAAA,CACRb,EAAe,GACfE,EAAW,EACXL,EAAkBa,GAAYP,EAAOX,CAAe,EACpDiB,GAAK,SAAQ,CACf,EACD
,EACDK,EAAUP,CAAM,EAAE,UAAUX,CAAU,EAE1C,CAAC,EAAED,CAAa,CAClB,CACF,CAEA,SAASe,GACPP,EACAY,EAA+C,SAC/CC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GAEA,GAAIF,IAAO,GAAM,CACfZ,EAAK,EACL,OAGF,GAAIY,IAAO,GAIX,KAAMG,EAAe,IAAIP,GAAe,CACtC,KAAM,UAAA,CACJO,EAAa,YAAW,EACxBf,EAAK,CACP,EACD,EAED,OAAOY,EAAE,MAAA,OAAAI,EAAA,CAAA,EAAAC,EAAIJ,CAAI,CAAA,CAAA,EAAE,UAAUE,CAAY,EAC3C,CCjHM,SAAUG,EACdC,EACAC,EACAC,EAAyB,WAErBC,EACAC,EAAW,GACf,OAAIJ,GAAsB,OAAOA,GAAuB,UACnDK,EAA8EL,EAAkB,WAAhGG,EAAUE,IAAA,OAAG,IAAQA,EAAEC,EAAuDN,EAAkB,WAAzEC,EAAUK,IAAA,OAAG,IAAQA,EAAEC,EAAgCP,EAAkB,SAAlDI,EAAQG,IAAA,OAAG,GAAKA,EAAEL,EAAcF,EAAkB,WAEnGG,EAAcH,GAAkB,KAAlBA,EAAsB,IAE/BQ,GAAS,CACd,UAAW,UAAA,CAAM,OAAA,IAAIC,GAAcN,EAAYF,EAAYC,CAAS,CAAnD,EACjB,aAAc,GACd,gBAAiB,GACjB,oBAAqBE,EACtB,CACH,CCxIM,SAAUM,GAAQC,EAAa,CACnC,OAAOC,EAAO,SAACC,EAAGC,EAAK,CAAK,OAAAH,GAASG,CAAT,CAAc,CAC5C,CCWM,SAAUC,GAAaC,EAAyB,CACpD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAS,GAEPC,EAAiBC,EACrBH,EACA,UAAA,CACEE,GAAc,MAAdA,EAAgB,YAAW,EAC3BD,EAAS,EACX,EACAG,EAAI,EAGNC,EAAUR,CAAQ,EAAE,UAAUK,CAAc,EAE5CH,EAAO,UAAUI,EAAyBH,EAAY,SAACM,EAAK,CAAK,OAAAL,GAAUD,EAAW,KAAKM,CAAK,CAA/B,CAAgC,CAAC,CACpG,CAAC,CACH,CCRM,SAAUC,GAAS,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAC9B,IAAMC,EAAYC,GAAaH,CAAM,EACrC,OAAOI,EAAQ,SAACC,EAAQC,EAAU,EAI/BJ,EAAYK,GAAOP,EAAQK,EAAQH,CAAS,EAAIK,GAAOP,EAAQK,CAAM,GAAG,UAAUC,CAAU,CAC/F,CAAC,CACH,CCmBM,SAAUE,EACdC,EACAC,EAA6G,CAE7G,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAyD,KACzDC,EAAQ,EAERC,EAAa,GAIXC,EAAgB,UAAA,CAAM,OAAAD,GAAc,CAACF,GAAmBD,EAAW,SAAQ,CAArD,EAE5BD,EAAO,UACLM,EACEL,EACA,SAACM,EAAK,CAEJL,GAAe,MAAfA,EAAiB,YAAW,EAC5B,IAAIM,EAAa,EACXC,EAAaN,IAEnBO,EAAUb,EAAQU,EAAOE,CAAU,CAAC,EAAE,UACnCP,EAAkBI,EACjBL,EAIA,SAACU,EAAU,CAAK,OAAAV,EAAW,KAAKH,EAAiBA,EAAeS,EAAOI,EAAYF,EAAYD,GAAY,EAAIG,CAAU,CAAzG,EAChB,UAAA,CAIET,EAAkB,KAClBG,EAAa,CACf,CAAC,CACD,CAEN,EACA,UAAA,CACED,EAAa,GACbC,EAAa,CACf,CAAC,CACF,CAEL,CAAC,CACH,CCvFM,SAAUO,GAAaC,EAA8B,CACzD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCC,EAAUJ,CAAQ,EAAE,UAAUK,EAAyBF,EAAY,UAAA,CAAM,OAAAA,EAAW,SAAQ,CAAnB,EAAuBG,EAAI,CAAC,EACrG,CAACH,EAAW,QAAUD,EAAO,UAAUC,CAAU,CACnD,CAAC,CACH,CCIM,SAAUI,GAAaC,EAAiDC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,IACrEC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAQ,EACZF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAK,CACzC,IAAMC,EAASR,EAAUO,EAAOF,GAAO,GACtCG,GAAUP,IAAcG,EAAW,KAAKG,CAAK,EAC9C,CAACC,GAAUJ,EAAW,SAAQ,CAChC,CAAC,CAAC,CAEN,CAAC,CACH,CCyCM,SAAUK,EACdC,EACAC,EACAC,EAA8B,CAK9B,IAAMC,EACJC,EAAWJ,CAAc,GAAKC,GAASC,EAElC,CAAE,KAAMF,EAA2E,MAAKC,EAAE,SAAQC,CAAA,EACnGF,EAEN,OAAOG,EACHE,EAAQ,SAACC,EAAQC,EAAU,QACzBC,EAAAL,EAAY,aAAS,MAAAK,IAAA,QAAAA,EAAA,KAArBL,CAAW,EACX,IAAIM,EAAU,GACdH,EAAO,UACLI,EACEH,EACA,SAACI,EAAK,QACJH,EAAAL,EAAY,QAAI,MAAAK,IAAA,QAAAA,EAAA,KAAhBL,EAAmBQ,CAAK,EACxBJ,EAAW,KAAKI,CAAK,CACvB,EACA,UAAA,OACEF,EAAU,IACVD,EAAAL,EAAY,YAAQ,MAAAK,IAAA,QAAAA,EAAA,KAApBL,CAAW,EACXI,EAAW,SAAQ,CACrB,EACA,SAACK,EAAG,OACFH,EAAU,IACVD,EAAAL,EAAY,SAAK,MAAAK,IAAA,QAAAA,EAAA,KAAjBL,EAAoBS,CAAG,EACvBL,EAAW,MAAMK,CAAG,CACtB,EACA,UAAA,SACMH,KACFD,EAAAL,EAAY,eAAW,MAAAK,IAAA,QAAAA,EAAA,KAAvBL,CAAW,IAEbU,EAAAV,EAAY,YAAQ,MAAAU,IAAA,QAAAA,EAAA,KAApBV,CAAW,CACb,CAAC,CACF,CAEL,CAAC,EAIDW,EACN,CC9IO,IAAMC,GAAwC,CACnD,QAAS,GACT,SAAU,IAiDN,SAAUC,GACdC,EACAC,EAA8C,CAA9C,OAAAA,IAAA,SAAAA,EAAAH,IAEOI,EAAQ,SAACC,EAAQC,EAAU,CACxB,IAAAC,EAAsBJ,EAAM,QAAnBK,EAAaL,EAAM,SAChCM,EAAW,GACXC,EAAsB,KACtBC,EAAiC,KACjCC,EAAa,GAEXC,EAAgB,UAAA,CACpBF,GAAS,MAATA,EAAW,YAAW,EACtBA,EAAY,KACRH,IACFM,EAAI,EACJF,GAAcN,EAAW,SAAQ,EAErC,EAEMS,EAAoB,UAAA,CACxBJ,EAAY,KACZC,GAAcN,EAAW,SAA
Q,CACnC,EAEMU,EAAgB,SAACC,EAAQ,CAC7B,OAACN,EAAYO,EAAUhB,EAAiBe,CAAK,CAAC,EAAE,UAAUE,EAAyBb,EAAYO,EAAeE,CAAiB,CAAC,CAAhI,EAEID,EAAO,UAAA,CACX,GAAIL,EAAU,CAIZA,EAAW,GACX,IAAMQ,EAAQP,EACdA,EAAY,KAEZJ,EAAW,KAAKW,CAAK,EACrB,CAACL,GAAcI,EAAcC,CAAK,EAEtC,EAEAZ,EAAO,UACLc,EACEb,EAMA,SAACW,EAAK,CACJR,EAAW,GACXC,EAAYO,EACZ,EAAEN,GAAa,CAACA,EAAU,UAAYJ,EAAUO,EAAI,EAAKE,EAAcC,CAAK,EAC9E,EACA,UAAA,CACEL,EAAa,GACb,EAAEJ,GAAYC,GAAYE,GAAa,CAACA,EAAU,SAAWL,EAAW,SAAQ,CAClF,CAAC,CACF,CAEL,CAAC,CACH,CCvEM,SAAUc,GACdC,EACAC,EACAC,EAA8B,CAD9BD,IAAA,SAAAA,EAAAE,IACAD,IAAA,SAAAA,EAAAE,IAEA,IAAMC,EAAYC,GAAMN,EAAUC,CAAS,EAC3C,OAAOM,GAAS,UAAA,CAAM,OAAAF,CAAA,EAAWH,CAAM,CACzC,CCJM,SAAUM,IAAc,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACnC,IAAMC,EAAUC,GAAkBH,CAAM,EAExC,OAAOI,EAAQ,SAACC,EAAQC,EAAU,CAehC,QAdMC,EAAMP,EAAO,OACbQ,EAAc,IAAI,MAAMD,CAAG,EAI7BE,EAAWT,EAAO,IAAI,UAAA,CAAM,MAAA,EAAA,CAAK,EAGjCU,EAAQ,cAMHC,EAAC,CACRC,EAAUZ,EAAOW,EAAE,EAAE,UACnBE,EACEP,EACA,SAACQ,EAAK,CACJN,EAAYG,GAAKG,EACb,CAACJ,GAAS,CAACD,EAASE,KAEtBF,EAASE,GAAK,IAKbD,EAAQD,EAAS,MAAMM,EAAQ,KAAON,EAAW,MAEtD,EAGAO,EAAI,CACL,GAnBIL,EAAI,EAAGA,EAAIJ,EAAKI,MAAhBA,CAAC,EAwBVN,EAAO,UACLQ,EAAyBP,EAAY,SAACQ,EAAK,CACzC,GAAIJ,EAAO,CAET,IAAMO,EAAMC,EAAA,CAAIJ,CAAK,EAAAK,EAAKX,CAAW,CAAA,EACrCF,EAAW,KAAKJ,EAAUA,EAAO,MAAA,OAAAgB,EAAA,CAAA,EAAAC,EAAIF,CAAM,CAAA,CAAA,EAAIA,CAAM,EAEzD,CAAC,CAAC,CAEN,CAAC,CACH,CCxFM,SAAUG,IAAG,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACxB,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCL,GAAS,MAAA,OAAAM,EAAA,CAACF,CAA8B,EAAAG,EAAMN,CAAuC,CAAA,CAAA,EAAE,UAAUI,CAAU,CAC7G,CAAC,CACH,CCCM,SAAUG,IAAO,SAAkCC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACvD,OAAOC,GAAG,MAAA,OAAAC,EAAA,CAAA,EAAAC,EAAIJ,CAAW,CAAA,CAAA,CAC3B,CCYO,SAASK,IAAmC,CACjD,IAAMC,EAAY,IAAIC,GAAwB,CAAC,EAC/C,OAAAC,EAAU,SAAU,mBAAoB,CAAE,KAAM,EAAK,CAAC,EACnD,UAAU,IAAMF,EAAU,KAAK,QAAQ,CAAC,EAGpCA,CACT,CCHO,SAASG,EACdC,EAAkBC,EAAmB,SAChC,CACL,OAAO,MAAM,KAAKA,EAAK,iBAAoBD,CAAQ,CAAC,CACtD,CAuBO,SAASE,EACdF,EAAkBC,EAAmB,SAClC,CACH,IAAME,EAAKC,GAAsBJ,EAAUC,CAAI,EAC/C,GAAI,OAAOE,GAAO,YAChB,MAAM,IAAI,eACR,8BAA8BH,kBAChC,EAGF,OAAOG,CACT,CAsBO,SAASC,GACdJ,EAAkBC,EAAmB,SACtB,CACf,OAAOA,EAAK,cAAiBD,CAAQ,GAAK,MAC5C,CAOO,SAASK,IAA4C,CAC1D,OAAO,SAAS,yBAAyB,aACrC,SAAS,eAAiB,MAEhC,CClEO,SAASC,GACdC,EACqB,CACrB,OAAOC,EACLC,EAAU,SAAS,KAAM,SAAS,EAClCA,EAAU,SAAS,KAAM,UAAU,CACrC,EACG,KACCC,GAAa,CAAC,EACdC,EAAI,IAAM,CACR,IAAMC,EAASC,GAAiB,EAChC,OAAO,OAAOD,GAAW,YACrBL,EAAG,SAASK,CAAM,EAClB,EACN,CAAC,EACDE,EAAUP,IAAOM,GAAiB,CAAC,EACnCE,EAAqB,CACvB,CACJ,CChBO,SAASC,GACdC,EACe,CACf,MAAO,CACL,EAAGA,EAAG,WACN,EAAGA,EAAG,SACR,CACF,CAWO,SAASC,GACdD,EAC2B,CAC3B,OAAOE,EACLC,EAAU,OAAQ,MAAM,EACxBA,EAAU,OAAQ,QAAQ,CAC5B,EACG,KACCC,GAAU,EAAGC,EAAuB,EACpCC,EAAI,IAAMP,GAAiBC,CAAE,CAAC,EAC9BO,EAAUR,GAAiBC,CAAE,CAAC,CAChC,CACJ,CCxCO,SAASQ,GACdC,EACe,CACf,MAAO,CACL,EAAGA,EAAG,WACN,EAAGA,EAAG,SACR,CACF,CAWO,SAASC,GACdD,EAC2B,CAC3B,OAAOE,EACLC,EAAUH,EAAI,QAAQ,EACtBG,EAAU,OAAQ,QAAQ,CAC5B,EACG,KACCC,GAAU,EAAGC,EAAuB,EACpCC,EAAI,IAAMP,GAAwBC,CAAE,CAAC,EACrCO,EAAUR,GAAwBC,CAAE,CAAC,CACvC,CACJ,CCpEA,IAAIQ,GAAW,UAAY,CACvB,GAAI,OAAO,KAAQ,YACf,OAAO,IASX,SAASC,EAASC,EAAKC,EAAK,CACxB,IAAIC,EAAS,GACb,OAAAF,EAAI,KAAK,SAAUG,EAAOC,EAAO,CAC7B,OAAID,EAAM,KAAOF,GACbC,EAASE,EACF,IAEJ,EACX,CAAC,EACMF,CACX,CACA,OAAsB,UAAY,CAC9B,SAASG,GAAU,CACf,KAAK,YAAc,CAAC,CACxB,CACA,cAAO,eAAeA,EAAQ,UAAW,OAAQ,CAI7C,IAAK,UAAY,CACb,OAAO,KAAK,YAAY,MAC5B,EACA,WAAY,GACZ,aAAc,EAClB,CAAC,EAKDA,EAAQ,UAAU,IAAM,SAAUJ,EAAK,CACnC,IAAIG,EAAQL,EAAS,KAAK,YAAaE,CAAG,EACtCE,EAAQ,KAAK,YAAYC,GAC7B,OAAOD
,GAASA,EAAM,EAC1B,EAMAE,EAAQ,UAAU,IAAM,SAAUJ,EAAKK,EAAO,CAC1C,IAAIF,EAAQL,EAAS,KAAK,YAAaE,CAAG,EACtC,CAACG,EACD,KAAK,YAAYA,GAAO,GAAKE,EAG7B,KAAK,YAAY,KAAK,CAACL,EAAKK,CAAK,CAAC,CAE1C,EAKAD,EAAQ,UAAU,OAAS,SAAUJ,EAAK,CACtC,IAAIM,EAAU,KAAK,YACfH,EAAQL,EAASQ,EAASN,CAAG,EAC7B,CAACG,GACDG,EAAQ,OAAOH,EAAO,CAAC,CAE/B,EAKAC,EAAQ,UAAU,IAAM,SAAUJ,EAAK,CACnC,MAAO,CAAC,CAAC,CAACF,EAAS,KAAK,YAAaE,CAAG,CAC5C,EAIAI,EAAQ,UAAU,MAAQ,UAAY,CAClC,KAAK,YAAY,OAAO,CAAC,CAC7B,EAMAA,EAAQ,UAAU,QAAU,SAAUG,EAAUC,EAAK,CAC7CA,IAAQ,SAAUA,EAAM,MAC5B,QAASC,EAAK,EAAGC,EAAK,KAAK,YAAaD,EAAKC,EAAG,OAAQD,IAAM,CAC1D,IAAIP,EAAQQ,EAAGD,GACfF,EAAS,KAAKC,EAAKN,EAAM,GAAIA,EAAM,EAAE,CACzC,CACJ,EACOE,CACX,EAAE,CACN,EAAG,EAKCO,GAAY,OAAO,QAAW,aAAe,OAAO,UAAa,aAAe,OAAO,WAAa,SAGpGC,GAAY,UAAY,CACxB,OAAI,OAAO,QAAW,aAAe,OAAO,OAAS,KAC1C,OAEP,OAAO,MAAS,aAAe,KAAK,OAAS,KACtC,KAEP,OAAO,QAAW,aAAe,OAAO,OAAS,KAC1C,OAGJ,SAAS,aAAa,EAAE,CACnC,EAAG,EAQCC,GAA2B,UAAY,CACvC,OAAI,OAAO,uBAA0B,WAI1B,sBAAsB,KAAKD,EAAQ,EAEvC,SAAUL,EAAU,CAAE,OAAO,WAAW,UAAY,CAAE,OAAOA,EAAS,KAAK,IAAI,CAAC,CAAG,EAAG,IAAO,EAAE,CAAG,CAC7G,EAAG,EAGCO,GAAkB,EAStB,SAASC,GAAUR,EAAUS,EAAO,CAChC,IAAIC,EAAc,GAAOC,EAAe,GAAOC,EAAe,EAO9D,SAASC,GAAiB,CAClBH,IACAA,EAAc,GACdV,EAAS,GAETW,GACAG,EAAM,CAEd,CAQA,SAASC,GAAkB,CACvBT,GAAwBO,CAAc,CAC1C,CAMA,SAASC,GAAQ,CACb,IAAIE,EAAY,KAAK,IAAI,EACzB,GAAIN,EAAa,CAEb,GAAIM,EAAYJ,EAAeL,GAC3B,OAMJI,EAAe,EACnB,MAEID,EAAc,GACdC,EAAe,GACf,WAAWI,EAAiBN,CAAK,EAErCG,EAAeI,CACnB,CACA,OAAOF,CACX,CAGA,IAAIG,GAAgB,GAGhBC,GAAiB,CAAC,MAAO,QAAS,SAAU,OAAQ,QAAS,SAAU,OAAQ,QAAQ,EAEvFC,GAA4B,OAAO,kBAAqB,YAIxDC,GAA0C,UAAY,CAMtD,SAASA,GAA2B,CAMhC,KAAK,WAAa,GAMlB,KAAK,qBAAuB,GAM5B,KAAK,mBAAqB,KAM1B,KAAK,WAAa,CAAC,EACnB,KAAK,iBAAmB,KAAK,iBAAiB,KAAK,IAAI,EACvD,KAAK,QAAUZ,GAAS,KAAK,QAAQ,KAAK,IAAI,EAAGS,EAAa,CAClE,CAOA,OAAAG,EAAyB,UAAU,YAAc,SAAUC,EAAU,CAC5D,CAAC,KAAK,WAAW,QAAQA,CAAQ,GAClC,KAAK,WAAW,KAAKA,CAAQ,EAG5B,KAAK,YACN,KAAK,SAAS,CAEtB,EAOAD,EAAyB,UAAU,eAAiB,SAAUC,EAAU,CACpE,IAAIC,EAAY,KAAK,WACjB1B,EAAQ0B,EAAU,QAAQD,CAAQ,EAElC,CAACzB,GACD0B,EAAU,OAAO1B,EAAO,CAAC,EAGzB,CAAC0B,EAAU,QAAU,KAAK,YAC1B,KAAK,YAAY,CAEzB,EAOAF,EAAyB,UAAU,QAAU,UAAY,CACrD,IAAIG,EAAkB,KAAK,iBAAiB,EAGxCA,GACA,KAAK,QAAQ,CAErB,EASAH,EAAyB,UAAU,iBAAmB,UAAY,CAE9D,IAAII,EAAkB,KAAK,WAAW,OAAO,SAAUH,EAAU,CAC7D,OAAOA,EAAS,aAAa,EAAGA,EAAS,UAAU,CACvD,CAAC,EAMD,OAAAG,EAAgB,QAAQ,SAAUH,EAAU,CAAE,OAAOA,EAAS,gBAAgB,CAAG,CAAC,EAC3EG,EAAgB,OAAS,CACpC,EAOAJ,EAAyB,UAAU,SAAW,UAAY,CAGlD,CAAChB,IAAa,KAAK,aAMvB,SAAS,iBAAiB,gBAAiB,KAAK,gBAAgB,EAChE,OAAO,iBAAiB,SAAU,KAAK,OAAO,EAC1Ce,IACA,KAAK,mBAAqB,IAAI,iBAAiB,KAAK,OAAO,EAC3D,KAAK,mBAAmB,QAAQ,SAAU,CACtC,WAAY,GACZ,UAAW,GACX,cAAe,GACf,QAAS,EACb,CAAC,IAGD,SAAS,iBAAiB,qBAAsB,KAAK,OAAO,EAC5D,KAAK,qBAAuB,IAEhC,KAAK,WAAa,GACtB,EAOAC,EAAyB,UAAU,YAAc,UAAY,CAGrD,CAAChB,IAAa,CAAC,KAAK,aAGxB,SAAS,oBAAoB,gBAAiB,KAAK,gBAAgB,EACnE,OAAO,oBAAoB,SAAU,KAAK,OAAO,EAC7C,KAAK,oBACL,KAAK,mBAAmB,WAAW,EAEnC,KAAK,sBACL,SAAS,oBAAoB,qBAAsB,KAAK,OAAO,EAEnE,KAAK,mBAAqB,KAC1B,KAAK,qBAAuB,GAC5B,KAAK,WAAa,GACtB,EAQAgB,EAAyB,UAAU,iBAAmB,SAAUjB,EAAI,CAChE,IAAIsB,EAAKtB,EAAG,aAAcuB,EAAeD,IAAO,OAAS,GAAKA,EAE1DE,EAAmBT,GAAe,KAAK,SAAUzB,EAAK,CACtD,MAAO,CAAC,CAAC,CAACiC,EAAa,QAAQjC,CAAG,CACtC,CAAC,EACGkC,GACA,KAAK,QAAQ,CAErB,EAMAP,EAAyB,YAAc,UAAY,CAC/C,OAAK,KAAK,YACN,KAAK,UAAY,IAAIA,GAElB,KAAK,SAChB,EAMAA,EAAyB,UAAY,KAC9BA,CACX,EAAE,EASEQ,GAAsB,SAAUC,EAAQC,EAAO,CAC/C,QAAS5B,EAAK,EAAGC,EAAK,OAAO,KAAK2B,CAAK,EAAG5B,EAAKC,EAAG,OAAQD,IAAM,CAC5D,IAAIT,EAAMU,EAAGD,GACb,OAAO,eAAe2B,EAAQpC,EAAK,CAC/B,MAAOqC,EAAMrC,GACb,WAAY,GACZ,SAAU,GACV,aAAc,EAClB,CAAC,CACL,CACA,OAAOoC,CACX,EAQIE,GAAe,SAAUF,EAAQ,CAIjC,IAAIG,EAAcH,GAAUA,EAAO,eAAiBA,EA
AO,cAAc,YAGzE,OAAOG,GAAe3B,EAC1B,EAGI4B,GAAYC,GAAe,EAAG,EAAG,EAAG,CAAC,EAOzC,SAASC,GAAQrC,EAAO,CACpB,OAAO,WAAWA,CAAK,GAAK,CAChC,CAQA,SAASsC,GAAeC,EAAQ,CAE5B,QADIC,EAAY,CAAC,EACRpC,EAAK,EAAGA,EAAK,UAAU,OAAQA,IACpCoC,EAAUpC,EAAK,GAAK,UAAUA,GAElC,OAAOoC,EAAU,OAAO,SAAUC,EAAMC,EAAU,CAC9C,IAAI1C,EAAQuC,EAAO,UAAYG,EAAW,UAC1C,OAAOD,EAAOJ,GAAQrC,CAAK,CAC/B,EAAG,CAAC,CACR,CAOA,SAAS2C,GAAYJ,EAAQ,CAGzB,QAFIC,EAAY,CAAC,MAAO,QAAS,SAAU,MAAM,EAC7CI,EAAW,CAAC,EACPxC,EAAK,EAAGyC,EAAcL,EAAWpC,EAAKyC,EAAY,OAAQzC,IAAM,CACrE,IAAIsC,EAAWG,EAAYzC,GACvBJ,EAAQuC,EAAO,WAAaG,GAChCE,EAASF,GAAYL,GAAQrC,CAAK,CACtC,CACA,OAAO4C,CACX,CAQA,SAASE,GAAkBf,EAAQ,CAC/B,IAAIgB,EAAOhB,EAAO,QAAQ,EAC1B,OAAOK,GAAe,EAAG,EAAGW,EAAK,MAAOA,EAAK,MAAM,CACvD,CAOA,SAASC,GAA0BjB,EAAQ,CAGvC,IAAIkB,EAAclB,EAAO,YAAamB,EAAenB,EAAO,aAS5D,GAAI,CAACkB,GAAe,CAACC,EACjB,OAAOf,GAEX,IAAII,EAASN,GAAYF,CAAM,EAAE,iBAAiBA,CAAM,EACpDa,EAAWD,GAAYJ,CAAM,EAC7BY,EAAWP,EAAS,KAAOA,EAAS,MACpCQ,EAAUR,EAAS,IAAMA,EAAS,OAKlCS,EAAQhB,GAAQE,EAAO,KAAK,EAAGe,EAASjB,GAAQE,EAAO,MAAM,EAqBjE,GAlBIA,EAAO,YAAc,eAOjB,KAAK,MAAMc,EAAQF,CAAQ,IAAMF,IACjCI,GAASf,GAAeC,EAAQ,OAAQ,OAAO,EAAIY,GAEnD,KAAK,MAAMG,EAASF,CAAO,IAAMF,IACjCI,GAAUhB,GAAeC,EAAQ,MAAO,QAAQ,EAAIa,IAOxD,CAACG,GAAkBxB,CAAM,EAAG,CAK5B,IAAIyB,EAAgB,KAAK,MAAMH,EAAQF,CAAQ,EAAIF,EAC/CQ,EAAiB,KAAK,MAAMH,EAASF,CAAO,EAAIF,EAMhD,KAAK,IAAIM,CAAa,IAAM,IAC5BH,GAASG,GAET,KAAK,IAAIC,CAAc,IAAM,IAC7BH,GAAUG,EAElB,CACA,OAAOrB,GAAeQ,EAAS,KAAMA,EAAS,IAAKS,EAAOC,CAAM,CACpE,CAOA,IAAII,GAAwB,UAAY,CAGpC,OAAI,OAAO,oBAAuB,YACvB,SAAU3B,EAAQ,CAAE,OAAOA,aAAkBE,GAAYF,CAAM,EAAE,kBAAoB,EAKzF,SAAUA,EAAQ,CAAE,OAAQA,aAAkBE,GAAYF,CAAM,EAAE,YACrE,OAAOA,EAAO,SAAY,UAAa,CAC/C,EAAG,EAOH,SAASwB,GAAkBxB,EAAQ,CAC/B,OAAOA,IAAWE,GAAYF,CAAM,EAAE,SAAS,eACnD,CAOA,SAAS4B,GAAe5B,EAAQ,CAC5B,OAAKzB,GAGDoD,GAAqB3B,CAAM,EACpBe,GAAkBf,CAAM,EAE5BiB,GAA0BjB,CAAM,EAL5BI,EAMf,CAQA,SAASyB,GAAmBvD,EAAI,CAC5B,IAAIwD,EAAIxD,EAAG,EAAGyD,EAAIzD,EAAG,EAAGgD,EAAQhD,EAAG,MAAOiD,EAASjD,EAAG,OAElD0D,EAAS,OAAO,iBAAoB,YAAc,gBAAkB,OACpEC,EAAO,OAAO,OAAOD,EAAO,SAAS,EAEzC,OAAAjC,GAAmBkC,EAAM,CACrB,EAAGH,EAAG,EAAGC,EAAG,MAAOT,EAAO,OAAQC,EAClC,IAAKQ,EACL,MAAOD,EAAIR,EACX,OAAQC,EAASQ,EACjB,KAAMD,CACV,CAAC,EACMG,CACX,CAWA,SAAS5B,GAAeyB,EAAGC,EAAGT,EAAOC,EAAQ,CACzC,MAAO,CAAE,EAAGO,EAAG,EAAGC,EAAG,MAAOT,EAAO,OAAQC,CAAO,CACtD,CAMA,IAAIW,GAAmC,UAAY,CAM/C,SAASA,EAAkBlC,EAAQ,CAM/B,KAAK,eAAiB,EAMtB,KAAK,gBAAkB,EAMvB,KAAK,aAAeK,GAAe,EAAG,EAAG,EAAG,CAAC,EAC7C,KAAK,OAASL,CAClB,CAOA,OAAAkC,EAAkB,UAAU,SAAW,UAAY,CAC/C,IAAID,EAAOL,GAAe,KAAK,MAAM,EACrC,YAAK,aAAeK,EACZA,EAAK,QAAU,KAAK,gBACxBA,EAAK,SAAW,KAAK,eAC7B,EAOAC,EAAkB,UAAU,cAAgB,UAAY,CACpD,IAAID,EAAO,KAAK,aAChB,YAAK,eAAiBA,EAAK,MAC3B,KAAK,gBAAkBA,EAAK,OACrBA,CACX,EACOC,CACX,EAAE,EAEEC,GAAqC,UAAY,CAOjD,SAASA,EAAoBnC,EAAQoC,EAAU,CAC3C,IAAIC,EAAcR,GAAmBO,CAAQ,EAO7CrC,GAAmB,KAAM,CAAE,OAAQC,EAAQ,YAAaqC,CAAY,CAAC,CACzE,CACA,OAAOF,CACX,EAAE,EAEEG,GAAmC,UAAY,CAW/C,SAASA,EAAkBnE,EAAUoE,EAAYC,EAAa,CAc1D,GAPA,KAAK,oBAAsB,CAAC,EAM5B,KAAK,cAAgB,IAAI/E,GACrB,OAAOU,GAAa,WACpB,MAAM,IAAI,UAAU,yDAAyD,EAEjF,KAAK,UAAYA,EACjB,KAAK,YAAcoE,EACnB,KAAK,aAAeC,CACxB,CAOA,OAAAF,EAAkB,UAAU,QAAU,SAAUtC,EAAQ,CACpD,GAAI,CAAC,UAAU,OACX,MAAM,IAAI,UAAU,0CAA0C,EAGlE,GAAI,SAAO,SAAY,aAAe,EAAE,mBAAmB,SAG3D,IAAI,EAAEA,aAAkBE,GAAYF,CAAM,EAAE,SACxC,MAAM,IAAI,UAAU,uCAAuC,EAE/D,IAAIyC,EAAe,KAAK,cAEpBA,EAAa,IAAIzC,CAAM,IAG3ByC,EAAa,IAAIzC,EAAQ,IAAIkC,GAAkBlC,CAAM,CAAC,EACtD,KAAK,YAAY,YAAY,IAAI,EAEjC,KAAK,YAAY,QAAQ,GAC7B,EAOAsC,EAAkB,UAAU,UAAY,SAAUtC,EAAQ,CACtD,GAAI,CAAC,UAAU,OACX,MAAM,IAAI,UAAU,0CAA0C,EAGlE,GAAI,SAAO,SAAY,aAAe,EAAE,mBAAmB,SAG3D,IAAI,EAAEA,aAAkBE,GAAYF,CAAM,EAAE,SACxC,MAAM,IAAI,UAAU,uCAAu
C,EAE/D,IAAIyC,EAAe,KAAK,cAEpB,CAACA,EAAa,IAAIzC,CAAM,IAG5ByC,EAAa,OAAOzC,CAAM,EACrByC,EAAa,MACd,KAAK,YAAY,eAAe,IAAI,GAE5C,EAMAH,EAAkB,UAAU,WAAa,UAAY,CACjD,KAAK,YAAY,EACjB,KAAK,cAAc,MAAM,EACzB,KAAK,YAAY,eAAe,IAAI,CACxC,EAOAA,EAAkB,UAAU,aAAe,UAAY,CACnD,IAAII,EAAQ,KACZ,KAAK,YAAY,EACjB,KAAK,cAAc,QAAQ,SAAUC,EAAa,CAC1CA,EAAY,SAAS,GACrBD,EAAM,oBAAoB,KAAKC,CAAW,CAElD,CAAC,CACL,EAOAL,EAAkB,UAAU,gBAAkB,UAAY,CAEtD,GAAI,EAAC,KAAK,UAAU,EAGpB,KAAIlE,EAAM,KAAK,aAEXF,EAAU,KAAK,oBAAoB,IAAI,SAAUyE,EAAa,CAC9D,OAAO,IAAIR,GAAoBQ,EAAY,OAAQA,EAAY,cAAc,CAAC,CAClF,CAAC,EACD,KAAK,UAAU,KAAKvE,EAAKF,EAASE,CAAG,EACrC,KAAK,YAAY,EACrB,EAMAkE,EAAkB,UAAU,YAAc,UAAY,CAClD,KAAK,oBAAoB,OAAO,CAAC,CACrC,EAMAA,EAAkB,UAAU,UAAY,UAAY,CAChD,OAAO,KAAK,oBAAoB,OAAS,CAC7C,EACOA,CACX,EAAE,EAKE7C,GAAY,OAAO,SAAY,YAAc,IAAI,QAAY,IAAIhC,GAKjEmF,GAAgC,UAAY,CAO5C,SAASA,EAAezE,EAAU,CAC9B,GAAI,EAAE,gBAAgByE,GAClB,MAAM,IAAI,UAAU,oCAAoC,EAE5D,GAAI,CAAC,UAAU,OACX,MAAM,IAAI,UAAU,0CAA0C,EAElE,IAAIL,EAAahD,GAAyB,YAAY,EAClDC,EAAW,IAAI8C,GAAkBnE,EAAUoE,EAAY,IAAI,EAC/D9C,GAAU,IAAI,KAAMD,CAAQ,CAChC,CACA,OAAOoD,CACX,EAAE,EAEF,CACI,UACA,YACA,YACJ,EAAE,QAAQ,SAAUC,EAAQ,CACxBD,GAAe,UAAUC,GAAU,UAAY,CAC3C,IAAIvE,EACJ,OAAQA,EAAKmB,GAAU,IAAI,IAAI,GAAGoD,GAAQ,MAAMvE,EAAI,SAAS,CACjE,CACJ,CAAC,EAED,IAAIP,GAAS,UAAY,CAErB,OAAI,OAAOS,GAAS,gBAAmB,YAC5BA,GAAS,eAEboE,EACX,EAAG,EAEIE,GAAQ/E,GCr2Bf,IAAMgF,GAAS,IAAIC,EAYbC,GAAYC,EAAM,IAAMC,EAC5B,IAAIC,GAAeC,GAAW,CAC5B,QAAWC,KAASD,EAClBN,GAAO,KAAKO,CAAK,CACrB,CAAC,CACH,CAAC,EACE,KACCC,EAAUC,GAAYC,EAAMC,GAAOP,EAAGK,CAAQ,CAAC,EAC5C,KACCG,EAAS,IAAMH,EAAS,WAAW,CAAC,CACtC,CACF,EACAI,EAAY,CAAC,CACf,EAaK,SAASC,GACdC,EACa,CACb,MAAO,CACL,MAAQA,EAAG,YACX,OAAQA,EAAG,YACb,CACF,CAuBO,SAASC,GACdD,EACyB,CACzB,OAAOb,GACJ,KACCe,EAAIR,GAAYA,EAAS,QAAQM,CAAE,CAAC,EACpCP,EAAUC,GAAYT,GACnB,KACCkB,EAAO,CAAC,CAAE,OAAAC,CAAO,IAAMA,IAAWJ,CAAE,EACpCH,EAAS,IAAMH,EAAS,UAAUM,CAAE,CAAC,EACrCK,EAAI,IAAMN,GAAeC,CAAE,CAAC,CAC9B,CACF,EACAM,EAAUP,GAAeC,CAAE,CAAC,CAC9B,CACJ,CC1GO,SAASO,GACdC,EACa,CACb,MAAO,CACL,MAAQA,EAAG,YACX,OAAQA,EAAG,YACb,CACF,CASO,SAASC,GACdD,EACyB,CACzB,IAAIE,EAASF,EAAG,cAChB,KAAOE,IAEHF,EAAG,aAAeE,EAAO,aACzBF,EAAG,cAAgBE,EAAO,eAE1BA,GAAUF,EAAKE,GAAQ,cAK3B,OAAOA,EAASF,EAAK,MACvB,CCfA,IAAMG,GAAS,IAAIC,EAUbC,GAAYC,EAAM,IAAMC,EAC5B,IAAI,qBAAqBC,GAAW,CAClC,QAAWC,KAASD,EAClBL,GAAO,KAAKM,CAAK,CACrB,EAAG,CACD,UAAW,CACb,CAAC,CACH,CAAC,EACE,KACCC,EAAUC,GAAYC,EAAMC,GAAON,EAAGI,CAAQ,CAAC,EAC5C,KACCG,EAAS,IAAMH,EAAS,WAAW,CAAC,CACtC,CACF,EACAI,EAAY,CAAC,CACf,EAaK,SAASC,GACdC,EACqB,CACrB,OAAOZ,GACJ,KACCa,EAAIP,GAAYA,EAAS,QAAQM,CAAE,CAAC,EACpCP,EAAUC,GAAYR,GACnB,KACCgB,EAAO,CAAC,CAAE,OAAAC,CAAO,IAAMA,IAAWH,CAAE,EACpCH,EAAS,IAAMH,EAAS,UAAUM,CAAE,CAAC,EACrCI,EAAI,CAAC,CAAE,eAAAC,CAAe,IAAMA,CAAc,CAC5C,CACF,CACF,CACJ,CAaO,SAASC,GACdN,EAAiBO,EAAY,GACR,CACrB,OAAOC,GAA0BR,CAAE,EAChC,KACCI,EAAI,CAAC,CAAE,EAAAK,CAAE,IAAM,CACb,IAAMC,EAAUC,GAAeX,CAAE,EAC3BY,EAAUC,GAAsBb,CAAE,EACxC,OAAOS,GACLG,EAAQ,OAASF,EAAQ,OAASH,CAEtC,CAAC,EACDO,EAAqB,CACvB,CACJ,CCjFA,IAAMC,GAA4C,CAChD,OAAQC,EAAW,yBAAyB,EAC5C,OAAQA,EAAW,yBAAyB,CAC9C,EAaO,SAASC,GAAUC,EAAuB,CAC/C,OAAOH,GAAQG,GAAM,OACvB,CAaO,SAASC,GAAUD,EAAcE,EAAsB,CACxDL,GAAQG,GAAM,UAAYE,GAC5BL,GAAQG,GAAM,MAAM,CACxB,CAWO,SAASG,GAAYH,EAAmC,CAC7D,IAAMI,EAAKP,GAAQG,GACnB,OAAOK,EAAUD,EAAI,QAAQ,EAC1B,KACCE,EAAI,IAAMF,EAAG,OAAO,EACpBG,EAAUH,EAAG,OAAO,CACtB,CACJ,CClCA,SAASI,GACPC,EAAiBC,EACR,CACT,OAAQD,EAAG,YAAa,CAGtB,KAAK,iBAEH,OAAIA,EAAG,OAAS,QACP,SAAS,KAAKC,CAAI,EAElB,GAGX,KAAK,kBACL,KAAK,oBACH,MAAO,GAGT,QACE,OAAOD,EAAG,iBACd,CACF,CAWO,SAASE,IAAsC,CACpD,OAAOC,EAAyB,OAAQ,SAAS,EAC9C,KACCC,EAAOC,GAAM,EAAEA,EAAG,SAAWA,EAAG,QAAQ,EACxCC,EAAID,
IAAO,CACT,KAAME,GAAU,QAAQ,EAAI,SAAW,SACvC,KAAMF,EAAG,IACT,OAAQ,CACNA,EAAG,eAAe,EAClBA,EAAG,gBAAgB,CACrB,CACF,EAAc,EACdD,EAAO,CAAC,CAAE,KAAAI,EAAM,KAAAP,CAAK,IAAM,CACzB,GAAIO,IAAS,SAAU,CACrB,IAAMC,EAASC,GAAiB,EAChC,GAAI,OAAOD,GAAW,YACpB,MAAO,CAACV,GAAwBU,EAAQR,CAAI,CAChD,CACA,MAAO,EACT,CAAC,EACDU,GAAM,CACR,CACJ,CCpFO,SAASC,IAAmB,CACjC,OAAO,IAAI,IAAI,SAAS,IAAI,CAC9B,CAOO,SAASC,GAAYC,EAAgB,CAC1C,SAAS,KAAOA,EAAI,IACtB,CASO,SAASC,IAA8B,CAC5C,OAAO,IAAIC,CACb,CCLA,SAASC,GAAYC,EAAiBC,EAA8B,CAGlE,GAAI,OAAOA,GAAU,UAAY,OAAOA,GAAU,SAChDD,EAAG,WAAaC,EAAM,SAAS,UAGtBA,aAAiB,KAC1BD,EAAG,YAAYC,CAAK,UAGX,MAAM,QAAQA,CAAK,EAC5B,QAAWC,KAAQD,EACjBF,GAAYC,EAAIE,CAAI,CAE1B,CAyBO,SAASC,EACdC,EAAaC,KAAmCC,EAC7C,CACH,IAAMN,EAAK,SAAS,cAAcI,CAAG,EAGrC,GAAIC,EACF,QAAWE,KAAQ,OAAO,KAAKF,CAAU,EACnC,OAAOA,EAAWE,IAAU,cAI5B,OAAOF,EAAWE,IAAU,UAC9BP,EAAG,aAAaO,EAAMF,EAAWE,EAAK,EAEtCP,EAAG,aAAaO,EAAM,EAAE,GAI9B,QAAWN,KAASK,EAClBP,GAAYC,EAAIC,CAAK,EAGvB,OAAOD,CACT,CChFO,SAASQ,GAASC,EAAeC,EAAmB,CACzD,IAAIC,EAAID,EACR,GAAID,EAAM,OAASE,EAAG,CACpB,KAAOF,EAAME,KAAO,KAAO,EAAEA,EAAI,GAAG,CACpC,MAAO,GAAGF,EAAM,UAAU,EAAGE,CAAC,MAChC,CACA,OAAOF,CACT,CAkBO,SAASG,GAAMH,EAAuB,CAC3C,GAAIA,EAAQ,IAAK,CACf,IAAMI,EAAS,GAAGJ,EAAQ,KAAO,IAAO,IACxC,MAAO,KAAKA,EAAQ,MAAY,KAAM,QAAQI,CAAM,IACtD,KACE,QAAOJ,EAAM,SAAS,CAE1B,CC5BO,SAASK,IAA0B,CACxC,OAAO,SAAS,KAAK,UAAU,CAAC,CAClC,CAYO,SAASC,GAAgBC,EAAoB,CAClD,IAAMC,EAAKC,EAAE,IAAK,CAAE,KAAMF,CAAK,CAAC,EAChCC,EAAG,iBAAiB,QAASE,GAAMA,EAAG,gBAAgB,CAAC,EACvDF,EAAG,MAAM,CACX,CASO,SAASG,IAAwC,CACtD,OAAOC,EAA2B,OAAQ,YAAY,EACnD,KACCC,EAAIR,EAAe,EACnBS,EAAUT,GAAgB,CAAC,EAC3BU,EAAOR,GAAQA,EAAK,OAAS,CAAC,EAC9BS,EAAY,CAAC,CACf,CACJ,CAOO,SAASC,IAA+C,CAC7D,OAAON,GAAkB,EACtB,KACCE,EAAIK,GAAMC,GAAmB,QAAQD,KAAM,CAAE,EAC7CH,EAAOP,GAAM,OAAOA,GAAO,WAAW,CACxC,CACJ,CC1CO,SAASY,GAAWC,EAAoC,CAC7D,IAAMC,EAAQ,WAAWD,CAAK,EAC9B,OAAOE,GAA0BC,GAC/BF,EAAM,YAAY,IAAME,EAAKF,EAAM,OAAO,CAAC,CAC5C,EACE,KACCG,EAAUH,EAAM,OAAO,CACzB,CACJ,CAOO,SAASI,IAAkC,CAChD,IAAMJ,EAAQ,WAAW,OAAO,EAChC,OAAOK,EACLC,EAAU,OAAQ,aAAa,EAAE,KAAKC,EAAI,IAAM,EAAI,CAAC,EACrDD,EAAU,OAAQ,YAAY,EAAE,KAAKC,EAAI,IAAM,EAAK,CAAC,CACvD,EACG,KACCJ,EAAUH,EAAM,OAAO,CACzB,CACJ,CAcO,SAASQ,GACdC,EAA6BC,EACd,CACf,OAAOD,EACJ,KACCE,EAAUC,GAAUA,EAASF,EAAQ,EAAIG,CAAK,CAChD,CACJ,CC7CO,SAASC,GACdC,EAAmBC,EAAuB,CAAE,YAAa,aAAc,EACjD,CACtB,OAAOC,GAAK,MAAM,GAAGF,IAAOC,CAAO,CAAC,EACjC,KACCE,GAAW,IAAMC,CAAK,EACtBC,EAAUC,GAAOA,EAAI,SAAW,IAC5BC,GAAW,IAAM,IAAI,MAAMD,EAAI,UAAU,CAAC,EAC1CE,EAAGF,CAAG,CACV,CACF,CACJ,CAYO,SAASG,GACdT,EAAmBC,EACJ,CACf,OAAOF,GAAQC,EAAKC,CAAO,EACxB,KACCI,EAAUC,GAAOA,EAAI,KAAK,CAAC,EAC3BI,EAAY,CAAC,CACf,CACJ,CAUO,SAASC,GACdX,EAAmBC,EACG,CACtB,IAAMW,EAAM,IAAI,UAChB,OAAOb,GAAQC,EAAKC,CAAO,EACxB,KACCI,EAAUC,GAAOA,EAAI,KAAK,CAAC,EAC3BO,EAAIP,GAAOM,EAAI,gBAAgBN,EAAK,UAAU,CAAC,EAC/CI,EAAY,CAAC,CACf,CACJ,CClDO,SAASI,GAAYC,EAA+B,CACzD,IAAMC,EAASC,EAAE,SAAU,CAAE,IAAAF,CAAI,CAAC,EAClC,OAAOG,EAAM,KACX,SAAS,KAAK,YAAYF,CAAM,EACzBG,EACLC,EAAUJ,EAAQ,MAAM,EACxBI,EAAUJ,EAAQ,OAAO,EACtB,KACCK,EAAU,IACRC,GAAW,IAAM,IAAI,eAAe,mBAAmBP,GAAK,CAAC,CAC9D,CACH,CACJ,EACG,KACCQ,EAAI,IAAG,EAAY,EACnBC,EAAS,IAAM,SAAS,KAAK,YAAYR,CAAM,CAAC,EAChDS,GAAK,CAAC,CACR,EACH,CACH,CCfO,SAASC,IAAoC,CAClD,MAAO,CACL,EAAG,KAAK,IAAI,EAAG,OAAO,EACtB,EAAG,KAAK,IAAI,EAAG,OAAO,CACxB,CACF,CASO,SAASC,IAAkD,CAChE,OAAOC,EACLC,EAAU,OAAQ,SAAU,CAAE,QAAS,EAAK,CAAC,EAC7CA,EAAU,OAAQ,SAAU,CAAE,QAAS,EAAK,CAAC,CAC/C,EACG,KACCC,EAAIJ,EAAiB,EACrBK,EAAUL,GAAkB,CAAC,CAC/B,CACJ,CC3BO,SAASM,IAAgC,CAC9C,MAAO,CACL,MAAQ,WACR,OAAQ,WACV,CACF,CASO,SAASC,IAA8C,CAC5D,OAAOC,EAAU,OAAQ,SAAU,CAAE,QAAS,EAAK,CAAC,EACjD,KACCC,EAAIH,EAAe,EACnBI,EAAUJ,GAAgB,CAAC,CAC7
B,CACJ,CCXO,SAASK,IAAsC,CACpD,OAAOC,EAAc,CACnBC,GAAoB,EACpBC,GAAkB,CACpB,CAAC,EACE,KACCC,EAAI,CAAC,CAACC,EAAQC,CAAI,KAAO,CAAE,OAAAD,EAAQ,KAAAC,CAAK,EAAE,EAC1CC,EAAY,CAAC,CACf,CACJ,CCVO,SAASC,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EAChB,CACtB,IAAMC,EAAQF,EACX,KACCG,EAAwB,MAAM,CAChC,EAGIC,EAAUC,EAAc,CAACH,EAAOD,CAAO,CAAC,EAC3C,KACCK,EAAI,IAAMC,GAAiBR,CAAE,CAAC,CAChC,EAGF,OAAOM,EAAc,CAACJ,EAASD,EAAWI,CAAO,CAAC,EAC/C,KACCE,EAAI,CAAC,CAAC,CAAE,OAAAE,CAAO,EAAG,CAAE,OAAAC,EAAQ,KAAAC,CAAK,EAAG,CAAE,EAAAC,EAAG,EAAAC,CAAE,CAAC,KAAO,CACjD,OAAQ,CACN,EAAGH,EAAO,EAAIE,EACd,EAAGF,EAAO,EAAIG,EAAIJ,CACpB,EACA,KAAAE,CACF,EAAE,CACJ,CACJ,CCIO,SAASG,GACdC,EAAgB,CAAE,IAAAC,CAAI,EACP,CAGf,IAAMC,EAAMC,EAAwBH,EAAQ,SAAS,EAClD,KACCI,EAAI,CAAC,CAAE,KAAAC,CAAK,IAAMA,CAAS,CAC7B,EAGF,OAAOJ,EACJ,KACCK,GAAS,IAAMJ,EAAK,CAAE,QAAS,GAAM,SAAU,EAAK,CAAC,EACrDK,EAAIC,GAAWR,EAAO,YAAYQ,CAAO,CAAC,EAC1CC,EAAU,IAAMP,CAAG,EACnBQ,GAAM,CACR,CACJ,CCCA,IAAMC,GAASC,EAAW,WAAW,EAC/BC,GAAiB,KAAK,MAAMF,GAAO,WAAY,EACrDE,GAAO,KAAO,GAAG,IAAI,IAAIA,GAAO,KAAMC,GAAY,CAAC,IAW5C,SAASC,IAAwB,CACtC,OAAOF,EACT,CASO,SAASG,EAAQC,EAAqB,CAC3C,OAAOJ,GAAO,SAAS,SAASI,CAAI,CACtC,CAUO,SAASC,GACdC,EAAkBC,EACV,CACR,OAAO,OAAOA,GAAU,YACpBP,GAAO,aAAaM,GAAK,QAAQ,IAAKC,EAAM,SAAS,CAAC,EACtDP,GAAO,aAAaM,EAC1B,CCjCO,SAASE,GACdC,EAASC,EAAmB,SACP,CACrB,OAAOC,EAAW,sBAAsBF,KAASC,CAAI,CACvD,CAYO,SAASE,GACdH,EAASC,EAAmB,SACL,CACvB,OAAOG,EAAY,sBAAsBJ,KAASC,CAAI,CACxD,CC1EO,SAASI,GACdC,EACsB,CACtB,IAAMC,EAASC,EAAW,6BAA8BF,CAAE,EAC1D,OAAOG,EAAUF,EAAQ,QAAS,CAAE,KAAM,EAAK,CAAC,EAC7C,KACCG,EAAI,IAAMF,EAAW,cAAeF,CAAE,CAAC,EACvCI,EAAIC,IAAY,CAAE,KAAM,UAAUA,EAAQ,SAAS,CAAE,EAAE,CACzD,CACJ,CASO,SAASC,GACdN,EACiC,CACjC,MAAI,CAACO,EAAQ,kBAAkB,GAAK,CAACP,EAAG,kBAC/BQ,EAGFC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EACG,KACCE,EAAU,CAAE,KAAM,SAAiB,YAAY,CAAE,CAAC,CACpD,EACG,UAAU,CAAC,CAAE,KAAAC,CAAK,IAAM,CA5FjC,IAAAC,EA6FcD,GAAQA,MAAUC,EAAA,SAAiB,YAAY,IAA7B,KAAAA,EAAkCD,KACtDb,EAAG,OAAS,GAGZ,SAAiB,aAAca,CAAI,EAEvC,CAAC,EAGEd,GAAcC,CAAE,EACpB,KACCe,EAAIC,GAASN,EAAM,KAAKM,CAAK,CAAC,EAC9BC,EAAS,IAAMP,EAAM,SAAS,CAAC,EAC/BN,EAAIY,GAAUE,EAAA,CAAE,IAAKlB,GAAOgB,EAAQ,CACtC,CACJ,CAAC,CACH,CC5BO,SAASG,GACdC,EAAiB,CAAE,QAAAC,CAAQ,EACN,CACrB,OAAOA,EACJ,KACCC,EAAIC,IAAW,CAAE,OAAQA,IAAWH,CAAG,EAAE,CAC3C,CACJ,CAYO,SAASI,GACdJ,EAAiBK,EACe,CAChC,IAAMC,EAAY,IAAIC,EACtB,OAAAD,EAAU,UAAU,CAAC,CAAE,OAAAE,CAAO,IAAM,CAClCR,EAAG,OAASQ,CACd,CAAC,EAGMT,GAAaC,EAAIK,CAAO,EAC5B,KACCI,EAAIC,GAASJ,EAAU,KAAKI,CAAK,CAAC,EAClCC,EAAS,IAAML,EAAU,SAAS,CAAC,EACnCJ,EAAIQ,GAAUE,EAAA,CAAE,IAAKZ,GAAOU,EAAQ,CACtC,CACJ,CC7FA,IAAAG,GAAwB,SCajB,SAASC,GAAcC,EAA0B,CACtD,OACEC,EAAC,OAAI,MAAM,aAAa,GAAID,GAC1BC,EAAC,OAAI,MAAM,+BAA+B,CAC5C,CAEJ,CCHO,SAASC,GACdC,EAAqBC,EACR,CAIb,GAHAA,EAASA,EAAS,GAAGA,gBAAqBD,IAAO,OAG7CC,EAAQ,CACV,IAAMC,EAASD,EAAS,IAAIA,IAAW,OACvC,OACEE,EAAC,SAAM,MAAM,gBAAgB,SAAU,GACpCC,GAAcH,CAAM,EACrBE,EAAC,KAAE,KAAMD,EAAQ,MAAM,uBAAuB,SAAU,IACtDC,EAAC,QAAK,wBAAuBH,EAAI,CACnC,CACF,CAEJ,KACE,QACEG,EAAC,SAAM,MAAM,gBAAgB,SAAU,GACpCC,GAAcH,CAAM,EACrBE,EAAC,QAAK,MAAM,uBAAuB,SAAU,IAC3CA,EAAC,QAAK,wBAAuBH,EAAI,CACnC,CACF,CAGN,CC5BO,SAASK,GAAsBC,EAAyB,CAC7D,OACEC,EAAC,UACC,MAAM,uBACN,MAAOC,GAAY,gBAAgB,EACnC,wBAAuB,IAAIF,WAC5B,CAEL,CCYA,SAASG,GACPC,EAA2CC,EAC9B,CACb,IAAMC,EAASD,EAAO,EAChBE,EAASF,EAAO,EAGhBG,EAAU,OAAO,KAAKJ,EAAS,KAAK,EACvC,OAAOK,GAAO,CAACL,EAAS,MAAMK,EAAI,EAClC,OAAyB,CAACC,EAAMD,IAAQ,CACvC,GAAGC,EAAMC,EAAC,WAAKF,CAAI,EAAQ,GAC7B,EAAG,CAAC,CAAC,EACJ,MAAM,EAAG,EAAE,EAGRG,EAAM,IAAI,IAAIR,EAAS,QAAQ,EACjCS,EAAQ,kBAAkB,GAC5BD,EAAI,aAAa,IAAI,IAAK,OAAO,QAAQR,EAAS,KAAK,EACpD,OAAO,CAAC,CAAC,CAAEU,CAAK,IAAMA,CAAK,EAC3B,OAAO,CAACC,EAA
W,CAACC,CAAK,IAAM,GAAGD,KAAaC,IAAQ,KAAK,EAAG,EAAE,CACpE,EAGF,GAAM,CAAE,KAAAC,CAAK,EAAIC,GAAc,EAC/B,OACEP,EAAC,KAAE,KAAM,GAAGC,IAAO,MAAM,yBAAyB,SAAU,IAC1DD,EAAC,WACC,MAAO,CAAC,4BAA6B,GAAGL,EACpC,CAAC,qCAAqC,EACtC,CAAC,CACL,EAAE,KAAK,GAAG,EACV,gBAAeF,EAAS,MAAM,QAAQ,CAAC,GAEtCE,EAAS,GAAKK,EAAC,OAAI,MAAM,iCAAiC,EAC3DA,EAAC,MAAG,MAAM,2BAA2BP,EAAS,KAAM,EACnDG,EAAS,GAAKH,EAAS,KAAK,OAAS,GACpCO,EAAC,KAAE,MAAM,4BACNQ,GAASf,EAAS,KAAM,GAAG,CAC9B,EAEDA,EAAS,MACRO,EAAC,OAAI,MAAM,cACRP,EAAS,KAAK,IAAIgB,GAAO,CACxB,IAAMC,EAAKD,EAAI,QAAQ,WAAY,EAAE,EAC/BE,EAAOL,EACTI,KAAMJ,EACJ,4BAA4BA,EAAKI,KACjC,cACF,GACJ,OACEV,EAAC,QAAK,MAAO,UAAUW,KAASF,CAAI,CAExC,CAAC,CACH,EAEDb,EAAS,GAAKC,EAAQ,OAAS,GAC9BG,EAAC,KAAE,MAAM,2BACNY,GAAY,4BAA4B,EAAE,KAAG,GAAGf,CACnD,CAEJ,CACF,CAEJ,CAaO,SAASgB,GACdC,EACa,CACb,IAAMC,EAAYD,EAAO,GAAG,MACtBE,EAAO,CAAC,GAAGF,CAAM,EAGjBnB,EAASqB,EAAK,UAAUC,GAAO,CAACA,EAAI,SAAS,SAAS,GAAG,CAAC,EAC1D,CAACC,CAAO,EAAIF,EAAK,OAAOrB,EAAQ,CAAC,EAGnCwB,EAAQH,EAAK,UAAUC,GAAOA,EAAI,MAAQF,CAAS,EACnDI,IAAU,KACZA,EAAQH,EAAK,QAGf,IAAMI,EAAOJ,EAAK,MAAM,EAAGG,CAAK,EAC1BE,EAAOL,EAAK,MAAMG,CAAK,EAGvBG,EAAW,CACf9B,GAAqB0B,EAAS,EAAc,EAAE,CAACvB,GAAUwB,IAAU,EAAE,EACrE,GAAGC,EAAK,IAAIG,GAAW/B,GAAqB+B,EAAS,CAAW,CAAC,EACjE,GAAGF,EAAK,OAAS,CACfrB,EAAC,WAAQ,MAAM,0BACbA,EAAC,WAAQ,SAAU,IAChBqB,EAAK,OAAS,GAAKA,EAAK,SAAW,EAChCT,GAAY,wBAAwB,EACpCA,GAAY,2BAA4BS,EAAK,MAAM,CAEzD,EACC,GAAGA,EAAK,IAAIE,GAAW/B,GAAqB+B,EAAS,CAAW,CAAC,CACpE,CACF,EAAI,CAAC,CACP,EAGA,OACEvB,EAAC,MAAG,MAAM,0BACPsB,CACH,CAEJ,CC1IO,SAASE,GAAkBC,EAAiC,CACjE,OACEC,EAAC,MAAG,MAAM,oBACP,OAAO,QAAQD,CAAK,EAAE,IAAI,CAAC,CAACE,EAAKC,CAAK,IACrCF,EAAC,MAAG,MAAO,oCAAoCC,KAC5C,OAAOC,GAAU,SAAWC,GAAMD,CAAK,EAAIA,CAC9C,CACD,CACH,CAEJ,CCAO,SAASE,GACdC,EACa,CACb,IAAMC,EAAU,kCAAkCD,IAClD,OACEE,EAAC,OAAI,MAAOD,EAAS,OAAM,IACzBC,EAAC,UAAO,MAAM,gBAAgB,SAAU,GAAI,CAC9C,CAEJ,CCpBO,SAASC,GAAYC,EAAiC,CAC3D,OACEC,EAAC,OAAI,MAAM,0BACTA,EAAC,OAAI,MAAM,qBACRD,CACH,CACF,CAEJ,CCMA,SAASE,GAAcC,EAA+B,CACpD,IAAMC,EAASC,GAAc,EAGvBC,EAAM,IAAI,IAAI,MAAMH,EAAQ,WAAYC,EAAO,IAAI,EACzD,OACEG,EAAC,MAAG,MAAM,oBACRA,EAAC,KAAE,KAAM,GAAGD,IAAO,MAAM,oBACtBH,EAAQ,KACX,CACF,CAEJ,CAcO,SAASK,GACdC,EAAqBC,EACR,CACb,OACEH,EAAC,OAAI,MAAM,cACTA,EAAC,UACC,MAAM,sBACN,aAAYI,GAAY,sBAAsB,GAE7CD,EAAO,KACV,EACAH,EAAC,MAAG,MAAM,oBACPE,EAAS,IAAIP,EAAa,CAC7B,CACF,CAEJ,CCCO,SAASU,GACdC,EAAiBC,EACO,CACxB,IAAMC,EAAUC,EAAM,IAAMC,EAAc,CACxCC,GAAmBL,CAAE,EACrBM,GAA0BL,CAAS,CACrC,CAAC,CAAC,EACC,KACCM,EAAI,CAAC,CAAC,CAAE,EAAAC,EAAG,EAAAC,CAAE,EAAGC,CAAM,IAAqB,CACzC,GAAM,CAAE,MAAAC,EAAO,OAAAC,CAAO,EAAIC,GAAeb,CAAE,EAC3C,MAAQ,CACN,EAAGQ,EAAIE,EAAO,EAAIC,EAAQ,EAC1B,EAAGF,EAAIC,EAAO,EAAIE,EAAS,CAC7B,CACF,CAAC,CACH,EAGF,OAAOE,GAAkBd,CAAE,EACxB,KACCe,EAAUC,GAAUd,EACjB,KACCK,EAAIU,IAAW,CAAE,OAAAD,EAAQ,OAAAC,CAAO,EAAE,EAClCC,GAAK,CAAC,CAACF,GAAU,GAAQ,CAC3B,CACF,CACF,CACJ,CAWO,SAASG,GACdnB,EAAiBC,EAAwB,CAAE,QAAAmB,CAAQ,EAChB,CACnC,GAAM,CAACC,EAASC,CAAK,EAAI,MAAM,KAAKtB,EAAG,QAAQ,EAG/C,OAAOG,EAAM,IAAM,CACjB,IAAMoB,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAH,EAAM,UAAU,CAGd,KAAK,CAAE,OAAAN,CAAO,EAAG,CACfjB,EAAG,MAAM,YAAY,iBAAkB,GAAGiB,EAAO,KAAK,EACtDjB,EAAG,MAAM,YAAY,iBAAkB,GAAGiB,EAAO,KAAK,CACxD,EAGA,UAAW,CACTjB,EAAG,MAAM,eAAe,gBAAgB,EACxCA,EAAG,MAAM,eAAe,gBAAgB,CAC1C,CACF,CAAC,EAGD2B,GAAuB3B,CAAE,EACtB,KACC4B,GAAUH,CAAK,CACjB,EACG,UAAUI,GAAW,CACpB7B,EAAG,gBAAgB,kBAAmB6B,CAAO,CAC/C,CAAC,EAGLC,EACEP,EAAM,KAAKQ,EAAO,CAAC,CAAE,OAAAf,CAAO,IAAMA,CAAM,CAAC,EACzCO,EAAM,KAAKS,GAAa,GAAG,EAAGD,EAAO,CAAC,CAAE,OAAAf,CAAO,IAAM,CAACA,CAAM,CAAC,CAC/D,EACG,UAAU,CAGT,KAAK,CAAE,OAAAA,CAAO,EAAG,CACXA,EACFhB,EAAG,QAAQqB,CAAO,EAElBA,EAAQ,OAAO,CACnB,E
AGA,UAAW,CACTrB,EAAG,QAAQqB,CAAO,CACpB,CACF,CAAC,EAGHE,EACG,KACCU,GAAU,GAAIC,EAAuB,CACvC,EACG,UAAU,CAAC,CAAE,OAAAlB,CAAO,IAAM,CACzBK,EAAQ,UAAU,OAAO,qBAAsBL,CAAM,CACvD,CAAC,EAGLO,EACG,KACCY,GAAa,IAAKD,EAAuB,EACzCH,EAAO,IAAM,CAAC,CAAC/B,EAAG,YAAY,EAC9BO,EAAI,IAAMP,EAAG,aAAc,sBAAsB,CAAC,EAClDO,EAAI,CAAC,CAAE,EAAAC,CAAE,IAAMA,CAAC,CAClB,EACG,UAAU,CAGT,KAAK4B,EAAQ,CACPA,EACFpC,EAAG,MAAM,YAAY,iBAAkB,GAAG,CAACoC,KAAU,EAErDpC,EAAG,MAAM,eAAe,gBAAgB,CAC5C,EAGA,UAAW,CACTA,EAAG,MAAM,eAAe,gBAAgB,CAC1C,CACF,CAAC,EAGLqC,EAAsBf,EAAO,OAAO,EACjC,KACCM,GAAUH,CAAK,EACfM,EAAOO,GAAM,EAAEA,EAAG,SAAWA,EAAG,QAAQ,CAC1C,EACG,UAAUA,GAAMA,EAAG,eAAe,CAAC,EAGxCD,EAAsBf,EAAO,WAAW,EACrC,KACCM,GAAUH,CAAK,EACfc,GAAehB,CAAK,CACtB,EACG,UAAU,CAAC,CAACe,EAAI,CAAE,OAAAtB,CAAO,CAAC,IAAM,CAvOzC,IAAAwB,EA0OU,GAAIF,EAAG,SAAW,GAAKA,EAAG,SAAWA,EAAG,QACtCA,EAAG,eAAe,UAGTtB,EAAQ,CACjBsB,EAAG,eAAe,EAGlB,IAAMG,EAASzC,EAAG,cAAe,QAAQ,gBAAgB,EACrDyC,aAAkB,YACpBA,EAAO,MAAM,GAEbD,EAAAE,GAAiB,IAAjB,MAAAF,EAAoB,MACxB,CACF,CAAC,EAGLpB,EACG,KACCQ,GAAUH,CAAK,EACfM,EAAOY,GAAUA,IAAWtB,CAAO,EACnCuB,GAAM,GAAG,CACX,EACG,UAAU,IAAM5C,EAAG,MAAM,CAAC,EAGxBD,GAAgBC,EAAIC,CAAS,EACjC,KACC4C,EAAIC,GAASvB,EAAM,KAAKuB,CAAK,CAAC,EAC9BC,EAAS,IAAMxB,EAAM,SAAS,CAAC,EAC/BhB,EAAIuC,GAAUE,EAAA,CAAE,IAAKhD,GAAO8C,EAAQ,CACtC,CACJ,CAAC,CACH,CCrMA,SAASG,GAAsBC,EAAgC,CAC7D,IAAMC,EAAkB,CAAC,EACzB,QAAWC,KAAMC,EAAY,eAAgBH,CAAS,EAAG,CACvD,IAAMI,EAAgB,CAAC,EAGjBC,EAAK,SAAS,mBAAmBH,EAAI,WAAW,SAAS,EAC/D,QAASI,EAAOD,EAAG,SAAS,EAAGC,EAAMA,EAAOD,EAAG,SAAS,EACtDD,EAAM,KAAKE,CAAY,EAGzB,QAASC,KAAQH,EAAO,CACtB,IAAII,EAGJ,KAAQA,EAAQ,gBAAgB,KAAKD,EAAK,WAAY,GAAI,CACxD,GAAM,CAAC,CAAEE,EAAIC,CAAK,EAAIF,EACtB,GAAI,OAAOE,GAAU,YAAa,CAChC,IAAMC,EAASJ,EAAK,UAAUC,EAAM,KAAK,EACzCD,EAAOI,EAAO,UAAUF,EAAG,MAAM,EACjCR,EAAQ,KAAKU,CAAM,CAGrB,KAAO,CACLJ,EAAK,YAAcE,EACnBR,EAAQ,KAAKM,CAAI,EACjB,KACF,CACF,CACF,CACF,CACA,OAAON,CACT,CAQA,SAASW,GAAKC,EAAqBC,EAA2B,CAC5DA,EAAO,OAAO,GAAG,MAAM,KAAKD,EAAO,UAAU,CAAC,CAChD,CAoBO,SAASE,GACdb,EAAiBF,EAAwB,CAAE,QAAAgB,EAAS,OAAAC,CAAO,EACxB,CAGnC,IAAMC,EAASlB,EAAU,QAAQ,MAAM,EACjCmB,EAASD,GAAA,YAAAA,EAAQ,GAGjBE,EAAc,IAAI,IACxB,QAAWT,KAAUZ,GAAsBC,CAAS,EAAG,CACrD,GAAM,CAAC,CAAES,CAAE,EAAIE,EAAO,YAAa,MAAM,WAAW,EAChDU,GAAmB,gBAAgBZ,KAAOP,CAAE,IAC9CkB,EAAY,IAAIX,EAAIa,GAAiBb,EAAIU,CAAM,CAAC,EAChDR,EAAO,YAAYS,EAAY,IAAIX,CAAE,CAAE,EAE3C,CAGA,OAAIW,EAAY,OAAS,EAChBG,EAGFC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAGZC,EAAsC,CAAC,EAC7C,OAAW,CAAClB,EAAImB,CAAU,IAAKR,EAC7BO,EAAM,KAAK,CACTE,EAAW,cAAeD,CAAU,EACpCC,EAAW,gBAAgBpB,KAAOP,CAAE,CACtC,CAAC,EAGH,OAAAe,EACG,KACCa,GAAUL,EAAM,KAAKM,GAAS,CAAC,CAAC,CAAC,CACnC,EACG,UAAUC,GAAU,CACnB9B,EAAG,OAAS,CAAC8B,EAGb,OAAW,CAACC,EAAOC,CAAK,IAAKP,EACtBK,EAGHpB,GAAKqB,EAAOC,CAAK,EAFjBtB,GAAKsB,EAAOD,CAAK,CAGvB,CAAC,EAGEE,EAAM,GAAG,CAAC,GAAGf,CAAW,EAC5B,IAAI,CAAC,CAAC,CAAEQ,CAAU,IACjBQ,GAAgBR,EAAY5B,EAAW,CAAE,QAAAgB,CAAQ,CAAC,CACnD,CACH,EACG,KACCqB,EAAS,IAAMZ,EAAM,SAAS,CAAC,EAC/Ba,GAAM,CACR,CACJ,CAAC,CACH,CV9GA,IAAIC,GAAW,EAaf,SAASC,GAAkBC,EAA0C,CACnE,GAAIA,EAAG,mBAAoB,CACzB,IAAMC,EAAUD,EAAG,mBACnB,GAAIC,EAAQ,UAAY,KACtB,OAAOA,EAGJ,GAAIA,EAAQ,UAAY,KAAO,CAACA,EAAQ,SAAS,OACpD,OAAOF,GAAkBE,CAAO,CACpC,CAIF,CAgBO,SAASC,GACdF,EACuB,CACvB,OAAOG,GAAiBH,CAAE,EACvB,KACCI,EAAI,CAAC,CAAE,MAAAC,CAAM,KAEJ,CACL,WAFcC,GAAsBN,CAAE,EAElB,MAAQK,CAC9B,EACD,EACDE,EAAwB,YAAY,CACtC,CACJ,CAoBO,SAASC,GACdR,EAAiBS,EAC8B,CAC/C,GAAM,CAAE,QAASC,CAAM,EAAI,WAAW,SAAS,EAGzCC,EAAWC,EAAM,IAAM,CAC3B,IAAMC,EAAQ,IAAIC,EASlB,GARAD,EAAM,UAAU,CAAC,CAAE,WAAAE,CAAW,IAAM,CAC9BA,GAAcL,EAChBV,EAAG,aAAa,WAAY,GAAG,EAE/BA,EAAG,gBAAgB,UAAU,CACjC,CAAC,EAGG,GAAAgB,QAAY,YAAY,EAAG,CAC7B,IAAMC,EAASjB,EAAG,QAAQ,KAAK
,EAC/BiB,EAAO,GAAK,UAAU,EAAEnB,KACxBmB,EAAO,aACLC,GAAsBD,EAAO,EAAE,EAC/BjB,CACF,CACF,CAGA,IAAMmB,EAAYnB,EAAG,QAAQ,YAAY,EACzC,GAAImB,aAAqB,YAAa,CACpC,IAAMC,EAAOrB,GAAkBoB,CAAS,EAGxC,GAAI,OAAOC,GAAS,cAClBD,EAAU,UAAU,SAAS,UAAU,GACvCE,EAAQ,uBAAuB,GAC9B,CACD,IAAMC,EAAeC,GAAoBH,EAAMpB,EAAIS,CAAO,EAG1D,OAAOP,GAAeF,CAAE,EACrB,KACCwB,EAAIC,GAASZ,EAAM,KAAKY,CAAK,CAAC,EAC9BC,EAAS,IAAMb,EAAM,SAAS,CAAC,EAC/BT,EAAIqB,GAAUE,EAAA,CAAE,IAAK3B,GAAOyB,EAAQ,EACpCG,GACEzB,GAAiBgB,CAAS,EACvB,KACCf,EAAI,CAAC,CAAE,MAAAC,EAAO,OAAAwB,CAAO,IAAMxB,GAASwB,CAAM,EAC1CC,EAAqB,EACrBC,EAAUC,GAAUA,EAASV,EAAeW,CAAK,CACnD,CACJ,CACF,CACJ,CACF,CAGA,OAAO/B,GAAeF,CAAE,EACrB,KACCwB,EAAIC,GAASZ,EAAM,KAAKY,CAAK,CAAC,EAC9BC,EAAS,IAAMb,EAAM,SAAS,CAAC,EAC/BT,EAAIqB,GAAUE,EAAA,CAAE,IAAK3B,GAAOyB,EAAQ,CACtC,CACJ,CAAC,EAGD,OAAIJ,EAAQ,cAAc,EACjBa,GAAuBlC,CAAE,EAC7B,KACCmC,EAAOC,GAAWA,CAAO,EACzBC,GAAK,CAAC,EACNN,EAAU,IAAMpB,CAAQ,CAC1B,EAGGA,CACT,iyJWpLA,IAAI2B,GAKAC,GAAW,EAWf,SAASC,IAAiC,CACxC,OAAO,OAAO,SAAY,aAAe,mBAAmB,QACxDC,GAAY,qDAAqD,EACjEC,EAAG,MAAS,CAClB,CAaO,SAASC,GACdC,EACgC,CAChC,OAAAA,EAAG,UAAU,OAAO,SAAS,EAC7BN,QAAaE,GAAa,EACvB,KACCK,EAAI,IAAM,QAAQ,WAAW,CAC3B,YAAa,GACb,SAAAC,GACA,SAAU,CACR,cAAe,OACf,gBAAiB,OACjB,aAAc,MAChB,CACF,CAAC,CAAC,EACFC,EAAI,IAAG,EAAY,EACnBC,EAAY,CAAC,CACf,GAGFV,GAAS,UAAU,IAAM,CACvBM,EAAG,UAAU,IAAI,SAAS,EAC1B,IAAMK,EAAK,aAAaV,OAClBW,EAAOC,EAAE,MAAO,CAAE,MAAO,SAAU,CAAC,EAC1C,QAAQ,WAAW,OAAOF,EAAIL,EAAG,YAAcQ,GAAgB,CAG7D,IAAMC,EAASH,EAAK,aAAa,CAAE,KAAM,QAAS,CAAC,EACnDG,EAAO,UAAYD,EAGnBR,EAAG,YAAYM,CAAI,CACrB,CAAC,CACH,CAAC,EAGMZ,GACJ,KACCS,EAAI,KAAO,CAAE,IAAKH,CAAG,EAAE,CACzB,CACJ,CC/CO,SAASU,GACdC,EAAwB,CAAE,QAAAC,EAAS,OAAAC,CAAO,EACrB,CACrB,IAAIC,EAAO,GACX,OAAOC,EAGLH,EACG,KACCI,EAAIC,GAAUA,EAAO,QAAQ,qBAAqB,CAAE,EACpDC,EAAOC,GAAWR,IAAOQ,CAAO,EAChCH,EAAI,KAAO,CACT,OAAQ,OAAQ,OAAQ,EAC1B,EAAa,CACf,EAGFH,EACG,KACCK,EAAOE,GAAUA,GAAU,CAACN,CAAI,EAChCO,EAAI,IAAMP,EAAOH,EAAG,IAAI,EACxBK,EAAII,IAAW,CACb,OAAQA,EAAS,OAAS,OAC5B,EAAa,CACf,CACJ,CACF,CAaO,SAASE,GACdX,EAAwBY,EACQ,CAChC,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,OAAAE,EAAQ,OAAAC,CAAO,IAAM,CACtCjB,EAAG,gBAAgB,OAAQgB,IAAW,MAAM,EACxCC,GACFjB,EAAG,eAAe,CACtB,CAAC,EAGMD,GAAaC,EAAIY,CAAO,EAC5B,KACCF,EAAIQ,GAASJ,EAAM,KAAKI,CAAK,CAAC,EAC9BC,EAAS,IAAML,EAAM,SAAS,CAAC,EAC/BT,EAAIa,GAAUE,EAAA,CAAE,IAAKpB,GAAOkB,EAAQ,CACtC,CACJ,CAAC,CACH,CC5FA,IAAMG,GAAWC,EAAE,OAAO,EAgBnB,SAASC,GACdC,EACkC,CAClC,OAAAA,EAAG,YAAYH,EAAQ,EACvBA,GAAS,YAAYI,GAAYD,CAAE,CAAC,EAG7BE,EAAG,CAAE,IAAKF,CAAG,CAAC,CACvB,CCuBO,SAASG,GACdC,EACyB,CACzB,IAAMC,EAASC,EAA8B,iBAAkBF,CAAE,EAC3DG,EAAUF,EAAO,KAAKG,GAASA,EAAM,OAAO,GAAKH,EAAO,GAC9D,OAAOI,EAAM,GAAGJ,EAAO,IAAIG,GAASE,EAAUF,EAAO,QAAQ,EAC1D,KACCG,EAAI,IAAMC,EAA6B,cAAcJ,EAAM,MAAM,CAAC,CACpE,CACF,CAAC,EACE,KACCK,EAAUD,EAA6B,cAAcL,EAAQ,MAAM,CAAC,EACpEI,EAAIG,IAAW,CAAE,OAAAA,CAAO,EAAE,CAC5B,CACJ,CAeO,SAASC,GACdX,EAAiB,CAAE,UAAAY,CAAU,EACO,CAGpC,IAAMC,EAAOC,GAAoB,MAAM,EACvCd,EAAG,OAAOa,CAAI,EAGd,IAAME,EAAOD,GAAoB,MAAM,EACvCd,EAAG,OAAOe,CAAI,EAGd,IAAMC,EAAYR,EAAW,iBAAkBR,CAAE,EACjD,OAAOiB,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAC,EAAc,CAACJ,EAAOK,GAAiBvB,CAAE,CAAC,CAAC,EACxC,KACCwB,GAAU,EAAGC,EAAuB,EACpCC,GAAUN,CAAK,CACjB,EACG,UAAU,CAGT,KAAK,CAAC,CAAE,OAAAV,CAAO,EAAGiB,CAAI,EAAG,CACvB,IAAMC,EAASC,GAAiBnB,CAAM,EAChC,CAAE,MAAAoB,CAAM,EAAIC,GAAerB,CAAM,EAGvCV,EAAG,MAAM,YAAY,mBAAoB,GAAG4B,EAAO,KAAK,EACxD5B,EAAG,MAAM,YAAY,uBAAwB,GAAG8B,KAAS,EAGzD,IAAME,EAAUC,GAAwBjB,CAAS,GAE/CY,EAAO,EAAYI,EAAQ,GAC3BJ,EAAO,EAAIE,EAAQE,EAAQ,EAAIL,EAAK,QAEpCX,EAAU,SAAS,CACjB,KAAM,KAAK,IAAI,EAAGY,EAAO,EAAI,EAAE,EAC/
B,SAAU,QACZ,CAAC,CACL,EAGA,UAAW,CACT5B,EAAG,MAAM,eAAe,kBAAkB,EAC1CA,EAAG,MAAM,eAAe,sBAAsB,CAChD,CACF,CAAC,EAGLsB,EAAc,CACZY,GAA0BlB,CAAS,EACnCO,GAAiBP,CAAS,CAC5B,CAAC,EACE,KACCU,GAAUN,CAAK,CACjB,EACG,UAAU,CAAC,CAACQ,EAAQD,CAAI,IAAM,CAC7B,IAAMK,EAAUG,GAAsBnB,CAAS,EAC/CH,EAAK,OAASe,EAAO,EAAI,GACzBb,EAAK,OAASa,EAAO,EAAII,EAAQ,MAAQL,EAAK,MAAQ,EACxD,CAAC,EAGLtB,EACEC,EAAUO,EAAM,OAAO,EAAE,KAAKN,EAAI,IAAM,EAAE,CAAC,EAC3CD,EAAUS,EAAM,OAAO,EAAE,KAAKR,EAAI,IAAM,CAAE,CAAC,CAC7C,EACG,KACCmB,GAAUN,CAAK,CACjB,EACG,UAAUgB,GAAa,CACtB,GAAM,CAAE,MAAAN,CAAM,EAAIC,GAAef,CAAS,EAC1CA,EAAU,SAAS,CACjB,KAAMc,EAAQM,EACd,SAAU,QACZ,CAAC,CACH,CAAC,EAGDC,EAAQ,mBAAmB,GAC7BnB,EAAM,KACJoB,GAAK,CAAC,EACNC,GAAe3B,CAAS,CAC1B,EACG,UAAU,CAAC,CAAC,CAAE,OAAAF,CAAO,EAAG,CAAE,OAAAkB,CAAO,CAAC,IAAM,CACvC,IAAMY,EAAM9B,EAAO,UAAU,KAAK,EAClC,GAAIA,EAAO,aAAa,mBAAmB,EACzCA,EAAO,gBAAgB,mBAAmB,MAGrC,CACL,IAAM+B,EAAIzC,EAAG,UAAY4B,EAAO,EAGhC,QAAWc,KAAOxC,EAAY,aAAa,EACzC,QAAWE,KAASF,EAClB,iBAAkBwC,CACpB,EAAG,CACD,IAAMC,EAAQnC,EAAW,cAAcJ,EAAM,MAAM,EACnD,GACEuC,IAAUjC,GACViC,EAAM,UAAU,KAAK,IAAMH,EAC3B,CACAG,EAAM,aAAa,oBAAqB,EAAE,EAC1CvC,EAAM,MAAM,EACZ,KACF,CACF,CAGF,OAAO,SAAS,CACd,IAAKJ,EAAG,UAAYyC,CACtB,CAAC,EAGD,IAAMG,EAAO,SAAmB,QAAQ,GAAK,CAAC,EAC9C,SAAS,SAAU,CAAC,GAAG,IAAI,IAAI,CAACJ,EAAK,GAAGI,CAAI,CAAC,CAAC,CAAC,CACjD,CACF,CAAC,EAGE7C,GAAiBC,CAAE,EACvB,KACC6C,EAAIC,GAAS5B,EAAM,KAAK4B,CAAK,CAAC,EAC9BC,EAAS,IAAM7B,EAAM,SAAS,CAAC,EAC/BX,EAAIuC,GAAUE,EAAA,CAAE,IAAKhD,GAAO8C,EAAQ,CACtC,CACJ,CAAC,EACE,KACCG,GAAYC,EAAc,CAC5B,CACJ,CCtKO,SAASC,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,EAAS,OAAAC,CAAO,EACd,CAChC,OAAOC,EAGL,GAAGC,EAAY,2BAA4BL,CAAE,EAC1C,IAAIM,GAASC,GAAeD,EAAO,CAAE,QAAAJ,EAAS,OAAAC,CAAO,CAAC,CAAC,EAG1D,GAAGE,EAAY,cAAeL,CAAE,EAC7B,IAAIM,GAASE,GAAaF,CAAK,CAAC,EAGnC,GAAGD,EAAY,qBAAsBL,CAAE,EACpC,IAAIM,GAASG,GAAeH,CAAK,CAAC,EAGrC,GAAGD,EAAY,UAAWL,CAAE,EACzB,IAAIM,GAASI,GAAaJ,EAAO,CAAE,QAAAJ,EAAS,OAAAC,CAAO,CAAC,CAAC,EAGxD,GAAGE,EAAY,cAAeL,CAAE,EAC7B,IAAIM,GAASK,GAAiBL,EAAO,CAAE,UAAAL,CAAU,CAAC,CAAC,CACxD,CACF,CClCO,SAASW,GACdC,EAAkB,CAAE,OAAAC,CAAO,EACP,CACpB,OAAOA,EACJ,KACCC,EAAUC,GAAWC,EACnBC,EAAG,EAAI,EACPA,EAAG,EAAK,EAAE,KAAKC,GAAM,GAAI,CAAC,CAC5B,EACG,KACCC,EAAIC,IAAW,CAAE,QAAAL,EAAS,OAAAK,CAAO,EAAE,CACrC,CACF,CACF,CACJ,CAaO,SAASC,GACdC,EAAiBC,EACc,CAC/B,IAAMC,EAAQC,EAAW,cAAeH,CAAE,EAC1C,OAAOI,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,QAAAZ,EAAS,OAAAK,CAAO,IAAM,CACvCE,EAAG,UAAU,OAAO,oBAAqBF,CAAM,EAC/CI,EAAM,YAAcT,CACtB,CAAC,EAGMJ,GAAYW,EAAIC,CAAO,EAC3B,KACCM,EAAIC,GAASH,EAAM,KAAKG,CAAK,CAAC,EAC9BC,EAAS,IAAMJ,EAAM,SAAS,CAAC,EAC/BR,EAAIW,GAAUE,EAAA,CAAE,IAAKV,GAAOQ,EAAQ,CACtC,CACJ,CAAC,CACH,CC9BA,SAASG,GAAS,CAAE,UAAAC,CAAU,EAAsC,CAClE,GAAI,CAACC,EAAQ,iBAAiB,EAC5B,OAAOC,EAAG,EAAK,EAGjB,IAAMC,EAAaH,EAChB,KACCI,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,IAAMA,CAAC,EAC5BC,GAAY,EAAG,CAAC,EAChBF,EAAI,CAAC,CAACG,EAAGC,CAAC,IAAM,CAACD,EAAIC,EAAGA,CAAC,CAAU,EACnCC,EAAwB,CAAC,CAC3B,EAGIC,EAAUC,EAAc,CAACX,EAAWG,CAAU,CAAC,EAClD,KACCS,EAAO,CAAC,CAAC,CAAE,OAAAC,CAAO,EAAG,CAAC,CAAER,CAAC,CAAC,IAAM,KAAK,IAAIA,EAAIQ,EAAO,CAAC,EAAI,GAAG,EAC5DT,EAAI,CAAC,CAAC,CAAE,CAACU,CAAS,CAAC,IAAMA,CAAS,EAClCC,EAAqB,CACvB,EAGIC,EAAUC,GAAY,QAAQ,EACpC,OAAON,EAAc,CAACX,EAAWgB,CAAO,CAAC,EACtC,KACCZ,EAAI,CAAC,CAAC,CAAE,OAAAS,CAAO,EAAGK,CAAM,IAAML,EAAO,EAAI,KAAO,CAACK,CAAM,EACvDH,EAAqB,EACrBI,EAAUC,GAAUA,EAASV,EAAUR,EAAG,EAAK,CAAC,EAChDmB,EAAU,EAAK,CACjB,CACJ,CAcO,SAASC,GACdC,EAAiBC,EACG,CACpB,OAAOC,EAAM,IAAMd,EAAc,CAC/Be,GAAiBH,CAAE,EACnBxB,GAASyB,CAAO,CAClB,CAAC,CAAC,EACC,KACCpB,EAAI,CAAC,CAAC,CAAE,OAAAuB,CAAO,EAAGC,CAAM,KAAO,CAC7B,OAAAD,EACA,OAAAC,CACF,EAAE
,EACFb,EAAqB,CAACR,EAAGC,IACvBD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,MAChB,EACDqB,EAAY,CAAC,CACf,CACJ,CAaO,SAASC,GACdP,EAAiB,CAAE,QAAAQ,EAAS,MAAAC,CAAM,EACH,CAC/B,OAAOP,EAAM,IAAM,CACjB,IAAMQ,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAH,EACG,KACCxB,EAAwB,QAAQ,EAChC4B,GAAkBN,CAAO,CAC3B,EACG,UAAU,CAAC,CAAC,CAAE,OAAAX,CAAO,EAAG,CAAE,OAAAQ,CAAO,CAAC,IAAM,CACvCL,EAAG,UAAU,OAAO,oBAAqBH,GAAU,CAACQ,CAAM,EAC1DL,EAAG,OAASK,CACd,CAAC,EAGLI,EAAM,UAAUC,CAAK,EAGdF,EACJ,KACCO,GAAUH,CAAK,EACf/B,EAAImC,GAAUC,EAAA,CAAE,IAAKjB,GAAOgB,EAAQ,CACtC,CACJ,CAAC,CACH,CChHO,SAASE,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACb,CACzB,OAAOC,GAAgBH,EAAI,CAAE,UAAAC,EAAW,QAAAC,CAAQ,CAAC,EAC9C,KACCE,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,IAAM,CACzB,GAAM,CAAE,OAAAC,CAAO,EAAIC,GAAeP,CAAE,EACpC,MAAO,CACL,OAAQK,GAAKC,CACf,CACF,CAAC,EACDE,EAAwB,QAAQ,CAClC,CACJ,CAaO,SAASC,GACdT,EAAiBU,EACmB,CACpC,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClBD,EAAM,UAAU,CAAC,CAAE,OAAAE,CAAO,IAAM,CAC9Bd,EAAG,UAAU,OAAO,2BAA4Bc,CAAM,CACxD,CAAC,EAGD,IAAMC,EAAUC,GAAmB,YAAY,EAC/C,OAAI,OAAOD,GAAY,YACdE,EAGFlB,GAAiBgB,EAASL,CAAO,EACrC,KACCQ,EAAIC,GAASP,EAAM,KAAKO,CAAK,CAAC,EAC9BC,EAAS,IAAMR,EAAM,SAAS,CAAC,EAC/BR,EAAIe,GAAUE,EAAA,CAAE,IAAKrB,GAAOmB,EAAQ,CACtC,CACJ,CAAC,CACH,CCvDO,SAASG,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACpB,CAGlB,IAAMC,EAAUD,EACb,KACCE,EAAI,CAAC,CAAE,OAAAC,CAAO,IAAMA,CAAM,EAC1BC,EAAqB,CACvB,EAGIC,EAAUJ,EACb,KACCK,EAAU,IAAMC,GAAiBT,CAAE,EAChC,KACCI,EAAI,CAAC,CAAE,OAAAC,CAAO,KAAO,CACnB,IAAQL,EAAG,UACX,OAAQA,EAAG,UAAYK,CACzB,EAAE,EACFK,EAAwB,QAAQ,CAClC,CACF,CACF,EAGF,OAAOC,EAAc,CAACR,EAASI,EAASN,CAAS,CAAC,EAC/C,KACCG,EAAI,CAAC,CAACQ,EAAQ,CAAE,IAAAC,EAAK,OAAAC,CAAO,EAAG,CAAE,OAAQ,CAAE,EAAAC,CAAE,EAAG,KAAM,CAAE,OAAAV,CAAO,CAAE,CAAC,KAChEA,EAAS,KAAK,IAAI,EAAGA,EACjB,KAAK,IAAI,EAAGQ,EAASE,EAAIH,CAAM,EAC/B,KAAK,IAAI,EAAGP,EAASU,EAAID,CAAM,CACnC,EACO,CACL,OAAQD,EAAMD,EACd,OAAAP,EACA,OAAQQ,EAAMD,GAAUG,CAC1B,EACD,EACDT,EAAqB,CAACU,EAAGC,IACvBD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,MAChB,CACH,CACJ,CClDO,SAASC,GACdC,EACqB,CACrB,IAAMC,EAAU,SAAkB,WAAW,GAAK,CAChD,MAAOD,EAAO,UAAUE,GAAS,WAC/BA,EAAM,aAAa,qBAAqB,CAC1C,EAAE,OAAO,CACX,EAGA,OAAOC,EAAG,GAAGH,CAAM,EAChB,KACCI,GAASF,GAASG,EAAUH,EAAO,QAAQ,EACxC,KACCI,EAAI,IAAMJ,CAAK,CACjB,CACF,EACAK,EAAUP,EAAO,KAAK,IAAI,EAAGC,EAAQ,KAAK,EAAE,EAC5CK,EAAIJ,IAAU,CACZ,MAAOF,EAAO,QAAQE,CAAK,EAC3B,MAAO,CACL,OAASA,EAAM,aAAa,sBAAsB,EAClD,QAASA,EAAM,aAAa,uBAAuB,EACnD,OAASA,EAAM,aAAa,sBAAsB,CACpD,CACF,EAAa,EACbM,EAAY,CAAC,CACf,CACJ,CASO,SAASC,GACdC,EACgC,CAChC,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClBD,EAAM,UAAUE,GAAW,CACzB,SAAS,KAAK,aAAa,0BAA2B,EAAE,EAGxD,OAAW,CAACC,EAAKC,CAAK,IAAK,OAAO,QAAQF,EAAQ,KAAK,EACrD,SAAS,KAAK,aAAa,iBAAiBC,IAAOC,CAAK,EAG1D,QAASC,EAAQ,EAAGA,EAAQjB,EAAO,OAAQiB,IAAS,CAClD,IAAMC,EAAQlB,EAAOiB,GAAO,mBACxBC,aAAiB,cACnBA,EAAM,OAASJ,EAAQ,QAAUG,EACrC,CAGA,SAAS,YAAaH,CAAO,CAC/B,CAAC,EAGDF,EAAM,KAAKO,GAAUC,EAAc,CAAC,EACjC,UAAU,IAAM,CACf,SAAS,KAAK,gBAAgB,yBAAyB,CACzD,CAAC,EAGH,IAAMpB,EAASqB,EAA8B,QAASX,CAAE,EACxD,OAAOX,GAAaC,CAAM,EACvB,KACCsB,EAAIC,GAASX,EAAM,KAAKW,CAAK,CAAC,EAC9BC,EAAS,IAAMZ,EAAM,SAAS,CAAC,EAC/BN,EAAIiB,GAAUE,EAAA,CAAE,IAAKf,GAAOa,EAAQ,CACtC,CACJ,CAAC,CACH,CC/HA,IAAAG,GAAwB,SAiCxB,SAASC,GAAQC,EAAyB,CACxCA,EAAG,aAAa,kBAAmB,EAAE,EACrC,IAAMC,EAAOD,EAAG,UAChB,OAAAA,EAAG,gBAAgB,iBAAiB,EAC7BC,CACT,CAWO,SAASC,GACd,CAAE,OAAAC,CAAO,EACH,CACF,GAAAC,QAAY,YAAY,GAC1B,IAAIC,EAA8BC,GAAc,CAC9C,IAAI,GAAAF,QAAY,iDAAkD,CAChE,KAAMJ,GACJA,EAAG,aAAa,qBAAqB,GACrCD,GAAQQ,EACNP,EAAG,aAAa,uBAAuB,CACzC,CAAC,CAEL,CAAC,EACE,GAAG,UAAWQ,GAAMF,EAAW,KAAKE,CAAE,CAAC,CAC5C,CAAC,EACE,KAC
CC,EAAID,GAAM,CACQA,EAAG,QACX,MAAM,CAChB,CAAC,EACDE,EAAI,IAAMC,GAAY,kBAAkB,CAAC,CAC3C,EACG,UAAUR,CAAM,CAEzB,CCrCA,SAASS,GAAWC,EAAwB,CAC1C,GAAIA,EAAK,OAAS,EAChB,MAAO,CAAC,EAAE,EAGZ,GAAM,CAACC,EAAMC,CAAI,EAAI,CAAC,GAAGF,CAAI,EAC1B,KAAK,CAACG,EAAGC,IAAMD,EAAE,OAASC,EAAE,MAAM,EAClC,IAAIC,GAAOA,EAAI,QAAQ,SAAU,EAAE,CAAC,EAGnCC,EAAQ,EACZ,GAAIL,IAASC,EACXI,EAAQL,EAAK,WAEb,MAAOA,EAAK,WAAWK,CAAK,IAAMJ,EAAK,WAAWI,CAAK,GACrDA,IAGJ,OAAON,EAAK,IAAIK,GAAOA,EAAI,QAAQJ,EAAK,MAAM,EAAGK,CAAK,EAAG,EAAE,CAAC,CAC9D,CAaO,SAASC,GAAaC,EAAiC,CAC5D,IAAMC,EAAS,SAAkB,YAAa,eAAgBD,CAAI,EAClE,GAAIC,EACF,OAAOC,EAAGD,CAAM,EACX,CACL,IAAME,EAASC,GAAc,EAC7B,OAAOC,GAAW,IAAI,IAAI,cAAeL,GAAQG,EAAO,IAAI,CAAC,EAC1D,KACCG,EAAIC,GAAWhB,GAAWiB,EAAY,MAAOD,CAAO,EACjD,IAAIE,GAAQA,EAAK,WAAY,CAChC,CAAC,EACDC,GAAW,IAAMC,CAAK,EACtBC,GAAe,CAAC,CAAC,EACjBC,EAAIN,GAAW,SAAS,YAAaA,EAAS,eAAgBP,CAAI,CAAC,CACrE,CACJ,CACF,CCIO,SAASc,GACd,CAAE,UAAAC,EAAW,UAAAC,EAAW,UAAAC,CAAU,EAC5B,CACN,IAAMC,EAASC,GAAc,EAC7B,GAAI,SAAS,WAAa,QACxB,OAGE,sBAAuB,UACzB,QAAQ,kBAAoB,SAG5BC,EAAU,OAAQ,cAAc,EAC7B,UAAU,IAAM,CACf,QAAQ,kBAAoB,MAC9B,CAAC,GAIL,IAAMC,EAAUC,GAAoC,gBAAgB,EAChE,OAAOD,GAAY,cACrBA,EAAQ,KAAOA,EAAQ,MAGzB,IAAME,EAAQC,GAAa,EACxB,KACCC,EAAIC,GAASA,EAAM,IAAIC,GAAQ,GAAG,IAAI,IAAIA,EAAMT,EAAO,IAAI,GAAG,CAAC,EAC/DU,EAAUC,GAAQT,EAAsB,SAAS,KAAM,OAAO,EAC3D,KACCU,EAAOC,GAAM,CAACA,EAAG,SAAW,CAACA,EAAG,OAAO,EACvCH,EAAUG,GAAM,CACd,GAAIA,EAAG,kBAAkB,QAAS,CAChC,IAAMC,EAAKD,EAAG,OAAO,QAAQ,GAAG,EAChC,GAAIC,GAAM,CAACA,EAAG,OAAQ,CACpB,IAAMC,EAAM,IAAI,IAAID,EAAG,IAAI,EAO3B,GAJAC,EAAI,OAAS,GACbA,EAAI,KAAO,GAITA,EAAI,WAAa,SAAS,UAC1BJ,EAAK,SAASI,EAAI,SAAS,CAAC,EAE5B,OAAAF,EAAG,eAAe,EACXG,EAAG,CACR,IAAK,IAAI,IAAIF,EAAG,IAAI,CACtB,CAAC,CAEL,CACF,CACA,OAAOG,EACT,CAAC,CACH,CACF,EACAC,GAAoB,CACtB,EAGIC,EAAOjB,EAAyB,OAAQ,UAAU,EACrD,KACCU,EAAOC,GAAMA,EAAG,QAAU,IAAI,EAC9BN,EAAIM,IAAO,CACT,IAAK,IAAI,IAAI,SAAS,IAAI,EAC1B,OAAQA,EAAG,KACb,EAAE,EACFK,GAAoB,CACtB,EAGFE,EAAMf,EAAOc,CAAI,EACd,KACCE,EAAqB,CAACC,EAAGC,IAAMD,EAAE,IAAI,OAASC,EAAE,IAAI,IAAI,EACxDhB,EAAI,CAAC,CAAE,IAAAQ,CAAI,IAAMA,CAAG,CACtB,EACG,UAAUjB,CAAS,EAGxB,IAAM0B,EAAY1B,EACf,KACC2B,EAAwB,UAAU,EAClCf,EAAUK,GAAOW,GAAQX,EAAI,IAAI,EAC9B,KACCY,GAAW,KACTC,GAAYb,CAAG,EACRE,GACR,CACH,CACF,EACAC,GAAM,CACR,EAGFb,EACG,KACCwB,GAAOL,CAAS,CAClB,EACG,UAAU,CAAC,CAAE,IAAAT,CAAI,IAAM,CACtB,QAAQ,UAAU,CAAC,EAAG,GAAI,GAAGA,GAAK,CACpC,CAAC,EAGL,IAAMe,EAAM,IAAI,UAChBN,EACG,KACCd,EAAUqB,GAAOA,EAAI,KAAK,CAAC,EAC3BxB,EAAIwB,GAAOD,EAAI,gBAAgBC,EAAK,WAAW,CAAC,CAClD,EACG,UAAUlC,CAAS,EAGxBA,EACG,KACCmC,GAAK,CAAC,CACR,EACG,UAAUC,GAAe,CACxB,QAAWC,IAAY,CAGrB,QACA,sBACA,oBACA,yBAGA,+BACA,gCACA,mCACA,+BACA,2BACA,2BACA,GAAGC,EAAQ,wBAAwB,EAC/B,CAAC,0BAA0B,EAC3B,CAAC,CACP,EAAG,CACD,IAAMC,EAAShC,GAAmB8B,CAAQ,EACpCG,EAASjC,GAAmB8B,EAAUD,CAAW,EAErD,OAAOG,GAAW,aAClB,OAAOC,GAAW,aAElBD,EAAO,YAAYC,CAAM,CAE7B,CACF,CAAC,EAGLxC,EACG,KACCmC,GAAK,CAAC,EACNzB,EAAI,IAAM+B,GAAoB,WAAW,CAAC,EAC1C5B,EAAUI,GAAMyB,EAAY,SAAUzB,CAAE,CAAC,EACzC0B,GAAU1B,GAAM,CACd,IAAM2B,EAASC,EAAE,QAAQ,EACzB,GAAI5B,EAAG,IAAK,CACV,QAAW6B,KAAQ7B,EAAG,kBAAkB,EACtC2B,EAAO,aAAaE,EAAM7B,EAAG,aAAa6B,CAAI,CAAE,EAClD,OAAA7B,EAAG,YAAY2B,CAAM,EAGd,IAAIG,EAAWC,GAAY,CAChCJ,EAAO,OAAS,IAAMI,EAAS,SAAS,CAC1C,CAAC,CAGH,KACE,QAAAJ,EAAO,YAAc3B,EAAG,YACxBA,EAAG,YAAY2B,CAAM,EACdK,CAEX,CAAC,CACH,EACG,UAAU,EAGf1B,EAAMf,EAAOc,CAAI,EACd,KACCU,GAAOhC,CAAS,CAClB,EACG,UAAU,CAAC,CAAE,IAAAkB,EAAK,OAAAgC,CAAO,IAAM,CAC1BhC,EAAI,MAAQ,CAACgC,EACfC,GAAgBjC,EAAI,IAAI,EAExB,OAAO,SAAS,GAAGgC,GAAA,YAAAA,EAAQ,IAAK,CAAC,CAErC,CAAC,EAGLhD,EACG,KACCkD,GAAU5C,CAAK,EACf6C,GAAa,GAAG,EAChBzB,EAAwB,QAAQ,CAClC,EACG,UAAU,CAAC,CAAE,OAAAsB,CAAO,IAAM,CACzB,
QAAQ,aAAaA,EAAQ,EAAE,CACjC,CAAC,EAGL3B,EAAMf,EAAOc,CAAI,EACd,KACCgC,GAAY,EAAG,CAAC,EAChBvC,EAAO,CAAC,CAACU,EAAGC,CAAC,IAAMD,EAAE,IAAI,WAAaC,EAAE,IAAI,QAAQ,EACpDhB,EAAI,CAAC,CAAC,CAAE6C,CAAK,IAAMA,CAAK,CAC1B,EACG,UAAU,CAAC,CAAE,OAAAL,CAAO,IAAM,CACzB,OAAO,SAAS,GAAGA,GAAA,YAAAA,EAAQ,IAAK,CAAC,CACnC,CAAC,CACP,CCzSA,IAAAM,GAAuB,SCAvB,IAAAC,GAAuB,SAsChB,SAASC,GACdC,EAA2BC,EACD,CAC1B,IAAMC,EAAY,IAAI,OAAOF,EAAO,UAAW,KAAK,EAC9CG,EAAY,CAACC,EAAYC,EAAcC,IACpC,GAAGD,4BAA+BC,WAI3C,OAAQC,GAAkB,CACxBA,EAAQA,EACL,QAAQ,gBAAiB,GAAG,EAC5B,KAAK,EAGR,IAAMC,EAAQ,IAAI,OAAO,MAAMR,EAAO,cACpCO,EACG,QAAQ,uBAAwB,MAAM,EACtC,QAAQL,EAAW,GAAG,KACtB,KAAK,EAGV,OAAOO,IACLR,KACI,GAAAS,SAAWD,CAAK,EAChBA,GAED,QAAQD,EAAOL,CAAS,EACxB,QAAQ,8BAA+B,IAAI,CAClD,CACF,CC9BO,SAASQ,GAAiBC,EAAuB,CACtD,OAAOA,EACJ,MAAM,YAAY,EAChB,IAAI,CAACC,EAAOC,IAAUA,EAAQ,EAC3BD,EAAM,QAAQ,+BAAgC,IAAI,EAClDA,CACJ,EACC,KAAK,EAAE,EACT,QAAQ,kCAAmC,EAAE,EAC7C,KAAK,CACV,CCoCO,SAASE,GACdC,EAC+B,CAC/B,OAAOA,EAAQ,OAAS,CAC1B,CASO,SAASC,GACdD,EAC+B,CAC/B,OAAOA,EAAQ,OAAS,CAC1B,CASO,SAASE,GACdF,EACgC,CAChC,OAAOA,EAAQ,OAAS,CAC1B,CCvEA,SAASG,GAAiB,CAAE,OAAAC,EAAQ,KAAAC,CAAK,EAA6B,CAGhED,EAAO,KAAK,SAAW,GAAKA,EAAO,KAAK,KAAO,OACjDA,EAAO,KAAO,CACZE,GAAY,oBAAoB,CAClC,GAGEF,EAAO,YAAc,cACvBA,EAAO,UAAYE,GAAY,yBAAyB,GAQ1D,IAAMC,EAAyB,CAC7B,SANeD,GAAY,wBAAwB,EAClD,MAAM,SAAS,EACf,OAAO,OAAO,EAKf,YAAaE,EAAQ,gBAAgB,CACvC,EAGA,MAAO,CAAE,OAAAJ,EAAQ,KAAAC,EAAM,QAAAE,CAAQ,CACjC,CAkBO,SAASE,GACdC,EAAaC,EACC,CACd,IAAMP,EAASQ,GAAc,EACvBC,EAAS,IAAI,OAAOH,CAAG,EAGvBI,EAAM,IAAIC,EACVC,EAAMC,GAAYJ,EAAQ,CAAE,IAAAC,CAAI,CAAC,EACpC,KACCI,EAAIC,GAAW,CACb,GAAIC,GAAsBD,CAAO,EAC/B,QAAWE,KAAUF,EAAQ,KAAK,MAChC,QAAWG,KAAYD,EACrBC,EAAS,SAAW,GAAG,IAAI,IAAIA,EAAS,SAAUlB,EAAO,IAAI,IAEnE,OAAOe,CACT,CAAC,EACDI,GAAM,CACR,EAGF,OAAAC,GAAKb,CAAK,EACP,KACCO,EAAIO,IAAS,CACX,OACA,KAAMtB,GAAiBsB,CAAI,CAC7B,EAAwB,CAC1B,EACG,UAAUX,EAAI,KAAK,KAAKA,CAAG,CAAC,EAG1B,CAAE,IAAAA,EAAK,IAAAE,CAAI,CACpB,CCvEO,SAASU,GACd,CAAE,UAAAC,CAAU,EACN,CACN,IAAMC,EAASC,GAAc,EACvBC,EAAYC,GAChB,IAAI,IAAI,mBAAoBH,EAAO,IAAI,CACzC,EACG,KACCI,GAAW,IAAMC,CAAK,CACxB,EAGIC,EAAWJ,EACd,KACCK,EAAIC,GAAY,CACd,GAAM,CAAC,CAAEC,CAAO,EAAIT,EAAO,KAAK,MAAM,aAAa,EACnD,OAAOQ,EAAS,KAAK,CAAC,CAAE,QAAAE,EAAS,QAAAC,CAAQ,IACvCD,IAAYD,GAAWE,EAAQ,SAASF,CAAO,CAChD,GAAKD,EAAS,EACjB,CAAC,CACH,EAGFN,EACG,KACCK,EAAIC,GAAY,IAAI,IAAIA,EAAS,IAAIE,GAAW,CAC9C,GAAG,IAAI,IAAI,MAAMA,EAAQ,WAAYV,EAAO,IAAI,IAChDU,CACF,CAAC,CAAC,CAAC,EACHE,EAAUC,GAAQC,EAAsB,SAAS,KAAM,OAAO,EAC3D,KACCC,EAAOC,GAAM,CAACA,EAAG,SAAW,CAACA,EAAG,OAAO,EACvCC,GAAeX,CAAQ,EACvBM,EAAU,CAAC,CAACI,EAAIP,CAAO,IAAM,CAC3B,GAAIO,EAAG,kBAAkB,QAAS,CAChC,IAAME,EAAKF,EAAG,OAAO,QAAQ,GAAG,EAChC,GAAIE,GAAM,CAACA,EAAG,QAAUL,EAAK,IAAIK,EAAG,IAAI,EAAG,CACzC,IAAMC,EAAMD,EAAG,KAWf,MAAI,CAACF,EAAG,OAAO,QAAQ,aAAa,GAClBH,EAAK,IAAIM,CAAG,IACZV,EACPJ,GAEXW,EAAG,eAAe,EACXI,EAAGD,CAAG,EACf,CACF,CACA,OAAOd,CACT,CAAC,EACDO,EAAUO,GAAO,CACf,GAAM,CAAE,QAAAT,CAAQ,EAAIG,EAAK,IAAIM,CAAG,EAChC,OAAOE,GAAa,IAAI,IAAIF,CAAG,CAAC,EAC7B,KACCZ,EAAIe,GAAW,CAEb,IAAMC,EADWC,GAAY,EACP,KAAK,QAAQxB,EAAO,KAAM,EAAE,EAClD,OAAOsB,EAAQ,SAASC,EAAK,MAAM,GAAG,EAAE,EAAE,EACtC,IAAI,IAAI,MAAMb,KAAWa,IAAQvB,EAAO,IAAI,EAC5C,IAAI,IAAImB,CAAG,CACjB,CAAC,CACH,CACJ,CAAC,CACH,CACF,CACF,EACG,UAAUA,GAAOM,GAAYN,CAAG,CAAC,EAGtCO,EAAc,CAACxB,EAAWI,CAAQ,CAAC,EAChC,UAAU,CAAC,CAACE,EAAUC,CAAO,IAAM,CACpBkB,EAAW,mBAAmB,EACtC,YAAYC,GAAsBpB,EAAUC,CAAO,CAAC,CAC5D,CAAC,EAGHV,EAAU,KAAKa,EAAU,IAAMN,CAAQ,CAAC,EACrC,UAAUG,GAAW,CA5J1B,IAAAoB,EA+JM,IAAIC,EAAW,SAAS,aAAc,cAAc,EACpD,GAAIA,IAAa,KAAM,CACrB,IAAMC,IAASF,EAAA7B,EAAO,UAAP,YAAA6B,EAAgB,UAAW,SAC1CC,EAAW,CAACrB,EAAQ,QAAQ,SAASsB,CAAM,EAG3C,SAAS
,aAAcD,EAAU,cAAc,CACjD,CAGA,GAAIA,EACF,QAAWE,KAAWC,GAAqB,UAAU,EACnDD,EAAQ,OAAS,EACvB,CAAC,CACL,CCtFO,SAASE,GACdC,EAAsB,CAAE,IAAAC,CAAI,EACH,CACzB,IAAMC,GAAK,+BAAU,YAAaC,GAG5B,CAAE,aAAAC,CAAa,EAAIC,GAAY,EACjCD,EAAa,IAAI,GAAG,GACtBE,GAAU,SAAU,EAAI,EAG1B,IAAMC,EAASN,EACZ,KACCO,EAAOC,EAAoB,EAC3BC,GAAK,CAAC,EACNC,EAAI,IAAMP,EAAa,IAAI,GAAG,GAAK,EAAE,CACvC,EAGFQ,GAAY,QAAQ,EACjB,KACCJ,EAAOK,GAAU,CAACA,CAAM,EACxBH,GAAK,CAAC,CACR,EACG,UAAU,IAAM,CACf,IAAMI,EAAM,IAAI,IAAI,SAAS,IAAI,EACjCA,EAAI,aAAa,OAAO,GAAG,EAC3B,QAAQ,aAAa,CAAC,EAAG,GAAI,GAAGA,GAAK,CACvC,CAAC,EAGLP,EAAO,UAAUQ,GAAS,CACpBA,IACFf,EAAG,MAAQe,EACXf,EAAG,MAAM,EAEb,CAAC,EAGD,IAAMgB,EAASC,GAAkBjB,CAAE,EAC7BkB,EAASC,EACbC,EAAUpB,EAAI,OAAO,EACrBoB,EAAUpB,EAAI,OAAO,EAAE,KAAKqB,GAAM,CAAC,CAAC,EACpCd,CACF,EACG,KACCI,EAAI,IAAMT,EAAGF,EAAG,KAAK,CAAC,EACtBsB,EAAU,EAAE,EACZC,EAAqB,CACvB,EAGF,OAAOC,EAAc,CAACN,EAAQF,CAAM,CAAC,EAClC,KACCL,EAAI,CAAC,CAACI,EAAOU,CAAK,KAAO,CAAE,MAAAV,EAAO,MAAAU,CAAM,EAAE,EAC1CC,EAAY,CAAC,CACf,CACJ,CAUO,SAASC,GACd3B,EAAsB,CAAE,IAAA4B,EAAK,IAAA3B,CAAI,EACqB,CACtD,IAAM4B,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EAGpC,OAAAH,EACG,KACCI,EAAwB,OAAO,EAC/BtB,EAAI,CAAC,CAAE,MAAAI,CAAM,KAA2B,CACtC,OACA,KAAMA,CACR,EAAE,CACJ,EACG,UAAUa,EAAI,KAAK,KAAKA,CAAG,CAAC,EAGjCC,EACG,KACCI,EAAwB,OAAO,CACjC,EACG,UAAU,CAAC,CAAE,MAAAR,CAAM,IAAM,CACpBA,GACFnB,GAAU,SAAUmB,CAAK,EACzBzB,EAAG,YAAc,IAEjBA,EAAG,YAAckC,GAAY,oBAAoB,CAErD,CAAC,EAGLd,EAAUpB,EAAG,KAAO,OAAO,EACxB,KACCmC,GAAUJ,CAAK,CACjB,EACG,UAAU,IAAM/B,EAAG,MAAM,CAAC,EAGxBD,GAAiBC,EAAI,CAAE,IAAA4B,EAAK,IAAA3B,CAAI,CAAC,EACrC,KACCmC,EAAIC,GAASR,EAAM,KAAKQ,CAAK,CAAC,EAC9BC,EAAS,IAAMT,EAAM,SAAS,CAAC,EAC/BlB,EAAI0B,GAAUE,EAAA,CAAE,IAAKvC,GAAOqC,EAAQ,EACpCG,GAAM,CACR,CACJ,CCrHO,SAASC,GACdC,EAAiB,CAAE,IAAAC,CAAI,EAAiB,CAAE,OAAAC,CAAO,EACZ,CACrC,IAAMC,EAAQ,IAAIC,EACZC,EAAYC,GAAqBN,EAAG,aAAc,EACrD,KACCO,EAAO,OAAO,CAChB,EAGIC,EAAOC,EAAW,wBAAyBT,CAAE,EAC7CU,EAAOD,EAAW,uBAAwBT,CAAE,EAG5CW,EAASV,EACZ,KACCM,EAAOK,EAAoB,EAC3BC,GAAK,CAAC,CACR,EAGF,OAAAV,EACG,KACCW,GAAeZ,CAAM,EACrBa,GAAUJ,CAAM,CAClB,EACG,UAAU,CAAC,CAAC,CAAE,MAAAK,CAAM,EAAG,CAAE,MAAAC,CAAM,CAAC,IAAM,CACrC,GAAIA,EACF,OAAQD,EAAM,OAAQ,CAGpB,IAAK,GACHR,EAAK,YAAcU,GAAY,oBAAoB,EACnD,MAGF,IAAK,GACHV,EAAK,YAAcU,GAAY,mBAAmB,EAClD,MAGF,QACEV,EAAK,YAAcU,GACjB,sBACAC,GAAMH,EAAM,MAAM,CACpB,CACJ,MAEAR,EAAK,YAAcU,GAAY,2BAA2B,CAE9D,CAAC,EAGLf,EACG,KACCiB,EAAI,IAAMV,EAAK,UAAY,EAAE,EAC7BW,EAAU,CAAC,CAAE,MAAAL,CAAM,IAAMM,EACvBC,EAAG,GAAGP,EAAM,MAAM,EAAG,EAAE,CAAC,EACxBO,EAAG,GAAGP,EAAM,MAAM,EAAE,CAAC,EAClB,KACCQ,GAAY,CAAC,EACbC,GAAQpB,CAAS,EACjBgB,EAAU,CAAC,CAACK,CAAK,IAAMA,CAAK,CAC9B,CACJ,CAAC,CACH,EACG,UAAUC,GAAUjB,EAAK,YACxBkB,GAAuBD,CAAM,CAC/B,CAAC,EAGW1B,EACb,KACCM,EAAOsB,EAAqB,EAC5BC,EAAI,CAAC,CAAE,KAAAC,CAAK,IAAMA,CAAI,CACxB,EAIC,KACCX,EAAIY,GAAS7B,EAAM,KAAK6B,CAAK,CAAC,EAC9BC,EAAS,IAAM9B,EAAM,SAAS,CAAC,EAC/B2B,EAAIE,GAAUE,EAAA,CAAE,IAAKlC,GAAOgC,EAAQ,CACtC,CACJ,CC1FO,SAASG,GACdC,EAAkB,CAAE,OAAAC,CAAO,EACF,CACzB,OAAOA,EACJ,KACCC,EAAI,CAAC,CAAE,MAAAC,CAAM,IAAM,CACjB,IAAMC,EAAMC,GAAY,EACxB,OAAAD,EAAI,KAAO,GACXA,EAAI,aAAa,OAAO,GAAG,EAC3BA,EAAI,aAAa,IAAI,IAAKD,CAAK,EACxB,CAAE,IAAAC,CAAI,CACf,CAAC,CACH,CACJ,CAUO,SAASE,GACdC,EAAuBC,EACa,CACpC,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,IAAAL,CAAI,IAAM,CAC3BG,EAAG,aAAa,sBAAuBA,EAAG,IAAI,EAC9CA,EAAG,KAAO,GAAGH,GACf,CAAC,EAGDO,EAAUJ,EAAI,OAAO,EAClB,UAAUK,GAAMA,EAAG,eAAe,CAAC,EAG/Bb,GAAiBQ,EAAIC,CAAO,EAChC,KACCK,EAAIC,GAASL,EAAM,KAAKK,CAAK,CAAC,EAC9BC,EAAS,IAAMN,EAAM,SAAS,CAAC,EAC/BP,EAAIY,GAAUE,EAAA,CAAE,IAAKT,GAAOO,EAAQ,CACtC,CACJ,CCtCO,SAASG,GACdC,EAAiB,CAAE,IAAAC,CAAI,EAAiB,CAAE,UAAAC,CAAU,EACd,CACtC
,IAAMC,EAAQ,IAAIC,EAGZC,EAASC,GAAoB,cAAc,EAC3CC,EAASC,EACbC,EAAUJ,EAAO,SAAS,EAC1BI,EAAUJ,EAAO,OAAO,CAC1B,EACG,KACCK,GAAUC,EAAc,EACxBC,EAAI,IAAMP,EAAM,KAAK,EACrBQ,EAAqB,CACvB,EAGF,OAAAV,EACG,KACCW,GAAkBP,CAAM,EACxBK,EAAI,CAAC,CAAC,CAAE,YAAAG,CAAY,EAAGC,CAAK,IAAM,CAChC,IAAMC,EAAQD,EAAM,MAAM,UAAU,EACpC,IAAID,GAAA,YAAAA,EAAa,SAAUE,EAAMA,EAAM,OAAS,GAAI,CAClD,IAAMC,EAAOH,EAAYA,EAAY,OAAS,GAC1CG,EAAK,WAAWD,EAAMA,EAAM,OAAS,EAAE,IACzCA,EAAMA,EAAM,OAAS,GAAKC,EAC9B,MACED,EAAM,OAAS,EAEjB,OAAOA,CACT,CAAC,CACH,EACG,UAAUA,GAASjB,EAAG,UAAYiB,EAChC,KAAK,EAAE,EACP,QAAQ,MAAO,QAAQ,CAC1B,EAGJf,EACG,KACCiB,EAAO,CAAC,CAAE,KAAAC,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,OAAQA,EAAI,KAAM,CAGhB,IAAK,aAEDrB,EAAG,UAAU,QACbK,EAAM,iBAAmBA,EAAM,MAAM,SAErCA,EAAM,MAAQL,EAAG,WACnB,KACJ,CACF,CAAC,EAGWC,EACb,KACCkB,EAAOG,EAAqB,EAC5BV,EAAI,CAAC,CAAE,KAAAW,CAAK,IAAMA,CAAI,CACxB,EAIC,KACCC,EAAIC,GAAStB,EAAM,KAAKsB,CAAK,CAAC,EAC9BC,EAAS,IAAMvB,EAAM,SAAS,CAAC,EAC/BS,EAAI,KAAO,CAAE,IAAKZ,CAAG,EAAE,CACzB,CACJ,CC9CO,SAAS2B,GACdC,EAAiB,CAAE,OAAAC,EAAQ,UAAAC,CAAU,EACN,CAC/B,IAAMC,EAASC,GAAc,EAC7B,GAAI,CACF,IAAMC,GAAM,+BAAU,SAAUF,EAAO,OACjCG,EAASC,GAAkBF,EAAKJ,CAAM,EAGtCO,EAASC,GAAoB,eAAgBT,CAAE,EAC/CU,EAASD,GAAoB,gBAAiBT,CAAE,EAGhD,CAAE,IAAAW,EAAK,IAAAC,CAAI,EAAIN,EACrBK,EACG,KACCE,EAAOC,EAAoB,EAC3BC,GAAOH,EAAI,KAAKC,EAAOG,EAAoB,CAAC,CAAC,EAC7CC,GAAK,CAAC,CACR,EACG,UAAUN,EAAI,KAAK,KAAKA,CAAG,CAAC,EAGjCT,EACG,KACCW,EAAO,CAAC,CAAE,KAAAK,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,IAAMC,EAASC,GAAiB,EAChC,OAAQF,EAAI,KAAM,CAGhB,IAAK,QACH,GAAIC,IAAWZ,EAAO,CACpB,IAAMc,EAAU,IAAI,IACpB,QAAWC,KAAUC,EACnB,sBAAuBd,CACzB,EAAG,CACD,IAAMe,EAAUF,EAAO,kBACvBD,EAAQ,IAAIC,EAAQ,WAClBE,EAAQ,aAAa,eAAe,CACtC,CAAC,CACH,CAGA,GAAIH,EAAQ,KAAM,CAChB,GAAM,CAAC,CAACI,CAAI,CAAC,EAAI,CAAC,GAAGJ,CAAO,EAAE,KAAK,CAAC,CAAC,CAAEK,CAAC,EAAG,CAAC,CAAEC,CAAC,IAAMA,EAAID,CAAC,EAC1DD,EAAK,MAAM,CACb,CAGAP,EAAI,MAAM,CACZ,CACA,MAGF,IAAK,SACL,IAAK,MACHU,GAAU,SAAU,EAAK,EACzBrB,EAAM,KAAK,EACX,MAGF,IAAK,UACL,IAAK,YACH,GAAI,OAAOY,GAAW,YACpBZ,EAAM,MAAM,MACP,CACL,IAAMsB,EAAM,CAACtB,EAAO,GAAGgB,EACrB,wDACAd,CACF,CAAC,EACKqB,EAAI,KAAK,IAAI,GACjB,KAAK,IAAI,EAAGD,EAAI,QAAQV,CAAM,CAAC,EAAIU,EAAI,QACrCX,EAAI,OAAS,UAAY,GAAK,IAE9BW,EAAI,MAAM,EACdA,EAAIC,GAAG,MAAM,CACf,CAGAZ,EAAI,MAAM,EACV,MAGF,QACMX,IAAUa,GAAiB,GAC7Bb,EAAM,MAAM,CAClB,CACF,CAAC,EAGLN,EACG,KACCW,EAAO,CAAC,CAAE,KAAAK,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,OAAQA,EAAI,KAAM,CAGhB,IAAK,IACL,IAAK,IACL,IAAK,IACHX,EAAM,MAAM,EACZA,EAAM,OAAO,EAGbW,EAAI,MAAM,EACV,KACJ,CACF,CAAC,EAGL,IAAMa,EAAUC,GAAiBzB,EAAOF,CAAM,EACxC4B,EAAUC,GAAkBzB,EAAQJ,EAAQ,CAAE,OAAA0B,CAAO,CAAC,EAC5D,OAAOI,EAAMJ,EAAQE,CAAO,EACzB,KACCG,GAGE,GAAGC,GAAqB,eAAgBtC,CAAE,EACvC,IAAIuC,GAASC,GAAiBD,EAAO,CAAE,OAAAP,CAAO,CAAC,CAAC,EAGnD,GAAGM,GAAqB,iBAAkBtC,CAAE,EACzC,IAAIuC,GAASE,GAAmBF,EAAOjC,EAAQ,CAAE,UAAAJ,CAAU,CAAC,CAAC,CAClE,CACF,CAGJ,OAASwC,EAAP,CACA,OAAA1C,EAAG,OAAS,GACL2C,EACT,CACF,CCtKO,SAASC,GACdC,EAAiB,CAAE,OAAAC,EAAQ,UAAAC,CAAU,EACG,CACxC,OAAOC,EAAc,CACnBF,EACAC,EACG,KACCE,EAAUC,GAAY,CAAC,EACvBC,EAAOC,GAAO,CAAC,CAACA,EAAI,aAAa,IAAI,GAAG,CAAC,CAC3C,CACJ,CAAC,EACE,KACCC,EAAI,CAAC,CAACC,EAAOF,CAAG,IAAMG,GAAuBD,EAAM,OAAQ,EAAI,EAC7DF,EAAI,aAAa,IAAI,GAAG,CAC1B,CAAC,EACDC,EAAIG,GAAM,CA1FhB,IAAAC,EA2FQ,IAAMC,EAAQ,IAAI,IAGZC,EAAK,SAAS,mBAAmBd,EAAI,WAAW,SAAS,EAC/D,QAASe,EAAOD,EAAG,SAAS,EAAGC,EAAMA,EAAOD,EAAG,SAAS,EACtD,IAAIF,EAAAG,EAAK,gBAAL,MAAAH,EAAoB,aAAc,CACpC,IAAMI,EAAWD,EAAK,YAChBE,EAAWN,EAAGK,CAAQ,EACxBC,EAAS,OAASD,EAAS,QAC7BH,EAAM,IAAIE,EAAmBE,CAAQ,CACzC,CAIF,OAAW,CAACF,EAAMG,CAAI,IAAKL,EAAO,CAChC,GAAM,CAAE,WAAAM,CAAW,EAAIC,EAAE,OAAQ,KAAMF,CAAI,EAC
3CH,EAAK,YAAY,GAAG,MAAM,KAAKI,CAAU,CAAC,CAC5C,CAGA,MAAO,CAAE,IAAKnB,EAAI,MAAAa,CAAM,CAC1B,CAAC,CACH,CACJ,CCbO,SAASQ,GACdC,EAAiB,CAAE,UAAAC,EAAW,MAAAC,CAAM,EACf,CACrB,IAAMC,EAASH,EAAG,cACZI,EACJD,EAAO,UACPA,EAAO,cAAe,UAGxB,OAAOE,EAAc,CAACH,EAAOD,CAAS,CAAC,EACpC,KACCK,EAAI,CAAC,CAAC,CAAE,OAAAC,EAAQ,OAAAC,CAAO,EAAG,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,CAAC,KACzCD,EAASA,EACL,KAAK,IAAIJ,EAAQ,KAAK,IAAI,EAAGK,EAAIF,CAAM,CAAC,EACxCH,EACG,CACL,OAAAI,EACA,OAAQC,GAAKF,EAASH,CACxB,EACD,EACDM,EAAqB,CAACC,EAAGC,IACvBD,EAAE,SAAWC,EAAE,QACfD,EAAE,SAAWC,EAAE,MAChB,CACH,CACJ,CAuBO,SAASC,GACdb,EAAiBc,EACe,CADf,IAAAC,EAAAD,EAAE,SAAAE,CAtJrB,EAsJmBD,EAAcE,EAAAC,GAAdH,EAAc,CAAZ,YAEnB,IAAMI,EAAQC,EAAW,0BAA2BpB,CAAE,EAChD,CAAE,EAAAS,CAAE,EAAIY,GAAiBF,CAAK,EACpC,OAAOG,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EACG,KACCE,GAAU,EAAGC,EAAuB,EACpCC,GAAeX,CAAO,CACxB,EACG,UAAU,CAGT,KAAK,CAAC,CAAE,OAAAR,CAAO,EAAG,CAAE,OAAQD,CAAO,CAAC,EAAG,CACrCY,EAAM,MAAM,OAAS,GAAGX,EAAS,EAAIC,MACrCT,EAAG,MAAM,IAAY,GAAGO,KAC1B,EAGA,UAAW,CACTY,EAAM,MAAM,OAAS,GACrBnB,EAAG,MAAM,IAAY,EACvB,CACF,CAAC,EAGLuB,EACG,KACCK,GAAUF,EAAuB,EACjCG,GAAK,CAAC,CACR,EACG,UAAU,IAAM,CACf,QAAWC,KAAQC,EAAY,8BAA+B/B,CAAE,EAAG,CACjE,IAAMgC,EAAYC,GAAoBH,CAAI,EAC1C,GAAI,OAAOE,GAAc,YAAa,CACpC,IAAMzB,EAASuB,EAAK,UAAYE,EAAU,UACpC,CAAE,OAAAxB,CAAO,EAAI0B,GAAeF,CAAS,EAC3CA,EAAU,SAAS,CACjB,IAAKzB,EAASC,EAAS,CACzB,CAAC,CACH,CACF,CACF,CAAC,EAGET,GAAaC,EAAIiB,CAAO,EAC5B,KACCkB,EAAIC,GAASb,EAAM,KAAKa,CAAK,CAAC,EAC9BC,EAAS,IAAMd,EAAM,SAAS,CAAC,EAC/BjB,EAAI8B,GAAUE,EAAA,CAAE,IAAKtC,GAAOoC,EAAQ,CACtC,CACJ,CAAC,CACH,CChJO,SAASG,GACdC,EAAcC,EACW,CACzB,GAAI,OAAOA,GAAS,YAAa,CAC/B,IAAMC,EAAM,gCAAgCF,KAAQC,IACpD,OAAOE,GAGLC,GAAqB,GAAGF,mBAAqB,EAC1C,KACCG,GAAW,IAAMC,CAAK,EACtBC,EAAIC,IAAY,CACd,QAASA,EAAQ,QACnB,EAAE,EACFC,GAAe,CAAC,CAAC,CACnB,EAGFL,GAAkBF,CAAG,EAClB,KACCG,GAAW,IAAMC,CAAK,EACtBC,EAAIG,IAAS,CACX,MAAOA,EAAK,iBACZ,MAAOA,EAAK,WACd,EAAE,EACFD,GAAe,CAAC,CAAC,CACnB,CACJ,EACG,KACCF,EAAI,CAAC,CAACC,EAASE,CAAI,IAAOC,IAAA,GAAKH,GAAYE,EAAO,CACpD,CAGJ,KAAO,CACL,IAAMR,EAAM,gCAAgCF,IAC5C,OAAOI,GAAkBF,CAAG,EACzB,KACCK,EAAIG,IAAS,CACX,aAAcA,EAAK,YACrB,EAAE,EACFD,GAAe,CAAC,CAAC,CACnB,CACJ,CACF,CCvDO,SAASG,GACdC,EAAcC,EACW,CACzB,IAAMC,EAAM,WAAWF,qBAAwB,mBAAmBC,CAAO,IACzE,OAAOE,GAA2BD,CAAG,EAClC,KACCE,GAAW,IAAMC,CAAK,EACtBC,EAAI,CAAC,CAAE,WAAAC,EAAY,YAAAC,CAAY,KAAO,CACpC,MAAOD,EACP,MAAOC,CACT,EAAE,EACFC,GAAe,CAAC,CAAC,CACnB,CACJ,CCOO,SAASC,GACdC,EACyB,CAGzB,IAAIC,EAAQD,EAAI,MAAM,qCAAqC,EAC3D,GAAIC,EAAO,CACT,GAAM,CAAC,CAAEC,EAAMC,CAAI,EAAIF,EACvB,OAAOG,GAA2BF,EAAMC,CAAI,CAC9C,CAIA,GADAF,EAAQD,EAAI,MAAM,oCAAoC,EAClDC,EAAO,CACT,GAAM,CAAC,CAAEI,EAAMC,CAAI,EAAIL,EACvB,OAAOM,GAA2BF,EAAMC,CAAI,CAC9C,CAGA,OAAOE,CACT,CCpBA,IAAIC,GAgBG,SAASC,GACdC,EACoB,CACpB,OAAOF,QAAWG,EAAM,IAAM,CAC5B,IAAMC,EAAS,SAAsB,WAAY,cAAc,EAC/D,GAAIA,EACF,OAAOC,EAAGD,CAAM,EAKhB,GADYE,GAAqB,SAAS,EAClC,OAAQ,CACd,IAAMC,EAAU,SAA0B,WAAW,EACrD,GAAI,EAAEA,GAAWA,EAAQ,QACvB,OAAOC,CACX,CAGA,OAAOC,GAAiBP,EAAG,IAAI,EAC5B,KACCQ,EAAIC,GAAS,SAAS,WAAYA,EAAO,cAAc,CAAC,CAC1D,CAEN,CAAC,EACE,KACCC,GAAW,IAAMJ,CAAK,EACtBK,EAAOF,GAAS,OAAO,KAAKA,CAAK,EAAE,OAAS,CAAC,EAC7CG,EAAIH,IAAU,CAAE,MAAAA,CAAM,EAAE,EACxBI,EAAY,CAAC,CACf,EACJ,CASO,SAASC,GACdd,EAC+B,CAC/B,IAAMe,EAAQC,EAAW,uBAAwBhB,CAAE,EACnD,OAAOC,EAAM,IAAM,CACjB,IAAMgB,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAAC,CAAE,MAAAR,CAAM,IAAM,CAC7BM,EAAM,YAAYI,GAAkBV,CAAK,CAAC,EAC1CM,EAAM,UAAU,IAAI,+BAA+B,CACrD,CAAC,EAGMhB,GAAYC,CAAE,EAClB,KACCQ,EAAIY,GAASH,EAAM,KAAKG,CAAK,CAAC,EAC9BC,EAAS,IAAMJ,EAAM,SAAS,CAAC,EAC/BL,EAAIQ,GAAUE,EAAA,CAAE,IAAKtB,GAAOoB,EAAQ,CACtC,CACJ,CAAC,CACH,CCtDO,SAASG,GACdC,EAAiB,CAAE,UAAAC,EA
AW,QAAAC,CAAQ,EACpB,CAClB,OAAOC,GAAiB,SAAS,IAAI,EAClC,KACCC,EAAU,IAAMC,GAAgBL,EAAI,CAAE,QAAAE,EAAS,UAAAD,CAAU,CAAC,CAAC,EAC3DK,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,KACZ,CACL,OAAQA,GAAK,EACf,EACD,EACDC,EAAwB,QAAQ,CAClC,CACJ,CAaO,SAASC,GACdT,EAAiBU,EACY,CAC7B,OAAOC,EAAM,IAAM,CACjB,IAAMC,EAAQ,IAAIC,EAClB,OAAAD,EAAM,UAAU,CAGd,KAAK,CAAE,OAAAE,CAAO,EAAG,CACfd,EAAG,OAASc,CACd,EAGA,UAAW,CACTd,EAAG,OAAS,EACd,CACF,CAAC,GAICe,EAAQ,wBAAwB,EAC5BC,EAAG,CAAE,OAAQ,EAAM,CAAC,EACpBjB,GAAUC,EAAIU,CAAO,GAExB,KACCO,EAAIC,GAASN,EAAM,KAAKM,CAAK,CAAC,EAC9BC,EAAS,IAAMP,EAAM,SAAS,CAAC,EAC/BN,EAAIY,GAAUE,EAAA,CAAE,IAAKpB,GAAOkB,EAAQ,CACtC,CACJ,CAAC,CACH,CCpBO,SAASG,GACdC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACT,CAC7B,IAAMC,EAAQ,IAAI,IAGZC,EAAUC,EAA+B,cAAeL,CAAE,EAChE,QAAWM,KAAUF,EAAS,CAC5B,IAAMG,EAAK,mBAAmBD,EAAO,KAAK,UAAU,CAAC,CAAC,EAChDE,EAASC,GAAmB,QAAQF,KAAM,EAC5C,OAAOC,GAAW,aACpBL,EAAM,IAAIG,EAAQE,CAAM,CAC5B,CAGA,IAAME,EAAUR,EACb,KACCS,EAAwB,QAAQ,EAChCC,EAAI,CAAC,CAAE,OAAAC,CAAO,IAAM,CAClB,IAAMC,EAAOC,GAAoB,MAAM,EACjCC,EAAOC,EAAW,wBAAyBH,CAAI,EACrD,OAAOD,EAAS,IACdG,EAAK,UACLF,EAAK,UAET,CAAC,EACDI,GAAM,CACR,EAgFF,OA7EmBC,GAAiB,SAAS,IAAI,EAC9C,KACCR,EAAwB,QAAQ,EAGhCS,EAAUC,GAAQC,EAAM,IAAM,CAC5B,IAAIC,EAA4B,CAAC,EACjC,OAAOC,EAAG,CAAC,GAAGrB,CAAK,EAAE,OAAO,CAACsB,EAAO,CAACnB,EAAQE,CAAM,IAAM,CACvD,KAAOe,EAAK,QACGpB,EAAM,IAAIoB,EAAKA,EAAK,OAAS,EAAE,EACnC,SAAWf,EAAO,SACzBe,EAAK,IAAI,EAOb,IAAIG,EAASlB,EAAO,UACpB,KAAO,CAACkB,GAAUlB,EAAO,eACvBA,EAASA,EAAO,cAChBkB,EAASlB,EAAO,UAIlB,OAAOiB,EAAM,IACX,CAAC,GAAGF,EAAO,CAAC,GAAGA,EAAMjB,CAAM,CAAC,EAAE,QAAQ,EACtCoB,CACF,CACF,EAAG,IAAI,GAAkC,CAAC,CAC5C,CAAC,EACE,KAGCd,EAAIa,GAAS,IAAI,IAAI,CAAC,GAAGA,CAAK,EAAE,KAAK,CAAC,CAAC,CAAEE,CAAC,EAAG,CAAC,CAAEC,CAAC,IAAMD,EAAIC,CAAC,CAAC,CAAC,EAC9DC,GAAkBnB,CAAO,EAGzBU,EAAU,CAAC,CAACK,EAAOK,CAAM,IAAM7B,EAC5B,KACC8B,GAAK,CAAC,CAACC,EAAMC,CAAI,EAAG,CAAE,OAAQ,CAAE,EAAAC,CAAE,EAAG,KAAAC,CAAK,IAAM,CAC9C,IAAMC,EAAOF,EAAIC,EAAK,QAAU,KAAK,MAAMd,EAAK,MAAM,EAGtD,KAAOY,EAAK,QAAQ,CAClB,GAAM,CAAC,CAAEP,CAAM,EAAIO,EAAK,GACxB,GAAIP,EAASI,EAASI,GAAKE,EACzBJ,EAAO,CAAC,GAAGA,EAAMC,EAAK,MAAM,CAAE,MAE9B,MAEJ,CAGA,KAAOD,EAAK,QAAQ,CAClB,GAAM,CAAC,CAAEN,CAAM,EAAIM,EAAKA,EAAK,OAAS,GACtC,GAAIN,EAASI,GAAUI,GAAK,CAACE,EAC3BH,EAAO,CAACD,EAAK,IAAI,EAAI,GAAGC,CAAI,MAE5B,MAEJ,CAGA,MAAO,CAACD,EAAMC,CAAI,CACpB,EAAG,CAAC,CAAC,EAAG,CAAC,GAAGR,CAAK,CAAC,CAAC,EACnBY,EAAqB,CAACV,EAAGC,IACvBD,EAAE,KAAOC,EAAE,IACXD,EAAE,KAAOC,EAAE,EACZ,CACH,CACF,CACF,CACF,CACF,EAIC,KACChB,EAAI,CAAC,CAACoB,EAAMC,CAAI,KAAO,CACrB,KAAMD,EAAK,IAAI,CAAC,CAACT,CAAI,IAAMA,CAAI,EAC/B,KAAMU,EAAK,IAAI,CAAC,CAACV,CAAI,IAAMA,CAAI,CACjC,EAAE,EAGFe,EAAU,CAAE,KAAM,CAAC,EAAG,KAAM,CAAC,CAAE,CAAC,EAChCC,GAAY,EAAG,CAAC,EAChB3B,EAAI,CAAC,CAAC,EAAGgB,CAAC,IAGJ,EAAE,KAAK,OAASA,EAAE,KAAK,OAClB,CACL,KAAMA,EAAE,KAAK,MAAM,KAAK,IAAI,EAAG,EAAE,KAAK,OAAS,CAAC,EAAGA,EAAE,KAAK,MAAM,EAChE,KAAM,CAAC,CACT,EAIO,CACL,KAAMA,EAAE,KAAK,MAAM,EAAE,EACrB,KAAMA,EAAE,KAAK,MAAM,EAAGA,EAAE,KAAK,OAAS,EAAE,KAAK,MAAM,CACrD,CAEH,CACH,CACJ,CAYO,SAASY,GACdxC,EAAiB,CAAE,UAAAC,EAAW,QAAAC,EAAS,QAAAuC,CAAQ,EACP,CACxC,OAAOnB,EAAM,IAAM,CACjB,IAAMoB,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EAoBpC,GAnBAH,EAAM,UAAU,CAAC,CAAE,KAAAV,EAAM,KAAAC,CAAK,IAAM,CAGlC,OAAW,CAAC3B,CAAM,IAAK2B,EACrB3B,EAAO,UAAU,OAAO,sBAAsB,EAC9CA,EAAO,UAAU,OAAO,sBAAsB,EAIhD,OAAW,CAACmB,EAAO,CAACnB,CAAM,CAAC,IAAK0B,EAAK,QAAQ,EAC3C1B,EAAO,UAAU,IAAI,sBAAsB,EAC3CA,EAAO,UAAU,OACf,uBACAmB,IAAUO,EAAK,OAAS,CAC1B,CAEJ,CAAC,EAGGc,EAAQ,YAAY,EAAG,CAGzB,IAAMC,EAAUC,EACd/C,EAAU,KAAKgD,GAAa,CAAC,EAAGrC,EAAI,IAAG,EAAY,CAAC,EACpDX,EAAU,KAAKgD,GAAa,GAAG,EAAGrC,EAAI,IAAM,QAAiB,CAAC,CAChE,EAGA8B,E
ACG,KACCQ,EAAO,CAAC,CAAE,KAAAlB,CAAK,IAAMA,EAAK,OAAS,CAAC,EACpCmB,GAAeJ,CAAO,CACxB,EACG,UAAU,CAAC,CAAC,CAAE,KAAAf,CAAK,EAAGoB,CAAQ,IAAM,CACnC,GAAM,CAAC9C,CAAM,EAAI0B,EAAKA,EAAK,OAAS,GACpC,GAAI1B,EAAO,aAAc,CAGvB,IAAM+C,EAAYC,GAAoBhD,CAAM,EAC5C,GAAI,OAAO+C,GAAc,YAAa,CACpC,IAAM3B,EAASpB,EAAO,UAAY+C,EAAU,UACtC,CAAE,OAAAxC,CAAO,EAAI0C,GAAeF,CAAS,EAC3CA,EAAU,SAAS,CACjB,IAAK3B,EAASb,EAAS,EACvB,SAAAuC,CACF,CAAC,CACH,CACF,CACF,CAAC,CACP,CAGA,OAAIN,EAAQ,qBAAqB,GAC/B7C,EACG,KACCuD,GAAUZ,CAAK,EACfjC,EAAwB,QAAQ,EAChCsC,GAAa,GAAG,EAChBQ,GAAK,CAAC,EACND,GAAUf,EAAQ,KAAKgB,GAAK,CAAC,CAAC,CAAC,EAC/BC,GAAO,CAAE,MAAO,GAAI,CAAC,EACrBP,GAAeT,CAAK,CACtB,EACG,UAAU,CAAC,CAAC,CAAE,CAAE,KAAAV,CAAK,CAAC,IAAM,CAC3B,IAAM2B,EAAMC,GAAY,EAGlBtD,EAAS0B,EAAKA,EAAK,OAAS,GAClC,GAAI1B,GAAUA,EAAO,OAAQ,CAC3B,GAAM,CAACuD,CAAM,EAAIvD,EACX,CAAE,KAAAwD,CAAK,EAAI,IAAI,IAAID,EAAO,IAAI,EAChCF,EAAI,OAASG,IACfH,EAAI,KAAOG,EACX,QAAQ,aAAa,CAAC,EAAG,GAAI,GAAGH,GAAK,EAIzC,MACEA,EAAI,KAAO,GACX,QAAQ,aAAa,CAAC,EAAG,GAAI,GAAGA,GAAK,CAEzC,CAAC,EAGA5D,GAAqBC,EAAI,CAAE,UAAAC,EAAW,QAAAC,CAAQ,CAAC,EACnD,KACC6D,EAAIC,GAAStB,EAAM,KAAKsB,CAAK,CAAC,EAC9BC,EAAS,IAAMvB,EAAM,SAAS,CAAC,EAC/B9B,EAAIoD,GAAUE,EAAA,CAAE,IAAKlE,GAAOgE,EAAQ,CACtC,CACJ,CAAC,CACH,CCpRO,SAASG,GACdC,EAAkB,CAAE,UAAAC,EAAW,MAAAC,EAAO,QAAAC,CAAQ,EACvB,CAGvB,IAAMC,EAAaH,EAChB,KACCI,EAAI,CAAC,CAAE,OAAQ,CAAE,EAAAC,CAAE,CAAE,IAAMA,CAAC,EAC5BC,GAAY,EAAG,CAAC,EAChBF,EAAI,CAAC,CAACG,EAAGC,CAAC,IAAMD,EAAIC,GAAKA,EAAI,CAAC,EAC9BC,EAAqB,CACvB,EAGIC,EAAUT,EACb,KACCG,EAAI,CAAC,CAAE,OAAAO,CAAO,IAAMA,CAAM,CAC5B,EAGF,OAAOC,EAAc,CAACF,EAASP,CAAU,CAAC,EACvC,KACCC,EAAI,CAAC,CAACO,EAAQE,CAAS,IAAM,EAAEF,GAAUE,EAAU,EACnDJ,EAAqB,EACrBK,GAAUZ,EAAQ,KAAKa,GAAK,CAAC,CAAC,CAAC,EAC/BC,GAAQ,EAAI,EACZC,GAAO,CAAE,MAAO,GAAI,CAAC,EACrBb,EAAIc,IAAW,CAAE,OAAAA,CAAO,EAAE,CAC5B,CACJ,CAYO,SAASC,GACdC,EAAiB,CAAE,UAAApB,EAAW,QAAAqB,EAAS,MAAApB,EAAO,QAAAC,CAAQ,EACpB,CAClC,IAAMoB,EAAQ,IAAIC,EACZC,EAAQF,EAAM,KAAKG,GAAS,CAAC,CAAC,EACpC,OAAAH,EAAM,UAAU,CAGd,KAAK,CAAE,OAAAJ,CAAO,EAAG,CACfE,EAAG,OAASF,EACRA,GACFE,EAAG,aAAa,WAAY,IAAI,EAChCA,EAAG,KAAK,GAERA,EAAG,gBAAgB,UAAU,CAEjC,EAGA,UAAW,CACTA,EAAG,MAAM,IAAM,GACfA,EAAG,OAAS,GACZA,EAAG,gBAAgB,UAAU,CAC/B,CACF,CAAC,EAGDC,EACG,KACCP,GAAUU,CAAK,EACfE,EAAwB,QAAQ,CAClC,EACG,UAAU,CAAC,CAAE,OAAAC,CAAO,IAAM,CACzBP,EAAG,MAAM,IAAM,GAAGO,EAAS,MAC7B,CAAC,EAGE7B,GAAesB,EAAI,CAAE,UAAApB,EAAW,MAAAC,EAAO,QAAAC,CAAQ,CAAC,EACpD,KACC0B,EAAIC,GAASP,EAAM,KAAKO,CAAK,CAAC,EAC9BC,EAAS,IAAMR,EAAM,SAAS,CAAC,EAC/BlB,EAAIyB,GAAUE,EAAA,CAAE,IAAKX,GAAOS,EAAQ,CACtC,CACJ,CCpHO,SAASG,GACd,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACf,CACND,EACG,KACCE,EAAU,IAAMC,EAEd,0DACF,CAAC,EACDC,EAAIC,GAAM,CACRA,EAAG,cAAgB,GACnBA,EAAG,QAAU,EACf,CAAC,EACDC,GAASD,GAAME,EAAUF,EAAI,QAAQ,EAClC,KACCG,GAAU,IAAMH,EAAG,UAAU,SAAS,0BAA0B,CAAC,EACjEI,EAAI,IAAMJ,CAAE,CACd,CACF,EACAK,GAAeT,CAAO,CACxB,EACG,UAAU,CAAC,CAACI,EAAIM,CAAM,IAAM,CAC3BN,EAAG,UAAU,OAAO,0BAA0B,EAC1CM,IACFN,EAAG,QAAU,GACjB,CAAC,CACP,CC/BA,SAASO,IAAyB,CAChC,MAAO,qBAAqB,KAAK,UAAU,SAAS,CACtD,CAiBO,SAASC,GACd,CAAE,UAAAC,CAAU,EACN,CACNA,EACG,KACCC,EAAU,IAAMC,EAAY,qBAAqB,CAAC,EAClDC,EAAIC,GAAMA,EAAG,gBAAgB,mBAAmB,CAAC,EACjDC,EAAOP,EAAa,EACpBQ,GAASF,GAAMG,EAAUH,EAAI,YAAY,EACtC,KACCI,EAAI,IAAMJ,CAAE,CACd,CACF,CACF,EACG,UAAUA,GAAM,CACf,IAAMK,EAAML,EAAG,UAGXK,IAAQ,EACVL,EAAG,UAAY,EAGNK,EAAML,EAAG,eAAiBA,EAAG,eACtCA,EAAG,UAAYK,EAAM,EAEzB,CAAC,CACP,CCpCO,SAASC,GACd,CAAE,UAAAC,EAAW,QAAAC,CAAQ,EACf,CACNC,EAAc,CAACC,GAAY,QAAQ,EAAGF,CAAO,CAAC,EAC3C,KACCG,EAAI,CAAC,CAACC,EAAQC,CAAM,IAAMD,GAAU,CAACC,CAAM,EAC3CC,EAAUF,GAAUG,EAAGH,CAAM,EAC1B,KACCI,GAAMJ,EAAS,IAAM,GAAG,CAC1B,CACF,EACAK,GAAeV,CAAS,CAC1B,EACG,U
AAU,CAAC,CAACK,EAAQ,CAAE,OAAQ,CAAE,EAAAM,CAAE,CAAC,CAAC,IAAM,CACzC,GAAIN,EACF,SAAS,KAAK,aAAa,qBAAsB,EAAE,EACnD,SAAS,KAAK,MAAM,IAAM,IAAIM,UACzB,CACL,IAAMC,EAAQ,GAAK,SAAS,SAAS,KAAK,MAAM,IAAK,EAAE,EACvD,SAAS,KAAK,gBAAgB,oBAAoB,EAClD,SAAS,KAAK,MAAM,IAAM,GACtBA,GACF,OAAO,SAAS,EAAGA,CAAK,CAC5B,CACF,CAAC,CACP,CC7DK,OAAO,UACV,OAAO,QAAU,SAAUC,EAAa,CACtC,IAAMC,EAA2B,CAAC,EAClC,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAK,CAACC,EAAKF,EAAIE,EAAI,CAAC,EAG3B,OAAOD,CACT,GAGG,OAAO,SACV,OAAO,OAAS,SAAUD,EAAa,CACrC,IAAMC,EAAiB,CAAC,EACxB,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAKD,EAAIE,EAAI,EAGpB,OAAOD,CACT,GAKE,OAAO,SAAY,cAGhB,QAAQ,UAAU,WACrB,QAAQ,UAAU,SAAW,SAC3BE,EAA8BC,EACxB,CACF,OAAOD,GAAM,UACf,KAAK,WAAaA,EAAE,KACpB,KAAK,UAAYA,EAAE,MAEnB,KAAK,WAAaA,EAClB,KAAK,UAAYC,EAErB,GAGG,QAAQ,UAAU,cACrB,QAAQ,UAAU,YAAc,YAC3BC,EACG,CACN,IAAMC,EAAS,KAAK,WACpB,GAAIA,EAAQ,CACND,EAAM,SAAW,GACnBC,EAAO,YAAY,IAAI,EAGzB,QAASC,EAAIF,EAAM,OAAS,EAAGE,GAAK,EAAGA,IAAK,CAC1C,IAAIC,EAAOH,EAAME,GACb,OAAOC,GAAS,SAClBA,EAAO,SAAS,eAAeA,CAAI,EAC5BA,EAAK,YACZA,EAAK,WAAW,YAAYA,CAAI,EAG7BD,EAGHD,EAAO,aAAa,KAAK,gBAAkBE,CAAI,EAF/CF,EAAO,aAAaE,EAAM,IAAI,CAGlC,CACF,CACF,IjMDJ,SAAS,gBAAgB,UAAU,OAAO,OAAO,EACjD,SAAS,gBAAgB,UAAU,IAAI,IAAI,EAG3C,IAAMC,GAAYC,GAAc,EAC1BC,GAAYC,GAAc,EAC1BC,GAAYC,GAAoB,EAChCC,GAAYC,GAAc,EAG1BC,GAAYC,GAAc,EAC1BC,GAAYC,GAAW,oBAAoB,EAC3CC,GAAYD,GAAW,qBAAqB,EAC5CE,GAAYC,GAAW,EAGvBC,GAASC,GAAc,EACvBC,GAAS,SAAS,MAAM,UAAU,QAAQ,GAC5C,+BAAU,QAASC,GACnB,IAAI,IAAI,2BAA4BH,GAAO,IAAI,CACjD,EACEI,GAGEC,GAAS,IAAIC,EACnBC,GAAiB,CAAE,OAAAF,EAAO,CAAC,EAGvBG,EAAQ,oBAAoB,GAC9BC,GAAoB,CAAE,UAAAxB,GAAW,UAAAE,GAAW,UAAAM,EAAU,CAAC,EA1HzD,IAAAiB,KA6HIA,GAAAV,GAAO,UAAP,YAAAU,GAAgB,YAAa,QAC/BC,GAAqB,CAAE,UAAA1B,EAAU,CAAC,EAGpC2B,EAAMzB,GAAWE,EAAO,EACrB,KACCwB,GAAM,GAAG,CACX,EACG,UAAU,IAAM,CACfC,GAAU,SAAU,EAAK,EACzBA,GAAU,SAAU,EAAK,CAC3B,CAAC,EAGLvB,GACG,KACCwB,EAAO,CAAC,CAAE,KAAAC,CAAK,IAAMA,IAAS,QAAQ,CACxC,EACG,UAAUC,GAAO,CAChB,OAAQA,EAAI,KAAM,CAGhB,IAAK,IACL,IAAK,IACH,IAAMC,EAAOC,GAAmB,kBAAkB,EAC9C,OAAOD,GAAS,aAClBA,EAAK,MAAM,EACb,MAGF,IAAK,IACL,IAAK,IACH,IAAME,EAAOD,GAAmB,kBAAkB,EAC9C,OAAOC,GAAS,aAClBA,EAAK,MAAM,EACb,KACJ,CACF,CAAC,EAGLC,GAAmB,CAAE,UAAApC,GAAW,QAAAU,EAAQ,CAAC,EACzC2B,GAAe,CAAE,UAAArC,EAAU,CAAC,EAC5BsC,GAAgB,CAAE,UAAA9B,GAAW,QAAAE,EAAQ,CAAC,EAGtC,IAAM6B,GAAUC,GAAYC,GAAoB,QAAQ,EAAG,CAAE,UAAAjC,EAAU,CAAC,EAClEkC,GAAQ1C,GACX,KACC2C,EAAI,IAAMF,GAAoB,MAAM,CAAC,EACrCG,EAAUC,GAAMC,GAAUD,EAAI,CAAE,UAAArC,GAAW,QAAA+B,EAAQ,CAAC,CAAC,EACrDQ,EAAY,CAAC,CACf,EAGIC,GAAWrB,EAGf,GAAGsB,GAAqB,SAAS,EAC9B,IAAIJ,GAAMK,GAAaL,EAAI,CAAE,QAAAzC,EAAQ,CAAC,CAAC,EAG1C,GAAG6C,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMM,GAAYN,EAAI,CAAE,OAAAzB,EAAO,CAAC,CAAC,EAGxC,GAAG6B,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMO,GAAYP,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAAAG,EAAM,CAAC,CAAC,EAG3D,GAAGO,GAAqB,SAAS,EAC9B,IAAIJ,GAAMQ,GAAaR,CAAE,CAAC,EAG7B,GAAGI,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMS,GAAYT,EAAI,CAAE,OAAA5B,GAAQ,UAAAX,EAAU,CAAC,CAAC,EAGnD,GAAG2C,GAAqB,QAAQ,EAC7B,IAAIJ,GAAMU,GAAYV,CAAE,CAAC,CAC9B,EAGMW,GAAWC,EAAM,IAAM9B,EAG3B,GAAGsB,GAAqB,UAAU,EAC/B,IAAIJ,GAAMa,GAAcb,CAAE,CAAC,EAG9B,GAAGI,GAAqB,SAAS,EAC9B,IAAIJ,GAAMc,GAAad,EAAI,CAAE,UAAArC,GAAW,QAAAJ,GAAS,OAAAS,EAAO,CAAC,CAAC,EAG7D,GAAGoC,GAAqB,SAAS,EAC9B,IAAIJ,GAAMtB,EAAQ,kBAAkB,EACjCqC,GAAoBf,EAAI,CAAE,OAAA5B,GAAQ,UAAAf,EAAU,CAAC,EAC7C2D,CACJ,EAGF,GAAGZ,GAAqB,cAAc,EACnC,IAAIJ,GAAMiB,GAAiBjB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,EAAQ,CAAC,CAAC,EAGzD,GAAGU,GAAqB,SAAS,EAC9B,IAAIJ,GAAMA,EAAG,aAAa,cAAc,IAAM,aAC3CkB,GAAGnD,GAAS,IAAMoD,GAAanB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAAAG,EAAM,CAAC,CAAC,EACjEqB,GAAGrD,GAAS,IAAMsD,GAAanB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAA
AG,EAAM,CAAC,CAAC,CACrE,EAGF,GAAGO,GAAqB,MAAM,EAC3B,IAAIJ,GAAMoB,GAAUpB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,EAAQ,CAAC,CAAC,EAGlD,GAAGU,GAAqB,KAAK,EAC1B,IAAIJ,GAAMqB,GAAqBrB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,QAAAnC,EAAQ,CAAC,CAAC,EAGtE,GAAG6C,GAAqB,KAAK,EAC1B,IAAIJ,GAAMsB,GAAetB,EAAI,CAAE,UAAArC,GAAW,QAAA+B,GAAS,MAAAG,GAAO,QAAAtC,EAAQ,CAAC,CAAC,CACzE,CAAC,EAGKgE,GAAapE,GAChB,KACC4C,EAAU,IAAMY,EAAQ,EACxBa,GAAUrB,EAAQ,EAClBD,EAAY,CAAC,CACf,EAGFqB,GAAW,UAAU,EAMrB,OAAO,UAAapE,GACpB,OAAO,UAAaE,GACpB,OAAO,QAAaE,GACpB,OAAO,UAAaE,GACpB,OAAO,UAAaE,GACpB,OAAO,QAAaE,GACpB,OAAO,QAAaE,GACpB,OAAO,OAAaC,GACpB,OAAO,OAAaO,GACpB,OAAO,WAAagD", + "names": ["require_focus_visible", "__commonJSMin", "exports", "module", "global", "factory", "applyFocusVisiblePolyfill", "scope", "hadKeyboardEvent", "hadFocusVisibleRecently", "hadFocusVisibleRecentlyTimeout", "inputTypesAllowlist", "isValidFocusTarget", "el", "focusTriggersKeyboardModality", "type", "tagName", "addFocusVisibleClass", "removeFocusVisibleClass", "onKeyDown", "e", "onPointerDown", "onFocus", "onBlur", "onVisibilityChange", "addInitialPointerMoveListeners", "onInitialPointerMove", "removeInitialPointerMoveListeners", "event", "error", "require_url_polyfill", "__commonJSMin", "exports", "global", "checkIfIteratorIsSupported", "error", "iteratorSupported", "createIterator", "items", "iterator", "value", "serializeParam", "deserializeParam", "polyfillURLSearchParams", "URLSearchParams", "searchString", "typeofSearchString", "_this", "name", "i", "entry", "key", "proto", "callback", "thisArg", "entries", "searchArray", "checkIfURLSearchParamsSupported", "e", "a", "b", "keys", "attributes", "attribute", "checkIfURLIsSupported", "u", "polyfillURL", "_URL", "URL", "url", "base", "doc", "baseElement", "err", "anchorElement", "inputElement", "searchParams", "enableSearchUpdate", "enableSearchParamsUpdate", "methodName", "method", "search", "linkURLWithAnchorAttribute", "attributeName", "expectedPort", "addPortToOrigin", "blob", "getOrigin", "require_tslib", "__commonJSMin", "exports", "module", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", "__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "__createBinding", "factory", "root", "createExporter", "previous", "id", "v", "exporter", "extendStatics", "d", "b", "p", "__", "t", "s", "n", "e", "i", "decorators", "target", "key", "desc", "c", "r", "paramIndex", "decorator", "metadataKey", "metadataValue", "thisArg", "_arguments", "P", "generator", "adopt", "value", "resolve", "reject", "fulfilled", "step", "rejected", "result", "body", "_", "f", "y", "g", "verb", "op", "m", "o", "k", "k2", "ar", "error", "il", "a", "j", "jl", "to", "from", "pack", "l", "q", "resume", "settle", "fulfill", "cooked", "raw", "__setModuleDefault", "mod", "receiver", "state", "kind", "require_clipboard", "__commonJSMin", "exports", "module", "root", "factory", "__webpack_modules__", "__unused_webpack_module", "__webpack_exports__", "__webpack_require__", "clipboard", "tiny_emitter", "tiny_emitter_default", "listen", "listen_default", "src_select", "select_default", "command", "type", "err", "ClipboardActionCut", "target", "selectedText", "actions_cut", "createFakeElement", "value", "isRTL", "fakeElement", "yPosition", "fakeCopyAction", "options", "ClipboardActionCopy", "actions_copy", 
"_typeof", "obj", "ClipboardActionDefault", "_options$action", "action", "container", "text", "actions_default", "clipboard_typeof", "_classCallCheck", "instance", "Constructor", "_defineProperties", "props", "i", "descriptor", "_createClass", "protoProps", "staticProps", "_inherits", "subClass", "superClass", "_setPrototypeOf", "o", "p", "_createSuper", "Derived", "hasNativeReflectConstruct", "_isNativeReflectConstruct", "Super", "_getPrototypeOf", "result", "NewTarget", "_possibleConstructorReturn", "self", "call", "_assertThisInitialized", "e", "getAttributeValue", "suffix", "element", "attribute", "Clipboard", "_Emitter", "_super", "trigger", "_this", "_this2", "selector", "actions", "support", "DOCUMENT_NODE_TYPE", "proto", "closest", "__unused_webpack_exports", "_delegate", "callback", "useCapture", "listenerFn", "listener", "delegate", "elements", "is", "listenNode", "listenNodeList", "listenSelector", "node", "nodeList", "select", "isReadOnly", "selection", "range", "E", "name", "ctx", "data", "evtArr", "len", "evts", "liveEvents", "__webpack_module_cache__", "moduleId", "getter", "definition", "key", "prop", "require_escape_html", "__commonJSMin", "exports", "module", "matchHtmlRegExp", "escapeHtml", "string", "str", "match", "escape", "html", "index", "lastIndex", "r", "a", "e", "import_focus_visible", "n", "t", "s", "r", "o", "u", "i", "a", "e", "c", "import_url_polyfill", "import_tslib", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__createBinding", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", "__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "tslib", "isFunction", "value", "createErrorClass", "createImpl", "_super", "instance", "ctorFunc", "UnsubscriptionError", "createErrorClass", "_super", "errors", "err", "i", "arrRemove", "arr", "item", "index", "Subscription", "initialTeardown", "errors", "_parentage", "_parentage_1", "__values", "_parentage_1_1", "parent_1", "initialFinalizer", "isFunction", "e", "UnsubscriptionError", "_finalizers", "_finalizers_1", "_finalizers_1_1", "finalizer", "execFinalizer", "err", "__spreadArray", "__read", "teardown", "_a", "parent", "arrRemove", "empty", "EMPTY_SUBSCRIPTION", "Subscription", "isSubscription", "value", "isFunction", "execFinalizer", "finalizer", "config", "timeoutProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "reportUnhandledError", "err", "timeoutProvider", "onUnhandledError", "config", "noop", "COMPLETE_NOTIFICATION", "createNotification", "errorNotification", "error", "nextNotification", "value", "kind", "context", "errorContext", "cb", "config", "isRoot", "_a", "errorThrown", "error", "captureError", "err", "Subscriber", "_super", "__extends", "destination", "_this", "isSubscription", "EMPTY_OBSERVER", "next", "error", "complete", "SafeSubscriber", "value", "handleStoppedNotification", "nextNotification", "err", "errorNotification", "COMPLETE_NOTIFICATION", "Subscription", "_bind", "bind", "fn", "thisArg", "ConsumerObserver", "partialObserver", "value", "error", "handleUnhandledError", "err", "SafeSubscriber", "_super", "__extends", "observerOrNext", "complete", "_this", "isFunction", "context_1", "config", "Subscriber", "handleUnhandledError", "error", "config", "captureError", "reportUnhandledError", "defaultErrorHandler", "err", 
"handleStoppedNotification", "notification", "subscriber", "onStoppedNotification", "timeoutProvider", "EMPTY_OBSERVER", "noop", "observable", "identity", "x", "pipe", "fns", "_i", "pipeFromArray", "identity", "input", "prev", "fn", "Observable", "subscribe", "operator", "observable", "observerOrNext", "error", "complete", "_this", "subscriber", "isSubscriber", "SafeSubscriber", "errorContext", "_a", "source", "sink", "err", "next", "promiseCtor", "getPromiseCtor", "resolve", "reject", "value", "operations", "_i", "pipeFromArray", "x", "getPromiseCtor", "promiseCtor", "_a", "config", "isObserver", "value", "isFunction", "isSubscriber", "Subscriber", "isSubscription", "hasLift", "source", "isFunction", "operate", "init", "liftedSource", "err", "createOperatorSubscriber", "destination", "onNext", "onComplete", "onError", "onFinalize", "OperatorSubscriber", "_super", "__extends", "shouldUnsubscribe", "_this", "value", "err", "closed_1", "_a", "Subscriber", "animationFrameProvider", "callback", "request", "cancel", "delegate", "handle", "timestamp", "Subscription", "args", "_i", "__spreadArray", "__read", "ObjectUnsubscribedError", "createErrorClass", "_super", "Subject", "_super", "__extends", "_this", "operator", "subject", "AnonymousSubject", "ObjectUnsubscribedError", "value", "errorContext", "_b", "__values", "_c", "observer", "err", "observers", "_a", "subscriber", "hasError", "isStopped", "EMPTY_SUBSCRIPTION", "Subscription", "arrRemove", "thrownError", "observable", "Observable", "destination", "source", "AnonymousSubject", "_super", "__extends", "destination", "source", "_this", "value", "_b", "_a", "err", "subscriber", "EMPTY_SUBSCRIPTION", "Subject", "dateTimestampProvider", "ReplaySubject", "_super", "__extends", "_bufferSize", "_windowTime", "_timestampProvider", "dateTimestampProvider", "_this", "value", "_a", "isStopped", "_buffer", "_infiniteTimeWindow", "subscriber", "subscription", "copy", "i", "adjustedBufferSize", "now", "last", "Subject", "Action", "_super", "__extends", "scheduler", "work", "state", "delay", "Subscription", "intervalProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "AsyncAction", "_super", "__extends", "scheduler", "work", "_this", "state", "delay", "id", "_a", "_id", "intervalProvider", "_scheduler", "error", "_delay", "errored", "errorValue", "e", "actions", "arrRemove", "Action", "Scheduler", "schedulerActionCtor", "now", "work", "delay", "state", "dateTimestampProvider", "AsyncScheduler", "_super", "__extends", "SchedulerAction", "now", "Scheduler", "_this", "action", "actions", "error", "asyncScheduler", "AsyncScheduler", "AsyncAction", "async", "AnimationFrameAction", "_super", "__extends", "scheduler", "work", "_this", "id", "delay", "animationFrameProvider", "actions", "_a", "AsyncAction", "AnimationFrameScheduler", "_super", "__extends", "action", "flushId", "actions", "error", "AsyncScheduler", "animationFrameScheduler", "AnimationFrameScheduler", "AnimationFrameAction", "EMPTY", "Observable", "subscriber", "isScheduler", "value", "isFunction", "last", "arr", "popResultSelector", "args", "isFunction", "popScheduler", "isScheduler", "popNumber", "defaultValue", "isArrayLike", "x", "isPromise", "value", "isFunction", "isInteropObservable", "input", "isFunction", "observable", "isAsyncIterable", "obj", "isFunction", "createInvalidObservableTypeError", "input", "getSymbolIterator", "iterator", "isIterable", "input", "isFunction", "iterator", "readableStreamLikeToAsyncGenerator", "readableStream", 
"reader", "__await", "_a", "_b", "value", "done", "isReadableStreamLike", "obj", "isFunction", "innerFrom", "input", "Observable", "isInteropObservable", "fromInteropObservable", "isArrayLike", "fromArrayLike", "isPromise", "fromPromise", "isAsyncIterable", "fromAsyncIterable", "isIterable", "fromIterable", "isReadableStreamLike", "fromReadableStreamLike", "createInvalidObservableTypeError", "obj", "subscriber", "obs", "observable", "isFunction", "array", "i", "promise", "value", "err", "reportUnhandledError", "iterable", "iterable_1", "__values", "iterable_1_1", "asyncIterable", "process", "readableStream", "readableStreamLikeToAsyncGenerator", "asyncIterable_1", "__asyncValues", "asyncIterable_1_1", "executeSchedule", "parentSubscription", "scheduler", "work", "delay", "repeat", "scheduleSubscription", "observeOn", "scheduler", "delay", "operate", "source", "subscriber", "createOperatorSubscriber", "value", "executeSchedule", "err", "subscribeOn", "scheduler", "delay", "operate", "source", "subscriber", "scheduleObservable", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "schedulePromise", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "scheduleArray", "input", "scheduler", "Observable", "subscriber", "i", "scheduleIterable", "input", "scheduler", "Observable", "subscriber", "iterator", "executeSchedule", "value", "done", "_a", "err", "isFunction", "scheduleAsyncIterable", "input", "scheduler", "Observable", "subscriber", "executeSchedule", "iterator", "result", "scheduleReadableStreamLike", "input", "scheduler", "scheduleAsyncIterable", "readableStreamLikeToAsyncGenerator", "scheduled", "input", "scheduler", "isInteropObservable", "scheduleObservable", "isArrayLike", "scheduleArray", "isPromise", "schedulePromise", "isAsyncIterable", "scheduleAsyncIterable", "isIterable", "scheduleIterable", "isReadableStreamLike", "scheduleReadableStreamLike", "createInvalidObservableTypeError", "from", "input", "scheduler", "scheduled", "innerFrom", "of", "args", "_i", "scheduler", "popScheduler", "from", "throwError", "errorOrErrorFactory", "scheduler", "errorFactory", "isFunction", "init", "subscriber", "Observable", "isValidDate", "value", "map", "project", "thisArg", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "isArray", "callOrApply", "fn", "args", "__spreadArray", "__read", "mapOneOrManyArgs", "map", "isArray", "getPrototypeOf", "objectProto", "getKeys", "argsArgArrayOrObject", "args", "first_1", "isPOJO", "keys", "key", "obj", "createObject", "keys", "values", "result", "key", "i", "combineLatest", "args", "_i", "scheduler", "popScheduler", "resultSelector", "popResultSelector", "_a", "argsArgArrayOrObject", "observables", "keys", "from", "result", "Observable", "combineLatestInit", "values", "createObject", "identity", "mapOneOrManyArgs", "valueTransform", "subscriber", "maybeSchedule", "length", "active", "remainingFirstValues", "i", "source", "hasFirstValue", "createOperatorSubscriber", "value", "execute", "subscription", "executeSchedule", "mergeInternals", "source", "subscriber", "project", "concurrent", "onBeforeNext", "expand", "innerSubScheduler", "additionalFinalizer", "buffer", "active", "index", "isComplete", "checkComplete", "outerNext", "value", "doInnerSub", "innerComplete", "innerFrom", "createOperatorSubscriber", "innerValue", "bufferedValue", "executeSchedule", "err", "mergeMap", "project", "resultSelector", "concurrent", "isFunction", "a", "i", "map", "b", "ii", "innerFrom", "operate", "source", "subscriber", 
"mergeInternals", "mergeAll", "concurrent", "mergeMap", "identity", "concatAll", "mergeAll", "concat", "args", "_i", "concatAll", "from", "popScheduler", "defer", "observableFactory", "Observable", "subscriber", "innerFrom", "nodeEventEmitterMethods", "eventTargetMethods", "jqueryMethods", "fromEvent", "target", "eventName", "options", "resultSelector", "isFunction", "mapOneOrManyArgs", "_a", "__read", "isEventTarget", "methodName", "handler", "isNodeStyleEventEmitter", "toCommonHandlerRegistry", "isJQueryStyleEventEmitter", "add", "remove", "isArrayLike", "mergeMap", "subTarget", "innerFrom", "Observable", "subscriber", "args", "_i", "fromEventPattern", "addHandler", "removeHandler", "resultSelector", "mapOneOrManyArgs", "Observable", "subscriber", "handler", "e", "_i", "retValue", "isFunction", "timer", "dueTime", "intervalOrScheduler", "scheduler", "async", "intervalDuration", "isScheduler", "Observable", "subscriber", "due", "isValidDate", "n", "merge", "args", "_i", "scheduler", "popScheduler", "concurrent", "popNumber", "sources", "innerFrom", "mergeAll", "from", "EMPTY", "NEVER", "Observable", "noop", "isArray", "argsOrArgArray", "args", "filter", "predicate", "thisArg", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "zip", "args", "_i", "resultSelector", "popResultSelector", "sources", "argsOrArgArray", "Observable", "subscriber", "buffers", "completed", "sourceIndex", "innerFrom", "createOperatorSubscriber", "value", "buffer", "result", "__spreadArray", "__read", "i", "EMPTY", "audit", "durationSelector", "operate", "source", "subscriber", "hasValue", "lastValue", "durationSubscriber", "isComplete", "endDuration", "value", "cleanupDuration", "createOperatorSubscriber", "innerFrom", "auditTime", "duration", "scheduler", "asyncScheduler", "audit", "timer", "bufferCount", "bufferSize", "startBufferEvery", "operate", "source", "subscriber", "buffers", "count", "createOperatorSubscriber", "value", "toEmit", "buffers_1", "__values", "buffers_1_1", "buffer", "toEmit_1", "toEmit_1_1", "arrRemove", "buffers_2", "buffers_2_1", "catchError", "selector", "operate", "source", "subscriber", "innerSub", "syncUnsub", "handledResult", "createOperatorSubscriber", "err", "innerFrom", "scanInternals", "accumulator", "seed", "hasSeed", "emitOnNext", "emitBeforeComplete", "source", "subscriber", "hasState", "state", "index", "createOperatorSubscriber", "value", "i", "combineLatest", "args", "_i", "resultSelector", "popResultSelector", "pipe", "__spreadArray", "__read", "mapOneOrManyArgs", "operate", "source", "subscriber", "combineLatestInit", "argsOrArgArray", "combineLatestWith", "otherSources", "_i", "combineLatest", "__spreadArray", "__read", "concatMap", "project", "resultSelector", "isFunction", "mergeMap", "debounceTime", "dueTime", "scheduler", "asyncScheduler", "operate", "source", "subscriber", "activeTask", "lastValue", "lastTime", "emit", "value", "emitWhenIdle", "targetTime", "now", "createOperatorSubscriber", "defaultIfEmpty", "defaultValue", "operate", "source", "subscriber", "hasValue", "createOperatorSubscriber", "value", "take", "count", "EMPTY", "operate", "source", "subscriber", "seen", "createOperatorSubscriber", "value", "ignoreElements", "operate", "source", "subscriber", "createOperatorSubscriber", "noop", "mapTo", "value", "map", "delayWhen", "delayDurationSelector", "subscriptionDelay", "source", "concat", "take", "ignoreElements", "mergeMap", "value", "index", "mapTo", "delay", "due", "scheduler", "asyncScheduler", "duration", "timer", 
"delayWhen", "distinctUntilChanged", "comparator", "keySelector", "identity", "defaultCompare", "operate", "source", "subscriber", "previousKey", "first", "createOperatorSubscriber", "value", "currentKey", "a", "b", "distinctUntilKeyChanged", "key", "compare", "distinctUntilChanged", "x", "y", "endWith", "values", "_i", "source", "concat", "of", "__spreadArray", "__read", "finalize", "callback", "operate", "source", "subscriber", "takeLast", "count", "EMPTY", "operate", "source", "subscriber", "buffer", "createOperatorSubscriber", "value", "buffer_1", "__values", "buffer_1_1", "merge", "args", "_i", "scheduler", "popScheduler", "concurrent", "popNumber", "argsOrArgArray", "operate", "source", "subscriber", "mergeAll", "from", "__spreadArray", "__read", "mergeWith", "otherSources", "_i", "merge", "__spreadArray", "__read", "repeat", "countOrConfig", "count", "delay", "_a", "EMPTY", "operate", "source", "subscriber", "soFar", "sourceSub", "resubscribe", "notifier", "timer", "innerFrom", "notifierSubscriber_1", "createOperatorSubscriber", "subscribeToSource", "syncUnsub", "sample", "notifier", "operate", "source", "subscriber", "hasValue", "lastValue", "createOperatorSubscriber", "value", "noop", "scan", "accumulator", "seed", "operate", "scanInternals", "share", "options", "_a", "connector", "Subject", "_b", "resetOnError", "_c", "resetOnComplete", "_d", "resetOnRefCountZero", "wrapperSource", "connection", "resetConnection", "subject", "refCount", "hasCompleted", "hasErrored", "cancelReset", "reset", "resetAndUnsubscribe", "conn", "operate", "source", "subscriber", "dest", "handleReset", "SafeSubscriber", "value", "err", "innerFrom", "on", "args", "_i", "onSubscriber", "__spreadArray", "__read", "shareReplay", "configOrBufferSize", "windowTime", "scheduler", "bufferSize", "refCount", "_a", "_b", "_c", "share", "ReplaySubject", "skip", "count", "filter", "_", "index", "skipUntil", "notifier", "operate", "source", "subscriber", "taking", "skipSubscriber", "createOperatorSubscriber", "noop", "innerFrom", "value", "startWith", "values", "_i", "scheduler", "popScheduler", "operate", "source", "subscriber", "concat", "switchMap", "project", "resultSelector", "operate", "source", "subscriber", "innerSubscriber", "index", "isComplete", "checkComplete", "createOperatorSubscriber", "value", "innerIndex", "outerIndex", "innerFrom", "innerValue", "takeUntil", "notifier", "operate", "source", "subscriber", "innerFrom", "createOperatorSubscriber", "noop", "takeWhile", "predicate", "inclusive", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "result", "tap", "observerOrNext", "error", "complete", "tapObserver", "isFunction", "operate", "source", "subscriber", "_a", "isUnsub", "createOperatorSubscriber", "value", "err", "_b", "identity", "defaultThrottleConfig", "throttle", "durationSelector", "config", "operate", "source", "subscriber", "leading", "trailing", "hasValue", "sendValue", "throttled", "isComplete", "endThrottling", "send", "cleanupThrottling", "startThrottle", "value", "innerFrom", "createOperatorSubscriber", "throttleTime", "duration", "scheduler", "config", "asyncScheduler", "defaultThrottleConfig", "duration$", "timer", "throttle", "withLatestFrom", "inputs", "_i", "project", "popResultSelector", "operate", "source", "subscriber", "len", "otherValues", "hasValue", "ready", "i", "innerFrom", "createOperatorSubscriber", "value", "identity", "noop", "values", "__spreadArray", "__read", "zip", "sources", "_i", "operate", "source", "subscriber", "__spreadArray", 
"__read", "zipWith", "otherInputs", "_i", "zip", "__spreadArray", "__read", "watchDocument", "document$", "ReplaySubject", "fromEvent", "getElements", "selector", "node", "getElement", "el", "getOptionalElement", "getActiveElement", "watchElementFocus", "el", "merge", "fromEvent", "debounceTime", "map", "active", "getActiveElement", "startWith", "distinctUntilChanged", "getElementOffset", "el", "watchElementOffset", "merge", "fromEvent", "auditTime", "animationFrameScheduler", "map", "startWith", "getElementContentOffset", "el", "watchElementContentOffset", "merge", "fromEvent", "auditTime", "animationFrameScheduler", "map", "startWith", "MapShim", "getIndex", "arr", "key", "result", "entry", "index", "class_1", "value", "entries", "callback", "ctx", "_i", "_a", "isBrowser", "global$1", "requestAnimationFrame$1", "trailingTimeout", "throttle", "delay", "leadingCall", "trailingCall", "lastCallTime", "resolvePending", "proxy", "timeoutCallback", "timeStamp", "REFRESH_DELAY", "transitionKeys", "mutationObserverSupported", "ResizeObserverController", "observer", "observers", "changesDetected", "activeObservers", "_b", "propertyName", "isReflowProperty", "defineConfigurable", "target", "props", "getWindowOf", "ownerGlobal", "emptyRect", "createRectInit", "toFloat", "getBordersSize", "styles", "positions", "size", "position", "getPaddings", "paddings", "positions_1", "getSVGContentRect", "bbox", "getHTMLElementContentRect", "clientWidth", "clientHeight", "horizPad", "vertPad", "width", "height", "isDocumentElement", "vertScrollbar", "horizScrollbar", "isSVGGraphicsElement", "getContentRect", "createReadOnlyRect", "x", "y", "Constr", "rect", "ResizeObservation", "ResizeObserverEntry", "rectInit", "contentRect", "ResizeObserverSPI", "controller", "callbackCtx", "observations", "_this", "observation", "ResizeObserver", "method", "ResizeObserver_es_default", "entry$", "Subject", "observer$", "defer", "of", "ResizeObserver_es_default", "entries", "entry", "switchMap", "observer", "merge", "NEVER", "finalize", "shareReplay", "getElementSize", "el", "watchElementSize", "tap", "filter", "target", "map", "startWith", "getElementContentSize", "el", "getElementContainer", "parent", "entry$", "Subject", "observer$", "defer", "of", "entries", "entry", "switchMap", "observer", "merge", "NEVER", "finalize", "shareReplay", "watchElementVisibility", "el", "tap", "filter", "target", "map", "isIntersecting", "watchElementBoundary", "threshold", "watchElementContentOffset", "y", "visible", "getElementSize", "content", "getElementContentSize", "distinctUntilChanged", "toggles", "getElement", "getToggle", "name", "setToggle", "value", "watchToggle", "el", "fromEvent", "map", "startWith", "isSusceptibleToKeyboard", "el", "type", "watchKeyboard", "fromEvent", "filter", "ev", "map", "getToggle", "mode", "active", "getActiveElement", "share", "getLocation", "setLocation", "url", "watchLocation", "Subject", "appendChild", "el", "child", "node", "h", "tag", "attributes", "children", "attr", "truncate", "value", "n", "i", "round", "digits", "getLocationHash", "setLocationHash", "hash", "el", "h", "ev", "watchLocationHash", "fromEvent", "map", "startWith", "filter", "shareReplay", "watchLocationTarget", "id", "getOptionalElement", "watchMedia", "query", "media", "fromEventPattern", "next", "startWith", "watchPrint", "merge", "fromEvent", "map", "at", "query$", "factory", "switchMap", "active", "EMPTY", "request", "url", "options", "from", "catchError", "EMPTY", "switchMap", "res", "throwError", "of", "requestJSON", 
"shareReplay", "requestXML", "dom", "map", "watchScript", "src", "script", "h", "defer", "merge", "fromEvent", "switchMap", "throwError", "map", "finalize", "take", "getViewportOffset", "watchViewportOffset", "merge", "fromEvent", "map", "startWith", "getViewportSize", "watchViewportSize", "fromEvent", "map", "startWith", "watchViewport", "combineLatest", "watchViewportOffset", "watchViewportSize", "map", "offset", "size", "shareReplay", "watchViewportAt", "el", "viewport$", "header$", "size$", "distinctUntilKeyChanged", "offset$", "combineLatest", "map", "getElementOffset", "height", "offset", "size", "x", "y", "watchWorker", "worker", "tx$", "rx$", "fromEvent", "map", "data", "throttle", "tap", "message", "switchMap", "share", "script", "getElement", "config", "getLocation", "configuration", "feature", "flag", "translation", "key", "value", "getComponentElement", "type", "node", "getElement", "getComponentElements", "getElements", "watchAnnounce", "el", "button", "getElement", "fromEvent", "map", "content", "mountAnnounce", "feature", "EMPTY", "defer", "push$", "Subject", "startWith", "hash", "_a", "tap", "state", "finalize", "__spreadValues", "watchConsent", "el", "target$", "map", "target", "mountConsent", "options", "internal$", "Subject", "hidden", "tap", "state", "finalize", "__spreadValues", "import_clipboard", "renderTooltip", "id", "h", "renderAnnotation", "id", "prefix", "anchor", "h", "renderTooltip", "renderClipboardButton", "id", "h", "translation", "renderSearchDocument", "document", "flag", "parent", "teaser", "missing", "key", "list", "h", "url", "feature", "match", "highlight", "value", "tags", "configuration", "truncate", "tag", "id", "type", "translation", "renderSearchResultItem", "result", "threshold", "docs", "doc", "article", "index", "best", "more", "children", "section", "renderSourceFacts", "facts", "h", "key", "value", "round", "renderTabbedControl", "type", "classes", "h", "renderTable", "table", "h", "renderVersion", "version", "config", "configuration", "url", "h", "renderVersionSelector", "versions", "active", "translation", "watchAnnotation", "el", "container", "offset$", "defer", "combineLatest", "watchElementOffset", "watchElementContentOffset", "map", "x", "y", "scroll", "width", "height", "getElementSize", "watchElementFocus", "switchMap", "active", "offset", "take", "mountAnnotation", "target$", "tooltip", "index", "push$", "Subject", "done$", "takeLast", "watchElementVisibility", "takeUntil", "visible", "merge", "filter", "debounceTime", "auditTime", "animationFrameScheduler", "throttleTime", "origin", "fromEvent", "ev", "withLatestFrom", "_a", "parent", "getActiveElement", "target", "delay", "tap", "state", "finalize", "__spreadValues", "findAnnotationMarkers", "container", "markers", "el", "getElements", "nodes", "it", "node", "text", "match", "id", "force", "marker", "swap", "source", "target", "mountAnnotationList", "target$", "print$", "parent", "prefix", "annotations", "getOptionalElement", "renderAnnotation", "EMPTY", "defer", "done$", "Subject", "pairs", "annotation", "getElement", "takeUntil", "takeLast", "active", "inner", "child", "merge", "mountAnnotation", "finalize", "share", "sequence", "findCandidateList", "el", "sibling", "watchCodeBlock", "watchElementSize", "map", "width", "getElementContentSize", "distinctUntilKeyChanged", "mountCodeBlock", "options", "hover", "factory$", "defer", "push$", "Subject", "scrollable", "ClipboardJS", "parent", "renderClipboardButton", "container", "list", "feature", "annotations$", 
"mountAnnotationList", "tap", "state", "finalize", "__spreadValues", "mergeWith", "height", "distinctUntilChanged", "switchMap", "active", "EMPTY", "watchElementVisibility", "filter", "visible", "take", "mermaid$", "sequence", "fetchScripts", "watchScript", "of", "mountMermaid", "el", "tap", "mermaid_default", "map", "shareReplay", "id", "host", "h", "svg", "shadow", "watchDetails", "el", "target$", "print$", "open", "merge", "map", "target", "filter", "details", "active", "tap", "mountDetails", "options", "defer", "push$", "Subject", "action", "reveal", "state", "finalize", "__spreadValues", "sentinel", "h", "mountDataTable", "el", "renderTable", "of", "watchContentTabs", "el", "inputs", "getElements", "initial", "input", "merge", "fromEvent", "map", "getElement", "startWith", "active", "mountContentTabs", "viewport$", "prev", "renderTabbedControl", "next", "container", "defer", "push$", "Subject", "done$", "takeLast", "combineLatest", "watchElementSize", "auditTime", "animationFrameScheduler", "takeUntil", "size", "offset", "getElementOffset", "width", "getElementSize", "content", "getElementContentOffset", "watchElementContentOffset", "getElementContentSize", "direction", "feature", "skip", "withLatestFrom", "tab", "y", "set", "label", "tabs", "tap", "state", "finalize", "__spreadValues", "subscribeOn", "asyncScheduler", "mountContent", "el", "viewport$", "target$", "print$", "merge", "getElements", "child", "mountCodeBlock", "mountMermaid", "mountDataTable", "mountDetails", "mountContentTabs", "watchDialog", "_el", "alert$", "switchMap", "message", "merge", "of", "delay", "map", "active", "mountDialog", "el", "options", "inner", "getElement", "defer", "push$", "Subject", "tap", "state", "finalize", "__spreadValues", "isHidden", "viewport$", "feature", "of", "direction$", "map", "y", "bufferCount", "a", "b", "distinctUntilKeyChanged", "hidden$", "combineLatest", "filter", "offset", "direction", "distinctUntilChanged", "search$", "watchToggle", "search", "switchMap", "active", "startWith", "watchHeader", "el", "options", "defer", "watchElementSize", "height", "hidden", "shareReplay", "mountHeader", "header$", "main$", "push$", "Subject", "done$", "takeLast", "combineLatestWith", "takeUntil", "state", "__spreadValues", "watchHeaderTitle", "el", "viewport$", "header$", "watchViewportAt", "map", "y", "height", "getElementSize", "distinctUntilKeyChanged", "mountHeaderTitle", "options", "defer", "push$", "Subject", "active", "heading", "getOptionalElement", "EMPTY", "tap", "state", "finalize", "__spreadValues", "watchMain", "el", "viewport$", "header$", "adjust$", "map", "height", "distinctUntilChanged", "border$", "switchMap", "watchElementSize", "distinctUntilKeyChanged", "combineLatest", "header", "top", "bottom", "y", "a", "b", "watchPalette", "inputs", "current", "input", "of", "mergeMap", "fromEvent", "map", "startWith", "shareReplay", "mountPalette", "el", "defer", "push$", "Subject", "palette", "key", "value", "index", "label", "observeOn", "asyncScheduler", "getElements", "tap", "state", "finalize", "__spreadValues", "import_clipboard", "extract", "el", "text", "setupClipboardJS", "alert$", "ClipboardJS", "Observable", "subscriber", "getElement", "ev", "tap", "map", "translation", "preprocess", "urls", "root", "next", "a", "b", "url", "index", "fetchSitemap", "base", "cached", "of", "config", "configuration", "requestXML", "map", "sitemap", "getElements", "node", "catchError", "EMPTY", "defaultIfEmpty", "tap", "setupInstantLoading", "document$", "location$", "viewport$", "config", 
"configuration", "fromEvent", "favicon", "getOptionalElement", "push$", "fetchSitemap", "map", "paths", "path", "switchMap", "urls", "filter", "ev", "el", "url", "of", "NEVER", "share", "pop$", "merge", "distinctUntilChanged", "a", "b", "response$", "distinctUntilKeyChanged", "request", "catchError", "setLocation", "sample", "dom", "res", "skip", "replacement", "selector", "feature", "source", "target", "getComponentElement", "getElements", "concatMap", "script", "h", "name", "Observable", "observer", "EMPTY", "offset", "setLocationHash", "skipUntil", "debounceTime", "bufferCount", "state", "import_escape_html", "import_escape_html", "setupSearchHighlighter", "config", "escape", "separator", "highlight", "_", "data", "term", "query", "match", "value", "escapeHTML", "defaultTransform", "query", "terms", "index", "isSearchReadyMessage", "message", "isSearchQueryMessage", "isSearchResultMessage", "setupSearchIndex", "config", "docs", "translation", "options", "feature", "setupSearchWorker", "url", "index", "configuration", "worker", "tx$", "Subject", "rx$", "watchWorker", "map", "message", "isSearchResultMessage", "result", "document", "share", "from", "data", "setupVersionSelector", "document$", "config", "configuration", "versions$", "requestJSON", "catchError", "EMPTY", "current$", "map", "versions", "current", "version", "aliases", "switchMap", "urls", "fromEvent", "filter", "ev", "withLatestFrom", "el", "url", "of", "fetchSitemap", "sitemap", "path", "getLocation", "setLocation", "combineLatest", "getElement", "renderVersionSelector", "_a", "outdated", "latest", "warning", "getComponentElements", "watchSearchQuery", "el", "rx$", "fn", "defaultTransform", "searchParams", "getLocation", "setToggle", "param$", "filter", "isSearchReadyMessage", "take", "map", "watchToggle", "active", "url", "value", "focus$", "watchElementFocus", "value$", "merge", "fromEvent", "delay", "startWith", "distinctUntilChanged", "combineLatest", "focus", "shareReplay", "mountSearchQuery", "tx$", "push$", "Subject", "done$", "takeLast", "distinctUntilKeyChanged", "translation", "takeUntil", "tap", "state", "finalize", "__spreadValues", "share", "mountSearchResult", "el", "rx$", "query$", "push$", "Subject", "boundary$", "watchElementBoundary", "filter", "meta", "getElement", "list", "ready$", "isSearchReadyMessage", "take", "withLatestFrom", "skipUntil", "items", "value", "translation", "round", "tap", "switchMap", "merge", "of", "bufferCount", "zipWith", "chunk", "result", "renderSearchResultItem", "isSearchResultMessage", "map", "data", "state", "finalize", "__spreadValues", "watchSearchShare", "_el", "query$", "map", "value", "url", "getLocation", "mountSearchShare", "el", "options", "push$", "Subject", "fromEvent", "ev", "tap", "state", "finalize", "__spreadValues", "mountSearchSuggest", "el", "rx$", "keyboard$", "push$", "Subject", "query", "getComponentElement", "query$", "merge", "fromEvent", "observeOn", "asyncScheduler", "map", "distinctUntilChanged", "combineLatestWith", "suggestions", "value", "words", "last", "filter", "mode", "key", "isSearchResultMessage", "data", "tap", "state", "finalize", "mountSearch", "el", "index$", "keyboard$", "config", "configuration", "url", "worker", "setupSearchWorker", "query", "getComponentElement", "result", "tx$", "rx$", "filter", "isSearchQueryMessage", "sample", "isSearchReadyMessage", "take", "mode", "key", "active", "getActiveElement", "anchors", "anchor", "getElements", "article", "best", "a", "b", "setToggle", "els", "i", "query$", "mountSearchQuery", "result$", 
"mountSearchResult", "merge", "mergeWith", "getComponentElements", "child", "mountSearchShare", "mountSearchSuggest", "err", "NEVER", "mountSearchHiglight", "el", "index$", "location$", "combineLatest", "startWith", "getLocation", "filter", "url", "map", "index", "setupSearchHighlighter", "fn", "_a", "nodes", "it", "node", "original", "replaced", "text", "childNodes", "h", "watchSidebar", "el", "viewport$", "main$", "parent", "adjust", "combineLatest", "map", "offset", "height", "y", "distinctUntilChanged", "a", "b", "mountSidebar", "_a", "_b", "header$", "options", "__objRest", "inner", "getElement", "getElementOffset", "defer", "push$", "Subject", "auditTime", "animationFrameScheduler", "withLatestFrom", "observeOn", "take", "item", "getElements", "container", "getElementContainer", "getElementSize", "tap", "state", "finalize", "__spreadValues", "fetchSourceFactsFromGitHub", "user", "repo", "url", "zip", "requestJSON", "catchError", "EMPTY", "map", "release", "defaultIfEmpty", "info", "__spreadValues", "fetchSourceFactsFromGitLab", "base", "project", "url", "requestJSON", "catchError", "EMPTY", "map", "star_count", "forks_count", "defaultIfEmpty", "fetchSourceFacts", "url", "match", "user", "repo", "fetchSourceFactsFromGitHub", "base", "slug", "fetchSourceFactsFromGitLab", "EMPTY", "fetch$", "watchSource", "el", "defer", "cached", "of", "getComponentElements", "consent", "EMPTY", "fetchSourceFacts", "tap", "facts", "catchError", "filter", "map", "shareReplay", "mountSource", "inner", "getElement", "push$", "Subject", "renderSourceFacts", "state", "finalize", "__spreadValues", "watchTabs", "el", "viewport$", "header$", "watchElementSize", "switchMap", "watchViewportAt", "map", "y", "distinctUntilKeyChanged", "mountTabs", "options", "defer", "push$", "Subject", "hidden", "feature", "of", "tap", "state", "finalize", "__spreadValues", "watchTableOfContents", "el", "viewport$", "header$", "table", "anchors", "getElements", "anchor", "id", "target", "getOptionalElement", "adjust$", "distinctUntilKeyChanged", "map", "height", "main", "getComponentElement", "grid", "getElement", "share", "watchElementSize", "switchMap", "body", "defer", "path", "of", "index", "offset", "a", "b", "combineLatestWith", "adjust", "scan", "prev", "next", "y", "size", "last", "distinctUntilChanged", "startWith", "bufferCount", "mountTableOfContents", "target$", "push$", "Subject", "done$", "takeLast", "feature", "smooth$", "merge", "debounceTime", "filter", "withLatestFrom", "behavior", "container", "getElementContainer", "getElementSize", "takeUntil", "skip", "repeat", "url", "getLocation", "active", "hash", "tap", "state", "finalize", "__spreadValues", "watchBackToTop", "_el", "viewport$", "main$", "target$", "direction$", "map", "y", "bufferCount", "a", "b", "distinctUntilChanged", "active$", "active", "combineLatest", "direction", "takeUntil", "skip", "endWith", "repeat", "hidden", "mountBackToTop", "el", "header$", "push$", "Subject", "done$", "takeLast", "distinctUntilKeyChanged", "height", "tap", "state", "finalize", "__spreadValues", "patchIndeterminate", "document$", "tablet$", "switchMap", "getElements", "tap", "el", "mergeMap", "fromEvent", "takeWhile", "map", "withLatestFrom", "tablet", "isAppleDevice", "patchScrollfix", "document$", "switchMap", "getElements", "tap", "el", "filter", "mergeMap", "fromEvent", "map", "top", "patchScrolllock", "viewport$", "tablet$", "combineLatest", "watchToggle", "map", "active", "tablet", "switchMap", "of", "delay", "withLatestFrom", "y", "value", "obj", "data", "key", 
"x", "y", "nodes", "parent", "i", "node", "document$", "watchDocument", "location$", "watchLocation", "target$", "watchLocationTarget", "keyboard$", "watchKeyboard", "viewport$", "watchViewport", "tablet$", "watchMedia", "screen$", "print$", "watchPrint", "config", "configuration", "index$", "requestJSON", "NEVER", "alert$", "Subject", "setupClipboardJS", "feature", "setupInstantLoading", "_a", "setupVersionSelector", "merge", "delay", "setToggle", "filter", "mode", "key", "prev", "getOptionalElement", "next", "patchIndeterminate", "patchScrollfix", "patchScrolllock", "header$", "watchHeader", "getComponentElement", "main$", "map", "switchMap", "el", "watchMain", "shareReplay", "control$", "getComponentElements", "mountConsent", "mountDialog", "mountHeader", "mountPalette", "mountSearch", "mountSource", "content$", "defer", "mountAnnounce", "mountContent", "mountSearchHiglight", "EMPTY", "mountHeaderTitle", "at", "mountSidebar", "mountTabs", "mountTableOfContents", "mountBackToTop", "component$", "mergeWith"] +} diff --git a/2.0.0/assets/javascripts/extra/bundle.5f09fbc3.min.js b/2.0.0/assets/javascripts/extra/bundle.5f09fbc3.min.js new file mode 100644 index 00000000..48b752cd --- /dev/null +++ b/2.0.0/assets/javascripts/extra/bundle.5f09fbc3.min.js @@ -0,0 +1,18 @@ +"use strict";(()=>{var Je=Object.create;var qr=Object.defineProperty;var $e=Object.getOwnPropertyDescriptor;var Qe=Object.getOwnPropertyNames;var Xe=Object.getPrototypeOf,Ze=Object.prototype.hasOwnProperty;var rt=(r,o)=>()=>(o||r((o={exports:{}}).exports,o),o.exports);var et=(r,o,t,e)=>{if(o&&typeof o=="object"||typeof o=="function")for(let n of Qe(o))!Ze.call(r,n)&&n!==t&&qr(r,n,{get:()=>o[n],enumerable:!(e=$e(o,n))||e.enumerable});return r};var tt=(r,o,t)=>(t=r!=null?Je(Xe(r)):{},et(o||!r||!r.__esModule?qr(t,"default",{value:r,enumerable:!0}):t,r));var me=rt((Tt,er)=>{/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */var Hr,Kr,Jr,$r,Qr,Xr,Zr,re,ee,Z,Ar,te,oe,ne,k,ie,fe,ae,ue,ce,se,pe,le,rr;(function(r){var o=typeof global=="object"?global:typeof self=="object"?self:typeof this=="object"?this:{};typeof define=="function"&&define.amd?define("tslib",["exports"],function(e){r(t(o,t(e)))}):typeof er=="object"&&typeof er.exports=="object"?r(t(o,t(er.exports))):r(t(o));function t(e,n){return e!==o&&(typeof Object.create=="function"?Object.defineProperty(e,"__esModule",{value:!0}):e.__esModule=!0),function(i,f){return e[i]=n?n(i,f):f}}})(function(r){var o=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,n){e.__proto__=n}||function(e,n){for(var i in n)Object.prototype.hasOwnProperty.call(n,i)&&(e[i]=n[i])};Hr=function(e,n){if(typeof n!="function"&&n!==null)throw new TypeError("Class extends value "+String(n)+" is not a constructor or null");o(e,n);function i(){this.constructor=e}e.prototype=n===null?Object.create(n):(i.prototype=n.prototype,new i)},Kr=Object.assign||function(e){for(var n,i=1,f=arguments.length;i=0;s--)(c=e[s])&&(a=(u<3?c(a):u>3?c(n,i,a):c(n,i))||a);return u>3&&a&&Object.defineProperty(n,i,a),a},Qr=function(e,n){return function(i,f){n(i,f,e)}},Xr=function(e,n){if(typeof Reflect=="object"&&typeof Reflect.metadata=="function")return Reflect.metadata(e,n)},Zr=function(e,n,i,f){function u(a){return a instanceof i?a:new i(function(c){c(a)})}return new(i||(i=Promise))(function(a,c){function s(y){try{p(f.next(y))}catch(g){c(g)}}function d(y){try{p(f.throw(y))}catch(g){c(g)}}function p(y){y.done?a(y.value):u(y.value).then(s,d)}p((f=f.apply(e,n||[])).next())})},re=function(e,n){var i={label:0,sent:function(){if(a[0]&1)throw a[1];return a[1]},trys:[],ops:[]},f,u,a,c;return c={next:s(0),throw:s(1),return:s(2)},typeof Symbol=="function"&&(c[Symbol.iterator]=function(){return this}),c;function s(p){return function(y){return d([p,y])}}function d(p){if(f)throw new TypeError("Generator is already executing.");for(;i;)try{if(f=1,u&&(a=p[0]&2?u.return:p[0]?u.throw||((a=u.return)&&a.call(u),0):u.next)&&!(a=a.call(u,p[1])).done)return a;switch(u=0,a&&(p=[p[0]&2,a.value]),p[0]){case 0:case 1:a=p;break;case 4:return i.label++,{value:p[1],done:!1};case 5:i.label++,u=p[1],p=[0];continue;case 7:p=i.ops.pop(),i.trys.pop();continue;default:if(a=i.trys,!(a=a.length>0&&a[a.length-1])&&(p[0]===6||p[0]===2)){i=0;continue}if(p[0]===3&&(!a||p[1]>a[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[f++],done:!e}}};throw new TypeError(n?"Object is not iterable.":"Symbol.iterator is not defined.")},Ar=function(e,n){var i=typeof Symbol=="function"&&e[Symbol.iterator];if(!i)return e;var f=i.call(e),u,a=[],c;try{for(;(n===void 0||n-- >0)&&!(u=f.next()).done;)a.push(u.value)}catch(s){c={error:s}}finally{try{u&&!u.done&&(i=f.return)&&i.call(f)}finally{if(c)throw c.error}}return a},te=function(){for(var e=[],n=0;n1||s(m,P)})})}function s(m,P){try{d(f[m](P))}catch(j){g(a[0][3],j)}}function d(m){m.value instanceof k?Promise.resolve(m.value.v).then(p,y):g(a[0][2],m)}function p(m){s("next",m)}function y(m){s("throw",m)}function g(m,P){m(P),a.shift(),a.length&&s(a[0][0],a[0][1])}},fe=function(e){var n,i;return n={},f("next"),f("throw",function(u){throw u}),f("return"),n[Symbol.iterator]=function(){return this},n;function f(u,a){n[u]=e[u]?function(c){return(i=!i)?{value:k(e[u](c)),done:u==="return"}:a?a(c):c}:a}},ae=function(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var n=e[Symbol.asyncIterator],i;return 
n?n.call(e):(e=typeof Z=="function"?Z(e):e[Symbol.iterator](),i={},f("next"),f("throw"),f("return"),i[Symbol.asyncIterator]=function(){return this},i);function f(a){i[a]=e[a]&&function(c){return new Promise(function(s,d){c=e[a](c),u(s,d,c.done,c.value)})}}function u(a,c,s,d){Promise.resolve(d).then(function(p){a({value:p,done:s})},c)}},ue=function(e,n){return Object.defineProperty?Object.defineProperty(e,"raw",{value:n}):e.raw=n,e};var t=Object.create?function(e,n){Object.defineProperty(e,"default",{enumerable:!0,value:n})}:function(e,n){e.default=n};ce=function(e){if(e&&e.__esModule)return e;var n={};if(e!=null)for(var i in e)i!=="default"&&Object.prototype.hasOwnProperty.call(e,i)&&rr(n,e,i);return t(n,e),n},se=function(e){return e&&e.__esModule?e:{default:e}},pe=function(e,n,i,f){if(i==="a"&&!f)throw new TypeError("Private accessor was defined without a getter");if(typeof n=="function"?e!==n||!f:!n.has(e))throw new TypeError("Cannot read private member from an object whose class did not declare it");return i==="m"?f:i==="a"?f.call(e):f?f.value:n.get(e)},le=function(e,n,i,f,u){if(f==="m")throw new TypeError("Private method is not writable");if(f==="a"&&!u)throw new TypeError("Private accessor was defined without a setter");if(typeof n=="function"?e!==n||!u:!n.has(e))throw new TypeError("Cannot write private member to an object whose class did not declare it");return f==="a"?u.call(e,i):u?u.value=i:n.set(e,i),i},r("__extends",Hr),r("__assign",Kr),r("__rest",Jr),r("__decorate",$r),r("__param",Qr),r("__metadata",Xr),r("__awaiter",Zr),r("__generator",re),r("__exportStar",ee),r("__createBinding",rr),r("__values",Z),r("__read",Ar),r("__spread",te),r("__spreadArrays",oe),r("__spreadArray",ne),r("__await",k),r("__asyncGenerator",ie),r("__asyncDelegator",fe),r("__asyncValues",ae),r("__makeTemplateObject",ue),r("__importStar",ce),r("__importDefault",se),r("__classPrivateFieldGet",pe),r("__classPrivateFieldSet",le)})});var de=tt(me(),1),{__extends:_,__assign:Pt,__rest:jt,__decorate:Ft,__param:Mt,__metadata:Ct,__awaiter:he,__generator:tr,__exportStar:Lt,__createBinding:Rt,__values:M,__read:w,__spread:kt,__spreadArrays:Ut,__spreadArray:S,__await:or,__asyncGenerator:ve,__asyncDelegator:Wt,__asyncValues:be,__makeTemplateObject:Dt,__importStar:Vt,__importDefault:Bt,__classPrivateFieldGet:Gt,__classPrivateFieldSet:Nt}=de.default;function l(r){return typeof r=="function"}function nr(r){var o=function(e){Error.call(e),e.stack=new Error().stack},t=r(o);return t.prototype=Object.create(Error.prototype),t.prototype.constructor=t,t}var ir=nr(function(r){return function(t){r(this),this.message=t?t.length+` errors occurred during unsubscription: +`+t.map(function(e,n){return n+1+") "+e.toString()}).join(` + `):"",this.name="UnsubscriptionError",this.errors=t}});function C(r,o){if(r){var t=r.indexOf(o);0<=t&&r.splice(t,1)}}var F=function(){function r(o){this.initialTeardown=o,this.closed=!1,this._parentage=null,this._finalizers=null}return r.prototype.unsubscribe=function(){var o,t,e,n,i;if(!this.closed){this.closed=!0;var f=this._parentage;if(f)if(this._parentage=null,Array.isArray(f))try{for(var u=M(f),a=u.next();!a.done;a=u.next()){var c=a.value;c.remove(this)}}catch(m){o={error:m}}finally{try{a&&!a.done&&(t=u.return)&&t.call(u)}finally{if(o)throw o.error}}else f.remove(this);var s=this.initialTeardown;if(l(s))try{s()}catch(m){i=m instanceof ir?m.errors:[m]}var d=this._finalizers;if(d){this._finalizers=null;try{for(var p=M(d),y=p.next();!y.done;y=p.next()){var g=y.value;try{ye(g)}catch(m){i=i!=null?i:[],m 
instanceof ir?i=S(S([],w(i)),w(m.errors)):i.push(m)}}}catch(m){e={error:m}}finally{try{y&&!y.done&&(n=p.return)&&n.call(p)}finally{if(e)throw e.error}}}if(i)throw new ir(i)}},r.prototype.add=function(o){var t;if(o&&o!==this)if(this.closed)ye(o);else{if(o instanceof r){if(o.closed||o._hasParent(this))return;o._addParent(this)}(this._finalizers=(t=this._finalizers)!==null&&t!==void 0?t:[]).push(o)}},r.prototype._hasParent=function(o){var t=this._parentage;return t===o||Array.isArray(t)&&t.includes(o)},r.prototype._addParent=function(o){var t=this._parentage;this._parentage=Array.isArray(t)?(t.push(o),t):t?[t,o]:o},r.prototype._removeParent=function(o){var t=this._parentage;t===o?this._parentage=null:Array.isArray(t)&&C(t,o)},r.prototype.remove=function(o){var t=this._finalizers;t&&C(t,o),o instanceof r&&o._removeParent(this)},r.EMPTY=function(){var o=new r;return o.closed=!0,o}(),r}();var Ir=F.EMPTY;function fr(r){return r instanceof F||r&&"closed"in r&&l(r.remove)&&l(r.add)&&l(r.unsubscribe)}function ye(r){l(r)?r():r.unsubscribe()}var O={onUnhandledError:null,onStoppedNotification:null,Promise:void 0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var U={setTimeout:function(r,o){for(var t=[],e=2;e0},enumerable:!1,configurable:!0}),o.prototype._trySubscribe=function(t){return this._throwIfClosed(),r.prototype._trySubscribe.call(this,t)},o.prototype._subscribe=function(t){return this._throwIfClosed(),this._checkFinalizedStatuses(t),this._innerSubscribe(t)},o.prototype._innerSubscribe=function(t){var e=this,n=this,i=n.hasError,f=n.isStopped,u=n.observers;return i||f?Ir:(this.currentObservers=null,u.push(t),new F(function(){e.currentObservers=null,C(u,t)}))},o.prototype._checkFinalizedStatuses=function(t){var e=this,n=e.hasError,i=e.thrownError,f=e.isStopped;n?t.error(i):f&&t.complete()},o.prototype.asObservable=function(){var t=new b;return t.source=this,t},o.create=function(t,e){return new Ae(t,e)},o}(b);var Ae=function(r){_(o,r);function o(t,e){var n=r.call(this)||this;return n.destination=t,n.source=e,n}return o.prototype.next=function(t){var e,n;(n=(e=this.destination)===null||e===void 0?void 0:e.next)===null||n===void 0||n.call(e,t)},o.prototype.error=function(t){var e,n;(n=(e=this.destination)===null||e===void 0?void 0:e.error)===null||n===void 0||n.call(e,t)},o.prototype.complete=function(){var t,e;(e=(t=this.destination)===null||t===void 0?void 0:t.complete)===null||e===void 0||e.call(t)},o.prototype._subscribe=function(t){var e,n;return(n=(e=this.source)===null||e===void 0?void 0:e.subscribe(t))!==null&&n!==void 0?n:Ir},o}(Fr);var J={now:function(){return(J.delegate||Date).now()},delegate:void 0};var Mr=function(r){_(o,r);function o(t,e,n){t===void 0&&(t=1/0),e===void 0&&(e=1/0),n===void 0&&(n=J);var i=r.call(this)||this;return i._bufferSize=t,i._windowTime=e,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=e===1/0,i._bufferSize=Math.max(1,t),i._windowTime=Math.max(1,e),i}return o.prototype.next=function(t){var e=this,n=e.isStopped,i=e._buffer,f=e._infiniteTimeWindow,u=e._timestampProvider,a=e._windowTime;n||(i.push(t),!f&&i.push(u.now()+a)),this._trimBuffer(),r.prototype.next.call(this,t)},o.prototype._subscribe=function(t){this._throwIfClosed(),this._trimBuffer();for(var 
e=this._innerSubscribe(t),n=this,i=n._infiniteTimeWindow,f=n._buffer,u=f.slice(),a=0;a{sessionStorage.setItem("\u1D34\u2092\u1D34\u2092\u1D34\u2092",`${t}`),r.hidden=!t}),o.next(JSON.parse(sessionStorage.getItem("\u1D34\u2092\u1D34\u2092\u1D34\u2092")||"true")),z(r,"click").pipe(zr(o)).subscribe(([,t])=>o.next(!t)),kr(250).pipe(gr(o.pipe(X(t=>!t))),H(75),Nr({delay:()=>o.pipe(X(t=>t))}),T(()=>{let t=document.createElement("div");return t.className="\u1D34\u2092\u1D34\u2092\u1D34\u2092",t.ariaHidden="true",Ke.appendChild(t),Ur(Wr,Rr(t)).pipe(Gr(()=>t.remove()),gr(o.pipe(X(e=>!e))),Yr(e=>z(e,"click").pipe(Er(()=>e.classList.add("\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090")),Vr(1e3),Er(()=>e.classList.remove("\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090")))))})).subscribe()}})(); +//# sourceMappingURL=bundle.5f09fbc3.min.js.map + diff --git a/2.0.0/assets/javascripts/extra/bundle.5f09fbc3.min.js.map b/2.0.0/assets/javascripts/extra/bundle.5f09fbc3.min.js.map new file mode 100644 index 00000000..24f36746 --- /dev/null +++ b/2.0.0/assets/javascripts/extra/bundle.5f09fbc3.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/rxjs/node_modules/tslib/tslib.js", "node_modules/rxjs/node_modules/tslib/modules/index.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", 
"node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/interval.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "src/assets/javascripts/extra/bundle.ts"], + "sourceRoot": "../../../..", + "sourcesContent": ["/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global global, define, System, Reflect, Promise */\r\nvar __extends;\r\nvar __assign;\r\nvar __rest;\r\nvar __decorate;\r\nvar __param;\r\nvar __metadata;\r\nvar __awaiter;\r\nvar __generator;\r\nvar __exportStar;\r\nvar __values;\r\nvar __read;\r\nvar __spread;\r\nvar __spreadArrays;\r\nvar __spreadArray;\r\nvar __await;\r\nvar __asyncGenerator;\r\nvar __asyncDelegator;\r\nvar __asyncValues;\r\nvar __makeTemplateObject;\r\nvar __importStar;\r\nvar __importDefault;\r\nvar __classPrivateFieldGet;\r\nvar __classPrivateFieldSet;\r\nvar __createBinding;\r\n(function (factory) {\r\n var root = typeof global === \"object\" ? global : typeof self === \"object\" ? self : typeof this === \"object\" ? this : {};\r\n if (typeof define === \"function\" && define.amd) {\r\n define(\"tslib\", [\"exports\"], function (exports) { factory(createExporter(root, createExporter(exports))); });\r\n }\r\n else if (typeof module === \"object\" && typeof module.exports === \"object\") {\r\n factory(createExporter(root, createExporter(module.exports)));\r\n }\r\n else {\r\n factory(createExporter(root));\r\n }\r\n function createExporter(exports, previous) {\r\n if (exports !== root) {\r\n if (typeof Object.create === \"function\") {\r\n Object.defineProperty(exports, \"__esModule\", { value: true });\r\n }\r\n else {\r\n exports.__esModule = true;\r\n }\r\n }\r\n return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };\r\n }\r\n})\r\n(function (exporter) {\r\n var extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n\r\n __extends = function (d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n };\r\n\r\n __assign = Object.assign || function (t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n };\r\n\r\n __rest = function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n };\r\n\r\n __decorate = function (decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n };\r\n\r\n __param = function (paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n };\r\n\r\n __metadata = function (metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n };\r\n\r\n __awaiter = function (thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n };\r\n\r\n __generator = function (thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n };\r\n\r\n __exportStar = function(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n };\r\n\r\n __createBinding = Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n }) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n });\r\n\r\n __values = function (o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n };\r\n\r\n __read = function (o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spread = function () {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n };\r\n\r\n /** @deprecated */\r\n __spreadArrays = function () {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n };\r\n\r\n __spreadArray = function (to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n };\r\n\r\n __await = function (v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n };\r\n\r\n __asyncGenerator = function (thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n };\r\n\r\n __asyncDelegator = function (o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n };\r\n\r\n __asyncValues = function (o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? 
m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n };\r\n\r\n __makeTemplateObject = function (cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n };\r\n\r\n var __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n }) : function(o, v) {\r\n o[\"default\"] = v;\r\n };\r\n\r\n __importStar = function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n };\r\n\r\n __importDefault = function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n };\r\n\r\n __classPrivateFieldGet = function (receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n };\r\n\r\n __classPrivateFieldSet = function (receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\r\n };\r\n\r\n exporter(\"__extends\", __extends);\r\n exporter(\"__assign\", __assign);\r\n exporter(\"__rest\", __rest);\r\n exporter(\"__decorate\", __decorate);\r\n exporter(\"__param\", __param);\r\n exporter(\"__metadata\", __metadata);\r\n exporter(\"__awaiter\", __awaiter);\r\n exporter(\"__generator\", __generator);\r\n exporter(\"__exportStar\", __exportStar);\r\n exporter(\"__createBinding\", __createBinding);\r\n exporter(\"__values\", __values);\r\n exporter(\"__read\", __read);\r\n exporter(\"__spread\", __spread);\r\n exporter(\"__spreadArrays\", __spreadArrays);\r\n exporter(\"__spreadArray\", __spreadArray);\r\n exporter(\"__await\", __await);\r\n exporter(\"__asyncGenerator\", __asyncGenerator);\r\n exporter(\"__asyncDelegator\", __asyncDelegator);\r\n exporter(\"__asyncValues\", __asyncValues);\r\n exporter(\"__makeTemplateObject\", __makeTemplateObject);\r\n exporter(\"__importStar\", __importStar);\r\n exporter(\"__importDefault\", __importDefault);\r\n exporter(\"__classPrivateFieldGet\", __classPrivateFieldGet);\r\n exporter(\"__classPrivateFieldSet\", __classPrivateFieldSet);\r\n});\r\n", "import tslib from '../tslib.js';\r\nconst {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n} = tslib;\r\nexport {\r\n __extends,\r\n __assign,\r\n __rest,\r\n __decorate,\r\n __param,\r\n __metadata,\r\n __awaiter,\r\n __generator,\r\n __exportStar,\r\n __createBinding,\r\n __values,\r\n __read,\r\n __spread,\r\n __spreadArrays,\r\n __spreadArray,\r\n __await,\r\n __asyncGenerator,\r\n __asyncDelegator,\r\n __asyncValues,\r\n __makeTemplateObject,\r\n __importStar,\r\n __importDefault,\r\n __classPrivateFieldGet,\r\n __classPrivateFieldSet,\r\n};\r\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n NEVER,\n ReplaySubject,\n delay,\n distinctUntilChanged,\n filter,\n finalize,\n fromEvent,\n interval,\n merge,\n mergeMap,\n of,\n repeat,\n switchMap,\n take,\n takeUntil,\n tap,\n withLatestFrom\n} from \"rxjs\"\n\n/* ----------------------------------------------------------------------------\n * Script\n * ------------------------------------------------------------------------- */\n\n/* Append container for instances */\nconst container = document.createElement(\"div\")\ndocument.body.appendChild(container)\n\n/* Append button next to palette toggle */\nconst header = document.querySelector(\".md-header__option\")\nif (header) {\n const button = document.createElement(\"button\")\n button.className = \"md-header__button md-icon \u1D34\u2092\u1D34\u2092\u1D34\u2092__button\"\n if (header.parentElement)\n header.parentElement.insertBefore(button, header)\n\n /* Toggle animation */\n const on$ = new ReplaySubject(1)\n on$\n .pipe(\n distinctUntilChanged()\n )\n .subscribe(on => {\n sessionStorage.setItem(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092\", `${on}`)\n button.hidden = !on\n })\n\n /* Load state from session storage */\n on$.next(JSON.parse(sessionStorage.getItem(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092\") || \"true\"))\n fromEvent(button, \"click\")\n .pipe(\n withLatestFrom(on$)\n )\n .subscribe(([, on]) => on$.next(!on))\n\n /* Generate instances */\n interval(250)\n .pipe(\n takeUntil(on$.pipe(filter(on => !on))),\n take(75),\n repeat({ delay: () => on$.pipe(filter(on => on)) }),\n mergeMap(() => {\n const instance = document.createElement(\"div\")\n instance.className = \"\u1D34\u2092\u1D34\u2092\u1D34\u2092\"\n instance.ariaHidden = \"true\"\n container.appendChild(instance)\n return merge(NEVER, of(instance))\n .pipe(\n finalize(() => instance.remove()),\n takeUntil(on$.pipe(filter(on => !on))),\n switchMap(el => fromEvent(el, \"click\")\n .pipe(\n tap(() => el.classList.add(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090\")),\n delay(1000),\n tap(() => el.classList.remove(\"\u1D34\u2092\u1D34\u2092\u1D34\u2092--\u1D4D\u2092\u1D57\uA700\u1D34\u2090\"))\n )\n )\n )\n })\n )\n .subscribe()\n}\n"], + "mappings": 
"6iBAAA,IAAAA,GAAAC,GAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gFAeA,IAAIC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,EACAC,GACAC,GACAC,GACAC,GACAC,EACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,IACH,SAAUC,EAAS,CAChB,IAAIC,EAAO,OAAO,QAAW,SAAW,OAAS,OAAO,MAAS,SAAW,KAAO,OAAO,MAAS,SAAW,KAAO,CAAC,EAClH,OAAO,QAAW,YAAc,OAAO,IACvC,OAAO,QAAS,CAAC,SAAS,EAAG,SAAU3B,EAAS,CAAE0B,EAAQE,EAAeD,EAAMC,EAAe5B,CAAO,CAAC,CAAC,CAAG,CAAC,EAEtG,OAAOC,IAAW,UAAY,OAAOA,GAAO,SAAY,SAC7DyB,EAAQE,EAAeD,EAAMC,EAAe3B,GAAO,OAAO,CAAC,CAAC,EAG5DyB,EAAQE,EAAeD,CAAI,CAAC,EAEhC,SAASC,EAAe5B,EAAS6B,EAAU,CACvC,OAAI7B,IAAY2B,IACR,OAAO,OAAO,QAAW,WACzB,OAAO,eAAe3B,EAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAG5DA,EAAQ,WAAa,IAGtB,SAAU8B,EAAIC,EAAG,CAAE,OAAO/B,EAAQ8B,GAAMD,EAAWA,EAASC,EAAIC,CAAC,EAAIA,CAAG,CACnF,CACJ,GACC,SAAUC,EAAU,CACjB,IAAIC,EAAgB,OAAO,gBACtB,CAAE,UAAW,CAAC,CAAE,YAAa,OAAS,SAAUC,EAAGC,EAAG,CAAED,EAAE,UAAYC,CAAG,GAC1E,SAAUD,EAAGC,EAAG,CAAE,QAASC,KAAKD,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGC,CAAC,IAAGF,EAAEE,GAAKD,EAAEC,GAAI,EAEpGlC,GAAY,SAAUgC,EAAGC,EAAG,CACxB,GAAI,OAAOA,GAAM,YAAcA,IAAM,KACjC,MAAM,IAAI,UAAU,uBAAyB,OAAOA,CAAC,EAAI,+BAA+B,EAC5FF,EAAcC,EAAGC,CAAC,EAClB,SAASE,GAAK,CAAE,KAAK,YAAcH,CAAG,CACtCA,EAAE,UAAYC,IAAM,KAAO,OAAO,OAAOA,CAAC,GAAKE,EAAG,UAAYF,EAAE,UAAW,IAAIE,EACnF,EAEAlC,GAAW,OAAO,QAAU,SAAUmC,EAAG,CACrC,QAASC,EAAG,EAAI,EAAGC,EAAI,UAAU,OAAQ,EAAIA,EAAG,IAAK,CACjDD,EAAI,UAAU,GACd,QAASH,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,IAAGE,EAAEF,GAAKG,EAAEH,GAC9E,CACA,OAAOE,CACX,EAEAlC,GAAS,SAAUmC,EAAGE,EAAG,CACrB,IAAIH,EAAI,CAAC,EACT,QAASF,KAAKG,EAAO,OAAO,UAAU,eAAe,KAAKA,EAAGH,CAAC,GAAKK,EAAE,QAAQL,CAAC,EAAI,IAC9EE,EAAEF,GAAKG,EAAEH,IACb,GAAIG,GAAK,MAAQ,OAAO,OAAO,uBAA0B,WACrD,QAASG,EAAI,EAAGN,EAAI,OAAO,sBAAsBG,CAAC,EAAGG,EAAIN,EAAE,OAAQM,IAC3DD,EAAE,QAAQL,EAAEM,EAAE,EAAI,GAAK,OAAO,UAAU,qBAAqB,KAAKH,EAAGH,EAAEM,EAAE,IACzEJ,EAAEF,EAAEM,IAAMH,EAAEH,EAAEM,KAE1B,OAAOJ,CACX,EAEAjC,GAAa,SAAUsC,EAAYC,EAAQC,EAAKC,EAAM,CAClD,IAAIC,EAAI,UAAU,OAAQC,EAAID,EAAI,EAAIH,EAASE,IAAS,KAAOA,EAAO,OAAO,yBAAyBF,EAAQC,CAAG,EAAIC,EAAMZ,EAC3H,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAYc,EAAI,QAAQ,SAASL,EAAYC,EAAQC,EAAKC,CAAI,MACxH,SAASJ,EAAIC,EAAW,OAAS,EAAGD,GAAK,EAAGA,KAASR,EAAIS,EAAWD,MAAIM,GAAKD,EAAI,EAAIb,EAAEc,CAAC,EAAID,EAAI,EAAIb,EAAEU,EAAQC,EAAKG,CAAC,EAAId,EAAEU,EAAQC,CAAG,IAAMG,GAChJ,OAAOD,EAAI,GAAKC,GAAK,OAAO,eAAeJ,EAAQC,EAAKG,CAAC,EAAGA,CAChE,EAEA1C,GAAU,SAAU2C,EAAYC,EAAW,CACvC,OAAO,SAAUN,EAAQC,EAAK,CAAEK,EAAUN,EAAQC,EAAKI,CAAU,CAAG,CACxE,EAEA1C,GAAa,SAAU4C,EAAaC,EAAe,CAC/C,GAAI,OAAO,SAAY,UAAY,OAAO,QAAQ,UAAa,WAAY,OAAO,QAAQ,SAASD,EAAaC,CAAa,CACjI,EAEA5C,GAAY,SAAU6C,EAASC,EAAYC,EAAGC,EAAW,CACrD,SAASC,EAAMC,EAAO,CAAE,OAAOA,aAAiBH,EAAIG,EAAQ,IAAIH,EAAE,SAAUI,EAAS,CAAEA,EAAQD,CAAK,CAAG,CAAC,CAAG,CAC3G,OAAO,IAAKH,IAAMA,EAAI,UAAU,SAAUI,EAASC,EAAQ,CACvD,SAASC,EAAUH,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,KAAKE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC1F,SAASsB,EAASL,EAAO,CAAE,GAAI,CAAEI,EAAKN,EAAU,MAASE,CAAK,CAAC,CAAG,OAASjB,EAAP,CAAYmB,EAAOnB,CAAC,CAAG,CAAE,CAC7F,SAASqB,EAAKE,EAAQ,CAAEA,EAAO,KAAOL,EAAQK,EAAO,KAAK,EAAIP,EAAMO,EAAO,KAAK,EAAE,KAAKH,EAAWE,CAAQ,CAAG,CAC7GD,GAAMN,EAAYA,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,GAAG,KAAK,CAAC,CACxE,CAAC,CACL,EAEA7C,GAAc,SAAU4C,EAASY,EAAM,CACnC,IAAIC,EAAI,CAAE,MAAO,EAAG,KAAM,UAAW,CAAE,GAAI5B,EAAE,GAAK,EAAG,MAAMA,EAAE,GAAI,OAAOA,EAAE,EAAI,EAAG,KAAM,CAAC,EAAG,IAAK,CAAC,CAAE,EAAG,EAAG6B,EAAG7B,EAAG8B,EAC/G,OAAOA,EAAI,CAAE,KAAMC,EAAK,CAAC,EAAG,MAASA,EAAK,CAAC,EAAG,OAAUA,EAAK,CAAC,CAAE,EAAG,OAAO,QAAW,aAAeD,EAAE,OAAO,UAAY,UAAW,CAAE,OAAO,IAAM,GAAIA,EACvJ,SAASC,
EAAK7B,EAAG,CAAE,OAAO,SAAUT,EAAG,CAAE,OAAO+B,EAAK,CAACtB,EAAGT,CAAC,CAAC,CAAG,CAAG,CACjE,SAAS+B,EAAKQ,EAAI,CACd,GAAI,EAAG,MAAM,IAAI,UAAU,iCAAiC,EAC5D,KAAOJ,GAAG,GAAI,CACV,GAAI,EAAI,EAAGC,IAAM7B,EAAIgC,EAAG,GAAK,EAAIH,EAAE,OAAYG,EAAG,GAAKH,EAAE,SAAc7B,EAAI6B,EAAE,SAAc7B,EAAE,KAAK6B,CAAC,EAAG,GAAKA,EAAE,OAAS,EAAE7B,EAAIA,EAAE,KAAK6B,EAAGG,EAAG,EAAE,GAAG,KAAM,OAAOhC,EAE3J,OADI6B,EAAI,EAAG7B,IAAGgC,EAAK,CAACA,EAAG,GAAK,EAAGhC,EAAE,KAAK,GAC9BgC,EAAG,GAAI,CACX,IAAK,GAAG,IAAK,GAAGhC,EAAIgC,EAAI,MACxB,IAAK,GAAG,OAAAJ,EAAE,QAAgB,CAAE,MAAOI,EAAG,GAAI,KAAM,EAAM,EACtD,IAAK,GAAGJ,EAAE,QAASC,EAAIG,EAAG,GAAIA,EAAK,CAAC,CAAC,EAAG,SACxC,IAAK,GAAGA,EAAKJ,EAAE,IAAI,IAAI,EAAGA,EAAE,KAAK,IAAI,EAAG,SACxC,QACI,GAAM5B,EAAI4B,EAAE,KAAM,EAAA5B,EAAIA,EAAE,OAAS,GAAKA,EAAEA,EAAE,OAAS,MAAQgC,EAAG,KAAO,GAAKA,EAAG,KAAO,GAAI,CAAEJ,EAAI,EAAG,QAAU,CAC3G,GAAII,EAAG,KAAO,IAAM,CAAChC,GAAMgC,EAAG,GAAKhC,EAAE,IAAMgC,EAAG,GAAKhC,EAAE,IAAM,CAAE4B,EAAE,MAAQI,EAAG,GAAI,KAAO,CACrF,GAAIA,EAAG,KAAO,GAAKJ,EAAE,MAAQ5B,EAAE,GAAI,CAAE4B,EAAE,MAAQ5B,EAAE,GAAIA,EAAIgC,EAAI,KAAO,CACpE,GAAIhC,GAAK4B,EAAE,MAAQ5B,EAAE,GAAI,CAAE4B,EAAE,MAAQ5B,EAAE,GAAI4B,EAAE,IAAI,KAAKI,CAAE,EAAG,KAAO,CAC9DhC,EAAE,IAAI4B,EAAE,IAAI,IAAI,EACpBA,EAAE,KAAK,IAAI,EAAG,QACtB,CACAI,EAAKL,EAAK,KAAKZ,EAASa,CAAC,CAC7B,OAASzB,EAAP,CAAY6B,EAAK,CAAC,EAAG7B,CAAC,EAAG0B,EAAI,CAAG,QAAE,CAAU,EAAI7B,EAAI,CAAG,CACzD,GAAIgC,EAAG,GAAK,EAAG,MAAMA,EAAG,GAAI,MAAO,CAAE,MAAOA,EAAG,GAAKA,EAAG,GAAK,OAAQ,KAAM,EAAK,CACnF,CACJ,EAEA5D,GAAe,SAAS6D,EAAGC,EAAG,CAC1B,QAASpC,KAAKmC,EAAOnC,IAAM,WAAa,CAAC,OAAO,UAAU,eAAe,KAAKoC,EAAGpC,CAAC,GAAGX,GAAgB+C,EAAGD,EAAGnC,CAAC,CAChH,EAEAX,GAAkB,OAAO,OAAU,SAAS+C,EAAGD,EAAGE,EAAGC,EAAI,CACjDA,IAAO,SAAWA,EAAKD,GAC3B,OAAO,eAAeD,EAAGE,EAAI,CAAE,WAAY,GAAM,IAAK,UAAW,CAAE,OAAOH,EAAEE,EAAI,CAAE,CAAC,CACvF,EAAM,SAASD,EAAGD,EAAGE,EAAGC,EAAI,CACpBA,IAAO,SAAWA,EAAKD,GAC3BD,EAAEE,GAAMH,EAAEE,EACd,EAEA9D,EAAW,SAAU6D,EAAG,CACpB,IAAIjC,EAAI,OAAO,QAAW,YAAc,OAAO,SAAUgC,EAAIhC,GAAKiC,EAAEjC,GAAIG,EAAI,EAC5E,GAAI6B,EAAG,OAAOA,EAAE,KAAKC,CAAC,EACtB,GAAIA,GAAK,OAAOA,EAAE,QAAW,SAAU,MAAO,CAC1C,KAAM,UAAY,CACd,OAAIA,GAAK9B,GAAK8B,EAAE,SAAQA,EAAI,QACrB,CAAE,MAAOA,GAAKA,EAAE9B,KAAM,KAAM,CAAC8B,CAAE,CAC1C,CACJ,EACA,MAAM,IAAI,UAAUjC,EAAI,0BAA4B,iCAAiC,CACzF,EAEA3B,GAAS,SAAU4D,EAAG,EAAG,CACrB,IAAID,EAAI,OAAO,QAAW,YAAcC,EAAE,OAAO,UACjD,GAAI,CAACD,EAAG,OAAOC,EACf,IAAI9B,EAAI6B,EAAE,KAAKC,CAAC,EAAGxB,EAAG2B,EAAK,CAAC,EAAGlC,EAC/B,GAAI,CACA,MAAQ,IAAM,QAAU,KAAM,IAAM,EAAEO,EAAIN,EAAE,KAAK,GAAG,MAAMiC,EAAG,KAAK3B,EAAE,KAAK,CAC7E,OACO4B,EAAP,CAAgBnC,EAAI,CAAE,MAAOmC,CAAM,CAAG,QACtC,CACI,GAAI,CACI5B,GAAK,CAACA,EAAE,OAASuB,EAAI7B,EAAE,SAAY6B,EAAE,KAAK7B,CAAC,CACnD,QACA,CAAU,GAAID,EAAG,MAAMA,EAAE,KAAO,CACpC,CACA,OAAOkC,CACX,EAGA9D,GAAW,UAAY,CACnB,QAAS8D,EAAK,CAAC,EAAGjC,EAAI,EAAGA,EAAI,UAAU,OAAQA,IAC3CiC,EAAKA,EAAG,OAAO/D,GAAO,UAAU8B,EAAE,CAAC,EACvC,OAAOiC,CACX,EAGA7D,GAAiB,UAAY,CACzB,QAASyB,EAAI,EAAGG,EAAI,EAAGmC,EAAK,UAAU,OAAQnC,EAAImC,EAAInC,IAAKH,GAAK,UAAUG,GAAG,OAC7E,QAASM,EAAI,MAAMT,CAAC,EAAGkC,EAAI,EAAG/B,EAAI,EAAGA,EAAImC,EAAInC,IACzC,QAAS,EAAI,UAAUA,GAAIoC,EAAI,EAAGC,EAAK,EAAE,OAAQD,EAAIC,EAAID,IAAKL,IAC1DzB,EAAEyB,GAAK,EAAEK,GACjB,OAAO9B,CACX,EAEAjC,GAAgB,SAAUiE,EAAIC,EAAMC,EAAM,CACtC,GAAIA,GAAQ,UAAU,SAAW,EAAG,QAASxC,EAAI,EAAGyC,EAAIF,EAAK,OAAQN,EAAIjC,EAAIyC,EAAGzC,KACxEiC,GAAM,EAAEjC,KAAKuC,MACRN,IAAIA,EAAK,MAAM,UAAU,MAAM,KAAKM,EAAM,EAAGvC,CAAC,GACnDiC,EAAGjC,GAAKuC,EAAKvC,IAGrB,OAAOsC,EAAG,OAAOL,GAAM,MAAM,UAAU,MAAM,KAAKM,CAAI,CAAC,CAC3D,EAEAjE,EAAU,SAAUe,EAAG,CACnB,OAAO,gBAAgBf,GAAW,KAAK,EAAIe,EAAG,MAAQ,IAAIf,EAAQe,CAAC,CACvE,EAEAd,GAAmB,SAAUoC,EAASC,EAAYE,EAAW,CACzD,GAAI,CAAC,OAAO,cAAe,MA
AM,IAAI,UAAU,sCAAsC,EACrF,IAAIY,EAAIZ,EAAU,MAAMH,EAASC,GAAc,CAAC,CAAC,EAAGZ,EAAG0C,EAAI,CAAC,EAC5D,OAAO1C,EAAI,CAAC,EAAG2B,EAAK,MAAM,EAAGA,EAAK,OAAO,EAAGA,EAAK,QAAQ,EAAG3B,EAAE,OAAO,eAAiB,UAAY,CAAE,OAAO,IAAM,EAAGA,EACpH,SAAS2B,EAAK7B,EAAG,CAAM4B,EAAE5B,KAAIE,EAAEF,GAAK,SAAUT,EAAG,CAAE,OAAO,IAAI,QAAQ,SAAUsD,EAAGlD,EAAG,CAAEiD,EAAE,KAAK,CAAC5C,EAAGT,EAAGsD,EAAGlD,CAAC,CAAC,EAAI,GAAKmD,EAAO9C,EAAGT,CAAC,CAAG,CAAC,CAAG,EAAG,CACzI,SAASuD,EAAO9C,EAAGT,EAAG,CAAE,GAAI,CAAE+B,EAAKM,EAAE5B,GAAGT,CAAC,CAAC,CAAG,OAASU,EAAP,CAAY8C,EAAOH,EAAE,GAAG,GAAI3C,CAAC,CAAG,CAAE,CACjF,SAASqB,EAAKd,EAAG,CAAEA,EAAE,iBAAiBhC,EAAU,QAAQ,QAAQgC,EAAE,MAAM,CAAC,EAAE,KAAKwC,EAAS5B,CAAM,EAAI2B,EAAOH,EAAE,GAAG,GAAIpC,CAAC,CAAI,CACxH,SAASwC,EAAQ9B,EAAO,CAAE4B,EAAO,OAAQ5B,CAAK,CAAG,CACjD,SAASE,EAAOF,EAAO,CAAE4B,EAAO,QAAS5B,CAAK,CAAG,CACjD,SAAS6B,EAAOE,EAAG1D,EAAG,CAAM0D,EAAE1D,CAAC,EAAGqD,EAAE,MAAM,EAAGA,EAAE,QAAQE,EAAOF,EAAE,GAAG,GAAIA,EAAE,GAAG,EAAE,CAAG,CACrF,EAEAlE,GAAmB,SAAUsD,EAAG,CAC5B,IAAI9B,EAAGN,EACP,OAAOM,EAAI,CAAC,EAAG2B,EAAK,MAAM,EAAGA,EAAK,QAAS,SAAU5B,EAAG,CAAE,MAAMA,CAAG,CAAC,EAAG4B,EAAK,QAAQ,EAAG3B,EAAE,OAAO,UAAY,UAAY,CAAE,OAAO,IAAM,EAAGA,EAC1I,SAAS2B,EAAK7B,EAAGiD,EAAG,CAAE/C,EAAEF,GAAKgC,EAAEhC,GAAK,SAAUT,EAAG,CAAE,OAAQK,EAAI,CAACA,GAAK,CAAE,MAAOpB,EAAQwD,EAAEhC,GAAGT,CAAC,CAAC,EAAG,KAAMS,IAAM,QAAS,EAAIiD,EAAIA,EAAE1D,CAAC,EAAIA,CAAG,EAAI0D,CAAG,CAClJ,EAEAtE,GAAgB,SAAUqD,EAAG,CACzB,GAAI,CAAC,OAAO,cAAe,MAAM,IAAI,UAAU,sCAAsC,EACrF,IAAID,EAAIC,EAAE,OAAO,eAAgB,EACjC,OAAOD,EAAIA,EAAE,KAAKC,CAAC,GAAKA,EAAI,OAAO7D,GAAa,WAAaA,EAAS6D,CAAC,EAAIA,EAAE,OAAO,UAAU,EAAG,EAAI,CAAC,EAAGH,EAAK,MAAM,EAAGA,EAAK,OAAO,EAAGA,EAAK,QAAQ,EAAG,EAAE,OAAO,eAAiB,UAAY,CAAE,OAAO,IAAM,EAAG,GAC9M,SAASA,EAAK7B,EAAG,CAAE,EAAEA,GAAKgC,EAAEhC,IAAM,SAAUT,EAAG,CAAE,OAAO,IAAI,QAAQ,SAAU4B,EAASC,EAAQ,CAAE7B,EAAIyC,EAAEhC,GAAGT,CAAC,EAAGwD,EAAO5B,EAASC,EAAQ7B,EAAE,KAAMA,EAAE,KAAK,CAAG,CAAC,CAAG,CAAG,CAC/J,SAASwD,EAAO5B,EAASC,EAAQ1B,EAAGH,EAAG,CAAE,QAAQ,QAAQA,CAAC,EAAE,KAAK,SAASA,EAAG,CAAE4B,EAAQ,CAAE,MAAO5B,EAAG,KAAMG,CAAE,CAAC,CAAG,EAAG0B,CAAM,CAAG,CAC/H,EAEAxC,GAAuB,SAAUsE,EAAQC,EAAK,CAC1C,OAAI,OAAO,eAAkB,OAAO,eAAeD,EAAQ,MAAO,CAAE,MAAOC,CAAI,CAAC,EAAYD,EAAO,IAAMC,EAClGD,CACX,EAEA,IAAIE,EAAqB,OAAO,OAAU,SAASpB,EAAGzC,EAAG,CACrD,OAAO,eAAeyC,EAAG,UAAW,CAAE,WAAY,GAAM,MAAOzC,CAAE,CAAC,CACtE,EAAK,SAASyC,EAAGzC,EAAG,CAChByC,EAAE,QAAazC,CACnB,EAEAV,GAAe,SAAUwE,EAAK,CAC1B,GAAIA,GAAOA,EAAI,WAAY,OAAOA,EAClC,IAAI7B,EAAS,CAAC,EACd,GAAI6B,GAAO,KAAM,QAASpB,KAAKoB,EAASpB,IAAM,WAAa,OAAO,UAAU,eAAe,KAAKoB,EAAKpB,CAAC,GAAGhD,GAAgBuC,EAAQ6B,EAAKpB,CAAC,EACvI,OAAAmB,EAAmB5B,EAAQ6B,CAAG,EACvB7B,CACX,EAEA1C,GAAkB,SAAUuE,EAAK,CAC7B,OAAQA,GAAOA,EAAI,WAAcA,EAAM,CAAE,QAAWA,CAAI,CAC5D,EAEAtE,GAAyB,SAAUuE,EAAUC,EAAOC,EAAM,EAAG,CACzD,GAAIA,IAAS,KAAO,CAAC,EAAG,MAAM,IAAI,UAAU,+CAA+C,EAC3F,GAAI,OAAOD,GAAU,WAAaD,IAAaC,GAAS,CAAC,EAAI,CAACA,EAAM,IAAID,CAAQ,EAAG,MAAM,IAAI,UAAU,0EAA0E,EACjL,OAAOE,IAAS,IAAM,EAAIA,IAAS,IAAM,EAAE,KAAKF,CAAQ,EAAI,EAAI,EAAE,MAAQC,EAAM,IAAID,CAAQ,CAChG,EAEAtE,GAAyB,SAAUsE,EAAUC,EAAOrC,EAAOsC,EAAMP,EAAG,CAChE,GAAIO,IAAS,IAAK,MAAM,IAAI,UAAU,gCAAgC,EACtE,GAAIA,IAAS,KAAO,CAACP,EAAG,MAAM,IAAI,UAAU,+CAA+C,EAC3F,GAAI,OAAOM,GAAU,WAAaD,IAAaC,GAAS,CAACN,EAAI,CAACM,EAAM,IAAID,CAAQ,EAAG,MAAM,IAAI,UAAU,yEAAyE,EAChL,OAAQE,IAAS,IAAMP,EAAE,KAAKK,EAAUpC,CAAK,EAAI+B,EAAIA,EAAE,MAAQ/B,EAAQqC,EAAM,IAAID,EAAUpC,CAAK,EAAIA,CACxG,EAEA1B,EAAS,YAAa9B,EAAS,EAC/B8B,EAAS,WAAY7B,EAAQ,EAC7B6B,EAAS,SAAU5B,EAAM,EACzB4B,EAAS,aAAc3B,EAAU,EACjC2B,EAAS,UAAW1B,EAAO,EAC3B0B,EAAS,aAAczB,EAAU,EACjCyB,EAAS,YAAaxB,EAAS,EAC/BwB,EAAS,cAAevB,EAAW,EACnCuB,EAAS,eAAgBtB,EAAY,EACrCsB,EAAS,kBAAmBP,EAAe,EAC3CO,E
AAS,WAAYrB,CAAQ,EAC7BqB,EAAS,SAAUpB,EAAM,EACzBoB,EAAS,WAAYnB,EAAQ,EAC7BmB,EAAS,iBAAkBlB,EAAc,EACzCkB,EAAS,gBAAiBjB,EAAa,EACvCiB,EAAS,UAAWhB,CAAO,EAC3BgB,EAAS,mBAAoBf,EAAgB,EAC7Ce,EAAS,mBAAoBd,EAAgB,EAC7Cc,EAAS,gBAAiBb,EAAa,EACvCa,EAAS,uBAAwBZ,EAAoB,EACrDY,EAAS,eAAgBX,EAAY,EACrCW,EAAS,kBAAmBV,EAAe,EAC3CU,EAAS,yBAA0BT,EAAsB,EACzDS,EAAS,yBAA0BR,EAAsB,CAC7D,CAAC,ICjTD,IAAAyE,GAAkB,WACZ,CACF,UAAAC,EACA,SAAAC,GACA,OAAAC,GACA,WAAAC,GACA,QAAAC,GACA,WAAAC,GACA,UAAAC,GACA,YAAAC,GACA,aAAAC,GACA,gBAAAC,GACA,SAAAC,EACA,OAAAC,EACA,SAAAC,GACA,eAAAC,GACA,cAAAC,EACA,QAAAC,GACA,iBAAAC,GACA,iBAAAC,GACA,cAAAC,GACA,qBAAAC,GACA,aAAAC,GACA,gBAAAC,GACA,uBAAAC,GACA,uBAAAC,EACJ,EAAI,GAAAC,QCtBE,SAAUC,EAAWC,EAAU,CACnC,OAAO,OAAOA,GAAU,UAC1B,CCGM,SAAUC,GAAoBC,EAAgC,CAClE,IAAMC,EAAS,SAACC,EAAa,CAC3B,MAAM,KAAKA,CAAQ,EACnBA,EAAS,MAAQ,IAAI,MAAK,EAAG,KAC/B,EAEMC,EAAWH,EAAWC,CAAM,EAClC,OAAAE,EAAS,UAAY,OAAO,OAAO,MAAM,SAAS,EAClDA,EAAS,UAAU,YAAcA,EAC1BA,CACT,CCDO,IAAMC,GAA+CC,GAC1D,SAACC,EAAM,CACL,OAAA,SAA4CC,EAA0B,CACpED,EAAO,IAAI,EACX,KAAK,QAAUC,EACRA,EAAO,OAAM;EACxBA,EAAO,IAAI,SAACC,EAAKC,EAAC,CAAK,OAAGA,EAAI,EAAC,KAAKD,EAAI,SAAQ,CAAzB,CAA6B,EAAE,KAAK;GAAM,EACzD,GACJ,KAAK,KAAO,sBACZ,KAAK,OAASD,CAChB,CARA,CAQC,ECvBC,SAAUG,EAAaC,EAA6BC,EAAO,CAC/D,GAAID,EAAK,CACP,IAAME,EAAQF,EAAI,QAAQC,CAAI,EAC9B,GAAKC,GAASF,EAAI,OAAOE,EAAO,CAAC,EAErC,CCOA,IAAAC,EAAA,UAAA,CAyBE,SAAAA,EAAoBC,EAA4B,CAA5B,KAAA,gBAAAA,EAdb,KAAA,OAAS,GAER,KAAA,WAAmD,KAMnD,KAAA,YAAqD,IAMV,CAQnD,OAAAD,EAAA,UAAA,YAAA,UAAA,aACME,EAEJ,GAAI,CAAC,KAAK,OAAQ,CAChB,KAAK,OAAS,GAGN,IAAAC,EAAe,KAAI,WAC3B,GAAIA,EAEF,GADA,KAAK,WAAa,KACd,MAAM,QAAQA,CAAU,MAC1B,QAAqBC,EAAAC,EAAAF,CAAU,EAAAG,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAA5B,IAAMG,EAAMD,EAAA,MACfC,EAAO,OAAO,IAAI,yGAGpBJ,EAAW,OAAO,IAAI,EAIlB,IAAiBK,EAAqB,KAAI,gBAClD,GAAIC,EAAWD,CAAgB,EAC7B,GAAI,CACFA,EAAgB,QACTE,EAAP,CACAR,EAASQ,aAAaC,GAAsBD,EAAE,OAAS,CAACA,CAAC,EAIrD,IAAAE,EAAgB,KAAI,YAC5B,GAAIA,EAAa,CACf,KAAK,YAAc,SACnB,QAAwBC,EAAAR,EAAAO,CAAW,EAAAE,EAAAD,EAAA,KAAA,EAAA,CAAAC,EAAA,KAAAA,EAAAD,EAAA,KAAA,EAAE,CAAhC,IAAME,EAASD,EAAA,MAClB,GAAI,CACFE,GAAcD,CAAS,QAChBE,EAAP,CACAf,EAASA,GAAM,KAANA,EAAU,CAAA,EACfe,aAAeN,GACjBT,EAAMgB,EAAAA,EAAA,CAAA,EAAAC,EAAOjB,CAAM,CAAA,EAAAiB,EAAKF,EAAI,MAAM,CAAA,EAElCf,EAAO,KAAKe,CAAG,sGAMvB,GAAIf,EACF,MAAM,IAAIS,GAAoBT,CAAM,EAG1C,EAoBAF,EAAA,UAAA,IAAA,SAAIoB,EAAuB,OAGzB,GAAIA,GAAYA,IAAa,KAC3B,GAAI,KAAK,OAGPJ,GAAcI,CAAQ,MACjB,CACL,GAAIA,aAAoBpB,EAAc,CAGpC,GAAIoB,EAAS,QAAUA,EAAS,WAAW,IAAI,EAC7C,OAEFA,EAAS,WAAW,IAAI,GAEzB,KAAK,aAAcC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAAA,EAAI,CAAA,GAAI,KAAKD,CAAQ,EAG/D,EAOQpB,EAAA,UAAA,WAAR,SAAmBsB,EAAoB,CAC7B,IAAAnB,EAAe,KAAI,WAC3B,OAAOA,IAAemB,GAAW,MAAM,QAAQnB,CAAU,GAAKA,EAAW,SAASmB,CAAM,CAC1F,EASQtB,EAAA,UAAA,WAAR,SAAmBsB,EAAoB,CAC7B,IAAAnB,EAAe,KAAI,WAC3B,KAAK,WAAa,MAAM,QAAQA,CAAU,GAAKA,EAAW,KAAKmB,CAAM,EAAGnB,GAAcA,EAAa,CAACA,EAAYmB,CAAM,EAAIA,CAC5H,EAMQtB,EAAA,UAAA,cAAR,SAAsBsB,EAAoB,CAChC,IAAAnB,EAAe,KAAI,WACvBA,IAAemB,EACjB,KAAK,WAAa,KACT,MAAM,QAAQnB,CAAU,GACjCoB,EAAUpB,EAAYmB,CAAM,CAEhC,EAgBAtB,EAAA,UAAA,OAAA,SAAOoB,EAAsC,CACnC,IAAAR,EAAgB,KAAI,YAC5BA,GAAeW,EAAUX,EAAaQ,CAAQ,EAE1CA,aAAoBpB,GACtBoB,EAAS,cAAc,IAAI,CAE/B,EAlLcpB,EAAA,MAAS,UAAA,CACrB,IAAMwB,EAAQ,IAAIxB,EAClB,OAAAwB,EAAM,OAAS,GACRA,CACT,EAAE,EA+KJxB,GArLA,EAuLO,IAAMyB,GAAqBC,EAAa,MAEzC,SAAUC,GAAeC,EAAU,CACvC,OACEA,aAAiBF,GAChBE,GAAS,WAAYA,GAASC,EAAWD,EAAM,MAAM,GAAKC,EAAWD,EAAM,GAAG,GAAKC,EAAWD,EAAM,WAAW,CAEpH,CAEA,SAASE,GAAcC,EAAwC,CACzDF,EAAWE,CAAS,EACtBA,EAAS,EAETA,EAAU,YAAW,CAEzB,CChNO,IAAMC,EAAuB,CAClC,iBAAkB,KAClB,sBAAuB,KACvB,QAAS,OACT,sCAAuC,GACvC,yBAA0B,ICGrB,IAAMC,EAAmC
,CAG9C,WAAA,SAAWC,EAAqBC,EAAgB,SAAEC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GACxC,IAAAC,EAAaL,EAAe,SACpC,OAAIK,GAAQ,MAARA,EAAU,WACLA,EAAS,WAAU,MAAnBA,EAAQC,EAAA,CAAYL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,EAE/C,WAAU,MAAA,OAAAG,EAAA,CAACL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,CAC7C,EACA,aAAA,SAAaK,EAAM,CACT,IAAAH,EAAaL,EAAe,SACpC,QAAQK,GAAQ,KAAA,OAARA,EAAU,eAAgB,cAAcG,CAAa,CAC/D,EACA,SAAU,QCjBN,SAAUC,GAAqBC,EAAQ,CAC3CC,EAAgB,WAAW,UAAA,CACjB,IAAAC,EAAqBC,EAAM,iBACnC,GAAID,EAEFA,EAAiBF,CAAG,MAGpB,OAAMA,CAEV,CAAC,CACH,CCtBM,SAAUI,GAAI,CAAK,CCMlB,IAAMC,GAAyB,UAAA,CAAM,OAAAC,GAAmB,IAAK,OAAW,MAAS,CAA5C,EAAsE,EAO5G,SAAUC,GAAkBC,EAAU,CAC1C,OAAOF,GAAmB,IAAK,OAAWE,CAAK,CACjD,CAOM,SAAUC,GAAoBC,EAAQ,CAC1C,OAAOJ,GAAmB,IAAKI,EAAO,MAAS,CACjD,CAQM,SAAUJ,GAAmBK,EAAuBD,EAAYF,EAAU,CAC9E,MAAO,CACL,KAAIG,EACJ,MAAKD,EACL,MAAKF,EAET,CCrCA,IAAII,EAAuD,KASrD,SAAUC,EAAaC,EAAc,CACzC,GAAIC,EAAO,sCAAuC,CAChD,IAAMC,EAAS,CAACJ,EAKhB,GAJII,IACFJ,EAAU,CAAE,YAAa,GAAO,MAAO,IAAI,GAE7CE,EAAE,EACEE,EAAQ,CACJ,IAAAC,EAAyBL,EAAvBM,EAAWD,EAAA,YAAEE,EAAKF,EAAA,MAE1B,GADAL,EAAU,KACNM,EACF,MAAMC,QAMVL,EAAE,CAEN,CAMM,SAAUM,GAAaC,EAAQ,CAC/BN,EAAO,uCAAyCH,IAClDA,EAAQ,YAAc,GACtBA,EAAQ,MAAQS,EAEpB,CCrBA,IAAAC,EAAA,SAAAC,EAAA,CAAmCC,EAAAF,EAAAC,CAAA,EA6BjC,SAAAD,EAAYG,EAA6C,CAAzD,IAAAC,EACEH,EAAA,KAAA,IAAA,GAAO,KATC,OAAAG,EAAA,UAAqB,GAUzBD,GACFC,EAAK,YAAcD,EAGfE,GAAeF,CAAW,GAC5BA,EAAY,IAAIC,CAAI,GAGtBA,EAAK,YAAcE,IAEvB,CAzBO,OAAAN,EAAA,OAAP,SAAiBO,EAAwBC,EAA2BC,EAAqB,CACvF,OAAO,IAAIC,GAAeH,EAAMC,EAAOC,CAAQ,CACjD,EAgCAT,EAAA,UAAA,KAAA,SAAKW,EAAS,CACR,KAAK,UACPC,GAA0BC,GAAiBF,CAAK,EAAG,IAAI,EAEvD,KAAK,MAAMA,CAAM,CAErB,EASAX,EAAA,UAAA,MAAA,SAAMc,EAAS,CACT,KAAK,UACPF,GAA0BG,GAAkBD,CAAG,EAAG,IAAI,GAEtD,KAAK,UAAY,GACjB,KAAK,OAAOA,CAAG,EAEnB,EAQAd,EAAA,UAAA,SAAA,UAAA,CACM,KAAK,UACPY,GAA0BI,GAAuB,IAAI,GAErD,KAAK,UAAY,GACjB,KAAK,UAAS,EAElB,EAEAhB,EAAA,UAAA,YAAA,UAAA,CACO,KAAK,SACR,KAAK,UAAY,GACjBC,EAAA,UAAM,YAAW,KAAA,IAAA,EACjB,KAAK,YAAc,KAEvB,EAEUD,EAAA,UAAA,MAAV,SAAgBW,EAAQ,CACtB,KAAK,YAAY,KAAKA,CAAK,CAC7B,EAEUX,EAAA,UAAA,OAAV,SAAiBc,EAAQ,CACvB,GAAI,CACF,KAAK,YAAY,MAAMA,CAAG,UAE1B,KAAK,YAAW,EAEpB,EAEUd,EAAA,UAAA,UAAV,UAAA,CACE,GAAI,CACF,KAAK,YAAY,SAAQ,UAEzB,KAAK,YAAW,EAEpB,EACFA,CAAA,EApHmCiB,CAAY,EA2H/C,IAAMC,GAAQ,SAAS,UAAU,KAEjC,SAASC,GAAyCC,EAAQC,EAAY,CACpE,OAAOH,GAAM,KAAKE,EAAIC,CAAO,CAC/B,CAMA,IAAAC,GAAA,UAAA,CACE,SAAAA,EAAoBC,EAAqC,CAArC,KAAA,gBAAAA,CAAwC,CAE5D,OAAAD,EAAA,UAAA,KAAA,SAAKE,EAAQ,CACH,IAAAD,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,KAClB,GAAI,CACFA,EAAgB,KAAKC,CAAK,QACnBC,EAAP,CACAC,GAAqBD,CAAK,EAGhC,EAEAH,EAAA,UAAA,MAAA,SAAMK,EAAQ,CACJ,IAAAJ,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,MAClB,GAAI,CACFA,EAAgB,MAAMI,CAAG,QAClBF,EAAP,CACAC,GAAqBD,CAAK,OAG5BC,GAAqBC,CAAG,CAE5B,EAEAL,EAAA,UAAA,SAAA,UAAA,CACU,IAAAC,EAAoB,KAAI,gBAChC,GAAIA,EAAgB,SAClB,GAAI,CACFA,EAAgB,SAAQ,QACjBE,EAAP,CACAC,GAAqBD,CAAK,EAGhC,EACFH,CAAA,EArCA,EAuCAM,GAAA,SAAAC,EAAA,CAAuCC,EAAAF,EAAAC,CAAA,EACrC,SAAAD,EACEG,EACAN,EACAO,EAA8B,CAHhC,IAAAC,EAKEJ,EAAA,KAAA,IAAA,GAAO,KAEHN,EACJ,GAAIW,EAAWH,CAAc,GAAK,CAACA,EAGjCR,EAAkB,CAChB,KAAOQ,GAAc,KAAdA,EAAkB,OACzB,MAAON,GAAK,KAALA,EAAS,OAChB,SAAUO,GAAQ,KAARA,EAAY,YAEnB,CAEL,IAAIG,EACAF,GAAQG,EAAO,0BAIjBD,EAAU,OAAO,OAAOJ,CAAc,EACtCI,EAAQ,YAAc,UAAA,CAAM,OAAAF,EAAK,YAAW,CAAhB,EAC5BV,EAAkB,CAChB,KAAMQ,EAAe,MAAQZ,GAAKY,EAAe,KAAMI,CAAO,EAC9D,MAAOJ,EAAe,OAASZ,GAAKY,EAAe,MAAOI,CAAO,EACjE,SAAUJ,EAAe,UAAYZ,GAAKY,EAAe,SAAUI,CAAO,IAI5EZ,EAAkBQ,EAMtB,OAAAE,EAAK,YAAc,IAAIX,GAAiBC,CAAe,GACzD,CACF,OAAAK,CAAA,EAzCuCS,CAAU,EA2CjD,SAASC,GAAqBC,EAAU,CAClCC,EAAO,sCACTC,GAAaF,CAAK,EAIlBG,GAAqBH,CAAK,CAE9B,CAQA,SAASI,GAAoBC,EAAQ,CACnC,MAA
MA,CACR,CAOA,SAASC,GAA0BC,EAA2CC,EAA2B,CAC/F,IAAAC,EAA0BR,EAAM,sBACxCQ,GAAyBC,EAAgB,WAAW,UAAA,CAAM,OAAAD,EAAsBF,EAAcC,CAAU,CAA9C,CAA+C,CAC3G,CAOO,IAAMG,GAA6D,CACxE,OAAQ,GACR,KAAMC,EACN,MAAOR,GACP,SAAUQ,GCjRL,IAAMC,EAA+B,UAAA,CAAM,OAAC,OAAO,QAAW,YAAc,OAAO,YAAe,cAAvD,EAAsE,ECyClH,SAAUC,EAAYC,EAAI,CAC9B,OAAOA,CACT,CCsCM,SAAUC,GAAoBC,EAA+B,CACjE,OAAIA,EAAI,SAAW,EACVC,EAGLD,EAAI,SAAW,EACVA,EAAI,GAGN,SAAeE,EAAQ,CAC5B,OAAOF,EAAI,OAAO,SAACG,EAAWC,EAAuB,CAAK,OAAAA,EAAGD,CAAI,CAAP,EAAUD,CAAY,CAClF,CACF,CC9EA,IAAAG,EAAA,UAAA,CAkBE,SAAAA,EAAYC,EAA6E,CACnFA,IACF,KAAK,WAAaA,EAEtB,CA4BA,OAAAD,EAAA,UAAA,KAAA,SAAQE,EAAyB,CAC/B,IAAMC,EAAa,IAAIH,EACvB,OAAAG,EAAW,OAAS,KACpBA,EAAW,SAAWD,EACfC,CACT,EA8IAH,EAAA,UAAA,UAAA,SACEI,EACAC,EACAC,EAA8B,CAHhC,IAAAC,EAAA,KAKQC,EAAaC,GAAaL,CAAc,EAAIA,EAAiB,IAAIM,GAAeN,EAAgBC,EAAOC,CAAQ,EAErH,OAAAK,EAAa,UAAA,CACL,IAAAC,EAAuBL,EAArBL,EAAQU,EAAA,SAAEC,EAAMD,EAAA,OACxBJ,EAAW,IACTN,EAGIA,EAAS,KAAKM,EAAYK,CAAM,EAChCA,EAIAN,EAAK,WAAWC,CAAU,EAG1BD,EAAK,cAAcC,CAAU,CAAC,CAEtC,CAAC,EAEMA,CACT,EAGUR,EAAA,UAAA,cAAV,SAAwBc,EAAmB,CACzC,GAAI,CACF,OAAO,KAAK,WAAWA,CAAI,QACpBC,EAAP,CAIAD,EAAK,MAAMC,CAAG,EAElB,EA6DAf,EAAA,UAAA,QAAA,SAAQgB,EAA0BC,EAAoC,CAAtE,IAAAV,EAAA,KACE,OAAAU,EAAcC,GAAeD,CAAW,EAEjC,IAAIA,EAAkB,SAACE,EAASC,EAAM,CAC3C,IAAMZ,EAAa,IAAIE,GAAkB,CACvC,KAAM,SAACW,EAAK,CACV,GAAI,CACFL,EAAKK,CAAK,QACHN,EAAP,CACAK,EAAOL,CAAG,EACVP,EAAW,YAAW,EAE1B,EACA,MAAOY,EACP,SAAUD,EACX,EACDZ,EAAK,UAAUC,CAAU,CAC3B,CAAC,CACH,EAGUR,EAAA,UAAA,WAAV,SAAqBQ,EAA2B,OAC9C,OAAOI,EAAA,KAAK,UAAM,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAUJ,CAAU,CAC1C,EAOAR,EAAA,UAACG,GAAD,UAAA,CACE,OAAO,IACT,EA4FAH,EAAA,UAAA,KAAA,UAAA,SAAKsB,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACH,OAAOC,GAAcF,CAAU,EAAE,IAAI,CACvC,EA6BAtB,EAAA,UAAA,UAAA,SAAUiB,EAAoC,CAA9C,IAAAV,EAAA,KACE,OAAAU,EAAcC,GAAeD,CAAW,EAEjC,IAAIA,EAAY,SAACE,EAASC,EAAM,CACrC,IAAIC,EACJd,EAAK,UACH,SAACkB,EAAI,CAAK,OAACJ,EAAQI,CAAT,EACV,SAACV,EAAQ,CAAK,OAAAK,EAAOL,CAAG,CAAV,EACd,UAAA,CAAM,OAAAI,EAAQE,CAAK,CAAb,CAAc,CAExB,CAAC,CACH,EA3aOrB,EAAA,OAAkC,SAAIC,EAAwD,CACnG,OAAO,IAAID,EAAcC,CAAS,CACpC,EA0aFD,GA/cA,EAwdA,SAAS0B,GAAeC,EAA+C,OACrE,OAAOC,EAAAD,GAAW,KAAXA,EAAeE,EAAO,WAAO,MAAAD,IAAA,OAAAA,EAAI,OAC1C,CAEA,SAASE,GAAcC,EAAU,CAC/B,OAAOA,GAASC,EAAWD,EAAM,IAAI,GAAKC,EAAWD,EAAM,KAAK,GAAKC,EAAWD,EAAM,QAAQ,CAChG,CAEA,SAASE,GAAgBF,EAAU,CACjC,OAAQA,GAASA,aAAiBG,GAAgBJ,GAAWC,CAAK,GAAKI,GAAeJ,CAAK,CAC7F,CC1eM,SAAUK,GAAQC,EAAW,CACjC,OAAOC,EAAWD,GAAM,KAAA,OAANA,EAAQ,IAAI,CAChC,CAMM,SAAUE,EACdC,EAAqF,CAErF,OAAO,SAACH,EAAqB,CAC3B,GAAID,GAAQC,CAAM,EAChB,OAAOA,EAAO,KAAK,SAA+BI,EAA2B,CAC3E,GAAI,CACF,OAAOD,EAAKC,EAAc,IAAI,QACvBC,EAAP,CACA,KAAK,MAAMA,CAAG,EAElB,CAAC,EAEH,MAAM,IAAI,UAAU,wCAAwC,CAC9D,CACF,CCjBM,SAAUC,EACdC,EACAC,EACAC,EACAC,EACAC,EAAuB,CAEvB,OAAO,IAAIC,GAAmBL,EAAaC,EAAQC,EAAYC,EAASC,CAAU,CACpF,CAMA,IAAAC,GAAA,SAAAC,EAAA,CAA2CC,EAAAF,EAAAC,CAAA,EAiBzC,SAAAD,EACEL,EACAC,EACAC,EACAC,EACQC,EACAI,EAAiC,CAN3C,IAAAC,EAoBEH,EAAA,KAAA,KAAMN,CAAW,GAAC,KAfV,OAAAS,EAAA,WAAAL,EACAK,EAAA,kBAAAD,EAeRC,EAAK,MAAQR,EACT,SAAuCS,EAAQ,CAC7C,GAAI,CACFT,EAAOS,CAAK,QACLC,EAAP,CACAX,EAAY,MAAMW,CAAG,EAEzB,EACAL,EAAA,UAAM,MACVG,EAAK,OAASN,EACV,SAAuCQ,EAAQ,CAC7C,GAAI,CACFR,EAAQQ,CAAG,QACJA,EAAP,CAEAX,EAAY,MAAMW,CAAG,UAGrB,KAAK,YAAW,EAEpB,EACAL,EAAA,UAAM,OACVG,EAAK,UAAYP,EACb,UAAA,CACE,GAAI,CACFA,EAAU,QACHS,EAAP,CAEAX,EAAY,MAAMW,CAAG,UAGrB,KAAK,YAAW,EAEpB,EACAL,EAAA,UAAM,WACZ,CAEA,OAAAD,EAAA,UAAA,YAAA,UAAA,OACE,GAAI,CAAC,KAAK,mBAAqB,KAAK,kBAAiB,EAAI,CAC/C,IAAAO,EAAW,KAAI,OACvBN,EAAA,UAAM,YAAW,KAAA,IAAA,EAEjB,CAACM,KAAUC,EAAA,KAAK,cAAU,MAAAA,IAAA,QAAAA,EAAA,KAAf,IAAI,GAEnB,EACFR,CAAA,E
AnF2CS,CAAU,ECP9C,IAAMC,GAAuDC,GAClE,SAACC,EAAM,CACL,OAAA,UAAoC,CAClCA,EAAO,IAAI,EACX,KAAK,KAAO,0BACZ,KAAK,QAAU,qBACjB,CAJA,CAIC,ECXL,IAAAC,GAAA,SAAAC,EAAA,CAAgCC,EAAAF,EAAAC,CAAA,EAwB9B,SAAAD,GAAA,CAAA,IAAAG,EAEEF,EAAA,KAAA,IAAA,GAAO,KAzBT,OAAAE,EAAA,OAAS,GAEDA,EAAA,iBAAyC,KAGjDA,EAAA,UAA2B,CAAA,EAE3BA,EAAA,UAAY,GAEZA,EAAA,SAAW,GAEXA,EAAA,YAAmB,MAenB,CAGA,OAAAH,EAAA,UAAA,KAAA,SAAQI,EAAwB,CAC9B,IAAMC,EAAU,IAAIC,GAAiB,KAAM,IAAI,EAC/C,OAAAD,EAAQ,SAAWD,EACZC,CACT,EAGUL,EAAA,UAAA,eAAV,UAAA,CACE,GAAI,KAAK,OACP,MAAM,IAAIO,EAEd,EAEAP,EAAA,UAAA,KAAA,SAAKQ,EAAQ,CAAb,IAAAL,EAAA,KACEM,EAAa,UAAA,SAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACdA,EAAK,mBACRA,EAAK,iBAAmB,MAAM,KAAKA,EAAK,SAAS,OAEnD,QAAuBO,EAAAC,EAAAR,EAAK,gBAAgB,EAAAS,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzC,IAAMG,EAAQD,EAAA,MACjBC,EAAS,KAAKL,CAAK,qGAGzB,CAAC,CACH,EAEAR,EAAA,UAAA,MAAA,SAAMc,EAAQ,CAAd,IAAAX,EAAA,KACEM,EAAa,UAAA,CAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACnBA,EAAK,SAAWA,EAAK,UAAY,GACjCA,EAAK,YAAcW,EAEnB,QADQC,EAAcZ,EAAI,UACnBY,EAAU,QACfA,EAAU,MAAK,EAAI,MAAMD,CAAG,EAGlC,CAAC,CACH,EAEAd,EAAA,UAAA,SAAA,UAAA,CAAA,IAAAG,EAAA,KACEM,EAAa,UAAA,CAEX,GADAN,EAAK,eAAc,EACf,CAACA,EAAK,UAAW,CACnBA,EAAK,UAAY,GAEjB,QADQY,EAAcZ,EAAI,UACnBY,EAAU,QACfA,EAAU,MAAK,EAAI,SAAQ,EAGjC,CAAC,CACH,EAEAf,EAAA,UAAA,YAAA,UAAA,CACE,KAAK,UAAY,KAAK,OAAS,GAC/B,KAAK,UAAY,KAAK,iBAAmB,IAC3C,EAEA,OAAA,eAAIA,EAAA,UAAA,WAAQ,KAAZ,UAAA,OACE,QAAOgB,EAAA,KAAK,aAAS,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAS,CAClC,kCAGUhB,EAAA,UAAA,cAAV,SAAwBiB,EAAyB,CAC/C,YAAK,eAAc,EACZhB,EAAA,UAAM,cAAa,KAAA,KAACgB,CAAU,CACvC,EAGUjB,EAAA,UAAA,WAAV,SAAqBiB,EAAyB,CAC5C,YAAK,eAAc,EACnB,KAAK,wBAAwBA,CAAU,EAChC,KAAK,gBAAgBA,CAAU,CACxC,EAGUjB,EAAA,UAAA,gBAAV,SAA0BiB,EAA2B,CAArD,IAAAd,EAAA,KACQa,EAAqC,KAAnCE,EAAQF,EAAA,SAAEG,EAASH,EAAA,UAAED,EAASC,EAAA,UACtC,OAAIE,GAAYC,EACPC,IAET,KAAK,iBAAmB,KACxBL,EAAU,KAAKE,CAAU,EAClB,IAAII,EAAa,UAAA,CACtBlB,EAAK,iBAAmB,KACxBmB,EAAUP,EAAWE,CAAU,CACjC,CAAC,EACH,EAGUjB,EAAA,UAAA,wBAAV,SAAkCiB,EAA2B,CACrD,IAAAD,EAAuC,KAArCE,EAAQF,EAAA,SAAEO,EAAWP,EAAA,YAAEG,EAASH,EAAA,UACpCE,EACFD,EAAW,MAAMM,CAAW,EACnBJ,GACTF,EAAW,SAAQ,CAEvB,EAQAjB,EAAA,UAAA,aAAA,UAAA,CACE,IAAMwB,EAAkB,IAAIC,EAC5B,OAAAD,EAAW,OAAS,KACbA,CACT,EAxHOxB,EAAA,OAAkC,SAAI0B,EAA0BC,EAAqB,CAC1F,OAAO,IAAIrB,GAAoBoB,EAAaC,CAAM,CACpD,EAuHF3B,GA7IgCyB,CAAU,EAkJ1C,IAAAG,GAAA,SAAAC,EAAA,CAAyCC,EAAAF,EAAAC,CAAA,EACvC,SAAAD,EAESG,EACPC,EAAsB,CAHxB,IAAAC,EAKEJ,EAAA,KAAA,IAAA,GAAO,KAHA,OAAAI,EAAA,YAAAF,EAIPE,EAAK,OAASD,GAChB,CAEA,OAAAJ,EAAA,UAAA,KAAA,SAAKM,EAAQ,UACXC,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAI,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAGF,CAAK,CAChC,EAEAN,EAAA,UAAA,MAAA,SAAMS,EAAQ,UACZF,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,SAAK,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAGC,CAAG,CAC/B,EAEAT,EAAA,UAAA,SAAA,UAAA,UACEO,GAAAC,EAAA,KAAK,eAAW,MAAAA,IAAA,OAAA,OAAAA,EAAE,YAAQ,MAAAD,IAAA,QAAAA,EAAA,KAAAC,CAAA,CAC5B,EAGUR,EAAA,UAAA,WAAV,SAAqBU,EAAyB,SAC5C,OAAOH,GAAAC,EAAA,KAAK,UAAM,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAUE,CAAU,KAAC,MAAAH,IAAA,OAAAA,EAAII,EAC/C,EACFX,CAAA,EA1ByCY,EAAO,EC5JzC,IAAMC,EAA+C,CAC1D,IAAG,UAAA,CAGD,OAAQA,EAAsB,UAAY,MAAM,IAAG,CACrD,EACA,SAAU,QCwBZ,IAAAC,GAAA,SAAAC,EAAA,CAAsCC,EAAAF,EAAAC,CAAA,EAUpC,SAAAD,EACUG,EACAC,EACAC,EAA6D,CAF7DF,IAAA,SAAAA,EAAA,KACAC,IAAA,SAAAA,EAAA,KACAC,IAAA,SAAAA,EAAAC,GAHV,IAAAC,EAKEN,EAAA,KAAA,IAAA,GAAO,KAJC,OAAAM,EAAA,YAAAJ,EACAI,EAAA,YAAAH,EACAG,EAAA,mBAAAF,EAZFE,EAAA,QAA0B,CAAA,EAC1BA,EAAA,oBAAsB,GAc5BA,EAAK,oBAAsBH,IAAgB,IAC3CG,EAAK,YAAc,KAAK,IAAI,EAAGJ,CAAW,EAC1CI,EAAK,YAAc,KAAK,IAAI,EAAGH,CAAW,GAC5C,CAEA,OAAAJ,EAAA,UAAA,KAAA,SAAKQ,EAAQ,CACL,IAAAC,EAA+
E,KAA7EC,EAASD,EAAA,UAAEE,EAAOF,EAAA,QAAEG,EAAmBH,EAAA,oBAAEJ,EAAkBI,EAAA,mBAAEL,EAAWK,EAAA,YAC3EC,IACHC,EAAQ,KAAKH,CAAK,EAClB,CAACI,GAAuBD,EAAQ,KAAKN,EAAmB,IAAG,EAAKD,CAAW,GAE7E,KAAK,YAAW,EAChBH,EAAA,UAAM,KAAI,KAAA,KAACO,CAAK,CAClB,EAGUR,EAAA,UAAA,WAAV,SAAqBa,EAAyB,CAC5C,KAAK,eAAc,EACnB,KAAK,YAAW,EAQhB,QANMC,EAAe,KAAK,gBAAgBD,CAAU,EAE9CJ,EAAmC,KAAjCG,EAAmBH,EAAA,oBAAEE,EAAOF,EAAA,QAG9BM,EAAOJ,EAAQ,MAAK,EACjBK,EAAI,EAAGA,EAAID,EAAK,QAAU,CAACF,EAAW,OAAQG,GAAKJ,EAAsB,EAAI,EACpFC,EAAW,KAAKE,EAAKC,EAAO,EAG9B,YAAK,wBAAwBH,CAAU,EAEhCC,CACT,EAEQd,EAAA,UAAA,YAAR,UAAA,CACQ,IAAAS,EAAoE,KAAlEN,EAAWM,EAAA,YAAEJ,EAAkBI,EAAA,mBAAEE,EAAOF,EAAA,QAAEG,EAAmBH,EAAA,oBAK/DQ,GAAsBL,EAAsB,EAAI,GAAKT,EAK3D,GAJAA,EAAc,KAAYc,EAAqBN,EAAQ,QAAUA,EAAQ,OAAO,EAAGA,EAAQ,OAASM,CAAkB,EAIlH,CAACL,EAAqB,CAKxB,QAJMM,EAAMb,EAAmB,IAAG,EAC9Bc,EAAO,EAGFH,EAAI,EAAGA,EAAIL,EAAQ,QAAWA,EAAQK,IAAiBE,EAAKF,GAAK,EACxEG,EAAOH,EAETG,GAAQR,EAAQ,OAAO,EAAGQ,EAAO,CAAC,EAEtC,EACFnB,CAAA,EAzEsCoB,EAAO,EClB7C,IAAAC,GAAA,SAAAC,EAAA,CAA+BC,EAAAF,EAAAC,CAAA,EAC7B,SAAAD,EAAYG,EAAsBC,EAAmD,QACnFH,EAAA,KAAA,IAAA,GAAO,IACT,CAWO,OAAAD,EAAA,UAAA,SAAP,SAAgBK,EAAWC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAClB,IACT,EACFN,CAAA,EAjB+BO,CAAY,ECHpC,IAAMC,EAAqC,CAGhD,YAAA,SAAYC,EAAqBC,EAAgB,SAAEC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,EAAA,GAAA,UAAAA,GACzC,IAAAC,EAAaL,EAAgB,SACrC,OAAIK,GAAQ,MAARA,EAAU,YACLA,EAAS,YAAW,MAApBA,EAAQC,EAAA,CAAaL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,EAEhD,YAAW,MAAA,OAAAG,EAAA,CAACL,EAASC,CAAO,EAAAK,EAAKJ,CAAI,CAAA,CAAA,CAC9C,EACA,cAAA,SAAcK,EAAM,CACV,IAAAH,EAAaL,EAAgB,SACrC,QAAQK,GAAQ,KAAA,OAARA,EAAU,gBAAiB,eAAeG,CAAa,CACjE,EACA,SAAU,QCrBZ,IAAAC,GAAA,SAAAC,EAAA,CAAoCC,EAAAF,EAAAC,CAAA,EAOlC,SAAAD,EAAsBG,EAAqCC,EAAmD,CAA9G,IAAAC,EACEJ,EAAA,KAAA,KAAME,EAAWC,CAAI,GAAC,KADF,OAAAC,EAAA,UAAAF,EAAqCE,EAAA,KAAAD,EAFjDC,EAAA,QAAmB,IAI7B,CAEO,OAAAL,EAAA,UAAA,SAAP,SAAgBM,EAAWC,EAAiB,OAC1C,GADyBA,IAAA,SAAAA,EAAA,GACrB,KAAK,OACP,OAAO,KAIT,KAAK,MAAQD,EAEb,IAAME,EAAK,KAAK,GACVL,EAAY,KAAK,UAuBvB,OAAIK,GAAM,OACR,KAAK,GAAK,KAAK,eAAeL,EAAWK,EAAID,CAAK,GAKpD,KAAK,QAAU,GAEf,KAAK,MAAQA,EAEb,KAAK,IAAKE,EAAA,KAAK,MAAE,MAAAA,IAAA,OAAAA,EAAI,KAAK,eAAeN,EAAW,KAAK,GAAII,CAAK,EAE3D,IACT,EAEUP,EAAA,UAAA,eAAV,SAAyBG,EAA2BO,EAAmBH,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAC9DI,EAAiB,YAAYR,EAAU,MAAM,KAAKA,EAAW,IAAI,EAAGI,CAAK,CAClF,EAEUP,EAAA,UAAA,eAAV,SAAyBY,EAA4BJ,EAAkBD,EAAwB,CAE7F,GAFqEA,IAAA,SAAAA,EAAA,GAEjEA,GAAS,MAAQ,KAAK,QAAUA,GAAS,KAAK,UAAY,GAC5D,OAAOC,EAILA,GAAM,MACRG,EAAiB,cAAcH,CAAE,CAIrC,EAMOR,EAAA,UAAA,QAAP,SAAeM,EAAUC,EAAa,CACpC,GAAI,KAAK,OACP,OAAO,IAAI,MAAM,8BAA8B,EAGjD,KAAK,QAAU,GACf,IAAMM,EAAQ,KAAK,SAASP,EAAOC,CAAK,EACxC,GAAIM,EACF,OAAOA,EACE,KAAK,UAAY,IAAS,KAAK,IAAM,OAc9C,KAAK,GAAK,KAAK,eAAe,KAAK,UAAW,KAAK,GAAI,IAAI,EAE/D,EAEUb,EAAA,UAAA,SAAV,SAAmBM,EAAUQ,EAAc,CACzC,IAAIC,EAAmB,GACnBC,EACJ,GAAI,CACF,KAAK,KAAKV,CAAK,QACRW,EAAP,CACAF,EAAU,GAIVC,EAAaC,GAAQ,IAAI,MAAM,oCAAoC,EAErE,GAAIF,EACF,YAAK,YAAW,EACTC,CAEX,EAEAhB,EAAA,UAAA,YAAA,UAAA,CACE,GAAI,CAAC,KAAK,OAAQ,CACV,IAAAS,EAAoB,KAAlBD,EAAEC,EAAA,GAAEN,EAASM,EAAA,UACbS,EAAYf,EAAS,QAE7B,KAAK,KAAO,KAAK,MAAQ,KAAK,UAAY,KAC1C,KAAK,QAAU,GAEfgB,EAAUD,EAAS,IAAI,EACnBV,GAAM,OACR,KAAK,GAAK,KAAK,eAAeL,EAAWK,EAAI,IAAI,GAGnD,KAAK,MAAQ,KACbP,EAAA,UAAM,YAAW,KAAA,IAAA,EAErB,EACFD,CAAA,EA9IoCoB,EAAM,ECgB1C,IAAAC,GAAA,UAAA,CAGE,SAAAA,EAAoBC,EAAoCC,EAAiC,CAAjCA,IAAA,SAAAA,EAAoBF,EAAU,KAAlE,KAAA,oBAAAC,EAClB,KAAK,IAAMC,CACb,CA6BO,OAAAF,EAAA,UAAA,SAAP,SAAmBG,EAAqDC,EAAmBC,EAAS,CAA5B,OAAAD,IAAA,SAAAA,EAAA,GAC/D,IAAI,KAAK,oBAAuB,KAAMD,CAAI,EAAE,SAASE,EAAOD,CAAK,CAC1E,EAnCcJ,EAAA,IAAoBM,EAAsB,IAoC1DN,GArCA,ECnBA,IAAAO,GA
AA,SAAAC,EAAA,CAAoCC,EAAAF,EAAAC,CAAA,EAkBlC,SAAAD,EAAYG,EAAgCC,EAAiC,CAAjCA,IAAA,SAAAA,EAAoBC,GAAU,KAA1E,IAAAC,EACEL,EAAA,KAAA,KAAME,EAAiBC,CAAG,GAAC,KAlBtB,OAAAE,EAAA,QAAmC,CAAA,EAOnCA,EAAA,QAAmB,IAY1B,CAEO,OAAAN,EAAA,UAAA,MAAP,SAAaO,EAAwB,CAC3B,IAAAC,EAAY,KAAI,QAExB,GAAI,KAAK,QAAS,CAChBA,EAAQ,KAAKD,CAAM,EACnB,OAGF,IAAIE,EACJ,KAAK,QAAU,GAEf,EACE,IAAKA,EAAQF,EAAO,QAAQA,EAAO,MAAOA,EAAO,KAAK,EACpD,YAEMA,EAASC,EAAQ,MAAK,GAIhC,GAFA,KAAK,QAAU,GAEXC,EAAO,CACT,KAAQF,EAASC,EAAQ,MAAK,GAC5BD,EAAO,YAAW,EAEpB,MAAME,EAEV,EACFT,CAAA,EAhDoCK,EAAS,EC6CtC,IAAMK,EAAiB,IAAIC,GAAeC,EAAW,EAK/CC,GAAQH,ECUd,IAAMI,EAAQ,IAAIC,EAAkB,SAACC,EAAU,CAAK,OAAAA,EAAW,SAAQ,CAAnB,CAAqB,EC9D1E,SAAUC,GAAYC,EAAU,CACpC,OAAOA,GAASC,EAAWD,EAAM,QAAQ,CAC3C,CCDA,SAASE,GAAQC,EAAQ,CACvB,OAAOA,EAAIA,EAAI,OAAS,EAC1B,CAEM,SAAUC,GAAkBC,EAAW,CAC3C,OAAOC,EAAWJ,GAAKG,CAAI,CAAC,EAAIA,EAAK,IAAG,EAAK,MAC/C,CAEM,SAAUE,EAAaF,EAAW,CACtC,OAAOG,GAAYN,GAAKG,CAAI,CAAC,EAAIA,EAAK,IAAG,EAAK,MAChD,CAEM,SAAUI,GAAUJ,EAAaK,EAAoB,CACzD,OAAO,OAAOR,GAAKG,CAAI,GAAM,SAAWA,EAAK,IAAG,EAAMK,CACxD,CClBO,IAAMC,EAAe,SAAIC,EAAM,CAAwB,OAAAA,GAAK,OAAOA,EAAE,QAAW,UAAY,OAAOA,GAAM,UAAlD,ECMxD,SAAUC,GAAUC,EAAU,CAClC,OAAOC,EAAWD,GAAK,KAAA,OAALA,EAAO,IAAI,CAC/B,CCHM,SAAUE,GAAoBC,EAAU,CAC5C,OAAOC,EAAWD,EAAME,EAAkB,CAC5C,CCLM,SAAUC,GAAmBC,EAAQ,CACzC,OAAO,OAAO,eAAiBC,EAAWD,GAAG,KAAA,OAAHA,EAAM,OAAO,cAAc,CACvE,CCAM,SAAUE,GAAiCC,EAAU,CAEzD,OAAO,IAAI,UACT,iBACEA,IAAU,MAAQ,OAAOA,GAAU,SAAW,oBAAsB,IAAIA,EAAK,KAAG,0HACwC,CAE9H,CCXM,SAAUC,IAAiB,CAC/B,OAAI,OAAO,QAAW,YAAc,CAAC,OAAO,SACnC,aAGF,OAAO,QAChB,CAEO,IAAMC,GAAWD,GAAiB,ECJnC,SAAUE,GAAWC,EAAU,CACnC,OAAOC,EAAWD,GAAK,KAAA,OAALA,EAAQE,GAAgB,CAC5C,CCHM,SAAiBC,GAAsCC,EAAqC,mGAC1FC,EAASD,EAAe,UAAS,2DAGX,MAAA,CAAA,EAAAE,GAAMD,EAAO,KAAI,CAAE,CAAA,gBAArCE,EAAkBC,EAAA,KAAA,EAAhBC,EAAKF,EAAA,MAAEG,EAAIH,EAAA,KACfG,iBAAA,CAAA,EAAA,CAAA,SACF,MAAA,CAAA,EAAAF,EAAA,KAAA,CAAA,qBAEIC,CAAM,CAAA,SAAZ,MAAA,CAAA,EAAAD,EAAA,KAAA,CAAA,SAAA,OAAAA,EAAA,KAAA,mCAGF,OAAAH,EAAO,YAAW,6BAIhB,SAAUM,GAAwBC,EAAQ,CAG9C,OAAOC,EAAWD,GAAG,KAAA,OAAHA,EAAK,SAAS,CAClC,CCPM,SAAUE,EAAaC,EAAyB,CACpD,GAAIA,aAAiBC,EACnB,OAAOD,EAET,GAAIA,GAAS,KAAM,CACjB,GAAIE,GAAoBF,CAAK,EAC3B,OAAOG,GAAsBH,CAAK,EAEpC,GAAII,EAAYJ,CAAK,EACnB,OAAOK,GAAcL,CAAK,EAE5B,GAAIM,GAAUN,CAAK,EACjB,OAAOO,GAAYP,CAAK,EAE1B,GAAIQ,GAAgBR,CAAK,EACvB,OAAOS,GAAkBT,CAAK,EAEhC,GAAIU,GAAWV,CAAK,EAClB,OAAOW,GAAaX,CAAK,EAE3B,GAAIY,GAAqBZ,CAAK,EAC5B,OAAOa,GAAuBb,CAAK,EAIvC,MAAMc,GAAiCd,CAAK,CAC9C,CAMM,SAAUG,GAAyBY,EAAQ,CAC/C,OAAO,IAAId,EAAW,SAACe,EAAyB,CAC9C,IAAMC,EAAMF,EAAIG,GAAkB,EAClC,GAAIC,EAAWF,EAAI,SAAS,EAC1B,OAAOA,EAAI,UAAUD,CAAU,EAGjC,MAAM,IAAI,UAAU,gEAAgE,CACtF,CAAC,CACH,CASM,SAAUX,GAAiBe,EAAmB,CAClD,OAAO,IAAInB,EAAW,SAACe,EAAyB,CAU9C,QAASK,EAAI,EAAGA,EAAID,EAAM,QAAU,CAACJ,EAAW,OAAQK,IACtDL,EAAW,KAAKI,EAAMC,EAAE,EAE1BL,EAAW,SAAQ,CACrB,CAAC,CACH,CAEM,SAAUT,GAAee,EAAuB,CACpD,OAAO,IAAIrB,EAAW,SAACe,EAAyB,CAC9CM,EACG,KACC,SAACC,EAAK,CACCP,EAAW,SACdA,EAAW,KAAKO,CAAK,EACrBP,EAAW,SAAQ,EAEvB,EACA,SAACQ,EAAQ,CAAK,OAAAR,EAAW,MAAMQ,CAAG,CAApB,CAAqB,EAEpC,KAAK,KAAMC,EAAoB,CACpC,CAAC,CACH,CAEM,SAAUd,GAAgBe,EAAqB,CACnD,OAAO,IAAIzB,EAAW,SAACe,EAAyB,aAC9C,QAAoBW,EAAAC,EAAAF,CAAQ,EAAAG,EAAAF,EAAA,KAAA,EAAA,CAAAE,EAAA,KAAAA,EAAAF,EAAA,KAAA,EAAE,CAAzB,IAAMJ,EAAKM,EAAA,MAEd,GADAb,EAAW,KAAKO,CAAK,EACjBP,EAAW,OACb,yGAGJA,EAAW,SAAQ,CACrB,CAAC,CACH,CAEM,SAAUP,GAAqBqB,EAA+B,CAClE,OAAO,IAAI7B,EAAW,SAACe,EAAyB,CAC9Ce,GAAQD,EAAed,CAAU,EAAE,MAAM,SAACQ,EAAG,CAAK,OAAAR,EAAW,MAAMQ,CAAG,CAApB,CAAqB,CACzE,CAAC,CACH,CAEM,SAAUX,GAA0BmB,EAAqC,CAC7E,OAAOvB,GAAkBwB,GAAmCD,CAAc,CAAC,CAC7E,CAEA,SAAeD,GAAWD,EAAiCd,EAAyB,uIACxDkB,EAAAC,GAAAL,CAAa,gFAIrC,GAJe
P,EAAKa,EAAA,MACpBpB,EAAW,KAAKO,CAAK,EAGjBP,EAAW,OACb,MAAA,CAAA,CAAA,6RAGJ,OAAAA,EAAW,SAAQ,WChHf,SAAUqB,EACdC,EACAC,EACAC,EACAC,EACAC,EAAc,CADdD,IAAA,SAAAA,EAAA,GACAC,IAAA,SAAAA,EAAA,IAEA,IAAMC,EAAuBJ,EAAU,SAAS,UAAA,CAC9CC,EAAI,EACAE,EACFJ,EAAmB,IAAI,KAAK,SAAS,KAAMG,CAAK,CAAC,EAEjD,KAAK,YAAW,CAEpB,EAAGA,CAAK,EAIR,GAFAH,EAAmB,IAAIK,CAAoB,EAEvC,CAACD,EAKH,OAAOC,CAEX,CCeM,SAAUC,GAAaC,EAA0BC,EAAS,CAAT,OAAAA,IAAA,SAAAA,EAAA,GAC9CC,EAAQ,SAACC,EAAQC,EAAU,CAChCD,EAAO,UACLE,EACED,EACA,SAACE,EAAK,CAAK,OAAAC,EAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,KAAKE,CAAK,CAArB,EAAwBL,CAAK,CAA1E,EACX,UAAA,CAAM,OAAAM,EAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,SAAQ,CAAnB,EAAuBH,CAAK,CAAzE,EACN,SAACO,EAAG,CAAK,OAAAD,EAAgBH,EAAYJ,EAAW,UAAA,CAAM,OAAAI,EAAW,MAAMI,CAAG,CAApB,EAAuBP,CAAK,CAAzE,CAA0E,CACpF,CAEL,CAAC,CACH,CCPM,SAAUQ,GAAeC,EAA0BC,EAAiB,CAAjB,OAAAA,IAAA,SAAAA,EAAA,GAChDC,EAAQ,SAACC,EAAQC,EAAU,CAChCA,EAAW,IAAIJ,EAAU,SAAS,UAAA,CAAM,OAAAG,EAAO,UAAUC,CAAU,CAA3B,EAA8BH,CAAK,CAAC,CAC9E,CAAC,CACH,CC7DM,SAAUI,GAAsBC,EAA6BC,EAAwB,CACzF,OAAOC,EAAUF,CAAK,EAAE,KAAKG,GAAYF,CAAS,EAAGG,GAAUH,CAAS,CAAC,CAC3E,CCFM,SAAUI,GAAmBC,EAAuBC,EAAwB,CAChF,OAAOC,EAAUF,CAAK,EAAE,KAAKG,GAAYF,CAAS,EAAGG,GAAUH,CAAS,CAAC,CAC3E,CCJM,SAAUI,GAAiBC,EAAqBC,EAAwB,CAC5E,OAAO,IAAIC,EAAc,SAACC,EAAU,CAElC,IAAIC,EAAI,EAER,OAAOH,EAAU,SAAS,UAAA,CACpBG,IAAMJ,EAAM,OAGdG,EAAW,SAAQ,GAInBA,EAAW,KAAKH,EAAMI,IAAI,EAIrBD,EAAW,QACd,KAAK,SAAQ,EAGnB,CAAC,CACH,CAAC,CACH,CCfM,SAAUE,GAAoBC,EAAoBC,EAAwB,CAC9E,OAAO,IAAIC,EAAc,SAACC,EAAU,CAClC,IAAIC,EAKJ,OAAAC,EAAgBF,EAAYF,EAAW,UAAA,CAErCG,EAAYJ,EAAcI,IAAgB,EAE1CC,EACEF,EACAF,EACA,UAAA,OACMK,EACAC,EACJ,GAAI,CAEDC,EAAkBJ,EAAS,KAAI,EAA7BE,EAAKE,EAAA,MAAED,EAAIC,EAAA,WACPC,EAAP,CAEAN,EAAW,MAAMM,CAAG,EACpB,OAGEF,EAKFJ,EAAW,SAAQ,EAGnBA,EAAW,KAAKG,CAAK,CAEzB,EACA,EACA,EAAI,CAER,CAAC,EAMM,UAAA,CAAM,OAAAI,EAAWN,GAAQ,KAAA,OAARA,EAAU,MAAM,GAAKA,EAAS,OAAM,CAA/C,CACf,CAAC,CACH,CCvDM,SAAUO,GAAyBC,EAAyBC,EAAwB,CACxF,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,yBAAyB,EAE3C,OAAO,IAAIE,EAAc,SAACC,EAAU,CAClCC,EAAgBD,EAAYF,EAAW,UAAA,CACrC,IAAMI,EAAWL,EAAM,OAAO,eAAc,EAC5CI,EACED,EACAF,EACA,UAAA,CACEI,EAAS,KAAI,EAAG,KAAK,SAACC,EAAM,CACtBA,EAAO,KAGTH,EAAW,SAAQ,EAEnBA,EAAW,KAAKG,EAAO,KAAK,CAEhC,CAAC,CACH,EACA,EACA,EAAI,CAER,CAAC,CACH,CAAC,CACH,CCzBM,SAAUC,GAA8BC,EAA8BC,EAAwB,CAClG,OAAOC,GAAsBC,GAAmCH,CAAK,EAAGC,CAAS,CACnF,CCoBM,SAAUG,GAAaC,EAA2BC,EAAwB,CAC9E,GAAID,GAAS,KAAM,CACjB,GAAIE,GAAoBF,CAAK,EAC3B,OAAOG,GAAmBH,EAAOC,CAAS,EAE5C,GAAIG,EAAYJ,CAAK,EACnB,OAAOK,GAAcL,EAAOC,CAAS,EAEvC,GAAIK,GAAUN,CAAK,EACjB,OAAOO,GAAgBP,EAAOC,CAAS,EAEzC,GAAIO,GAAgBR,CAAK,EACvB,OAAOS,GAAsBT,EAAOC,CAAS,EAE/C,GAAIS,GAAWV,CAAK,EAClB,OAAOW,GAAiBX,EAAOC,CAAS,EAE1C,GAAIW,GAAqBZ,CAAK,EAC5B,OAAOa,GAA2Bb,EAAOC,CAAS,EAGtD,MAAMa,GAAiCd,CAAK,CAC9C,CCoDM,SAAUe,EAAQC,EAA2BC,EAAyB,CAC1E,OAAOA,EAAYC,GAAUF,EAAOC,CAAS,EAAIE,EAAUH,CAAK,CAClE,CCxBM,SAAUI,IAAE,SAAIC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACpB,IAAMC,EAAYC,EAAaH,CAAI,EACnC,OAAOI,EAAKJ,EAAaE,CAAS,CACpC,CC3EM,SAAUG,GAAYC,EAAU,CACpC,OAAOA,aAAiB,MAAQ,CAAC,MAAMA,CAAY,CACrD,CCsCM,SAAUC,EAAUC,EAAyCC,EAAa,CAC9E,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAEhC,IAAIC,EAAQ,EAGZF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAQ,CAG5CH,EAAW,KAAKJ,EAAQ,KAAKC,EAASM,EAAOF,GAAO,CAAC,CACvD,CAAC,CAAC,CAEN,CAAC,CACH,CC1DQ,IAAAG,GAAY,MAAK,QAEzB,SAASC,GAAkBC,EAA6BC,EAAW,CAC/D,OAAOH,GAAQG,CAAI,EAAID,EAAE,MAAA,OAAAE,EAAA,CAAA,EAAAC,EAAIF,CAAI,CAAA,CAAA,EAAID,EAAGC,CAAI,CAChD,CAMM,SAAUG,GAAuBJ,EAA2B,CAC9D,OAAOK,EAAI,SAAAJ,EAAI,CAAI,OAAAF,GAAYC,EAAIC,CAAI,CAApB,CAAqB,CAC5C,CCKM,SAAUK,GACdC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EAAgC,CAGhC,IAAMC,EAAc,CAAA,EAEhB
C,EAAS,EAETC,EAAQ,EAERC,EAAa,GAKXC,EAAgB,UAAA,CAIhBD,GAAc,CAACH,EAAO,QAAU,CAACC,GACnCR,EAAW,SAAQ,CAEvB,EAGMY,EAAY,SAACC,EAAQ,CAAK,OAACL,EAASN,EAAaY,EAAWD,CAAK,EAAIN,EAAO,KAAKM,CAAK,CAA5D,EAE1BC,EAAa,SAACD,EAAQ,CAI1BT,GAAUJ,EAAW,KAAKa,CAAY,EAItCL,IAKA,IAAIO,EAAgB,GAGpBC,EAAUf,EAAQY,EAAOJ,GAAO,CAAC,EAAE,UACjCQ,EACEjB,EACA,SAACkB,EAAU,CAGTf,GAAY,MAAZA,EAAee,CAAU,EAErBd,EAGFQ,EAAUM,CAAiB,EAG3BlB,EAAW,KAAKkB,CAAU,CAE9B,EACA,UAAA,CAGEH,EAAgB,EAClB,EAEA,OACA,UAAA,CAIE,GAAIA,EAKF,GAAI,CAIFP,IAKA,qBACE,IAAMW,EAAgBZ,EAAO,MAAK,EAI9BF,EACFe,EAAgBpB,EAAYK,EAAmB,UAAA,CAAM,OAAAS,EAAWK,CAAa,CAAxB,CAAyB,EAE9EL,EAAWK,CAAa,GARrBZ,EAAO,QAAUC,EAASN,OAYjCS,EAAa,QACNU,EAAP,CACArB,EAAW,MAAMqB,CAAG,EAG1B,CAAC,CACF,CAEL,EAGA,OAAAtB,EAAO,UACLkB,EAAyBjB,EAAYY,EAAW,UAAA,CAE9CF,EAAa,GACbC,EAAa,CACf,CAAC,CAAC,EAKG,UAAA,CACLL,GAAmB,MAAnBA,EAAmB,CACrB,CACF,CClEM,SAAUgB,EACdC,EACAC,EACAC,EAA6B,CAE7B,OAFAA,IAAA,SAAAA,EAAA,KAEIC,EAAWF,CAAc,EAEpBF,EAAS,SAACK,EAAGC,EAAC,CAAK,OAAAC,EAAI,SAACC,EAAQC,EAAU,CAAK,OAAAP,EAAeG,EAAGG,EAAGF,EAAGG,CAAE,CAA1B,CAA2B,EAAEC,EAAUT,EAAQI,EAAGC,CAAC,CAAC,CAAC,CAAjF,EAAoFH,CAAU,GAC/G,OAAOD,GAAmB,WACnCC,EAAaD,GAGRS,EAAQ,SAACC,EAAQC,EAAU,CAAK,OAAAC,GAAeF,EAAQC,EAAYZ,EAASE,CAAU,CAAtD,CAAuD,EAChG,CChCM,SAAUY,GAAyCC,EAA6B,CAA7B,OAAAA,IAAA,SAAAA,EAAA,KAChDC,EAASC,EAAUF,CAAU,CACtC,CCNM,SAAUG,IAAS,CACvB,OAAOC,GAAS,CAAC,CACnB,CCmDM,SAAUC,IAAM,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACrB,OAAOC,GAAS,EAAGC,EAAKH,EAAMI,EAAaJ,CAAI,CAAC,CAAC,CACnD,CC1GA,IAAMK,GAA0B,CAAC,cAAe,gBAAgB,EAC1DC,GAAqB,CAAC,mBAAoB,qBAAqB,EAC/DC,GAAgB,CAAC,KAAM,KAAK,EA8N5B,SAAUC,EACdC,EACAC,EACAC,EACAC,EAAsC,CAMtC,GAJIC,EAAWF,CAAO,IACpBC,EAAiBD,EACjBA,EAAU,QAERC,EACF,OAAOJ,EAAaC,EAAQC,EAAWC,CAA+B,EAAE,KAAKG,GAAiBF,CAAc,CAAC,EAUzG,IAAAG,EAAAC,EAEJC,GAAcR,CAAM,EAChBH,GAAmB,IAAI,SAACY,EAAU,CAAK,OAAA,SAACC,EAAY,CAAK,OAAAV,EAAOS,GAAYR,EAAWS,EAASR,CAA+B,CAAtE,CAAlB,CAAyF,EAElIS,GAAwBX,CAAM,EAC5BJ,GAAwB,IAAIgB,GAAwBZ,EAAQC,CAAS,CAAC,EACtEY,GAA0Bb,CAAM,EAChCF,GAAc,IAAIc,GAAwBZ,EAAQC,CAAS,CAAC,EAC5D,CAAA,EAAE,CAAA,EATDa,EAAGR,EAAA,GAAES,EAAMT,EAAA,GAgBlB,GAAI,CAACQ,GACCE,EAAYhB,CAAM,EACpB,OAAOiB,EAAS,SAACC,EAAc,CAAK,OAAAnB,EAAUmB,EAAWjB,EAAWC,CAA+B,CAA/D,CAAgE,EAClGiB,EAAUnB,CAAM,CAAC,EAOvB,GAAI,CAACc,EACH,MAAM,IAAI,UAAU,sBAAsB,EAG5C,OAAO,IAAIM,EAAc,SAACC,EAAU,CAIlC,IAAMX,EAAU,UAAA,SAACY,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GAAmB,OAAAF,EAAW,KAAK,EAAIC,EAAK,OAASA,EAAOA,EAAK,EAAE,CAAhD,EAEpC,OAAAR,EAAIJ,CAAO,EAEJ,UAAA,CAAM,OAAAK,EAAQL,CAAO,CAAf,CACf,CAAC,CACH,CASA,SAASE,GAAwBZ,EAAaC,EAAiB,CAC7D,OAAO,SAACQ,EAAkB,CAAK,OAAA,SAACC,EAAY,CAAK,OAAAV,EAAOS,GAAYR,EAAWS,CAAO,CAArC,CAAlB,CACjC,CAOA,SAASC,GAAwBX,EAAW,CAC1C,OAAOI,EAAWJ,EAAO,WAAW,GAAKI,EAAWJ,EAAO,cAAc,CAC3E,CAOA,SAASa,GAA0Bb,EAAW,CAC5C,OAAOI,EAAWJ,EAAO,EAAE,GAAKI,EAAWJ,EAAO,GAAG,CACvD,CAOA,SAASQ,GAAcR,EAAW,CAChC,OAAOI,EAAWJ,EAAO,gBAAgB,GAAKI,EAAWJ,EAAO,mBAAmB,CACrF,CCvMM,SAAUwB,EACdC,EACAC,EACAC,EAAyC,CAFzCF,IAAA,SAAAA,EAAA,GAEAE,IAAA,SAAAA,EAAAC,IAIA,IAAIC,EAAmB,GAEvB,OAAIH,GAAuB,OAIrBI,GAAYJ,CAAmB,EACjCC,EAAYD,EAIZG,EAAmBH,GAIhB,IAAIK,EAAW,SAACC,EAAU,CAI/B,IAAIC,EAAMC,GAAYT,CAAO,EAAI,CAACA,EAAUE,EAAW,IAAG,EAAKF,EAE3DQ,EAAM,IAERA,EAAM,GAIR,IAAIE,EAAI,EAGR,OAAOR,EAAU,SAAS,UAAA,CACnBK,EAAW,SAEdA,EAAW,KAAKG,GAAG,EAEf,GAAKN,EAGP,KAAK,SAAS,OAAWA,CAAgB,EAGzCG,EAAW,SAAQ,EAGzB,EAAGC,CAAG,CACR,CAAC,CACH,CCvIM,SAAUG,GAASC,EAAYC,EAAyC,CAArD,OAAAD,IAAA,SAAAA,EAAA,GAAYC,IAAA,SAAAA,EAAAC,GAC/BF,EAAS,IAEXA,EAAS,GAGJG,EAAMH,EAAQA,EAAQC,CAAS,CACxC,CCgCM,SAAUG,IAAK,SAACC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACpB,IAAMC,EA
AYC,EAAaH,CAAI,EAC7BI,EAAaC,GAAUL,EAAM,GAAQ,EACrCM,EAAUN,EAChB,OAAQM,EAAQ,OAGZA,EAAQ,SAAW,EAEnBC,EAAUD,EAAQ,EAAE,EAEpBE,GAASJ,CAAU,EAAEK,EAAKH,EAASJ,CAAS,CAAC,EAL7CQ,CAMN,CCjEO,IAAMC,GAAQ,IAAIC,EAAkBC,CAAI,ECwBzC,SAAUC,EAAUC,EAAiDC,EAAa,CACtF,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAEhC,IAAIC,EAAQ,EAIZF,EAAO,UAILG,EAAyBF,EAAY,SAACG,EAAK,CAAK,OAAAP,EAAU,KAAKC,EAASM,EAAOF,GAAO,GAAKD,EAAW,KAAKG,CAAK,CAAhE,CAAiE,CAAC,CAEtH,CAAC,CACH,CC3BM,SAAUC,EAAQC,EAAa,CACnC,OAAOA,GAAS,EAEZ,UAAA,CAAM,OAAAC,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CACzB,IAAIC,EAAO,EACXF,EAAO,UACLG,EAAyBF,EAAY,SAACG,EAAK,CAIrC,EAAEF,GAAQL,IACZI,EAAW,KAAKG,CAAK,EAIjBP,GAASK,GACXD,EAAW,SAAQ,EAGzB,CAAC,CAAC,CAEN,CAAC,CACP,CC9BM,SAAUI,IAAc,CAC5B,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCD,EAAO,UAAUE,EAAyBD,EAAYE,CAAI,CAAC,CAC7D,CAAC,CACH,CCCM,SAAUC,GAASC,EAAQ,CAC/B,OAAOC,EAAI,UAAA,CAAM,OAAAD,CAAA,CAAK,CACxB,CCyCM,SAAUE,GACdC,EACAC,EAAmC,CAEnC,OAAIA,EAEK,SAACC,EAAqB,CAC3B,OAAAC,GAAOF,EAAkB,KAAKG,EAAK,CAAC,EAAGC,GAAc,CAAE,EAAGH,EAAO,KAAKH,GAAUC,CAAqB,CAAC,CAAC,CAAvG,EAGGM,EAAS,SAACC,EAAOC,EAAK,CAAK,OAAAR,EAAsBO,EAAOC,CAAK,EAAE,KAAKJ,EAAK,CAAC,EAAGK,GAAMF,CAAK,CAAC,CAA9D,CAA+D,CACnG,CCtCM,SAAUG,GAASC,EAAoBC,EAAyC,CAAzCA,IAAA,SAAAA,EAAAC,GAC3C,IAAMC,EAAWC,EAAMJ,EAAKC,CAAS,EACrC,OAAOI,GAAU,UAAA,CAAM,OAAAF,CAAA,CAAQ,CACjC,CC0EM,SAAUG,GACdC,EACAC,EAA0D,CAA1D,OAAAA,IAAA,SAAAA,EAA+BC,GAK/BF,EAAaA,GAAU,KAAVA,EAAcG,GAEpBC,EAAQ,SAACC,EAAQC,EAAU,CAGhC,IAAIC,EAEAC,EAAQ,GAEZH,EAAO,UACLI,EAAyBH,EAAY,SAACI,EAAK,CAEzC,IAAMC,EAAaV,EAAYS,CAAK,GAKhCF,GAAS,CAACR,EAAYO,EAAaI,CAAU,KAM/CH,EAAQ,GACRD,EAAcI,EAGdL,EAAW,KAAKI,CAAK,EAEzB,CAAC,CAAC,CAEN,CAAC,CACH,CAEA,SAASP,GAAeS,EAAQC,EAAM,CACpC,OAAOD,IAAMC,CACf,CCrHM,SAAUC,GAAYC,EAAoB,CAC9C,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAGhC,GAAI,CACFD,EAAO,UAAUC,CAAU,UAE3BA,EAAW,IAAIH,CAAQ,EAE3B,CAAC,CACH,CCyCM,SAAUI,GAAUC,EAAqC,OACzDC,EAAQ,IACRC,EAEJ,OAAIF,GAAiB,OACf,OAAOA,GAAkB,UACxBG,EAA4BH,EAAa,MAAzCC,EAAKE,IAAA,OAAG,IAAQA,EAAED,EAAUF,EAAa,OAE5CC,EAAQD,GAILC,GAAS,EACZ,UAAA,CAAM,OAAAG,CAAA,EACNC,EAAQ,SAACC,EAAQC,EAAU,CACzB,IAAIC,EAAQ,EACRC,EAEEC,EAAc,UAAA,CAGlB,GAFAD,GAAS,MAATA,EAAW,YAAW,EACtBA,EAAY,KACRP,GAAS,KAAM,CACjB,IAAMS,EAAW,OAAOT,GAAU,SAAWU,EAAMV,CAAK,EAAIW,EAAUX,EAAMM,CAAK,CAAC,EAC5EM,EAAqBC,EAAyBR,EAAY,UAAA,CAC9DO,EAAmB,YAAW,EAC9BE,EAAiB,CACnB,CAAC,EACDL,EAAS,UAAUG,CAAkB,OAErCE,EAAiB,CAErB,EAEMA,EAAoB,UAAA,CACxB,IAAIC,EAAY,GAChBR,EAAYH,EAAO,UACjBS,EAAyBR,EAAY,OAAW,UAAA,CAC1C,EAAEC,EAAQP,EACRQ,EACFC,EAAW,EAEXO,EAAY,GAGdV,EAAW,SAAQ,CAEvB,CAAC,CAAC,EAGAU,GACFP,EAAW,CAEf,EAEAM,EAAiB,CACnB,CAAC,CACP,CCtFM,SAAUE,GACdC,EACAC,EAA6G,CAE7G,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChC,IAAIC,EAAyD,KACzDC,EAAQ,EAERC,EAAa,GAIXC,EAAgB,UAAA,CAAM,OAAAD,GAAc,CAACF,GAAmBD,EAAW,SAAQ,CAArD,EAE5BD,EAAO,UACLM,EACEL,EACA,SAACM,EAAK,CAEJL,GAAe,MAAfA,EAAiB,YAAW,EAC5B,IAAIM,EAAa,EACXC,EAAaN,IAEnBO,EAAUb,EAAQU,EAAOE,CAAU,CAAC,EAAE,UACnCP,EAAkBI,EACjBL,EAIA,SAACU,EAAU,CAAK,OAAAV,EAAW,KAAKH,EAAiBA,EAAeS,EAAOI,EAAYF,EAAYD,GAAY,EAAIG,CAAU,CAAzG,EAChB,UAAA,CAIET,EAAkB,KAClBG,EAAa,CACf,CAAC,CACD,CAEN,EACA,UAAA,CACED,EAAa,GACbC,EAAa,CACf,CAAC,CACF,CAEL,CAAC,CACH,CCvFM,SAAUO,GAAaC,EAA8B,CACzD,OAAOC,EAAQ,SAACC,EAAQC,EAAU,CAChCC,EAAUJ,CAAQ,EAAE,UAAUK,EAAyBF,EAAY,UAAA,CAAM,OAAAA,EAAW,SAAQ,CAAnB,EAAuBG,CAAI,CAAC,EACrG,CAACH,EAAW,QAAUD,EAAO,UAAUC,CAAU,CACnD,CAAC,CACH,CCwDM,SAAUI,GACdC,EACAC,EACAC,EAA8B,CAK9B,IAAMC,EACJC,EAAWJ,CAAc,GAAKC,GAASC,EAElC,CAAE,KAAMF,EAA2E,MAAKC,EAAE,SAAQC,CAAA,EACnGF,EAEN,OAAOG,EACHE,EAAQ,SAACC,EAAQC,EAAU,QACzBC,EAAAL,EAAY,aAAS,MAAAK,IAAA,QAAAA,EAAA,KAArBL,CAAW,EACX,IAAIM,EAAU,GACdH,EAAO,UACLI,EACEH,EACA,SAACI,EAAK,QACJH,EAAAL,EAAY,QAAI,MAAAK,IAAA,QAAAA,EAAA,KAAhBL
,EAAmBQ,CAAK,EACxBJ,EAAW,KAAKI,CAAK,CACvB,EACA,UAAA,OACEF,EAAU,IACVD,EAAAL,EAAY,YAAQ,MAAAK,IAAA,QAAAA,EAAA,KAApBL,CAAW,EACXI,EAAW,SAAQ,CACrB,EACA,SAACK,EAAG,OACFH,EAAU,IACVD,EAAAL,EAAY,SAAK,MAAAK,IAAA,QAAAA,EAAA,KAAjBL,EAAoBS,CAAG,EACvBL,EAAW,MAAMK,CAAG,CACtB,EACA,UAAA,SACMH,KACFD,EAAAL,EAAY,eAAW,MAAAK,IAAA,QAAAA,EAAA,KAAvBL,CAAW,IAEbU,EAAAV,EAAY,YAAQ,MAAAU,IAAA,QAAAA,EAAA,KAApBV,CAAW,CACb,CAAC,CACF,CAEL,CAAC,EAIDW,CACN,CCjGM,SAAUC,IAAc,SAAOC,EAAA,CAAA,EAAAC,EAAA,EAAAA,EAAA,UAAA,OAAAA,IAAAD,EAAAC,GAAA,UAAAA,GACnC,IAAMC,EAAUC,GAAkBH,CAAM,EAExC,OAAOI,EAAQ,SAACC,EAAQC,EAAU,CAehC,QAdMC,EAAMP,EAAO,OACbQ,EAAc,IAAI,MAAMD,CAAG,EAI7BE,EAAWT,EAAO,IAAI,UAAA,CAAM,MAAA,EAAA,CAAK,EAGjCU,EAAQ,cAMHC,EAAC,CACRC,EAAUZ,EAAOW,EAAE,EAAE,UACnBE,EACEP,EACA,SAACQ,EAAK,CACJN,EAAYG,GAAKG,EACb,CAACJ,GAAS,CAACD,EAASE,KAEtBF,EAASE,GAAK,IAKbD,EAAQD,EAAS,MAAMM,CAAQ,KAAON,EAAW,MAEtD,EAGAO,CAAI,CACL,GAnBIL,EAAI,EAAGA,EAAIJ,EAAKI,MAAhBA,CAAC,EAwBVN,EAAO,UACLQ,EAAyBP,EAAY,SAACQ,EAAK,CACzC,GAAIJ,EAAO,CAET,IAAMO,EAAMC,EAAA,CAAIJ,CAAK,EAAAK,EAAKX,CAAW,CAAA,EACrCF,EAAW,KAAKJ,EAAUA,EAAO,MAAA,OAAAgB,EAAA,CAAA,EAAAC,EAAIF,CAAM,CAAA,CAAA,EAAIA,CAAM,EAEzD,CAAC,CAAC,CAEN,CAAC,CACH,CC9DA,IAAMG,GAAY,SAAS,cAAc,KAAK,EAC9C,SAAS,KAAK,YAAYA,EAAS,EAGnC,IAAMC,GAAS,SAAS,cAAc,oBAAoB,EAC1D,GAAIA,GAAQ,CACV,IAAMC,EAAS,SAAS,cAAc,QAAQ,EAC9CA,EAAO,UAAY,yEACfD,GAAO,eACTA,GAAO,cAAc,aAAaC,EAAQD,EAAM,EAGlD,IAAME,EAAM,IAAIC,GAAuB,CAAC,EACxCD,EACG,KACCE,GAAqB,CACvB,EACG,UAAUC,GAAM,CACf,eAAe,QAAQ,uCAAU,GAAGA,GAAI,EACxCJ,EAAO,OAAS,CAACI,CACnB,CAAC,EAGLH,EAAI,KAAK,KAAK,MAAM,eAAe,QAAQ,sCAAQ,GAAK,MAAM,CAAC,EAC/DI,EAAUL,EAAQ,OAAO,EACtB,KACCM,GAAeL,CAAG,CACpB,EACG,UAAU,CAAC,CAAC,CAAEG,CAAE,IAAMH,EAAI,KAAK,CAACG,CAAE,CAAC,EAGxCG,GAAS,GAAG,EACT,KACCC,GAAUP,EAAI,KAAKQ,EAAOL,GAAM,CAACA,CAAE,CAAC,CAAC,EACrCM,EAAK,EAAE,EACPC,GAAO,CAAE,MAAO,IAAMV,EAAI,KAAKQ,EAAOL,GAAMA,CAAE,CAAC,CAAE,CAAC,EAClDQ,EAAS,IAAM,CACb,IAAMC,EAAW,SAAS,cAAc,KAAK,EAC7C,OAAAA,EAAS,UAAY,uCACrBA,EAAS,WAAa,OACtBf,GAAU,YAAYe,CAAQ,EACvBC,GAAMC,GAAOC,GAAGH,CAAQ,CAAC,EAC7B,KACCI,GAAS,IAAMJ,EAAS,OAAO,CAAC,EAChCL,GAAUP,EAAI,KAAKQ,EAAOL,GAAM,CAACA,CAAE,CAAC,CAAC,EACrCc,GAAUC,GAAMd,EAAUc,EAAI,OAAO,EAClC,KACCC,GAAI,IAAMD,EAAG,UAAU,IAAI,4EAAgB,CAAC,EAC5CE,GAAM,GAAI,EACVD,GAAI,IAAMD,EAAG,UAAU,OAAO,4EAAgB,CAAC,CACjD,CACF,CACF,CACJ,CAAC,CACH,EACG,UAAU,CACjB", + "names": ["require_tslib", "__commonJSMin", "exports", "module", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", "__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "__createBinding", "factory", "root", "createExporter", "previous", "id", "v", "exporter", "extendStatics", "d", "b", "p", "__", "t", "s", "n", "e", "i", "decorators", "target", "key", "desc", "c", "r", "paramIndex", "decorator", "metadataKey", "metadataValue", "thisArg", "_arguments", "P", "generator", "adopt", "value", "resolve", "reject", "fulfilled", "step", "rejected", "result", "body", "_", "y", "g", "verb", "op", "m", "o", "k", "k2", "ar", "error", "il", "j", "jl", "to", "from", "pack", "l", "q", "a", "resume", "settle", "fulfill", "f", "cooked", "raw", "__setModuleDefault", "mod", "receiver", "state", "kind", "import_tslib", "__extends", "__assign", "__rest", "__decorate", "__param", "__metadata", "__awaiter", "__generator", "__exportStar", "__createBinding", "__values", "__read", "__spread", "__spreadArrays", "__spreadArray", 
"__await", "__asyncGenerator", "__asyncDelegator", "__asyncValues", "__makeTemplateObject", "__importStar", "__importDefault", "__classPrivateFieldGet", "__classPrivateFieldSet", "tslib", "isFunction", "value", "createErrorClass", "createImpl", "_super", "instance", "ctorFunc", "UnsubscriptionError", "createErrorClass", "_super", "errors", "err", "i", "arrRemove", "arr", "item", "index", "Subscription", "initialTeardown", "errors", "_parentage", "_parentage_1", "__values", "_parentage_1_1", "parent_1", "initialFinalizer", "isFunction", "e", "UnsubscriptionError", "_finalizers", "_finalizers_1", "_finalizers_1_1", "finalizer", "execFinalizer", "err", "__spreadArray", "__read", "teardown", "_a", "parent", "arrRemove", "empty", "EMPTY_SUBSCRIPTION", "Subscription", "isSubscription", "value", "isFunction", "execFinalizer", "finalizer", "config", "timeoutProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "reportUnhandledError", "err", "timeoutProvider", "onUnhandledError", "config", "noop", "COMPLETE_NOTIFICATION", "createNotification", "errorNotification", "error", "nextNotification", "value", "kind", "context", "errorContext", "cb", "config", "isRoot", "_a", "errorThrown", "error", "captureError", "err", "Subscriber", "_super", "__extends", "destination", "_this", "isSubscription", "EMPTY_OBSERVER", "next", "error", "complete", "SafeSubscriber", "value", "handleStoppedNotification", "nextNotification", "err", "errorNotification", "COMPLETE_NOTIFICATION", "Subscription", "_bind", "bind", "fn", "thisArg", "ConsumerObserver", "partialObserver", "value", "error", "handleUnhandledError", "err", "SafeSubscriber", "_super", "__extends", "observerOrNext", "complete", "_this", "isFunction", "context_1", "config", "Subscriber", "handleUnhandledError", "error", "config", "captureError", "reportUnhandledError", "defaultErrorHandler", "err", "handleStoppedNotification", "notification", "subscriber", "onStoppedNotification", "timeoutProvider", "EMPTY_OBSERVER", "noop", "observable", "identity", "x", "pipeFromArray", "fns", "identity", "input", "prev", "fn", "Observable", "subscribe", "operator", "observable", "observerOrNext", "error", "complete", "_this", "subscriber", "isSubscriber", "SafeSubscriber", "errorContext", "_a", "source", "sink", "err", "next", "promiseCtor", "getPromiseCtor", "resolve", "reject", "value", "operations", "_i", "pipeFromArray", "x", "getPromiseCtor", "promiseCtor", "_a", "config", "isObserver", "value", "isFunction", "isSubscriber", "Subscriber", "isSubscription", "hasLift", "source", "isFunction", "operate", "init", "liftedSource", "err", "createOperatorSubscriber", "destination", "onNext", "onComplete", "onError", "onFinalize", "OperatorSubscriber", "_super", "__extends", "shouldUnsubscribe", "_this", "value", "err", "closed_1", "_a", "Subscriber", "ObjectUnsubscribedError", "createErrorClass", "_super", "Subject", "_super", "__extends", "_this", "operator", "subject", "AnonymousSubject", "ObjectUnsubscribedError", "value", "errorContext", "_b", "__values", "_c", "observer", "err", "observers", "_a", "subscriber", "hasError", "isStopped", "EMPTY_SUBSCRIPTION", "Subscription", "arrRemove", "thrownError", "observable", "Observable", "destination", "source", "AnonymousSubject", "_super", "__extends", "destination", "source", "_this", "value", "_b", "_a", "err", "subscriber", "EMPTY_SUBSCRIPTION", "Subject", "dateTimestampProvider", "ReplaySubject", "_super", "__extends", "_bufferSize", "_windowTime", "_timestampProvider", 
"dateTimestampProvider", "_this", "value", "_a", "isStopped", "_buffer", "_infiniteTimeWindow", "subscriber", "subscription", "copy", "i", "adjustedBufferSize", "now", "last", "Subject", "Action", "_super", "__extends", "scheduler", "work", "state", "delay", "Subscription", "intervalProvider", "handler", "timeout", "args", "_i", "delegate", "__spreadArray", "__read", "handle", "AsyncAction", "_super", "__extends", "scheduler", "work", "_this", "state", "delay", "id", "_a", "_id", "intervalProvider", "_scheduler", "error", "_delay", "errored", "errorValue", "e", "actions", "arrRemove", "Action", "Scheduler", "schedulerActionCtor", "now", "work", "delay", "state", "dateTimestampProvider", "AsyncScheduler", "_super", "__extends", "SchedulerAction", "now", "Scheduler", "_this", "action", "actions", "error", "asyncScheduler", "AsyncScheduler", "AsyncAction", "async", "EMPTY", "Observable", "subscriber", "isScheduler", "value", "isFunction", "last", "arr", "popResultSelector", "args", "isFunction", "popScheduler", "isScheduler", "popNumber", "defaultValue", "isArrayLike", "x", "isPromise", "value", "isFunction", "isInteropObservable", "input", "isFunction", "observable", "isAsyncIterable", "obj", "isFunction", "createInvalidObservableTypeError", "input", "getSymbolIterator", "iterator", "isIterable", "input", "isFunction", "iterator", "readableStreamLikeToAsyncGenerator", "readableStream", "reader", "__await", "_a", "_b", "value", "done", "isReadableStreamLike", "obj", "isFunction", "innerFrom", "input", "Observable", "isInteropObservable", "fromInteropObservable", "isArrayLike", "fromArrayLike", "isPromise", "fromPromise", "isAsyncIterable", "fromAsyncIterable", "isIterable", "fromIterable", "isReadableStreamLike", "fromReadableStreamLike", "createInvalidObservableTypeError", "obj", "subscriber", "obs", "observable", "isFunction", "array", "i", "promise", "value", "err", "reportUnhandledError", "iterable", "iterable_1", "__values", "iterable_1_1", "asyncIterable", "process", "readableStream", "readableStreamLikeToAsyncGenerator", "asyncIterable_1", "__asyncValues", "asyncIterable_1_1", "executeSchedule", "parentSubscription", "scheduler", "work", "delay", "repeat", "scheduleSubscription", "observeOn", "scheduler", "delay", "operate", "source", "subscriber", "createOperatorSubscriber", "value", "executeSchedule", "err", "subscribeOn", "scheduler", "delay", "operate", "source", "subscriber", "scheduleObservable", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "schedulePromise", "input", "scheduler", "innerFrom", "subscribeOn", "observeOn", "scheduleArray", "input", "scheduler", "Observable", "subscriber", "i", "scheduleIterable", "input", "scheduler", "Observable", "subscriber", "iterator", "executeSchedule", "value", "done", "_a", "err", "isFunction", "scheduleAsyncIterable", "input", "scheduler", "Observable", "subscriber", "executeSchedule", "iterator", "result", "scheduleReadableStreamLike", "input", "scheduler", "scheduleAsyncIterable", "readableStreamLikeToAsyncGenerator", "scheduled", "input", "scheduler", "isInteropObservable", "scheduleObservable", "isArrayLike", "scheduleArray", "isPromise", "schedulePromise", "isAsyncIterable", "scheduleAsyncIterable", "isIterable", "scheduleIterable", "isReadableStreamLike", "scheduleReadableStreamLike", "createInvalidObservableTypeError", "from", "input", "scheduler", "scheduled", "innerFrom", "of", "args", "_i", "scheduler", "popScheduler", "from", "isValidDate", "value", "map", "project", "thisArg", "operate", "source", 
"subscriber", "index", "createOperatorSubscriber", "value", "isArray", "callOrApply", "fn", "args", "__spreadArray", "__read", "mapOneOrManyArgs", "map", "mergeInternals", "source", "subscriber", "project", "concurrent", "onBeforeNext", "expand", "innerSubScheduler", "additionalFinalizer", "buffer", "active", "index", "isComplete", "checkComplete", "outerNext", "value", "doInnerSub", "innerComplete", "innerFrom", "createOperatorSubscriber", "innerValue", "bufferedValue", "executeSchedule", "err", "mergeMap", "project", "resultSelector", "concurrent", "isFunction", "a", "i", "map", "b", "ii", "innerFrom", "operate", "source", "subscriber", "mergeInternals", "mergeAll", "concurrent", "mergeMap", "identity", "concatAll", "mergeAll", "concat", "args", "_i", "concatAll", "from", "popScheduler", "nodeEventEmitterMethods", "eventTargetMethods", "jqueryMethods", "fromEvent", "target", "eventName", "options", "resultSelector", "isFunction", "mapOneOrManyArgs", "_a", "__read", "isEventTarget", "methodName", "handler", "isNodeStyleEventEmitter", "toCommonHandlerRegistry", "isJQueryStyleEventEmitter", "add", "remove", "isArrayLike", "mergeMap", "subTarget", "innerFrom", "Observable", "subscriber", "args", "_i", "timer", "dueTime", "intervalOrScheduler", "scheduler", "async", "intervalDuration", "isScheduler", "Observable", "subscriber", "due", "isValidDate", "n", "interval", "period", "scheduler", "asyncScheduler", "timer", "merge", "args", "_i", "scheduler", "popScheduler", "concurrent", "popNumber", "sources", "innerFrom", "mergeAll", "from", "EMPTY", "NEVER", "Observable", "noop", "filter", "predicate", "thisArg", "operate", "source", "subscriber", "index", "createOperatorSubscriber", "value", "take", "count", "EMPTY", "operate", "source", "subscriber", "seen", "createOperatorSubscriber", "value", "ignoreElements", "operate", "source", "subscriber", "createOperatorSubscriber", "noop", "mapTo", "value", "map", "delayWhen", "delayDurationSelector", "subscriptionDelay", "source", "concat", "take", "ignoreElements", "mergeMap", "value", "index", "mapTo", "delay", "due", "scheduler", "asyncScheduler", "duration", "timer", "delayWhen", "distinctUntilChanged", "comparator", "keySelector", "identity", "defaultCompare", "operate", "source", "subscriber", "previousKey", "first", "createOperatorSubscriber", "value", "currentKey", "a", "b", "finalize", "callback", "operate", "source", "subscriber", "repeat", "countOrConfig", "count", "delay", "_a", "EMPTY", "operate", "source", "subscriber", "soFar", "sourceSub", "resubscribe", "notifier", "timer", "innerFrom", "notifierSubscriber_1", "createOperatorSubscriber", "subscribeToSource", "syncUnsub", "switchMap", "project", "resultSelector", "operate", "source", "subscriber", "innerSubscriber", "index", "isComplete", "checkComplete", "createOperatorSubscriber", "value", "innerIndex", "outerIndex", "innerFrom", "innerValue", "takeUntil", "notifier", "operate", "source", "subscriber", "innerFrom", "createOperatorSubscriber", "noop", "tap", "observerOrNext", "error", "complete", "tapObserver", "isFunction", "operate", "source", "subscriber", "_a", "isUnsub", "createOperatorSubscriber", "value", "err", "_b", "identity", "withLatestFrom", "inputs", "_i", "project", "popResultSelector", "operate", "source", "subscriber", "len", "otherValues", "hasValue", "ready", "i", "innerFrom", "createOperatorSubscriber", "value", "identity", "noop", "values", "__spreadArray", "__read", "container", "header", "button", "on$", "ReplaySubject", "distinctUntilChanged", "on", "fromEvent", 
"withLatestFrom", "interval", "takeUntil", "filter", "take", "repeat", "mergeMap", "instance", "merge", "NEVER", "of", "finalize", "switchMap", "el", "tap", "delay"] +} diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.ar.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.ar.min.js new file mode 100644 index 00000000..9b06c26c --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.ar.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ar=function(){this.pipeline.reset(),this.pipeline.add(e.ar.trimmer,e.ar.stopWordFilter,e.ar.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ar.stemmer))},e.ar.wordCharacters="ء-ٛٱـ",e.ar.trimmer=e.trimmerSupport.generateTrimmer(e.ar.wordCharacters),e.Pipeline.registerFunction(e.ar.trimmer,"trimmer-ar"),e.ar.stemmer=function(){var e=this;return e.result=!1,e.preRemoved=!1,e.sufRemoved=!1,e.pre={pre1:"ف ك ب و س ل ن ا ي ت",pre2:"ال لل",pre3:"بال وال فال تال كال ولل",pre4:"فبال كبال وبال وكال"},e.suf={suf1:"ه ك ت ن ا ي",suf2:"نك نه ها وك يا اه ون ين تن تم نا وا ان كم كن ني نن ما هم هن تك ته ات يه",suf3:"تين كهم نيه نهم ونه وها يهم ونا ونك وني وهم تكم تنا تها تني تهم كما كها ناه نكم هنا تان يها",suf4:"كموه ناها ونني ونهم تكما تموه تكاه كماه ناكم ناهم نيها وننا"},e.patterns=JSON.parse('{"pt43":[{"pt":[{"c":"ا","l":1}]},{"pt":[{"c":"ا,ت,ن,ي","l":0}],"mPt":[{"c":"ف","l":0,"m":1},{"c":"ع","l":1,"m":2},{"c":"ل","l":2,"m":3}]},{"pt":[{"c":"و","l":2}],"mPt":[{"c":"ف","l":0,"m":0},{"c":"ع","l":1,"m":1},{"c":"ل","l":2,"m":3}]},{"pt":[{"c":"ا","l":2}]},{"pt":[{"c":"ي","l":2}],"mPt":[{"c":"ف","l":0,"m":0},{"c":"ع","l":1,"m":1},{"c":"ا","l":2},{"c":"ل","l":3,"m":3}]},{"pt":[{"c":"م","l":0}]}],"pt53":[{"pt":[{"c":"ت","l":0},{"c":"ا","l":2}]},{"pt":[{"c":"ا,ن,ت,ي","l":0},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ت","l":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":0},{"c":"ا","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ع","l":2,"m":3},{"c":"ل","l":3,"m":4},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":0},{"c":"ا","l":3}],"mPt":[{"c":"ف","l":0,"m":1},{"c":"ع","l":1,"m":2},{"c":"ل","l":2,"m":4}]},{"pt":[{"c":"ا","l":3},{"c":"ن","l":4}]},{"pt":[{"c":"ت","l":0},{"c":"ي","l":3}]},{"pt":[{"c":"م","l":0},{"c":"و","l":3}]},{"pt":[{"c":"ا","l":1},{"c":"و","l":3}]},{"pt":[{"c":"و","l":1},{"c":"ا","l":2}]},{"pt":[{"c":"م","l":0},{"c":"ا","l":3}]},{"pt":[{"c":"م","l":0},{"c":"ي","l":3}]},{"pt":[{"c":"ا","l":2},{"c":"ن","l":3}]},{"pt":[{"c":"م","l":0},{"c":"ن","l":1}],"mPt":[{"c":"ا","l":0},{"c":"ن","l":1},{"c":"ف","l":2,"m":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"م","l":0},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ت","l":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"م","l":0},{"c":"ا","l":2}]},{"pt":[{"c":"م","l":1},{"c":"ا","l":3}]},{"pt":[{"c":"ي,ت,ا,ن","l":0},{"c":"ت","l":1}],"mPt":[{"c":"ف","l":0,"m":2},{"c":"ع","l":1,"m":3},{"c":"ا","l":2},{"c":"ل","l":3,"m":4}]},{"pt":[{"c":"ت,ي,ا,ن","l":0},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ت","l":2},{"c":"ع","l":3,"m
":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":2},{"c":"ي","l":3}]},{"pt":[{"c":"ا,ي,ت,ن","l":0},{"c":"ن","l":1}],"mPt":[{"c":"ا","l":0},{"c":"ن","l":1},{"c":"ف","l":2,"m":2},{"c":"ع","l":3,"m":3},{"c":"ا","l":4},{"c":"ل","l":5,"m":4}]},{"pt":[{"c":"ا","l":3},{"c":"ء","l":4}]}],"pt63":[{"pt":[{"c":"ا","l":0},{"c":"ت","l":2},{"c":"ا","l":4}]},{"pt":[{"c":"ا,ت,ن,ي","l":0},{"c":"س","l":1},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"س","l":1},{"c":"ت","l":2},{"c":"ف","l":3,"m":3},{"c":"ع","l":4,"m":4},{"c":"ا","l":5},{"c":"ل","l":6,"m":5}]},{"pt":[{"c":"ا,ن,ت,ي","l":0},{"c":"و","l":3}]},{"pt":[{"c":"م","l":0},{"c":"س","l":1},{"c":"ت","l":2}],"mPt":[{"c":"ا","l":0},{"c":"س","l":1},{"c":"ت","l":2},{"c":"ف","l":3,"m":3},{"c":"ع","l":4,"m":4},{"c":"ا","l":5},{"c":"ل","l":6,"m":5}]},{"pt":[{"c":"ي","l":1},{"c":"ي","l":3},{"c":"ا","l":4},{"c":"ء","l":5}]},{"pt":[{"c":"ا","l":0},{"c":"ن","l":1},{"c":"ا","l":4}]}],"pt54":[{"pt":[{"c":"ت","l":0}]},{"pt":[{"c":"ا,ي,ت,ن","l":0}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ع","l":2,"m":2},{"c":"ل","l":3,"m":3},{"c":"ر","l":4,"m":4},{"c":"ا","l":5},{"c":"ر","l":6,"m":4}]},{"pt":[{"c":"م","l":0}],"mPt":[{"c":"ا","l":0},{"c":"ف","l":1,"m":1},{"c":"ع","l":2,"m":2},{"c":"ل","l":3,"m":3},{"c":"ر","l":4,"m":4},{"c":"ا","l":5},{"c":"ر","l":6,"m":4}]},{"pt":[{"c":"ا","l":2}]},{"pt":[{"c":"ا","l":0},{"c":"ن","l":2}]}],"pt64":[{"pt":[{"c":"ا","l":0},{"c":"ا","l":4}]},{"pt":[{"c":"م","l":0},{"c":"ت","l":1}]}],"pt73":[{"pt":[{"c":"ا","l":0},{"c":"س","l":1},{"c":"ت","l":2},{"c":"ا","l":5}]}],"pt75":[{"pt":[{"c":"ا","l":0},{"c":"ا","l":5}]}]}'),e.execArray=["cleanWord","removeDiacritics","cleanAlef","removeStopWords","normalizeHamzaAndAlef","removeStartWaw","removePre432","removeEndTaa","wordCheck"],e.stem=function(){var r=0;for(e.result=!1,e.preRemoved=!1,e.sufRemoved=!1;r=0)return!0},e.normalizeHamzaAndAlef=function(){return e.word=e.word.replace("ؤ","ء"),e.word=e.word.replace("ئ","ء"),e.word=e.word.replace(/([\u0627])\1+/gi,"ا"),!1},e.removeEndTaa=function(){return!(e.word.length>2)||(e.word=e.word.replace(/[\u0627]$/,""),e.word=e.word.replace("ة",""),!1)},e.removeStartWaw=function(){return e.word.length>3&&"و"==e.word[0]&&"و"==e.word[1]&&(e.word=e.word.slice(1)),!1},e.removePre432=function(){var r=e.word;if(e.word.length>=7){var t=new RegExp("^("+e.pre.pre4.split(" ").join("|")+")");e.word=e.word.replace(t,"")}if(e.word==r&&e.word.length>=6){var c=new RegExp("^("+e.pre.pre3.split(" ").join("|")+")");e.word=e.word.replace(c,"")}if(e.word==r&&e.word.length>=5){var l=new RegExp("^("+e.pre.pre2.split(" ").join("|")+")");e.word=e.word.replace(l,"")}return r!=e.word&&(e.preRemoved=!0),!1},e.patternCheck=function(r){for(var t=0;t3){var t=new RegExp("^("+e.pre.pre1.split(" ").join("|")+")");e.word=e.word.replace(t,"")}return r!=e.word&&(e.preRemoved=!0),!1},e.removeSuf1=function(){var r=e.word;if(0==e.sufRemoved&&e.word.length>3){var t=new RegExp("("+e.suf.suf1.split(" ").join("|")+")$");e.word=e.word.replace(t,"")}return r!=e.word&&(e.sufRemoved=!0),!1},e.removeSuf432=function(){var r=e.word;if(e.word.length>=6){var t=new RegExp("("+e.suf.suf4.split(" ").join("|")+")$");e.word=e.word.replace(t,"")}if(e.word==r&&e.word.length>=5){var c=new RegExp("("+e.suf.suf3.split(" ").join("|")+")$");e.word=e.word.replace(c,"")}if(e.word==r&&e.word.length>=4){var l=new RegExp("("+e.suf.suf2.split(" ").join("|")+")$");e.word=e.word.replace(l,"")}return r!=e.word&&(e.sufRemoved=!0),!1},e.wordCheck=function(){for(var 
r=(e.word,[e.removeSuf432,e.removeSuf1,e.removePre1]),t=0,c=!1;e.word.length>=7&&!e.result&&t=f.limit)return;f.cursor++}for(;!f.out_grouping(w,97,248);){if(f.cursor>=f.limit)return;f.cursor++}d=f.cursor,d=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(c,32),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del();break;case 2:f.in_grouping_b(p,97,229)&&f.slice_del()}}function t(){var e,r=f.limit-f.cursor;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.find_among_b(l,4)?(f.bra=f.cursor,f.limit_backward=e,f.cursor=f.limit-r,f.cursor>f.limit_backward&&(f.cursor--,f.bra=f.cursor,f.slice_del())):f.limit_backward=e)}function s(){var e,r,i,n=f.limit-f.cursor;if(f.ket=f.cursor,f.eq_s_b(2,"st")&&(f.bra=f.cursor,f.eq_s_b(2,"ig")&&f.slice_del()),f.cursor=f.limit-n,f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(m,5),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del(),i=f.limit-f.cursor,t(),f.cursor=f.limit-i;break;case 2:f.slice_from("løs")}}function o(){var e;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.out_grouping_b(w,97,248)?(f.bra=f.cursor,u=f.slice_to(u),f.limit_backward=e,f.eq_v_b(u)&&f.slice_del()):f.limit_backward=e)}var a,d,u,c=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],l=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],w=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],p=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],f=new i;this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var r=f.cursor;return e(),f.limit_backward=r,f.cursor=f.limit,n(),f.cursor=f.limit,t(),f.cursor=f.limit,s(),f.cursor=f.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.de.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.de.min.js new file mode 100644 index 00000000..f3b5c108 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.de.min.js @@ -0,0 +1,18 @@ +/*! 
+ * Lunr languages, `German` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.de=function(){this.pipeline.reset(),this.pipeline.add(e.de.trimmer,e.de.stopWordFilter,e.de.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.de.stemmer))},e.de.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.de.trimmer=e.trimmerSupport.generateTrimmer(e.de.wordCharacters),e.Pipeline.registerFunction(e.de.trimmer,"trimmer-de"),e.de.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,n){return!(!v.eq_s(1,e)||(v.ket=v.cursor,!v.in_grouping(p,97,252)))&&(v.slice_from(r),v.cursor=n,!0)}function i(){for(var r,n,i,s,t=v.cursor;;)if(r=v.cursor,v.bra=r,v.eq_s(1,"ß"))v.ket=v.cursor,v.slice_from("ss");else{if(r>=v.limit)break;v.cursor=r+1}for(v.cursor=t;;)for(n=v.cursor;;){if(i=v.cursor,v.in_grouping(p,97,252)){if(s=v.cursor,v.bra=s,e("u","U",i))break;if(v.cursor=s,e("y","Y",i))break}if(i>=v.limit)return void(v.cursor=n);v.cursor=i+1}}function s(){for(;!v.in_grouping(p,97,252);){if(v.cursor>=v.limit)return!0;v.cursor++}for(;!v.out_grouping(p,97,252);){if(v.cursor>=v.limit)return!0;v.cursor++}return!1}function t(){m=v.limit,l=m;var e=v.cursor+3;0<=e&&e<=v.limit&&(d=e,s()||(m=v.cursor,m=v.limit)return;v.cursor++}}}function c(){return m<=v.cursor}function u(){return l<=v.cursor}function a(){var e,r,n,i,s=v.limit-v.cursor;if(v.ket=v.cursor,(e=v.find_among_b(w,7))&&(v.bra=v.cursor,c()))switch(e){case 1:v.slice_del();break;case 2:v.slice_del(),v.ket=v.cursor,v.eq_s_b(1,"s")&&(v.bra=v.cursor,v.eq_s_b(3,"nis")&&v.slice_del());break;case 3:v.in_grouping_b(g,98,116)&&v.slice_del()}if(v.cursor=v.limit-s,v.ket=v.cursor,(e=v.find_among_b(f,4))&&(v.bra=v.cursor,c()))switch(e){case 1:v.slice_del();break;case 2:if(v.in_grouping_b(k,98,116)){var t=v.cursor-3;v.limit_backward<=t&&t<=v.limit&&(v.cursor=t,v.slice_del())}}if(v.cursor=v.limit-s,v.ket=v.cursor,(e=v.find_among_b(_,8))&&(v.bra=v.cursor,u()))switch(e){case 1:v.slice_del(),v.ket=v.cursor,v.eq_s_b(2,"ig")&&(v.bra=v.cursor,r=v.limit-v.cursor,v.eq_s_b(1,"e")||(v.cursor=v.limit-r,u()&&v.slice_del()));break;case 2:n=v.limit-v.cursor,v.eq_s_b(1,"e")||(v.cursor=v.limit-n,v.slice_del());break;case 3:if(v.slice_del(),v.ket=v.cursor,i=v.limit-v.cursor,!v.eq_s_b(2,"er")&&(v.cursor=v.limit-i,!v.eq_s_b(2,"en")))break;v.bra=v.cursor,c()&&v.slice_del();break;case 4:v.slice_del(),v.ket=v.cursor,e=v.find_among_b(b,2),e&&(v.bra=v.cursor,u()&&1==e&&v.slice_del())}}var d,l,m,h=[new r("",-1,6),new r("U",0,2),new r("Y",0,1),new r("ä",0,3),new r("ö",0,4),new r("ü",0,5)],w=[new r("e",-1,2),new r("em",-1,1),new r("en",-1,2),new r("ern",-1,1),new r("er",-1,1),new r("s",-1,3),new r("es",5,2)],f=[new r("en",-1,1),new r("er",-1,1),new r("st",-1,2),new r("est",2,1)],b=[new 
r("ig",-1,1),new r("lich",-1,1)],_=[new r("end",-1,1),new r("ig",-1,2),new r("ung",-1,1),new r("lich",-1,3),new r("isch",-1,2),new r("ik",-1,2),new r("heit",-1,3),new r("keit",-1,4)],p=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32,8],g=[117,30,5],k=[117,30,4],v=new n;this.setCurrent=function(e){v.setCurrent(e)},this.getCurrent=function(){return v.getCurrent()},this.stem=function(){var e=v.cursor;return i(),v.cursor=e,t(),v.limit_backward=e,v.cursor=v.limit,a(),v.cursor=v.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.de.stemmer,"stemmer-de"),e.de.stopWordFilter=e.generateStopWordFilter("aber alle allem allen aller alles als also am an ander andere anderem anderen anderer anderes anderm andern anderr anders auch auf aus bei bin bis bist da damit dann das dasselbe dazu daß dein deine deinem deinen deiner deines dem demselben den denn denselben der derer derselbe derselben des desselben dessen dich die dies diese dieselbe dieselben diesem diesen dieser dieses dir doch dort du durch ein eine einem einen einer eines einig einige einigem einigen einiger einiges einmal er es etwas euch euer eure eurem euren eurer eures für gegen gewesen hab habe haben hat hatte hatten hier hin hinter ich ihm ihn ihnen ihr ihre ihrem ihren ihrer ihres im in indem ins ist jede jedem jeden jeder jedes jene jenem jenen jener jenes jetzt kann kein keine keinem keinen keiner keines können könnte machen man manche manchem manchen mancher manches mein meine meinem meinen meiner meines mich mir mit muss musste nach nicht nichts noch nun nur ob oder ohne sehr sein seine seinem seinen seiner seines selbst sich sie sind so solche solchem solchen solcher solches soll sollte sondern sonst um und uns unse unsem unsen unser unses unter viel vom von vor war waren warst was weg weil weiter welche welchem welchen welcher welches wenn werde werden wie wieder will wir wird wirst wo wollen wollte während würde würden zu zum zur zwar zwischen über".split(" ")),e.Pipeline.registerFunction(e.de.stopWordFilter,"stopWordFilter-de")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.du.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.du.min.js new file mode 100644 index 00000000..49a0f3f0 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.du.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Dutch` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");console.warn('[Lunr Languages] Please use the "nl" instead of the "du". 
The "nl" code is the standard code for Dutch language, and "du" will be removed in the next major versions.'),e.du=function(){this.pipeline.reset(),this.pipeline.add(e.du.trimmer,e.du.stopWordFilter,e.du.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.du.stemmer))},e.du.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.du.trimmer=e.trimmerSupport.generateTrimmer(e.du.wordCharacters),e.Pipeline.registerFunction(e.du.trimmer,"trimmer-du"),e.du.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){for(var e,r,i,o=C.cursor;;){if(C.bra=C.cursor,e=C.find_among(b,11))switch(C.ket=C.cursor,e){case 1:C.slice_from("a");continue;case 2:C.slice_from("e");continue;case 3:C.slice_from("i");continue;case 4:C.slice_from("o");continue;case 5:C.slice_from("u");continue;case 6:if(C.cursor>=C.limit)break;C.cursor++;continue}break}for(C.cursor=o,C.bra=o,C.eq_s(1,"y")?(C.ket=C.cursor,C.slice_from("Y")):C.cursor=o;;)if(r=C.cursor,C.in_grouping(q,97,232)){if(i=C.cursor,C.bra=i,C.eq_s(1,"i"))C.ket=C.cursor,C.in_grouping(q,97,232)&&(C.slice_from("I"),C.cursor=r);else if(C.cursor=i,C.eq_s(1,"y"))C.ket=C.cursor,C.slice_from("Y"),C.cursor=r;else if(n(r))break}else if(n(r))break}function n(e){return C.cursor=e,e>=C.limit||(C.cursor++,!1)}function o(){_=C.limit,f=_,t()||(_=C.cursor,_<3&&(_=3),t()||(f=C.cursor))}function t(){for(;!C.in_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}for(;!C.out_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}return!1}function s(){for(var e;;)if(C.bra=C.cursor,e=C.find_among(p,3))switch(C.ket=C.cursor,e){case 1:C.slice_from("y");break;case 2:C.slice_from("i");break;case 3:if(C.cursor>=C.limit)return;C.cursor++}}function u(){return _<=C.cursor}function c(){return f<=C.cursor}function a(){var e=C.limit-C.cursor;C.find_among_b(g,3)&&(C.cursor=C.limit-e,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del()))}function l(){var e;w=!1,C.ket=C.cursor,C.eq_s_b(1,"e")&&(C.bra=C.cursor,u()&&(e=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-e,C.slice_del(),w=!0,a())))}function m(){var e;u()&&(e=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-e,C.eq_s_b(3,"gem")||(C.cursor=C.limit-e,C.slice_del(),a())))}function d(){var e,r,i,n,o,t,s=C.limit-C.cursor;if(C.ket=C.cursor,e=C.find_among_b(h,5))switch(C.bra=C.cursor,e){case 1:u()&&C.slice_from("heid");break;case 2:m();break;case 3:u()&&C.out_grouping_b(z,97,232)&&C.slice_del()}if(C.cursor=C.limit-s,l(),C.cursor=C.limit-s,C.ket=C.cursor,C.eq_s_b(4,"heid")&&(C.bra=C.cursor,c()&&(r=C.limit-C.cursor,C.eq_s_b(1,"c")||(C.cursor=C.limit-r,C.slice_del(),C.ket=C.cursor,C.eq_s_b(2,"en")&&(C.bra=C.cursor,m())))),C.cursor=C.limit-s,C.ket=C.cursor,e=C.find_among_b(k,6))switch(C.bra=C.cursor,e){case 1:if(c()){if(C.slice_del(),i=C.limit-C.cursor,C.ket=C.cursor,C.eq_s_b(2,"ig")&&(C.bra=C.cursor,c()&&(n=C.limit-C.cursor,!C.eq_s_b(1,"e")))){C.cursor=C.limit-n,C.slice_del();break}C.cursor=C.limit-i,a()}break;case 2:c()&&(o=C.limit-C.cursor,C.eq_s_b(1,"e")||(C.cursor=C.limit-o,C.slice_del()));break;case 3:c()&&(C.slice_del(),l());break;case 4:c()&&C.slice_del();break;case 5:c()&&w&&C.slice_del()}C.cursor=C.limit-s,C.out_grouping_b(j,73,232)&&(t=C.limit-C.cursor,C.find_among_b(v,4)&&C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-t,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del())))}var f,_,w,b=[new r("",-1,6),new 
r("á",0,1),new r("ä",0,1),new r("é",0,2),new r("ë",0,2),new r("í",0,3),new r("ï",0,3),new r("ó",0,4),new r("ö",0,4),new r("ú",0,5),new r("ü",0,5)],p=[new r("",-1,3),new r("I",0,2),new r("Y",0,1)],g=[new r("dd",-1,-1),new r("kk",-1,-1),new r("tt",-1,-1)],h=[new r("ene",-1,2),new r("se",-1,3),new r("en",-1,2),new r("heden",2,1),new r("s",-1,3)],k=[new r("end",-1,1),new r("ig",-1,2),new r("ing",-1,1),new r("lijk",-1,3),new r("baar",-1,4),new r("bar",-1,5)],v=[new r("aa",-1,-1),new r("ee",-1,-1),new r("oo",-1,-1),new r("uu",-1,-1)],q=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],j=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],z=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],C=new i;this.setCurrent=function(e){C.setCurrent(e)},this.getCurrent=function(){return C.getCurrent()},this.stem=function(){var r=C.cursor;return e(),C.cursor=r,o(),C.limit_backward=r,C.cursor=C.limit,d(),C.cursor=C.limit_backward,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.du.stemmer,"stemmer-du"),e.du.stopWordFilter=e.generateStopWordFilter(" aan al alles als altijd andere ben bij daar dan dat de der deze die dit doch doen door dus een eens en er ge geen geweest haar had heb hebben heeft hem het hier hij hoe hun iemand iets ik in is ja je kan kon kunnen maar me meer men met mij mijn moet na naar niet niets nog nu of om omdat onder ons ook op over reeds te tegen toch toen tot u uit uw van veel voor want waren was wat werd wezen wie wil worden wordt zal ze zelf zich zij zijn zo zonder zou".split(" ")),e.Pipeline.registerFunction(e.du.stopWordFilter,"stopWordFilter-du")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.es.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.es.min.js new file mode 100644 index 00000000..2989d342 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.es.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Spanish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,s){"function"==typeof define&&define.amd?define(s):"object"==typeof exports?module.exports=s():s()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.es=function(){this.pipeline.reset(),this.pipeline.add(e.es.trimmer,e.es.stopWordFilter,e.es.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.es.stemmer))},e.es.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.es.trimmer=e.trimmerSupport.generateTrimmer(e.es.wordCharacters),e.Pipeline.registerFunction(e.es.trimmer,"trimmer-es"),e.es.stemmer=function(){var s=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,n=new function(){function e(){if(A.out_grouping(x,97,252)){for(;!A.in_grouping(x,97,252);){if(A.cursor>=A.limit)return!0;A.cursor++}return!1}return!0}function n(){if(A.in_grouping(x,97,252)){var s=A.cursor;if(e()){if(A.cursor=s,!A.in_grouping(x,97,252))return!0;for(;!A.out_grouping(x,97,252);){if(A.cursor>=A.limit)return!0;A.cursor++}}return!1}return!0}function i(){var s,r=A.cursor;if(n()){if(A.cursor=r,!A.out_grouping(x,97,252))return;if(s=A.cursor,e()){if(A.cursor=s,!A.in_grouping(x,97,252)||A.cursor>=A.limit)return;A.cursor++}}g=A.cursor}function a(){for(;!A.in_grouping(x,97,252);){if(A.cursor>=A.limit)return!1;A.cursor++}for(;!A.out_grouping(x,97,252);){if(A.cursor>=A.limit)return!1;A.cursor++}return!0}function t(){var e=A.cursor;g=A.limit,p=g,v=g,i(),A.cursor=e,a()&&(p=A.cursor,a()&&(v=A.cursor))}function o(){for(var e;;){if(A.bra=A.cursor,e=A.find_among(k,6))switch(A.ket=A.cursor,e){case 1:A.slice_from("a");continue;case 2:A.slice_from("e");continue;case 3:A.slice_from("i");continue;case 4:A.slice_from("o");continue;case 5:A.slice_from("u");continue;case 6:if(A.cursor>=A.limit)break;A.cursor++;continue}break}}function u(){return g<=A.cursor}function w(){return p<=A.cursor}function c(){return v<=A.cursor}function m(){var e;if(A.ket=A.cursor,A.find_among_b(y,13)&&(A.bra=A.cursor,(e=A.find_among_b(q,11))&&u()))switch(e){case 1:A.bra=A.cursor,A.slice_from("iendo");break;case 2:A.bra=A.cursor,A.slice_from("ando");break;case 3:A.bra=A.cursor,A.slice_from("ar");break;case 4:A.bra=A.cursor,A.slice_from("er");break;case 5:A.bra=A.cursor,A.slice_from("ir");break;case 6:A.slice_del();break;case 7:A.eq_s_b(1,"u")&&A.slice_del()}}function l(e,s){if(!c())return!0;A.slice_del(),A.ket=A.cursor;var r=A.find_among_b(e,s);return r&&(A.bra=A.cursor,1==r&&c()&&A.slice_del()),!1}function d(e){return!c()||(A.slice_del(),A.ket=A.cursor,A.eq_s_b(2,e)&&(A.bra=A.cursor,c()&&A.slice_del()),!1)}function b(){var e;if(A.ket=A.cursor,e=A.find_among_b(S,46)){switch(A.bra=A.cursor,e){case 1:if(!c())return!1;A.slice_del();break;case 2:if(d("ic"))return!1;break;case 3:if(!c())return!1;A.slice_from("log");break;case 4:if(!c())return!1;A.slice_from("u");break;case 5:if(!c())return!1;A.slice_from("ente");break;case 6:if(!w())return!1;A.slice_del(),A.ket=A.cursor,e=A.find_among_b(C,4),e&&(A.bra=A.cursor,c()&&(A.slice_del(),1==e&&(A.ket=A.cursor,A.eq_s_b(2,"at")&&(A.bra=A.cursor,c()&&A.slice_del()))));break;case 7:if(l(P,3))return!1;break;case 8:if(l(F,3))return!1;break;case 9:if(d("at"))return!1}return!0}return!1}function f(){var e,s;if(A.cursor>=g&&(s=A.limit_backward,A.limit_backward=g,A.ket=A.cursor,e=A.find_among_b(W,12),A.limit_backward=s,e)){if(A.bra=A.cursor,1==e){if(!A.eq_s_b(1,"u"))return!1;A.slice_del()}return!0}return!1}function _(){var e,s,r,n;if(A.cursor>=g&&(s=A.limit_backward,A.limit_backward=g,A.ket=A.cursor,e=A.find_among_b(L,96),A.limit_backward=s,e))switch(A.bra=A.cursor,e){case 
1:r=A.limit-A.cursor,A.eq_s_b(1,"u")?(n=A.limit-A.cursor,A.eq_s_b(1,"g")?A.cursor=A.limit-n:A.cursor=A.limit-r):A.cursor=A.limit-r,A.bra=A.cursor;case 2:A.slice_del()}}function h(){var e,s;if(A.ket=A.cursor,e=A.find_among_b(z,8))switch(A.bra=A.cursor,e){case 1:u()&&A.slice_del();break;case 2:u()&&(A.slice_del(),A.ket=A.cursor,A.eq_s_b(1,"u")&&(A.bra=A.cursor,s=A.limit-A.cursor,A.eq_s_b(1,"g")&&(A.cursor=A.limit-s,u()&&A.slice_del())))}}var v,p,g,k=[new s("",-1,6),new s("á",0,1),new s("é",0,2),new s("í",0,3),new s("ó",0,4),new s("ú",0,5)],y=[new s("la",-1,-1),new s("sela",0,-1),new s("le",-1,-1),new s("me",-1,-1),new s("se",-1,-1),new s("lo",-1,-1),new s("selo",5,-1),new s("las",-1,-1),new s("selas",7,-1),new s("les",-1,-1),new s("los",-1,-1),new s("selos",10,-1),new s("nos",-1,-1)],q=[new s("ando",-1,6),new s("iendo",-1,6),new s("yendo",-1,7),new s("ándo",-1,2),new s("iéndo",-1,1),new s("ar",-1,6),new s("er",-1,6),new s("ir",-1,6),new s("ár",-1,3),new s("ér",-1,4),new s("ír",-1,5)],C=[new s("ic",-1,-1),new s("ad",-1,-1),new s("os",-1,-1),new s("iv",-1,1)],P=[new s("able",-1,1),new s("ible",-1,1),new s("ante",-1,1)],F=[new s("ic",-1,1),new s("abil",-1,1),new s("iv",-1,1)],S=[new s("ica",-1,1),new s("ancia",-1,2),new s("encia",-1,5),new s("adora",-1,2),new s("osa",-1,1),new s("ista",-1,1),new s("iva",-1,9),new s("anza",-1,1),new s("logía",-1,3),new s("idad",-1,8),new s("able",-1,1),new s("ible",-1,1),new s("ante",-1,2),new s("mente",-1,7),new s("amente",13,6),new s("ación",-1,2),new s("ución",-1,4),new s("ico",-1,1),new s("ismo",-1,1),new s("oso",-1,1),new s("amiento",-1,1),new s("imiento",-1,1),new s("ivo",-1,9),new s("ador",-1,2),new s("icas",-1,1),new s("ancias",-1,2),new s("encias",-1,5),new s("adoras",-1,2),new s("osas",-1,1),new s("istas",-1,1),new s("ivas",-1,9),new s("anzas",-1,1),new s("logías",-1,3),new s("idades",-1,8),new s("ables",-1,1),new s("ibles",-1,1),new s("aciones",-1,2),new s("uciones",-1,4),new s("adores",-1,2),new s("antes",-1,2),new s("icos",-1,1),new s("ismos",-1,1),new s("osos",-1,1),new s("amientos",-1,1),new s("imientos",-1,1),new s("ivos",-1,9)],W=[new s("ya",-1,1),new s("ye",-1,1),new s("yan",-1,1),new s("yen",-1,1),new s("yeron",-1,1),new s("yendo",-1,1),new s("yo",-1,1),new s("yas",-1,1),new s("yes",-1,1),new s("yais",-1,1),new s("yamos",-1,1),new s("yó",-1,1)],L=[new s("aba",-1,2),new s("ada",-1,2),new s("ida",-1,2),new s("ara",-1,2),new s("iera",-1,2),new s("ía",-1,2),new s("aría",5,2),new s("ería",5,2),new s("iría",5,2),new s("ad",-1,2),new s("ed",-1,2),new s("id",-1,2),new s("ase",-1,2),new s("iese",-1,2),new s("aste",-1,2),new s("iste",-1,2),new s("an",-1,2),new s("aban",16,2),new s("aran",16,2),new s("ieran",16,2),new s("ían",16,2),new s("arían",20,2),new s("erían",20,2),new s("irían",20,2),new s("en",-1,1),new s("asen",24,2),new s("iesen",24,2),new s("aron",-1,2),new s("ieron",-1,2),new s("arán",-1,2),new s("erán",-1,2),new s("irán",-1,2),new s("ado",-1,2),new s("ido",-1,2),new s("ando",-1,2),new s("iendo",-1,2),new s("ar",-1,2),new s("er",-1,2),new s("ir",-1,2),new s("as",-1,2),new s("abas",39,2),new s("adas",39,2),new s("idas",39,2),new s("aras",39,2),new s("ieras",39,2),new s("ías",39,2),new s("arías",45,2),new s("erías",45,2),new s("irías",45,2),new s("es",-1,1),new s("ases",49,2),new s("ieses",49,2),new s("abais",-1,2),new s("arais",-1,2),new s("ierais",-1,2),new s("íais",-1,2),new s("aríais",55,2),new s("eríais",55,2),new s("iríais",55,2),new s("aseis",-1,2),new s("ieseis",-1,2),new s("asteis",-1,2),new s("isteis",-1,2),new s("áis",-1,2),new 
s("éis",-1,1),new s("aréis",64,2),new s("eréis",64,2),new s("iréis",64,2),new s("ados",-1,2),new s("idos",-1,2),new s("amos",-1,2),new s("ábamos",70,2),new s("áramos",70,2),new s("iéramos",70,2),new s("íamos",70,2),new s("aríamos",74,2),new s("eríamos",74,2),new s("iríamos",74,2),new s("emos",-1,1),new s("aremos",78,2),new s("eremos",78,2),new s("iremos",78,2),new s("ásemos",78,2),new s("iésemos",78,2),new s("imos",-1,2),new s("arás",-1,2),new s("erás",-1,2),new s("irás",-1,2),new s("ís",-1,2),new s("ará",-1,2),new s("erá",-1,2),new s("irá",-1,2),new s("aré",-1,2),new s("eré",-1,2),new s("iré",-1,2),new s("ió",-1,2)],z=[new s("a",-1,1),new s("e",-1,2),new s("o",-1,1),new s("os",-1,1),new s("á",-1,1),new s("é",-1,2),new s("í",-1,1),new s("ó",-1,1)],x=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,10],A=new r;this.setCurrent=function(e){A.setCurrent(e)},this.getCurrent=function(){return A.getCurrent()},this.stem=function(){var e=A.cursor;return t(),A.limit_backward=e,A.cursor=A.limit,m(),A.cursor=A.limit,b()||(A.cursor=A.limit,f()||(A.cursor=A.limit,_())),A.cursor=A.limit,h(),A.cursor=A.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.es.stemmer,"stemmer-es"),e.es.stopWordFilter=e.generateStopWordFilter("a al algo algunas algunos ante antes como con contra cual cuando de del desde donde durante e el ella ellas ellos en entre era erais eran eras eres es esa esas ese eso esos esta estaba estabais estaban estabas estad estada estadas estado estados estamos estando estar estaremos estará estarán estarás estaré estaréis estaría estaríais estaríamos estarían estarías estas este estemos esto estos estoy estuve estuviera estuvierais estuvieran estuvieras estuvieron estuviese estuvieseis estuviesen estuvieses estuvimos estuviste estuvisteis estuviéramos estuviésemos estuvo está estábamos estáis están estás esté estéis estén estés fue fuera fuerais fueran fueras fueron fuese fueseis fuesen fueses fui fuimos fuiste fuisteis fuéramos fuésemos ha habida habidas habido habidos habiendo habremos habrá habrán habrás habré habréis habría habríais habríamos habrían habrías habéis había habíais habíamos habían habías han has hasta hay haya hayamos hayan hayas hayáis he hemos hube hubiera hubierais hubieran hubieras hubieron hubiese hubieseis hubiesen hubieses hubimos hubiste hubisteis hubiéramos hubiésemos hubo la las le les lo los me mi mis mucho muchos muy más mí mía mías mío míos nada ni no nos nosotras nosotros nuestra nuestras nuestro nuestros o os otra otras otro otros para pero poco por porque que quien quienes qué se sea seamos sean seas seremos será serán serás seré seréis sería seríais seríamos serían serías seáis sido siendo sin sobre sois somos son soy su sus suya suyas suyo suyos sí también tanto te tendremos tendrá tendrán tendrás tendré tendréis tendría tendríais tendríamos tendrían tendrías tened tenemos tenga tengamos tengan tengas tengo tengáis tenida tenidas tenido tenidos teniendo tenéis tenía teníais teníamos tenían tenías ti tiene tienen tienes todo todos tu tus tuve tuviera tuvierais tuvieran tuvieras tuvieron tuviese tuvieseis tuviesen tuvieses tuvimos tuviste tuvisteis tuviéramos tuviésemos tuvo tuya tuyas tuyo tuyos tú un una uno unos vosotras vosotros vuestra vuestras vuestro vuestros y ya yo él éramos".split(" ")),e.Pipeline.registerFunction(e.es.stopWordFilter,"stopWordFilter-es")}}); \ No newline at end of file 
diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.fi.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.fi.min.js new file mode 100644 index 00000000..29f5dfce --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.fi.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Finnish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(i,e){"function"==typeof define&&define.amd?define(e):"object"==typeof exports?module.exports=e():e()(i.lunr)}(this,function(){return function(i){if(void 0===i)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===i.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");i.fi=function(){this.pipeline.reset(),this.pipeline.add(i.fi.trimmer,i.fi.stopWordFilter,i.fi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(i.fi.stemmer))},i.fi.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",i.fi.trimmer=i.trimmerSupport.generateTrimmer(i.fi.wordCharacters),i.Pipeline.registerFunction(i.fi.trimmer,"trimmer-fi"),i.fi.stemmer=function(){var e=i.stemmerSupport.Among,r=i.stemmerSupport.SnowballProgram,n=new function(){function i(){f=A.limit,d=f,n()||(f=A.cursor,n()||(d=A.cursor))}function n(){for(var i;;){if(i=A.cursor,A.in_grouping(W,97,246))break;if(A.cursor=i,i>=A.limit)return!0;A.cursor++}for(A.cursor=i;!A.out_grouping(W,97,246);){if(A.cursor>=A.limit)return!0;A.cursor++}return!1}function t(){return d<=A.cursor}function s(){var i,e;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(h,10)){switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:if(!A.in_grouping_b(x,97,246))return;break;case 2:if(!t())return}A.slice_del()}else A.limit_backward=e}function o(){var i,e,r;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(v,9))switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:r=A.limit-A.cursor,A.eq_s_b(1,"k")||(A.cursor=A.limit-r,A.slice_del());break;case 2:A.slice_del(),A.ket=A.cursor,A.eq_s_b(3,"kse")&&(A.bra=A.cursor,A.slice_from("ksi"));break;case 3:A.slice_del();break;case 4:A.find_among_b(p,6)&&A.slice_del();break;case 5:A.find_among_b(g,6)&&A.slice_del();break;case 6:A.find_among_b(j,2)&&A.slice_del()}else A.limit_backward=e}function l(){return A.find_among_b(q,7)}function a(){return A.eq_s_b(1,"i")&&A.in_grouping_b(L,97,246)}function u(){var i,e,r;if(A.cursor>=f)if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,i=A.find_among_b(C,30)){switch(A.bra=A.cursor,A.limit_backward=e,i){case 1:if(!A.eq_s_b(1,"a"))return;break;case 2:case 9:if(!A.eq_s_b(1,"e"))return;break;case 3:if(!A.eq_s_b(1,"i"))return;break;case 4:if(!A.eq_s_b(1,"o"))return;break;case 5:if(!A.eq_s_b(1,"ä"))return;break;case 6:if(!A.eq_s_b(1,"ö"))return;break;case 7:if(r=A.limit-A.cursor,!l()&&(A.cursor=A.limit-r,!A.eq_s_b(2,"ie"))){A.cursor=A.limit-r;break}if(A.cursor=A.limit-r,A.cursor<=A.limit_backward){A.cursor=A.limit-r;break}A.cursor--,A.bra=A.cursor;break;case 8:if(!A.in_grouping_b(W,97,246)||!A.out_grouping_b(W,97,246))return}A.slice_del(),k=!0}else A.limit_backward=e}function c(){var 
i,e,r;if(A.cursor>=d)if(e=A.limit_backward,A.limit_backward=d,A.ket=A.cursor,i=A.find_among_b(P,14)){if(A.bra=A.cursor,A.limit_backward=e,1==i){if(r=A.limit-A.cursor,A.eq_s_b(2,"po"))return;A.cursor=A.limit-r}A.slice_del()}else A.limit_backward=e}function m(){var i;A.cursor>=f&&(i=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,A.find_among_b(F,2)?(A.bra=A.cursor,A.limit_backward=i,A.slice_del()):A.limit_backward=i)}function w(){var i,e,r,n,t,s;if(A.cursor>=f){if(e=A.limit_backward,A.limit_backward=f,A.ket=A.cursor,A.eq_s_b(1,"t")&&(A.bra=A.cursor,r=A.limit-A.cursor,A.in_grouping_b(W,97,246)&&(A.cursor=A.limit-r,A.slice_del(),A.limit_backward=e,n=A.limit-A.cursor,A.cursor>=d&&(A.cursor=d,t=A.limit_backward,A.limit_backward=A.cursor,A.cursor=A.limit-n,A.ket=A.cursor,i=A.find_among_b(S,2))))){if(A.bra=A.cursor,A.limit_backward=t,1==i){if(s=A.limit-A.cursor,A.eq_s_b(2,"po"))return;A.cursor=A.limit-s}return void A.slice_del()}A.limit_backward=e}}function _(){var i,e,r,n;if(A.cursor>=f){for(i=A.limit_backward,A.limit_backward=f,e=A.limit-A.cursor,l()&&(A.cursor=A.limit-e,A.ket=A.cursor,A.cursor>A.limit_backward&&(A.cursor--,A.bra=A.cursor,A.slice_del())),A.cursor=A.limit-e,A.ket=A.cursor,A.in_grouping_b(y,97,228)&&(A.bra=A.cursor,A.out_grouping_b(W,97,246)&&A.slice_del()),A.cursor=A.limit-e,A.ket=A.cursor,A.eq_s_b(1,"j")&&(A.bra=A.cursor,r=A.limit-A.cursor,A.eq_s_b(1,"o")?A.slice_del():(A.cursor=A.limit-r,A.eq_s_b(1,"u")&&A.slice_del())),A.cursor=A.limit-e,A.ket=A.cursor,A.eq_s_b(1,"o")&&(A.bra=A.cursor,A.eq_s_b(1,"j")&&A.slice_del()),A.cursor=A.limit-e,A.limit_backward=i;;){if(n=A.limit-A.cursor,A.out_grouping_b(W,97,246)){A.cursor=A.limit-n;break}if(A.cursor=A.limit-n,A.cursor<=A.limit_backward)return;A.cursor--}A.ket=A.cursor,A.cursor>A.limit_backward&&(A.cursor--,A.bra=A.cursor,b=A.slice_to(),A.eq_v_b(b)&&A.slice_del())}}var k,b,d,f,h=[new e("pa",-1,1),new e("sti",-1,2),new e("kaan",-1,1),new e("han",-1,1),new e("kin",-1,1),new e("hän",-1,1),new e("kään",-1,1),new e("ko",-1,1),new e("pä",-1,1),new e("kö",-1,1)],p=[new e("lla",-1,-1),new e("na",-1,-1),new e("ssa",-1,-1),new e("ta",-1,-1),new e("lta",3,-1),new e("sta",3,-1)],g=[new e("llä",-1,-1),new e("nä",-1,-1),new e("ssä",-1,-1),new e("tä",-1,-1),new e("ltä",3,-1),new e("stä",3,-1)],j=[new e("lle",-1,-1),new e("ine",-1,-1)],v=[new e("nsa",-1,3),new e("mme",-1,3),new e("nne",-1,3),new e("ni",-1,2),new e("si",-1,1),new e("an",-1,4),new e("en",-1,6),new e("än",-1,5),new e("nsä",-1,3)],q=[new e("aa",-1,-1),new e("ee",-1,-1),new e("ii",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1),new e("ää",-1,-1),new e("öö",-1,-1)],C=[new e("a",-1,8),new e("lla",0,-1),new e("na",0,-1),new e("ssa",0,-1),new e("ta",0,-1),new e("lta",4,-1),new e("sta",4,-1),new e("tta",4,9),new e("lle",-1,-1),new e("ine",-1,-1),new e("ksi",-1,-1),new e("n",-1,7),new e("han",11,1),new e("den",11,-1,a),new e("seen",11,-1,l),new e("hen",11,2),new e("tten",11,-1,a),new e("hin",11,3),new e("siin",11,-1,a),new e("hon",11,4),new e("hän",11,5),new e("hön",11,6),new e("ä",-1,8),new e("llä",22,-1),new e("nä",22,-1),new e("ssä",22,-1),new e("tä",22,-1),new e("ltä",26,-1),new e("stä",26,-1),new e("ttä",26,9)],P=[new e("eja",-1,-1),new e("mma",-1,1),new e("imma",1,-1),new e("mpa",-1,1),new e("impa",3,-1),new e("mmi",-1,1),new e("immi",5,-1),new e("mpi",-1,1),new e("impi",7,-1),new e("ejä",-1,-1),new e("mmä",-1,1),new e("immä",10,-1),new e("mpä",-1,1),new e("impä",12,-1)],F=[new e("i",-1,-1),new e("j",-1,-1)],S=[new e("mma",-1,1),new 
e("imma",0,-1)],y=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8],W=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],L=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],x=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],A=new r;this.setCurrent=function(i){A.setCurrent(i)},this.getCurrent=function(){return A.getCurrent()},this.stem=function(){var e=A.cursor;return i(),k=!1,A.limit_backward=e,A.cursor=A.limit,s(),A.cursor=A.limit,o(),A.cursor=A.limit,u(),A.cursor=A.limit,c(),A.cursor=A.limit,k?(m(),A.cursor=A.limit):(A.cursor=A.limit,w(),A.cursor=A.limit),_(),!0}};return function(i){return"function"==typeof i.update?i.update(function(i){return n.setCurrent(i),n.stem(),n.getCurrent()}):(n.setCurrent(i),n.stem(),n.getCurrent())}}(),i.Pipeline.registerFunction(i.fi.stemmer,"stemmer-fi"),i.fi.stopWordFilter=i.generateStopWordFilter("ei eivät emme en et ette että he heidän heidät heihin heille heillä heiltä heissä heistä heitä hän häneen hänelle hänellä häneltä hänen hänessä hänestä hänet häntä itse ja johon joiden joihin joiksi joilla joille joilta joina joissa joista joita joka joksi jolla jolle jolta jona jonka jos jossa josta jota jotka kanssa keiden keihin keiksi keille keillä keiltä keinä keissä keistä keitä keneen keneksi kenelle kenellä keneltä kenen kenenä kenessä kenestä kenet ketkä ketkä ketä koska kuin kuka kun me meidän meidät meihin meille meillä meiltä meissä meistä meitä mihin miksi mikä mille millä miltä minkä minkä minua minulla minulle minulta minun minussa minusta minut minuun minä minä missä mistä mitkä mitä mukaan mutta ne niiden niihin niiksi niille niillä niiltä niin niin niinä niissä niistä niitä noiden noihin noiksi noilla noille noilta noin noina noissa noista noita nuo nyt näiden näihin näiksi näille näillä näiltä näinä näissä näistä näitä nämä ole olemme olen olet olette oli olimme olin olisi olisimme olisin olisit olisitte olisivat olit olitte olivat olla olleet ollut on ovat poikki se sekä sen siihen siinä siitä siksi sille sillä sillä siltä sinua sinulla sinulle sinulta sinun sinussa sinusta sinut sinuun sinä sinä sitä tai te teidän teidät teihin teille teillä teiltä teissä teistä teitä tuo tuohon tuoksi tuolla tuolle tuolta tuon tuona tuossa tuosta tuota tähän täksi tälle tällä tältä tämä tämän tänä tässä tästä tätä vaan vai vaikka yli".split(" ")),i.Pipeline.registerFunction(i.fi.stopWordFilter,"stopWordFilter-fi")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.fr.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.fr.min.js new file mode 100644 index 00000000..68cd0094 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.fr.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `French` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.fr=function(){this.pipeline.reset(),this.pipeline.add(e.fr.trimmer,e.fr.stopWordFilter,e.fr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.fr.stemmer))},e.fr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.fr.trimmer=e.trimmerSupport.generateTrimmer(e.fr.wordCharacters),e.Pipeline.registerFunction(e.fr.trimmer,"trimmer-fr"),e.fr.stemmer=function(){var r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,s){return!(!W.eq_s(1,e)||(W.ket=W.cursor,!W.in_grouping(F,97,251)))&&(W.slice_from(r),W.cursor=s,!0)}function i(e,r,s){return!!W.eq_s(1,e)&&(W.ket=W.cursor,W.slice_from(r),W.cursor=s,!0)}function n(){for(var r,s;;){if(r=W.cursor,W.in_grouping(F,97,251)){if(W.bra=W.cursor,s=W.cursor,e("u","U",r))continue;if(W.cursor=s,e("i","I",r))continue;if(W.cursor=s,i("y","Y",r))continue}if(W.cursor=r,W.bra=r,!e("y","Y",r)){if(W.cursor=r,W.eq_s(1,"q")&&(W.bra=W.cursor,i("u","U",r)))continue;if(W.cursor=r,r>=W.limit)return;W.cursor++}}}function t(){for(;!W.in_grouping(F,97,251);){if(W.cursor>=W.limit)return!0;W.cursor++}for(;!W.out_grouping(F,97,251);){if(W.cursor>=W.limit)return!0;W.cursor++}return!1}function u(){var e=W.cursor;if(q=W.limit,g=q,p=q,W.in_grouping(F,97,251)&&W.in_grouping(F,97,251)&&W.cursor=W.limit){W.cursor=q;break}W.cursor++}while(!W.in_grouping(F,97,251))}q=W.cursor,W.cursor=e,t()||(g=W.cursor,t()||(p=W.cursor))}function o(){for(var e,r;;){if(r=W.cursor,W.bra=r,!(e=W.find_among(h,4)))break;switch(W.ket=W.cursor,e){case 1:W.slice_from("i");break;case 2:W.slice_from("u");break;case 3:W.slice_from("y");break;case 4:if(W.cursor>=W.limit)return;W.cursor++}}}function c(){return q<=W.cursor}function a(){return g<=W.cursor}function l(){return p<=W.cursor}function w(){var e,r;if(W.ket=W.cursor,e=W.find_among_b(C,43)){switch(W.bra=W.cursor,e){case 1:if(!l())return!1;W.slice_del();break;case 2:if(!l())return!1;W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"ic")&&(W.bra=W.cursor,l()?W.slice_del():W.slice_from("iqU"));break;case 3:if(!l())return!1;W.slice_from("log");break;case 4:if(!l())return!1;W.slice_from("u");break;case 5:if(!l())return!1;W.slice_from("ent");break;case 6:if(!c())return!1;if(W.slice_del(),W.ket=W.cursor,e=W.find_among_b(z,6))switch(W.bra=W.cursor,e){case 1:l()&&(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"at")&&(W.bra=W.cursor,l()&&W.slice_del()));break;case 2:l()?W.slice_del():a()&&W.slice_from("eux");break;case 3:l()&&W.slice_del();break;case 4:c()&&W.slice_from("i")}break;case 7:if(!l())return!1;if(W.slice_del(),W.ket=W.cursor,e=W.find_among_b(y,3))switch(W.bra=W.cursor,e){case 1:l()?W.slice_del():W.slice_from("abl");break;case 2:l()?W.slice_del():W.slice_from("iqU");break;case 3:l()&&W.slice_del()}break;case 8:if(!l())return!1;if(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"at")&&(W.bra=W.cursor,l()&&(W.slice_del(),W.ket=W.cursor,W.eq_s_b(2,"ic")))){W.bra=W.cursor,l()?W.slice_del():W.slice_from("iqU");break}break;case 9:W.slice_from("eau");break;case 10:if(!a())return!1;W.slice_from("al");break;case 11:if(l())W.slice_del();else{if(!a())return!1;W.slice_from("eux")}break;case 12:if(!a()||!W.out_grouping_b(F,97,251))return!1;W.slice_del();break;case 13:return c()&&W.slice_from("ant"),!1;case 14:return c()&&W.slice_from("ent"),!1;case 15:return r=W.limit-W.cursor,W.in_grouping_b(F,97,251)&&c()&&(W.cursor=W.limit-r,W.slice_del()),!1}return!0}return!1}function f(){var 
e,r;if(W.cursor=q){if(s=W.limit_backward,W.limit_backward=q,W.ket=W.cursor,e=W.find_among_b(P,7))switch(W.bra=W.cursor,e){case 1:if(l()){if(i=W.limit-W.cursor,!W.eq_s_b(1,"s")&&(W.cursor=W.limit-i,!W.eq_s_b(1,"t")))break;W.slice_del()}break;case 2:W.slice_from("i");break;case 3:W.slice_del();break;case 4:W.eq_s_b(2,"gu")&&W.slice_del()}W.limit_backward=s}}function b(){var e=W.limit-W.cursor;W.find_among_b(U,5)&&(W.cursor=W.limit-e,W.ket=W.cursor,W.cursor>W.limit_backward&&(W.cursor--,W.bra=W.cursor,W.slice_del()))}function d(){for(var e,r=1;W.out_grouping_b(F,97,251);)r--;if(r<=0){if(W.ket=W.cursor,e=W.limit-W.cursor,!W.eq_s_b(1,"é")&&(W.cursor=W.limit-e,!W.eq_s_b(1,"è")))return;W.bra=W.cursor,W.slice_from("e")}}function k(){if(!w()&&(W.cursor=W.limit,!f()&&(W.cursor=W.limit,!m())))return W.cursor=W.limit,void _();W.cursor=W.limit,W.ket=W.cursor,W.eq_s_b(1,"Y")?(W.bra=W.cursor,W.slice_from("i")):(W.cursor=W.limit,W.eq_s_b(1,"ç")&&(W.bra=W.cursor,W.slice_from("c")))}var p,g,q,v=[new r("col",-1,-1),new r("par",-1,-1),new r("tap",-1,-1)],h=[new r("",-1,4),new r("I",0,1),new r("U",0,2),new r("Y",0,3)],z=[new r("iqU",-1,3),new r("abl",-1,3),new r("Ièr",-1,4),new r("ièr",-1,4),new r("eus",-1,2),new r("iv",-1,1)],y=[new r("ic",-1,2),new r("abil",-1,1),new r("iv",-1,3)],C=[new r("iqUe",-1,1),new r("atrice",-1,2),new r("ance",-1,1),new r("ence",-1,5),new r("logie",-1,3),new r("able",-1,1),new r("isme",-1,1),new r("euse",-1,11),new r("iste",-1,1),new r("ive",-1,8),new r("if",-1,8),new r("usion",-1,4),new r("ation",-1,2),new r("ution",-1,4),new r("ateur",-1,2),new r("iqUes",-1,1),new r("atrices",-1,2),new r("ances",-1,1),new r("ences",-1,5),new r("logies",-1,3),new r("ables",-1,1),new r("ismes",-1,1),new r("euses",-1,11),new r("istes",-1,1),new r("ives",-1,8),new r("ifs",-1,8),new r("usions",-1,4),new r("ations",-1,2),new r("utions",-1,4),new r("ateurs",-1,2),new r("ments",-1,15),new r("ements",30,6),new r("issements",31,12),new r("ités",-1,7),new r("ment",-1,15),new r("ement",34,6),new r("issement",35,12),new r("amment",34,13),new r("emment",34,14),new r("aux",-1,10),new r("eaux",39,9),new r("eux",-1,1),new r("ité",-1,7)],x=[new r("ira",-1,1),new r("ie",-1,1),new r("isse",-1,1),new r("issante",-1,1),new r("i",-1,1),new r("irai",4,1),new r("ir",-1,1),new r("iras",-1,1),new r("ies",-1,1),new r("îmes",-1,1),new r("isses",-1,1),new r("issantes",-1,1),new r("îtes",-1,1),new r("is",-1,1),new r("irais",13,1),new r("issais",13,1),new r("irions",-1,1),new r("issions",-1,1),new r("irons",-1,1),new r("issons",-1,1),new r("issants",-1,1),new r("it",-1,1),new r("irait",21,1),new r("issait",21,1),new r("issant",-1,1),new r("iraIent",-1,1),new r("issaIent",-1,1),new r("irent",-1,1),new r("issent",-1,1),new r("iront",-1,1),new r("ît",-1,1),new r("iriez",-1,1),new r("issiez",-1,1),new r("irez",-1,1),new r("issez",-1,1)],I=[new r("a",-1,3),new r("era",0,2),new r("asse",-1,3),new r("ante",-1,3),new r("ée",-1,2),new r("ai",-1,3),new r("erai",5,2),new r("er",-1,2),new r("as",-1,3),new r("eras",8,2),new r("âmes",-1,3),new r("asses",-1,3),new r("antes",-1,3),new r("âtes",-1,3),new r("ées",-1,2),new r("ais",-1,3),new r("erais",15,2),new r("ions",-1,1),new r("erions",17,2),new r("assions",17,3),new r("erons",-1,2),new r("ants",-1,3),new r("és",-1,2),new r("ait",-1,3),new r("erait",23,2),new r("ant",-1,3),new r("aIent",-1,3),new r("eraIent",26,2),new r("èrent",-1,2),new r("assent",-1,3),new r("eront",-1,2),new r("ât",-1,3),new r("ez",-1,2),new r("iez",32,2),new r("eriez",33,2),new r("assiez",33,3),new r("erez",32,2),new 
r("é",-1,2)],P=[new r("e",-1,3),new r("Ière",0,2),new r("ière",0,2),new r("ion",-1,1),new r("Ier",-1,2),new r("ier",-1,2),new r("ë",-1,4)],U=[new r("ell",-1,-1),new r("eill",-1,-1),new r("enn",-1,-1),new r("onn",-1,-1),new r("ett",-1,-1)],F=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],S=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],W=new s;this.setCurrent=function(e){W.setCurrent(e)},this.getCurrent=function(){return W.getCurrent()},this.stem=function(){var e=W.cursor;return n(),W.cursor=e,u(),W.limit_backward=e,W.cursor=W.limit,k(),W.cursor=W.limit,b(),W.cursor=W.limit,d(),W.cursor=W.limit_backward,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.fr.stemmer,"stemmer-fr"),e.fr.stopWordFilter=e.generateStopWordFilter("ai aie aient aies ait as au aura aurai auraient aurais aurait auras aurez auriez aurions aurons auront aux avaient avais avait avec avez aviez avions avons ayant ayez ayons c ce ceci celà ces cet cette d dans de des du elle en es est et eu eue eues eurent eus eusse eussent eusses eussiez eussions eut eux eûmes eût eûtes furent fus fusse fussent fusses fussiez fussions fut fûmes fût fûtes ici il ils j je l la le les leur leurs lui m ma mais me mes moi mon même n ne nos notre nous on ont ou par pas pour qu que quel quelle quelles quels qui s sa sans se sera serai seraient serais serait seras serez seriez serions serons seront ses soi soient sois soit sommes son sont soyez soyons suis sur t ta te tes toi ton tu un une vos votre vous y à étaient étais était étant étiez étions été étée étées étés êtes".split(" ")),e.Pipeline.registerFunction(e.fr.stopWordFilter,"stopWordFilter-fr")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.hi.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.hi.min.js new file mode 100644 index 00000000..7dbc4140 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.hi.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.hi=function(){this.pipeline.reset(),this.pipeline.add(e.hi.trimmer,e.hi.stopWordFilter,e.hi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hi.stemmer))},e.hi.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿa-zA-Za-zA-Z0-90-9",e.hi.trimmer=e.trimmerSupport.generateTrimmer(e.hi.wordCharacters),e.Pipeline.registerFunction(e.hi.trimmer,"trimmer-hi"),e.hi.stopWordFilter=e.generateStopWordFilter("अत अपना अपनी अपने अभी अंदर आदि आप इत्यादि इन इनका इन्हीं इन्हें इन्हों इस इसका इसकी इसके इसमें इसी इसे उन उनका उनकी उनके उनको उन्हीं उन्हें उन्हों उस उसके उसी उसे एक एवं एस ऐसे और कई कर करता करते करना करने करें कहते कहा का काफ़ी कि कितना किन्हें किन्हों किया किर किस किसी किसे की कुछ कुल के को कोई कौन कौनसा गया घर जब जहाँ जा जितना जिन जिन्हें जिन्हों जिस जिसे जीधर जैसा जैसे जो तक तब तरह तिन तिन्हें तिन्हों तिस तिसे तो था थी थे दबारा दिया दुसरा दूसरे दो द्वारा न नके नहीं ना निहायत नीचे ने पर पहले पूरा पे फिर बनी बही बहुत बाद बाला बिलकुल भी भीतर मगर मानो मे में यदि यह यहाँ यही या यिह ये रखें रहा रहे ऱ्वासा लिए लिये लेकिन व वग़ैरह वर्ग वह वहाँ वहीं वाले वुह वे वो सकता सकते सबसे सभी साथ साबुत साभ सारा से सो संग ही हुआ हुई हुए है हैं हो होता होती होते होना होने".split(" ")),e.hi.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.hi.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var t=i.toString().toLowerCase().replace(/^\s+/,"");return r.cut(t).split("|")},e.Pipeline.registerFunction(e.hi.stemmer,"stemmer-hi"),e.Pipeline.registerFunction(e.hi.stopWordFilter,"stopWordFilter-hi")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.hu.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.hu.min.js new file mode 100644 index 00000000..ed9d909f --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.hu.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Hungarian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.hu=function(){this.pipeline.reset(),this.pipeline.add(e.hu.trimmer,e.hu.stopWordFilter,e.hu.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hu.stemmer))},e.hu.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.hu.trimmer=e.trimmerSupport.generateTrimmer(e.hu.wordCharacters),e.Pipeline.registerFunction(e.hu.trimmer,"trimmer-hu"),e.hu.stemmer=function(){var n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,n=L.cursor;if(d=L.limit,L.in_grouping(W,97,252))for(;;){if(e=L.cursor,L.out_grouping(W,97,252))return L.cursor=e,L.find_among(g,8)||(L.cursor=e,e=L.limit)return void(d=e);L.cursor++}if(L.cursor=n,L.out_grouping(W,97,252)){for(;!L.in_grouping(W,97,252);){if(L.cursor>=L.limit)return;L.cursor++}d=L.cursor}}function i(){return d<=L.cursor}function a(){var e;if(L.ket=L.cursor,(e=L.find_among_b(h,2))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("a");break;case 2:L.slice_from("e")}}function t(){var e=L.limit-L.cursor;return!!L.find_among_b(p,23)&&(L.cursor=L.limit-e,!0)}function s(){if(L.cursor>L.limit_backward){L.cursor--,L.ket=L.cursor;var e=L.cursor-1;L.limit_backward<=e&&e<=L.limit&&(L.cursor=e,L.bra=e,L.slice_del())}}function c(){var e;if(L.ket=L.cursor,(e=L.find_among_b(_,2))&&(L.bra=L.cursor,i())){if((1==e||2==e)&&!t())return;L.slice_del(),s()}}function o(){L.ket=L.cursor,L.find_among_b(v,44)&&(L.bra=L.cursor,i()&&(L.slice_del(),a()))}function w(){var e;if(L.ket=L.cursor,(e=L.find_among_b(z,3))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("e");break;case 2:case 3:L.slice_from("a")}}function l(){var e;if(L.ket=L.cursor,(e=L.find_among_b(y,6))&&(L.bra=L.cursor,i()))switch(e){case 1:case 2:L.slice_del();break;case 3:L.slice_from("a");break;case 4:L.slice_from("e")}}function u(){var e;if(L.ket=L.cursor,(e=L.find_among_b(j,2))&&(L.bra=L.cursor,i())){if((1==e||2==e)&&!t())return;L.slice_del(),s()}}function m(){var e;if(L.ket=L.cursor,(e=L.find_among_b(C,7))&&(L.bra=L.cursor,i()))switch(e){case 1:L.slice_from("a");break;case 2:L.slice_from("e");break;case 3:case 4:case 5:case 6:case 7:L.slice_del()}}function k(){var e;if(L.ket=L.cursor,(e=L.find_among_b(P,12))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 7:case 9:L.slice_del();break;case 2:case 5:case 8:L.slice_from("e");break;case 3:case 6:L.slice_from("a")}}function f(){var e;if(L.ket=L.cursor,(e=L.find_among_b(F,31))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 7:case 8:case 9:case 12:case 13:case 16:case 17:case 18:L.slice_del();break;case 2:case 5:case 10:case 14:case 19:L.slice_from("a");break;case 3:case 6:case 11:case 15:case 20:L.slice_from("e")}}function b(){var e;if(L.ket=L.cursor,(e=L.find_among_b(S,42))&&(L.bra=L.cursor,i()))switch(e){case 1:case 4:case 5:case 6:case 9:case 10:case 11:case 14:case 15:case 16:case 17:case 20:case 21:case 24:case 25:case 26:case 29:L.slice_del();break;case 2:case 7:case 12:case 18:case 22:case 27:L.slice_from("a");break;case 3:case 8:case 13:case 19:case 23:case 28:L.slice_from("e")}}var d,g=[new n("cs",-1,-1),new n("dzs",-1,-1),new n("gy",-1,-1),new n("ly",-1,-1),new n("ny",-1,-1),new n("sz",-1,-1),new n("ty",-1,-1),new n("zs",-1,-1)],h=[new n("á",-1,1),new n("é",-1,2)],p=[new n("bb",-1,-1),new n("cc",-1,-1),new n("dd",-1,-1),new n("ff",-1,-1),new n("gg",-1,-1),new n("jj",-1,-1),new n("kk",-1,-1),new n("ll",-1,-1),new n("mm",-1,-1),new n("nn",-1,-1),new n("pp",-1,-1),new 
n("rr",-1,-1),new n("ccs",-1,-1),new n("ss",-1,-1),new n("zzs",-1,-1),new n("tt",-1,-1),new n("vv",-1,-1),new n("ggy",-1,-1),new n("lly",-1,-1),new n("nny",-1,-1),new n("tty",-1,-1),new n("ssz",-1,-1),new n("zz",-1,-1)],_=[new n("al",-1,1),new n("el",-1,2)],v=[new n("ba",-1,-1),new n("ra",-1,-1),new n("be",-1,-1),new n("re",-1,-1),new n("ig",-1,-1),new n("nak",-1,-1),new n("nek",-1,-1),new n("val",-1,-1),new n("vel",-1,-1),new n("ul",-1,-1),new n("nál",-1,-1),new n("nél",-1,-1),new n("ból",-1,-1),new n("ról",-1,-1),new n("tól",-1,-1),new n("bõl",-1,-1),new n("rõl",-1,-1),new n("tõl",-1,-1),new n("ül",-1,-1),new n("n",-1,-1),new n("an",19,-1),new n("ban",20,-1),new n("en",19,-1),new n("ben",22,-1),new n("képpen",22,-1),new n("on",19,-1),new n("ön",19,-1),new n("képp",-1,-1),new n("kor",-1,-1),new n("t",-1,-1),new n("at",29,-1),new n("et",29,-1),new n("ként",29,-1),new n("anként",32,-1),new n("enként",32,-1),new n("onként",32,-1),new n("ot",29,-1),new n("ért",29,-1),new n("öt",29,-1),new n("hez",-1,-1),new n("hoz",-1,-1),new n("höz",-1,-1),new n("vá",-1,-1),new n("vé",-1,-1)],z=[new n("án",-1,2),new n("én",-1,1),new n("ánként",-1,3)],y=[new n("stul",-1,2),new n("astul",0,1),new n("ástul",0,3),new n("stül",-1,2),new n("estül",3,1),new n("éstül",3,4)],j=[new n("á",-1,1),new n("é",-1,2)],C=[new n("k",-1,7),new n("ak",0,4),new n("ek",0,6),new n("ok",0,5),new n("ák",0,1),new n("ék",0,2),new n("ök",0,3)],P=[new n("éi",-1,7),new n("áéi",0,6),new n("ééi",0,5),new n("é",-1,9),new n("ké",3,4),new n("aké",4,1),new n("eké",4,1),new n("oké",4,1),new n("áké",4,3),new n("éké",4,2),new n("öké",4,1),new n("éé",3,8)],F=[new n("a",-1,18),new n("ja",0,17),new n("d",-1,16),new n("ad",2,13),new n("ed",2,13),new n("od",2,13),new n("ád",2,14),new n("éd",2,15),new n("öd",2,13),new n("e",-1,18),new n("je",9,17),new n("nk",-1,4),new n("unk",11,1),new n("ánk",11,2),new n("énk",11,3),new n("ünk",11,1),new n("uk",-1,8),new n("juk",16,7),new n("ájuk",17,5),new n("ük",-1,8),new n("jük",19,7),new n("éjük",20,6),new n("m",-1,12),new n("am",22,9),new n("em",22,9),new n("om",22,9),new n("ám",22,10),new n("ém",22,11),new n("o",-1,18),new n("á",-1,19),new n("é",-1,20)],S=[new n("id",-1,10),new n("aid",0,9),new n("jaid",1,6),new n("eid",0,9),new n("jeid",3,6),new n("áid",0,7),new n("éid",0,8),new n("i",-1,15),new n("ai",7,14),new n("jai",8,11),new n("ei",7,14),new n("jei",10,11),new n("ái",7,12),new n("éi",7,13),new n("itek",-1,24),new n("eitek",14,21),new n("jeitek",15,20),new n("éitek",14,23),new n("ik",-1,29),new n("aik",18,26),new n("jaik",19,25),new n("eik",18,26),new n("jeik",21,25),new n("áik",18,27),new n("éik",18,28),new n("ink",-1,20),new n("aink",25,17),new n("jaink",26,16),new n("eink",25,17),new n("jeink",28,16),new n("áink",25,18),new n("éink",25,19),new n("aitok",-1,21),new n("jaitok",32,20),new n("áitok",-1,22),new n("im",-1,5),new n("aim",35,4),new n("jaim",36,1),new n("eim",35,4),new n("jeim",38,1),new n("áim",35,2),new n("éim",35,3)],W=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,52,14],L=new r;this.setCurrent=function(e){L.setCurrent(e)},this.getCurrent=function(){return L.getCurrent()},this.stem=function(){var n=L.cursor;return e(),L.limit_backward=n,L.cursor=L.limit,c(),L.cursor=L.limit,o(),L.cursor=L.limit,w(),L.cursor=L.limit,l(),L.cursor=L.limit,u(),L.cursor=L.limit,k(),L.cursor=L.limit,f(),L.cursor=L.limit,b(),L.cursor=L.limit,m(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return 
i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.hu.stemmer,"stemmer-hu"),e.hu.stopWordFilter=e.generateStopWordFilter("a abban ahhoz ahogy ahol aki akik akkor alatt amely amelyek amelyekben amelyeket amelyet amelynek ami amikor amit amolyan amíg annak arra arról az azok azon azonban azt aztán azután azzal azért be belül benne bár cikk cikkek cikkeket csak de e ebben eddig egy egyes egyetlen egyik egyre egyéb egész ehhez ekkor el ellen elsõ elég elõ elõször elõtt emilyen ennek erre ez ezek ezen ezt ezzel ezért fel felé hanem hiszen hogy hogyan igen ill ill. illetve ilyen ilyenkor ismét ison itt jobban jó jól kell kellett keressünk keresztül ki kívül között közül legalább legyen lehet lehetett lenne lenni lesz lett maga magát majd majd meg mellett mely melyek mert mi mikor milyen minden mindenki mindent mindig mint mintha mit mivel miért most már más másik még míg nagy nagyobb nagyon ne nekem neki nem nincs néha néhány nélkül olyan ott pedig persze rá s saját sem semmi sok sokat sokkal szemben szerint szinte számára talán tehát teljes tovább továbbá több ugyanis utolsó után utána vagy vagyis vagyok valaki valami valamint való van vannak vele vissza viszont volna volt voltak voltam voltunk által általában át én éppen és így õ õk õket össze úgy új újabb újra".split(" ")),e.Pipeline.registerFunction(e.hu.stopWordFilter,"stopWordFilter-hu")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.it.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.it.min.js new file mode 100644 index 00000000..344b6a3c --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.it.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Italian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.it=function(){this.pipeline.reset(),this.pipeline.add(e.it.trimmer,e.it.stopWordFilter,e.it.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.it.stemmer))},e.it.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.it.trimmer=e.trimmerSupport.generateTrimmer(e.it.wordCharacters),e.Pipeline.registerFunction(e.it.trimmer,"trimmer-it"),e.it.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(e,r,n){return!(!x.eq_s(1,e)||(x.ket=x.cursor,!x.in_grouping(L,97,249)))&&(x.slice_from(r),x.cursor=n,!0)}function i(){for(var r,n,i,o,t=x.cursor;;){if(x.bra=x.cursor,r=x.find_among(h,7))switch(x.ket=x.cursor,r){case 1:x.slice_from("à");continue;case 2:x.slice_from("è");continue;case 3:x.slice_from("ì");continue;case 4:x.slice_from("ò");continue;case 5:x.slice_from("ù");continue;case 6:x.slice_from("qU");continue;case 7:if(x.cursor>=x.limit)break;x.cursor++;continue}break}for(x.cursor=t;;)for(n=x.cursor;;){if(i=x.cursor,x.in_grouping(L,97,249)){if(x.bra=x.cursor,o=x.cursor,e("u","U",i))break;if(x.cursor=o,e("i","I",i))break}if(x.cursor=i,x.cursor>=x.limit)return void(x.cursor=n);x.cursor++}}function o(e){if(x.cursor=e,!x.in_grouping(L,97,249))return!1;for(;!x.out_grouping(L,97,249);){if(x.cursor>=x.limit)return!1;x.cursor++}return!0}function t(){if(x.in_grouping(L,97,249)){var e=x.cursor;if(x.out_grouping(L,97,249)){for(;!x.in_grouping(L,97,249);){if(x.cursor>=x.limit)return o(e);x.cursor++}return!0}return o(e)}return!1}function s(){var e,r=x.cursor;if(!t()){if(x.cursor=r,!x.out_grouping(L,97,249))return;if(e=x.cursor,x.out_grouping(L,97,249)){for(;!x.in_grouping(L,97,249);){if(x.cursor>=x.limit)return x.cursor=e,void(x.in_grouping(L,97,249)&&x.cursor=x.limit)return;x.cursor++}k=x.cursor}function a(){for(;!x.in_grouping(L,97,249);){if(x.cursor>=x.limit)return!1;x.cursor++}for(;!x.out_grouping(L,97,249);){if(x.cursor>=x.limit)return!1;x.cursor++}return!0}function u(){var e=x.cursor;k=x.limit,p=k,g=k,s(),x.cursor=e,a()&&(p=x.cursor,a()&&(g=x.cursor))}function c(){for(var e;;){if(x.bra=x.cursor,!(e=x.find_among(q,3)))break;switch(x.ket=x.cursor,e){case 1:x.slice_from("i");break;case 2:x.slice_from("u");break;case 3:if(x.cursor>=x.limit)return;x.cursor++}}}function w(){return k<=x.cursor}function l(){return p<=x.cursor}function m(){return g<=x.cursor}function f(){var e;if(x.ket=x.cursor,x.find_among_b(C,37)&&(x.bra=x.cursor,(e=x.find_among_b(z,5))&&w()))switch(e){case 1:x.slice_del();break;case 2:x.slice_from("e")}}function v(){var e;if(x.ket=x.cursor,!(e=x.find_among_b(S,51)))return!1;switch(x.bra=x.cursor,e){case 1:if(!m())return!1;x.slice_del();break;case 2:if(!m())return!1;x.slice_del(),x.ket=x.cursor,x.eq_s_b(2,"ic")&&(x.bra=x.cursor,m()&&x.slice_del());break;case 3:if(!m())return!1;x.slice_from("log");break;case 4:if(!m())return!1;x.slice_from("u");break;case 5:if(!m())return!1;x.slice_from("ente");break;case 6:if(!w())return!1;x.slice_del();break;case 7:if(!l())return!1;x.slice_del(),x.ket=x.cursor,e=x.find_among_b(P,4),e&&(x.bra=x.cursor,m()&&(x.slice_del(),1==e&&(x.ket=x.cursor,x.eq_s_b(2,"at")&&(x.bra=x.cursor,m()&&x.slice_del()))));break;case 8:if(!m())return!1;x.slice_del(),x.ket=x.cursor,e=x.find_among_b(F,3),e&&(x.bra=x.cursor,1==e&&m()&&x.slice_del());break;case 
9:if(!m())return!1;x.slice_del(),x.ket=x.cursor,x.eq_s_b(2,"at")&&(x.bra=x.cursor,m()&&(x.slice_del(),x.ket=x.cursor,x.eq_s_b(2,"ic")&&(x.bra=x.cursor,m()&&x.slice_del())))}return!0}function b(){var e,r;x.cursor>=k&&(r=x.limit_backward,x.limit_backward=k,x.ket=x.cursor,e=x.find_among_b(W,87),e&&(x.bra=x.cursor,1==e&&x.slice_del()),x.limit_backward=r)}function d(){var e=x.limit-x.cursor;if(x.ket=x.cursor,x.in_grouping_b(y,97,242)&&(x.bra=x.cursor,w()&&(x.slice_del(),x.ket=x.cursor,x.eq_s_b(1,"i")&&(x.bra=x.cursor,w()))))return void x.slice_del();x.cursor=x.limit-e}function _(){d(),x.ket=x.cursor,x.eq_s_b(1,"h")&&(x.bra=x.cursor,x.in_grouping_b(U,99,103)&&w()&&x.slice_del())}var g,p,k,h=[new r("",-1,7),new r("qu",0,6),new r("á",0,1),new r("é",0,2),new r("í",0,3),new r("ó",0,4),new r("ú",0,5)],q=[new r("",-1,3),new r("I",0,1),new r("U",0,2)],C=[new r("la",-1,-1),new r("cela",0,-1),new r("gliela",0,-1),new r("mela",0,-1),new r("tela",0,-1),new r("vela",0,-1),new r("le",-1,-1),new r("cele",6,-1),new r("gliele",6,-1),new r("mele",6,-1),new r("tele",6,-1),new r("vele",6,-1),new r("ne",-1,-1),new r("cene",12,-1),new r("gliene",12,-1),new r("mene",12,-1),new r("sene",12,-1),new r("tene",12,-1),new r("vene",12,-1),new r("ci",-1,-1),new r("li",-1,-1),new r("celi",20,-1),new r("glieli",20,-1),new r("meli",20,-1),new r("teli",20,-1),new r("veli",20,-1),new r("gli",20,-1),new r("mi",-1,-1),new r("si",-1,-1),new r("ti",-1,-1),new r("vi",-1,-1),new r("lo",-1,-1),new r("celo",31,-1),new r("glielo",31,-1),new r("melo",31,-1),new r("telo",31,-1),new r("velo",31,-1)],z=[new r("ando",-1,1),new r("endo",-1,1),new r("ar",-1,2),new r("er",-1,2),new r("ir",-1,2)],P=[new r("ic",-1,-1),new r("abil",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],F=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],S=[new r("ica",-1,1),new r("logia",-1,3),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,9),new r("anza",-1,1),new r("enza",-1,5),new r("ice",-1,1),new r("atrice",7,1),new r("iche",-1,1),new r("logie",-1,3),new r("abile",-1,1),new r("ibile",-1,1),new r("usione",-1,4),new r("azione",-1,2),new r("uzione",-1,4),new r("atore",-1,2),new r("ose",-1,1),new r("ante",-1,1),new r("mente",-1,1),new r("amente",19,7),new r("iste",-1,1),new r("ive",-1,9),new r("anze",-1,1),new r("enze",-1,5),new r("ici",-1,1),new r("atrici",25,1),new r("ichi",-1,1),new r("abili",-1,1),new r("ibili",-1,1),new r("ismi",-1,1),new r("usioni",-1,4),new r("azioni",-1,2),new r("uzioni",-1,4),new r("atori",-1,2),new r("osi",-1,1),new r("anti",-1,1),new r("amenti",-1,6),new r("imenti",-1,6),new r("isti",-1,1),new r("ivi",-1,9),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,6),new r("imento",-1,6),new r("ivo",-1,9),new r("ità",-1,8),new r("istà",-1,1),new r("istè",-1,1),new r("istì",-1,1)],W=[new r("isca",-1,1),new r("enda",-1,1),new r("ata",-1,1),new r("ita",-1,1),new r("uta",-1,1),new r("ava",-1,1),new r("eva",-1,1),new r("iva",-1,1),new r("erebbe",-1,1),new r("irebbe",-1,1),new r("isce",-1,1),new r("ende",-1,1),new r("are",-1,1),new r("ere",-1,1),new r("ire",-1,1),new r("asse",-1,1),new r("ate",-1,1),new r("avate",16,1),new r("evate",16,1),new r("ivate",16,1),new r("ete",-1,1),new r("erete",20,1),new r("irete",20,1),new r("ite",-1,1),new r("ereste",-1,1),new r("ireste",-1,1),new r("ute",-1,1),new r("erai",-1,1),new r("irai",-1,1),new r("isci",-1,1),new r("endi",-1,1),new r("erei",-1,1),new r("irei",-1,1),new r("assi",-1,1),new r("ati",-1,1),new r("iti",-1,1),new r("eresti",-1,1),new r("iresti",-1,1),new r("uti",-1,1),new 
r("avi",-1,1),new r("evi",-1,1),new r("ivi",-1,1),new r("isco",-1,1),new r("ando",-1,1),new r("endo",-1,1),new r("Yamo",-1,1),new r("iamo",-1,1),new r("avamo",-1,1),new r("evamo",-1,1),new r("ivamo",-1,1),new r("eremo",-1,1),new r("iremo",-1,1),new r("assimo",-1,1),new r("ammo",-1,1),new r("emmo",-1,1),new r("eremmo",54,1),new r("iremmo",54,1),new r("immo",-1,1),new r("ano",-1,1),new r("iscano",58,1),new r("avano",58,1),new r("evano",58,1),new r("ivano",58,1),new r("eranno",-1,1),new r("iranno",-1,1),new r("ono",-1,1),new r("iscono",65,1),new r("arono",65,1),new r("erono",65,1),new r("irono",65,1),new r("erebbero",-1,1),new r("irebbero",-1,1),new r("assero",-1,1),new r("essero",-1,1),new r("issero",-1,1),new r("ato",-1,1),new r("ito",-1,1),new r("uto",-1,1),new r("avo",-1,1),new r("evo",-1,1),new r("ivo",-1,1),new r("ar",-1,1),new r("ir",-1,1),new r("erà",-1,1),new r("irà",-1,1),new r("erò",-1,1),new r("irò",-1,1)],L=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1],y=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2],U=[17],x=new n;this.setCurrent=function(e){x.setCurrent(e)},this.getCurrent=function(){return x.getCurrent()},this.stem=function(){var e=x.cursor;return i(),x.cursor=e,u(),x.limit_backward=e,x.cursor=x.limit,f(),x.cursor=x.limit,v()||(x.cursor=x.limit,b()),x.cursor=x.limit,_(),x.cursor=x.limit_backward,c(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.it.stemmer,"stemmer-it"),e.it.stopWordFilter=e.generateStopWordFilter("a abbia abbiamo abbiano abbiate ad agl agli ai al all alla alle allo anche avemmo avendo avesse avessero avessi avessimo aveste avesti avete aveva avevamo avevano avevate avevi avevo avrai avranno avrebbe avrebbero avrei avremmo avremo avreste avresti avrete avrà avrò avuta avute avuti avuto c che chi ci coi col come con contro cui da dagl dagli dai dal dall dalla dalle dallo degl degli dei del dell della delle dello di dov dove e ebbe ebbero ebbi ed era erano eravamo eravate eri ero essendo faccia facciamo facciano facciate faccio facemmo facendo facesse facessero facessi facessimo faceste facesti faceva facevamo facevano facevate facevi facevo fai fanno farai faranno farebbe farebbero farei faremmo faremo fareste faresti farete farà farò fece fecero feci fosse fossero fossi fossimo foste fosti fu fui fummo furono gli ha hai hanno ho i il in io l la le lei li lo loro lui ma mi mia mie miei mio ne negl negli nei nel nell nella nelle nello noi non nostra nostre nostri nostro o per perché più quale quanta quante quanti quanto quella quelle quelli quello questa queste questi questo sarai saranno sarebbe sarebbero sarei saremmo saremo sareste saresti sarete sarà sarò se sei si sia siamo siano siate siete sono sta stai stando stanno starai staranno starebbe starebbero starei staremmo staremo stareste staresti starete starà starò stava stavamo stavano stavate stavi stavo stemmo stesse stessero stessi stessimo steste stesti stette stettero stetti stia stiamo stiano stiate sto su sua sue sugl sugli sui sul sull sulla sulle sullo suo suoi ti tra tu tua tue tuo tuoi tutti tutto un una uno vi voi vostra vostre vostri vostro è".split(" ")),e.Pipeline.registerFunction(e.it.stopWordFilter,"stopWordFilter-it")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.ja.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.ja.min.js new file mode 100644 index 00000000..5f254ebe --- /dev/null +++ 
b/2.0.0/assets/javascripts/lunr/min/lunr.ja.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.ja=function(){this.pipeline.reset(),this.pipeline.add(e.ja.trimmer,e.ja.stopWordFilter,e.ja.stemmer),r?this.tokenizer=e.ja.tokenizer:(e.tokenizer&&(e.tokenizer=e.ja.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.ja.tokenizer))};var t=new e.TinySegmenter;e.ja.tokenizer=function(i){var n,o,s,p,a,u,m,l,c,f;if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(o=i.toString().toLowerCase().replace(/^\s+/,""),n=o.length-1;n>=0;n--)if(/\S/.test(o.charAt(n))){o=o.substring(0,n+1);break}for(a=[],s=o.length,c=0,l=0;c<=s;c++)if(u=o.charAt(c),m=c-l,u.match(/\s/)||c==s){if(m>0)for(p=t.segment(o.slice(l,c)).filter(function(e){return!!e}),f=l,n=0;n=C.limit)break;C.cursor++;continue}break}for(C.cursor=o,C.bra=o,C.eq_s(1,"y")?(C.ket=C.cursor,C.slice_from("Y")):C.cursor=o;;)if(e=C.cursor,C.in_grouping(q,97,232)){if(i=C.cursor,C.bra=i,C.eq_s(1,"i"))C.ket=C.cursor,C.in_grouping(q,97,232)&&(C.slice_from("I"),C.cursor=e);else if(C.cursor=i,C.eq_s(1,"y"))C.ket=C.cursor,C.slice_from("Y"),C.cursor=e;else if(n(e))break}else if(n(e))break}function n(r){return C.cursor=r,r>=C.limit||(C.cursor++,!1)}function o(){_=C.limit,d=_,t()||(_=C.cursor,_<3&&(_=3),t()||(d=C.cursor))}function t(){for(;!C.in_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}for(;!C.out_grouping(q,97,232);){if(C.cursor>=C.limit)return!0;C.cursor++}return!1}function s(){for(var r;;)if(C.bra=C.cursor,r=C.find_among(p,3))switch(C.ket=C.cursor,r){case 1:C.slice_from("y");break;case 2:C.slice_from("i");break;case 3:if(C.cursor>=C.limit)return;C.cursor++}}function u(){return _<=C.cursor}function c(){return d<=C.cursor}function a(){var r=C.limit-C.cursor;C.find_among_b(g,3)&&(C.cursor=C.limit-r,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del()))}function l(){var r;w=!1,C.ket=C.cursor,C.eq_s_b(1,"e")&&(C.bra=C.cursor,u()&&(r=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-r,C.slice_del(),w=!0,a())))}function m(){var r;u()&&(r=C.limit-C.cursor,C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-r,C.eq_s_b(3,"gem")||(C.cursor=C.limit-r,C.slice_del(),a())))}function f(){var r,e,i,n,o,t,s=C.limit-C.cursor;if(C.ket=C.cursor,r=C.find_among_b(h,5))switch(C.bra=C.cursor,r){case 1:u()&&C.slice_from("heid");break;case 2:m();break;case 3:u()&&C.out_grouping_b(j,97,232)&&C.slice_del()}if(C.cursor=C.limit-s,l(),C.cursor=C.limit-s,C.ket=C.cursor,C.eq_s_b(4,"heid")&&(C.bra=C.cursor,c()&&(e=C.limit-C.cursor,C.eq_s_b(1,"c")||(C.cursor=C.limit-e,C.slice_del(),C.ket=C.cursor,C.eq_s_b(2,"en")&&(C.bra=C.cursor,m())))),C.cursor=C.limit-s,C.ket=C.cursor,r=C.find_among_b(k,6))switch(C.bra=C.cursor,r){case 1:if(c()){if(C.slice_del(),i=C.limit-C.cursor,C.ket=C.cursor,C.eq_s_b(2,"ig")&&(C.bra=C.cursor,c()&&(n=C.limit-C.cursor,!C.eq_s_b(1,"e")))){C.cursor=C.limit-n,C.slice_del();break}C.cursor=C.limit-i,a()}break;case 2:c()&&(o=C.limit-C.cursor,C.eq_s_b(1,"e")||(C.cursor=C.limit-o,C.slice_del()));break;case 
3:c()&&(C.slice_del(),l());break;case 4:c()&&C.slice_del();break;case 5:c()&&w&&C.slice_del()}C.cursor=C.limit-s,C.out_grouping_b(z,73,232)&&(t=C.limit-C.cursor,C.find_among_b(v,4)&&C.out_grouping_b(q,97,232)&&(C.cursor=C.limit-t,C.ket=C.cursor,C.cursor>C.limit_backward&&(C.cursor--,C.bra=C.cursor,C.slice_del())))}var d,_,w,b=[new e("",-1,6),new e("á",0,1),new e("ä",0,1),new e("é",0,2),new e("ë",0,2),new e("í",0,3),new e("ï",0,3),new e("ó",0,4),new e("ö",0,4),new e("ú",0,5),new e("ü",0,5)],p=[new e("",-1,3),new e("I",0,2),new e("Y",0,1)],g=[new e("dd",-1,-1),new e("kk",-1,-1),new e("tt",-1,-1)],h=[new e("ene",-1,2),new e("se",-1,3),new e("en",-1,2),new e("heden",2,1),new e("s",-1,3)],k=[new e("end",-1,1),new e("ig",-1,2),new e("ing",-1,1),new e("lijk",-1,3),new e("baar",-1,4),new e("bar",-1,5)],v=[new e("aa",-1,-1),new e("ee",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1)],q=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],z=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],j=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],C=new i;this.setCurrent=function(r){C.setCurrent(r)},this.getCurrent=function(){return C.getCurrent()},this.stem=function(){var e=C.cursor;return r(),C.cursor=e,o(),C.limit_backward=e,C.cursor=C.limit,f(),C.cursor=C.limit_backward,s(),!0}};return function(r){return"function"==typeof r.update?r.update(function(r){return n.setCurrent(r),n.stem(),n.getCurrent()}):(n.setCurrent(r),n.stem(),n.getCurrent())}}(),r.Pipeline.registerFunction(r.nl.stemmer,"stemmer-nl"),r.nl.stopWordFilter=r.generateStopWordFilter(" aan al alles als altijd andere ben bij daar dan dat de der deze die dit doch doen door dus een eens en er ge geen geweest haar had heb hebben heeft hem het hier hij hoe hun iemand iets ik in is ja je kan kon kunnen maar me meer men met mij mijn moet na naar niet niets nog nu of om omdat onder ons ook op over reeds te tegen toch toen tot u uit uw van veel voor want waren was wat werd wezen wie wil worden wordt zal ze zelf zich zij zijn zo zonder zou".split(" ")),r.Pipeline.registerFunction(r.nl.stopWordFilter,"stopWordFilter-nl")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.no.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.no.min.js new file mode 100644 index 00000000..92bc7e4e --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.no.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Norwegian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.no=function(){this.pipeline.reset(),this.pipeline.add(e.no.trimmer,e.no.stopWordFilter,e.no.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.no.stemmer))},e.no.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.no.trimmer=e.trimmerSupport.generateTrimmer(e.no.wordCharacters),e.Pipeline.registerFunction(e.no.trimmer,"trimmer-no"),e.no.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,r=w.cursor+3;if(a=w.limit,0<=r||r<=w.limit){for(s=r;;){if(e=w.cursor,w.in_grouping(d,97,248)){w.cursor=e;break}if(e>=w.limit)return;w.cursor=e+1}for(;!w.out_grouping(d,97,248);){if(w.cursor>=w.limit)return;w.cursor++}a=w.cursor,a=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(m,29),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:n=w.limit-w.cursor,w.in_grouping_b(c,98,122)?w.slice_del():(w.cursor=w.limit-n,w.eq_s_b(1,"k")&&w.out_grouping_b(d,97,248)&&w.slice_del());break;case 3:w.slice_from("er")}}function t(){var e,r=w.limit-w.cursor;w.cursor>=a&&(e=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,w.find_among_b(u,2)?(w.bra=w.cursor,w.limit_backward=e,w.cursor=w.limit-r,w.cursor>w.limit_backward&&(w.cursor--,w.bra=w.cursor,w.slice_del())):w.limit_backward=e)}function o(){var e,r;w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(l,11),e?(w.bra=w.cursor,w.limit_backward=r,1==e&&w.slice_del()):w.limit_backward=r)}var s,a,m=[new r("a",-1,1),new r("e",-1,1),new r("ede",1,1),new r("ande",1,1),new r("ende",1,1),new r("ane",1,1),new r("ene",1,1),new r("hetene",6,1),new r("erte",1,3),new r("en",-1,1),new r("heten",9,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",12,1),new r("s",-1,2),new r("as",14,1),new r("es",14,1),new r("edes",16,1),new r("endes",16,1),new r("enes",16,1),new r("hetenes",19,1),new r("ens",14,1),new r("hetens",21,1),new r("ers",14,1),new r("ets",14,1),new r("et",-1,1),new r("het",25,1),new r("ert",-1,3),new r("ast",-1,1)],u=[new r("dt",-1,-1),new r("vt",-1,-1)],l=[new r("leg",-1,1),new r("eleg",0,1),new r("ig",-1,1),new r("eig",2,1),new r("lig",2,1),new r("elig",4,1),new r("els",-1,1),new r("lov",-1,1),new r("elov",7,1),new r("slov",7,1),new r("hetslov",9,1)],d=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],c=[119,125,149,1],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,i(),w.cursor=w.limit,t(),w.cursor=w.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller elles en enn er et ett etter for fordi fra før ha hadde han hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen 
noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.pt.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.pt.min.js new file mode 100644 index 00000000..6c16996d --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.pt.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Portuguese` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.pt=function(){this.pipeline.reset(),this.pipeline.add(e.pt.trimmer,e.pt.stopWordFilter,e.pt.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.pt.stemmer))},e.pt.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.pt.trimmer=e.trimmerSupport.generateTrimmer(e.pt.wordCharacters),e.Pipeline.registerFunction(e.pt.trimmer,"trimmer-pt"),e.pt.stemmer=function(){var r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,n=new function(){function e(){for(var e;;){if(z.bra=z.cursor,e=z.find_among(k,3))switch(z.ket=z.cursor,e){case 1:z.slice_from("a~");continue;case 2:z.slice_from("o~");continue;case 3:if(z.cursor>=z.limit)break;z.cursor++;continue}break}}function n(){if(z.out_grouping(y,97,250)){for(;!z.in_grouping(y,97,250);){if(z.cursor>=z.limit)return!0;z.cursor++}return!1}return!0}function i(){if(z.in_grouping(y,97,250))for(;!z.out_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}return g=z.cursor,!0}function o(){var e,r,s=z.cursor;if(z.in_grouping(y,97,250))if(e=z.cursor,n()){if(z.cursor=e,i())return}else g=z.cursor;if(z.cursor=s,z.out_grouping(y,97,250)){if(r=z.cursor,n()){if(z.cursor=r,!z.in_grouping(y,97,250)||z.cursor>=z.limit)return;z.cursor++}g=z.cursor}}function t(){for(;!z.in_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}for(;!z.out_grouping(y,97,250);){if(z.cursor>=z.limit)return!1;z.cursor++}return!0}function a(){var e=z.cursor;g=z.limit,b=g,h=g,o(),z.cursor=e,t()&&(b=z.cursor,t()&&(h=z.cursor))}function u(){for(var e;;){if(z.bra=z.cursor,e=z.find_among(q,3))switch(z.ket=z.cursor,e){case 1:z.slice_from("ã");continue;case 2:z.slice_from("õ");continue;case 3:if(z.cursor>=z.limit)break;z.cursor++;continue}break}}function w(){return g<=z.cursor}function m(){return b<=z.cursor}function c(){return h<=z.cursor}function l(){var e;if(z.ket=z.cursor,!(e=z.find_among_b(F,45)))return!1;switch(z.bra=z.cursor,e){case 1:if(!c())return!1;z.slice_del();break;case 2:if(!c())return!1;z.slice_from("log");break;case 3:if(!c())return!1;z.slice_from("u");break;case 
4:if(!c())return!1;z.slice_from("ente");break;case 5:if(!m())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(j,4),e&&(z.bra=z.cursor,c()&&(z.slice_del(),1==e&&(z.ket=z.cursor,z.eq_s_b(2,"at")&&(z.bra=z.cursor,c()&&z.slice_del()))));break;case 6:if(!c())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(C,3),e&&(z.bra=z.cursor,1==e&&c()&&z.slice_del());break;case 7:if(!c())return!1;z.slice_del(),z.ket=z.cursor,e=z.find_among_b(P,3),e&&(z.bra=z.cursor,1==e&&c()&&z.slice_del());break;case 8:if(!c())return!1;z.slice_del(),z.ket=z.cursor,z.eq_s_b(2,"at")&&(z.bra=z.cursor,c()&&z.slice_del());break;case 9:if(!w()||!z.eq_s_b(1,"e"))return!1;z.slice_from("ir")}return!0}function f(){var e,r;if(z.cursor>=g){if(r=z.limit_backward,z.limit_backward=g,z.ket=z.cursor,e=z.find_among_b(S,120))return z.bra=z.cursor,1==e&&z.slice_del(),z.limit_backward=r,!0;z.limit_backward=r}return!1}function d(){var e;z.ket=z.cursor,(e=z.find_among_b(W,7))&&(z.bra=z.cursor,1==e&&w()&&z.slice_del())}function v(e,r){if(z.eq_s_b(1,e)){z.bra=z.cursor;var s=z.limit-z.cursor;if(z.eq_s_b(1,r))return z.cursor=z.limit-s,w()&&z.slice_del(),!1}return!0}function p(){var e;if(z.ket=z.cursor,e=z.find_among_b(L,4))switch(z.bra=z.cursor,e){case 1:w()&&(z.slice_del(),z.ket=z.cursor,z.limit-z.cursor,v("u","g")&&v("i","c"));break;case 2:z.slice_from("c")}}function _(){if(!l()&&(z.cursor=z.limit,!f()))return z.cursor=z.limit,void d();z.cursor=z.limit,z.ket=z.cursor,z.eq_s_b(1,"i")&&(z.bra=z.cursor,z.eq_s_b(1,"c")&&(z.cursor=z.limit,w()&&z.slice_del()))}var h,b,g,k=[new r("",-1,3),new r("ã",0,1),new r("õ",0,2)],q=[new r("",-1,3),new r("a~",0,1),new r("o~",0,2)],j=[new r("ic",-1,-1),new r("ad",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],C=[new r("ante",-1,1),new r("avel",-1,1),new r("ível",-1,1)],P=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],F=[new r("ica",-1,1),new r("ância",-1,1),new r("ência",-1,4),new r("ira",-1,9),new r("adora",-1,1),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,8),new r("eza",-1,1),new r("logía",-1,2),new r("idade",-1,7),new r("ante",-1,1),new r("mente",-1,6),new r("amente",12,5),new r("ável",-1,1),new r("ível",-1,1),new r("ución",-1,3),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,1),new r("imento",-1,1),new r("ivo",-1,8),new r("aça~o",-1,1),new r("ador",-1,1),new r("icas",-1,1),new r("ências",-1,4),new r("iras",-1,9),new r("adoras",-1,1),new r("osas",-1,1),new r("istas",-1,1),new r("ivas",-1,8),new r("ezas",-1,1),new r("logías",-1,2),new r("idades",-1,7),new r("uciones",-1,3),new r("adores",-1,1),new r("antes",-1,1),new r("aço~es",-1,1),new r("icos",-1,1),new r("ismos",-1,1),new r("osos",-1,1),new r("amentos",-1,1),new r("imentos",-1,1),new r("ivos",-1,8)],S=[new r("ada",-1,1),new r("ida",-1,1),new r("ia",-1,1),new r("aria",2,1),new r("eria",2,1),new r("iria",2,1),new r("ara",-1,1),new r("era",-1,1),new r("ira",-1,1),new r("ava",-1,1),new r("asse",-1,1),new r("esse",-1,1),new r("isse",-1,1),new r("aste",-1,1),new r("este",-1,1),new r("iste",-1,1),new r("ei",-1,1),new r("arei",16,1),new r("erei",16,1),new r("irei",16,1),new r("am",-1,1),new r("iam",20,1),new r("ariam",21,1),new r("eriam",21,1),new r("iriam",21,1),new r("aram",20,1),new r("eram",20,1),new r("iram",20,1),new r("avam",20,1),new r("em",-1,1),new r("arem",29,1),new r("erem",29,1),new r("irem",29,1),new r("assem",29,1),new r("essem",29,1),new r("issem",29,1),new r("ado",-1,1),new r("ido",-1,1),new r("ando",-1,1),new r("endo",-1,1),new r("indo",-1,1),new r("ara~o",-1,1),new r("era~o",-1,1),new 
r("ira~o",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("ir",-1,1),new r("as",-1,1),new r("adas",47,1),new r("idas",47,1),new r("ias",47,1),new r("arias",50,1),new r("erias",50,1),new r("irias",50,1),new r("aras",47,1),new r("eras",47,1),new r("iras",47,1),new r("avas",47,1),new r("es",-1,1),new r("ardes",58,1),new r("erdes",58,1),new r("irdes",58,1),new r("ares",58,1),new r("eres",58,1),new r("ires",58,1),new r("asses",58,1),new r("esses",58,1),new r("isses",58,1),new r("astes",58,1),new r("estes",58,1),new r("istes",58,1),new r("is",-1,1),new r("ais",71,1),new r("eis",71,1),new r("areis",73,1),new r("ereis",73,1),new r("ireis",73,1),new r("áreis",73,1),new r("éreis",73,1),new r("íreis",73,1),new r("ásseis",73,1),new r("ésseis",73,1),new r("ísseis",73,1),new r("áveis",73,1),new r("íeis",73,1),new r("aríeis",84,1),new r("eríeis",84,1),new r("iríeis",84,1),new r("ados",-1,1),new r("idos",-1,1),new r("amos",-1,1),new r("áramos",90,1),new r("éramos",90,1),new r("íramos",90,1),new r("ávamos",90,1),new r("íamos",90,1),new r("aríamos",95,1),new r("eríamos",95,1),new r("iríamos",95,1),new r("emos",-1,1),new r("aremos",99,1),new r("eremos",99,1),new r("iremos",99,1),new r("ássemos",99,1),new r("êssemos",99,1),new r("íssemos",99,1),new r("imos",-1,1),new r("armos",-1,1),new r("ermos",-1,1),new r("irmos",-1,1),new r("ámos",-1,1),new r("arás",-1,1),new r("erás",-1,1),new r("irás",-1,1),new r("eu",-1,1),new r("iu",-1,1),new r("ou",-1,1),new r("ará",-1,1),new r("erá",-1,1),new r("irá",-1,1)],W=[new r("a",-1,1),new r("i",-1,1),new r("o",-1,1),new r("os",-1,1),new r("á",-1,1),new r("í",-1,1),new r("ó",-1,1)],L=[new r("e",-1,1),new r("ç",-1,2),new r("é",-1,1),new r("ê",-1,1)],y=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2],z=new s;this.setCurrent=function(e){z.setCurrent(e)},this.getCurrent=function(){return z.getCurrent()},this.stem=function(){var r=z.cursor;return e(),z.cursor=r,a(),z.limit_backward=r,z.cursor=z.limit,_(),z.cursor=z.limit,p(),z.cursor=z.limit_backward,u(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.pt.stemmer,"stemmer-pt"),e.pt.stopWordFilter=e.generateStopWordFilter("a ao aos aquela aquelas aquele aqueles aquilo as até com como da das de dela delas dele deles depois do dos e ela elas ele eles em entre era eram essa essas esse esses esta estamos estas estava estavam este esteja estejam estejamos estes esteve estive estivemos estiver estivera estiveram estiverem estivermos estivesse estivessem estivéramos estivéssemos estou está estávamos estão eu foi fomos for fora foram forem formos fosse fossem fui fôramos fôssemos haja hajam hajamos havemos hei houve houvemos houver houvera houveram houverei houverem houveremos houveria houveriam houvermos houverá houverão houveríamos houvesse houvessem houvéramos houvéssemos há hão isso isto já lhe lhes mais mas me mesmo meu meus minha minhas muito na nas nem no nos nossa nossas nosso nossos num numa não nós o os ou para pela pelas pelo pelos por qual quando que quem se seja sejam sejamos sem serei seremos seria seriam será serão seríamos seu seus somos sou sua suas são só também te tem temos tenha tenham tenhamos tenho terei teremos teria teriam terá terão teríamos teu teus teve tinha tinham tive tivemos tiver tivera tiveram tiverem tivermos tivesse tivessem tivéramos tivéssemos tu tua tuas tém tínhamos um uma você vocês vos à às éramos".split(" 
")),e.Pipeline.registerFunction(e.pt.stopWordFilter,"stopWordFilter-pt")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.ro.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.ro.min.js new file mode 100644 index 00000000..72771401 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.ro.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Romanian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ro=function(){this.pipeline.reset(),this.pipeline.add(e.ro.trimmer,e.ro.stopWordFilter,e.ro.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ro.stemmer))},e.ro.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.ro.trimmer=e.trimmerSupport.generateTrimmer(e.ro.wordCharacters),e.Pipeline.registerFunction(e.ro.trimmer,"trimmer-ro"),e.ro.stemmer=function(){var i=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,n=new function(){function e(e,i){L.eq_s(1,e)&&(L.ket=L.cursor,L.in_grouping(W,97,259)&&L.slice_from(i))}function n(){for(var i,r;;){if(i=L.cursor,L.in_grouping(W,97,259)&&(r=L.cursor,L.bra=r,e("u","U"),L.cursor=r,e("i","I")),L.cursor=i,L.cursor>=L.limit)break;L.cursor++}}function t(){if(L.out_grouping(W,97,259)){for(;!L.in_grouping(W,97,259);){if(L.cursor>=L.limit)return!0;L.cursor++}return!1}return!0}function a(){if(L.in_grouping(W,97,259))for(;!L.out_grouping(W,97,259);){if(L.cursor>=L.limit)return!0;L.cursor++}return!1}function o(){var e,i,r=L.cursor;if(L.in_grouping(W,97,259)){if(e=L.cursor,!t())return void(h=L.cursor);if(L.cursor=e,!a())return void(h=L.cursor)}L.cursor=r,L.out_grouping(W,97,259)&&(i=L.cursor,t()&&(L.cursor=i,L.in_grouping(W,97,259)&&L.cursor=L.limit)return!1;L.cursor++}for(;!L.out_grouping(W,97,259);){if(L.cursor>=L.limit)return!1;L.cursor++}return!0}function c(){var e=L.cursor;h=L.limit,k=h,g=h,o(),L.cursor=e,u()&&(k=L.cursor,u()&&(g=L.cursor))}function s(){for(var e;;){if(L.bra=L.cursor,e=L.find_among(z,3))switch(L.ket=L.cursor,e){case 1:L.slice_from("i");continue;case 2:L.slice_from("u");continue;case 3:if(L.cursor>=L.limit)break;L.cursor++;continue}break}}function w(){return h<=L.cursor}function m(){return k<=L.cursor}function l(){return g<=L.cursor}function f(){var e,i;if(L.ket=L.cursor,(e=L.find_among_b(C,16))&&(L.bra=L.cursor,m()))switch(e){case 1:L.slice_del();break;case 2:L.slice_from("a");break;case 3:L.slice_from("e");break;case 4:L.slice_from("i");break;case 5:i=L.limit-L.cursor,L.eq_s_b(2,"ab")||(L.cursor=L.limit-i,L.slice_from("i"));break;case 6:L.slice_from("at");break;case 7:L.slice_from("aţi")}}function p(){var e,i=L.limit-L.cursor;if(L.ket=L.cursor,(e=L.find_among_b(P,46))&&(L.bra=L.cursor,m())){switch(e){case 1:L.slice_from("abil");break;case 2:L.slice_from("ibil");break;case 3:L.slice_from("iv");break;case 4:L.slice_from("ic");break;case 
5:L.slice_from("at");break;case 6:L.slice_from("it")}return _=!0,L.cursor=L.limit-i,!0}return!1}function d(){var e,i;for(_=!1;;)if(i=L.limit-L.cursor,!p()){L.cursor=L.limit-i;break}if(L.ket=L.cursor,(e=L.find_among_b(F,62))&&(L.bra=L.cursor,l())){switch(e){case 1:L.slice_del();break;case 2:L.eq_s_b(1,"ţ")&&(L.bra=L.cursor,L.slice_from("t"));break;case 3:L.slice_from("ist")}_=!0}}function b(){var e,i,r;if(L.cursor>=h){if(i=L.limit_backward,L.limit_backward=h,L.ket=L.cursor,e=L.find_among_b(q,94))switch(L.bra=L.cursor,e){case 1:if(r=L.limit-L.cursor,!L.out_grouping_b(W,97,259)&&(L.cursor=L.limit-r,!L.eq_s_b(1,"u")))break;case 2:L.slice_del()}L.limit_backward=i}}function v(){var e;L.ket=L.cursor,(e=L.find_among_b(S,5))&&(L.bra=L.cursor,w()&&1==e&&L.slice_del())}var _,g,k,h,z=[new i("",-1,3),new i("I",0,1),new i("U",0,2)],C=[new i("ea",-1,3),new i("aţia",-1,7),new i("aua",-1,2),new i("iua",-1,4),new i("aţie",-1,7),new i("ele",-1,3),new i("ile",-1,5),new i("iile",6,4),new i("iei",-1,4),new i("atei",-1,6),new i("ii",-1,4),new i("ului",-1,1),new i("ul",-1,1),new i("elor",-1,3),new i("ilor",-1,4),new i("iilor",14,4)],P=[new i("icala",-1,4),new i("iciva",-1,4),new i("ativa",-1,5),new i("itiva",-1,6),new i("icale",-1,4),new i("aţiune",-1,5),new i("iţiune",-1,6),new i("atoare",-1,5),new i("itoare",-1,6),new i("ătoare",-1,5),new i("icitate",-1,4),new i("abilitate",-1,1),new i("ibilitate",-1,2),new i("ivitate",-1,3),new i("icive",-1,4),new i("ative",-1,5),new i("itive",-1,6),new i("icali",-1,4),new i("atori",-1,5),new i("icatori",18,4),new i("itori",-1,6),new i("ători",-1,5),new i("icitati",-1,4),new i("abilitati",-1,1),new i("ivitati",-1,3),new i("icivi",-1,4),new i("ativi",-1,5),new i("itivi",-1,6),new i("icităi",-1,4),new i("abilităi",-1,1),new i("ivităi",-1,3),new i("icităţi",-1,4),new i("abilităţi",-1,1),new i("ivităţi",-1,3),new i("ical",-1,4),new i("ator",-1,5),new i("icator",35,4),new i("itor",-1,6),new i("ător",-1,5),new i("iciv",-1,4),new i("ativ",-1,5),new i("itiv",-1,6),new i("icală",-1,4),new i("icivă",-1,4),new i("ativă",-1,5),new i("itivă",-1,6)],F=[new i("ica",-1,1),new i("abila",-1,1),new i("ibila",-1,1),new i("oasa",-1,1),new i("ata",-1,1),new i("ita",-1,1),new i("anta",-1,1),new i("ista",-1,3),new i("uta",-1,1),new i("iva",-1,1),new i("ic",-1,1),new i("ice",-1,1),new i("abile",-1,1),new i("ibile",-1,1),new i("isme",-1,3),new i("iune",-1,2),new i("oase",-1,1),new i("ate",-1,1),new i("itate",17,1),new i("ite",-1,1),new i("ante",-1,1),new i("iste",-1,3),new i("ute",-1,1),new i("ive",-1,1),new i("ici",-1,1),new i("abili",-1,1),new i("ibili",-1,1),new i("iuni",-1,2),new i("atori",-1,1),new i("osi",-1,1),new i("ati",-1,1),new i("itati",30,1),new i("iti",-1,1),new i("anti",-1,1),new i("isti",-1,3),new i("uti",-1,1),new i("işti",-1,3),new i("ivi",-1,1),new i("ităi",-1,1),new i("oşi",-1,1),new i("ităţi",-1,1),new i("abil",-1,1),new i("ibil",-1,1),new i("ism",-1,3),new i("ator",-1,1),new i("os",-1,1),new i("at",-1,1),new i("it",-1,1),new i("ant",-1,1),new i("ist",-1,3),new i("ut",-1,1),new i("iv",-1,1),new i("ică",-1,1),new i("abilă",-1,1),new i("ibilă",-1,1),new i("oasă",-1,1),new i("ată",-1,1),new i("ită",-1,1),new i("antă",-1,1),new i("istă",-1,3),new i("ută",-1,1),new i("ivă",-1,1)],q=[new i("ea",-1,1),new i("ia",-1,1),new i("esc",-1,1),new i("ăsc",-1,1),new i("ind",-1,1),new i("ând",-1,1),new i("are",-1,1),new i("ere",-1,1),new i("ire",-1,1),new i("âre",-1,1),new i("se",-1,2),new i("ase",10,1),new i("sese",10,2),new i("ise",10,1),new i("use",10,1),new i("âse",10,1),new i("eşte",-1,1),new 
i("ăşte",-1,1),new i("eze",-1,1),new i("ai",-1,1),new i("eai",19,1),new i("iai",19,1),new i("sei",-1,2),new i("eşti",-1,1),new i("ăşti",-1,1),new i("ui",-1,1),new i("ezi",-1,1),new i("âi",-1,1),new i("aşi",-1,1),new i("seşi",-1,2),new i("aseşi",29,1),new i("seseşi",29,2),new i("iseşi",29,1),new i("useşi",29,1),new i("âseşi",29,1),new i("işi",-1,1),new i("uşi",-1,1),new i("âşi",-1,1),new i("aţi",-1,2),new i("eaţi",38,1),new i("iaţi",38,1),new i("eţi",-1,2),new i("iţi",-1,2),new i("âţi",-1,2),new i("arăţi",-1,1),new i("serăţi",-1,2),new i("aserăţi",45,1),new i("seserăţi",45,2),new i("iserăţi",45,1),new i("userăţi",45,1),new i("âserăţi",45,1),new i("irăţi",-1,1),new i("urăţi",-1,1),new i("ârăţi",-1,1),new i("am",-1,1),new i("eam",54,1),new i("iam",54,1),new i("em",-1,2),new i("asem",57,1),new i("sesem",57,2),new i("isem",57,1),new i("usem",57,1),new i("âsem",57,1),new i("im",-1,2),new i("âm",-1,2),new i("ăm",-1,2),new i("arăm",65,1),new i("serăm",65,2),new i("aserăm",67,1),new i("seserăm",67,2),new i("iserăm",67,1),new i("userăm",67,1),new i("âserăm",67,1),new i("irăm",65,1),new i("urăm",65,1),new i("ârăm",65,1),new i("au",-1,1),new i("eau",76,1),new i("iau",76,1),new i("indu",-1,1),new i("ându",-1,1),new i("ez",-1,1),new i("ească",-1,1),new i("ară",-1,1),new i("seră",-1,2),new i("aseră",84,1),new i("seseră",84,2),new i("iseră",84,1),new i("useră",84,1),new i("âseră",84,1),new i("iră",-1,1),new i("ură",-1,1),new i("âră",-1,1),new i("ează",-1,1)],S=[new i("a",-1,1),new i("e",-1,1),new i("ie",1,1),new i("i",-1,1),new i("ă",-1,1)],W=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4],L=new r;this.setCurrent=function(e){L.setCurrent(e)},this.getCurrent=function(){return L.getCurrent()},this.stem=function(){var e=L.cursor;return n(),L.cursor=e,c(),L.limit_backward=e,L.cursor=L.limit,f(),L.cursor=L.limit,d(),L.cursor=L.limit,_||(L.cursor=L.limit,b(),L.cursor=L.limit),v(),L.cursor=L.limit_backward,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.ro.stemmer,"stemmer-ro"),e.ro.stopWordFilter=e.generateStopWordFilter("acea aceasta această aceea acei aceia acel acela acele acelea acest acesta aceste acestea aceşti aceştia acolo acord acum ai aia aibă aici al ale alea altceva altcineva am ar are asemenea asta astea astăzi asupra au avea avem aveţi azi aş aşadar aţi bine bucur bună ca care caut ce cel ceva chiar cinci cine cineva contra cu cum cumva curând curînd când cât câte câtva câţi cînd cît cîte cîtva cîţi că căci cărei căror cărui către da dacă dar datorită dată dau de deci deja deoarece departe deşi din dinaintea dintr- dintre doi doilea două drept după dă ea ei el ele eram este eu eşti face fata fi fie fiecare fii fim fiu fiţi frumos fără graţie halbă iar ieri la le li lor lui lângă lîngă mai mea mei mele mereu meu mi mie mine mult multă mulţi mulţumesc mâine mîine mă ne nevoie nici nicăieri nimeni nimeri nimic nişte noastre noastră noi noroc nostru nouă noştri nu opt ori oricare orice oricine oricum oricând oricât oricînd oricît oriunde patra patru patrulea pe pentru peste pic poate pot prea prima primul prin puţin puţina puţină până pînă rog sa sale sau se spate spre sub sunt suntem sunteţi sută sînt sîntem sînteţi să săi său ta tale te timp tine toate toată tot totuşi toţi trei treia treilea tu tăi tău un una unde undeva unei uneia unele uneori unii unor unora unu unui unuia unul vi voastre voastră voi vostru vouă voştri vreme vreo vreun vă 
zece zero zi zice îi îl îmi împotriva în înainte înaintea încotro încât încît între întrucât întrucît îţi ăla ălea ăsta ăstea ăştia şapte şase şi ştiu ţi ţie".split(" ")),e.Pipeline.registerFunction(e.ro.stopWordFilter,"stopWordFilter-ro")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.ru.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.ru.min.js new file mode 100644 index 00000000..186cc485 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.ru.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Russian` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ru=function(){this.pipeline.reset(),this.pipeline.add(e.ru.trimmer,e.ru.stopWordFilter,e.ru.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ru.stemmer))},e.ru.wordCharacters="Ѐ-҄҇-ԯᴫᵸⷠ-ⷿꙀ-ꚟ︮︯",e.ru.trimmer=e.trimmerSupport.generateTrimmer(e.ru.wordCharacters),e.Pipeline.registerFunction(e.ru.trimmer,"trimmer-ru"),e.ru.stemmer=function(){var n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,t=new function(){function e(){for(;!W.in_grouping(S,1072,1103);){if(W.cursor>=W.limit)return!1;W.cursor++}return!0}function t(){for(;!W.out_grouping(S,1072,1103);){if(W.cursor>=W.limit)return!1;W.cursor++}return!0}function w(){b=W.limit,_=b,e()&&(b=W.cursor,t()&&e()&&t()&&(_=W.cursor))}function i(){return _<=W.cursor}function u(e,n){var r,t;if(W.ket=W.cursor,r=W.find_among_b(e,n)){switch(W.bra=W.cursor,r){case 1:if(t=W.limit-W.cursor,!W.eq_s_b(1,"а")&&(W.cursor=W.limit-t,!W.eq_s_b(1,"я")))return!1;case 2:W.slice_del()}return!0}return!1}function o(){return u(h,9)}function s(e,n){var r;return W.ket=W.cursor,!!(r=W.find_among_b(e,n))&&(W.bra=W.cursor,1==r&&W.slice_del(),!0)}function c(){return s(g,26)}function m(){return!!c()&&(u(C,8),!0)}function f(){return s(k,2)}function l(){return u(P,46)}function a(){s(v,36)}function p(){var e;W.ket=W.cursor,(e=W.find_among_b(F,2))&&(W.bra=W.cursor,i()&&1==e&&W.slice_del())}function d(){var e;if(W.ket=W.cursor,e=W.find_among_b(q,4))switch(W.bra=W.cursor,e){case 1:if(W.slice_del(),W.ket=W.cursor,!W.eq_s_b(1,"н"))break;W.bra=W.cursor;case 2:if(!W.eq_s_b(1,"н"))break;case 3:W.slice_del()}}var _,b,h=[new n("в",-1,1),new n("ив",0,2),new n("ыв",0,2),new n("вши",-1,1),new n("ивши",3,2),new n("ывши",3,2),new n("вшись",-1,1),new n("ившись",6,2),new n("ывшись",6,2)],g=[new n("ее",-1,1),new n("ие",-1,1),new n("ое",-1,1),new n("ые",-1,1),new n("ими",-1,1),new n("ыми",-1,1),new n("ей",-1,1),new n("ий",-1,1),new n("ой",-1,1),new n("ый",-1,1),new n("ем",-1,1),new n("им",-1,1),new n("ом",-1,1),new n("ым",-1,1),new n("его",-1,1),new n("ого",-1,1),new n("ему",-1,1),new n("ому",-1,1),new n("их",-1,1),new n("ых",-1,1),new n("ею",-1,1),new n("ою",-1,1),new n("ую",-1,1),new n("юю",-1,1),new n("ая",-1,1),new n("яя",-1,1)],C=[new n("ем",-1,1),new n("нн",-1,1),new 
n("вш",-1,1),new n("ивш",2,2),new n("ывш",2,2),new n("щ",-1,1),new n("ющ",5,1),new n("ующ",6,2)],k=[new n("сь",-1,1),new n("ся",-1,1)],P=[new n("ла",-1,1),new n("ила",0,2),new n("ыла",0,2),new n("на",-1,1),new n("ена",3,2),new n("ете",-1,1),new n("ите",-1,2),new n("йте",-1,1),new n("ейте",7,2),new n("уйте",7,2),new n("ли",-1,1),new n("или",10,2),new n("ыли",10,2),new n("й",-1,1),new n("ей",13,2),new n("уй",13,2),new n("л",-1,1),new n("ил",16,2),new n("ыл",16,2),new n("ем",-1,1),new n("им",-1,2),new n("ым",-1,2),new n("н",-1,1),new n("ен",22,2),new n("ло",-1,1),new n("ило",24,2),new n("ыло",24,2),new n("но",-1,1),new n("ено",27,2),new n("нно",27,1),new n("ет",-1,1),new n("ует",30,2),new n("ит",-1,2),new n("ыт",-1,2),new n("ют",-1,1),new n("уют",34,2),new n("ят",-1,2),new n("ны",-1,1),new n("ены",37,2),new n("ть",-1,1),new n("ить",39,2),new n("ыть",39,2),new n("ешь",-1,1),new n("ишь",-1,2),new n("ю",-1,2),new n("ую",44,2)],v=[new n("а",-1,1),new n("ев",-1,1),new n("ов",-1,1),new n("е",-1,1),new n("ие",3,1),new n("ье",3,1),new n("и",-1,1),new n("еи",6,1),new n("ии",6,1),new n("ами",6,1),new n("ями",6,1),new n("иями",10,1),new n("й",-1,1),new n("ей",12,1),new n("ией",13,1),new n("ий",12,1),new n("ой",12,1),new n("ам",-1,1),new n("ем",-1,1),new n("ием",18,1),new n("ом",-1,1),new n("ям",-1,1),new n("иям",21,1),new n("о",-1,1),new n("у",-1,1),new n("ах",-1,1),new n("ях",-1,1),new n("иях",26,1),new n("ы",-1,1),new n("ь",-1,1),new n("ю",-1,1),new n("ию",30,1),new n("ью",30,1),new n("я",-1,1),new n("ия",33,1),new n("ья",33,1)],F=[new n("ост",-1,1),new n("ость",-1,1)],q=[new n("ейше",-1,1),new n("н",-1,2),new n("ейш",-1,1),new n("ь",-1,3)],S=[33,65,8,232],W=new r;this.setCurrent=function(e){W.setCurrent(e)},this.getCurrent=function(){return W.getCurrent()},this.stem=function(){return w(),W.cursor=W.limit,!(W.cursor=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursors||e>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor>1),f=0,l=o0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o=0;m--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n-_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n-_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var 
s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return function(r){return"function"==typeof r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.sv.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.sv.min.js new file mode 100644 index 00000000..3e5eb640 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.sv.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Swedish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! + * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){function e(){var e,r=w.cursor+3;if(o=w.limit,0<=r||r<=w.limit){for(a=r;;){if(e=w.cursor,w.in_grouping(l,97,246)){w.cursor=e;break}if(w.cursor=e,w.cursor>=w.limit)return;w.cursor++}for(;!w.out_grouping(l,97,246);){if(w.cursor>=w.limit)return;w.cursor++}o=w.cursor,o=o&&(w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(u,37),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.in_grouping_b(d,98,121)&&w.slice_del()}}function i(){var e=w.limit_backward;w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.find_among_b(c,7)&&(w.cursor=w.limit,w.ket=w.cursor,w.cursor>w.limit_backward&&(w.bra=--w.cursor,w.slice_del())),w.limit_backward=e)}function s(){var e,r;if(w.cursor>=o){if(r=w.limit_backward,w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(m,5))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.slice_from("lös");break;case 3:w.slice_from("full")}w.limit_backward=r}}var a,o,u=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new 
r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],c=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new r("fullt",-1,3),new r("löst",-1,2)],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],d=[119,127,149],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,t(),w.cursor=w.limit,i(),w.cursor=w.limit,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}}(),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej eller en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" ")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.ta.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.ta.min.js new file mode 100644 index 00000000..a644bed2 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.ta.min.js @@ -0,0 +1 @@ +!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");e.ta=function(){this.pipeline.reset(),this.pipeline.add(e.ta.trimmer,e.ta.stopWordFilter,e.ta.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ta.stemmer))},e.ta.wordCharacters="஀-உஊ-ஏஐ-ஙச-ட஠-னப-யர-ஹ஺-ிீ-௉ொ-௏ௐ-௙௚-௟௠-௩௪-௯௰-௹௺-௿a-zA-Za-zA-Z0-90-9",e.ta.trimmer=e.trimmerSupport.generateTrimmer(e.ta.wordCharacters),e.Pipeline.registerFunction(e.ta.trimmer,"trimmer-ta"),e.ta.stopWordFilter=e.generateStopWordFilter("அங்கு அங்கே அது அதை அந்த அவர் அவர்கள் அவள் அவன் அவை ஆக ஆகவே ஆகையால் ஆதலால் ஆதலினால் ஆனாலும் ஆனால் இங்கு இங்கே இது இதை இந்த இப்படி இவர் இவர்கள் இவள் இவன் இவை இவ்வளவு உனக்கு உனது உன் உன்னால் எங்கு எங்கே எது எதை எந்த எப்படி எவர் எவர்கள் எவள் எவன் எவை எவ்வளவு எனக்கு எனது எனவே என் என்ன என்னால் ஏது ஏன் தனது தன்னால் தானே தான் நாங்கள் நாம் நான் நீ நீங்கள்".split(" ")),e.ta.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.ta.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.ta.stemmer,"stemmer-ta"),e.Pipeline.registerFunction(e.ta.stopWordFilter,"stopWordFilter-ta")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.th.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.th.min.js new file mode 100644 index 00000000..dee3aac6 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.th.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.th=function(){this.pipeline.reset(),this.pipeline.add(e.th.trimmer),r?this.tokenizer=e.th.tokenizer:(e.tokenizer&&(e.tokenizer=e.th.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.th.tokenizer))},e.th.wordCharacters="[฀-๿]",e.th.trimmer=e.trimmerSupport.generateTrimmer(e.th.wordCharacters),e.Pipeline.registerFunction(e.th.trimmer,"trimmer-th");var t=e.wordcut;t.init(),e.th.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t):t});var n=i.toString().replace(/^\s+/,"");return t.cut(n).split("|")}}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.tr.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.tr.min.js new file mode 100644 index 00000000..563f6ec1 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.tr.min.js @@ -0,0 +1,18 @@ +/*! + * Lunr languages, `Turkish` language + * https://github.com/MihaiValentin/lunr-languages + * + * Copyright 2014, Mihai Valentin + * http://www.mozilla.org/MPL/ + */ +/*! 
+ * based on + * Snowball JavaScript Library v0.3 + * http://code.google.com/p/urim/ + * http://snowball.tartarus.org/ + * + * Copyright 2010, Oleg Mazko + * http://www.mozilla.org/MPL/ + */ + +!function(r,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(r.lunr)}(this,function(){return function(r){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");r.tr=function(){this.pipeline.reset(),this.pipeline.add(r.tr.trimmer,r.tr.stopWordFilter,r.tr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(r.tr.stemmer))},r.tr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",r.tr.trimmer=r.trimmerSupport.generateTrimmer(r.tr.wordCharacters),r.Pipeline.registerFunction(r.tr.trimmer,"trimmer-tr"),r.tr.stemmer=function(){var i=r.stemmerSupport.Among,e=r.stemmerSupport.SnowballProgram,n=new function(){function r(r,i,e){for(;;){var n=Dr.limit-Dr.cursor;if(Dr.in_grouping_b(r,i,e)){Dr.cursor=Dr.limit-n;break}if(Dr.cursor=Dr.limit-n,Dr.cursor<=Dr.limit_backward)return!1;Dr.cursor--}return!0}function n(){var i,e;i=Dr.limit-Dr.cursor,r(Wr,97,305);for(var n=0;nDr.limit_backward&&(Dr.cursor--,e=Dr.limit-Dr.cursor,i()))?(Dr.cursor=Dr.limit-e,!0):(Dr.cursor=Dr.limit-n,r()?(Dr.cursor=Dr.limit-n,!1):(Dr.cursor=Dr.limit-n,!(Dr.cursor<=Dr.limit_backward)&&(Dr.cursor--,!!i()&&(Dr.cursor=Dr.limit-n,!0))))}function u(r){return t(r,function(){return Dr.in_grouping_b(Wr,97,305)})}function o(){return u(function(){return Dr.eq_s_b(1,"n")})}function s(){return u(function(){return Dr.eq_s_b(1,"s")})}function c(){return u(function(){return Dr.eq_s_b(1,"y")})}function l(){return t(function(){return Dr.in_grouping_b(Lr,105,305)},function(){return Dr.out_grouping_b(Wr,97,305)})}function a(){return Dr.find_among_b(ur,10)&&l()}function m(){return n()&&Dr.in_grouping_b(Lr,105,305)&&s()}function d(){return Dr.find_among_b(or,2)}function f(){return n()&&Dr.in_grouping_b(Lr,105,305)&&c()}function b(){return n()&&Dr.find_among_b(sr,4)}function w(){return n()&&Dr.find_among_b(cr,4)&&o()}function _(){return n()&&Dr.find_among_b(lr,2)&&c()}function k(){return n()&&Dr.find_among_b(ar,2)}function p(){return n()&&Dr.find_among_b(mr,4)}function g(){return n()&&Dr.find_among_b(dr,2)}function y(){return n()&&Dr.find_among_b(fr,4)}function z(){return n()&&Dr.find_among_b(br,2)}function v(){return n()&&Dr.find_among_b(wr,2)&&c()}function h(){return Dr.eq_s_b(2,"ki")}function q(){return n()&&Dr.find_among_b(_r,2)&&o()}function C(){return n()&&Dr.find_among_b(kr,4)&&c()}function P(){return n()&&Dr.find_among_b(pr,4)}function F(){return n()&&Dr.find_among_b(gr,4)&&c()}function S(){return Dr.find_among_b(yr,4)}function W(){return n()&&Dr.find_among_b(zr,2)}function L(){return n()&&Dr.find_among_b(vr,4)}function x(){return n()&&Dr.find_among_b(hr,8)}function A(){return Dr.find_among_b(qr,2)}function E(){return n()&&Dr.find_among_b(Cr,32)&&c()}function j(){return Dr.find_among_b(Pr,8)&&c()}function T(){return n()&&Dr.find_among_b(Fr,4)&&c()}function Z(){return Dr.eq_s_b(3,"ken")&&c()}function B(){var r=Dr.limit-Dr.cursor;return!(T()||(Dr.cursor=Dr.limit-r,E()||(Dr.cursor=Dr.limit-r,j()||(Dr.cursor=Dr.limit-r,Z()))))}function D(){if(A()){var 
r=Dr.limit-Dr.cursor;if(S()||(Dr.cursor=Dr.limit-r,W()||(Dr.cursor=Dr.limit-r,C()||(Dr.cursor=Dr.limit-r,P()||(Dr.cursor=Dr.limit-r,F()||(Dr.cursor=Dr.limit-r))))),T())return!1}return!0}function G(){if(W()){Dr.bra=Dr.cursor,Dr.slice_del();var r=Dr.limit-Dr.cursor;return Dr.ket=Dr.cursor,x()||(Dr.cursor=Dr.limit-r,E()||(Dr.cursor=Dr.limit-r,j()||(Dr.cursor=Dr.limit-r,T()||(Dr.cursor=Dr.limit-r)))),nr=!1,!1}return!0}function H(){if(!L())return!0;var r=Dr.limit-Dr.cursor;return!E()&&(Dr.cursor=Dr.limit-r,!j())}function I(){var r,i=Dr.limit-Dr.cursor;return!(S()||(Dr.cursor=Dr.limit-i,F()||(Dr.cursor=Dr.limit-i,P()||(Dr.cursor=Dr.limit-i,C()))))||(Dr.bra=Dr.cursor,Dr.slice_del(),r=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,T()||(Dr.cursor=Dr.limit-r),!1)}function J(){var r,i=Dr.limit-Dr.cursor;if(Dr.ket=Dr.cursor,nr=!0,B()&&(Dr.cursor=Dr.limit-i,D()&&(Dr.cursor=Dr.limit-i,G()&&(Dr.cursor=Dr.limit-i,H()&&(Dr.cursor=Dr.limit-i,I()))))){if(Dr.cursor=Dr.limit-i,!x())return;Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,r=Dr.limit-Dr.cursor,S()||(Dr.cursor=Dr.limit-r,W()||(Dr.cursor=Dr.limit-r,C()||(Dr.cursor=Dr.limit-r,P()||(Dr.cursor=Dr.limit-r,F()||(Dr.cursor=Dr.limit-r))))),T()||(Dr.cursor=Dr.limit-r)}Dr.bra=Dr.cursor,Dr.slice_del()}function K(){var r,i,e,n;if(Dr.ket=Dr.cursor,h()){if(r=Dr.limit-Dr.cursor,p())return Dr.bra=Dr.cursor,Dr.slice_del(),i=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,W()?(Dr.bra=Dr.cursor,Dr.slice_del(),K()):(Dr.cursor=Dr.limit-i,a()&&(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()))),!0;if(Dr.cursor=Dr.limit-r,w()){if(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,e=Dr.limit-Dr.cursor,d())Dr.bra=Dr.cursor,Dr.slice_del();else{if(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,!a()&&(Dr.cursor=Dr.limit-e,!m()&&(Dr.cursor=Dr.limit-e,!K())))return!0;Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K())}return!0}if(Dr.cursor=Dr.limit-r,g()){if(n=Dr.limit-Dr.cursor,d())Dr.bra=Dr.cursor,Dr.slice_del();else if(Dr.cursor=Dr.limit-n,m())Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K());else if(Dr.cursor=Dr.limit-n,!K())return!1;return!0}}return!1}function M(r){if(Dr.ket=Dr.cursor,!g()&&(Dr.cursor=Dr.limit-r,!k()))return!1;var i=Dr.limit-Dr.cursor;if(d())Dr.bra=Dr.cursor,Dr.slice_del();else if(Dr.cursor=Dr.limit-i,m())Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K());else if(Dr.cursor=Dr.limit-i,!K())return!1;return!0}function N(r){if(Dr.ket=Dr.cursor,!z()&&(Dr.cursor=Dr.limit-r,!b()))return!1;var i=Dr.limit-Dr.cursor;return!(!m()&&(Dr.cursor=Dr.limit-i,!d()))&&(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()),!0)}function O(){var r,i=Dr.limit-Dr.cursor;return Dr.ket=Dr.cursor,!(!w()&&(Dr.cursor=Dr.limit-i,!v()))&&(Dr.bra=Dr.cursor,Dr.slice_del(),r=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,!(!W()||(Dr.bra=Dr.cursor,Dr.slice_del(),!K()))||(Dr.cursor=Dr.limit-r,Dr.ket=Dr.cursor,!(a()||(Dr.cursor=Dr.limit-r,m()||(Dr.cursor=Dr.limit-r,K())))||(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()),!0)))}function Q(){var r,i,e=Dr.limit-Dr.cursor;if(Dr.ket=Dr.cursor,!p()&&(Dr.cursor=Dr.limit-e,!f()&&(Dr.cursor=Dr.limit-e,!_())))return!1;if(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,r=Dr.limit-Dr.cursor,a())Dr.bra=Dr.cursor,Dr.slice_del(),i=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,W()||(Dr.cursor=Dr.limit-i);else if(Dr.cursor=Dr.limit-r,!W())return!0;return 
Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,K(),!0}function R(){var r,i,e=Dr.limit-Dr.cursor;if(Dr.ket=Dr.cursor,W())return Dr.bra=Dr.cursor,Dr.slice_del(),void K();if(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,q())if(Dr.bra=Dr.cursor,Dr.slice_del(),r=Dr.limit-Dr.cursor,Dr.ket=Dr.cursor,d())Dr.bra=Dr.cursor,Dr.slice_del();else{if(Dr.cursor=Dr.limit-r,Dr.ket=Dr.cursor,!a()&&(Dr.cursor=Dr.limit-r,!m())){if(Dr.cursor=Dr.limit-r,Dr.ket=Dr.cursor,!W())return;if(Dr.bra=Dr.cursor,Dr.slice_del(),!K())return}Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K())}else if(Dr.cursor=Dr.limit-e,!M(e)&&(Dr.cursor=Dr.limit-e,!N(e))){if(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,y())return Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,i=Dr.limit-Dr.cursor,void(a()?(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K())):(Dr.cursor=Dr.limit-i,W()?(Dr.bra=Dr.cursor,Dr.slice_del(),K()):(Dr.cursor=Dr.limit-i,K())));if(Dr.cursor=Dr.limit-e,!O()){if(Dr.cursor=Dr.limit-e,d())return Dr.bra=Dr.cursor,void Dr.slice_del();Dr.cursor=Dr.limit-e,K()||(Dr.cursor=Dr.limit-e,Q()||(Dr.cursor=Dr.limit-e,Dr.ket=Dr.cursor,(a()||(Dr.cursor=Dr.limit-e,m()))&&(Dr.bra=Dr.cursor,Dr.slice_del(),Dr.ket=Dr.cursor,W()&&(Dr.bra=Dr.cursor,Dr.slice_del(),K()))))}}}function U(){var r;if(Dr.ket=Dr.cursor,r=Dr.find_among_b(Sr,4))switch(Dr.bra=Dr.cursor,r){case 1:Dr.slice_from("p");break;case 2:Dr.slice_from("ç");break;case 3:Dr.slice_from("t");break;case 4:Dr.slice_from("k")}}function V(){for(;;){var r=Dr.limit-Dr.cursor;if(Dr.in_grouping_b(Wr,97,305)){Dr.cursor=Dr.limit-r;break}if(Dr.cursor=Dr.limit-r,Dr.cursor<=Dr.limit_backward)return!1;Dr.cursor--}return!0}function X(r,i,e){if(Dr.cursor=Dr.limit-r,V()){var n=Dr.limit-Dr.cursor;if(!Dr.eq_s_b(1,i)&&(Dr.cursor=Dr.limit-n,!Dr.eq_s_b(1,e)))return!0;Dr.cursor=Dr.limit-r;var t=Dr.cursor;return Dr.insert(Dr.cursor,Dr.cursor,e),Dr.cursor=t,!1}return!0}function Y(){var r=Dr.limit-Dr.cursor;(Dr.eq_s_b(1,"d")||(Dr.cursor=Dr.limit-r,Dr.eq_s_b(1,"g")))&&X(r,"a","ı")&&X(r,"e","i")&&X(r,"o","u")&&X(r,"ö","ü")}function $(){for(var r,i=Dr.cursor,e=2;;){for(r=Dr.cursor;!Dr.in_grouping(Wr,97,305);){if(Dr.cursor>=Dr.limit)return Dr.cursor=r,!(e>0)&&(Dr.cursor=i,!0);Dr.cursor++}e--}}function rr(r,i,e){for(;!Dr.eq_s(i,e);){if(Dr.cursor>=Dr.limit)return!0;Dr.cursor++}return(tr=i)!=Dr.limit||(Dr.cursor=r,!1)}function ir(){var r=Dr.cursor;return!rr(r,2,"ad")||(Dr.cursor=r,!rr(r,5,"soyad"))}function er(){var r=Dr.cursor;return!ir()&&(Dr.limit_backward=r,Dr.cursor=Dr.limit,Y(),Dr.cursor=Dr.limit,U(),!0)}var nr,tr,ur=[new i("m",-1,-1),new i("n",-1,-1),new i("miz",-1,-1),new i("niz",-1,-1),new i("muz",-1,-1),new i("nuz",-1,-1),new i("müz",-1,-1),new i("nüz",-1,-1),new i("mız",-1,-1),new i("nız",-1,-1)],or=[new i("leri",-1,-1),new i("ları",-1,-1)],sr=[new i("ni",-1,-1),new i("nu",-1,-1),new i("nü",-1,-1),new i("nı",-1,-1)],cr=[new i("in",-1,-1),new i("un",-1,-1),new i("ün",-1,-1),new i("ın",-1,-1)],lr=[new i("a",-1,-1),new i("e",-1,-1)],ar=[new i("na",-1,-1),new i("ne",-1,-1)],mr=[new i("da",-1,-1),new i("ta",-1,-1),new i("de",-1,-1),new i("te",-1,-1)],dr=[new i("nda",-1,-1),new i("nde",-1,-1)],fr=[new i("dan",-1,-1),new i("tan",-1,-1),new i("den",-1,-1),new i("ten",-1,-1)],br=[new i("ndan",-1,-1),new i("nden",-1,-1)],wr=[new i("la",-1,-1),new i("le",-1,-1)],_r=[new i("ca",-1,-1),new i("ce",-1,-1)],kr=[new i("im",-1,-1),new i("um",-1,-1),new i("üm",-1,-1),new i("ım",-1,-1)],pr=[new i("sin",-1,-1),new i("sun",-1,-1),new i("sün",-1,-1),new 
i("sın",-1,-1)],gr=[new i("iz",-1,-1),new i("uz",-1,-1),new i("üz",-1,-1),new i("ız",-1,-1)],yr=[new i("siniz",-1,-1),new i("sunuz",-1,-1),new i("sünüz",-1,-1),new i("sınız",-1,-1)],zr=[new i("lar",-1,-1),new i("ler",-1,-1)],vr=[new i("niz",-1,-1),new i("nuz",-1,-1),new i("nüz",-1,-1),new i("nız",-1,-1)],hr=[new i("dir",-1,-1),new i("tir",-1,-1),new i("dur",-1,-1),new i("tur",-1,-1),new i("dür",-1,-1),new i("tür",-1,-1),new i("dır",-1,-1),new i("tır",-1,-1)],qr=[new i("casına",-1,-1),new i("cesine",-1,-1)],Cr=[new i("di",-1,-1),new i("ti",-1,-1),new i("dik",-1,-1),new i("tik",-1,-1),new i("duk",-1,-1),new i("tuk",-1,-1),new i("dük",-1,-1),new i("tük",-1,-1),new i("dık",-1,-1),new i("tık",-1,-1),new i("dim",-1,-1),new i("tim",-1,-1),new i("dum",-1,-1),new i("tum",-1,-1),new i("düm",-1,-1),new i("tüm",-1,-1),new i("dım",-1,-1),new i("tım",-1,-1),new i("din",-1,-1),new i("tin",-1,-1),new i("dun",-1,-1),new i("tun",-1,-1),new i("dün",-1,-1),new i("tün",-1,-1),new i("dın",-1,-1),new i("tın",-1,-1),new i("du",-1,-1),new i("tu",-1,-1),new i("dü",-1,-1),new i("tü",-1,-1),new i("dı",-1,-1),new i("tı",-1,-1)],Pr=[new i("sa",-1,-1),new i("se",-1,-1),new i("sak",-1,-1),new i("sek",-1,-1),new i("sam",-1,-1),new i("sem",-1,-1),new i("san",-1,-1),new i("sen",-1,-1)],Fr=[new i("miş",-1,-1),new i("muş",-1,-1),new i("müş",-1,-1),new i("mış",-1,-1)],Sr=[new i("b",-1,1),new i("c",-1,2),new i("d",-1,3),new i("ğ",-1,4)],Wr=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1],Lr=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1],xr=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],Ar=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130],Er=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],jr=[17],Tr=[65],Zr=[65],Br=[["a",xr,97,305],["e",Ar,101,252],["ı",Er,97,305],["i",jr,101,105],["o",Tr,111,117],["ö",Zr,246,252],["u",Tr,111,117]],Dr=new e;this.setCurrent=function(r){Dr.setCurrent(r)},this.getCurrent=function(){return Dr.getCurrent()},this.stem=function(){return!!($()&&(Dr.limit_backward=Dr.cursor,Dr.cursor=Dr.limit,J(),Dr.cursor=Dr.limit,nr&&(R(),Dr.cursor=Dr.limit_backward,er())))}};return function(r){return"function"==typeof r.update?r.update(function(r){return n.setCurrent(r),n.stem(),n.getCurrent()}):(n.setCurrent(r),n.stem(),n.getCurrent())}}(),r.Pipeline.registerFunction(r.tr.stemmer,"stemmer-tr"),r.tr.stopWordFilter=r.generateStopWordFilter("acaba altmış altı ama ancak arada aslında ayrıca bana bazı belki ben benden beni benim beri beş bile bin bir biri birkaç birkez birçok birşey birşeyi biz bizden bize bizi bizim bu buna bunda bundan bunlar bunları bunların bunu bunun burada böyle böylece da daha dahi de defa değil diye diğer doksan dokuz dolayı dolayısıyla dört edecek eden ederek edilecek ediliyor edilmesi ediyor elli en etmesi etti ettiği ettiğini eğer gibi göre halen hangi hatta hem henüz hep hepsi her herhangi herkesin hiç hiçbir iki ile ilgili ise itibaren itibariyle için işte kadar karşın katrilyon kendi kendilerine kendini kendisi kendisine kendisini kez ki kim kimden kime kimi kimse kırk milyar milyon mu mü mı nasıl ne neden nedenle nerde nerede nereye niye niçin o olan olarak oldu olduklarını olduğu olduğunu olmadı olmadığı olmak olması olmayan olmaz olsa olsun olup olur olursa oluyor on ona ondan onlar onlardan onları onların onu onun otuz oysa pek rağmen sadece sanki sekiz seksen sen senden seni senin siz sizden sizi sizin tarafından trilyon tüm var vardı ve veya ya yani yapacak yapmak yaptı yaptıkları yaptığı yaptığını yapılan yapılması yapıyor yedi yerine yetmiş yine yirmi 
yoksa yüz zaten çok çünkü öyle üzere üç şey şeyden şeyi şeyler şu şuna şunda şundan şunları şunu şöyle".split(" ")),r.Pipeline.registerFunction(r.tr.stopWordFilter,"stopWordFilter-tr")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.vi.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.vi.min.js new file mode 100644 index 00000000..22aed28c --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.vi.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.vi=function(){this.pipeline.reset(),this.pipeline.add(e.vi.stopWordFilter,e.vi.trimmer)},e.vi.wordCharacters="[A-Za-ẓ̀͐́͑̉̃̓ÂâÊêÔôĂ-ăĐ-đƠ-ơƯ-ư]",e.vi.trimmer=e.trimmerSupport.generateTrimmer(e.vi.wordCharacters),e.Pipeline.registerFunction(e.vi.trimmer,"trimmer-vi"),e.vi.stopWordFilter=e.generateStopWordFilter("là cái nhưng mà".split(" "))}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/min/lunr.zh.min.js b/2.0.0/assets/javascripts/lunr/min/lunr.zh.min.js new file mode 100644 index 00000000..9838ef96 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/min/lunr.zh.min.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r(require("@node-rs/jieba")):r()(e.lunr)}(this,function(e){return function(r,t){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var i="2"==r.version[0];r.zh=function(){this.pipeline.reset(),this.pipeline.add(r.zh.trimmer,r.zh.stopWordFilter,r.zh.stemmer),i?this.tokenizer=r.zh.tokenizer:(r.tokenizer&&(r.tokenizer=r.zh.tokenizer),this.tokenizerFn&&(this.tokenizerFn=r.zh.tokenizer))},r.zh.tokenizer=function(n){if(!arguments.length||null==n||void 0==n)return[];if(Array.isArray(n))return n.map(function(e){return i?new r.Token(e.toLowerCase()):e.toLowerCase()});t&&e.load(t);var o=n.toString().trim().toLowerCase(),s=[];e.cut(o,!0).forEach(function(e){s=s.concat(e.split(" "))}),s=s.filter(function(e){return!!e});var u=0;return s.map(function(e,t){if(i){var n=o.indexOf(e,u),s={};return s.position=[n,e.length],s.index=t,u=n,new r.Token(e,s)}return e})},r.zh.wordCharacters="\\w一-龥",r.zh.trimmer=r.trimmerSupport.generateTrimmer(r.zh.wordCharacters),r.Pipeline.registerFunction(r.zh.trimmer,"trimmer-zh"),r.zh.stemmer=function(){return function(e){return e}}(),r.Pipeline.registerFunction(r.zh.stemmer,"stemmer-zh"),r.zh.stopWordFilter=r.generateStopWordFilter("的 一 不 在 人 有 是 为 以 于 上 他 而 后 之 来 及 了 因 下 可 到 由 这 与 也 此 但 并 个 其 已 无 小 我 们 起 最 再 今 去 好 只 又 或 很 亦 某 把 那 你 乃 它 吧 被 比 别 趁 当 从 到 得 打 凡 儿 尔 该 各 给 跟 和 何 还 即 几 既 看 据 距 靠 啦 了 另 么 每 们 嘛 拿 哪 那 您 凭 且 却 让 仍 啥 如 若 使 谁 虽 随 同 所 她 哇 嗡 往 哪 些 向 沿 哟 用 于 咱 则 怎 曾 至 致 着 诸 自".split(" ")),r.Pipeline.registerFunction(r.zh.stopWordFilter,"stopWordFilter-zh")}}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/tinyseg.js b/2.0.0/assets/javascripts/lunr/tinyseg.js new file mode 100644 index 00000000..167fa6dd --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/tinyseg.js @@ -0,0 +1,206 @@ +/** + * export the module via AMD, CommonJS or as a browser global + * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js + */ +;(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(factory) + } else if (typeof exports === 'object') { + /** + * Node. Does not work with strict CommonJS, but + * only CommonJS-like environments that support module.exports, + * like Node. + */ + module.exports = factory() + } else { + // Browser globals (root is window) + factory()(root.lunr); + } +}(this, function () { + /** + * Just return a value to define the module export. + * This example returns an object, but the module + * can return a function as the exported value. + */ + + return function(lunr) { + // TinySegmenter 0.1 -- Super compact Japanese tokenizer in Javascript + // (c) 2008 Taku Kudo + // TinySegmenter is freely distributable under the terms of a new BSD licence. 
+ // For details, see http://chasen.org/~taku/software/TinySegmenter/LICENCE.txt + + function TinySegmenter() { + var patterns = { + "[一二三四五六七八九十百千万億兆]":"M", + "[一-龠々〆ヵヶ]":"H", + "[ぁ-ん]":"I", + "[ァ-ヴーア-ン゙ー]":"K", + "[a-zA-Za-zA-Z]":"A", + "[0-90-9]":"N" + } + this.chartype_ = []; + for (var i in patterns) { + var regexp = new RegExp(i); + this.chartype_.push([regexp, patterns[i]]); + } + + this.BIAS__ = -332 + this.BC1__ = {"HH":6,"II":2461,"KH":406,"OH":-1378}; + this.BC2__ = {"AA":-3267,"AI":2744,"AN":-878,"HH":-4070,"HM":-1711,"HN":4012,"HO":3761,"IA":1327,"IH":-1184,"II":-1332,"IK":1721,"IO":5492,"KI":3831,"KK":-8741,"MH":-3132,"MK":3334,"OO":-2920}; + this.BC3__ = {"HH":996,"HI":626,"HK":-721,"HN":-1307,"HO":-836,"IH":-301,"KK":2762,"MK":1079,"MM":4034,"OA":-1652,"OH":266}; + this.BP1__ = {"BB":295,"OB":304,"OO":-125,"UB":352}; + this.BP2__ = {"BO":60,"OO":-1762}; + this.BQ1__ = {"BHH":1150,"BHM":1521,"BII":-1158,"BIM":886,"BMH":1208,"BNH":449,"BOH":-91,"BOO":-2597,"OHI":451,"OIH":-296,"OKA":1851,"OKH":-1020,"OKK":904,"OOO":2965}; + this.BQ2__ = {"BHH":118,"BHI":-1159,"BHM":466,"BIH":-919,"BKK":-1720,"BKO":864,"OHH":-1139,"OHM":-181,"OIH":153,"UHI":-1146}; + this.BQ3__ = {"BHH":-792,"BHI":2664,"BII":-299,"BKI":419,"BMH":937,"BMM":8335,"BNN":998,"BOH":775,"OHH":2174,"OHM":439,"OII":280,"OKH":1798,"OKI":-793,"OKO":-2242,"OMH":-2402,"OOO":11699}; + this.BQ4__ = {"BHH":-3895,"BIH":3761,"BII":-4654,"BIK":1348,"BKK":-1806,"BMI":-3385,"BOO":-12396,"OAH":926,"OHH":266,"OHK":-2036,"ONN":-973}; + this.BW1__ = {",と":660,",同":727,"B1あ":1404,"B1同":542,"、と":660,"、同":727,"」と":1682,"あっ":1505,"いう":1743,"いっ":-2055,"いる":672,"うし":-4817,"うん":665,"から":3472,"がら":600,"こう":-790,"こと":2083,"こん":-1262,"さら":-4143,"さん":4573,"した":2641,"して":1104,"すで":-3399,"そこ":1977,"それ":-871,"たち":1122,"ため":601,"った":3463,"つい":-802,"てい":805,"てき":1249,"でき":1127,"です":3445,"では":844,"とい":-4915,"とみ":1922,"どこ":3887,"ない":5713,"なっ":3015,"など":7379,"なん":-1113,"にし":2468,"には":1498,"にも":1671,"に対":-912,"の一":-501,"の中":741,"ませ":2448,"まで":1711,"まま":2600,"まる":-2155,"やむ":-1947,"よっ":-2565,"れた":2369,"れで":-913,"をし":1860,"を見":731,"亡く":-1886,"京都":2558,"取り":-2784,"大き":-2604,"大阪":1497,"平方":-2314,"引き":-1336,"日本":-195,"本当":-2423,"毎日":-2113,"目指":-724,"B1あ":1404,"B1同":542,"」と":1682}; + this.BW2__ = {"..":-11822,"11":-669,"――":-5730,"−−":-13175,"いう":-1609,"うか":2490,"かし":-1350,"かも":-602,"から":-7194,"かれ":4612,"がい":853,"がら":-3198,"きた":1941,"くな":-1597,"こと":-8392,"この":-4193,"させ":4533,"され":13168,"さん":-3977,"しい":-1819,"しか":-545,"した":5078,"して":972,"しな":939,"その":-3744,"たい":-1253,"たた":-662,"ただ":-3857,"たち":-786,"たと":1224,"たは":-939,"った":4589,"って":1647,"っと":-2094,"てい":6144,"てき":3640,"てく":2551,"ては":-3110,"ても":-3065,"でい":2666,"でき":-1528,"でし":-3828,"です":-4761,"でも":-4203,"とい":1890,"とこ":-1746,"とと":-2279,"との":720,"とみ":5168,"とも":-3941,"ない":-2488,"なが":-1313,"など":-6509,"なの":2614,"なん":3099,"にお":-1615,"にし":2748,"にな":2454,"によ":-7236,"に対":-14943,"に従":-4688,"に関":-11388,"のか":2093,"ので":-7059,"のに":-6041,"のの":-6125,"はい":1073,"はが":-1033,"はず":-2532,"ばれ":1813,"まし":-1316,"まで":-6621,"まれ":5409,"めて":-3153,"もい":2230,"もの":-10713,"らか":-944,"らし":-1611,"らに":-1897,"りし":651,"りま":1620,"れた":4270,"れて":849,"れば":4114,"ろう":6067,"われ":7901,"を通":-11877,"んだ":728,"んな":-4115,"一人":602,"一方":-1375,"一日":970,"一部":-1051,"上が":-4479,"会社":-1116,"出て":2163,"分の":-7758,"同党":970,"同日":-913,"大阪":-2471,"委員":-1250,"少な":-1050,"年度":-8669,"年間":-1626,"府県":-2363,"手権":-1982,"新聞":-4066,"日新":-722,"日本":-7068,"日米":3372,"曜日":-601,"朝鮮":-2355,"本人":-2697,"東京":-1543,"然と":-1384,"社会":-1276,"立て":-990,"第に":-1612,"米国":-4268,"11":-669}; + this.BW3__ = 
{"あた":-2194,"あり":719,"ある":3846,"い.":-1185,"い。":-1185,"いい":5308,"いえ":2079,"いく":3029,"いた":2056,"いっ":1883,"いる":5600,"いわ":1527,"うち":1117,"うと":4798,"えと":1454,"か.":2857,"か。":2857,"かけ":-743,"かっ":-4098,"かに":-669,"から":6520,"かり":-2670,"が,":1816,"が、":1816,"がき":-4855,"がけ":-1127,"がっ":-913,"がら":-4977,"がり":-2064,"きた":1645,"けど":1374,"こと":7397,"この":1542,"ころ":-2757,"さい":-714,"さを":976,"し,":1557,"し、":1557,"しい":-3714,"した":3562,"して":1449,"しな":2608,"しま":1200,"す.":-1310,"す。":-1310,"する":6521,"ず,":3426,"ず、":3426,"ずに":841,"そう":428,"た.":8875,"た。":8875,"たい":-594,"たの":812,"たり":-1183,"たる":-853,"だ.":4098,"だ。":4098,"だっ":1004,"った":-4748,"って":300,"てい":6240,"てお":855,"ても":302,"です":1437,"でに":-1482,"では":2295,"とう":-1387,"とし":2266,"との":541,"とも":-3543,"どう":4664,"ない":1796,"なく":-903,"など":2135,"に,":-1021,"に、":-1021,"にし":1771,"にな":1906,"には":2644,"の,":-724,"の、":-724,"の子":-1000,"は,":1337,"は、":1337,"べき":2181,"まし":1113,"ます":6943,"まっ":-1549,"まで":6154,"まれ":-793,"らし":1479,"られ":6820,"るる":3818,"れ,":854,"れ、":854,"れた":1850,"れて":1375,"れば":-3246,"れる":1091,"われ":-605,"んだ":606,"んで":798,"カ月":990,"会議":860,"入り":1232,"大会":2217,"始め":1681,"市":965,"新聞":-5055,"日,":974,"日、":974,"社会":2024,"カ月":990}; + this.TC1__ = {"AAA":1093,"HHH":1029,"HHM":580,"HII":998,"HOH":-390,"HOM":-331,"IHI":1169,"IOH":-142,"IOI":-1015,"IOM":467,"MMH":187,"OOI":-1832}; + this.TC2__ = {"HHO":2088,"HII":-1023,"HMM":-1154,"IHI":-1965,"KKH":703,"OII":-2649}; + this.TC3__ = {"AAA":-294,"HHH":346,"HHI":-341,"HII":-1088,"HIK":731,"HOH":-1486,"IHH":128,"IHI":-3041,"IHO":-1935,"IIH":-825,"IIM":-1035,"IOI":-542,"KHH":-1216,"KKA":491,"KKH":-1217,"KOK":-1009,"MHH":-2694,"MHM":-457,"MHO":123,"MMH":-471,"NNH":-1689,"NNO":662,"OHO":-3393}; + this.TC4__ = {"HHH":-203,"HHI":1344,"HHK":365,"HHM":-122,"HHN":182,"HHO":669,"HIH":804,"HII":679,"HOH":446,"IHH":695,"IHO":-2324,"IIH":321,"III":1497,"IIO":656,"IOO":54,"KAK":4845,"KKA":3386,"KKK":3065,"MHH":-405,"MHI":201,"MMH":-241,"MMM":661,"MOM":841}; + this.TQ1__ = {"BHHH":-227,"BHHI":316,"BHIH":-132,"BIHH":60,"BIII":1595,"BNHH":-744,"BOHH":225,"BOOO":-908,"OAKK":482,"OHHH":281,"OHIH":249,"OIHI":200,"OIIH":-68}; + this.TQ2__ = {"BIHH":-1401,"BIII":-1033,"BKAK":-543,"BOOO":-5591}; + this.TQ3__ = {"BHHH":478,"BHHM":-1073,"BHIH":222,"BHII":-504,"BIIH":-116,"BIII":-105,"BMHI":-863,"BMHM":-464,"BOMH":620,"OHHH":346,"OHHI":1729,"OHII":997,"OHMH":481,"OIHH":623,"OIIH":1344,"OKAK":2792,"OKHH":587,"OKKA":679,"OOHH":110,"OOII":-685}; + this.TQ4__ = {"BHHH":-721,"BHHM":-3604,"BHII":-966,"BIIH":-607,"BIII":-2181,"OAAA":-2763,"OAKK":180,"OHHH":-294,"OHHI":2446,"OHHO":480,"OHIH":-1573,"OIHH":1935,"OIHI":-493,"OIIH":626,"OIII":-4007,"OKAK":-8156}; + this.TW1__ = {"につい":-4681,"東京都":2026}; + this.TW2__ = {"ある程":-2049,"いった":-1256,"ころが":-2434,"しょう":3873,"その後":-4430,"だって":-1049,"ていた":1833,"として":-4657,"ともに":-4517,"もので":1882,"一気に":-792,"初めて":-1512,"同時に":-8097,"大きな":-1255,"対して":-2721,"社会党":-3216}; + this.TW3__ = {"いただ":-1734,"してい":1314,"として":-4314,"につい":-5483,"にとっ":-5989,"に当た":-6247,"ので,":-727,"ので、":-727,"のもの":-600,"れから":-3752,"十二月":-2287}; + this.TW4__ = {"いう.":8576,"いう。":8576,"からな":-2348,"してい":2958,"たが,":1516,"たが、":1516,"ている":1538,"という":1349,"ました":5543,"ません":1097,"ようと":-4258,"よると":5865}; + this.UC1__ = {"A":484,"K":93,"M":645,"O":-505}; + this.UC2__ = {"A":819,"H":1059,"I":409,"M":3987,"N":5775,"O":646}; + this.UC3__ = {"A":-1370,"I":2311}; + this.UC4__ = {"A":-2643,"H":1809,"I":-1032,"K":-3450,"M":3565,"N":3876,"O":6646}; + this.UC5__ = {"H":313,"I":-1238,"K":-799,"M":539,"O":-831}; + this.UC6__ = {"H":-506,"I":-253,"K":87,"M":247,"O":-387}; + this.UP1__ = {"O":-214}; + 
this.UP2__ = {"B":69,"O":935}; + this.UP3__ = {"B":189}; + this.UQ1__ = {"BH":21,"BI":-12,"BK":-99,"BN":142,"BO":-56,"OH":-95,"OI":477,"OK":410,"OO":-2422}; + this.UQ2__ = {"BH":216,"BI":113,"OK":1759}; + this.UQ3__ = {"BA":-479,"BH":42,"BI":1913,"BK":-7198,"BM":3160,"BN":6427,"BO":14761,"OI":-827,"ON":-3212}; + this.UW1__ = {",":156,"、":156,"「":-463,"あ":-941,"う":-127,"が":-553,"き":121,"こ":505,"で":-201,"と":-547,"ど":-123,"に":-789,"の":-185,"は":-847,"も":-466,"や":-470,"よ":182,"ら":-292,"り":208,"れ":169,"を":-446,"ん":-137,"・":-135,"主":-402,"京":-268,"区":-912,"午":871,"国":-460,"大":561,"委":729,"市":-411,"日":-141,"理":361,"生":-408,"県":-386,"都":-718,"「":-463,"・":-135}; + this.UW2__ = {",":-829,"、":-829,"〇":892,"「":-645,"」":3145,"あ":-538,"い":505,"う":134,"お":-502,"か":1454,"が":-856,"く":-412,"こ":1141,"さ":878,"ざ":540,"し":1529,"す":-675,"せ":300,"そ":-1011,"た":188,"だ":1837,"つ":-949,"て":-291,"で":-268,"と":-981,"ど":1273,"な":1063,"に":-1764,"の":130,"は":-409,"ひ":-1273,"べ":1261,"ま":600,"も":-1263,"や":-402,"よ":1639,"り":-579,"る":-694,"れ":571,"を":-2516,"ん":2095,"ア":-587,"カ":306,"キ":568,"ッ":831,"三":-758,"不":-2150,"世":-302,"中":-968,"主":-861,"事":492,"人":-123,"会":978,"保":362,"入":548,"初":-3025,"副":-1566,"北":-3414,"区":-422,"大":-1769,"天":-865,"太":-483,"子":-1519,"学":760,"実":1023,"小":-2009,"市":-813,"年":-1060,"強":1067,"手":-1519,"揺":-1033,"政":1522,"文":-1355,"新":-1682,"日":-1815,"明":-1462,"最":-630,"朝":-1843,"本":-1650,"東":-931,"果":-665,"次":-2378,"民":-180,"気":-1740,"理":752,"発":529,"目":-1584,"相":-242,"県":-1165,"立":-763,"第":810,"米":509,"自":-1353,"行":838,"西":-744,"見":-3874,"調":1010,"議":1198,"込":3041,"開":1758,"間":-1257,"「":-645,"」":3145,"ッ":831,"ア":-587,"カ":306,"キ":568}; + this.UW3__ = {",":4889,"1":-800,"−":-1723,"、":4889,"々":-2311,"〇":5827,"」":2670,"〓":-3573,"あ":-2696,"い":1006,"う":2342,"え":1983,"お":-4864,"か":-1163,"が":3271,"く":1004,"け":388,"げ":401,"こ":-3552,"ご":-3116,"さ":-1058,"し":-395,"す":584,"せ":3685,"そ":-5228,"た":842,"ち":-521,"っ":-1444,"つ":-1081,"て":6167,"で":2318,"と":1691,"ど":-899,"な":-2788,"に":2745,"の":4056,"は":4555,"ひ":-2171,"ふ":-1798,"へ":1199,"ほ":-5516,"ま":-4384,"み":-120,"め":1205,"も":2323,"や":-788,"よ":-202,"ら":727,"り":649,"る":5905,"れ":2773,"わ":-1207,"を":6620,"ん":-518,"ア":551,"グ":1319,"ス":874,"ッ":-1350,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278,"・":-3794,"一":-1619,"下":-1759,"世":-2087,"両":3815,"中":653,"主":-758,"予":-1193,"二":974,"人":2742,"今":792,"他":1889,"以":-1368,"低":811,"何":4265,"作":-361,"保":-2439,"元":4858,"党":3593,"全":1574,"公":-3030,"六":755,"共":-1880,"円":5807,"再":3095,"分":457,"初":2475,"別":1129,"前":2286,"副":4437,"力":365,"動":-949,"務":-1872,"化":1327,"北":-1038,"区":4646,"千":-2309,"午":-783,"協":-1006,"口":483,"右":1233,"各":3588,"合":-241,"同":3906,"和":-837,"員":4513,"国":642,"型":1389,"場":1219,"外":-241,"妻":2016,"学":-1356,"安":-423,"実":-1008,"家":1078,"小":-513,"少":-3102,"州":1155,"市":3197,"平":-1804,"年":2416,"広":-1030,"府":1605,"度":1452,"建":-2352,"当":-3885,"得":1905,"思":-1291,"性":1822,"戸":-488,"指":-3973,"政":-2013,"教":-1479,"数":3222,"文":-1489,"新":1764,"日":2099,"旧":5792,"昨":-661,"時":-1248,"曜":-951,"最":-937,"月":4125,"期":360,"李":3094,"村":364,"東":-805,"核":5156,"森":2438,"業":484,"氏":2613,"民":-1694,"決":-1073,"法":1868,"海":-495,"無":979,"物":461,"特":-3850,"生":-273,"用":914,"町":1215,"的":7313,"直":-1835,"省":792,"県":6293,"知":-1528,"私":4231,"税":401,"立":-960,"第":1201,"米":7767,"系":3066,"約":3663,"級":1384,"統":-4229,"総":1163,"線":1255,"者":6457,"能":725,"自":-2869,"英":785,"見":1044,"調":-562,"財":-733,"費":1777,"車":1835,"軍":1375,"込":-1504,"通":-1136,"選":-681,"郎":1026,"郡":4404,"部":1200,"金":2163,"長":421,"開":-1432,"間":1302,"関":-1282,"雨":2009,"電":-1045,"非":2066,"駅":1620,"1":-800,"」":2670,"・":-
3794,"ッ":-1350,"ア":551,"グ":1319,"ス":874,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278}; + this.UW4__ = {",":3930,".":3508,"―":-4841,"、":3930,"。":3508,"〇":4999,"「":1895,"」":3798,"〓":-5156,"あ":4752,"い":-3435,"う":-640,"え":-2514,"お":2405,"か":530,"が":6006,"き":-4482,"ぎ":-3821,"く":-3788,"け":-4376,"げ":-4734,"こ":2255,"ご":1979,"さ":2864,"し":-843,"じ":-2506,"す":-731,"ず":1251,"せ":181,"そ":4091,"た":5034,"だ":5408,"ち":-3654,"っ":-5882,"つ":-1659,"て":3994,"で":7410,"と":4547,"な":5433,"に":6499,"ぬ":1853,"ね":1413,"の":7396,"は":8578,"ば":1940,"ひ":4249,"び":-4134,"ふ":1345,"へ":6665,"べ":-744,"ほ":1464,"ま":1051,"み":-2082,"む":-882,"め":-5046,"も":4169,"ゃ":-2666,"や":2795,"ょ":-1544,"よ":3351,"ら":-2922,"り":-9726,"る":-14896,"れ":-2613,"ろ":-4570,"わ":-1783,"を":13150,"ん":-2352,"カ":2145,"コ":1789,"セ":1287,"ッ":-724,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637,"・":-4371,"ー":-11870,"一":-2069,"中":2210,"予":782,"事":-190,"井":-1768,"人":1036,"以":544,"会":950,"体":-1286,"作":530,"側":4292,"先":601,"党":-2006,"共":-1212,"内":584,"円":788,"初":1347,"前":1623,"副":3879,"力":-302,"動":-740,"務":-2715,"化":776,"区":4517,"協":1013,"参":1555,"合":-1834,"和":-681,"員":-910,"器":-851,"回":1500,"国":-619,"園":-1200,"地":866,"場":-1410,"塁":-2094,"士":-1413,"多":1067,"大":571,"子":-4802,"学":-1397,"定":-1057,"寺":-809,"小":1910,"屋":-1328,"山":-1500,"島":-2056,"川":-2667,"市":2771,"年":374,"庁":-4556,"後":456,"性":553,"感":916,"所":-1566,"支":856,"改":787,"政":2182,"教":704,"文":522,"方":-856,"日":1798,"時":1829,"最":845,"月":-9066,"木":-485,"来":-442,"校":-360,"業":-1043,"氏":5388,"民":-2716,"気":-910,"沢":-939,"済":-543,"物":-735,"率":672,"球":-1267,"生":-1286,"産":-1101,"田":-2900,"町":1826,"的":2586,"目":922,"省":-3485,"県":2997,"空":-867,"立":-2112,"第":788,"米":2937,"系":786,"約":2171,"経":1146,"統":-1169,"総":940,"線":-994,"署":749,"者":2145,"能":-730,"般":-852,"行":-792,"規":792,"警":-1184,"議":-244,"谷":-1000,"賞":730,"車":-1481,"軍":1158,"輪":-1433,"込":-3370,"近":929,"道":-1291,"選":2596,"郎":-4866,"都":1192,"野":-1100,"銀":-2213,"長":357,"間":-2344,"院":-2297,"際":-2604,"電":-878,"領":-1659,"題":-792,"館":-1984,"首":1749,"高":2120,"「":1895,"」":3798,"・":-4371,"ッ":-724,"ー":-11870,"カ":2145,"コ":1789,"セ":1287,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637}; + this.UW5__ = {",":465,".":-299,"1":-514,"E2":-32768,"]":-2762,"、":465,"。":-299,"「":363,"あ":1655,"い":331,"う":-503,"え":1199,"お":527,"か":647,"が":-421,"き":1624,"ぎ":1971,"く":312,"げ":-983,"さ":-1537,"し":-1371,"す":-852,"だ":-1186,"ち":1093,"っ":52,"つ":921,"て":-18,"で":-850,"と":-127,"ど":1682,"な":-787,"に":-1224,"の":-635,"は":-578,"べ":1001,"み":502,"め":865,"ゃ":3350,"ょ":854,"り":-208,"る":429,"れ":504,"わ":419,"を":-1264,"ん":327,"イ":241,"ル":451,"ン":-343,"中":-871,"京":722,"会":-1153,"党":-654,"務":3519,"区":-901,"告":848,"員":2104,"大":-1296,"学":-548,"定":1785,"嵐":-1304,"市":-2991,"席":921,"年":1763,"思":872,"所":-814,"挙":1618,"新":-1682,"日":218,"月":-4353,"査":932,"格":1356,"機":-1508,"氏":-1347,"田":240,"町":-3912,"的":-3149,"相":1319,"省":-1052,"県":-4003,"研":-997,"社":-278,"空":-813,"統":1955,"者":-2233,"表":663,"語":-1073,"議":1219,"選":-1018,"郎":-368,"長":786,"間":1191,"題":2368,"館":-689,"1":-514,"E2":-32768,"「":363,"イ":241,"ル":451,"ン":-343}; + this.UW6__ = {",":227,".":808,"1":-270,"E1":306,"、":227,"。":808,"あ":-307,"う":189,"か":241,"が":-73,"く":-121,"こ":-200,"じ":1782,"す":383,"た":-428,"っ":573,"て":-1014,"で":101,"と":-105,"な":-253,"に":-149,"の":-417,"は":-236,"も":-206,"り":187,"る":-135,"を":195,"ル":-673,"ン":-496,"一":-277,"中":201,"件":-800,"会":624,"前":302,"区":1792,"員":-1212,"委":798,"学":-960,"市":887,"広":-695,"後":535,"業":-697,"相":753,"社":-507,"福":974,"空":-822,"者":1811,"連":463,"郎":1082,"1":-270,"E1":306,"ル":-673,"ン":-496}; + + return this; + } + 
TinySegmenter.prototype.ctype_ = function(str) { + for (var i in this.chartype_) { + if (str.match(this.chartype_[i][0])) { + return this.chartype_[i][1]; + } + } + return "O"; + } + + TinySegmenter.prototype.ts_ = function(v) { + if (v) { return v; } + return 0; + } + + TinySegmenter.prototype.segment = function(input) { + if (input == null || input == undefined || input == "") { + return []; + } + var result = []; + var seg = ["B3","B2","B1"]; + var ctype = ["O","O","O"]; + var o = input.split(""); + for (i = 0; i < o.length; ++i) { + seg.push(o[i]); + ctype.push(this.ctype_(o[i])) + } + seg.push("E1"); + seg.push("E2"); + seg.push("E3"); + ctype.push("O"); + ctype.push("O"); + ctype.push("O"); + var word = seg[3]; + var p1 = "U"; + var p2 = "U"; + var p3 = "U"; + for (var i = 4; i < seg.length - 3; ++i) { + var score = this.BIAS__; + var w1 = seg[i-3]; + var w2 = seg[i-2]; + var w3 = seg[i-1]; + var w4 = seg[i]; + var w5 = seg[i+1]; + var w6 = seg[i+2]; + var c1 = ctype[i-3]; + var c2 = ctype[i-2]; + var c3 = ctype[i-1]; + var c4 = ctype[i]; + var c5 = ctype[i+1]; + var c6 = ctype[i+2]; + score += this.ts_(this.UP1__[p1]); + score += this.ts_(this.UP2__[p2]); + score += this.ts_(this.UP3__[p3]); + score += this.ts_(this.BP1__[p1 + p2]); + score += this.ts_(this.BP2__[p2 + p3]); + score += this.ts_(this.UW1__[w1]); + score += this.ts_(this.UW2__[w2]); + score += this.ts_(this.UW3__[w3]); + score += this.ts_(this.UW4__[w4]); + score += this.ts_(this.UW5__[w5]); + score += this.ts_(this.UW6__[w6]); + score += this.ts_(this.BW1__[w2 + w3]); + score += this.ts_(this.BW2__[w3 + w4]); + score += this.ts_(this.BW3__[w4 + w5]); + score += this.ts_(this.TW1__[w1 + w2 + w3]); + score += this.ts_(this.TW2__[w2 + w3 + w4]); + score += this.ts_(this.TW3__[w3 + w4 + w5]); + score += this.ts_(this.TW4__[w4 + w5 + w6]); + score += this.ts_(this.UC1__[c1]); + score += this.ts_(this.UC2__[c2]); + score += this.ts_(this.UC3__[c3]); + score += this.ts_(this.UC4__[c4]); + score += this.ts_(this.UC5__[c5]); + score += this.ts_(this.UC6__[c6]); + score += this.ts_(this.BC1__[c2 + c3]); + score += this.ts_(this.BC2__[c3 + c4]); + score += this.ts_(this.BC3__[c4 + c5]); + score += this.ts_(this.TC1__[c1 + c2 + c3]); + score += this.ts_(this.TC2__[c2 + c3 + c4]); + score += this.ts_(this.TC3__[c3 + c4 + c5]); + score += this.ts_(this.TC4__[c4 + c5 + c6]); + // score += this.ts_(this.TC5__[c4 + c5 + c6]); + score += this.ts_(this.UQ1__[p1 + c1]); + score += this.ts_(this.UQ2__[p2 + c2]); + score += this.ts_(this.UQ3__[p3 + c3]); + score += this.ts_(this.BQ1__[p2 + c2 + c3]); + score += this.ts_(this.BQ2__[p2 + c3 + c4]); + score += this.ts_(this.BQ3__[p3 + c2 + c3]); + score += this.ts_(this.BQ4__[p3 + c3 + c4]); + score += this.ts_(this.TQ1__[p2 + c1 + c2 + c3]); + score += this.ts_(this.TQ2__[p2 + c2 + c3 + c4]); + score += this.ts_(this.TQ3__[p3 + c1 + c2 + c3]); + score += this.ts_(this.TQ4__[p3 + c2 + c3 + c4]); + var p = "O"; + if (score > 0) { + result.push(word); + word = ""; + p = "B"; + } + p1 = p2; + p2 = p3; + p3 = p; + word += seg[i]; + } + result.push(word); + + return result; + } + + lunr.TinySegmenter = TinySegmenter; + }; + +})); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/lunr/wordcut.js b/2.0.0/assets/javascripts/lunr/wordcut.js new file mode 100644 index 00000000..146f4b44 --- /dev/null +++ b/2.0.0/assets/javascripts/lunr/wordcut.js @@ -0,0 +1,6708 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof 
define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}(g.lunr || (g.lunr = {})).wordcut = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o 1; + }) + this.addWords(words, false) + } + if(finalize){ + this.finalizeDict(); + } + }, + + dictSeek: function (l, r, ch, strOffset, pos) { + var ans = null; + while (l <= r) { + var m = Math.floor((l + r) / 2), + dict_item = this.dict[m], + len = dict_item.length; + if (len <= strOffset) { + l = m + 1; + } else { + var ch_ = dict_item[strOffset]; + if (ch_ < ch) { + l = m + 1; + } else if (ch_ > ch) { + r = m - 1; + } else { + ans = m; + if (pos == LEFT) { + r = m - 1; + } else { + l = m + 1; + } + } + } + } + return ans; + }, + + isFinal: function (acceptor) { + return this.dict[acceptor.l].length == acceptor.strOffset; + }, + + createAcceptor: function () { + return { + l: 0, + r: this.dict.length - 1, + strOffset: 0, + isFinal: false, + dict: this, + transit: function (ch) { + return this.dict.transit(this, ch); + }, + isError: false, + tag: "DICT", + w: 1, + type: "DICT" + }; + }, + + transit: function (acceptor, ch) { + var l = this.dictSeek(acceptor.l, + acceptor.r, + ch, + acceptor.strOffset, + LEFT); + if (l !== null) { + var r = this.dictSeek(l, + acceptor.r, + ch, + acceptor.strOffset, + RIGHT); + acceptor.l = l; + acceptor.r = r; + acceptor.strOffset++; + acceptor.isFinal = this.isFinal(acceptor); + } else { + acceptor.isError = true; + } + return acceptor; + }, + + sortuniq: function(a){ + return a.sort().filter(function(item, pos, arr){ + return !pos || item != arr[pos - 1]; + }) + }, + + flatten: function(a){ + //[[1,2],[3]] -> [1,2,3] + return [].concat.apply([], a); + } +}; +module.exports = WordcutDict; + +}).call(this,"/dist/tmp") +},{"glob":16,"path":22}],3:[function(require,module,exports){ +var WordRule = { + createAcceptor: function(tag) { + if (tag["WORD_RULE"]) + return null; + + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + var lch = ch.toLowerCase(); + if (lch >= "a" && lch <= "z") { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "WORD_RULE", + type: "WORD_RULE", + w: 1}; + } +}; + +var NumberRule = { + createAcceptor: function(tag) { + if (tag["NUMBER_RULE"]) + return null; + + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (ch >= "0" && ch <= "9") { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "NUMBER_RULE", + type: "NUMBER_RULE", + w: 1}; + } +}; + +var SpaceRule = { + tag: "SPACE_RULE", + createAcceptor: function(tag) { + + if (tag["SPACE_RULE"]) + return null; + + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (ch == " " || ch == "\t" || ch == "\r" || ch == "\n" || + ch == "\u00A0" || ch=="\u2003"//nbsp and emsp + ) { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: 
SpaceRule.tag, + w: 1, + type: "SPACE_RULE"}; + } +} + +var SingleSymbolRule = { + tag: "SINSYM", + createAcceptor: function(tag) { + return {strOffset: 0, + isFinal: false, + transit: function(ch) { + if (this.strOffset == 0 && ch.match(/^[\@\(\)\/\,\-\."`]$/)) { + this.isFinal = true; + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "SINSYM", + w: 1, + type: "SINSYM"}; + } +} + + +var LatinRules = [WordRule, SpaceRule, SingleSymbolRule, NumberRule]; + +module.exports = LatinRules; + +},{}],4:[function(require,module,exports){ +var _ = require("underscore") + , WordcutCore = require("./wordcut_core"); +var PathInfoBuilder = { + + /* + buildByPartAcceptors: function(path, acceptors, i) { + var + var genInfos = partAcceptors.reduce(function(genInfos, acceptor) { + + }, []); + + return genInfos; + } + */ + + buildByAcceptors: function(path, finalAcceptors, i) { + var self = this; + var infos = finalAcceptors.map(function(acceptor) { + var p = i - acceptor.strOffset + 1 + , _info = path[p]; + + var info = {p: p, + mw: _info.mw + (acceptor.mw === undefined ? 0 : acceptor.mw), + w: acceptor.w + _info.w, + unk: (acceptor.unk ? acceptor.unk : 0) + _info.unk, + type: acceptor.type}; + + if (acceptor.type == "PART") { + for(var j = p + 1; j <= i; j++) { + path[j].merge = p; + } + info.merge = p; + } + + return info; + }); + return infos.filter(function(info) { return info; }); + }, + + fallback: function(path, leftBoundary, text, i) { + var _info = path[leftBoundary]; + if (text[i].match(/[\u0E48-\u0E4E]/)) { + if (leftBoundary != 0) + leftBoundary = path[leftBoundary].p; + return {p: leftBoundary, + mw: 0, + w: 1 + _info.w, + unk: 1 + _info.unk, + type: "UNK"}; +/* } else if(leftBoundary > 0 && path[leftBoundary].type !== "UNK") { + leftBoundary = path[leftBoundary].p; + return {p: leftBoundary, + w: 1 + _info.w, + unk: 1 + _info.unk, + type: "UNK"}; */ + } else { + return {p: leftBoundary, + mw: _info.mw, + w: 1 + _info.w, + unk: 1 + _info.unk, + type: "UNK"}; + } + }, + + build: function(path, finalAcceptors, i, leftBoundary, text) { + var basicPathInfos = this.buildByAcceptors(path, finalAcceptors, i); + if (basicPathInfos.length > 0) { + return basicPathInfos; + } else { + return [this.fallback(path, leftBoundary, text, i)]; + } + } +}; + +module.exports = function() { + return _.clone(PathInfoBuilder); +} + +},{"./wordcut_core":8,"underscore":25}],5:[function(require,module,exports){ +var _ = require("underscore"); + + +var PathSelector = { + selectPath: function(paths) { + var path = paths.reduce(function(selectedPath, path) { + if (selectedPath == null) { + return path; + } else { + if (path.unk < selectedPath.unk) + return path; + if (path.unk == selectedPath.unk) { + if (path.mw < selectedPath.mw) + return path + if (path.mw == selectedPath.mw) { + if (path.w < selectedPath.w) + return path; + } + } + return selectedPath; + } + }, null); + return path; + }, + + createPath: function() { + return [{p:null, w:0, unk:0, type: "INIT", mw:0}]; + } +}; + +module.exports = function() { + return _.clone(PathSelector); +}; + +},{"underscore":25}],6:[function(require,module,exports){ +function isMatch(pat, offset, ch) { + if (pat.length <= offset) + return false; + var _ch = pat[offset]; + return _ch == ch || + (_ch.match(/[กข]/) && ch.match(/[ก-ฮ]/)) || + (_ch.match(/[มบ]/) && ch.match(/[ก-ฮ]/)) || + (_ch.match(/\u0E49/) && ch.match(/[\u0E48-\u0E4B]/)); +} + +var Rule0 = { + pat: "เหก็ม", + createAcceptor: function(tag) { + return {strOffset: 0, 
+ isFinal: false, + transit: function(ch) { + if (isMatch(Rule0.pat, this.strOffset,ch)) { + this.isFinal = (this.strOffset + 1 == Rule0.pat.length); + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "THAI_RULE", + type: "THAI_RULE", + w: 1}; + } +}; + +var PartRule = { + createAcceptor: function(tag) { + return {strOffset: 0, + patterns: [ + "แก", "เก", "ก้", "กก์", "กา", "กี", "กิ", "กืก" + ], + isFinal: false, + transit: function(ch) { + var offset = this.strOffset; + this.patterns = this.patterns.filter(function(pat) { + return isMatch(pat, offset, ch); + }); + + if (this.patterns.length > 0) { + var len = 1 + offset; + this.isFinal = this.patterns.some(function(pat) { + return pat.length == len; + }); + this.strOffset++; + } else { + this.isError = true; + } + return this; + }, + isError: false, + tag: "PART", + type: "PART", + unk: 1, + w: 1}; + } +}; + +var ThaiRules = [Rule0, PartRule]; + +module.exports = ThaiRules; + +},{}],7:[function(require,module,exports){ +var sys = require("sys") + , WordcutDict = require("./dict") + , WordcutCore = require("./wordcut_core") + , PathInfoBuilder = require("./path_info_builder") + , PathSelector = require("./path_selector") + , Acceptors = require("./acceptors") + , latinRules = require("./latin_rules") + , thaiRules = require("./thai_rules") + , _ = require("underscore"); + + +var Wordcut = Object.create(WordcutCore); +Wordcut.defaultPathInfoBuilder = PathInfoBuilder; +Wordcut.defaultPathSelector = PathSelector; +Wordcut.defaultAcceptors = Acceptors; +Wordcut.defaultLatinRules = latinRules; +Wordcut.defaultThaiRules = thaiRules; +Wordcut.defaultDict = WordcutDict; + + +Wordcut.initNoDict = function(dict_path) { + var self = this; + self.pathInfoBuilder = new self.defaultPathInfoBuilder; + self.pathSelector = new self.defaultPathSelector; + self.acceptors = new self.defaultAcceptors; + self.defaultLatinRules.forEach(function(rule) { + self.acceptors.creators.push(rule); + }); + self.defaultThaiRules.forEach(function(rule) { + self.acceptors.creators.push(rule); + }); +}; + +Wordcut.init = function(dict_path, withDefault, additionalWords) { + withDefault = withDefault || false; + this.initNoDict(); + var dict = _.clone(this.defaultDict); + dict.init(dict_path, withDefault, additionalWords); + this.acceptors.creators.push(dict); +}; + +module.exports = Wordcut; + +},{"./acceptors":1,"./dict":2,"./latin_rules":3,"./path_info_builder":4,"./path_selector":5,"./thai_rules":6,"./wordcut_core":8,"sys":28,"underscore":25}],8:[function(require,module,exports){ +var WordcutCore = { + + buildPath: function(text) { + var self = this + , path = self.pathSelector.createPath() + , leftBoundary = 0; + self.acceptors.reset(); + for (var i = 0; i < text.length; i++) { + var ch = text[i]; + self.acceptors.transit(ch); + + var possiblePathInfos = self + .pathInfoBuilder + .build(path, + self.acceptors.getFinalAcceptors(), + i, + leftBoundary, + text); + var selectedPath = self.pathSelector.selectPath(possiblePathInfos) + + path.push(selectedPath); + if (selectedPath.type !== "UNK") { + leftBoundary = i; + } + } + return path; + }, + + pathToRanges: function(path) { + var e = path.length - 1 + , ranges = []; + + while (e > 0) { + var info = path[e] + , s = info.p; + + if (info.merge !== undefined && ranges.length > 0) { + var r = ranges[ranges.length - 1]; + r.s = info.merge; + s = r.s; + } else { + ranges.push({s:s, e:e}); + } + e = s; + } + return ranges.reverse(); + }, + + rangesToText: function(text, ranges, 
delimiter) { + return ranges.map(function(r) { + return text.substring(r.s, r.e); + }).join(delimiter); + }, + + cut: function(text, delimiter) { + var path = this.buildPath(text) + , ranges = this.pathToRanges(path); + return this + .rangesToText(text, ranges, + (delimiter === undefined ? "|" : delimiter)); + }, + + cutIntoRanges: function(text, noText) { + var path = this.buildPath(text) + , ranges = this.pathToRanges(path); + + if (!noText) { + ranges.forEach(function(r) { + r.text = text.substring(r.s, r.e); + }); + } + return ranges; + }, + + cutIntoArray: function(text) { + var path = this.buildPath(text) + , ranges = this.pathToRanges(path); + + return ranges.map(function(r) { + return text.substring(r.s, r.e) + }); + } +}; + +module.exports = WordcutCore; + +},{}],9:[function(require,module,exports){ +// http://wiki.commonjs.org/wiki/Unit_Testing/1.0 +// +// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8! +// +// Originally from narwhal.js (http://narwhaljs.org) +// Copyright (c) 2009 Thomas Robinson <280north.com> +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +// when used in node, this will actually load the util module we depend on +// versus loading the builtin util module as happens otherwise +// this is a bug in node module loading as far as I am concerned +var util = require('util/'); + +var pSlice = Array.prototype.slice; +var hasOwn = Object.prototype.hasOwnProperty; + +// 1. The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +var assert = module.exports = ok; + +// 2. The AssertionError is defined in assert. 
+// new assert.AssertionError({ message: message, +// actual: actual, +// expected: expected }) + +assert.AssertionError = function AssertionError(options) { + this.name = 'AssertionError'; + this.actual = options.actual; + this.expected = options.expected; + this.operator = options.operator; + if (options.message) { + this.message = options.message; + this.generatedMessage = false; + } else { + this.message = getMessage(this); + this.generatedMessage = true; + } + var stackStartFunction = options.stackStartFunction || fail; + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, stackStartFunction); + } + else { + // non v8 browsers so we can have a stacktrace + var err = new Error(); + if (err.stack) { + var out = err.stack; + + // try to strip useless frames + var fn_name = stackStartFunction.name; + var idx = out.indexOf('\n' + fn_name); + if (idx >= 0) { + // once we have located the function frame + // we need to strip out everything before it (and its line) + var next_line = out.indexOf('\n', idx + 1); + out = out.substring(next_line + 1); + } + + this.stack = out; + } + } +}; + +// assert.AssertionError instanceof Error +util.inherits(assert.AssertionError, Error); + +function replacer(key, value) { + if (util.isUndefined(value)) { + return '' + value; + } + if (util.isNumber(value) && !isFinite(value)) { + return value.toString(); + } + if (util.isFunction(value) || util.isRegExp(value)) { + return value.toString(); + } + return value; +} + +function truncate(s, n) { + if (util.isString(s)) { + return s.length < n ? s : s.slice(0, n); + } else { + return s; + } +} + +function getMessage(self) { + return truncate(JSON.stringify(self.actual, replacer), 128) + ' ' + + self.operator + ' ' + + truncate(JSON.stringify(self.expected, replacer), 128); +} + +// At present only the three keys mentioned above are used and +// understood by the spec. Implementations or sub modules can pass +// other keys to the AssertionError's constructor - they will be +// ignored. + +// 3. All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function fail(actual, expected, message, operator, stackStartFunction) { + throw new assert.AssertionError({ + message: message, + actual: actual, + expected: expected, + operator: operator, + stackStartFunction: stackStartFunction + }); +} + +// EXTENSION! allows for well behaved errors defined elsewhere. +assert.fail = fail; + +// 4. Pure assertion tests whether a value is truthy, as determined +// by !!guard. +// assert.ok(guard, message_opt); +// This statement is equivalent to assert.equal(true, !!guard, +// message_opt);. To test strictly for the value true, use +// assert.strictEqual(true, guard, message_opt);. + +function ok(value, message) { + if (!value) fail(value, true, message, '==', assert.ok); +} +assert.ok = ok; + +// 5. The equality assertion tests shallow, coercive equality with +// ==. +// assert.equal(actual, expected, message_opt); + +assert.equal = function equal(actual, expected, message) { + if (actual != expected) fail(actual, expected, message, '==', assert.equal); +}; + +// 6. 
The non-equality assertion tests for whether two objects are not equal +// with != assert.notEqual(actual, expected, message_opt); + +assert.notEqual = function notEqual(actual, expected, message) { + if (actual == expected) { + fail(actual, expected, message, '!=', assert.notEqual); + } +}; + +// 7. The equivalence assertion tests a deep equality relation. +// assert.deepEqual(actual, expected, message_opt); + +assert.deepEqual = function deepEqual(actual, expected, message) { + if (!_deepEqual(actual, expected)) { + fail(actual, expected, message, 'deepEqual', assert.deepEqual); + } +}; + +function _deepEqual(actual, expected) { + // 7.1. All identical values are equivalent, as determined by ===. + if (actual === expected) { + return true; + + } else if (util.isBuffer(actual) && util.isBuffer(expected)) { + if (actual.length != expected.length) return false; + + for (var i = 0; i < actual.length; i++) { + if (actual[i] !== expected[i]) return false; + } + + return true; + + // 7.2. If the expected value is a Date object, the actual value is + // equivalent if it is also a Date object that refers to the same time. + } else if (util.isDate(actual) && util.isDate(expected)) { + return actual.getTime() === expected.getTime(); + + // 7.3 If the expected value is a RegExp object, the actual value is + // equivalent if it is also a RegExp object with the same source and + // properties (`global`, `multiline`, `lastIndex`, `ignoreCase`). + } else if (util.isRegExp(actual) && util.isRegExp(expected)) { + return actual.source === expected.source && + actual.global === expected.global && + actual.multiline === expected.multiline && + actual.lastIndex === expected.lastIndex && + actual.ignoreCase === expected.ignoreCase; + + // 7.4. Other pairs that do not both pass typeof value == 'object', + // equivalence is determined by ==. + } else if (!util.isObject(actual) && !util.isObject(expected)) { + return actual == expected; + + // 7.5 For all other Object pairs, including Array objects, equivalence is + // determined by having the same number of owned properties (as verified + // with Object.prototype.hasOwnProperty.call), the same set of keys + // (although not necessarily the same order), equivalent values for every + // corresponding key, and an identical 'prototype' property. Note: this + // accounts for both named and indexed properties on Arrays. + } else { + return objEquiv(actual, expected); + } +} + +function isArguments(object) { + return Object.prototype.toString.call(object) == '[object Arguments]'; +} + +function objEquiv(a, b) { + if (util.isNullOrUndefined(a) || util.isNullOrUndefined(b)) + return false; + // an identical 'prototype' property. 
+ if (a.prototype !== b.prototype) return false; + // if one is a primitive, the other must be same + if (util.isPrimitive(a) || util.isPrimitive(b)) { + return a === b; + } + var aIsArgs = isArguments(a), + bIsArgs = isArguments(b); + if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs)) + return false; + if (aIsArgs) { + a = pSlice.call(a); + b = pSlice.call(b); + return _deepEqual(a, b); + } + var ka = objectKeys(a), + kb = objectKeys(b), + key, i; + // having the same number of owned properties (keys incorporates + // hasOwnProperty) + if (ka.length != kb.length) + return false; + //the same set of keys (although not necessarily the same order), + ka.sort(); + kb.sort(); + //~~~cheap key test + for (i = ka.length - 1; i >= 0; i--) { + if (ka[i] != kb[i]) + return false; + } + //equivalent values for every corresponding key, and + //~~~possibly expensive deep test + for (i = ka.length - 1; i >= 0; i--) { + key = ka[i]; + if (!_deepEqual(a[key], b[key])) return false; + } + return true; +} + +// 8. The non-equivalence assertion tests for any deep inequality. +// assert.notDeepEqual(actual, expected, message_opt); + +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (_deepEqual(actual, expected)) { + fail(actual, expected, message, 'notDeepEqual', assert.notDeepEqual); + } +}; + +// 9. The strict equality assertion tests strict equality, as determined by ===. +// assert.strictEqual(actual, expected, message_opt); + +assert.strictEqual = function strictEqual(actual, expected, message) { + if (actual !== expected) { + fail(actual, expected, message, '===', assert.strictEqual); + } +}; + +// 10. The strict non-equality assertion tests for strict inequality, as +// determined by !==. assert.notStrictEqual(actual, expected, message_opt); + +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (actual === expected) { + fail(actual, expected, message, '!==', assert.notStrictEqual); + } +}; + +function expectedException(actual, expected) { + if (!actual || !expected) { + return false; + } + + if (Object.prototype.toString.call(expected) == '[object RegExp]') { + return expected.test(actual); + } else if (actual instanceof expected) { + return true; + } else if (expected.call({}, actual) === true) { + return true; + } + + return false; +} + +function _throws(shouldThrow, block, expected, message) { + var actual; + + if (util.isString(expected)) { + message = expected; + expected = null; + } + + try { + block(); + } catch (e) { + actual = e; + } + + message = (expected && expected.name ? ' (' + expected.name + ').' : '.') + + (message ? ' ' + message : '.'); + + if (shouldThrow && !actual) { + fail(actual, expected, 'Missing expected exception' + message); + } + + if (!shouldThrow && expectedException(actual, expected)) { + fail(actual, expected, 'Got unwanted exception' + message); + } + + if ((shouldThrow && actual && expected && + !expectedException(actual, expected)) || (!shouldThrow && actual)) { + throw actual; + } +} + +// 11. Expected to throw an error: +// assert.throws(block, Error_opt, message_opt); + +assert.throws = function(block, /*optional*/error, /*optional*/message) { + _throws.apply(this, [true].concat(pSlice.call(arguments))); +}; + +// EXTENSION! This is annoying to write outside this module. 
+assert.doesNotThrow = function(block, /*optional*/message) { + _throws.apply(this, [false].concat(pSlice.call(arguments))); +}; + +assert.ifError = function(err) { if (err) {throw err;}}; + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + if (hasOwn.call(obj, key)) keys.push(key); + } + return keys; +}; + +},{"util/":28}],10:[function(require,module,exports){ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + +},{}],11:[function(require,module,exports){ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + +},{"balanced-match":10,"concat-map":13}],12:[function(require,module,exports){ + +},{}],13:[function(require,module,exports){ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + +},{}],14:[function(require,module,exports){ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +function EventEmitter() { + this._events = this._events || {}; + this._maxListeners = this._maxListeners || undefined; +} +module.exports = EventEmitter; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +EventEmitter.defaultMaxListeners = 10; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function(n) { + if (!isNumber(n) || n < 0 || isNaN(n)) + throw TypeError('n must be a positive number'); + this._maxListeners = n; + return this; +}; + +EventEmitter.prototype.emit = function(type) { + var er, handler, len, args, i, listeners; + + if (!this._events) + this._events = {}; + + // If there is no 'error' event listener then throw. + if (type === 'error') { + if (!this._events.error || + (isObject(this._events.error) && !this._events.error.length)) { + er = arguments[1]; + if (er instanceof Error) { + throw er; // Unhandled 'error' event + } + throw TypeError('Uncaught, unspecified "error" event.'); + } + } + + handler = this._events[type]; + + if (isUndefined(handler)) + return false; + + if (isFunction(handler)) { + switch (arguments.length) { + // fast cases + case 1: + handler.call(this); + break; + case 2: + handler.call(this, arguments[1]); + break; + case 3: + handler.call(this, arguments[1], arguments[2]); + break; + // slower + default: + len = arguments.length; + args = new Array(len - 1); + for (i = 1; i < len; i++) + args[i - 1] = arguments[i]; + handler.apply(this, args); + } + } else if (isObject(handler)) { + len = arguments.length; + args = new Array(len - 1); + for (i = 1; i < len; i++) + args[i - 1] = arguments[i]; + + listeners = handler.slice(); + len = listeners.length; + for (i = 0; i < len; i++) + listeners[i].apply(this, args); + } + + return true; +}; + +EventEmitter.prototype.addListener = function(type, listener) { + var m; + + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + if (!this._events) + this._events = {}; + + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". 
+ if (this._events.newListener) + this.emit('newListener', type, + isFunction(listener.listener) ? + listener.listener : listener); + + if (!this._events[type]) + // Optimize the case of one listener. Don't need the extra array object. + this._events[type] = listener; + else if (isObject(this._events[type])) + // If we've already got an array, just append. + this._events[type].push(listener); + else + // Adding the second element, need to change to array. + this._events[type] = [this._events[type], listener]; + + // Check for listener leak + if (isObject(this._events[type]) && !this._events[type].warned) { + var m; + if (!isUndefined(this._maxListeners)) { + m = this._maxListeners; + } else { + m = EventEmitter.defaultMaxListeners; + } + + if (m && m > 0 && this._events[type].length > m) { + this._events[type].warned = true; + console.error('(node) warning: possible EventEmitter memory ' + + 'leak detected. %d listeners added. ' + + 'Use emitter.setMaxListeners() to increase limit.', + this._events[type].length); + if (typeof console.trace === 'function') { + // not supported in IE 10 + console.trace(); + } + } + } + + return this; +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.once = function(type, listener) { + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + var fired = false; + + function g() { + this.removeListener(type, g); + + if (!fired) { + fired = true; + listener.apply(this, arguments); + } + } + + g.listener = listener; + this.on(type, g); + + return this; +}; + +// emits a 'removeListener' event iff the listener was removed +EventEmitter.prototype.removeListener = function(type, listener) { + var list, position, length, i; + + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + if (!this._events || !this._events[type]) + return this; + + list = this._events[type]; + length = list.length; + position = -1; + + if (list === listener || + (isFunction(list.listener) && list.listener === listener)) { + delete this._events[type]; + if (this._events.removeListener) + this.emit('removeListener', type, listener); + + } else if (isObject(list)) { + for (i = length; i-- > 0;) { + if (list[i] === listener || + (list[i].listener && list[i].listener === listener)) { + position = i; + break; + } + } + + if (position < 0) + return this; + + if (list.length === 1) { + list.length = 0; + delete this._events[type]; + } else { + list.splice(position, 1); + } + + if (this._events.removeListener) + this.emit('removeListener', type, listener); + } + + return this; +}; + +EventEmitter.prototype.removeAllListeners = function(type) { + var key, listeners; + + if (!this._events) + return this; + + // not listening for removeListener, no need to emit + if (!this._events.removeListener) { + if (arguments.length === 0) + this._events = {}; + else if (this._events[type]) + delete this._events[type]; + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + for (key in this._events) { + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = {}; + return this; + } + + listeners = this._events[type]; + + if (isFunction(listeners)) { + this.removeListener(type, listeners); + } else { + // LIFO order + while (listeners.length) + this.removeListener(type, listeners[listeners.length - 1]); + } + delete this._events[type]; + + return this; +}; + 
+EventEmitter.prototype.listeners = function(type) { + var ret; + if (!this._events || !this._events[type]) + ret = []; + else if (isFunction(this._events[type])) + ret = [this._events[type]]; + else + ret = this._events[type].slice(); + return ret; +}; + +EventEmitter.listenerCount = function(emitter, type) { + var ret; + if (!emitter._events || !emitter._events[type]) + ret = 0; + else if (isFunction(emitter._events[type])) + ret = 1; + else + ret = emitter._events[type].length; + return ret; +}; + +function isFunction(arg) { + return typeof arg === 'function'; +} + +function isNumber(arg) { + return typeof arg === 'number'; +} + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} + +function isUndefined(arg) { + return arg === void 0; +} + +},{}],15:[function(require,module,exports){ +(function (process){ +exports.alphasort = alphasort +exports.alphasorti = alphasorti +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) +} + +function alphasort (a, b) { + return a.localeCompare(b) +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern) + } + + return { + matcher: new Minimatch(pattern), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = options.cwd + self.changedCwd = path.resolve(options.cwd) !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + self.nomount = !!options.nomount + + // disable comments and negation unless the user explicitly + // passes in false as the option. + options.nonegate = options.nonegate === false ? false : true + options.nocomment = options.nocomment === false ? false : true + deprecationWarning(options) + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +// TODO(isaacs): remove entirely in v6 +// exported to reset in tests +exports.deprecationWarned +function deprecationWarning(options) { + if (!options.nonegate || !options.nocomment) { + if (process.noDeprecation !== true && !exports.deprecationWarned) { + var msg = 'glob WARNING: comments and negation will be disabled in v6' + if (process.throwDeprecation) + throw new Error(msg) + else if (process.traceDeprecation) + console.trace(msg) + else + console.error(msg) + + exports.deprecationWarned = true + } + } +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(self.nocase ? 
alphasorti : alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + return !(/\/$/.test(e)) + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +}).call(this,require('_process')) +},{"_process":24,"minimatch":20,"path":22,"path-is-absolute":23}],16:[function(require,module,exports){ +(function (process){ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
+ +module.exports = glob + +var fs = require('fs') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +glob.hasMagic = function (pattern, options_) { + var options = util._extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + var n = this.minimatch.set.length + this._processing = 0 + this.matches = new Array(n) + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + + function done () { + --self._processing + if (self._processing <= 0) + self._finish() + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + fs.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (this.matches[index][e]) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = this._makeAbs(e) + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + if (this.mark) + e = this._mark(e) + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er) + return cb() + + var isSym = lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + this.cache[this._makeAbs(f)] = 'FILE' + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. 
+ this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 
'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && !stat.isDirectory()) + return cb(null, false, stat) + + var c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c !== 'DIR') + return cb() + + return cb(null, c, stat) +} + +}).call(this,require('_process')) +},{"./common.js":15,"./sync.js":17,"_process":24,"assert":9,"events":14,"fs":12,"inflight":18,"inherits":19,"minimatch":20,"once":21,"path":22,"path-is-absolute":23,"util":28}],17:[function(require,module,exports){ +(function (process){ +module.exports = globSync +globSync.GlobSync = GlobSync + +var fs = require('fs') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = fs.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this.matches[index][e] = true + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + var abs = this._makeAbs(e) + if (this.mark) + e = this._mark(e) + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[this._makeAbs(e)] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + // lstat failed, doesn't exist + return null + } + + var isSym = lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + this.cache[this._makeAbs(f)] = 'FILE' + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? 
[ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this.matches[index][prefix] = true +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + return false + } + + if (lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c !== 'DIR') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +}).call(this,require('_process')) +},{"./common.js":15,"./glob.js":16,"_process":24,"assert":9,"fs":12,"minimatch":20,"path":22,"path-is-absolute":23,"util":28}],18:[function(require,module,exports){ +(function (process){ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. 
+ // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. + cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} + +}).call(this,require('_process')) +},{"_process":24,"once":21,"wrappy":29}],19:[function(require,module,exports){ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} + +},{}],20:[function(require,module,exports){ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = require('path') +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. 
+ if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. 
+ hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. 
+ var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. 
+ f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' 
|| + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + +},{"brace-expansion":11,"path":22}],21:[function(require,module,exports){ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + +},{"wrappy":29}],22:[function(require,module,exports){ +(function (process){ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// resolves . and .. elements in a path array with directory names there +// must be no slashes, empty elements, or device names (c:\) in the array +// (so also no leading and trailing slashes - it does not distinguish +// relative and absolute paths) +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = parts.length - 1; i >= 0; i--) { + var last = parts[i]; + if (last === '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +} + +// Split a filename into [root, dir, basename, ext], unix version +// 'root' is just a slash, or nothing. +var splitPathRe = + /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; +var splitPath = function(filename) { + return splitPathRe.exec(filename).slice(1); +}; + +// path.resolve([from ...], to) +// posix version +exports.resolve = function() { + var resolvedPath = '', + resolvedAbsolute = false; + + for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { + var path = (i >= 0) ? arguments[i] : process.cwd(); + + // Skip empty and invalid entries + if (typeof path !== 'string') { + throw new TypeError('Arguments to path.resolve must be strings'); + } else if (!path) { + continue; + } + + resolvedPath = path + '/' + resolvedPath; + resolvedAbsolute = path.charAt(0) === '/'; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) { + return !!p; + }), !resolvedAbsolute).join('/'); + + return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.'; +}; + +// path.normalize(path) +// posix version +exports.normalize = function(path) { + var isAbsolute = exports.isAbsolute(path), + trailingSlash = substr(path, -1) === '/'; + + // Normalize the path + path = normalizeArray(filter(path.split('/'), function(p) { + return !!p; + }), !isAbsolute).join('/'); + + if (!path && !isAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isAbsolute ? 
'/' : '') + path; +}; + +// posix version +exports.isAbsolute = function(path) { + return path.charAt(0) === '/'; +}; + +// posix version +exports.join = function() { + var paths = Array.prototype.slice.call(arguments, 0); + return exports.normalize(filter(paths, function(p, index) { + if (typeof p !== 'string') { + throw new TypeError('Arguments to path.join must be strings'); + } + return p; + }).join('/')); +}; + + +// path.relative(from, to) +// posix version +exports.relative = function(from, to) { + from = exports.resolve(from).substr(1); + to = exports.resolve(to).substr(1); + + function trim(arr) { + var start = 0; + for (; start < arr.length; start++) { + if (arr[start] !== '') break; + } + + var end = arr.length - 1; + for (; end >= 0; end--) { + if (arr[end] !== '') break; + } + + if (start > end) return []; + return arr.slice(start, end - start + 1); + } + + var fromParts = trim(from.split('/')); + var toParts = trim(to.split('/')); + + var length = Math.min(fromParts.length, toParts.length); + var samePartsLength = length; + for (var i = 0; i < length; i++) { + if (fromParts[i] !== toParts[i]) { + samePartsLength = i; + break; + } + } + + var outputParts = []; + for (var i = samePartsLength; i < fromParts.length; i++) { + outputParts.push('..'); + } + + outputParts = outputParts.concat(toParts.slice(samePartsLength)); + + return outputParts.join('/'); +}; + +exports.sep = '/'; +exports.delimiter = ':'; + +exports.dirname = function(path) { + var result = splitPath(path), + root = result[0], + dir = result[1]; + + if (!root && !dir) { + // No dirname whatsoever + return '.'; + } + + if (dir) { + // It has a dirname, strip trailing slash + dir = dir.substr(0, dir.length - 1); + } + + return root + dir; +}; + + +exports.basename = function(path, ext) { + var f = splitPath(path)[2]; + // TODO: make this comparison case-insensitive on windows? + if (ext && f.substr(-1 * ext.length) === ext) { + f = f.substr(0, f.length - ext.length); + } + return f; +}; + + +exports.extname = function(path) { + return splitPath(path)[3]; +}; + +function filter (xs, f) { + if (xs.filter) return xs.filter(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + if (f(xs[i], i, xs)) res.push(xs[i]); + } + return res; +} + +// String.prototype.substr - negative index don't work in IE8 +var substr = 'ab'.substr(-1) === 'b' + ? function (str, start, len) { return str.substr(start, len) } + : function (str, start, len) { + if (start < 0) start = str.length + start; + return str.substr(start, len); + } +; + +}).call(this,require('_process')) +},{"_process":24}],23:[function(require,module,exports){ +(function (process){ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; + +}).call(this,require('_process')) +},{"_process":24}],24:[function(require,module,exports){ +// shim for using process in browser +var process = module.exports = {}; + +// cached from whatever global is present so that test runners that stub it +// don't break things. But we need to wrap it in a try catch in case it is +// wrapped in strict mode code which doesn't define any globals. It's inside a +// function because try/catches deoptimize in certain engines. + +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} +function defaultClearTimeout () { + throw new Error('clearTimeout has not been defined'); +} +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +} ()) +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } + // if setTimeout wasn't available but was latter defined + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch(e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch(e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + + +} +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } + // if clearTimeout wasn't available but was latter defined + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + + + +} +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + draining = false; + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + var timeout = runTimeout(cleanUpNextTick); + draining = true; + + var len = queue.length; + while(len) { + currentQueue = queue; + queue = []; + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + queueIndex = -1; + len = queue.length; + } + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + queue.push(new Item(fun, args)); + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } +}; + +// v8 likes predictible objects +function Item(fun, array) { + this.fun = fun; + this.array = array; +} +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; // empty string to avoid regexp issues +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; +process.prependListener = noop; +process.prependOnceListener = noop; + +process.listeners = function (name) { return [] } + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { return '/' }; +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; +process.umask = function() { return 0; }; + +},{}],25:[function(require,module,exports){ +// Underscore.js 1.8.3 +// http://underscorejs.org +// (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors +// Underscore may be freely distributed under the MIT license. + +(function() { + + // Baseline setup + // -------------- + + // Establish the root object, `window` in the browser, or `exports` on the server. + var root = this; + + // Save the previous value of the `_` variable. + var previousUnderscore = root._; + + // Save bytes in the minified (but not gzipped) version: + var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; + + // Create quick reference variables for speed access to core prototypes. + var + push = ArrayProto.push, + slice = ArrayProto.slice, + toString = ObjProto.toString, + hasOwnProperty = ObjProto.hasOwnProperty; + + // All **ECMAScript 5** native function implementations that we hope to use + // are declared here. + var + nativeIsArray = Array.isArray, + nativeKeys = Object.keys, + nativeBind = FuncProto.bind, + nativeCreate = Object.create; + + // Naked function reference for surrogate-prototype-swapping. + var Ctor = function(){}; + + // Create a safe reference to the Underscore object for use below. 
+ var _ = function(obj) { + if (obj instanceof _) return obj; + if (!(this instanceof _)) return new _(obj); + this._wrapped = obj; + }; + + // Export the Underscore object for **Node.js**, with + // backwards-compatibility for the old `require()` API. If we're in + // the browser, add `_` as a global object. + if (typeof exports !== 'undefined') { + if (typeof module !== 'undefined' && module.exports) { + exports = module.exports = _; + } + exports._ = _; + } else { + root._ = _; + } + + // Current version. + _.VERSION = '1.8.3'; + + // Internal function that returns an efficient (for current engines) version + // of the passed-in callback, to be repeatedly applied in other Underscore + // functions. + var optimizeCb = function(func, context, argCount) { + if (context === void 0) return func; + switch (argCount == null ? 3 : argCount) { + case 1: return function(value) { + return func.call(context, value); + }; + case 2: return function(value, other) { + return func.call(context, value, other); + }; + case 3: return function(value, index, collection) { + return func.call(context, value, index, collection); + }; + case 4: return function(accumulator, value, index, collection) { + return func.call(context, accumulator, value, index, collection); + }; + } + return function() { + return func.apply(context, arguments); + }; + }; + + // A mostly-internal function to generate callbacks that can be applied + // to each element in a collection, returning the desired result — either + // identity, an arbitrary callback, a property matcher, or a property accessor. + var cb = function(value, context, argCount) { + if (value == null) return _.identity; + if (_.isFunction(value)) return optimizeCb(value, context, argCount); + if (_.isObject(value)) return _.matcher(value); + return _.property(value); + }; + _.iteratee = function(value, context) { + return cb(value, context, Infinity); + }; + + // An internal function for creating assigner functions. + var createAssigner = function(keysFunc, undefinedOnly) { + return function(obj) { + var length = arguments.length; + if (length < 2 || obj == null) return obj; + for (var index = 1; index < length; index++) { + var source = arguments[index], + keys = keysFunc(source), + l = keys.length; + for (var i = 0; i < l; i++) { + var key = keys[i]; + if (!undefinedOnly || obj[key] === void 0) obj[key] = source[key]; + } + } + return obj; + }; + }; + + // An internal function for creating a new object that inherits from another. + var baseCreate = function(prototype) { + if (!_.isObject(prototype)) return {}; + if (nativeCreate) return nativeCreate(prototype); + Ctor.prototype = prototype; + var result = new Ctor; + Ctor.prototype = null; + return result; + }; + + var property = function(key) { + return function(obj) { + return obj == null ? void 0 : obj[key]; + }; + }; + + // Helper for collection methods to determine whether a collection + // should be iterated as an array or as an object + // Related: http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength + // Avoids a very nasty iOS 8 JIT bug on ARM-64. #2094 + var MAX_ARRAY_INDEX = Math.pow(2, 53) - 1; + var getLength = property('length'); + var isArrayLike = function(collection) { + var length = getLength(collection); + return typeof length == 'number' && length >= 0 && length <= MAX_ARRAY_INDEX; + }; + + // Collection Functions + // -------------------- + + // The cornerstone, an `each` implementation, aka `forEach`. + // Handles raw objects in addition to array-likes. 
Treats all + // sparse array-likes as if they were dense. + _.each = _.forEach = function(obj, iteratee, context) { + iteratee = optimizeCb(iteratee, context); + var i, length; + if (isArrayLike(obj)) { + for (i = 0, length = obj.length; i < length; i++) { + iteratee(obj[i], i, obj); + } + } else { + var keys = _.keys(obj); + for (i = 0, length = keys.length; i < length; i++) { + iteratee(obj[keys[i]], keys[i], obj); + } + } + return obj; + }; + + // Return the results of applying the iteratee to each element. + _.map = _.collect = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + results = Array(length); + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + results[index] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Create a reducing function iterating left or right. + function createReduce(dir) { + // Optimized iterator function as using arguments.length + // in the main function will deoptimize the, see #1991. + function iterator(obj, iteratee, memo, keys, index, length) { + for (; index >= 0 && index < length; index += dir) { + var currentKey = keys ? keys[index] : index; + memo = iteratee(memo, obj[currentKey], currentKey, obj); + } + return memo; + } + + return function(obj, iteratee, memo, context) { + iteratee = optimizeCb(iteratee, context, 4); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + index = dir > 0 ? 0 : length - 1; + // Determine the initial value if none is provided. + if (arguments.length < 3) { + memo = obj[keys ? keys[index] : index]; + index += dir; + } + return iterator(obj, iteratee, memo, keys, index, length); + }; + } + + // **Reduce** builds up a single result from a list of values, aka `inject`, + // or `foldl`. + _.reduce = _.foldl = _.inject = createReduce(1); + + // The right-associative version of reduce, also known as `foldr`. + _.reduceRight = _.foldr = createReduce(-1); + + // Return the first value which passes a truth test. Aliased as `detect`. + _.find = _.detect = function(obj, predicate, context) { + var key; + if (isArrayLike(obj)) { + key = _.findIndex(obj, predicate, context); + } else { + key = _.findKey(obj, predicate, context); + } + if (key !== void 0 && key !== -1) return obj[key]; + }; + + // Return all the elements that pass a truth test. + // Aliased as `select`. + _.filter = _.select = function(obj, predicate, context) { + var results = []; + predicate = cb(predicate, context); + _.each(obj, function(value, index, list) { + if (predicate(value, index, list)) results.push(value); + }); + return results; + }; + + // Return all the elements for which a truth test fails. + _.reject = function(obj, predicate, context) { + return _.filter(obj, _.negate(cb(predicate)), context); + }; + + // Determine whether all of the elements match a truth test. + // Aliased as `all`. + _.every = _.all = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (!predicate(obj[currentKey], currentKey, obj)) return false; + } + return true; + }; + + // Determine if at least one element in the object matches a truth test. + // Aliased as `any`. 
+ _.some = _.any = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (predicate(obj[currentKey], currentKey, obj)) return true; + } + return false; + }; + + // Determine if the array or object contains a given item (using `===`). + // Aliased as `includes` and `include`. + _.contains = _.includes = _.include = function(obj, item, fromIndex, guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + if (typeof fromIndex != 'number' || guard) fromIndex = 0; + return _.indexOf(obj, item, fromIndex) >= 0; + }; + + // Invoke a method (with arguments) on every item in a collection. + _.invoke = function(obj, method) { + var args = slice.call(arguments, 2); + var isFunc = _.isFunction(method); + return _.map(obj, function(value) { + var func = isFunc ? method : value[method]; + return func == null ? func : func.apply(value, args); + }); + }; + + // Convenience version of a common use case of `map`: fetching a property. + _.pluck = function(obj, key) { + return _.map(obj, _.property(key)); + }; + + // Convenience version of a common use case of `filter`: selecting only objects + // containing specific `key:value` pairs. + _.where = function(obj, attrs) { + return _.filter(obj, _.matcher(attrs)); + }; + + // Convenience version of a common use case of `find`: getting the first object + // containing specific `key:value` pairs. + _.findWhere = function(obj, attrs) { + return _.find(obj, _.matcher(attrs)); + }; + + // Return the maximum element (or element-based computation). + _.max = function(obj, iteratee, context) { + var result = -Infinity, lastComputed = -Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value > result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed > lastComputed || computed === -Infinity && result === -Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Return the minimum element (or element-based computation). + _.min = function(obj, iteratee, context) { + var result = Infinity, lastComputed = Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value < result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed < lastComputed || computed === Infinity && result === Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Shuffle a collection, using the modern version of the + // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle). + _.shuffle = function(obj) { + var set = isArrayLike(obj) ? obj : _.values(obj); + var length = set.length; + var shuffled = Array(length); + for (var index = 0, rand; index < length; index++) { + rand = _.random(0, index); + if (rand !== index) shuffled[index] = shuffled[rand]; + shuffled[rand] = set[index]; + } + return shuffled; + }; + + // Sample **n** random values from a collection. 
+ // If **n** is not specified, returns a single random element. + // The internal `guard` argument allows it to work with `map`. + _.sample = function(obj, n, guard) { + if (n == null || guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + return obj[_.random(obj.length - 1)]; + } + return _.shuffle(obj).slice(0, Math.max(0, n)); + }; + + // Sort the object's values by a criterion produced by an iteratee. + _.sortBy = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + return _.pluck(_.map(obj, function(value, index, list) { + return { + value: value, + index: index, + criteria: iteratee(value, index, list) + }; + }).sort(function(left, right) { + var a = left.criteria; + var b = right.criteria; + if (a !== b) { + if (a > b || a === void 0) return 1; + if (a < b || b === void 0) return -1; + } + return left.index - right.index; + }), 'value'); + }; + + // An internal function used for aggregate "group by" operations. + var group = function(behavior) { + return function(obj, iteratee, context) { + var result = {}; + iteratee = cb(iteratee, context); + _.each(obj, function(value, index) { + var key = iteratee(value, index, obj); + behavior(result, value, key); + }); + return result; + }; + }; + + // Groups the object's values by a criterion. Pass either a string attribute + // to group by, or a function that returns the criterion. + _.groupBy = group(function(result, value, key) { + if (_.has(result, key)) result[key].push(value); else result[key] = [value]; + }); + + // Indexes the object's values by a criterion, similar to `groupBy`, but for + // when you know that your index values will be unique. + _.indexBy = group(function(result, value, key) { + result[key] = value; + }); + + // Counts instances of an object that group by a certain criterion. Pass + // either a string attribute to count by, or a function that returns the + // criterion. + _.countBy = group(function(result, value, key) { + if (_.has(result, key)) result[key]++; else result[key] = 1; + }); + + // Safely create a real, live array from anything iterable. + _.toArray = function(obj) { + if (!obj) return []; + if (_.isArray(obj)) return slice.call(obj); + if (isArrayLike(obj)) return _.map(obj, _.identity); + return _.values(obj); + }; + + // Return the number of elements in an object. + _.size = function(obj) { + if (obj == null) return 0; + return isArrayLike(obj) ? obj.length : _.keys(obj).length; + }; + + // Split a collection into two arrays: one whose elements all satisfy the given + // predicate, and one whose elements all do not satisfy the predicate. + _.partition = function(obj, predicate, context) { + predicate = cb(predicate, context); + var pass = [], fail = []; + _.each(obj, function(value, key, obj) { + (predicate(value, key, obj) ? pass : fail).push(value); + }); + return [pass, fail]; + }; + + // Array Functions + // --------------- + + // Get the first element of an array. Passing **n** will return the first N + // values in the array. Aliased as `head` and `take`. The **guard** check + // allows it to work with `_.map`. + _.first = _.head = _.take = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[0]; + return _.initial(array, array.length - n); + }; + + // Returns everything but the last entry of the array. Especially useful on + // the arguments object. Passing **n** will return all the values in + // the array, excluding the last N. 
+ _.initial = function(array, n, guard) { + return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n))); + }; + + // Get the last element of an array. Passing **n** will return the last N + // values in the array. + _.last = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[array.length - 1]; + return _.rest(array, Math.max(0, array.length - n)); + }; + + // Returns everything but the first entry of the array. Aliased as `tail` and `drop`. + // Especially useful on the arguments object. Passing an **n** will return + // the rest N values in the array. + _.rest = _.tail = _.drop = function(array, n, guard) { + return slice.call(array, n == null || guard ? 1 : n); + }; + + // Trim out all falsy values from an array. + _.compact = function(array) { + return _.filter(array, _.identity); + }; + + // Internal implementation of a recursive `flatten` function. + var flatten = function(input, shallow, strict, startIndex) { + var output = [], idx = 0; + for (var i = startIndex || 0, length = getLength(input); i < length; i++) { + var value = input[i]; + if (isArrayLike(value) && (_.isArray(value) || _.isArguments(value))) { + //flatten current level of array or arguments object + if (!shallow) value = flatten(value, shallow, strict); + var j = 0, len = value.length; + output.length += len; + while (j < len) { + output[idx++] = value[j++]; + } + } else if (!strict) { + output[idx++] = value; + } + } + return output; + }; + + // Flatten out an array, either recursively (by default), or just one level. + _.flatten = function(array, shallow) { + return flatten(array, shallow, false); + }; + + // Return a version of the array that does not contain the specified value(s). + _.without = function(array) { + return _.difference(array, slice.call(arguments, 1)); + }; + + // Produce a duplicate-free version of the array. If the array has already + // been sorted, you have the option of using a faster algorithm. + // Aliased as `unique`. + _.uniq = _.unique = function(array, isSorted, iteratee, context) { + if (!_.isBoolean(isSorted)) { + context = iteratee; + iteratee = isSorted; + isSorted = false; + } + if (iteratee != null) iteratee = cb(iteratee, context); + var result = []; + var seen = []; + for (var i = 0, length = getLength(array); i < length; i++) { + var value = array[i], + computed = iteratee ? iteratee(value, i, array) : value; + if (isSorted) { + if (!i || seen !== computed) result.push(value); + seen = computed; + } else if (iteratee) { + if (!_.contains(seen, computed)) { + seen.push(computed); + result.push(value); + } + } else if (!_.contains(result, value)) { + result.push(value); + } + } + return result; + }; + + // Produce an array that contains the union: each distinct element from all of + // the passed-in arrays. + _.union = function() { + return _.uniq(flatten(arguments, true, true)); + }; + + // Produce an array that contains every item shared between all the + // passed-in arrays. + _.intersection = function(array) { + var result = []; + var argsLength = arguments.length; + for (var i = 0, length = getLength(array); i < length; i++) { + var item = array[i]; + if (_.contains(result, item)) continue; + for (var j = 1; j < argsLength; j++) { + if (!_.contains(arguments[j], item)) break; + } + if (j === argsLength) result.push(item); + } + return result; + }; + + // Take the difference between one array and a number of other arrays. + // Only the elements present in just the first array will remain. 
+ _.difference = function(array) { + var rest = flatten(arguments, true, true, 1); + return _.filter(array, function(value){ + return !_.contains(rest, value); + }); + }; + + // Zip together multiple lists into a single array -- elements that share + // an index go together. + _.zip = function() { + return _.unzip(arguments); + }; + + // Complement of _.zip. Unzip accepts an array of arrays and groups + // each array's elements on shared indices + _.unzip = function(array) { + var length = array && _.max(array, getLength).length || 0; + var result = Array(length); + + for (var index = 0; index < length; index++) { + result[index] = _.pluck(array, index); + } + return result; + }; + + // Converts lists into objects. Pass either a single array of `[key, value]` + // pairs, or two parallel arrays of the same length -- one of keys, and one of + // the corresponding values. + _.object = function(list, values) { + var result = {}; + for (var i = 0, length = getLength(list); i < length; i++) { + if (values) { + result[list[i]] = values[i]; + } else { + result[list[i][0]] = list[i][1]; + } + } + return result; + }; + + // Generator function to create the findIndex and findLastIndex functions + function createPredicateIndexFinder(dir) { + return function(array, predicate, context) { + predicate = cb(predicate, context); + var length = getLength(array); + var index = dir > 0 ? 0 : length - 1; + for (; index >= 0 && index < length; index += dir) { + if (predicate(array[index], index, array)) return index; + } + return -1; + }; + } + + // Returns the first index on an array-like that passes a predicate test + _.findIndex = createPredicateIndexFinder(1); + _.findLastIndex = createPredicateIndexFinder(-1); + + // Use a comparator function to figure out the smallest index at which + // an object should be inserted so as to maintain order. Uses binary search. + _.sortedIndex = function(array, obj, iteratee, context) { + iteratee = cb(iteratee, context, 1); + var value = iteratee(obj); + var low = 0, high = getLength(array); + while (low < high) { + var mid = Math.floor((low + high) / 2); + if (iteratee(array[mid]) < value) low = mid + 1; else high = mid; + } + return low; + }; + + // Generator function to create the indexOf and lastIndexOf functions + function createIndexFinder(dir, predicateFind, sortedIndex) { + return function(array, item, idx) { + var i = 0, length = getLength(array); + if (typeof idx == 'number') { + if (dir > 0) { + i = idx >= 0 ? idx : Math.max(idx + length, i); + } else { + length = idx >= 0 ? Math.min(idx + 1, length) : idx + length + 1; + } + } else if (sortedIndex && idx && length) { + idx = sortedIndex(array, item); + return array[idx] === item ? idx : -1; + } + if (item !== item) { + idx = predicateFind(slice.call(array, i, length), _.isNaN); + return idx >= 0 ? idx + i : -1; + } + for (idx = dir > 0 ? i : length - 1; idx >= 0 && idx < length; idx += dir) { + if (array[idx] === item) return idx; + } + return -1; + }; + } + + // Return the position of the first occurrence of an item in an array, + // or -1 if the item is not included in the array. + // If the array is large and already in sort order, pass `true` + // for **isSorted** to use binary search. + _.indexOf = createIndexFinder(1, _.findIndex, _.sortedIndex); + _.lastIndexOf = createIndexFinder(-1, _.findLastIndex); + + // Generate an integer Array containing an arithmetic progression. A port of + // the native Python `range()` function. 
See + // [the Python documentation](http://docs.python.org/library/functions.html#range). + _.range = function(start, stop, step) { + if (stop == null) { + stop = start || 0; + start = 0; + } + step = step || 1; + + var length = Math.max(Math.ceil((stop - start) / step), 0); + var range = Array(length); + + for (var idx = 0; idx < length; idx++, start += step) { + range[idx] = start; + } + + return range; + }; + + // Function (ahem) Functions + // ------------------ + + // Determines whether to execute a function as a constructor + // or a normal function with the provided arguments + var executeBound = function(sourceFunc, boundFunc, context, callingContext, args) { + if (!(callingContext instanceof boundFunc)) return sourceFunc.apply(context, args); + var self = baseCreate(sourceFunc.prototype); + var result = sourceFunc.apply(self, args); + if (_.isObject(result)) return result; + return self; + }; + + // Create a function bound to a given object (assigning `this`, and arguments, + // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if + // available. + _.bind = function(func, context) { + if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); + if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function'); + var args = slice.call(arguments, 2); + var bound = function() { + return executeBound(func, bound, context, this, args.concat(slice.call(arguments))); + }; + return bound; + }; + + // Partially apply a function by creating a version that has had some of its + // arguments pre-filled, without changing its dynamic `this` context. _ acts + // as a placeholder, allowing any combination of arguments to be pre-filled. + _.partial = function(func) { + var boundArgs = slice.call(arguments, 1); + var bound = function() { + var position = 0, length = boundArgs.length; + var args = Array(length); + for (var i = 0; i < length; i++) { + args[i] = boundArgs[i] === _ ? arguments[position++] : boundArgs[i]; + } + while (position < arguments.length) args.push(arguments[position++]); + return executeBound(func, bound, this, this, args); + }; + return bound; + }; + + // Bind a number of an object's methods to that object. Remaining arguments + // are the method names to be bound. Useful for ensuring that all callbacks + // defined on an object belong to it. + _.bindAll = function(obj) { + var i, length = arguments.length, key; + if (length <= 1) throw new Error('bindAll must be passed function names'); + for (i = 1; i < length; i++) { + key = arguments[i]; + obj[key] = _.bind(obj[key], obj); + } + return obj; + }; + + // Memoize an expensive function by storing its results. + _.memoize = function(func, hasher) { + var memoize = function(key) { + var cache = memoize.cache; + var address = '' + (hasher ? hasher.apply(this, arguments) : key); + if (!_.has(cache, address)) cache[address] = func.apply(this, arguments); + return cache[address]; + }; + memoize.cache = {}; + return memoize; + }; + + // Delays a function for the given number of milliseconds, and then calls + // it with the arguments supplied. + _.delay = function(func, wait) { + var args = slice.call(arguments, 2); + return setTimeout(function(){ + return func.apply(null, args); + }, wait); + }; + + // Defers a function, scheduling it to run after the current call stack has + // cleared. + _.defer = _.partial(_.delay, _, 1); + + // Returns a function, that, when invoked, will only be triggered at most once + // during a given window of time. 
Normally, the throttled function will run + // as much as it can, without ever going more than once per `wait` duration; + // but if you'd like to disable the execution on the leading edge, pass + // `{leading: false}`. To disable execution on the trailing edge, ditto. + _.throttle = function(func, wait, options) { + var context, args, result; + var timeout = null; + var previous = 0; + if (!options) options = {}; + var later = function() { + previous = options.leading === false ? 0 : _.now(); + timeout = null; + result = func.apply(context, args); + if (!timeout) context = args = null; + }; + return function() { + var now = _.now(); + if (!previous && options.leading === false) previous = now; + var remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0 || remaining > wait) { + if (timeout) { + clearTimeout(timeout); + timeout = null; + } + previous = now; + result = func.apply(context, args); + if (!timeout) context = args = null; + } else if (!timeout && options.trailing !== false) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }; + + // Returns a function, that, as long as it continues to be invoked, will not + // be triggered. The function will be called after it stops being called for + // N milliseconds. If `immediate` is passed, trigger the function on the + // leading edge, instead of the trailing. + _.debounce = function(func, wait, immediate) { + var timeout, args, context, timestamp, result; + + var later = function() { + var last = _.now() - timestamp; + + if (last < wait && last >= 0) { + timeout = setTimeout(later, wait - last); + } else { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + if (!timeout) context = args = null; + } + } + }; + + return function() { + context = this; + args = arguments; + timestamp = _.now(); + var callNow = immediate && !timeout; + if (!timeout) timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + context = args = null; + } + + return result; + }; + }; + + // Returns the first function passed as an argument to the second, + // allowing you to adjust arguments, run code before and after, and + // conditionally execute the original function. + _.wrap = function(func, wrapper) { + return _.partial(wrapper, func); + }; + + // Returns a negated version of the passed-in predicate. + _.negate = function(predicate) { + return function() { + return !predicate.apply(this, arguments); + }; + }; + + // Returns a function that is the composition of a list of functions, each + // consuming the return value of the function that follows. + _.compose = function() { + var args = arguments; + var start = args.length - 1; + return function() { + var i = start; + var result = args[start].apply(this, arguments); + while (i--) result = args[i].call(this, result); + return result; + }; + }; + + // Returns a function that will only be executed on and after the Nth call. + _.after = function(times, func) { + return function() { + if (--times < 1) { + return func.apply(this, arguments); + } + }; + }; + + // Returns a function that will only be executed up to (but not including) the Nth call. + _.before = function(times, func) { + var memo; + return function() { + if (--times > 0) { + memo = func.apply(this, arguments); + } + if (times <= 1) func = null; + return memo; + }; + }; + + // Returns a function that will be executed at most one time, no matter how + // often you call it. Useful for lazy initialization. 
+ _.once = _.partial(_.before, 2); + + // Object Functions + // ---------------- + + // Keys in IE < 9 that won't be iterated by `for key in ...` and thus missed. + var hasEnumBug = !{toString: null}.propertyIsEnumerable('toString'); + var nonEnumerableProps = ['valueOf', 'isPrototypeOf', 'toString', + 'propertyIsEnumerable', 'hasOwnProperty', 'toLocaleString']; + + function collectNonEnumProps(obj, keys) { + var nonEnumIdx = nonEnumerableProps.length; + var constructor = obj.constructor; + var proto = (_.isFunction(constructor) && constructor.prototype) || ObjProto; + + // Constructor is a special case. + var prop = 'constructor'; + if (_.has(obj, prop) && !_.contains(keys, prop)) keys.push(prop); + + while (nonEnumIdx--) { + prop = nonEnumerableProps[nonEnumIdx]; + if (prop in obj && obj[prop] !== proto[prop] && !_.contains(keys, prop)) { + keys.push(prop); + } + } + } + + // Retrieve the names of an object's own properties. + // Delegates to **ECMAScript 5**'s native `Object.keys` + _.keys = function(obj) { + if (!_.isObject(obj)) return []; + if (nativeKeys) return nativeKeys(obj); + var keys = []; + for (var key in obj) if (_.has(obj, key)) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve all the property names of an object. + _.allKeys = function(obj) { + if (!_.isObject(obj)) return []; + var keys = []; + for (var key in obj) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve the values of an object's properties. + _.values = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var values = Array(length); + for (var i = 0; i < length; i++) { + values[i] = obj[keys[i]]; + } + return values; + }; + + // Returns the results of applying the iteratee to each element of the object + // In contrast to _.map it returns an object + _.mapObject = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = _.keys(obj), + length = keys.length, + results = {}, + currentKey; + for (var index = 0; index < length; index++) { + currentKey = keys[index]; + results[currentKey] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Convert an object into a list of `[key, value]` pairs. + _.pairs = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var pairs = Array(length); + for (var i = 0; i < length; i++) { + pairs[i] = [keys[i], obj[keys[i]]]; + } + return pairs; + }; + + // Invert the keys and values of an object. The values must be serializable. + _.invert = function(obj) { + var result = {}; + var keys = _.keys(obj); + for (var i = 0, length = keys.length; i < length; i++) { + result[obj[keys[i]]] = keys[i]; + } + return result; + }; + + // Return a sorted list of the function names available on the object. + // Aliased as `methods` + _.functions = _.methods = function(obj) { + var names = []; + for (var key in obj) { + if (_.isFunction(obj[key])) names.push(key); + } + return names.sort(); + }; + + // Extend a given object with all the properties in passed-in object(s). 
+ _.extend = createAssigner(_.allKeys); + + // Assigns a given object with all the own properties in the passed-in object(s) + // (https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) + _.extendOwn = _.assign = createAssigner(_.keys); + + // Returns the first key on an object that passes a predicate test + _.findKey = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = _.keys(obj), key; + for (var i = 0, length = keys.length; i < length; i++) { + key = keys[i]; + if (predicate(obj[key], key, obj)) return key; + } + }; + + // Return a copy of the object only containing the whitelisted properties. + _.pick = function(object, oiteratee, context) { + var result = {}, obj = object, iteratee, keys; + if (obj == null) return result; + if (_.isFunction(oiteratee)) { + keys = _.allKeys(obj); + iteratee = optimizeCb(oiteratee, context); + } else { + keys = flatten(arguments, false, false, 1); + iteratee = function(value, key, obj) { return key in obj; }; + obj = Object(obj); + } + for (var i = 0, length = keys.length; i < length; i++) { + var key = keys[i]; + var value = obj[key]; + if (iteratee(value, key, obj)) result[key] = value; + } + return result; + }; + + // Return a copy of the object without the blacklisted properties. + _.omit = function(obj, iteratee, context) { + if (_.isFunction(iteratee)) { + iteratee = _.negate(iteratee); + } else { + var keys = _.map(flatten(arguments, false, false, 1), String); + iteratee = function(value, key) { + return !_.contains(keys, key); + }; + } + return _.pick(obj, iteratee, context); + }; + + // Fill in a given object with default properties. + _.defaults = createAssigner(_.allKeys, true); + + // Creates an object that inherits from the given prototype object. + // If additional properties are provided then they will be added to the + // created object. + _.create = function(prototype, props) { + var result = baseCreate(prototype); + if (props) _.extendOwn(result, props); + return result; + }; + + // Create a (shallow-cloned) duplicate of an object. + _.clone = function(obj) { + if (!_.isObject(obj)) return obj; + return _.isArray(obj) ? obj.slice() : _.extend({}, obj); + }; + + // Invokes interceptor with the obj, and then returns obj. + // The primary purpose of this method is to "tap into" a method chain, in + // order to perform operations on intermediate results within the chain. + _.tap = function(obj, interceptor) { + interceptor(obj); + return obj; + }; + + // Returns whether an object has a given set of `key:value` pairs. + _.isMatch = function(object, attrs) { + var keys = _.keys(attrs), length = keys.length; + if (object == null) return !length; + var obj = Object(object); + for (var i = 0; i < length; i++) { + var key = keys[i]; + if (attrs[key] !== obj[key] || !(key in obj)) return false; + } + return true; + }; + + + // Internal recursive comparison function for `isEqual`. + var eq = function(a, b, aStack, bStack) { + // Identical objects are equal. `0 === -0`, but they aren't identical. + // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal). + if (a === b) return a !== 0 || 1 / a === 1 / b; + // A strict comparison is necessary because `null == undefined`. + if (a == null || b == null) return a === b; + // Unwrap any wrapped objects. + if (a instanceof _) a = a._wrapped; + if (b instanceof _) b = b._wrapped; + // Compare `[[Class]]` names. 
+ var className = toString.call(a); + if (className !== toString.call(b)) return false; + switch (className) { + // Strings, numbers, regular expressions, dates, and booleans are compared by value. + case '[object RegExp]': + // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i') + case '[object String]': + // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is + // equivalent to `new String("5")`. + return '' + a === '' + b; + case '[object Number]': + // `NaN`s are equivalent, but non-reflexive. + // Object(NaN) is equivalent to NaN + if (+a !== +a) return +b !== +b; + // An `egal` comparison is performed for other numeric values. + return +a === 0 ? 1 / +a === 1 / b : +a === +b; + case '[object Date]': + case '[object Boolean]': + // Coerce dates and booleans to numeric primitive values. Dates are compared by their + // millisecond representations. Note that invalid dates with millisecond representations + // of `NaN` are not equivalent. + return +a === +b; + } + + var areArrays = className === '[object Array]'; + if (!areArrays) { + if (typeof a != 'object' || typeof b != 'object') return false; + + // Objects with different constructors are not equivalent, but `Object`s or `Array`s + // from different frames are. + var aCtor = a.constructor, bCtor = b.constructor; + if (aCtor !== bCtor && !(_.isFunction(aCtor) && aCtor instanceof aCtor && + _.isFunction(bCtor) && bCtor instanceof bCtor) + && ('constructor' in a && 'constructor' in b)) { + return false; + } + } + // Assume equality for cyclic structures. The algorithm for detecting cyclic + // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. + + // Initializing stack of traversed objects. + // It's done here since we only need them for objects and arrays comparison. + aStack = aStack || []; + bStack = bStack || []; + var length = aStack.length; + while (length--) { + // Linear search. Performance is inversely proportional to the number of + // unique nested structures. + if (aStack[length] === a) return bStack[length] === b; + } + + // Add the first object to the stack of traversed objects. + aStack.push(a); + bStack.push(b); + + // Recursively compare objects and arrays. + if (areArrays) { + // Compare array lengths to determine if a deep comparison is necessary. + length = a.length; + if (length !== b.length) return false; + // Deep compare the contents, ignoring non-numeric properties. + while (length--) { + if (!eq(a[length], b[length], aStack, bStack)) return false; + } + } else { + // Deep compare objects. + var keys = _.keys(a), key; + length = keys.length; + // Ensure that both objects contain the same number of properties before comparing deep equality. + if (_.keys(b).length !== length) return false; + while (length--) { + // Deep compare each member + key = keys[length]; + if (!(_.has(b, key) && eq(a[key], b[key], aStack, bStack))) return false; + } + } + // Remove the first object from the stack of traversed objects. + aStack.pop(); + bStack.pop(); + return true; + }; + + // Perform a deep comparison to check if two objects are equal. + _.isEqual = function(a, b) { + return eq(a, b); + }; + + // Is a given array, string, or object empty? + // An "empty" object has no enumerable own-properties. + _.isEmpty = function(obj) { + if (obj == null) return true; + if (isArrayLike(obj) && (_.isArray(obj) || _.isString(obj) || _.isArguments(obj))) return obj.length === 0; + return _.keys(obj).length === 0; + }; + + // Is a given value a DOM element? 
+ _.isElement = function(obj) { + return !!(obj && obj.nodeType === 1); + }; + + // Is a given value an array? + // Delegates to ECMA5's native Array.isArray + _.isArray = nativeIsArray || function(obj) { + return toString.call(obj) === '[object Array]'; + }; + + // Is a given variable an object? + _.isObject = function(obj) { + var type = typeof obj; + return type === 'function' || type === 'object' && !!obj; + }; + + // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp, isError. + _.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp', 'Error'], function(name) { + _['is' + name] = function(obj) { + return toString.call(obj) === '[object ' + name + ']'; + }; + }); + + // Define a fallback version of the method in browsers (ahem, IE < 9), where + // there isn't any inspectable "Arguments" type. + if (!_.isArguments(arguments)) { + _.isArguments = function(obj) { + return _.has(obj, 'callee'); + }; + } + + // Optimize `isFunction` if appropriate. Work around some typeof bugs in old v8, + // IE 11 (#1621), and in Safari 8 (#1929). + if (typeof /./ != 'function' && typeof Int8Array != 'object') { + _.isFunction = function(obj) { + return typeof obj == 'function' || false; + }; + } + + // Is a given object a finite number? + _.isFinite = function(obj) { + return isFinite(obj) && !isNaN(parseFloat(obj)); + }; + + // Is the given value `NaN`? (NaN is the only number which does not equal itself). + _.isNaN = function(obj) { + return _.isNumber(obj) && obj !== +obj; + }; + + // Is a given value a boolean? + _.isBoolean = function(obj) { + return obj === true || obj === false || toString.call(obj) === '[object Boolean]'; + }; + + // Is a given value equal to null? + _.isNull = function(obj) { + return obj === null; + }; + + // Is a given variable undefined? + _.isUndefined = function(obj) { + return obj === void 0; + }; + + // Shortcut function for checking if an object has a given property directly + // on itself (in other words, not on a prototype). + _.has = function(obj, key) { + return obj != null && hasOwnProperty.call(obj, key); + }; + + // Utility Functions + // ----------------- + + // Run Underscore.js in *noConflict* mode, returning the `_` variable to its + // previous owner. Returns a reference to the Underscore object. + _.noConflict = function() { + root._ = previousUnderscore; + return this; + }; + + // Keep the identity function around for default iteratees. + _.identity = function(value) { + return value; + }; + + // Predicate-generating functions. Often useful outside of Underscore. + _.constant = function(value) { + return function() { + return value; + }; + }; + + _.noop = function(){}; + + _.property = property; + + // Generates a function for a given object that returns a given property. + _.propertyOf = function(obj) { + return obj == null ? function(){} : function(key) { + return obj[key]; + }; + }; + + // Returns a predicate for checking whether an object has a given set of + // `key:value` pairs. + _.matcher = _.matches = function(attrs) { + attrs = _.extendOwn({}, attrs); + return function(obj) { + return _.isMatch(obj, attrs); + }; + }; + + // Run a function **n** times. + _.times = function(n, iteratee, context) { + var accum = Array(Math.max(0, n)); + iteratee = optimizeCb(iteratee, context, 1); + for (var i = 0; i < n; i++) accum[i] = iteratee(i); + return accum; + }; + + // Return a random integer between min and max (inclusive). 
+ _.random = function(min, max) { + if (max == null) { + max = min; + min = 0; + } + return min + Math.floor(Math.random() * (max - min + 1)); + }; + + // A (possibly faster) way to get the current timestamp as an integer. + _.now = Date.now || function() { + return new Date().getTime(); + }; + + // List of HTML entities for escaping. + var escapeMap = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + "'": ''', + '`': '`' + }; + var unescapeMap = _.invert(escapeMap); + + // Functions for escaping and unescaping strings to/from HTML interpolation. + var createEscaper = function(map) { + var escaper = function(match) { + return map[match]; + }; + // Regexes for identifying a key that needs to be escaped + var source = '(?:' + _.keys(map).join('|') + ')'; + var testRegexp = RegExp(source); + var replaceRegexp = RegExp(source, 'g'); + return function(string) { + string = string == null ? '' : '' + string; + return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string; + }; + }; + _.escape = createEscaper(escapeMap); + _.unescape = createEscaper(unescapeMap); + + // If the value of the named `property` is a function then invoke it with the + // `object` as context; otherwise, return it. + _.result = function(object, property, fallback) { + var value = object == null ? void 0 : object[property]; + if (value === void 0) { + value = fallback; + } + return _.isFunction(value) ? value.call(object) : value; + }; + + // Generate a unique integer id (unique within the entire client session). + // Useful for temporary DOM ids. + var idCounter = 0; + _.uniqueId = function(prefix) { + var id = ++idCounter + ''; + return prefix ? prefix + id : id; + }; + + // By default, Underscore uses ERB-style template delimiters, change the + // following template settings to use alternative delimiters. + _.templateSettings = { + evaluate : /<%([\s\S]+?)%>/g, + interpolate : /<%=([\s\S]+?)%>/g, + escape : /<%-([\s\S]+?)%>/g + }; + + // When customizing `templateSettings`, if you don't want to define an + // interpolation, evaluation or escaping regex, we need one that is + // guaranteed not to match. + var noMatch = /(.)^/; + + // Certain characters need to be escaped so that they can be put into a + // string literal. + var escapes = { + "'": "'", + '\\': '\\', + '\r': 'r', + '\n': 'n', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; + + var escaper = /\\|'|\r|\n|\u2028|\u2029/g; + + var escapeChar = function(match) { + return '\\' + escapes[match]; + }; + + // JavaScript micro-templating, similar to John Resig's implementation. + // Underscore templating handles arbitrary delimiters, preserves whitespace, + // and correctly escapes quotes within interpolated code. + // NB: `oldSettings` only exists for backwards compatibility. + _.template = function(text, settings, oldSettings) { + if (!settings && oldSettings) settings = oldSettings; + settings = _.defaults({}, settings, _.templateSettings); + + // Combine delimiters into one regular expression via alternation. + var matcher = RegExp([ + (settings.escape || noMatch).source, + (settings.interpolate || noMatch).source, + (settings.evaluate || noMatch).source + ].join('|') + '|$', 'g'); + + // Compile the template source, escaping string literals appropriately. 
+ var index = 0; + var source = "__p+='"; + text.replace(matcher, function(match, escape, interpolate, evaluate, offset) { + source += text.slice(index, offset).replace(escaper, escapeChar); + index = offset + match.length; + + if (escape) { + source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'"; + } else if (interpolate) { + source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'"; + } else if (evaluate) { + source += "';\n" + evaluate + "\n__p+='"; + } + + // Adobe VMs need the match returned to produce the correct offest. + return match; + }); + source += "';\n"; + + // If a variable is not specified, place data values in local scope. + if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n'; + + source = "var __t,__p='',__j=Array.prototype.join," + + "print=function(){__p+=__j.call(arguments,'');};\n" + + source + 'return __p;\n'; + + try { + var render = new Function(settings.variable || 'obj', '_', source); + } catch (e) { + e.source = source; + throw e; + } + + var template = function(data) { + return render.call(this, data, _); + }; + + // Provide the compiled source as a convenience for precompilation. + var argument = settings.variable || 'obj'; + template.source = 'function(' + argument + '){\n' + source + '}'; + + return template; + }; + + // Add a "chain" function. Start chaining a wrapped Underscore object. + _.chain = function(obj) { + var instance = _(obj); + instance._chain = true; + return instance; + }; + + // OOP + // --------------- + // If Underscore is called as a function, it returns a wrapped object that + // can be used OO-style. This wrapper holds altered versions of all the + // underscore functions. Wrapped objects may be chained. + + // Helper function to continue chaining intermediate results. + var result = function(instance, obj) { + return instance._chain ? _(obj).chain() : obj; + }; + + // Add your own custom functions to the Underscore object. + _.mixin = function(obj) { + _.each(_.functions(obj), function(name) { + var func = _[name] = obj[name]; + _.prototype[name] = function() { + var args = [this._wrapped]; + push.apply(args, arguments); + return result(this, func.apply(_, args)); + }; + }); + }; + + // Add all of the Underscore functions to the wrapper object. + _.mixin(_); + + // Add all mutator Array functions to the wrapper. + _.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + var obj = this._wrapped; + method.apply(obj, arguments); + if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0]; + return result(this, obj); + }; + }); + + // Add all accessor Array functions to the wrapper. + _.each(['concat', 'join', 'slice'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + return result(this, method.apply(this._wrapped, arguments)); + }; + }); + + // Extracts the result from a wrapped and chained object. + _.prototype.value = function() { + return this._wrapped; + }; + + // Provide unwrapping proxy for some methods used in engine operations + // such as arithmetic and JSON stringification. + _.prototype.valueOf = _.prototype.toJSON = _.prototype.value; + + _.prototype.toString = function() { + return '' + this._wrapped; + }; + + // AMD registration happens at the end for compatibility with AMD loaders + // that may not enforce next-turn semantics on modules. 
Even though general + // practice for AMD registration is to be anonymous, underscore registers + // as a named module because, like jQuery, it is a base library that is + // popular enough to be bundled in a third party lib, but not be part of + // an AMD load request. Those cases could generate an error when an + // anonymous define() is called outside of a loader request. + if (typeof define === 'function' && define.amd) { + define('underscore', [], function() { + return _; + }); + } +}.call(this)); + +},{}],26:[function(require,module,exports){ +arguments[4][19][0].apply(exports,arguments) +},{"dup":19}],27:[function(require,module,exports){ +module.exports = function isBuffer(arg) { + return arg && typeof arg === 'object' + && typeof arg.copy === 'function' + && typeof arg.fill === 'function' + && typeof arg.readUInt8 === 'function'; +} +},{}],28:[function(require,module,exports){ +(function (process,global){ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var formatRegExp = /%[sdj%]/g; +exports.format = function(f) { + if (!isString(f)) { + var objects = []; + for (var i = 0; i < arguments.length; i++) { + objects.push(inspect(arguments[i])); + } + return objects.join(' '); + } + + var i = 1; + var args = arguments; + var len = args.length; + var str = String(f).replace(formatRegExp, function(x) { + if (x === '%%') return '%'; + if (i >= len) return x; + switch (x) { + case '%s': return String(args[i++]); + case '%d': return Number(args[i++]); + case '%j': + try { + return JSON.stringify(args[i++]); + } catch (_) { + return '[Circular]'; + } + default: + return x; + } + }); + for (var x = args[i]; i < len; x = args[++i]) { + if (isNull(x) || !isObject(x)) { + str += ' ' + x; + } else { + str += ' ' + inspect(x); + } + } + return str; +}; + + +// Mark that a method should not be used. +// Returns a modified function which warns once by default. +// If --no-deprecation is set, then it is a no-op. +exports.deprecate = function(fn, msg) { + // Allow for deprecating things in the process of starting up. 
+ if (isUndefined(global.process)) { + return function() { + return exports.deprecate(fn, msg).apply(this, arguments); + }; + } + + if (process.noDeprecation === true) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (process.throwDeprecation) { + throw new Error(msg); + } else if (process.traceDeprecation) { + console.trace(msg); + } else { + console.error(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +}; + + +var debugs = {}; +var debugEnviron; +exports.debuglog = function(set) { + if (isUndefined(debugEnviron)) + debugEnviron = process.env.NODE_DEBUG || ''; + set = set.toUpperCase(); + if (!debugs[set]) { + if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { + var pid = process.pid; + debugs[set] = function() { + var msg = exports.format.apply(exports, arguments); + console.error('%s %d: %s', set, pid, msg); + }; + } else { + debugs[set] = function() {}; + } + } + return debugs[set]; +}; + + +/** + * Echos the value of a value. Trys to print the value out + * in the best way possible given the different types. + * + * @param {Object} obj The object to print out. + * @param {Object} opts Optional options object that alters the output. + */ +/* legacy: obj, showHidden, depth, colors*/ +function inspect(obj, opts) { + // default options + var ctx = { + seen: [], + stylize: stylizeNoColor + }; + // legacy... + if (arguments.length >= 3) ctx.depth = arguments[2]; + if (arguments.length >= 4) ctx.colors = arguments[3]; + if (isBoolean(opts)) { + // legacy... + ctx.showHidden = opts; + } else if (opts) { + // got an "options" object + exports._extend(ctx, opts); + } + // set default options + if (isUndefined(ctx.showHidden)) ctx.showHidden = false; + if (isUndefined(ctx.depth)) ctx.depth = 2; + if (isUndefined(ctx.colors)) ctx.colors = false; + if (isUndefined(ctx.customInspect)) ctx.customInspect = true; + if (ctx.colors) ctx.stylize = stylizeWithColor; + return formatValue(ctx, obj, ctx.depth); +} +exports.inspect = inspect; + + +// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics +inspect.colors = { + 'bold' : [1, 22], + 'italic' : [3, 23], + 'underline' : [4, 24], + 'inverse' : [7, 27], + 'white' : [37, 39], + 'grey' : [90, 39], + 'black' : [30, 39], + 'blue' : [34, 39], + 'cyan' : [36, 39], + 'green' : [32, 39], + 'magenta' : [35, 39], + 'red' : [31, 39], + 'yellow' : [33, 39] +}; + +// Don't use 'blue' not visible on cmd.exe +inspect.styles = { + 'special': 'cyan', + 'number': 'yellow', + 'boolean': 'yellow', + 'undefined': 'grey', + 'null': 'bold', + 'string': 'green', + 'date': 'magenta', + // "name": intentionally not styling + 'regexp': 'red' +}; + + +function stylizeWithColor(str, styleType) { + var style = inspect.styles[styleType]; + + if (style) { + return '\u001b[' + inspect.colors[style][0] + 'm' + str + + '\u001b[' + inspect.colors[style][1] + 'm'; + } else { + return str; + } +} + + +function stylizeNoColor(str, styleType) { + return str; +} + + +function arrayToHash(array) { + var hash = {}; + + array.forEach(function(val, idx) { + hash[val] = true; + }); + + return hash; +} + + +function formatValue(ctx, value, recurseTimes) { + // Provide a hook for user-specified inspect functions. 
+ // Check that value is an object with an inspect function on it + if (ctx.customInspect && + value && + isFunction(value.inspect) && + // Filter out the util module, it's inspect function is special + value.inspect !== exports.inspect && + // Also filter out any prototype objects using the circular check. + !(value.constructor && value.constructor.prototype === value)) { + var ret = value.inspect(recurseTimes, ctx); + if (!isString(ret)) { + ret = formatValue(ctx, ret, recurseTimes); + } + return ret; + } + + // Primitive types cannot have properties + var primitive = formatPrimitive(ctx, value); + if (primitive) { + return primitive; + } + + // Look up the keys of the object. + var keys = Object.keys(value); + var visibleKeys = arrayToHash(keys); + + if (ctx.showHidden) { + keys = Object.getOwnPropertyNames(value); + } + + // IE doesn't make error fields non-enumerable + // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx + if (isError(value) + && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) { + return formatError(value); + } + + // Some type of object without properties can be shortcutted. + if (keys.length === 0) { + if (isFunction(value)) { + var name = value.name ? ': ' + value.name : ''; + return ctx.stylize('[Function' + name + ']', 'special'); + } + if (isRegExp(value)) { + return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); + } + if (isDate(value)) { + return ctx.stylize(Date.prototype.toString.call(value), 'date'); + } + if (isError(value)) { + return formatError(value); + } + } + + var base = '', array = false, braces = ['{', '}']; + + // Make Array say that they are Array + if (isArray(value)) { + array = true; + braces = ['[', ']']; + } + + // Make functions say that they are functions + if (isFunction(value)) { + var n = value.name ? ': ' + value.name : ''; + base = ' [Function' + n + ']'; + } + + // Make RegExps say that they are RegExps + if (isRegExp(value)) { + base = ' ' + RegExp.prototype.toString.call(value); + } + + // Make dates with properties first say the date + if (isDate(value)) { + base = ' ' + Date.prototype.toUTCString.call(value); + } + + // Make error with message first say the error + if (isError(value)) { + base = ' ' + formatError(value); + } + + if (keys.length === 0 && (!array || value.length == 0)) { + return braces[0] + base + braces[1]; + } + + if (recurseTimes < 0) { + if (isRegExp(value)) { + return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); + } else { + return ctx.stylize('[Object]', 'special'); + } + } + + ctx.seen.push(value); + + var output; + if (array) { + output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); + } else { + output = keys.map(function(key) { + return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); + }); + } + + ctx.seen.pop(); + + return reduceToSingleString(output, base, braces); +} + + +function formatPrimitive(ctx, value) { + if (isUndefined(value)) + return ctx.stylize('undefined', 'undefined'); + if (isString(value)) { + var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') + .replace(/'/g, "\\'") + .replace(/\\"/g, '"') + '\''; + return ctx.stylize(simple, 'string'); + } + if (isNumber(value)) + return ctx.stylize('' + value, 'number'); + if (isBoolean(value)) + return ctx.stylize('' + value, 'boolean'); + // For some reason typeof null is "object", so special case here. 
+ if (isNull(value)) + return ctx.stylize('null', 'null'); +} + + +function formatError(value) { + return '[' + Error.prototype.toString.call(value) + ']'; +} + + +function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { + var output = []; + for (var i = 0, l = value.length; i < l; ++i) { + if (hasOwnProperty(value, String(i))) { + output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, + String(i), true)); + } else { + output.push(''); + } + } + keys.forEach(function(key) { + if (!key.match(/^\d+$/)) { + output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, + key, true)); + } + }); + return output; +} + + +function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { + var name, str, desc; + desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; + if (desc.get) { + if (desc.set) { + str = ctx.stylize('[Getter/Setter]', 'special'); + } else { + str = ctx.stylize('[Getter]', 'special'); + } + } else { + if (desc.set) { + str = ctx.stylize('[Setter]', 'special'); + } + } + if (!hasOwnProperty(visibleKeys, key)) { + name = '[' + key + ']'; + } + if (!str) { + if (ctx.seen.indexOf(desc.value) < 0) { + if (isNull(recurseTimes)) { + str = formatValue(ctx, desc.value, null); + } else { + str = formatValue(ctx, desc.value, recurseTimes - 1); + } + if (str.indexOf('\n') > -1) { + if (array) { + str = str.split('\n').map(function(line) { + return ' ' + line; + }).join('\n').substr(2); + } else { + str = '\n' + str.split('\n').map(function(line) { + return ' ' + line; + }).join('\n'); + } + } + } else { + str = ctx.stylize('[Circular]', 'special'); + } + } + if (isUndefined(name)) { + if (array && key.match(/^\d+$/)) { + return str; + } + name = JSON.stringify('' + key); + if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { + name = name.substr(1, name.length - 2); + name = ctx.stylize(name, 'name'); + } else { + name = name.replace(/'/g, "\\'") + .replace(/\\"/g, '"') + .replace(/(^"|"$)/g, "'"); + name = ctx.stylize(name, 'string'); + } + } + + return name + ': ' + str; +} + + +function reduceToSingleString(output, base, braces) { + var numLinesEst = 0; + var length = output.reduce(function(prev, cur) { + numLinesEst++; + if (cur.indexOf('\n') >= 0) numLinesEst++; + return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; + }, 0); + + if (length > 60) { + return braces[0] + + (base === '' ? '' : base + '\n ') + + ' ' + + output.join(',\n ') + + ' ' + + braces[1]; + } + + return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; +} + + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. 
+function isArray(ar) { + return Array.isArray(ar); +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return isObject(re) && objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return isObject(d) && objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return isObject(e) && + (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = require('./support/isBuffer'); + +function objectToString(o) { + return Object.prototype.toString.call(o); +} + + +function pad(n) { + return n < 10 ? '0' + n.toString(10) : n.toString(10); +} + + +var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', + 'Oct', 'Nov', 'Dec']; + +// 26 Feb 16:19:34 +function timestamp() { + var d = new Date(); + var time = [pad(d.getHours()), + pad(d.getMinutes()), + pad(d.getSeconds())].join(':'); + return [d.getDate(), months[d.getMonth()], time].join(' '); +} + + +// log is just a thin wrapper to console.log that prepends a timestamp +exports.log = function() { + console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); +}; + + +/** + * Inherit the prototype methods from one constructor into another. + * + * The Function.prototype.inherits from lang.js rewritten as a standalone + * function (not on Function.prototype). NOTE: If this file is to be loaded + * during bootstrapping this function needs to be rewritten using some native + * functions as prototype setup using normal JavaScript does not work as + * expected during bootstrapping (see mirror.js in r114903). + * + * @param {function} ctor Constructor function which needs to inherit the + * prototype. + * @param {function} superCtor Constructor function to inherit prototype from. + */ +exports.inherits = require('inherits'); + +exports._extend = function(origin, add) { + // Don't do anything if add isn't an object + if (!add || !isObject(add)) return origin; + + var keys = Object.keys(add); + var i = keys.length; + while (i--) { + origin[keys[i]] = add[keys[i]]; + } + return origin; +}; + +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} + +}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {}) +},{"./support/isBuffer":27,"_process":24,"inherits":26}],29:[function(require,module,exports){ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + +},{}]},{},[7])(7) +}); \ No newline at end of file diff --git a/2.0.0/assets/javascripts/workers/search.16e2a7d4.min.js b/2.0.0/assets/javascripts/workers/search.16e2a7d4.min.js new file mode 100644 index 00000000..e0dc159e --- /dev/null +++ b/2.0.0/assets/javascripts/workers/search.16e2a7d4.min.js @@ -0,0 +1,48 @@ +"use strict";(()=>{var ge=Object.create;var W=Object.defineProperty,ye=Object.defineProperties,me=Object.getOwnPropertyDescriptor,ve=Object.getOwnPropertyDescriptors,xe=Object.getOwnPropertyNames,G=Object.getOwnPropertySymbols,Se=Object.getPrototypeOf,X=Object.prototype.hasOwnProperty,Qe=Object.prototype.propertyIsEnumerable;var J=(t,e,r)=>e in t?W(t,e,{enumerable:!0,configurable:!0,writable:!0,value:r}):t[e]=r,M=(t,e)=>{for(var r in e||(e={}))X.call(e,r)&&J(t,r,e[r]);if(G)for(var r of G(e))Qe.call(e,r)&&J(t,r,e[r]);return t},Z=(t,e)=>ye(t,ve(e));var K=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports);var be=(t,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of xe(e))!X.call(t,i)&&i!==r&&W(t,i,{get:()=>e[i],enumerable:!(n=me(e,i))||n.enumerable});return t};var H=(t,e,r)=>(r=t!=null?ge(Se(t)):{},be(e||!t||!t.__esModule?W(r,"default",{value:t,enumerable:!0}):r,t));var z=(t,e,r)=>new Promise((n,i)=>{var s=u=>{try{a(r.next(u))}catch(c){i(c)}},o=u=>{try{a(r.throw(u))}catch(c){i(c)}},a=u=>u.done?n(u.value):Promise.resolve(u.value).then(s,o);a((r=r.apply(t,e)).next())});var re=K((ee,te)=>{/** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + */(function(){var t=function(e){var r=new t.Builder;return r.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),r.searchPipeline.add(t.stemmer),e.call(r,r),r.build()};t.version="2.3.9";/*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + */t.utils={},t.utils.warn=function(e){return function(r){e.console&&console.warn&&console.warn(r)}}(this),t.utils.asString=function(e){return e==null?"":e.toString()},t.utils.clone=function(e){if(e==null)return e;for(var r=Object.create(null),n=Object.keys(e),i=0;i0){var h=t.utils.clone(r)||{};h.position=[a,c],h.index=s.length,s.push(new t.Token(n.slice(a,o),h))}a=o+1}}return s},t.tokenizer.separator=/[\s\-]+/;/*! 
+ * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + */t.Pipeline=function(){this._stack=[]},t.Pipeline.registeredFunctions=Object.create(null),t.Pipeline.registerFunction=function(e,r){r in this.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+r),e.label=r,t.Pipeline.registeredFunctions[e.label]=e},t.Pipeline.warnIfFunctionNotRegistered=function(e){var r=e.label&&e.label in this.registeredFunctions;r||t.utils.warn(`Function is not registered with pipeline. This may cause problems when serialising the index. +`,e)},t.Pipeline.load=function(e){var r=new t.Pipeline;return e.forEach(function(n){var i=t.Pipeline.registeredFunctions[n];if(i)r.add(i);else throw new Error("Cannot load unregistered function: "+n)}),r},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(r){t.Pipeline.warnIfFunctionNotRegistered(r),this._stack.push(r)},this)},t.Pipeline.prototype.after=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");n=n+1,this._stack.splice(n,0,r)},t.Pipeline.prototype.before=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");this._stack.splice(n,0,r)},t.Pipeline.prototype.remove=function(e){var r=this._stack.indexOf(e);r!=-1&&this._stack.splice(r,1)},t.Pipeline.prototype.run=function(e){for(var r=this._stack.length,n=0;n1&&(oe&&(n=s),o!=e);)i=n-r,s=r+Math.floor(i/2),o=this.elements[s*2];if(o==e||o>e)return s*2;if(ou?h+=2:a==u&&(r+=n[c+1]*i[h+1],c+=2,h+=2);return r},t.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},t.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),r=1,n=0;r0){var o=s.str.charAt(0),a;o in s.node.edges?a=s.node.edges[o]:(a=new t.TokenSet,s.node.edges[o]=a),s.str.length==1&&(a.final=!0),i.push({node:a,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(s.editsRemaining!=0){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new t.TokenSet;s.node.edges["*"]=u}if(s.str.length==0&&(u.final=!0),i.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&i.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),s.str.length==1&&(s.node.final=!0),s.str.length>=1){if("*"in s.node.edges)var c=s.node.edges["*"];else{var c=new t.TokenSet;s.node.edges["*"]=c}s.str.length==1&&(c.final=!0),i.push({node:c,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var h=s.str.charAt(0),y=s.str.charAt(1),g;y in s.node.edges?g=s.node.edges[y]:(g=new t.TokenSet,s.node.edges[y]=g),s.str.length==1&&(g.final=!0),i.push({node:g,editsRemaining:s.editsRemaining-1,str:h+s.str.slice(2)})}}}return n},t.TokenSet.fromString=function(e){for(var r=new t.TokenSet,n=r,i=0,s=e.length;i=e;r--){var n=this.uncheckedNodes[r],i=n.child.toString();i in this.minimizedNodes?n.parent.edges[n.char]=this.minimizedNodes[i]:(n.child._str=i,this.minimizedNodes[i]=n.child),this.uncheckedNodes.pop()}};/*! 
+ * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + */t.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},t.Index.prototype.search=function(e){return this.query(function(r){var n=new t.QueryParser(e,r);n.parse()})},t.Index.prototype.query=function(e){for(var r=new t.Query(this.fields),n=Object.create(null),i=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),u=0;u1?this._b=1:this._b=e},t.Builder.prototype.k1=function(e){this._k1=e},t.Builder.prototype.add=function(e,r){var n=e[this._ref],i=Object.keys(this._fields);this._documents[n]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return t.QueryLexer.EOS;var e=this.str.charAt(this.pos);return this.pos+=1,e},t.QueryLexer.prototype.width=function(){return this.pos-this.start},t.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},t.QueryLexer.prototype.backup=function(){this.pos-=1},t.QueryLexer.prototype.acceptDigitRun=function(){var e,r;do e=this.next(),r=e.charCodeAt(0);while(r>47&&r<58);e!=t.QueryLexer.EOS&&this.backup()},t.QueryLexer.prototype.more=function(){return this.pos1&&(e.backup(),e.emit(t.QueryLexer.TERM)),e.ignore(),e.more())return t.QueryLexer.lexText},t.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.EDIT_DISTANCE),t.QueryLexer.lexText},t.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.BOOST),t.QueryLexer.lexText},t.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(t.QueryLexer.TERM)},t.QueryLexer.termSeparator=t.tokenizer.separator,t.QueryLexer.lexText=function(e){for(;;){var r=e.next();if(r==t.QueryLexer.EOS)return t.QueryLexer.lexEOS;if(r.charCodeAt(0)==92){e.escapeCharacter();continue}if(r==":")return t.QueryLexer.lexField;if(r=="~")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexEditDistance;if(r=="^")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexBoost;if(r=="+"&&e.width()===1||r=="-"&&e.width()===1)return e.emit(t.QueryLexer.PRESENCE),t.QueryLexer.lexText;if(r.match(t.QueryLexer.termSeparator))return t.QueryLexer.lexTerm}},t.QueryParser=function(e,r){this.lexer=new t.QueryLexer(e),this.query=r,this.currentClause={},this.lexemeIdx=0},t.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=t.QueryParser.parseClause;e;)e=e(this);return this.query},t.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},t.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},t.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},t.QueryParser.parseClause=function(e){var r=e.peekLexeme();if(r!=null)switch(r.type){case t.QueryLexer.PRESENCE:return t.QueryParser.parsePresence;case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(n+=" with value '"+r.str+"'"),new t.QueryParseError(n,r.start,r.end)}},t.QueryParser.parsePresence=function(e){var r=e.consumeLexeme();if(r!=null){switch(r.str){case"-":e.currentClause.presence=t.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=t.Query.presence.REQUIRED;break;default:var n="unrecognised presence operator'"+r.str+"'";throw new 
t.QueryParseError(n,r.start,r.end)}var i=e.peekLexeme();if(i==null){var n="expecting term or field, found nothing";throw new t.QueryParseError(n,r.start,r.end)}switch(i.type){case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expecting term or field, found '"+i.type+"'";throw new t.QueryParseError(n,i.start,i.end)}}},t.QueryParser.parseField=function(e){var r=e.consumeLexeme();if(r!=null){if(e.query.allFields.indexOf(r.str)==-1){var n=e.query.allFields.map(function(o){return"'"+o+"'"}).join(", "),i="unrecognised field '"+r.str+"', possible fields: "+n;throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.fields=[r.str];var s=e.peekLexeme();if(s==null){var i="expecting term, found nothing";throw new t.QueryParseError(i,r.start,r.end)}switch(s.type){case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var i="expecting term, found '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseTerm=function(e){var r=e.consumeLexeme();if(r!=null){e.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(e.currentClause.usePipeline=!1);var n=e.peekLexeme();if(n==null){e.nextClause();return}switch(n.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+n.type+"'";throw new t.QueryParseError(i,n.start,n.end)}}},t.QueryParser.parseEditDistance=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="edit distance must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.editDistance=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseBoost=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="boost must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.boost=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},function(e,r){typeof define=="function"&&define.amd?define(r):typeof ee=="object"?te.exports=r():e.lunr=r()}(this,function(){return t})})()});var q=K((Re,ne)=>{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Le=/["'&<>]/;ne.exports=we;function we(t){var e=""+t,r=Le.exec(e);if(!r)return e;var n,i="",s=0,o=0;for(s=r.index;s=0;r--){let n=t[r];typeof n=="string"?n=document.createTextNode(n):n.parentNode&&n.parentNode.removeChild(n),r?e.insertBefore(this.previousSibling,n):e.replaceChild(n,this)}}}));var ie=H(q());function se(t){let e=new Map,r=new Set;for(let n of t){let[i,s]=n.location.split("#"),o=n.location,a=n.title,u=n.tags,c=(0,ie.default)(n.text).replace(/\s+(?=[,.:;!?])/g,"").replace(/\s+/g," ");if(s){let h=e.get(i);r.has(h)?e.set(o,{location:o,title:a,text:c,parent:h}):(h.title=n.title,h.text=c,r.add(h))}else e.set(o,M({location:o,title:a,text:c},u&&{tags:u}))}return e}var oe=H(q());function ae(t,e){let r=new RegExp(t.separator,"img"),n=(i,s,o)=>`${s}${o}`;return i=>{i=i.replace(/[\s*+\-:~^]+/g," ").trim();let s=new RegExp(`(^|${t.separator})(${i.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return o=>(e?(0,oe.default)(o):o).replace(s,n).replace(/<\/mark>(\s+)]*>/img,"$1")}}function ue(t){let e=new lunr.Query(["title","text"]);return new lunr.QueryParser(t,e).parse(),e.clauses}function ce(t,e){var i;let r=new Set(t),n={};for(let s=0;s!n.has(i)))]}var U=class{constructor({config:e,docs:r,options:n}){this.options=n,this.documents=se(r),this.highlight=ae(e,!1),lunr.tokenizer.separator=new RegExp(e.separator),this.index=lunr(function(){e.lang.length===1&&e.lang[0]!=="en"?this.use(lunr[e.lang[0]]):e.lang.length>1&&this.use(lunr.multiLanguage(...e.lang));let i=Ee(["trimmer","stopWordFilter","stemmer"],n.pipeline);for(let s of e.lang.map(o=>o==="en"?lunr:lunr[o]))for(let o of i)this.pipeline.remove(s[o]),this.searchPipeline.remove(s[o]);this.ref("location"),this.field("title",{boost:1e3}),this.field("text"),this.field("tags",{boost:1e6,extractor:s=>{let{tags:o=[]}=s;return o.reduce((a,u)=>[...a,...lunr.tokenizer(u)],[])}});for(let s of r)this.add(s,{boost:s.boost})})}search(e){if(e)try{let r=this.highlight(e),n=ue(e).filter(o=>o.presence!==lunr.Query.presence.PROHIBITED),i=this.index.search(`${e}*`).reduce((o,{ref:a,score:u,matchData:c})=>{let h=this.documents.get(a);if(typeof h!="undefined"){let{location:y,title:g,text:b,tags:m,parent:Q}=h,p=ce(n,Object.keys(c.metadata)),d=+!Q+ +Object.values(p).every(w=>w);o.push(Z(M({location:y,title:r(g),text:r(b)},m&&{tags:m.map(r)}),{score:u*(1+d),terms:p}))}return o},[]).sort((o,a)=>a.score-o.score).reduce((o,a)=>{let u=this.documents.get(a.location);if(typeof u!="undefined"){let c="parent"in u?u.parent.location:u.location;o.set(c,[...o.get(c)||[],a])}return o},new Map),s;if(this.options.suggestions){let o=this.index.query(a=>{for(let u of n)a.term(u.term,{fields:["title"],presence:lunr.Query.presence.REQUIRED,wildcard:lunr.Query.wildcard.TRAILING})});s=o.length?Object.keys(o[0].matchData.metadata):[]}return M({items:[...i.values()]},typeof s!="undefined"&&{suggestions:s})}catch(r){console.warn(`Invalid query: ${e} \u2013 see https://bit.ly/2s3ChXG`)}return{items:[]}}};var Y;function ke(t){return z(this,null,function*(){let e="../lunr";if(typeof parent!="undefined"&&"IFrameWorker"in parent){let n=document.querySelector("script[src]"),[i]=n.src.split("/worker");e=e.replace("..",i)}let r=[];for(let n of 
t.lang){switch(n){case"ja":r.push(`${e}/tinyseg.js`);break;case"hi":case"th":r.push(`${e}/wordcut.js`);break}n!=="en"&&r.push(`${e}/min/lunr.${n}.min.js`)}t.lang.length>1&&r.push(`${e}/min/lunr.multi.min.js`),r.length&&(yield importScripts(`${e}/min/lunr.stemmer.support.min.js`,...r))})}function Te(t){return z(this,null,function*(){switch(t.type){case 0:return yield ke(t.data.config),Y=new U(t.data),{type:1};case 2:return{type:3,data:Y?Y.search(t.data):{items:[]}};default:throw new TypeError("Invalid message type")}})}self.lunr=le.default;addEventListener("message",t=>z(void 0,null,function*(){postMessage(yield Te(t.data))}));})(); +//# sourceMappingURL=search.16e2a7d4.min.js.map + diff --git a/2.0.0/assets/javascripts/workers/search.16e2a7d4.min.js.map b/2.0.0/assets/javascripts/workers/search.16e2a7d4.min.js.map new file mode 100644 index 00000000..fa01f374 --- /dev/null +++ b/2.0.0/assets/javascripts/workers/search.16e2a7d4.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/lunr/lunr.js", "node_modules/escape-html/index.js", "src/assets/javascripts/integrations/search/worker/main/index.ts", "src/assets/javascripts/polyfills/index.ts", "src/assets/javascripts/integrations/search/document/index.ts", "src/assets/javascripts/integrations/search/highlighter/index.ts", "src/assets/javascripts/integrations/search/query/_/index.ts", "src/assets/javascripts/integrations/search/_/index.ts"], + "sourceRoot": "../../../..", + "sourcesContent": ["/**\n * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9\n * Copyright (C) 2020 Oliver Nightingale\n * @license MIT\n */\n\n;(function(){\n\n/**\n * A convenience function for configuring and constructing\n * a new lunr Index.\n *\n * A lunr.Builder instance is created and the pipeline setup\n * with a trimmer, stop word filter and stemmer.\n *\n * This builder object is yielded to the configuration function\n * that is passed as a parameter, allowing the list of fields\n * and other builder parameters to be customised.\n *\n * All documents _must_ be added within the passed config function.\n *\n * @example\n * var idx = lunr(function () {\n * this.field('title')\n * this.field('body')\n * this.ref('id')\n *\n * documents.forEach(function (doc) {\n * this.add(doc)\n * }, this)\n * })\n *\n * @see {@link lunr.Builder}\n * @see {@link lunr.Pipeline}\n * @see {@link lunr.trimmer}\n * @see {@link lunr.stopWordFilter}\n * @see {@link lunr.stemmer}\n * @namespace {function} lunr\n */\nvar lunr = function (config) {\n var builder = new lunr.Builder\n\n builder.pipeline.add(\n lunr.trimmer,\n lunr.stopWordFilter,\n lunr.stemmer\n )\n\n builder.searchPipeline.add(\n lunr.stemmer\n )\n\n config.call(builder, builder)\n return builder.build()\n}\n\nlunr.version = \"2.3.9\"\n/*!\n * lunr.utils\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A namespace containing utils for the rest of the lunr library\n * @namespace lunr.utils\n */\nlunr.utils = {}\n\n/**\n * Print a warning message to the console.\n *\n * @param {String} message The message to be printed.\n * @memberOf lunr.utils\n * @function\n */\nlunr.utils.warn = (function (global) {\n /* eslint-disable no-console */\n return function (message) {\n if (global.console && console.warn) {\n console.warn(message)\n }\n }\n /* eslint-enable no-console */\n})(this)\n\n/**\n * Convert an object to a string.\n *\n * In the case of `null` and `undefined` the function returns\n * the empty string, in all other cases the result of calling\n * `toString` 
on the passed object is returned.\n *\n * @param {Any} obj The object to convert to a string.\n * @return {String} string representation of the passed object.\n * @memberOf lunr.utils\n */\nlunr.utils.asString = function (obj) {\n if (obj === void 0 || obj === null) {\n return \"\"\n } else {\n return obj.toString()\n }\n}\n\n/**\n * Clones an object.\n *\n * Will create a copy of an existing object such that any mutations\n * on the copy cannot affect the original.\n *\n * Only shallow objects are supported, passing a nested object to this\n * function will cause a TypeError.\n *\n * Objects with primitives, and arrays of primitives are supported.\n *\n * @param {Object} obj The object to clone.\n * @return {Object} a clone of the passed object.\n * @throws {TypeError} when a nested object is passed.\n * @memberOf Utils\n */\nlunr.utils.clone = function (obj) {\n if (obj === null || obj === undefined) {\n return obj\n }\n\n var clone = Object.create(null),\n keys = Object.keys(obj)\n\n for (var i = 0; i < keys.length; i++) {\n var key = keys[i],\n val = obj[key]\n\n if (Array.isArray(val)) {\n clone[key] = val.slice()\n continue\n }\n\n if (typeof val === 'string' ||\n typeof val === 'number' ||\n typeof val === 'boolean') {\n clone[key] = val\n continue\n }\n\n throw new TypeError(\"clone is not deep and does not support nested objects\")\n }\n\n return clone\n}\nlunr.FieldRef = function (docRef, fieldName, stringValue) {\n this.docRef = docRef\n this.fieldName = fieldName\n this._stringValue = stringValue\n}\n\nlunr.FieldRef.joiner = \"/\"\n\nlunr.FieldRef.fromString = function (s) {\n var n = s.indexOf(lunr.FieldRef.joiner)\n\n if (n === -1) {\n throw \"malformed field ref string\"\n }\n\n var fieldRef = s.slice(0, n),\n docRef = s.slice(n + 1)\n\n return new lunr.FieldRef (docRef, fieldRef, s)\n}\n\nlunr.FieldRef.prototype.toString = function () {\n if (this._stringValue == undefined) {\n this._stringValue = this.fieldName + lunr.FieldRef.joiner + this.docRef\n }\n\n return this._stringValue\n}\n/*!\n * lunr.Set\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A lunr set.\n *\n * @constructor\n */\nlunr.Set = function (elements) {\n this.elements = Object.create(null)\n\n if (elements) {\n this.length = elements.length\n\n for (var i = 0; i < this.length; i++) {\n this.elements[elements[i]] = true\n }\n } else {\n this.length = 0\n }\n}\n\n/**\n * A complete set that contains all elements.\n *\n * @static\n * @readonly\n * @type {lunr.Set}\n */\nlunr.Set.complete = {\n intersect: function (other) {\n return other\n },\n\n union: function () {\n return this\n },\n\n contains: function () {\n return true\n }\n}\n\n/**\n * An empty set that contains no elements.\n *\n * @static\n * @readonly\n * @type {lunr.Set}\n */\nlunr.Set.empty = {\n intersect: function () {\n return this\n },\n\n union: function (other) {\n return other\n },\n\n contains: function () {\n return false\n }\n}\n\n/**\n * Returns true if this set contains the specified object.\n *\n * @param {object} object - Object whose presence in this set is to be tested.\n * @returns {boolean} - True if this set contains the specified object.\n */\nlunr.Set.prototype.contains = function (object) {\n return !!this.elements[object]\n}\n\n/**\n * Returns a new set containing only the elements that are present in both\n * this set and the specified set.\n *\n * @param {lunr.Set} other - set to intersect with this set.\n * @returns {lunr.Set} a new set that is the intersection of this and the specified set.\n 
*/\n\nlunr.Set.prototype.intersect = function (other) {\n var a, b, elements, intersection = []\n\n if (other === lunr.Set.complete) {\n return this\n }\n\n if (other === lunr.Set.empty) {\n return other\n }\n\n if (this.length < other.length) {\n a = this\n b = other\n } else {\n a = other\n b = this\n }\n\n elements = Object.keys(a.elements)\n\n for (var i = 0; i < elements.length; i++) {\n var element = elements[i]\n if (element in b.elements) {\n intersection.push(element)\n }\n }\n\n return new lunr.Set (intersection)\n}\n\n/**\n * Returns a new set combining the elements of this and the specified set.\n *\n * @param {lunr.Set} other - set to union with this set.\n * @return {lunr.Set} a new set that is the union of this and the specified set.\n */\n\nlunr.Set.prototype.union = function (other) {\n if (other === lunr.Set.complete) {\n return lunr.Set.complete\n }\n\n if (other === lunr.Set.empty) {\n return this\n }\n\n return new lunr.Set(Object.keys(this.elements).concat(Object.keys(other.elements)))\n}\n/**\n * A function to calculate the inverse document frequency for\n * a posting. This is shared between the builder and the index\n *\n * @private\n * @param {object} posting - The posting for a given term\n * @param {number} documentCount - The total number of documents.\n */\nlunr.idf = function (posting, documentCount) {\n var documentsWithTerm = 0\n\n for (var fieldName in posting) {\n if (fieldName == '_index') continue // Ignore the term index, its not a field\n documentsWithTerm += Object.keys(posting[fieldName]).length\n }\n\n var x = (documentCount - documentsWithTerm + 0.5) / (documentsWithTerm + 0.5)\n\n return Math.log(1 + Math.abs(x))\n}\n\n/**\n * A token wraps a string representation of a token\n * as it is passed through the text processing pipeline.\n *\n * @constructor\n * @param {string} [str=''] - The string token being wrapped.\n * @param {object} [metadata={}] - Metadata associated with this token.\n */\nlunr.Token = function (str, metadata) {\n this.str = str || \"\"\n this.metadata = metadata || {}\n}\n\n/**\n * Returns the token string that is being wrapped by this object.\n *\n * @returns {string}\n */\nlunr.Token.prototype.toString = function () {\n return this.str\n}\n\n/**\n * A token update function is used when updating or optionally\n * when cloning a token.\n *\n * @callback lunr.Token~updateFunction\n * @param {string} str - The string representation of the token.\n * @param {Object} metadata - All metadata associated with this token.\n */\n\n/**\n * Applies the given function to the wrapped string token.\n *\n * @example\n * token.update(function (str, metadata) {\n * return str.toUpperCase()\n * })\n *\n * @param {lunr.Token~updateFunction} fn - A function to apply to the token string.\n * @returns {lunr.Token}\n */\nlunr.Token.prototype.update = function (fn) {\n this.str = fn(this.str, this.metadata)\n return this\n}\n\n/**\n * Creates a clone of this token. Optionally a function can be\n * applied to the cloned token.\n *\n * @param {lunr.Token~updateFunction} [fn] - An optional function to apply to the cloned token.\n * @returns {lunr.Token}\n */\nlunr.Token.prototype.clone = function (fn) {\n fn = fn || function (s) { return s }\n return new lunr.Token (fn(this.str, this.metadata), this.metadata)\n}\n/*!\n * lunr.tokenizer\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A function for splitting a string into tokens ready to be inserted into\n * the search index. 
Uses `lunr.tokenizer.separator` to split strings, change\n * the value of this property to change how strings are split into tokens.\n *\n * This tokenizer will convert its parameter to a string by calling `toString` and\n * then will split this string on the character in `lunr.tokenizer.separator`.\n * Arrays will have their elements converted to strings and wrapped in a lunr.Token.\n *\n * Optional metadata can be passed to the tokenizer, this metadata will be cloned and\n * added as metadata to every token that is created from the object to be tokenized.\n *\n * @static\n * @param {?(string|object|object[])} obj - The object to convert into tokens\n * @param {?object} metadata - Optional metadata to associate with every token\n * @returns {lunr.Token[]}\n * @see {@link lunr.Pipeline}\n */\nlunr.tokenizer = function (obj, metadata) {\n if (obj == null || obj == undefined) {\n return []\n }\n\n if (Array.isArray(obj)) {\n return obj.map(function (t) {\n return new lunr.Token(\n lunr.utils.asString(t).toLowerCase(),\n lunr.utils.clone(metadata)\n )\n })\n }\n\n var str = obj.toString().toLowerCase(),\n len = str.length,\n tokens = []\n\n for (var sliceEnd = 0, sliceStart = 0; sliceEnd <= len; sliceEnd++) {\n var char = str.charAt(sliceEnd),\n sliceLength = sliceEnd - sliceStart\n\n if ((char.match(lunr.tokenizer.separator) || sliceEnd == len)) {\n\n if (sliceLength > 0) {\n var tokenMetadata = lunr.utils.clone(metadata) || {}\n tokenMetadata[\"position\"] = [sliceStart, sliceLength]\n tokenMetadata[\"index\"] = tokens.length\n\n tokens.push(\n new lunr.Token (\n str.slice(sliceStart, sliceEnd),\n tokenMetadata\n )\n )\n }\n\n sliceStart = sliceEnd + 1\n }\n\n }\n\n return tokens\n}\n\n/**\n * The separator used to split a string into tokens. Override this property to change the behaviour of\n * `lunr.tokenizer` behaviour when tokenizing strings. By default this splits on whitespace and hyphens.\n *\n * @static\n * @see lunr.tokenizer\n */\nlunr.tokenizer.separator = /[\\s\\-]+/\n/*!\n * lunr.Pipeline\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.Pipelines maintain an ordered list of functions to be applied to all\n * tokens in documents entering the search index and queries being ran against\n * the index.\n *\n * An instance of lunr.Index created with the lunr shortcut will contain a\n * pipeline with a stop word filter and an English language stemmer. Extra\n * functions can be added before or after either of these functions or these\n * default functions can be removed.\n *\n * When run the pipeline will call each function in turn, passing a token, the\n * index of that token in the original list of all tokens and finally a list of\n * all the original tokens.\n *\n * The output of functions in the pipeline will be passed to the next function\n * in the pipeline. To exclude a token from entering the index the function\n * should return undefined, the rest of the pipeline will not be called with\n * this token.\n *\n * For serialisation of pipelines to work, all functions used in an instance of\n * a pipeline should be registered with lunr.Pipeline. Registered functions can\n * then be loaded. 
If trying to load a serialised pipeline that uses functions\n * that are not registered an error will be thrown.\n *\n * If not planning on serialising the pipeline then registering pipeline functions\n * is not necessary.\n *\n * @constructor\n */\nlunr.Pipeline = function () {\n this._stack = []\n}\n\nlunr.Pipeline.registeredFunctions = Object.create(null)\n\n/**\n * A pipeline function maps lunr.Token to lunr.Token. A lunr.Token contains the token\n * string as well as all known metadata. A pipeline function can mutate the token string\n * or mutate (or add) metadata for a given token.\n *\n * A pipeline function can indicate that the passed token should be discarded by returning\n * null, undefined or an empty string. This token will not be passed to any downstream pipeline\n * functions and will not be added to the index.\n *\n * Multiple tokens can be returned by returning an array of tokens. Each token will be passed\n * to any downstream pipeline functions and all will returned tokens will be added to the index.\n *\n * Any number of pipeline functions may be chained together using a lunr.Pipeline.\n *\n * @interface lunr.PipelineFunction\n * @param {lunr.Token} token - A token from the document being processed.\n * @param {number} i - The index of this token in the complete list of tokens for this document/field.\n * @param {lunr.Token[]} tokens - All tokens for this document/field.\n * @returns {(?lunr.Token|lunr.Token[])}\n */\n\n/**\n * Register a function with the pipeline.\n *\n * Functions that are used in the pipeline should be registered if the pipeline\n * needs to be serialised, or a serialised pipeline needs to be loaded.\n *\n * Registering a function does not add it to a pipeline, functions must still be\n * added to instances of the pipeline for them to be used when running a pipeline.\n *\n * @param {lunr.PipelineFunction} fn - The function to check for.\n * @param {String} label - The label to register this function with\n */\nlunr.Pipeline.registerFunction = function (fn, label) {\n if (label in this.registeredFunctions) {\n lunr.utils.warn('Overwriting existing registered function: ' + label)\n }\n\n fn.label = label\n lunr.Pipeline.registeredFunctions[fn.label] = fn\n}\n\n/**\n * Warns if the function is not registered as a Pipeline function.\n *\n * @param {lunr.PipelineFunction} fn - The function to check for.\n * @private\n */\nlunr.Pipeline.warnIfFunctionNotRegistered = function (fn) {\n var isRegistered = fn.label && (fn.label in this.registeredFunctions)\n\n if (!isRegistered) {\n lunr.utils.warn('Function is not registered with pipeline. 
This may cause problems when serialising the index.\\n', fn)\n }\n}\n\n/**\n * Loads a previously serialised pipeline.\n *\n * All functions to be loaded must already be registered with lunr.Pipeline.\n * If any function from the serialised data has not been registered then an\n * error will be thrown.\n *\n * @param {Object} serialised - The serialised pipeline to load.\n * @returns {lunr.Pipeline}\n */\nlunr.Pipeline.load = function (serialised) {\n var pipeline = new lunr.Pipeline\n\n serialised.forEach(function (fnName) {\n var fn = lunr.Pipeline.registeredFunctions[fnName]\n\n if (fn) {\n pipeline.add(fn)\n } else {\n throw new Error('Cannot load unregistered function: ' + fnName)\n }\n })\n\n return pipeline\n}\n\n/**\n * Adds new functions to the end of the pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {lunr.PipelineFunction[]} functions - Any number of functions to add to the pipeline.\n */\nlunr.Pipeline.prototype.add = function () {\n var fns = Array.prototype.slice.call(arguments)\n\n fns.forEach(function (fn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(fn)\n this._stack.push(fn)\n }, this)\n}\n\n/**\n * Adds a single function after a function that already exists in the\n * pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline.\n * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline.\n */\nlunr.Pipeline.prototype.after = function (existingFn, newFn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(newFn)\n\n var pos = this._stack.indexOf(existingFn)\n if (pos == -1) {\n throw new Error('Cannot find existingFn')\n }\n\n pos = pos + 1\n this._stack.splice(pos, 0, newFn)\n}\n\n/**\n * Adds a single function before a function that already exists in the\n * pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline.\n * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline.\n */\nlunr.Pipeline.prototype.before = function (existingFn, newFn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(newFn)\n\n var pos = this._stack.indexOf(existingFn)\n if (pos == -1) {\n throw new Error('Cannot find existingFn')\n }\n\n this._stack.splice(pos, 0, newFn)\n}\n\n/**\n * Removes a function from the pipeline.\n *\n * @param {lunr.PipelineFunction} fn The function to remove from the pipeline.\n */\nlunr.Pipeline.prototype.remove = function (fn) {\n var pos = this._stack.indexOf(fn)\n if (pos == -1) {\n return\n }\n\n this._stack.splice(pos, 1)\n}\n\n/**\n * Runs the current list of functions that make up the pipeline against the\n * passed tokens.\n *\n * @param {Array} tokens The tokens to run through the pipeline.\n * @returns {Array}\n */\nlunr.Pipeline.prototype.run = function (tokens) {\n var stackLength = this._stack.length\n\n for (var i = 0; i < stackLength; i++) {\n var fn = this._stack[i]\n var memo = []\n\n for (var j = 0; j < tokens.length; j++) {\n var result = fn(tokens[j], j, tokens)\n\n if (result === null || result === void 0 || result === '') continue\n\n if (Array.isArray(result)) {\n for (var k = 0; k < result.length; k++) {\n memo.push(result[k])\n }\n } else {\n memo.push(result)\n }\n }\n\n tokens = memo\n }\n\n return tokens\n}\n\n/**\n * Convenience method for passing a string through a pipeline and getting\n * strings out. 
This method takes care of wrapping the passed string in a\n * token and mapping the resulting tokens back to strings.\n *\n * @param {string} str - The string to pass through the pipeline.\n * @param {?object} metadata - Optional metadata to associate with the token\n * passed to the pipeline.\n * @returns {string[]}\n */\nlunr.Pipeline.prototype.runString = function (str, metadata) {\n var token = new lunr.Token (str, metadata)\n\n return this.run([token]).map(function (t) {\n return t.toString()\n })\n}\n\n/**\n * Resets the pipeline by removing any existing processors.\n *\n */\nlunr.Pipeline.prototype.reset = function () {\n this._stack = []\n}\n\n/**\n * Returns a representation of the pipeline ready for serialisation.\n *\n * Logs a warning if the function has not been registered.\n *\n * @returns {Array}\n */\nlunr.Pipeline.prototype.toJSON = function () {\n return this._stack.map(function (fn) {\n lunr.Pipeline.warnIfFunctionNotRegistered(fn)\n\n return fn.label\n })\n}\n/*!\n * lunr.Vector\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A vector is used to construct the vector space of documents and queries. These\n * vectors support operations to determine the similarity between two documents or\n * a document and a query.\n *\n * Normally no parameters are required for initializing a vector, but in the case of\n * loading a previously dumped vector the raw elements can be provided to the constructor.\n *\n * For performance reasons vectors are implemented with a flat array, where an elements\n * index is immediately followed by its value. E.g. [index, value, index, value]. This\n * allows the underlying array to be as sparse as possible and still offer decent\n * performance when being used for vector calculations.\n *\n * @constructor\n * @param {Number[]} [elements] - The flat list of element index and element value pairs.\n */\nlunr.Vector = function (elements) {\n this._magnitude = 0\n this.elements = elements || []\n}\n\n\n/**\n * Calculates the position within the vector to insert a given index.\n *\n * This is used internally by insert and upsert. 
If there are duplicate indexes then\n * the position is returned as if the value for that index were to be updated, but it\n * is the callers responsibility to check whether there is a duplicate at that index\n *\n * @param {Number} insertIdx - The index at which the element should be inserted.\n * @returns {Number}\n */\nlunr.Vector.prototype.positionForIndex = function (index) {\n // For an empty vector the tuple can be inserted at the beginning\n if (this.elements.length == 0) {\n return 0\n }\n\n var start = 0,\n end = this.elements.length / 2,\n sliceLength = end - start,\n pivotPoint = Math.floor(sliceLength / 2),\n pivotIndex = this.elements[pivotPoint * 2]\n\n while (sliceLength > 1) {\n if (pivotIndex < index) {\n start = pivotPoint\n }\n\n if (pivotIndex > index) {\n end = pivotPoint\n }\n\n if (pivotIndex == index) {\n break\n }\n\n sliceLength = end - start\n pivotPoint = start + Math.floor(sliceLength / 2)\n pivotIndex = this.elements[pivotPoint * 2]\n }\n\n if (pivotIndex == index) {\n return pivotPoint * 2\n }\n\n if (pivotIndex > index) {\n return pivotPoint * 2\n }\n\n if (pivotIndex < index) {\n return (pivotPoint + 1) * 2\n }\n}\n\n/**\n * Inserts an element at an index within the vector.\n *\n * Does not allow duplicates, will throw an error if there is already an entry\n * for this index.\n *\n * @param {Number} insertIdx - The index at which the element should be inserted.\n * @param {Number} val - The value to be inserted into the vector.\n */\nlunr.Vector.prototype.insert = function (insertIdx, val) {\n this.upsert(insertIdx, val, function () {\n throw \"duplicate index\"\n })\n}\n\n/**\n * Inserts or updates an existing index within the vector.\n *\n * @param {Number} insertIdx - The index at which the element should be inserted.\n * @param {Number} val - The value to be inserted into the vector.\n * @param {function} fn - A function that is called for updates, the existing value and the\n * requested value are passed as arguments\n */\nlunr.Vector.prototype.upsert = function (insertIdx, val, fn) {\n this._magnitude = 0\n var position = this.positionForIndex(insertIdx)\n\n if (this.elements[position] == insertIdx) {\n this.elements[position + 1] = fn(this.elements[position + 1], val)\n } else {\n this.elements.splice(position, 0, insertIdx, val)\n }\n}\n\n/**\n * Calculates the magnitude of this vector.\n *\n * @returns {Number}\n */\nlunr.Vector.prototype.magnitude = function () {\n if (this._magnitude) return this._magnitude\n\n var sumOfSquares = 0,\n elementsLength = this.elements.length\n\n for (var i = 1; i < elementsLength; i += 2) {\n var val = this.elements[i]\n sumOfSquares += val * val\n }\n\n return this._magnitude = Math.sqrt(sumOfSquares)\n}\n\n/**\n * Calculates the dot product of this vector and another vector.\n *\n * @param {lunr.Vector} otherVector - The vector to compute the dot product with.\n * @returns {Number}\n */\nlunr.Vector.prototype.dot = function (otherVector) {\n var dotProduct = 0,\n a = this.elements, b = otherVector.elements,\n aLen = a.length, bLen = b.length,\n aVal = 0, bVal = 0,\n i = 0, j = 0\n\n while (i < aLen && j < bLen) {\n aVal = a[i], bVal = b[j]\n if (aVal < bVal) {\n i += 2\n } else if (aVal > bVal) {\n j += 2\n } else if (aVal == bVal) {\n dotProduct += a[i + 1] * b[j + 1]\n i += 2\n j += 2\n }\n }\n\n return dotProduct\n}\n\n/**\n * Calculates the similarity between this vector and another vector.\n *\n * @param {lunr.Vector} otherVector - The other vector to calculate the\n * similarity with.\n * @returns 
{Number}\n */\nlunr.Vector.prototype.similarity = function (otherVector) {\n return this.dot(otherVector) / this.magnitude() || 0\n}\n\n/**\n * Converts the vector to an array of the elements within the vector.\n *\n * @returns {Number[]}\n */\nlunr.Vector.prototype.toArray = function () {\n var output = new Array (this.elements.length / 2)\n\n for (var i = 1, j = 0; i < this.elements.length; i += 2, j++) {\n output[j] = this.elements[i]\n }\n\n return output\n}\n\n/**\n * A JSON serializable representation of the vector.\n *\n * @returns {Number[]}\n */\nlunr.Vector.prototype.toJSON = function () {\n return this.elements\n}\n/* eslint-disable */\n/*!\n * lunr.stemmer\n * Copyright (C) 2020 Oliver Nightingale\n * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt\n */\n\n/**\n * lunr.stemmer is an english language stemmer, this is a JavaScript\n * implementation of the PorterStemmer taken from http://tartarus.org/~martin\n *\n * @static\n * @implements {lunr.PipelineFunction}\n * @param {lunr.Token} token - The string to stem\n * @returns {lunr.Token}\n * @see {@link lunr.Pipeline}\n * @function\n */\nlunr.stemmer = (function(){\n var step2list = {\n \"ational\" : \"ate\",\n \"tional\" : \"tion\",\n \"enci\" : \"ence\",\n \"anci\" : \"ance\",\n \"izer\" : \"ize\",\n \"bli\" : \"ble\",\n \"alli\" : \"al\",\n \"entli\" : \"ent\",\n \"eli\" : \"e\",\n \"ousli\" : \"ous\",\n \"ization\" : \"ize\",\n \"ation\" : \"ate\",\n \"ator\" : \"ate\",\n \"alism\" : \"al\",\n \"iveness\" : \"ive\",\n \"fulness\" : \"ful\",\n \"ousness\" : \"ous\",\n \"aliti\" : \"al\",\n \"iviti\" : \"ive\",\n \"biliti\" : \"ble\",\n \"logi\" : \"log\"\n },\n\n step3list = {\n \"icate\" : \"ic\",\n \"ative\" : \"\",\n \"alize\" : \"al\",\n \"iciti\" : \"ic\",\n \"ical\" : \"ic\",\n \"ful\" : \"\",\n \"ness\" : \"\"\n },\n\n c = \"[^aeiou]\", // consonant\n v = \"[aeiouy]\", // vowel\n C = c + \"[^aeiouy]*\", // consonant sequence\n V = v + \"[aeiou]*\", // vowel sequence\n\n mgr0 = \"^(\" + C + \")?\" + V + C, // [C]VC... is m>0\n meq1 = \"^(\" + C + \")?\" + V + C + \"(\" + V + \")?$\", // [C]VC[V] is m=1\n mgr1 = \"^(\" + C + \")?\" + V + C + V + C, // [C]VCVC... 
is m>1\n s_v = \"^(\" + C + \")?\" + v; // vowel in stem\n\n var re_mgr0 = new RegExp(mgr0);\n var re_mgr1 = new RegExp(mgr1);\n var re_meq1 = new RegExp(meq1);\n var re_s_v = new RegExp(s_v);\n\n var re_1a = /^(.+?)(ss|i)es$/;\n var re2_1a = /^(.+?)([^s])s$/;\n var re_1b = /^(.+?)eed$/;\n var re2_1b = /^(.+?)(ed|ing)$/;\n var re_1b_2 = /.$/;\n var re2_1b_2 = /(at|bl|iz)$/;\n var re3_1b_2 = new RegExp(\"([^aeiouylsz])\\\\1$\");\n var re4_1b_2 = new RegExp(\"^\" + C + v + \"[^aeiouwxy]$\");\n\n var re_1c = /^(.+?[^aeiou])y$/;\n var re_2 = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;\n\n var re_3 = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;\n\n var re_4 = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;\n var re2_4 = /^(.+?)(s|t)(ion)$/;\n\n var re_5 = /^(.+?)e$/;\n var re_5_1 = /ll$/;\n var re3_5 = new RegExp(\"^\" + C + v + \"[^aeiouwxy]$\");\n\n var porterStemmer = function porterStemmer(w) {\n var stem,\n suffix,\n firstch,\n re,\n re2,\n re3,\n re4;\n\n if (w.length < 3) { return w; }\n\n firstch = w.substr(0,1);\n if (firstch == \"y\") {\n w = firstch.toUpperCase() + w.substr(1);\n }\n\n // Step 1a\n re = re_1a\n re2 = re2_1a;\n\n if (re.test(w)) { w = w.replace(re,\"$1$2\"); }\n else if (re2.test(w)) { w = w.replace(re2,\"$1$2\"); }\n\n // Step 1b\n re = re_1b;\n re2 = re2_1b;\n if (re.test(w)) {\n var fp = re.exec(w);\n re = re_mgr0;\n if (re.test(fp[1])) {\n re = re_1b_2;\n w = w.replace(re,\"\");\n }\n } else if (re2.test(w)) {\n var fp = re2.exec(w);\n stem = fp[1];\n re2 = re_s_v;\n if (re2.test(stem)) {\n w = stem;\n re2 = re2_1b_2;\n re3 = re3_1b_2;\n re4 = re4_1b_2;\n if (re2.test(w)) { w = w + \"e\"; }\n else if (re3.test(w)) { re = re_1b_2; w = w.replace(re,\"\"); }\n else if (re4.test(w)) { w = w + \"e\"; }\n }\n }\n\n // Step 1c - replace suffix y or Y by i if preceded by a non-vowel which is not the first letter of the word (so cry -> cri, by -> by, say -> say)\n re = re_1c;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n w = stem + \"i\";\n }\n\n // Step 2\n re = re_2;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n suffix = fp[2];\n re = re_mgr0;\n if (re.test(stem)) {\n w = stem + step2list[suffix];\n }\n }\n\n // Step 3\n re = re_3;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n suffix = fp[2];\n re = re_mgr0;\n if (re.test(stem)) {\n w = stem + step3list[suffix];\n }\n }\n\n // Step 4\n re = re_4;\n re2 = re2_4;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n re = re_mgr1;\n if (re.test(stem)) {\n w = stem;\n }\n } else if (re2.test(w)) {\n var fp = re2.exec(w);\n stem = fp[1] + fp[2];\n re2 = re_mgr1;\n if (re2.test(stem)) {\n w = stem;\n }\n }\n\n // Step 5\n re = re_5;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n re = re_mgr1;\n re2 = re_meq1;\n re3 = re3_5;\n if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) {\n w = stem;\n }\n }\n\n re = re_5_1;\n re2 = re_mgr1;\n if (re.test(w) && re2.test(w)) {\n re = re_1b_2;\n w = w.replace(re,\"\");\n }\n\n // and turn initial Y back to y\n\n if (firstch == \"y\") {\n w = firstch.toLowerCase() + w.substr(1);\n }\n\n return w;\n };\n\n return function (token) {\n return token.update(porterStemmer);\n }\n})();\n\nlunr.Pipeline.registerFunction(lunr.stemmer, 'stemmer')\n/*!\n * lunr.stopWordFilter\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.generateStopWordFilter builds a stopWordFilter 
function from the provided\n * list of stop words.\n *\n * The built in lunr.stopWordFilter is built using this generator and can be used\n * to generate custom stopWordFilters for applications or non English languages.\n *\n * @function\n * @param {Array} token The token to pass through the filter\n * @returns {lunr.PipelineFunction}\n * @see lunr.Pipeline\n * @see lunr.stopWordFilter\n */\nlunr.generateStopWordFilter = function (stopWords) {\n var words = stopWords.reduce(function (memo, stopWord) {\n memo[stopWord] = stopWord\n return memo\n }, {})\n\n return function (token) {\n if (token && words[token.toString()] !== token.toString()) return token\n }\n}\n\n/**\n * lunr.stopWordFilter is an English language stop word list filter, any words\n * contained in the list will not be passed through the filter.\n *\n * This is intended to be used in the Pipeline. If the token does not pass the\n * filter then undefined will be returned.\n *\n * @function\n * @implements {lunr.PipelineFunction}\n * @params {lunr.Token} token - A token to check for being a stop word.\n * @returns {lunr.Token}\n * @see {@link lunr.Pipeline}\n */\nlunr.stopWordFilter = lunr.generateStopWordFilter([\n 'a',\n 'able',\n 'about',\n 'across',\n 'after',\n 'all',\n 'almost',\n 'also',\n 'am',\n 'among',\n 'an',\n 'and',\n 'any',\n 'are',\n 'as',\n 'at',\n 'be',\n 'because',\n 'been',\n 'but',\n 'by',\n 'can',\n 'cannot',\n 'could',\n 'dear',\n 'did',\n 'do',\n 'does',\n 'either',\n 'else',\n 'ever',\n 'every',\n 'for',\n 'from',\n 'get',\n 'got',\n 'had',\n 'has',\n 'have',\n 'he',\n 'her',\n 'hers',\n 'him',\n 'his',\n 'how',\n 'however',\n 'i',\n 'if',\n 'in',\n 'into',\n 'is',\n 'it',\n 'its',\n 'just',\n 'least',\n 'let',\n 'like',\n 'likely',\n 'may',\n 'me',\n 'might',\n 'most',\n 'must',\n 'my',\n 'neither',\n 'no',\n 'nor',\n 'not',\n 'of',\n 'off',\n 'often',\n 'on',\n 'only',\n 'or',\n 'other',\n 'our',\n 'own',\n 'rather',\n 'said',\n 'say',\n 'says',\n 'she',\n 'should',\n 'since',\n 'so',\n 'some',\n 'than',\n 'that',\n 'the',\n 'their',\n 'them',\n 'then',\n 'there',\n 'these',\n 'they',\n 'this',\n 'tis',\n 'to',\n 'too',\n 'twas',\n 'us',\n 'wants',\n 'was',\n 'we',\n 'were',\n 'what',\n 'when',\n 'where',\n 'which',\n 'while',\n 'who',\n 'whom',\n 'why',\n 'will',\n 'with',\n 'would',\n 'yet',\n 'you',\n 'your'\n])\n\nlunr.Pipeline.registerFunction(lunr.stopWordFilter, 'stopWordFilter')\n/*!\n * lunr.trimmer\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.trimmer is a pipeline function for trimming non word\n * characters from the beginning and end of tokens before they\n * enter the index.\n *\n * This implementation may not work correctly for non latin\n * characters and should either be removed or adapted for use\n * with languages with non-latin characters.\n *\n * @static\n * @implements {lunr.PipelineFunction}\n * @param {lunr.Token} token The token to pass through the filter\n * @returns {lunr.Token}\n * @see lunr.Pipeline\n */\nlunr.trimmer = function (token) {\n return token.update(function (s) {\n return s.replace(/^\\W+/, '').replace(/\\W+$/, '')\n })\n}\n\nlunr.Pipeline.registerFunction(lunr.trimmer, 'trimmer')\n/*!\n * lunr.TokenSet\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * A token set is used to store the unique list of all tokens\n * within an index. 
Token sets are also used to represent an\n * incoming query to the index, this query token set and index\n * token set are then intersected to find which tokens to look\n * up in the inverted index.\n *\n * A token set can hold multiple tokens, as in the case of the\n * index token set, or it can hold a single token as in the\n * case of a simple query token set.\n *\n * Additionally token sets are used to perform wildcard matching.\n * Leading, contained and trailing wildcards are supported, and\n * from this edit distance matching can also be provided.\n *\n * Token sets are implemented as a minimal finite state automata,\n * where both common prefixes and suffixes are shared between tokens.\n * This helps to reduce the space used for storing the token set.\n *\n * @constructor\n */\nlunr.TokenSet = function () {\n this.final = false\n this.edges = {}\n this.id = lunr.TokenSet._nextId\n lunr.TokenSet._nextId += 1\n}\n\n/**\n * Keeps track of the next, auto increment, identifier to assign\n * to a new tokenSet.\n *\n * TokenSets require a unique identifier to be correctly minimised.\n *\n * @private\n */\nlunr.TokenSet._nextId = 1\n\n/**\n * Creates a TokenSet instance from the given sorted array of words.\n *\n * @param {String[]} arr - A sorted array of strings to create the set from.\n * @returns {lunr.TokenSet}\n * @throws Will throw an error if the input array is not sorted.\n */\nlunr.TokenSet.fromArray = function (arr) {\n var builder = new lunr.TokenSet.Builder\n\n for (var i = 0, len = arr.length; i < len; i++) {\n builder.insert(arr[i])\n }\n\n builder.finish()\n return builder.root\n}\n\n/**\n * Creates a token set from a query clause.\n *\n * @private\n * @param {Object} clause - A single clause from lunr.Query.\n * @param {string} clause.term - The query clause term.\n * @param {number} [clause.editDistance] - The optional edit distance for the term.\n * @returns {lunr.TokenSet}\n */\nlunr.TokenSet.fromClause = function (clause) {\n if ('editDistance' in clause) {\n return lunr.TokenSet.fromFuzzyString(clause.term, clause.editDistance)\n } else {\n return lunr.TokenSet.fromString(clause.term)\n }\n}\n\n/**\n * Creates a token set representing a single string with a specified\n * edit distance.\n *\n * Insertions, deletions, substitutions and transpositions are each\n * treated as an edit distance of 1.\n *\n * Increasing the allowed edit distance will have a dramatic impact\n * on the performance of both creating and intersecting these TokenSets.\n * It is advised to keep the edit distance less than 3.\n *\n * @param {string} str - The string to create the token set from.\n * @param {number} editDistance - The allowed edit distance to match.\n * @returns {lunr.Vector}\n */\nlunr.TokenSet.fromFuzzyString = function (str, editDistance) {\n var root = new lunr.TokenSet\n\n var stack = [{\n node: root,\n editsRemaining: editDistance,\n str: str\n }]\n\n while (stack.length) {\n var frame = stack.pop()\n\n // no edit\n if (frame.str.length > 0) {\n var char = frame.str.charAt(0),\n noEditNode\n\n if (char in frame.node.edges) {\n noEditNode = frame.node.edges[char]\n } else {\n noEditNode = new lunr.TokenSet\n frame.node.edges[char] = noEditNode\n }\n\n if (frame.str.length == 1) {\n noEditNode.final = true\n }\n\n stack.push({\n node: noEditNode,\n editsRemaining: frame.editsRemaining,\n str: frame.str.slice(1)\n })\n }\n\n if (frame.editsRemaining == 0) {\n continue\n }\n\n // insertion\n if (\"*\" in frame.node.edges) {\n var insertionNode = frame.node.edges[\"*\"]\n } else {\n 
var insertionNode = new lunr.TokenSet\n frame.node.edges[\"*\"] = insertionNode\n }\n\n if (frame.str.length == 0) {\n insertionNode.final = true\n }\n\n stack.push({\n node: insertionNode,\n editsRemaining: frame.editsRemaining - 1,\n str: frame.str\n })\n\n // deletion\n // can only do a deletion if we have enough edits remaining\n // and if there are characters left to delete in the string\n if (frame.str.length > 1) {\n stack.push({\n node: frame.node,\n editsRemaining: frame.editsRemaining - 1,\n str: frame.str.slice(1)\n })\n }\n\n // deletion\n // just removing the last character from the str\n if (frame.str.length == 1) {\n frame.node.final = true\n }\n\n // substitution\n // can only do a substitution if we have enough edits remaining\n // and if there are characters left to substitute\n if (frame.str.length >= 1) {\n if (\"*\" in frame.node.edges) {\n var substitutionNode = frame.node.edges[\"*\"]\n } else {\n var substitutionNode = new lunr.TokenSet\n frame.node.edges[\"*\"] = substitutionNode\n }\n\n if (frame.str.length == 1) {\n substitutionNode.final = true\n }\n\n stack.push({\n node: substitutionNode,\n editsRemaining: frame.editsRemaining - 1,\n str: frame.str.slice(1)\n })\n }\n\n // transposition\n // can only do a transposition if there are edits remaining\n // and there are enough characters to transpose\n if (frame.str.length > 1) {\n var charA = frame.str.charAt(0),\n charB = frame.str.charAt(1),\n transposeNode\n\n if (charB in frame.node.edges) {\n transposeNode = frame.node.edges[charB]\n } else {\n transposeNode = new lunr.TokenSet\n frame.node.edges[charB] = transposeNode\n }\n\n if (frame.str.length == 1) {\n transposeNode.final = true\n }\n\n stack.push({\n node: transposeNode,\n editsRemaining: frame.editsRemaining - 1,\n str: charA + frame.str.slice(2)\n })\n }\n }\n\n return root\n}\n\n/**\n * Creates a TokenSet from a string.\n *\n * The string may contain one or more wildcard characters (*)\n * that will allow wildcard matching when intersecting with\n * another TokenSet.\n *\n * @param {string} str - The string to create a TokenSet from.\n * @returns {lunr.TokenSet}\n */\nlunr.TokenSet.fromString = function (str) {\n var node = new lunr.TokenSet,\n root = node\n\n /*\n * Iterates through all characters within the passed string\n * appending a node for each character.\n *\n * When a wildcard character is found then a self\n * referencing edge is introduced to continually match\n * any number of any characters.\n */\n for (var i = 0, len = str.length; i < len; i++) {\n var char = str[i],\n final = (i == len - 1)\n\n if (char == \"*\") {\n node.edges[char] = node\n node.final = final\n\n } else {\n var next = new lunr.TokenSet\n next.final = final\n\n node.edges[char] = next\n node = next\n }\n }\n\n return root\n}\n\n/**\n * Converts this TokenSet into an array of strings\n * contained within the TokenSet.\n *\n * This is not intended to be used on a TokenSet that\n * contains wildcards, in these cases the results are\n * undefined and are likely to cause an infinite loop.\n *\n * @returns {string[]}\n */\nlunr.TokenSet.prototype.toArray = function () {\n var words = []\n\n var stack = [{\n prefix: \"\",\n node: this\n }]\n\n while (stack.length) {\n var frame = stack.pop(),\n edges = Object.keys(frame.node.edges),\n len = edges.length\n\n if (frame.node.final) {\n /* In Safari, at this point the prefix is sometimes corrupted, see:\n * https://github.com/olivernn/lunr.js/issues/279 Calling any\n * String.prototype method forces Safari to \"cast\" this 
string to what\n * it's supposed to be, fixing the bug. */\n frame.prefix.charAt(0)\n words.push(frame.prefix)\n }\n\n for (var i = 0; i < len; i++) {\n var edge = edges[i]\n\n stack.push({\n prefix: frame.prefix.concat(edge),\n node: frame.node.edges[edge]\n })\n }\n }\n\n return words\n}\n\n/**\n * Generates a string representation of a TokenSet.\n *\n * This is intended to allow TokenSets to be used as keys\n * in objects, largely to aid the construction and minimisation\n * of a TokenSet. As such it is not designed to be a human\n * friendly representation of the TokenSet.\n *\n * @returns {string}\n */\nlunr.TokenSet.prototype.toString = function () {\n // NOTE: Using Object.keys here as this.edges is very likely\n // to enter 'hash-mode' with many keys being added\n //\n // avoiding a for-in loop here as it leads to the function\n // being de-optimised (at least in V8). From some simple\n // benchmarks the performance is comparable, but allowing\n // V8 to optimize may mean easy performance wins in the future.\n\n if (this._str) {\n return this._str\n }\n\n var str = this.final ? '1' : '0',\n labels = Object.keys(this.edges).sort(),\n len = labels.length\n\n for (var i = 0; i < len; i++) {\n var label = labels[i],\n node = this.edges[label]\n\n str = str + label + node.id\n }\n\n return str\n}\n\n/**\n * Returns a new TokenSet that is the intersection of\n * this TokenSet and the passed TokenSet.\n *\n * This intersection will take into account any wildcards\n * contained within the TokenSet.\n *\n * @param {lunr.TokenSet} b - An other TokenSet to intersect with.\n * @returns {lunr.TokenSet}\n */\nlunr.TokenSet.prototype.intersect = function (b) {\n var output = new lunr.TokenSet,\n frame = undefined\n\n var stack = [{\n qNode: b,\n output: output,\n node: this\n }]\n\n while (stack.length) {\n frame = stack.pop()\n\n // NOTE: As with the #toString method, we are using\n // Object.keys and a for loop instead of a for-in loop\n // as both of these objects enter 'hash' mode, causing\n // the function to be de-optimised in V8\n var qEdges = Object.keys(frame.qNode.edges),\n qLen = qEdges.length,\n nEdges = Object.keys(frame.node.edges),\n nLen = nEdges.length\n\n for (var q = 0; q < qLen; q++) {\n var qEdge = qEdges[q]\n\n for (var n = 0; n < nLen; n++) {\n var nEdge = nEdges[n]\n\n if (nEdge == qEdge || qEdge == '*') {\n var node = frame.node.edges[nEdge],\n qNode = frame.qNode.edges[qEdge],\n final = node.final && qNode.final,\n next = undefined\n\n if (nEdge in frame.output.edges) {\n // an edge already exists for this character\n // no need to create a new node, just set the finality\n // bit unless this node is already final\n next = frame.output.edges[nEdge]\n next.final = next.final || final\n\n } else {\n // no edge exists yet, must create one\n // set the finality bit and insert it\n // into the output\n next = new lunr.TokenSet\n next.final = final\n frame.output.edges[nEdge] = next\n }\n\n stack.push({\n qNode: qNode,\n output: next,\n node: node\n })\n }\n }\n }\n }\n\n return output\n}\nlunr.TokenSet.Builder = function () {\n this.previousWord = \"\"\n this.root = new lunr.TokenSet\n this.uncheckedNodes = []\n this.minimizedNodes = {}\n}\n\nlunr.TokenSet.Builder.prototype.insert = function (word) {\n var node,\n commonPrefix = 0\n\n if (word < this.previousWord) {\n throw new Error (\"Out of order word insertion\")\n }\n\n for (var i = 0; i < word.length && i < this.previousWord.length; i++) {\n if (word[i] != this.previousWord[i]) break\n commonPrefix++\n }\n\n 
this.minimize(commonPrefix)\n\n if (this.uncheckedNodes.length == 0) {\n node = this.root\n } else {\n node = this.uncheckedNodes[this.uncheckedNodes.length - 1].child\n }\n\n for (var i = commonPrefix; i < word.length; i++) {\n var nextNode = new lunr.TokenSet,\n char = word[i]\n\n node.edges[char] = nextNode\n\n this.uncheckedNodes.push({\n parent: node,\n char: char,\n child: nextNode\n })\n\n node = nextNode\n }\n\n node.final = true\n this.previousWord = word\n}\n\nlunr.TokenSet.Builder.prototype.finish = function () {\n this.minimize(0)\n}\n\nlunr.TokenSet.Builder.prototype.minimize = function (downTo) {\n for (var i = this.uncheckedNodes.length - 1; i >= downTo; i--) {\n var node = this.uncheckedNodes[i],\n childKey = node.child.toString()\n\n if (childKey in this.minimizedNodes) {\n node.parent.edges[node.char] = this.minimizedNodes[childKey]\n } else {\n // Cache the key for this node since\n // we know it can't change anymore\n node.child._str = childKey\n\n this.minimizedNodes[childKey] = node.child\n }\n\n this.uncheckedNodes.pop()\n }\n}\n/*!\n * lunr.Index\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * An index contains the built index of all documents and provides a query interface\n * to the index.\n *\n * Usually instances of lunr.Index will not be created using this constructor, instead\n * lunr.Builder should be used to construct new indexes, or lunr.Index.load should be\n * used to load previously built and serialized indexes.\n *\n * @constructor\n * @param {Object} attrs - The attributes of the built search index.\n * @param {Object} attrs.invertedIndex - An index of term/field to document reference.\n * @param {Object} attrs.fieldVectors - Field vectors\n * @param {lunr.TokenSet} attrs.tokenSet - An set of all corpus tokens.\n * @param {string[]} attrs.fields - The names of indexed document fields.\n * @param {lunr.Pipeline} attrs.pipeline - The pipeline to use for search terms.\n */\nlunr.Index = function (attrs) {\n this.invertedIndex = attrs.invertedIndex\n this.fieldVectors = attrs.fieldVectors\n this.tokenSet = attrs.tokenSet\n this.fields = attrs.fields\n this.pipeline = attrs.pipeline\n}\n\n/**\n * A result contains details of a document matching a search query.\n * @typedef {Object} lunr.Index~Result\n * @property {string} ref - The reference of the document this result represents.\n * @property {number} score - A number between 0 and 1 representing how similar this document is to the query.\n * @property {lunr.MatchData} matchData - Contains metadata about this match including which term(s) caused the match.\n */\n\n/**\n * Although lunr provides the ability to create queries using lunr.Query, it also provides a simple\n * query language which itself is parsed into an instance of lunr.Query.\n *\n * For programmatically building queries it is advised to directly use lunr.Query, the query language\n * is best used for human entered text rather than program generated text.\n *\n * At its simplest queries can just be a single term, e.g. `hello`, multiple terms are also supported\n * and will be combined with OR, e.g `hello world` will match documents that contain either 'hello'\n * or 'world', though those that contain both will rank higher in the results.\n *\n * Wildcards can be included in terms to match one or more unspecified characters, these wildcards can\n * be inserted anywhere within the term, and more than one wildcard can exist in a single term. 
Adding\n * wildcards will increase the number of documents that will be found but can also have a negative\n * impact on query performance, especially with wildcards at the beginning of a term.\n *\n * Terms can be restricted to specific fields, e.g. `title:hello`, only documents with the term\n * hello in the title field will match this query. Using a field not present in the index will lead\n * to an error being thrown.\n *\n * Modifiers can also be added to terms, lunr supports edit distance and boost modifiers on terms. A term\n * boost will make documents matching that term score higher, e.g. `foo^5`. Edit distance is also supported\n * to provide fuzzy matching, e.g. 'hello~2' will match documents with hello with an edit distance of 2.\n * Avoid large values for edit distance to improve query performance.\n *\n * Each term also supports a presence modifier. By default a term's presence in document is optional, however\n * this can be changed to either required or prohibited. For a term's presence to be required in a document the\n * term should be prefixed with a '+', e.g. `+foo bar` is a search for documents that must contain 'foo' and\n * optionally contain 'bar'. Conversely a leading '-' sets the terms presence to prohibited, i.e. it must not\n * appear in a document, e.g. `-foo bar` is a search for documents that do not contain 'foo' but may contain 'bar'.\n *\n * To escape special characters the backslash character '\\' can be used, this allows searches to include\n * characters that would normally be considered modifiers, e.g. `foo\\~2` will search for a term \"foo~2\" instead\n * of attempting to apply a boost of 2 to the search term \"foo\".\n *\n * @typedef {string} lunr.Index~QueryString\n * @example Simple single term query\n * hello\n * @example Multiple term query\n * hello world\n * @example term scoped to a field\n * title:hello\n * @example term with a boost of 10\n * hello^10\n * @example term with an edit distance of 2\n * hello~2\n * @example terms with presence modifiers\n * -foo +bar baz\n */\n\n/**\n * Performs a search against the index using lunr query syntax.\n *\n * Results will be returned sorted by their score, the most relevant results\n * will be returned first. 
For details on how the score is calculated, please see\n * the {@link https://lunrjs.com/guides/searching.html#scoring|guide}.\n *\n * For more programmatic querying use lunr.Index#query.\n *\n * @param {lunr.Index~QueryString} queryString - A string containing a lunr query.\n * @throws {lunr.QueryParseError} If the passed query string cannot be parsed.\n * @returns {lunr.Index~Result[]}\n */\nlunr.Index.prototype.search = function (queryString) {\n return this.query(function (query) {\n var parser = new lunr.QueryParser(queryString, query)\n parser.parse()\n })\n}\n\n/**\n * A query builder callback provides a query object to be used to express\n * the query to perform on the index.\n *\n * @callback lunr.Index~queryBuilder\n * @param {lunr.Query} query - The query object to build up.\n * @this lunr.Query\n */\n\n/**\n * Performs a query against the index using the yielded lunr.Query object.\n *\n * If performing programmatic queries against the index, this method is preferred\n * over lunr.Index#search so as to avoid the additional query parsing overhead.\n *\n * A query object is yielded to the supplied function which should be used to\n * express the query to be run against the index.\n *\n * Note that although this function takes a callback parameter it is _not_ an\n * asynchronous operation, the callback is just yielded a query object to be\n * customized.\n *\n * @param {lunr.Index~queryBuilder} fn - A function that is used to build the query.\n * @returns {lunr.Index~Result[]}\n */\nlunr.Index.prototype.query = function (fn) {\n // for each query clause\n // * process terms\n // * expand terms from token set\n // * find matching documents and metadata\n // * get document vectors\n // * score documents\n\n var query = new lunr.Query(this.fields),\n matchingFields = Object.create(null),\n queryVectors = Object.create(null),\n termFieldCache = Object.create(null),\n requiredMatches = Object.create(null),\n prohibitedMatches = Object.create(null)\n\n /*\n * To support field level boosts a query vector is created per\n * field. An empty vector is eagerly created to support negated\n * queries.\n */\n for (var i = 0; i < this.fields.length; i++) {\n queryVectors[this.fields[i]] = new lunr.Vector\n }\n\n fn.call(query, query)\n\n for (var i = 0; i < query.clauses.length; i++) {\n /*\n * Unless the pipeline has been disabled for this term, which is\n * the case for terms with wildcards, we need to pass the clause\n * term through the search pipeline. A pipeline returns an array\n * of processed terms. Pipeline functions may expand the passed\n * term, which means we may end up performing multiple index lookups\n * for a single query term.\n */\n var clause = query.clauses[i],\n terms = null,\n clauseMatches = lunr.Set.empty\n\n if (clause.usePipeline) {\n terms = this.pipeline.runString(clause.term, {\n fields: clause.fields\n })\n } else {\n terms = [clause.term]\n }\n\n for (var m = 0; m < terms.length; m++) {\n var term = terms[m]\n\n /*\n * Each term returned from the pipeline needs to use the same query\n * clause object, e.g. the same boost and or edit distance. 
The\n * simplest way to do this is to re-use the clause object but mutate\n * its term property.\n */\n clause.term = term\n\n /*\n * From the term in the clause we create a token set which will then\n * be used to intersect the indexes token set to get a list of terms\n * to lookup in the inverted index\n */\n var termTokenSet = lunr.TokenSet.fromClause(clause),\n expandedTerms = this.tokenSet.intersect(termTokenSet).toArray()\n\n /*\n * If a term marked as required does not exist in the tokenSet it is\n * impossible for the search to return any matches. We set all the field\n * scoped required matches set to empty and stop examining any further\n * clauses.\n */\n if (expandedTerms.length === 0 && clause.presence === lunr.Query.presence.REQUIRED) {\n for (var k = 0; k < clause.fields.length; k++) {\n var field = clause.fields[k]\n requiredMatches[field] = lunr.Set.empty\n }\n\n break\n }\n\n for (var j = 0; j < expandedTerms.length; j++) {\n /*\n * For each term get the posting and termIndex, this is required for\n * building the query vector.\n */\n var expandedTerm = expandedTerms[j],\n posting = this.invertedIndex[expandedTerm],\n termIndex = posting._index\n\n for (var k = 0; k < clause.fields.length; k++) {\n /*\n * For each field that this query term is scoped by (by default\n * all fields are in scope) we need to get all the document refs\n * that have this term in that field.\n *\n * The posting is the entry in the invertedIndex for the matching\n * term from above.\n */\n var field = clause.fields[k],\n fieldPosting = posting[field],\n matchingDocumentRefs = Object.keys(fieldPosting),\n termField = expandedTerm + \"/\" + field,\n matchingDocumentsSet = new lunr.Set(matchingDocumentRefs)\n\n /*\n * if the presence of this term is required ensure that the matching\n * documents are added to the set of required matches for this clause.\n *\n */\n if (clause.presence == lunr.Query.presence.REQUIRED) {\n clauseMatches = clauseMatches.union(matchingDocumentsSet)\n\n if (requiredMatches[field] === undefined) {\n requiredMatches[field] = lunr.Set.complete\n }\n }\n\n /*\n * if the presence of this term is prohibited ensure that the matching\n * documents are added to the set of prohibited matches for this field,\n * creating that set if it does not yet exist.\n */\n if (clause.presence == lunr.Query.presence.PROHIBITED) {\n if (prohibitedMatches[field] === undefined) {\n prohibitedMatches[field] = lunr.Set.empty\n }\n\n prohibitedMatches[field] = prohibitedMatches[field].union(matchingDocumentsSet)\n\n /*\n * Prohibited matches should not be part of the query vector used for\n * similarity scoring and no metadata should be extracted so we continue\n * to the next field\n */\n continue\n }\n\n /*\n * The query field vector is populated using the termIndex found for\n * the term and a unit value with the appropriate boost applied.\n * Using upsert because there could already be an entry in the vector\n * for the term we are working with. 
In that case we just add the scores\n * together.\n */\n queryVectors[field].upsert(termIndex, clause.boost, function (a, b) { return a + b })\n\n /**\n * If we've already seen this term, field combo then we've already collected\n * the matching documents and metadata, no need to go through all that again\n */\n if (termFieldCache[termField]) {\n continue\n }\n\n for (var l = 0; l < matchingDocumentRefs.length; l++) {\n /*\n * All metadata for this term/field/document triple\n * are then extracted and collected into an instance\n * of lunr.MatchData ready to be returned in the query\n * results\n */\n var matchingDocumentRef = matchingDocumentRefs[l],\n matchingFieldRef = new lunr.FieldRef (matchingDocumentRef, field),\n metadata = fieldPosting[matchingDocumentRef],\n fieldMatch\n\n if ((fieldMatch = matchingFields[matchingFieldRef]) === undefined) {\n matchingFields[matchingFieldRef] = new lunr.MatchData (expandedTerm, field, metadata)\n } else {\n fieldMatch.add(expandedTerm, field, metadata)\n }\n\n }\n\n termFieldCache[termField] = true\n }\n }\n }\n\n /**\n * If the presence was required we need to update the requiredMatches field sets.\n * We do this after all fields for the term have collected their matches because\n * the clause terms presence is required in _any_ of the fields not _all_ of the\n * fields.\n */\n if (clause.presence === lunr.Query.presence.REQUIRED) {\n for (var k = 0; k < clause.fields.length; k++) {\n var field = clause.fields[k]\n requiredMatches[field] = requiredMatches[field].intersect(clauseMatches)\n }\n }\n }\n\n /**\n * Need to combine the field scoped required and prohibited\n * matching documents into a global set of required and prohibited\n * matches\n */\n var allRequiredMatches = lunr.Set.complete,\n allProhibitedMatches = lunr.Set.empty\n\n for (var i = 0; i < this.fields.length; i++) {\n var field = this.fields[i]\n\n if (requiredMatches[field]) {\n allRequiredMatches = allRequiredMatches.intersect(requiredMatches[field])\n }\n\n if (prohibitedMatches[field]) {\n allProhibitedMatches = allProhibitedMatches.union(prohibitedMatches[field])\n }\n }\n\n var matchingFieldRefs = Object.keys(matchingFields),\n results = [],\n matches = Object.create(null)\n\n /*\n * If the query is negated (contains only prohibited terms)\n * we need to get _all_ fieldRefs currently existing in the\n * index. This is only done when we know that the query is\n * entirely prohibited terms to avoid any cost of getting all\n * fieldRefs unnecessarily.\n *\n * Additionally, blank MatchData must be created to correctly\n * populate the results.\n */\n if (query.isNegated()) {\n matchingFieldRefs = Object.keys(this.fieldVectors)\n\n for (var i = 0; i < matchingFieldRefs.length; i++) {\n var matchingFieldRef = matchingFieldRefs[i]\n var fieldRef = lunr.FieldRef.fromString(matchingFieldRef)\n matchingFields[matchingFieldRef] = new lunr.MatchData\n }\n }\n\n for (var i = 0; i < matchingFieldRefs.length; i++) {\n /*\n * Currently we have document fields that match the query, but we\n * need to return documents. 
The matchData and scores are combined\n * from multiple fields belonging to the same document.\n *\n * Scores are calculated by field, using the query vectors created\n * above, and combined into a final document score using addition.\n */\n var fieldRef = lunr.FieldRef.fromString(matchingFieldRefs[i]),\n docRef = fieldRef.docRef\n\n if (!allRequiredMatches.contains(docRef)) {\n continue\n }\n\n if (allProhibitedMatches.contains(docRef)) {\n continue\n }\n\n var fieldVector = this.fieldVectors[fieldRef],\n score = queryVectors[fieldRef.fieldName].similarity(fieldVector),\n docMatch\n\n if ((docMatch = matches[docRef]) !== undefined) {\n docMatch.score += score\n docMatch.matchData.combine(matchingFields[fieldRef])\n } else {\n var match = {\n ref: docRef,\n score: score,\n matchData: matchingFields[fieldRef]\n }\n matches[docRef] = match\n results.push(match)\n }\n }\n\n /*\n * Sort the results objects by score, highest first.\n */\n return results.sort(function (a, b) {\n return b.score - a.score\n })\n}\n\n/**\n * Prepares the index for JSON serialization.\n *\n * The schema for this JSON blob will be described in a\n * separate JSON schema file.\n *\n * @returns {Object}\n */\nlunr.Index.prototype.toJSON = function () {\n var invertedIndex = Object.keys(this.invertedIndex)\n .sort()\n .map(function (term) {\n return [term, this.invertedIndex[term]]\n }, this)\n\n var fieldVectors = Object.keys(this.fieldVectors)\n .map(function (ref) {\n return [ref, this.fieldVectors[ref].toJSON()]\n }, this)\n\n return {\n version: lunr.version,\n fields: this.fields,\n fieldVectors: fieldVectors,\n invertedIndex: invertedIndex,\n pipeline: this.pipeline.toJSON()\n }\n}\n\n/**\n * Loads a previously serialized lunr.Index\n *\n * @param {Object} serializedIndex - A previously serialized lunr.Index\n * @returns {lunr.Index}\n */\nlunr.Index.load = function (serializedIndex) {\n var attrs = {},\n fieldVectors = {},\n serializedVectors = serializedIndex.fieldVectors,\n invertedIndex = Object.create(null),\n serializedInvertedIndex = serializedIndex.invertedIndex,\n tokenSetBuilder = new lunr.TokenSet.Builder,\n pipeline = lunr.Pipeline.load(serializedIndex.pipeline)\n\n if (serializedIndex.version != lunr.version) {\n lunr.utils.warn(\"Version mismatch when loading serialised index. 
Current version of lunr '\" + lunr.version + \"' does not match serialized index '\" + serializedIndex.version + \"'\")\n }\n\n for (var i = 0; i < serializedVectors.length; i++) {\n var tuple = serializedVectors[i],\n ref = tuple[0],\n elements = tuple[1]\n\n fieldVectors[ref] = new lunr.Vector(elements)\n }\n\n for (var i = 0; i < serializedInvertedIndex.length; i++) {\n var tuple = serializedInvertedIndex[i],\n term = tuple[0],\n posting = tuple[1]\n\n tokenSetBuilder.insert(term)\n invertedIndex[term] = posting\n }\n\n tokenSetBuilder.finish()\n\n attrs.fields = serializedIndex.fields\n\n attrs.fieldVectors = fieldVectors\n attrs.invertedIndex = invertedIndex\n attrs.tokenSet = tokenSetBuilder.root\n attrs.pipeline = pipeline\n\n return new lunr.Index(attrs)\n}\n/*!\n * lunr.Builder\n * Copyright (C) 2020 Oliver Nightingale\n */\n\n/**\n * lunr.Builder performs indexing on a set of documents and\n * returns instances of lunr.Index ready for querying.\n *\n * All configuration of the index is done via the builder, the\n * fields to index, the document reference, the text processing\n * pipeline and document scoring parameters are all set on the\n * builder before indexing.\n *\n * @constructor\n * @property {string} _ref - Internal reference to the document reference field.\n * @property {string[]} _fields - Internal reference to the document fields to index.\n * @property {object} invertedIndex - The inverted index maps terms to document fields.\n * @property {object} documentTermFrequencies - Keeps track of document term frequencies.\n * @property {object} documentLengths - Keeps track of the length of documents added to the index.\n * @property {lunr.tokenizer} tokenizer - Function for splitting strings into tokens for indexing.\n * @property {lunr.Pipeline} pipeline - The pipeline performs text processing on tokens before indexing.\n * @property {lunr.Pipeline} searchPipeline - A pipeline for processing search terms before querying the index.\n * @property {number} documentCount - Keeps track of the total number of documents indexed.\n * @property {number} _b - A parameter to control field length normalization, setting this to 0 disabled normalization, 1 fully normalizes field lengths, the default value is 0.75.\n * @property {number} _k1 - A parameter to control how quickly an increase in term frequency results in term frequency saturation, the default value is 1.2.\n * @property {number} termIndex - A counter incremented for each unique term, used to identify a terms position in the vector space.\n * @property {array} metadataWhitelist - A list of metadata keys that have been whitelisted for entry in the index.\n */\nlunr.Builder = function () {\n this._ref = \"id\"\n this._fields = Object.create(null)\n this._documents = Object.create(null)\n this.invertedIndex = Object.create(null)\n this.fieldTermFrequencies = {}\n this.fieldLengths = {}\n this.tokenizer = lunr.tokenizer\n this.pipeline = new lunr.Pipeline\n this.searchPipeline = new lunr.Pipeline\n this.documentCount = 0\n this._b = 0.75\n this._k1 = 1.2\n this.termIndex = 0\n this.metadataWhitelist = []\n}\n\n/**\n * Sets the document field used as the document reference. Every document must have this field.\n * The type of this field in the document should be a string, if it is not a string it will be\n * coerced into a string by calling toString.\n *\n * The default ref is 'id'.\n *\n * The ref should _not_ be changed during indexing, it should be set before any documents are\n * added to the index. 
Changing it during indexing can lead to inconsistent results.\n *\n * @param {string} ref - The name of the reference field in the document.\n */\nlunr.Builder.prototype.ref = function (ref) {\n this._ref = ref\n}\n\n/**\n * A function that is used to extract a field from a document.\n *\n * Lunr expects a field to be at the top level of a document, if however the field\n * is deeply nested within a document an extractor function can be used to extract\n * the right field for indexing.\n *\n * @callback fieldExtractor\n * @param {object} doc - The document being added to the index.\n * @returns {?(string|object|object[])} obj - The object that will be indexed for this field.\n * @example Extracting a nested field\n * function (doc) { return doc.nested.field }\n */\n\n/**\n * Adds a field to the list of document fields that will be indexed. Every document being\n * indexed should have this field. Null values for this field in indexed documents will\n * not cause errors but will limit the chance of that document being retrieved by searches.\n *\n * All fields should be added before adding documents to the index. Adding fields after\n * a document has been indexed will have no effect on already indexed documents.\n *\n * Fields can be boosted at build time. This allows terms within that field to have more\n * importance when ranking search results. Use a field boost to specify that matches within\n * one field are more important than other fields.\n *\n * @param {string} fieldName - The name of a field to index in all documents.\n * @param {object} attributes - Optional attributes associated with this field.\n * @param {number} [attributes.boost=1] - Boost applied to all terms within this field.\n * @param {fieldExtractor} [attributes.extractor] - Function to extract a field from a document.\n * @throws {RangeError} fieldName cannot contain unsupported characters '/'\n */\nlunr.Builder.prototype.field = function (fieldName, attributes) {\n if (/\\//.test(fieldName)) {\n throw new RangeError (\"Field '\" + fieldName + \"' contains illegal character '/'\")\n }\n\n this._fields[fieldName] = attributes || {}\n}\n\n/**\n * A parameter to tune the amount of field length normalisation that is applied when\n * calculating relevance scores. A value of 0 will completely disable any normalisation\n * and a value of 1 will fully normalise field lengths. The default is 0.75. Values of b\n * will be clamped to the range 0 - 1.\n *\n * @param {number} number - The value to set for this tuning parameter.\n */\nlunr.Builder.prototype.b = function (number) {\n if (number < 0) {\n this._b = 0\n } else if (number > 1) {\n this._b = 1\n } else {\n this._b = number\n }\n}\n\n/**\n * A parameter that controls the speed at which a rise in term frequency results in term\n * frequency saturation. The default value is 1.2. 
Setting this to a higher value will give\n * slower saturation levels, a lower value will result in quicker saturation.\n *\n * @param {number} number - The value to set for this tuning parameter.\n */\nlunr.Builder.prototype.k1 = function (number) {\n this._k1 = number\n}\n\n/**\n * Adds a document to the index.\n *\n * Before adding fields to the index the index should have been fully setup, with the document\n * ref and all fields to index already having been specified.\n *\n * The document must have a field name as specified by the ref (by default this is 'id') and\n * it should have all fields defined for indexing, though null or undefined values will not\n * cause errors.\n *\n * Entire documents can be boosted at build time. Applying a boost to a document indicates that\n * this document should rank higher in search results than other documents.\n *\n * @param {object} doc - The document to add to the index.\n * @param {object} attributes - Optional attributes associated with this document.\n * @param {number} [attributes.boost=1] - Boost applied to all terms within this document.\n */\nlunr.Builder.prototype.add = function (doc, attributes) {\n var docRef = doc[this._ref],\n fields = Object.keys(this._fields)\n\n this._documents[docRef] = attributes || {}\n this.documentCount += 1\n\n for (var i = 0; i < fields.length; i++) {\n var fieldName = fields[i],\n extractor = this._fields[fieldName].extractor,\n field = extractor ? extractor(doc) : doc[fieldName],\n tokens = this.tokenizer(field, {\n fields: [fieldName]\n }),\n terms = this.pipeline.run(tokens),\n fieldRef = new lunr.FieldRef (docRef, fieldName),\n fieldTerms = Object.create(null)\n\n this.fieldTermFrequencies[fieldRef] = fieldTerms\n this.fieldLengths[fieldRef] = 0\n\n // store the length of this field for this document\n this.fieldLengths[fieldRef] += terms.length\n\n // calculate term frequencies for this field\n for (var j = 0; j < terms.length; j++) {\n var term = terms[j]\n\n if (fieldTerms[term] == undefined) {\n fieldTerms[term] = 0\n }\n\n fieldTerms[term] += 1\n\n // add to inverted index\n // create an initial posting if one doesn't exist\n if (this.invertedIndex[term] == undefined) {\n var posting = Object.create(null)\n posting[\"_index\"] = this.termIndex\n this.termIndex += 1\n\n for (var k = 0; k < fields.length; k++) {\n posting[fields[k]] = Object.create(null)\n }\n\n this.invertedIndex[term] = posting\n }\n\n // add an entry for this term/fieldName/docRef to the invertedIndex\n if (this.invertedIndex[term][fieldName][docRef] == undefined) {\n this.invertedIndex[term][fieldName][docRef] = Object.create(null)\n }\n\n // store all whitelisted metadata about this token in the\n // inverted index\n for (var l = 0; l < this.metadataWhitelist.length; l++) {\n var metadataKey = this.metadataWhitelist[l],\n metadata = term.metadata[metadataKey]\n\n if (this.invertedIndex[term][fieldName][docRef][metadataKey] == undefined) {\n this.invertedIndex[term][fieldName][docRef][metadataKey] = []\n }\n\n this.invertedIndex[term][fieldName][docRef][metadataKey].push(metadata)\n }\n }\n\n }\n}\n\n/**\n * Calculates the average document length for this index\n *\n * @private\n */\nlunr.Builder.prototype.calculateAverageFieldLengths = function () {\n\n var fieldRefs = Object.keys(this.fieldLengths),\n numberOfFields = fieldRefs.length,\n accumulator = {},\n documentsWithField = {}\n\n for (var i = 0; i < numberOfFields; i++) {\n var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]),\n field = fieldRef.fieldName\n\n 
documentsWithField[field] || (documentsWithField[field] = 0)\n documentsWithField[field] += 1\n\n accumulator[field] || (accumulator[field] = 0)\n accumulator[field] += this.fieldLengths[fieldRef]\n }\n\n var fields = Object.keys(this._fields)\n\n for (var i = 0; i < fields.length; i++) {\n var fieldName = fields[i]\n accumulator[fieldName] = accumulator[fieldName] / documentsWithField[fieldName]\n }\n\n this.averageFieldLength = accumulator\n}\n\n/**\n * Builds a vector space model of every document using lunr.Vector\n *\n * @private\n */\nlunr.Builder.prototype.createFieldVectors = function () {\n var fieldVectors = {},\n fieldRefs = Object.keys(this.fieldTermFrequencies),\n fieldRefsLength = fieldRefs.length,\n termIdfCache = Object.create(null)\n\n for (var i = 0; i < fieldRefsLength; i++) {\n var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]),\n fieldName = fieldRef.fieldName,\n fieldLength = this.fieldLengths[fieldRef],\n fieldVector = new lunr.Vector,\n termFrequencies = this.fieldTermFrequencies[fieldRef],\n terms = Object.keys(termFrequencies),\n termsLength = terms.length\n\n\n var fieldBoost = this._fields[fieldName].boost || 1,\n docBoost = this._documents[fieldRef.docRef].boost || 1\n\n for (var j = 0; j < termsLength; j++) {\n var term = terms[j],\n tf = termFrequencies[term],\n termIndex = this.invertedIndex[term]._index,\n idf, score, scoreWithPrecision\n\n if (termIdfCache[term] === undefined) {\n idf = lunr.idf(this.invertedIndex[term], this.documentCount)\n termIdfCache[term] = idf\n } else {\n idf = termIdfCache[term]\n }\n\n score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (fieldLength / this.averageFieldLength[fieldName])) + tf)\n score *= fieldBoost\n score *= docBoost\n scoreWithPrecision = Math.round(score * 1000) / 1000\n // Converts 1.23456789 to 1.234.\n // Reducing the precision so that the vectors take up less\n // space when serialised. Doing it now so that they behave\n // the same before and after serialisation. Also, this is\n // the fastest approach to reducing a number's precision in\n // JavaScript.\n\n fieldVector.insert(termIndex, scoreWithPrecision)\n }\n\n fieldVectors[fieldRef] = fieldVector\n }\n\n this.fieldVectors = fieldVectors\n}\n\n/**\n * Creates a token set of all tokens in the index using lunr.TokenSet\n *\n * @private\n */\nlunr.Builder.prototype.createTokenSet = function () {\n this.tokenSet = lunr.TokenSet.fromArray(\n Object.keys(this.invertedIndex).sort()\n )\n}\n\n/**\n * Builds the index, creating an instance of lunr.Index.\n *\n * This completes the indexing process and should only be called\n * once all documents have been added to the index.\n *\n * @returns {lunr.Index}\n */\nlunr.Builder.prototype.build = function () {\n this.calculateAverageFieldLengths()\n this.createFieldVectors()\n this.createTokenSet()\n\n return new lunr.Index({\n invertedIndex: this.invertedIndex,\n fieldVectors: this.fieldVectors,\n tokenSet: this.tokenSet,\n fields: Object.keys(this._fields),\n pipeline: this.searchPipeline\n })\n}\n\n/**\n * Applies a plugin to the index builder.\n *\n * A plugin is a function that is called with the index builder as its context.\n * Plugins can be used to customise or extend the behaviour of the index\n * in some way. A plugin is just a function, that encapsulated the custom\n * behaviour that should be applied when building the index.\n *\n * The plugin function will be called with the index builder as its argument, additional\n * arguments can also be passed when calling use. 
The function will be called\n * with the index builder as its context.\n *\n * @param {Function} plugin The plugin to apply.\n */\nlunr.Builder.prototype.use = function (fn) {\n var args = Array.prototype.slice.call(arguments, 1)\n args.unshift(this)\n fn.apply(this, args)\n}\n/**\n * Contains and collects metadata about a matching document.\n * A single instance of lunr.MatchData is returned as part of every\n * lunr.Index~Result.\n *\n * @constructor\n * @param {string} term - The term this match data is associated with\n * @param {string} field - The field in which the term was found\n * @param {object} metadata - The metadata recorded about this term in this field\n * @property {object} metadata - A cloned collection of metadata associated with this document.\n * @see {@link lunr.Index~Result}\n */\nlunr.MatchData = function (term, field, metadata) {\n var clonedMetadata = Object.create(null),\n metadataKeys = Object.keys(metadata || {})\n\n // Cloning the metadata to prevent the original\n // being mutated during match data combination.\n // Metadata is kept in an array within the inverted\n // index so cloning the data can be done with\n // Array#slice\n for (var i = 0; i < metadataKeys.length; i++) {\n var key = metadataKeys[i]\n clonedMetadata[key] = metadata[key].slice()\n }\n\n this.metadata = Object.create(null)\n\n if (term !== undefined) {\n this.metadata[term] = Object.create(null)\n this.metadata[term][field] = clonedMetadata\n }\n}\n\n/**\n * An instance of lunr.MatchData will be created for every term that matches a\n * document. However only one instance is required in a lunr.Index~Result. This\n * method combines metadata from another instance of lunr.MatchData with this\n * objects metadata.\n *\n * @param {lunr.MatchData} otherMatchData - Another instance of match data to merge with this one.\n * @see {@link lunr.Index~Result}\n */\nlunr.MatchData.prototype.combine = function (otherMatchData) {\n var terms = Object.keys(otherMatchData.metadata)\n\n for (var i = 0; i < terms.length; i++) {\n var term = terms[i],\n fields = Object.keys(otherMatchData.metadata[term])\n\n if (this.metadata[term] == undefined) {\n this.metadata[term] = Object.create(null)\n }\n\n for (var j = 0; j < fields.length; j++) {\n var field = fields[j],\n keys = Object.keys(otherMatchData.metadata[term][field])\n\n if (this.metadata[term][field] == undefined) {\n this.metadata[term][field] = Object.create(null)\n }\n\n for (var k = 0; k < keys.length; k++) {\n var key = keys[k]\n\n if (this.metadata[term][field][key] == undefined) {\n this.metadata[term][field][key] = otherMatchData.metadata[term][field][key]\n } else {\n this.metadata[term][field][key] = this.metadata[term][field][key].concat(otherMatchData.metadata[term][field][key])\n }\n\n }\n }\n }\n}\n\n/**\n * Add metadata for a term/field pair to this instance of match data.\n *\n * @param {string} term - The term this match data is associated with\n * @param {string} field - The field in which the term was found\n * @param {object} metadata - The metadata recorded about this term in this field\n */\nlunr.MatchData.prototype.add = function (term, field, metadata) {\n if (!(term in this.metadata)) {\n this.metadata[term] = Object.create(null)\n this.metadata[term][field] = metadata\n return\n }\n\n if (!(field in this.metadata[term])) {\n this.metadata[term][field] = metadata\n return\n }\n\n var metadataKeys = Object.keys(metadata)\n\n for (var i = 0; i < metadataKeys.length; i++) {\n var key = metadataKeys[i]\n\n if (key in 
this.metadata[term][field]) {\n this.metadata[term][field][key] = this.metadata[term][field][key].concat(metadata[key])\n } else {\n this.metadata[term][field][key] = metadata[key]\n }\n }\n}\n/**\n * A lunr.Query provides a programmatic way of defining queries to be performed\n * against a {@link lunr.Index}.\n *\n * Prefer constructing a lunr.Query using the {@link lunr.Index#query} method\n * so the query object is pre-initialized with the right index fields.\n *\n * @constructor\n * @property {lunr.Query~Clause[]} clauses - An array of query clauses.\n * @property {string[]} allFields - An array of all available fields in a lunr.Index.\n */\nlunr.Query = function (allFields) {\n this.clauses = []\n this.allFields = allFields\n}\n\n/**\n * Constants for indicating what kind of automatic wildcard insertion will be used when constructing a query clause.\n *\n * This allows wildcards to be added to the beginning and end of a term without having to manually do any string\n * concatenation.\n *\n * The wildcard constants can be bitwise combined to select both leading and trailing wildcards.\n *\n * @constant\n * @default\n * @property {number} wildcard.NONE - The term will have no wildcards inserted, this is the default behaviour\n * @property {number} wildcard.LEADING - Prepend the term with a wildcard, unless a leading wildcard already exists\n * @property {number} wildcard.TRAILING - Append a wildcard to the term, unless a trailing wildcard already exists\n * @see lunr.Query~Clause\n * @see lunr.Query#clause\n * @see lunr.Query#term\n * @example query term with trailing wildcard\n * query.term('foo', { wildcard: lunr.Query.wildcard.TRAILING })\n * @example query term with leading and trailing wildcard\n * query.term('foo', {\n * wildcard: lunr.Query.wildcard.LEADING | lunr.Query.wildcard.TRAILING\n * })\n */\n\nlunr.Query.wildcard = new String (\"*\")\nlunr.Query.wildcard.NONE = 0\nlunr.Query.wildcard.LEADING = 1\nlunr.Query.wildcard.TRAILING = 2\n\n/**\n * Constants for indicating what kind of presence a term must have in matching documents.\n *\n * @constant\n * @enum {number}\n * @see lunr.Query~Clause\n * @see lunr.Query#clause\n * @see lunr.Query#term\n * @example query term with required presence\n * query.term('foo', { presence: lunr.Query.presence.REQUIRED })\n */\nlunr.Query.presence = {\n /**\n * Term's presence in a document is optional, this is the default value.\n */\n OPTIONAL: 1,\n\n /**\n * Term's presence in a document is required, documents that do not contain\n * this term will not be returned.\n */\n REQUIRED: 2,\n\n /**\n * Term's presence in a document is prohibited, documents that do contain\n * this term will not be returned.\n */\n PROHIBITED: 3\n}\n\n/**\n * A single clause in a {@link lunr.Query} contains a term and details on how to\n * match that term against a {@link lunr.Index}.\n *\n * @typedef {Object} lunr.Query~Clause\n * @property {string[]} fields - The fields in an index this clause should be matched against.\n * @property {number} [boost=1] - Any boost that should be applied when matching this clause.\n * @property {number} [editDistance] - Whether the term should have fuzzy matching applied, and how fuzzy the match should be.\n * @property {boolean} [usePipeline] - Whether the term should be passed through the search pipeline.\n * @property {number} [wildcard=lunr.Query.wildcard.NONE] - Whether the term should have wildcards appended or prepended.\n * @property {number} [presence=lunr.Query.presence.OPTIONAL] - The terms presence in any matching 
documents.\n */\n\n/**\n * Adds a {@link lunr.Query~Clause} to this query.\n *\n * Unless the clause contains the fields to be matched all fields will be matched. In addition\n * a default boost of 1 is applied to the clause.\n *\n * @param {lunr.Query~Clause} clause - The clause to add to this query.\n * @see lunr.Query~Clause\n * @returns {lunr.Query}\n */\nlunr.Query.prototype.clause = function (clause) {\n if (!('fields' in clause)) {\n clause.fields = this.allFields\n }\n\n if (!('boost' in clause)) {\n clause.boost = 1\n }\n\n if (!('usePipeline' in clause)) {\n clause.usePipeline = true\n }\n\n if (!('wildcard' in clause)) {\n clause.wildcard = lunr.Query.wildcard.NONE\n }\n\n if ((clause.wildcard & lunr.Query.wildcard.LEADING) && (clause.term.charAt(0) != lunr.Query.wildcard)) {\n clause.term = \"*\" + clause.term\n }\n\n if ((clause.wildcard & lunr.Query.wildcard.TRAILING) && (clause.term.slice(-1) != lunr.Query.wildcard)) {\n clause.term = \"\" + clause.term + \"*\"\n }\n\n if (!('presence' in clause)) {\n clause.presence = lunr.Query.presence.OPTIONAL\n }\n\n this.clauses.push(clause)\n\n return this\n}\n\n/**\n * A negated query is one in which every clause has a presence of\n * prohibited. These queries require some special processing to return\n * the expected results.\n *\n * @returns boolean\n */\nlunr.Query.prototype.isNegated = function () {\n for (var i = 0; i < this.clauses.length; i++) {\n if (this.clauses[i].presence != lunr.Query.presence.PROHIBITED) {\n return false\n }\n }\n\n return true\n}\n\n/**\n * Adds a term to the current query, under the covers this will create a {@link lunr.Query~Clause}\n * to the list of clauses that make up this query.\n *\n * The term is used as is, i.e. no tokenization will be performed by this method. Instead conversion\n * to a token or token-like string should be done before calling this method.\n *\n * The term will be converted to a string by calling `toString`. 
Multiple terms can be passed as an\n * array, each term in the array will share the same options.\n *\n * @param {object|object[]} term - The term(s) to add to the query.\n * @param {object} [options] - Any additional properties to add to the query clause.\n * @returns {lunr.Query}\n * @see lunr.Query#clause\n * @see lunr.Query~Clause\n * @example adding a single term to a query\n * query.term(\"foo\")\n * @example adding a single term to a query and specifying search fields, term boost and automatic trailing wildcard\n * query.term(\"foo\", {\n * fields: [\"title\"],\n * boost: 10,\n * wildcard: lunr.Query.wildcard.TRAILING\n * })\n * @example using lunr.tokenizer to convert a string to tokens before using them as terms\n * query.term(lunr.tokenizer(\"foo bar\"))\n */\nlunr.Query.prototype.term = function (term, options) {\n if (Array.isArray(term)) {\n term.forEach(function (t) { this.term(t, lunr.utils.clone(options)) }, this)\n return this\n }\n\n var clause = options || {}\n clause.term = term.toString()\n\n this.clause(clause)\n\n return this\n}\nlunr.QueryParseError = function (message, start, end) {\n this.name = \"QueryParseError\"\n this.message = message\n this.start = start\n this.end = end\n}\n\nlunr.QueryParseError.prototype = new Error\nlunr.QueryLexer = function (str) {\n this.lexemes = []\n this.str = str\n this.length = str.length\n this.pos = 0\n this.start = 0\n this.escapeCharPositions = []\n}\n\nlunr.QueryLexer.prototype.run = function () {\n var state = lunr.QueryLexer.lexText\n\n while (state) {\n state = state(this)\n }\n}\n\nlunr.QueryLexer.prototype.sliceString = function () {\n var subSlices = [],\n sliceStart = this.start,\n sliceEnd = this.pos\n\n for (var i = 0; i < this.escapeCharPositions.length; i++) {\n sliceEnd = this.escapeCharPositions[i]\n subSlices.push(this.str.slice(sliceStart, sliceEnd))\n sliceStart = sliceEnd + 1\n }\n\n subSlices.push(this.str.slice(sliceStart, this.pos))\n this.escapeCharPositions.length = 0\n\n return subSlices.join('')\n}\n\nlunr.QueryLexer.prototype.emit = function (type) {\n this.lexemes.push({\n type: type,\n str: this.sliceString(),\n start: this.start,\n end: this.pos\n })\n\n this.start = this.pos\n}\n\nlunr.QueryLexer.prototype.escapeCharacter = function () {\n this.escapeCharPositions.push(this.pos - 1)\n this.pos += 1\n}\n\nlunr.QueryLexer.prototype.next = function () {\n if (this.pos >= this.length) {\n return lunr.QueryLexer.EOS\n }\n\n var char = this.str.charAt(this.pos)\n this.pos += 1\n return char\n}\n\nlunr.QueryLexer.prototype.width = function () {\n return this.pos - this.start\n}\n\nlunr.QueryLexer.prototype.ignore = function () {\n if (this.start == this.pos) {\n this.pos += 1\n }\n\n this.start = this.pos\n}\n\nlunr.QueryLexer.prototype.backup = function () {\n this.pos -= 1\n}\n\nlunr.QueryLexer.prototype.acceptDigitRun = function () {\n var char, charCode\n\n do {\n char = this.next()\n charCode = char.charCodeAt(0)\n } while (charCode > 47 && charCode < 58)\n\n if (char != lunr.QueryLexer.EOS) {\n this.backup()\n }\n}\n\nlunr.QueryLexer.prototype.more = function () {\n return this.pos < this.length\n}\n\nlunr.QueryLexer.EOS = 'EOS'\nlunr.QueryLexer.FIELD = 'FIELD'\nlunr.QueryLexer.TERM = 'TERM'\nlunr.QueryLexer.EDIT_DISTANCE = 'EDIT_DISTANCE'\nlunr.QueryLexer.BOOST = 'BOOST'\nlunr.QueryLexer.PRESENCE = 'PRESENCE'\n\nlunr.QueryLexer.lexField = function (lexer) {\n lexer.backup()\n lexer.emit(lunr.QueryLexer.FIELD)\n lexer.ignore()\n return lunr.QueryLexer.lexText\n}\n\nlunr.QueryLexer.lexTerm = 
function (lexer) {\n if (lexer.width() > 1) {\n lexer.backup()\n lexer.emit(lunr.QueryLexer.TERM)\n }\n\n lexer.ignore()\n\n if (lexer.more()) {\n return lunr.QueryLexer.lexText\n }\n}\n\nlunr.QueryLexer.lexEditDistance = function (lexer) {\n lexer.ignore()\n lexer.acceptDigitRun()\n lexer.emit(lunr.QueryLexer.EDIT_DISTANCE)\n return lunr.QueryLexer.lexText\n}\n\nlunr.QueryLexer.lexBoost = function (lexer) {\n lexer.ignore()\n lexer.acceptDigitRun()\n lexer.emit(lunr.QueryLexer.BOOST)\n return lunr.QueryLexer.lexText\n}\n\nlunr.QueryLexer.lexEOS = function (lexer) {\n if (lexer.width() > 0) {\n lexer.emit(lunr.QueryLexer.TERM)\n }\n}\n\n// This matches the separator used when tokenising fields\n// within a document. These should match otherwise it is\n// not possible to search for some tokens within a document.\n//\n// It is possible for the user to change the separator on the\n// tokenizer so it _might_ clash with any other of the special\n// characters already used within the search string, e.g. :.\n//\n// This means that it is possible to change the separator in\n// such a way that makes some words unsearchable using a search\n// string.\nlunr.QueryLexer.termSeparator = lunr.tokenizer.separator\n\nlunr.QueryLexer.lexText = function (lexer) {\n while (true) {\n var char = lexer.next()\n\n if (char == lunr.QueryLexer.EOS) {\n return lunr.QueryLexer.lexEOS\n }\n\n // Escape character is '\\'\n if (char.charCodeAt(0) == 92) {\n lexer.escapeCharacter()\n continue\n }\n\n if (char == \":\") {\n return lunr.QueryLexer.lexField\n }\n\n if (char == \"~\") {\n lexer.backup()\n if (lexer.width() > 0) {\n lexer.emit(lunr.QueryLexer.TERM)\n }\n return lunr.QueryLexer.lexEditDistance\n }\n\n if (char == \"^\") {\n lexer.backup()\n if (lexer.width() > 0) {\n lexer.emit(lunr.QueryLexer.TERM)\n }\n return lunr.QueryLexer.lexBoost\n }\n\n // \"+\" indicates term presence is required\n // checking for length to ensure that only\n // leading \"+\" are considered\n if (char == \"+\" && lexer.width() === 1) {\n lexer.emit(lunr.QueryLexer.PRESENCE)\n return lunr.QueryLexer.lexText\n }\n\n // \"-\" indicates term presence is prohibited\n // checking for length to ensure that only\n // leading \"-\" are considered\n if (char == \"-\" && lexer.width() === 1) {\n lexer.emit(lunr.QueryLexer.PRESENCE)\n return lunr.QueryLexer.lexText\n }\n\n if (char.match(lunr.QueryLexer.termSeparator)) {\n return lunr.QueryLexer.lexTerm\n }\n }\n}\n\nlunr.QueryParser = function (str, query) {\n this.lexer = new lunr.QueryLexer (str)\n this.query = query\n this.currentClause = {}\n this.lexemeIdx = 0\n}\n\nlunr.QueryParser.prototype.parse = function () {\n this.lexer.run()\n this.lexemes = this.lexer.lexemes\n\n var state = lunr.QueryParser.parseClause\n\n while (state) {\n state = state(this)\n }\n\n return this.query\n}\n\nlunr.QueryParser.prototype.peekLexeme = function () {\n return this.lexemes[this.lexemeIdx]\n}\n\nlunr.QueryParser.prototype.consumeLexeme = function () {\n var lexeme = this.peekLexeme()\n this.lexemeIdx += 1\n return lexeme\n}\n\nlunr.QueryParser.prototype.nextClause = function () {\n var completedClause = this.currentClause\n this.query.clause(completedClause)\n this.currentClause = {}\n}\n\nlunr.QueryParser.parseClause = function (parser) {\n var lexeme = parser.peekLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n switch (lexeme.type) {\n case lunr.QueryLexer.PRESENCE:\n return lunr.QueryParser.parsePresence\n case lunr.QueryLexer.FIELD:\n return lunr.QueryParser.parseField\n case 
lunr.QueryLexer.TERM:\n return lunr.QueryParser.parseTerm\n default:\n var errorMessage = \"expected either a field or a term, found \" + lexeme.type\n\n if (lexeme.str.length >= 1) {\n errorMessage += \" with value '\" + lexeme.str + \"'\"\n }\n\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n}\n\nlunr.QueryParser.parsePresence = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n switch (lexeme.str) {\n case \"-\":\n parser.currentClause.presence = lunr.Query.presence.PROHIBITED\n break\n case \"+\":\n parser.currentClause.presence = lunr.Query.presence.REQUIRED\n break\n default:\n var errorMessage = \"unrecognised presence operator'\" + lexeme.str + \"'\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n var errorMessage = \"expecting term or field, found nothing\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.FIELD:\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.TERM:\n return lunr.QueryParser.parseTerm\n default:\n var errorMessage = \"expecting term or field, found '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseField = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n if (parser.query.allFields.indexOf(lexeme.str) == -1) {\n var possibleFields = parser.query.allFields.map(function (f) { return \"'\" + f + \"'\" }).join(', '),\n errorMessage = \"unrecognised field '\" + lexeme.str + \"', possible fields: \" + possibleFields\n\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n parser.currentClause.fields = [lexeme.str]\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n var errorMessage = \"expecting term, found nothing\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n return lunr.QueryParser.parseTerm\n default:\n var errorMessage = \"expecting term, found '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseTerm = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n parser.currentClause.term = lexeme.str.toLowerCase()\n\n if (lexeme.str.indexOf(\"*\") != -1) {\n parser.currentClause.usePipeline = false\n }\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n parser.nextClause()\n return\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n parser.nextClause()\n return lunr.QueryParser.parseTerm\n case lunr.QueryLexer.FIELD:\n parser.nextClause()\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.EDIT_DISTANCE:\n return lunr.QueryParser.parseEditDistance\n case lunr.QueryLexer.BOOST:\n return lunr.QueryParser.parseBoost\n case lunr.QueryLexer.PRESENCE:\n parser.nextClause()\n return lunr.QueryParser.parsePresence\n default:\n var errorMessage = \"Unexpected lexeme type '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseEditDistance = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n 
return\n }\n\n var editDistance = parseInt(lexeme.str, 10)\n\n if (isNaN(editDistance)) {\n var errorMessage = \"edit distance must be numeric\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n parser.currentClause.editDistance = editDistance\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n parser.nextClause()\n return\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n parser.nextClause()\n return lunr.QueryParser.parseTerm\n case lunr.QueryLexer.FIELD:\n parser.nextClause()\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.EDIT_DISTANCE:\n return lunr.QueryParser.parseEditDistance\n case lunr.QueryLexer.BOOST:\n return lunr.QueryParser.parseBoost\n case lunr.QueryLexer.PRESENCE:\n parser.nextClause()\n return lunr.QueryParser.parsePresence\n default:\n var errorMessage = \"Unexpected lexeme type '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\nlunr.QueryParser.parseBoost = function (parser) {\n var lexeme = parser.consumeLexeme()\n\n if (lexeme == undefined) {\n return\n }\n\n var boost = parseInt(lexeme.str, 10)\n\n if (isNaN(boost)) {\n var errorMessage = \"boost must be numeric\"\n throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end)\n }\n\n parser.currentClause.boost = boost\n\n var nextLexeme = parser.peekLexeme()\n\n if (nextLexeme == undefined) {\n parser.nextClause()\n return\n }\n\n switch (nextLexeme.type) {\n case lunr.QueryLexer.TERM:\n parser.nextClause()\n return lunr.QueryParser.parseTerm\n case lunr.QueryLexer.FIELD:\n parser.nextClause()\n return lunr.QueryParser.parseField\n case lunr.QueryLexer.EDIT_DISTANCE:\n return lunr.QueryParser.parseEditDistance\n case lunr.QueryLexer.BOOST:\n return lunr.QueryParser.parseBoost\n case lunr.QueryLexer.PRESENCE:\n parser.nextClause()\n return lunr.QueryParser.parsePresence\n default:\n var errorMessage = \"Unexpected lexeme type '\" + nextLexeme.type + \"'\"\n throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end)\n }\n}\n\n /**\n * export the module via AMD, CommonJS or as a browser global\n * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js\n */\n ;(function (root, factory) {\n if (typeof define === 'function' && define.amd) {\n // AMD. Register as an anonymous module.\n define(factory)\n } else if (typeof exports === 'object') {\n /**\n * Node. 
Does not work with strict CommonJS, but\n * only CommonJS-like enviroments that support module.exports,\n * like Node.\n */\n module.exports = factory()\n } else {\n // Browser globals (root is window)\n root.lunr = factory()\n }\n }(this, function () {\n /**\n * Just return a value to define the module export.\n * This example returns an object, but the module\n * can return a function as the exported value.\n */\n return lunr\n }))\n})();\n", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A RTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport lunr from \"lunr\"\n\nimport \"~/polyfills\"\n\nimport { Search, SearchIndexConfig } from \"../../_\"\nimport {\n SearchMessage,\n SearchMessageType\n} from \"../message\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Add support for usage with `iframe-worker` polyfill\n *\n * While `importScripts` is synchronous when executed inside of a web worker,\n * it's not possible to provide a synchronous polyfilled implementation. 
The\n * cool thing is that awaiting a non-Promise is a noop, so extending the type\n * definition to return a `Promise` shouldn't break anything.\n *\n * @see https://bit.ly/2PjDnXi - GitHub comment\n */\ndeclare global {\n function importScripts(...urls: string[]): Promise | void\n}\n\n/* ----------------------------------------------------------------------------\n * Data\n * ------------------------------------------------------------------------- */\n\n/**\n * Search index\n */\nlet index: Search\n\n/* ----------------------------------------------------------------------------\n * Helper functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch (= import) multi-language support through `lunr-languages`\n *\n * This function automatically imports the stemmers necessary to process the\n * languages, which are defined through the search index configuration.\n *\n * If the worker runs inside of an `iframe` (when using `iframe-worker` as\n * a shim), the base URL for the stemmers to be loaded must be determined by\n * searching for the first `script` element with a `src` attribute, which will\n * contain the contents of this script.\n *\n * @param config - Search index configuration\n *\n * @returns Promise resolving with no result\n */\nasync function setupSearchLanguages(\n config: SearchIndexConfig\n): Promise {\n let base = \"../lunr\"\n\n /* Detect `iframe-worker` and fix base URL */\n if (typeof parent !== \"undefined\" && \"IFrameWorker\" in parent) {\n const worker = document.querySelector(\"script[src]\")!\n const [path] = worker.src.split(\"/worker\")\n\n /* Prefix base with path */\n base = base.replace(\"..\", path)\n }\n\n /* Add scripts for languages */\n const scripts = []\n for (const lang of config.lang) {\n switch (lang) {\n\n /* Add segmenter for Japanese */\n case \"ja\":\n scripts.push(`${base}/tinyseg.js`)\n break\n\n /* Add segmenter for Hindi and Thai */\n case \"hi\":\n case \"th\":\n scripts.push(`${base}/wordcut.js`)\n break\n }\n\n /* Add language support */\n if (lang !== \"en\")\n scripts.push(`${base}/min/lunr.${lang}.min.js`)\n }\n\n /* Add multi-language support */\n if (config.lang.length > 1)\n scripts.push(`${base}/min/lunr.multi.min.js`)\n\n /* Load scripts synchronously */\n if (scripts.length)\n await importScripts(\n `${base}/min/lunr.stemmer.support.min.js`,\n ...scripts\n )\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Message handler\n *\n * @param message - Source message\n *\n * @returns Target message\n */\nexport async function handler(\n message: SearchMessage\n): Promise {\n switch (message.type) {\n\n /* Search setup message */\n case SearchMessageType.SETUP:\n await setupSearchLanguages(message.data.config)\n index = new Search(message.data)\n return {\n type: SearchMessageType.READY\n }\n\n /* Search query message */\n case SearchMessageType.QUERY:\n return {\n type: SearchMessageType.RESULT,\n data: index ? 
index.search(message.data) : { items: [] }\n }\n\n /* All other messages */\n default:\n throw new TypeError(\"Invalid message type\")\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Worker\n * ------------------------------------------------------------------------- */\n\n/* @ts-expect-error - expose Lunr.js in global scope, or stemmers won't work */\nself.lunr = lunr\n\n/* Handle messages */\naddEventListener(\"message\", async ev => {\n postMessage(await handler(ev.data))\n})\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Polyfills\n * ------------------------------------------------------------------------- */\n\n/* Polyfill `Object.entries` */\nif (!Object.entries)\n Object.entries = function (obj: object) {\n const data: [string, string][] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push([key, obj[key]])\n\n /* Return entries */\n return data\n }\n\n/* Polyfill `Object.values` */\nif (!Object.values)\n Object.values = function (obj: object) {\n const data: string[] = []\n for (const key of Object.keys(obj))\n // @ts-expect-error - ignore property access warning\n data.push(obj[key])\n\n /* Return values */\n return data\n }\n\n/* ------------------------------------------------------------------------- */\n\n/* Polyfills for `Element` */\nif (typeof Element !== \"undefined\") {\n\n /* Polyfill `Element.scrollTo` */\n if (!Element.prototype.scrollTo)\n Element.prototype.scrollTo = function (\n x?: ScrollToOptions | number, y?: number\n ): void {\n if (typeof x === \"object\") {\n this.scrollLeft = x.left!\n this.scrollTop = x.top!\n } else {\n this.scrollLeft = x!\n this.scrollTop = y!\n }\n }\n\n /* Polyfill `Element.replaceWith` */\n if (!Element.prototype.replaceWith)\n Element.prototype.replaceWith = function (\n ...nodes: Array\n ): void {\n const parent = this.parentNode\n if (parent) {\n if (nodes.length === 0)\n parent.removeChild(this)\n\n /* Replace children and create text nodes */\n for (let i = nodes.length - 1; i >= 0; i--) {\n let node = nodes[i]\n if (typeof node === \"string\")\n node = document.createTextNode(node)\n else if (node.parentNode)\n node.parentNode.removeChild(node)\n\n /* Replace child or insert before previous sibling */\n if (!i)\n parent.replaceChild(node, this)\n else\n 
parent.insertBefore(this.previousSibling!, node)\n }\n }\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexDocument } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search document\n */\nexport interface SearchDocument extends SearchIndexDocument {\n parent?: SearchIndexDocument /* Parent article */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search document mapping\n */\nexport type SearchDocumentMap = Map\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search document mapping\n *\n * @param docs - Search index documents\n *\n * @returns Search document map\n */\nexport function setupSearchDocumentMap(\n docs: SearchIndexDocument[]\n): SearchDocumentMap {\n const documents = new Map()\n const parents = new Set()\n for (const doc of docs) {\n const [path, hash] = doc.location.split(\"#\")\n\n /* Extract location, title and tags */\n const location = doc.location\n const title = doc.title\n const tags = doc.tags\n\n /* Escape and cleanup text */\n const text = escapeHTML(doc.text)\n .replace(/\\s+(?=[,.:;!?])/g, \"\")\n .replace(/\\s+/g, \" \")\n\n /* Handle section */\n if (hash) {\n const parent = documents.get(path)!\n\n /* Ignore first section, override article */\n if (!parents.has(parent)) {\n parent.title = doc.title\n parent.text = text\n\n /* Remember that we processed the article */\n parents.add(parent)\n\n /* Add subsequent section */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n parent\n })\n }\n\n /* Add article */\n } else {\n documents.set(location, {\n location,\n title,\n text,\n ...tags && { tags }\n })\n }\n }\n return documents\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit 
persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport escapeHTML from \"escape-html\"\n\nimport { SearchIndexConfig } from \"../_\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search highlight function\n *\n * @param value - Value\n *\n * @returns Highlighted value\n */\nexport type SearchHighlightFn = (value: string) => string\n\n/**\n * Search highlight factory function\n *\n * @param query - Query value\n *\n * @returns Search highlight function\n */\nexport type SearchHighlightFactoryFn = (query: string) => SearchHighlightFn\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Create a search highlighter\n *\n * @param config - Search index configuration\n * @param escape - Whether to escape HTML\n *\n * @returns Search highlight factory function\n */\nexport function setupSearchHighlighter(\n config: SearchIndexConfig, escape: boolean\n): SearchHighlightFactoryFn {\n const separator = new RegExp(config.separator, \"img\")\n const highlight = (_: unknown, data: string, term: string) => {\n return `${data}${term}`\n }\n\n /* Return factory function */\n return (query: string) => {\n query = query\n .replace(/[\\s*+\\-:~^]+/g, \" \")\n .trim()\n\n /* Create search term match expression */\n const match = new RegExp(`(^|${config.separator})(${\n query\n .replace(/[|\\\\{}()[\\]^$+*?.-]/g, \"\\\\$&\")\n .replace(separator, \"|\")\n })`, \"img\")\n\n /* Highlight string value */\n return value => (\n escape\n ? escapeHTML(value)\n : value\n )\n .replace(match, highlight)\n .replace(/<\\/mark>(\\s+)]*>/img, \"$1\")\n }\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search query clause\n */\nexport interface SearchQueryClause {\n presence: lunr.Query.presence /* Clause presence */\n term: string /* Clause term */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search query terms\n */\nexport type SearchQueryTerms = Record\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Parse a search query for analysis\n *\n * @param value - Query value\n *\n * @returns Search query clauses\n */\nexport function parseSearchQuery(\n value: string\n): SearchQueryClause[] {\n const query = new (lunr as any).Query([\"title\", \"text\"])\n const parser = new (lunr as any).QueryParser(value, query)\n\n /* Parse and return query clauses */\n parser.parse()\n return query.clauses\n}\n\n/**\n * Analyze the search query clauses in regard to the search terms found\n *\n * @param query - Search query clauses\n * @param terms - Search terms\n *\n * @returns Search query terms\n */\nexport function getSearchQueryTerms(\n query: SearchQueryClause[], terms: string[]\n): SearchQueryTerms {\n const clauses = new Set(query)\n\n /* Match query clauses against terms */\n const result: SearchQueryTerms = {}\n for (let t = 0; t < terms.length; t++)\n for (const clause of clauses)\n if (terms[t].startsWith(clause.term)) {\n result[clause.term] = true\n clauses.delete(clause)\n }\n\n /* Annotate unmatched non-stopword query clauses */\n for (const clause of clauses)\n if (lunr.stopWordFilter?.(clause.term as any))\n result[clause.term] = false\n\n /* Return query terms */\n return result\n}\n", "/*\n * Copyright (c) 2016-2022 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport {\n SearchDocument,\n SearchDocumentMap,\n setupSearchDocumentMap\n} from \"../document\"\nimport {\n SearchHighlightFactoryFn,\n setupSearchHighlighter\n} from \"../highlighter\"\nimport { SearchOptions } from \"../options\"\nimport {\n SearchQueryTerms,\n getSearchQueryTerms,\n parseSearchQuery\n} from \"../query\"\n\n/* ----------------------------------------------------------------------------\n * Types\n * ------------------------------------------------------------------------- */\n\n/**\n * Search index configuration\n */\nexport interface SearchIndexConfig {\n lang: string[] /* Search languages */\n separator: string /* Search separator */\n}\n\n/**\n * Search index document\n */\nexport interface SearchIndexDocument {\n location: string /* Document location */\n title: string /* Document title */\n text: string /* Document text */\n tags?: string[] /* Document tags */\n boost?: number /* Document boost */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search index\n *\n * This interfaces describes the format of the `search_index.json` file which\n * is automatically built by the MkDocs search plugin.\n */\nexport interface SearchIndex {\n config: SearchIndexConfig /* Search index configuration */\n docs: SearchIndexDocument[] /* Search index documents */\n options: SearchOptions /* Search options */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search metadata\n */\nexport interface SearchMetadata {\n score: number /* Score (relevance) */\n terms: SearchQueryTerms /* Search query terms */\n}\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search result document\n */\nexport type SearchResultDocument = SearchDocument & SearchMetadata\n\n/**\n * Search result item\n */\nexport type SearchResultItem = SearchResultDocument[]\n\n/* ------------------------------------------------------------------------- */\n\n/**\n * Search result\n */\nexport interface SearchResult {\n items: SearchResultItem[] /* Search result items */\n suggestions?: string[] /* Search suggestions */\n}\n\n/* ----------------------------------------------------------------------------\n * Functions\n * ------------------------------------------------------------------------- */\n\n/**\n * Compute the difference of two lists of strings\n *\n * @param a - 1st list of strings\n * @param b - 2nd list of strings\n *\n * @returns Difference\n */\nfunction difference(a: string[], b: string[]): string[] {\n const [x, y] = [new Set(a), new Set(b)]\n return [\n ...new Set([...x].filter(value => !y.has(value)))\n ]\n}\n\n/* ----------------------------------------------------------------------------\n * Class\n * ------------------------------------------------------------------------- */\n\n/**\n * Search index\n */\nexport class Search {\n\n /**\n * Search document mapping\n *\n * A mapping of URLs (including hash fragments) to the actual articles and\n * sections of the documentation. 
The search document mapping must be created\n * regardless of whether the index was prebuilt or not, as Lunr.js itself\n * only stores the actual index.\n */\n protected documents: SearchDocumentMap\n\n /**\n * Search highlight factory function\n */\n protected highlight: SearchHighlightFactoryFn\n\n /**\n * The underlying Lunr.js search index\n */\n protected index: lunr.Index\n\n /**\n * Search options\n */\n protected options: SearchOptions\n\n /**\n * Create the search integration\n *\n * @param data - Search index\n */\n public constructor({ config, docs, options }: SearchIndex) {\n this.options = options\n\n /* Set up document map and highlighter factory */\n this.documents = setupSearchDocumentMap(docs)\n this.highlight = setupSearchHighlighter(config, false)\n\n /* Set separator for tokenizer */\n lunr.tokenizer.separator = new RegExp(config.separator)\n\n /* Create search index */\n this.index = lunr(function () {\n\n /* Set up multi-language support */\n if (config.lang.length === 1 && config.lang[0] !== \"en\") {\n this.use((lunr as any)[config.lang[0]])\n } else if (config.lang.length > 1) {\n this.use((lunr as any).multiLanguage(...config.lang))\n }\n\n /* Compute functions to be removed from the pipeline */\n const fns = difference([\n \"trimmer\", \"stopWordFilter\", \"stemmer\"\n ], options.pipeline)\n\n /* Remove functions from the pipeline for registered languages */\n for (const lang of config.lang.map(language => (\n language === \"en\" ? lunr : (lunr as any)[language]\n ))) {\n for (const fn of fns) {\n this.pipeline.remove(lang[fn])\n this.searchPipeline.remove(lang[fn])\n }\n }\n\n /* Set up reference */\n this.ref(\"location\")\n\n /* Set up fields */\n this.field(\"title\", { boost: 1e3 })\n this.field(\"text\")\n this.field(\"tags\", { boost: 1e6, extractor: doc => {\n const { tags = [] } = doc as SearchDocument\n return tags.reduce((list, tag) => [\n ...list,\n ...lunr.tokenizer(tag)\n ], [] as lunr.Token[])\n } })\n\n /* Index documents */\n for (const doc of docs)\n this.add(doc, { boost: doc.boost })\n })\n }\n\n /**\n * Search for matching documents\n *\n * The search index which MkDocs provides is divided up into articles, which\n * contain the whole content of the individual pages, and sections, which only\n * contain the contents of the subsections obtained by breaking the individual\n * pages up at `h1` ... `h6`. As there may be many sections on different pages\n * with identical titles (for example within this very project, e.g. \"Usage\"\n * or \"Installation\"), they need to be put into the context of the containing\n * page. 
For this reason, section results are grouped within their respective\n * articles which are the top-level results that are returned.\n *\n * @param query - Query value\n *\n * @returns Search results\n */\n public search(query: string): SearchResult {\n if (query) {\n try {\n const highlight = this.highlight(query)\n\n /* Parse query to extract clauses for analysis */\n const clauses = parseSearchQuery(query)\n .filter(clause => (\n clause.presence !== lunr.Query.presence.PROHIBITED\n ))\n\n /* Perform search and post-process results */\n const groups = this.index.search(`${query}*`)\n\n /* Apply post-query boosts based on title and search query terms */\n .reduce((item, { ref, score, matchData }) => {\n const document = this.documents.get(ref)\n if (typeof document !== \"undefined\") {\n const { location, title, text, tags, parent } = document\n\n /* Compute and analyze search query terms */\n const terms = getSearchQueryTerms(\n clauses,\n Object.keys(matchData.metadata)\n )\n\n /* Highlight title and text and apply post-query boosts */\n const boost = +!parent + +Object.values(terms).every(t => t)\n item.push({\n location,\n title: highlight(title),\n text: highlight(text),\n ...tags && { tags: tags.map(highlight) },\n score: score * (1 + boost),\n terms\n })\n }\n return item\n }, [])\n\n /* Sort search results again after applying boosts */\n .sort((a, b) => b.score - a.score)\n\n /* Group search results by page */\n .reduce((items, result) => {\n const document = this.documents.get(result.location)\n if (typeof document !== \"undefined\") {\n const ref = \"parent\" in document\n ? document.parent!.location\n : document.location\n items.set(ref, [...items.get(ref) || [], result])\n }\n return items\n }, new Map())\n\n /* Generate search suggestions, if desired */\n let suggestions: string[] | undefined\n if (this.options.suggestions) {\n const titles = this.index.query(builder => {\n for (const clause of clauses)\n builder.term(clause.term, {\n fields: [\"title\"],\n presence: lunr.Query.presence.REQUIRED,\n wildcard: lunr.Query.wildcard.TRAILING\n })\n })\n\n /* Retrieve suggestions for best match */\n suggestions = titles.length\n ? 
Object.keys(titles[0].matchData.metadata)\n : []\n }\n\n /* Return items and suggestions */\n return {\n items: [...groups.values()],\n ...typeof suggestions !== \"undefined\" && { suggestions }\n }\n\n /* Log errors to console (for now) */\n } catch {\n console.warn(`Invalid query: ${query} \u2013 see https://bit.ly/2s3ChXG`)\n }\n }\n\n /* Return nothing in case of error or empty query */\n return { items: [] }\n }\n}\n"], + "mappings": "glCAAA,IAAAA,GAAAC,EAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA,IAME,UAAU,CAiCZ,IAAIC,EAAO,SAAUC,EAAQ,CAC3B,IAAIC,EAAU,IAAIF,EAAK,QAEvB,OAAAE,EAAQ,SAAS,IACfF,EAAK,QACLA,EAAK,eACLA,EAAK,OACP,EAEAE,EAAQ,eAAe,IACrBF,EAAK,OACP,EAEAC,EAAO,KAAKC,EAASA,CAAO,EACrBA,EAAQ,MAAM,CACvB,EAEAF,EAAK,QAAU,QACf;AAAA;AAAA;AAAA,GASAA,EAAK,MAAQ,CAAC,EASdA,EAAK,MAAM,KAAQ,SAAUG,EAAQ,CAEnC,OAAO,SAAUC,EAAS,CACpBD,EAAO,SAAW,QAAQ,MAC5B,QAAQ,KAAKC,CAAO,CAExB,CAEF,EAAG,IAAI,EAaPJ,EAAK,MAAM,SAAW,SAAUK,EAAK,CACnC,OAAsBA,GAAQ,KACrB,GAEAA,EAAI,SAAS,CAExB,EAkBAL,EAAK,MAAM,MAAQ,SAAUK,EAAK,CAChC,GAAIA,GAAQ,KACV,OAAOA,EAMT,QAHIC,EAAQ,OAAO,OAAO,IAAI,EAC1BC,EAAO,OAAO,KAAKF,CAAG,EAEjB,EAAI,EAAG,EAAIE,EAAK,OAAQ,IAAK,CACpC,IAAIC,EAAMD,EAAK,GACXE,EAAMJ,EAAIG,GAEd,GAAI,MAAM,QAAQC,CAAG,EAAG,CACtBH,EAAME,GAAOC,EAAI,MAAM,EACvB,QACF,CAEA,GAAI,OAAOA,GAAQ,UACf,OAAOA,GAAQ,UACf,OAAOA,GAAQ,UAAW,CAC5BH,EAAME,GAAOC,EACb,QACF,CAEA,MAAM,IAAI,UAAU,uDAAuD,CAC7E,CAEA,OAAOH,CACT,EACAN,EAAK,SAAW,SAAUU,EAAQC,EAAWC,EAAa,CACxD,KAAK,OAASF,EACd,KAAK,UAAYC,EACjB,KAAK,aAAeC,CACtB,EAEAZ,EAAK,SAAS,OAAS,IAEvBA,EAAK,SAAS,WAAa,SAAUa,EAAG,CACtC,IAAIC,EAAID,EAAE,QAAQb,EAAK,SAAS,MAAM,EAEtC,GAAIc,IAAM,GACR,KAAM,6BAGR,IAAIC,EAAWF,EAAE,MAAM,EAAGC,CAAC,EACvBJ,EAASG,EAAE,MAAMC,EAAI,CAAC,EAE1B,OAAO,IAAId,EAAK,SAAUU,EAAQK,EAAUF,CAAC,CAC/C,EAEAb,EAAK,SAAS,UAAU,SAAW,UAAY,CAC7C,OAAI,KAAK,cAAgB,OACvB,KAAK,aAAe,KAAK,UAAYA,EAAK,SAAS,OAAS,KAAK,QAG5D,KAAK,YACd,EACA;AAAA;AAAA;AAAA,GAUAA,EAAK,IAAM,SAAUgB,EAAU,CAG7B,GAFA,KAAK,SAAW,OAAO,OAAO,IAAI,EAE9BA,EAAU,CACZ,KAAK,OAASA,EAAS,OAEvB,QAASC,EAAI,EAAGA,EAAI,KAAK,OAAQA,IAC/B,KAAK,SAASD,EAASC,IAAM,EAEjC,MACE,KAAK,OAAS,CAElB,EASAjB,EAAK,IAAI,SAAW,CAClB,UAAW,SAAUkB,EAAO,CAC1B,OAAOA,CACT,EAEA,MAAO,UAAY,CACjB,OAAO,IACT,EAEA,SAAU,UAAY,CACpB,MAAO,EACT,CACF,EASAlB,EAAK,IAAI,MAAQ,CACf,UAAW,UAAY,CACrB,OAAO,IACT,EAEA,MAAO,SAAUkB,EAAO,CACtB,OAAOA,CACT,EAEA,SAAU,UAAY,CACpB,MAAO,EACT,CACF,EAQAlB,EAAK,IAAI,UAAU,SAAW,SAAUmB,EAAQ,CAC9C,MAAO,CAAC,CAAC,KAAK,SAASA,EACzB,EAUAnB,EAAK,IAAI,UAAU,UAAY,SAAUkB,EAAO,CAC9C,IAAIE,EAAGC,EAAGL,EAAUM,EAAe,CAAC,EAEpC,GAAIJ,IAAUlB,EAAK,IAAI,SACrB,OAAO,KAGT,GAAIkB,IAAUlB,EAAK,IAAI,MACrB,OAAOkB,EAGL,KAAK,OAASA,EAAM,QACtBE,EAAI,KACJC,EAAIH,IAEJE,EAAIF,EACJG,EAAI,MAGNL,EAAW,OAAO,KAAKI,EAAE,QAAQ,EAEjC,QAASH,EAAI,EAAGA,EAAID,EAAS,OAAQC,IAAK,CACxC,IAAIM,EAAUP,EAASC,GACnBM,KAAWF,EAAE,UACfC,EAAa,KAAKC,CAAO,CAE7B,CAEA,OAAO,IAAIvB,EAAK,IAAKsB,CAAY,CACnC,EASAtB,EAAK,IAAI,UAAU,MAAQ,SAAUkB,EAAO,CAC1C,OAAIA,IAAUlB,EAAK,IAAI,SACdA,EAAK,IAAI,SAGdkB,IAAUlB,EAAK,IAAI,MACd,KAGF,IAAIA,EAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE,OAAO,OAAO,KAAKkB,EAAM,QAAQ,CAAC,CAAC,CACpF,EASAlB,EAAK,IAAM,SAAUwB,EAASC,EAAe,CAC3C,IAAIC,EAAoB,EAExB,QAASf,KAAaa,EAChBb,GAAa,WACjBe,GAAqB,OAAO,KAAKF,EAAQb,EAAU,EAAE,QAGvD,IAAIgB,GAAKF,EAAgBC,EAAoB,KAAQA,EAAoB,IAEzE,OAAO,KAAK,IAAI,EAAI,KAAK,IAAIC,CAAC,CAAC,CACjC,EAUA3B,EAAK,MAAQ,SAAU4B,EAAKC,EAAU,CACpC,KAAK,IAAMD,GAAO,GAClB,KAAK,SAAWC,GAAY,CAAC,CAC/B,EAOA7B,EAAK,MAAM,UAAU,SAAW,UAAY,CAC1C,OAAO,KAAK,GACd,EAsBAA,EAAK,MAAM,UAAU,OAAS,SAAU8B,EAAI,CAC1C,YAAK,IAAMA,EAAG,KAAK,IAAK,KAAK,QAAQ,EAC9B,IACT,EASA9B,EAAK,MAAM,UAAU,MAAQ,SAAU8B,EAAI,CACzC,OAAAA,EAAKA,GAAM,SAAUjB,EAAG,CAAE,OAAOA,CAAE,EAC5B,IAAIb,E
AAK,MAAO8B,EAAG,KAAK,IAAK,KAAK,QAAQ,EAAG,KAAK,QAAQ,CACnE,EACA;AAAA;AAAA;AAAA,GAuBA9B,EAAK,UAAY,SAAUK,EAAKwB,EAAU,CACxC,GAAIxB,GAAO,MAAQA,GAAO,KACxB,MAAO,CAAC,EAGV,GAAI,MAAM,QAAQA,CAAG,EACnB,OAAOA,EAAI,IAAI,SAAU0B,EAAG,CAC1B,OAAO,IAAI/B,EAAK,MACdA,EAAK,MAAM,SAAS+B,CAAC,EAAE,YAAY,EACnC/B,EAAK,MAAM,MAAM6B,CAAQ,CAC3B,CACF,CAAC,EAOH,QAJID,EAAMvB,EAAI,SAAS,EAAE,YAAY,EACjC2B,EAAMJ,EAAI,OACVK,EAAS,CAAC,EAELC,EAAW,EAAGC,EAAa,EAAGD,GAAYF,EAAKE,IAAY,CAClE,IAAIE,EAAOR,EAAI,OAAOM,CAAQ,EAC1BG,EAAcH,EAAWC,EAE7B,GAAKC,EAAK,MAAMpC,EAAK,UAAU,SAAS,GAAKkC,GAAYF,EAAM,CAE7D,GAAIK,EAAc,EAAG,CACnB,IAAIC,EAAgBtC,EAAK,MAAM,MAAM6B,CAAQ,GAAK,CAAC,EACnDS,EAAc,SAAc,CAACH,EAAYE,CAAW,EACpDC,EAAc,MAAWL,EAAO,OAEhCA,EAAO,KACL,IAAIjC,EAAK,MACP4B,EAAI,MAAMO,EAAYD,CAAQ,EAC9BI,CACF,CACF,CACF,CAEAH,EAAaD,EAAW,CAC1B,CAEF,CAEA,OAAOD,CACT,EASAjC,EAAK,UAAU,UAAY,UAC3B;AAAA;AAAA;AAAA,GAkCAA,EAAK,SAAW,UAAY,CAC1B,KAAK,OAAS,CAAC,CACjB,EAEAA,EAAK,SAAS,oBAAsB,OAAO,OAAO,IAAI,EAmCtDA,EAAK,SAAS,iBAAmB,SAAU8B,EAAIS,EAAO,CAChDA,KAAS,KAAK,qBAChBvC,EAAK,MAAM,KAAK,6CAA+CuC,CAAK,EAGtET,EAAG,MAAQS,EACXvC,EAAK,SAAS,oBAAoB8B,EAAG,OAASA,CAChD,EAQA9B,EAAK,SAAS,4BAA8B,SAAU8B,EAAI,CACxD,IAAIU,EAAeV,EAAG,OAAUA,EAAG,SAAS,KAAK,oBAE5CU,GACHxC,EAAK,MAAM,KAAK;AAAA,EAAmG8B,CAAE,CAEzH,EAYA9B,EAAK,SAAS,KAAO,SAAUyC,EAAY,CACzC,IAAIC,EAAW,IAAI1C,EAAK,SAExB,OAAAyC,EAAW,QAAQ,SAAUE,EAAQ,CACnC,IAAIb,EAAK9B,EAAK,SAAS,oBAAoB2C,GAE3C,GAAIb,EACFY,EAAS,IAAIZ,CAAE,MAEf,OAAM,IAAI,MAAM,sCAAwCa,CAAM,CAElE,CAAC,EAEMD,CACT,EASA1C,EAAK,SAAS,UAAU,IAAM,UAAY,CACxC,IAAI4C,EAAM,MAAM,UAAU,MAAM,KAAK,SAAS,EAE9CA,EAAI,QAAQ,SAAUd,EAAI,CACxB9B,EAAK,SAAS,4BAA4B8B,CAAE,EAC5C,KAAK,OAAO,KAAKA,CAAE,CACrB,EAAG,IAAI,CACT,EAWA9B,EAAK,SAAS,UAAU,MAAQ,SAAU6C,EAAYC,EAAO,CAC3D9C,EAAK,SAAS,4BAA4B8C,CAAK,EAE/C,IAAIC,EAAM,KAAK,OAAO,QAAQF,CAAU,EACxC,GAAIE,GAAO,GACT,MAAM,IAAI,MAAM,wBAAwB,EAG1CA,EAAMA,EAAM,EACZ,KAAK,OAAO,OAAOA,EAAK,EAAGD,CAAK,CAClC,EAWA9C,EAAK,SAAS,UAAU,OAAS,SAAU6C,EAAYC,EAAO,CAC5D9C,EAAK,SAAS,4BAA4B8C,CAAK,EAE/C,IAAIC,EAAM,KAAK,OAAO,QAAQF,CAAU,EACxC,GAAIE,GAAO,GACT,MAAM,IAAI,MAAM,wBAAwB,EAG1C,KAAK,OAAO,OAAOA,EAAK,EAAGD,CAAK,CAClC,EAOA9C,EAAK,SAAS,UAAU,OAAS,SAAU8B,EAAI,CAC7C,IAAIiB,EAAM,KAAK,OAAO,QAAQjB,CAAE,EAC5BiB,GAAO,IAIX,KAAK,OAAO,OAAOA,EAAK,CAAC,CAC3B,EASA/C,EAAK,SAAS,UAAU,IAAM,SAAUiC,EAAQ,CAG9C,QAFIe,EAAc,KAAK,OAAO,OAErB/B,EAAI,EAAGA,EAAI+B,EAAa/B,IAAK,CAIpC,QAHIa,EAAK,KAAK,OAAOb,GACjBgC,EAAO,CAAC,EAEHC,EAAI,EAAGA,EAAIjB,EAAO,OAAQiB,IAAK,CACtC,IAAIC,EAASrB,EAAGG,EAAOiB,GAAIA,EAAGjB,CAAM,EAEpC,GAAI,EAAAkB,GAAW,MAA6BA,IAAW,IAEvD,GAAI,MAAM,QAAQA,CAAM,EACtB,QAASC,EAAI,EAAGA,EAAID,EAAO,OAAQC,IACjCH,EAAK,KAAKE,EAAOC,EAAE,OAGrBH,EAAK,KAAKE,CAAM,CAEpB,CAEAlB,EAASgB,CACX,CAEA,OAAOhB,CACT,EAYAjC,EAAK,SAAS,UAAU,UAAY,SAAU4B,EAAKC,EAAU,CAC3D,IAAIwB,EAAQ,IAAIrD,EAAK,MAAO4B,EAAKC,CAAQ,EAEzC,OAAO,KAAK,IAAI,CAACwB,CAAK,CAAC,EAAE,IAAI,SAAUtB,EAAG,CACxC,OAAOA,EAAE,SAAS,CACpB,CAAC,CACH,EAMA/B,EAAK,SAAS,UAAU,MAAQ,UAAY,CAC1C,KAAK,OAAS,CAAC,CACjB,EASAA,EAAK,SAAS,UAAU,OAAS,UAAY,CAC3C,OAAO,KAAK,OAAO,IAAI,SAAU8B,EAAI,CACnC,OAAA9B,EAAK,SAAS,4BAA4B8B,CAAE,EAErCA,EAAG,KACZ,CAAC,CACH,EACA;AAAA;AAAA;AAAA,GAqBA9B,EAAK,OAAS,SAAUgB,EAAU,CAChC,KAAK,WAAa,EAClB,KAAK,SAAWA,GAAY,CAAC,CAC/B,EAaAhB,EAAK,OAAO,UAAU,iBAAmB,SAAUsD,EAAO,CAExD,GAAI,KAAK,SAAS,QAAU,EAC1B,MAAO,GAST,QANIC,EAAQ,EACRC,EAAM,KAAK,SAAS,OAAS,EAC7BnB,EAAcmB,EAAMD,EACpBE,EAAa,KAAK,MAAMpB,EAAc,CAAC,EACvCqB,EAAa,KAAK,SAASD,EAAa,GAErCpB,EAAc,IACfqB,EAAaJ,IACfC,EAAQE,GAGNC,EAAaJ,IACfE,EAAMC,GAGJC,GAAcJ,IAIlBjB,EAAcmB,EAAMD,EACpBE,EAAaF,EAAQ,KAAK,MAAMlB,EAAc,CAAC,EAC/CqB,EAAa,KAAK,SAASD,EAAa,GAO1C,GAJIC,GAAcJ,GAIdI,EAAaJ,EACf,OAAOG,EAAa,EAGtB,GAAIC,EAAaJ,EACf,OAAQG,EAA
a,GAAK,CAE9B,EAWAzD,EAAK,OAAO,UAAU,OAAS,SAAU2D,EAAWlD,EAAK,CACvD,KAAK,OAAOkD,EAAWlD,EAAK,UAAY,CACtC,KAAM,iBACR,CAAC,CACH,EAUAT,EAAK,OAAO,UAAU,OAAS,SAAU2D,EAAWlD,EAAKqB,EAAI,CAC3D,KAAK,WAAa,EAClB,IAAI8B,EAAW,KAAK,iBAAiBD,CAAS,EAE1C,KAAK,SAASC,IAAaD,EAC7B,KAAK,SAASC,EAAW,GAAK9B,EAAG,KAAK,SAAS8B,EAAW,GAAInD,CAAG,EAEjE,KAAK,SAAS,OAAOmD,EAAU,EAAGD,EAAWlD,CAAG,CAEpD,EAOAT,EAAK,OAAO,UAAU,UAAY,UAAY,CAC5C,GAAI,KAAK,WAAY,OAAO,KAAK,WAKjC,QAHI6D,EAAe,EACfC,EAAiB,KAAK,SAAS,OAE1B7C,EAAI,EAAGA,EAAI6C,EAAgB7C,GAAK,EAAG,CAC1C,IAAIR,EAAM,KAAK,SAASQ,GACxB4C,GAAgBpD,EAAMA,CACxB,CAEA,OAAO,KAAK,WAAa,KAAK,KAAKoD,CAAY,CACjD,EAQA7D,EAAK,OAAO,UAAU,IAAM,SAAU+D,EAAa,CAOjD,QANIC,EAAa,EACb5C,EAAI,KAAK,SAAUC,EAAI0C,EAAY,SACnCE,EAAO7C,EAAE,OAAQ8C,EAAO7C,EAAE,OAC1B8C,EAAO,EAAGC,EAAO,EACjBnD,EAAI,EAAGiC,EAAI,EAERjC,EAAIgD,GAAQf,EAAIgB,GACrBC,EAAO/C,EAAEH,GAAImD,EAAO/C,EAAE6B,GAClBiB,EAAOC,EACTnD,GAAK,EACIkD,EAAOC,EAChBlB,GAAK,EACIiB,GAAQC,IACjBJ,GAAc5C,EAAEH,EAAI,GAAKI,EAAE6B,EAAI,GAC/BjC,GAAK,EACLiC,GAAK,GAIT,OAAOc,CACT,EASAhE,EAAK,OAAO,UAAU,WAAa,SAAU+D,EAAa,CACxD,OAAO,KAAK,IAAIA,CAAW,EAAI,KAAK,UAAU,GAAK,CACrD,EAOA/D,EAAK,OAAO,UAAU,QAAU,UAAY,CAG1C,QAFIqE,EAAS,IAAI,MAAO,KAAK,SAAS,OAAS,CAAC,EAEvCpD,EAAI,EAAGiC,EAAI,EAAGjC,EAAI,KAAK,SAAS,OAAQA,GAAK,EAAGiC,IACvDmB,EAAOnB,GAAK,KAAK,SAASjC,GAG5B,OAAOoD,CACT,EAOArE,EAAK,OAAO,UAAU,OAAS,UAAY,CACzC,OAAO,KAAK,QACd,EAEA;AAAA;AAAA;AAAA;AAAA,GAiBAA,EAAK,QAAW,UAAU,CACxB,IAAIsE,EAAY,CACZ,QAAY,MACZ,OAAW,OACX,KAAS,OACT,KAAS,OACT,KAAS,MACT,IAAQ,MACR,KAAS,KACT,MAAU,MACV,IAAQ,IACR,MAAU,MACV,QAAY,MACZ,MAAU,MACV,KAAS,MACT,MAAU,KACV,QAAY,MACZ,QAAY,MACZ,QAAY,MACZ,MAAU,KACV,MAAU,MACV,OAAW,MACX,KAAS,KACX,EAEAC,EAAY,CACV,MAAU,KACV,MAAU,GACV,MAAU,KACV,MAAU,KACV,KAAS,KACT,IAAQ,GACR,KAAS,EACX,EAEAC,EAAI,WACJC,EAAI,WACJC,EAAIF,EAAI,aACRG,EAAIF,EAAI,WAERG,EAAO,KAAOF,EAAI,KAAOC,EAAID,EAC7BG,EAAO,KAAOH,EAAI,KAAOC,EAAID,EAAI,IAAMC,EAAI,MAC3CG,EAAO,KAAOJ,EAAI,KAAOC,EAAID,EAAIC,EAAID,EACrCK,EAAM,KAAOL,EAAI,KAAOD,EAEtBO,EAAU,IAAI,OAAOJ,CAAI,EACzBK,EAAU,IAAI,OAAOH,CAAI,EACzBI,EAAU,IAAI,OAAOL,CAAI,EACzBM,EAAS,IAAI,OAAOJ,CAAG,EAEvBK,EAAQ,kBACRC,EAAS,iBACTC,EAAQ,aACRC,EAAS,kBACTC,EAAU,KACVC,EAAW,cACXC,EAAW,IAAI,OAAO,oBAAoB,EAC1CC,EAAW,IAAI,OAAO,IAAMjB,EAAID,EAAI,cAAc,EAElDmB,EAAQ,mBACRC,EAAO,2IAEPC,EAAO,iDAEPC,EAAO,sFACPC,EAAQ,oBAERC,EAAO,WACPC,EAAS,MACTC,EAAQ,IAAI,OAAO,IAAMzB,EAAID,EAAI,cAAc,EAE/C2B,EAAgB,SAAuBC,EAAG,CAC5C,IAAIC,EACFC,EACAC,EACAC,EACAC,EACAC,EACAC,EAEF,GAAIP,EAAE,OAAS,EAAK,OAAOA,EAiB3B,GAfAG,EAAUH,EAAE,OAAO,EAAE,CAAC,EAClBG,GAAW,MACbH,EAAIG,EAAQ,YAAY,EAAIH,EAAE,OAAO,CAAC,GAIxCI,EAAKrB,EACLsB,EAAMrB,EAEFoB,EAAG,KAAKJ,CAAC,EAAKA,EAAIA,EAAE,QAAQI,EAAG,MAAM,EAChCC,EAAI,KAAKL,CAAC,IAAKA,EAAIA,EAAE,QAAQK,EAAI,MAAM,GAGhDD,EAAKnB,EACLoB,EAAMnB,EACFkB,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBI,EAAKzB,EACDyB,EAAG,KAAKI,EAAG,EAAE,IACfJ,EAAKjB,EACLa,EAAIA,EAAE,QAAQI,EAAG,EAAE,EAEvB,SAAWC,EAAI,KAAKL,CAAC,EAAG,CACtB,IAAIQ,EAAKH,EAAI,KAAKL,CAAC,EACnBC,EAAOO,EAAG,GACVH,EAAMvB,EACFuB,EAAI,KAAKJ,CAAI,IACfD,EAAIC,EACJI,EAAMjB,EACNkB,EAAMjB,EACNkB,EAAMjB,EACFe,EAAI,KAAKL,CAAC,EAAKA,EAAIA,EAAI,IAClBM,EAAI,KAAKN,CAAC,GAAKI,EAAKjB,EAASa,EAAIA,EAAE,QAAQI,EAAG,EAAE,GAChDG,EAAI,KAAKP,CAAC,IAAKA,EAAIA,EAAI,KAEpC,CAIA,GADAI,EAAKb,EACDa,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVR,EAAIC,EAAO,GACb,CAIA,GADAG,EAAKZ,EACDY,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVN,EAASM,EAAG,GACZJ,EAAKzB,EACDyB,EAAG,KAAKH,CAAI,IACdD,EAAIC,EAAOhC,EAAUiC,GAEzB,CAIA,GADAE,EAAKX,EACDW,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVN,EAA
SM,EAAG,GACZJ,EAAKzB,EACDyB,EAAG,KAAKH,CAAI,IACdD,EAAIC,EAAO/B,EAAUgC,GAEzB,CAKA,GAFAE,EAAKV,EACLW,EAAMV,EACFS,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVJ,EAAKxB,EACDwB,EAAG,KAAKH,CAAI,IACdD,EAAIC,EAER,SAAWI,EAAI,KAAKL,CAAC,EAAG,CACtB,IAAIQ,EAAKH,EAAI,KAAKL,CAAC,EACnBC,EAAOO,EAAG,GAAKA,EAAG,GAClBH,EAAMzB,EACFyB,EAAI,KAAKJ,CAAI,IACfD,EAAIC,EAER,CAIA,GADAG,EAAKR,EACDQ,EAAG,KAAKJ,CAAC,EAAG,CACd,IAAIQ,EAAKJ,EAAG,KAAKJ,CAAC,EAClBC,EAAOO,EAAG,GACVJ,EAAKxB,EACLyB,EAAMxB,EACNyB,EAAMR,GACFM,EAAG,KAAKH,CAAI,GAAMI,EAAI,KAAKJ,CAAI,GAAK,CAAEK,EAAI,KAAKL,CAAI,KACrDD,EAAIC,EAER,CAEA,OAAAG,EAAKP,EACLQ,EAAMzB,EACFwB,EAAG,KAAKJ,CAAC,GAAKK,EAAI,KAAKL,CAAC,IAC1BI,EAAKjB,EACLa,EAAIA,EAAE,QAAQI,EAAG,EAAE,GAKjBD,GAAW,MACbH,EAAIG,EAAQ,YAAY,EAAIH,EAAE,OAAO,CAAC,GAGjCA,CACT,EAEA,OAAO,SAAUhD,EAAO,CACtB,OAAOA,EAAM,OAAO+C,CAAa,CACnC,CACF,EAAG,EAEHpG,EAAK,SAAS,iBAAiBA,EAAK,QAAS,SAAS,EACtD;AAAA;AAAA;AAAA,GAkBAA,EAAK,uBAAyB,SAAU8G,EAAW,CACjD,IAAIC,EAAQD,EAAU,OAAO,SAAU7D,EAAM+D,EAAU,CACrD,OAAA/D,EAAK+D,GAAYA,EACV/D,CACT,EAAG,CAAC,CAAC,EAEL,OAAO,SAAUI,EAAO,CACtB,GAAIA,GAAS0D,EAAM1D,EAAM,SAAS,KAAOA,EAAM,SAAS,EAAG,OAAOA,CACpE,CACF,EAeArD,EAAK,eAAiBA,EAAK,uBAAuB,CAChD,IACA,OACA,QACA,SACA,QACA,MACA,SACA,OACA,KACA,QACA,KACA,MACA,MACA,MACA,KACA,KACA,KACA,UACA,OACA,MACA,KACA,MACA,SACA,QACA,OACA,MACA,KACA,OACA,SACA,OACA,OACA,QACA,MACA,OACA,MACA,MACA,MACA,MACA,OACA,KACA,MACA,OACA,MACA,MACA,MACA,UACA,IACA,KACA,KACA,OACA,KACA,KACA,MACA,OACA,QACA,MACA,OACA,SACA,MACA,KACA,QACA,OACA,OACA,KACA,UACA,KACA,MACA,MACA,KACA,MACA,QACA,KACA,OACA,KACA,QACA,MACA,MACA,SACA,OACA,MACA,OACA,MACA,SACA,QACA,KACA,OACA,OACA,OACA,MACA,QACA,OACA,OACA,QACA,QACA,OACA,OACA,MACA,KACA,MACA,OACA,KACA,QACA,MACA,KACA,OACA,OACA,OACA,QACA,QACA,QACA,MACA,OACA,MACA,OACA,OACA,QACA,MACA,MACA,MACF,CAAC,EAEDA,EAAK,SAAS,iBAAiBA,EAAK,eAAgB,gBAAgB,EACpE;AAAA;AAAA;AAAA,GAoBAA,EAAK,QAAU,SAAUqD,EAAO,CAC9B,OAAOA,EAAM,OAAO,SAAUxC,EAAG,CAC/B,OAAOA,EAAE,QAAQ,OAAQ,EAAE,EAAE,QAAQ,OAAQ,EAAE,CACjD,CAAC,CACH,EAEAb,EAAK,SAAS,iBAAiBA,EAAK,QAAS,SAAS,EACtD;AAAA;AAAA;AAAA,GA0BAA,EAAK,SAAW,UAAY,CAC1B,KAAK,MAAQ,GACb,KAAK,MAAQ,CAAC,EACd,KAAK,GAAKA,EAAK,SAAS,QACxBA,EAAK,SAAS,SAAW,CAC3B,EAUAA,EAAK,SAAS,QAAU,EASxBA,EAAK,SAAS,UAAY,SAAUiH,EAAK,CAGvC,QAFI/G,EAAU,IAAIF,EAAK,SAAS,QAEvBiB,EAAI,EAAGe,EAAMiF,EAAI,OAAQhG,EAAIe,EAAKf,IACzCf,EAAQ,OAAO+G,EAAIhG,EAAE,EAGvB,OAAAf,EAAQ,OAAO,EACRA,EAAQ,IACjB,EAWAF,EAAK,SAAS,WAAa,SAAUkH,EAAQ,CAC3C,MAAI,iBAAkBA,EACblH,EAAK,SAAS,gBAAgBkH,EAAO,KAAMA,EAAO,YAAY,EAE9DlH,EAAK,SAAS,WAAWkH,EAAO,IAAI,CAE/C,EAiBAlH,EAAK,SAAS,gBAAkB,SAAU4B,EAAKuF,EAAc,CAS3D,QARIC,EAAO,IAAIpH,EAAK,SAEhBqH,EAAQ,CAAC,CACX,KAAMD,EACN,eAAgBD,EAChB,IAAKvF,CACP,CAAC,EAEMyF,EAAM,QAAQ,CACnB,IAAIC,EAAQD,EAAM,IAAI,EAGtB,GAAIC,EAAM,IAAI,OAAS,EAAG,CACxB,IAAIlF,EAAOkF,EAAM,IAAI,OAAO,CAAC,EACzBC,EAEAnF,KAAQkF,EAAM,KAAK,MACrBC,EAAaD,EAAM,KAAK,MAAMlF,IAE9BmF,EAAa,IAAIvH,EAAK,SACtBsH,EAAM,KAAK,MAAMlF,GAAQmF,GAGvBD,EAAM,IAAI,QAAU,IACtBC,EAAW,MAAQ,IAGrBF,EAAM,KAAK,CACT,KAAME,EACN,eAAgBD,EAAM,eACtB,IAAKA,EAAM,IAAI,MAAM,CAAC,CACxB,CAAC,CACH,CAEA,GAAIA,EAAM,gBAAkB,EAK5B,IAAI,MAAOA,EAAM,KAAK,MACpB,IAAIE,EAAgBF,EAAM,KAAK,MAAM,SAChC,CACL,IAAIE,EAAgB,IAAIxH,EAAK,SAC7BsH,EAAM,KAAK,MAAM,KAAOE,CAC1B,CAgCA,GA9BIF,EAAM,IAAI,QAAU,IACtBE,EAAc,MAAQ,IAGxBH,EAAM,KAAK,CACT,KAAMG,EACN,eAAgBF,EAAM,eAAiB,EACvC,IAAKA,EAAM,GACb,CAAC,EAKGA,EAAM,IAAI,OAAS,GACrBD,EAAM,KAAK,CACT,KAAMC,EAAM,KACZ,eAAgBA,EAAM,eAAiB,EACvC,IAAKA,EAAM,IAAI,MAAM,CAAC,CACxB,CAAC,EAKCA,EAAM,IAAI,QAAU,IACtBA,EAAM,KAAK,MAAQ,IAMjBA,EAAM,IAAI,QAAU,EAAG,CACzB,GAAI,MAAOA,EAAM,KAAK,MACpB,IAAIG,EAAmBH,EAAM,KAAK,MAAM,SACnC,CACL,IAAIG,EAAmB,I
AAIzH,EAAK,SAChCsH,EAAM,KAAK,MAAM,KAAOG,CAC1B,CAEIH,EAAM,IAAI,QAAU,IACtBG,EAAiB,MAAQ,IAG3BJ,EAAM,KAAK,CACT,KAAMI,EACN,eAAgBH,EAAM,eAAiB,EACvC,IAAKA,EAAM,IAAI,MAAM,CAAC,CACxB,CAAC,CACH,CAKA,GAAIA,EAAM,IAAI,OAAS,EAAG,CACxB,IAAII,EAAQJ,EAAM,IAAI,OAAO,CAAC,EAC1BK,EAAQL,EAAM,IAAI,OAAO,CAAC,EAC1BM,EAEAD,KAASL,EAAM,KAAK,MACtBM,EAAgBN,EAAM,KAAK,MAAMK,IAEjCC,EAAgB,IAAI5H,EAAK,SACzBsH,EAAM,KAAK,MAAMK,GAASC,GAGxBN,EAAM,IAAI,QAAU,IACtBM,EAAc,MAAQ,IAGxBP,EAAM,KAAK,CACT,KAAMO,EACN,eAAgBN,EAAM,eAAiB,EACvC,IAAKI,EAAQJ,EAAM,IAAI,MAAM,CAAC,CAChC,CAAC,CACH,EACF,CAEA,OAAOF,CACT,EAYApH,EAAK,SAAS,WAAa,SAAU4B,EAAK,CAYxC,QAXIiG,EAAO,IAAI7H,EAAK,SAChBoH,EAAOS,EAUF,EAAI,EAAG7F,EAAMJ,EAAI,OAAQ,EAAII,EAAK,IAAK,CAC9C,IAAII,EAAOR,EAAI,GACXkG,EAAS,GAAK9F,EAAM,EAExB,GAAII,GAAQ,IACVyF,EAAK,MAAMzF,GAAQyF,EACnBA,EAAK,MAAQC,MAER,CACL,IAAIC,EAAO,IAAI/H,EAAK,SACpB+H,EAAK,MAAQD,EAEbD,EAAK,MAAMzF,GAAQ2F,EACnBF,EAAOE,CACT,CACF,CAEA,OAAOX,CACT,EAYApH,EAAK,SAAS,UAAU,QAAU,UAAY,CAQ5C,QAPI+G,EAAQ,CAAC,EAETM,EAAQ,CAAC,CACX,OAAQ,GACR,KAAM,IACR,CAAC,EAEMA,EAAM,QAAQ,CACnB,IAAIC,EAAQD,EAAM,IAAI,EAClBW,EAAQ,OAAO,KAAKV,EAAM,KAAK,KAAK,EACpCtF,EAAMgG,EAAM,OAEZV,EAAM,KAAK,QAKbA,EAAM,OAAO,OAAO,CAAC,EACrBP,EAAM,KAAKO,EAAM,MAAM,GAGzB,QAASrG,EAAI,EAAGA,EAAIe,EAAKf,IAAK,CAC5B,IAAIgH,EAAOD,EAAM/G,GAEjBoG,EAAM,KAAK,CACT,OAAQC,EAAM,OAAO,OAAOW,CAAI,EAChC,KAAMX,EAAM,KAAK,MAAMW,EACzB,CAAC,CACH,CACF,CAEA,OAAOlB,CACT,EAYA/G,EAAK,SAAS,UAAU,SAAW,UAAY,CAS7C,GAAI,KAAK,KACP,OAAO,KAAK,KAOd,QAJI4B,EAAM,KAAK,MAAQ,IAAM,IACzBsG,EAAS,OAAO,KAAK,KAAK,KAAK,EAAE,KAAK,EACtClG,EAAMkG,EAAO,OAER,EAAI,EAAG,EAAIlG,EAAK,IAAK,CAC5B,IAAIO,EAAQ2F,EAAO,GACfL,EAAO,KAAK,MAAMtF,GAEtBX,EAAMA,EAAMW,EAAQsF,EAAK,EAC3B,CAEA,OAAOjG,CACT,EAYA5B,EAAK,SAAS,UAAU,UAAY,SAAUqB,EAAG,CAU/C,QATIgD,EAAS,IAAIrE,EAAK,SAClBsH,EAAQ,OAERD,EAAQ,CAAC,CACX,MAAOhG,EACP,OAAQgD,EACR,KAAM,IACR,CAAC,EAEMgD,EAAM,QAAQ,CACnBC,EAAQD,EAAM,IAAI,EAWlB,QALIc,EAAS,OAAO,KAAKb,EAAM,MAAM,KAAK,EACtCc,EAAOD,EAAO,OACdE,EAAS,OAAO,KAAKf,EAAM,KAAK,KAAK,EACrCgB,EAAOD,EAAO,OAETE,EAAI,EAAGA,EAAIH,EAAMG,IAGxB,QAFIC,EAAQL,EAAOI,GAEVzH,EAAI,EAAGA,EAAIwH,EAAMxH,IAAK,CAC7B,IAAI2H,EAAQJ,EAAOvH,GAEnB,GAAI2H,GAASD,GAASA,GAAS,IAAK,CAClC,IAAIX,EAAOP,EAAM,KAAK,MAAMmB,GACxBC,EAAQpB,EAAM,MAAM,MAAMkB,GAC1BV,EAAQD,EAAK,OAASa,EAAM,MAC5BX,EAAO,OAEPU,KAASnB,EAAM,OAAO,OAIxBS,EAAOT,EAAM,OAAO,MAAMmB,GAC1BV,EAAK,MAAQA,EAAK,OAASD,IAM3BC,EAAO,IAAI/H,EAAK,SAChB+H,EAAK,MAAQD,EACbR,EAAM,OAAO,MAAMmB,GAASV,GAG9BV,EAAM,KAAK,CACT,MAAOqB,EACP,OAAQX,EACR,KAAMF,CACR,CAAC,CACH,CACF,CAEJ,CAEA,OAAOxD,CACT,EACArE,EAAK,SAAS,QAAU,UAAY,CAClC,KAAK,aAAe,GACpB,KAAK,KAAO,IAAIA,EAAK,SACrB,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAiB,CAAC,CACzB,EAEAA,EAAK,SAAS,QAAQ,UAAU,OAAS,SAAU2I,EAAM,CACvD,IAAId,EACAe,EAAe,EAEnB,GAAID,EAAO,KAAK,aACd,MAAM,IAAI,MAAO,6BAA6B,EAGhD,QAAS,EAAI,EAAG,EAAIA,EAAK,QAAU,EAAI,KAAK,aAAa,QACnDA,EAAK,IAAM,KAAK,aAAa,GAD8B,IAE/DC,IAGF,KAAK,SAASA,CAAY,EAEtB,KAAK,eAAe,QAAU,EAChCf,EAAO,KAAK,KAEZA,EAAO,KAAK,eAAe,KAAK,eAAe,OAAS,GAAG,MAG7D,QAAS,EAAIe,EAAc,EAAID,EAAK,OAAQ,IAAK,CAC/C,IAAIE,EAAW,IAAI7I,EAAK,SACpBoC,EAAOuG,EAAK,GAEhBd,EAAK,MAAMzF,GAAQyG,EAEnB,KAAK,eAAe,KAAK,CACvB,OAAQhB,EACR,KAAMzF,EACN,MAAOyG,CACT,CAAC,EAEDhB,EAAOgB,CACT,CAEAhB,EAAK,MAAQ,GACb,KAAK,aAAec,CACtB,EAEA3I,EAAK,SAAS,QAAQ,UAAU,OAAS,UAAY,CACnD,KAAK,SAAS,CAAC,CACjB,EAEAA,EAAK,SAAS,QAAQ,UAAU,SAAW,SAAU8I,EAAQ,CAC3D,QAAS7H,EAAI,KAAK,eAAe,OAAS,EAAGA,GAAK6H,EAAQ7H,IAAK,CAC7D,IAAI4G,EAAO,KAAK,eAAe5G,GAC3B8H,EAAWlB,EAAK,MAAM,SAAS,EAE/BkB,KAAY,KAAK,eACnBlB,EAAK,OAAO,MAAMA,EAAK,MAAQ,KAAK,eAAekB,IAInDlB,EAAK,MAAM,KAAOkB,EAElB,KAAK,eAAeA,GAAYlB,EAAK,OAGvC,KAAK,eAAe,IAAI,CAC1B,CACF,EACA;AAAA;AAAA;AAAA,GAqBA7H,EAAK,MAAQ,SAAU
gJ,EAAO,CAC5B,KAAK,cAAgBA,EAAM,cAC3B,KAAK,aAAeA,EAAM,aAC1B,KAAK,SAAWA,EAAM,SACtB,KAAK,OAASA,EAAM,OACpB,KAAK,SAAWA,EAAM,QACxB,EAyEAhJ,EAAK,MAAM,UAAU,OAAS,SAAUiJ,EAAa,CACnD,OAAO,KAAK,MAAM,SAAUC,EAAO,CACjC,IAAIC,EAAS,IAAInJ,EAAK,YAAYiJ,EAAaC,CAAK,EACpDC,EAAO,MAAM,CACf,CAAC,CACH,EA2BAnJ,EAAK,MAAM,UAAU,MAAQ,SAAU8B,EAAI,CAoBzC,QAZIoH,EAAQ,IAAIlJ,EAAK,MAAM,KAAK,MAAM,EAClCoJ,EAAiB,OAAO,OAAO,IAAI,EACnCC,EAAe,OAAO,OAAO,IAAI,EACjCC,EAAiB,OAAO,OAAO,IAAI,EACnCC,EAAkB,OAAO,OAAO,IAAI,EACpCC,EAAoB,OAAO,OAAO,IAAI,EAOjCvI,EAAI,EAAGA,EAAI,KAAK,OAAO,OAAQA,IACtCoI,EAAa,KAAK,OAAOpI,IAAM,IAAIjB,EAAK,OAG1C8B,EAAG,KAAKoH,EAAOA,CAAK,EAEpB,QAASjI,EAAI,EAAGA,EAAIiI,EAAM,QAAQ,OAAQjI,IAAK,CAS7C,IAAIiG,EAASgC,EAAM,QAAQjI,GACvBwI,EAAQ,KACRC,EAAgB1J,EAAK,IAAI,MAEzBkH,EAAO,YACTuC,EAAQ,KAAK,SAAS,UAAUvC,EAAO,KAAM,CAC3C,OAAQA,EAAO,MACjB,CAAC,EAEDuC,EAAQ,CAACvC,EAAO,IAAI,EAGtB,QAASyC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,IAAIC,EAAOH,EAAME,GAQjBzC,EAAO,KAAO0C,EAOd,IAAIC,EAAe7J,EAAK,SAAS,WAAWkH,CAAM,EAC9C4C,EAAgB,KAAK,SAAS,UAAUD,CAAY,EAAE,QAAQ,EAQlE,GAAIC,EAAc,SAAW,GAAK5C,EAAO,WAAalH,EAAK,MAAM,SAAS,SAAU,CAClF,QAASoD,EAAI,EAAGA,EAAI8D,EAAO,OAAO,OAAQ9D,IAAK,CAC7C,IAAI2G,EAAQ7C,EAAO,OAAO9D,GAC1BmG,EAAgBQ,GAAS/J,EAAK,IAAI,KACpC,CAEA,KACF,CAEA,QAASkD,EAAI,EAAGA,EAAI4G,EAAc,OAAQ5G,IASxC,QAJI8G,EAAeF,EAAc5G,GAC7B1B,EAAU,KAAK,cAAcwI,GAC7BC,EAAYzI,EAAQ,OAEf4B,EAAI,EAAGA,EAAI8D,EAAO,OAAO,OAAQ9D,IAAK,CAS7C,IAAI2G,EAAQ7C,EAAO,OAAO9D,GACtB8G,EAAe1I,EAAQuI,GACvBI,EAAuB,OAAO,KAAKD,CAAY,EAC/CE,EAAYJ,EAAe,IAAMD,EACjCM,EAAuB,IAAIrK,EAAK,IAAImK,CAAoB,EAoB5D,GAbIjD,EAAO,UAAYlH,EAAK,MAAM,SAAS,WACzC0J,EAAgBA,EAAc,MAAMW,CAAoB,EAEpDd,EAAgBQ,KAAW,SAC7BR,EAAgBQ,GAAS/J,EAAK,IAAI,WASlCkH,EAAO,UAAYlH,EAAK,MAAM,SAAS,WAAY,CACjDwJ,EAAkBO,KAAW,SAC/BP,EAAkBO,GAAS/J,EAAK,IAAI,OAGtCwJ,EAAkBO,GAASP,EAAkBO,GAAO,MAAMM,CAAoB,EAO9E,QACF,CAeA,GANAhB,EAAaU,GAAO,OAAOE,EAAW/C,EAAO,MAAO,SAAU9F,GAAGC,GAAG,CAAE,OAAOD,GAAIC,EAAE,CAAC,EAMhF,CAAAiI,EAAec,GAInB,SAASE,EAAI,EAAGA,EAAIH,EAAqB,OAAQG,IAAK,CAOpD,IAAIC,EAAsBJ,EAAqBG,GAC3CE,EAAmB,IAAIxK,EAAK,SAAUuK,EAAqBR,CAAK,EAChElI,EAAWqI,EAAaK,GACxBE,GAECA,EAAarB,EAAeoB,MAAuB,OACtDpB,EAAeoB,GAAoB,IAAIxK,EAAK,UAAWgK,EAAcD,EAAOlI,CAAQ,EAEpF4I,EAAW,IAAIT,EAAcD,EAAOlI,CAAQ,CAGhD,CAEAyH,EAAec,GAAa,GAC9B,CAEJ,CAQA,GAAIlD,EAAO,WAAalH,EAAK,MAAM,SAAS,SAC1C,QAASoD,EAAI,EAAGA,EAAI8D,EAAO,OAAO,OAAQ9D,IAAK,CAC7C,IAAI2G,EAAQ7C,EAAO,OAAO9D,GAC1BmG,EAAgBQ,GAASR,EAAgBQ,GAAO,UAAUL,CAAa,CACzE,CAEJ,CAUA,QAHIgB,EAAqB1K,EAAK,IAAI,SAC9B2K,EAAuB3K,EAAK,IAAI,MAE3BiB,EAAI,EAAGA,EAAI,KAAK,OAAO,OAAQA,IAAK,CAC3C,IAAI8I,EAAQ,KAAK,OAAO9I,GAEpBsI,EAAgBQ,KAClBW,EAAqBA,EAAmB,UAAUnB,EAAgBQ,EAAM,GAGtEP,EAAkBO,KACpBY,EAAuBA,EAAqB,MAAMnB,EAAkBO,EAAM,EAE9E,CAEA,IAAIa,EAAoB,OAAO,KAAKxB,CAAc,EAC9CyB,EAAU,CAAC,EACXC,EAAU,OAAO,OAAO,IAAI,EAYhC,GAAI5B,EAAM,UAAU,EAAG,CACrB0B,EAAoB,OAAO,KAAK,KAAK,YAAY,EAEjD,QAAS3J,EAAI,EAAGA,EAAI2J,EAAkB,OAAQ3J,IAAK,CACjD,IAAIuJ,EAAmBI,EAAkB3J,GACrCF,EAAWf,EAAK,SAAS,WAAWwK,CAAgB,EACxDpB,EAAeoB,GAAoB,IAAIxK,EAAK,SAC9C,CACF,CAEA,QAASiB,EAAI,EAAGA,EAAI2J,EAAkB,OAAQ3J,IAAK,CASjD,IAAIF,EAAWf,EAAK,SAAS,WAAW4K,EAAkB3J,EAAE,EACxDP,EAASK,EAAS,OAEtB,GAAI,EAAC2J,EAAmB,SAAShK,CAAM,GAInC,CAAAiK,EAAqB,SAASjK,CAAM,EAIxC,KAAIqK,EAAc,KAAK,aAAahK,GAChCiK,EAAQ3B,EAAatI,EAAS,WAAW,WAAWgK,CAAW,EAC/DE,EAEJ,IAAKA,EAAWH,EAAQpK,MAAa,OACnCuK,EAAS,OAASD,EAClBC,EAAS,UAAU,QAAQ7B,EAAerI,EAAS,MAC9C,CACL,IAAImK,EAAQ,CACV,IAAKxK,EACL,MAAOsK,EACP,UAAW5B,EAAerI,EAC5B,EACA+J,EAAQpK,GAAUwK,EAClBL,EAAQ,KAAKK,CAAK,CACpB,EACF,CAKA,OAAOL,EAAQ,KAAK,SAAUzJ,GAAGC,GAAG,CAClC,OAAOA,GAAE,MAAQD,GAAE,KACrB,CAAC,CACH,EAUApB,EAAK,MAAM,UAAU,OAAS,UAAY,CACxC,IAAImL,EAAgB,OAAO,KAAK,KAAK,aAAa,EAC/C,KAA
K,EACL,IAAI,SAAUvB,EAAM,CACnB,MAAO,CAACA,EAAM,KAAK,cAAcA,EAAK,CACxC,EAAG,IAAI,EAELwB,EAAe,OAAO,KAAK,KAAK,YAAY,EAC7C,IAAI,SAAUC,EAAK,CAClB,MAAO,CAACA,EAAK,KAAK,aAAaA,GAAK,OAAO,CAAC,CAC9C,EAAG,IAAI,EAET,MAAO,CACL,QAASrL,EAAK,QACd,OAAQ,KAAK,OACb,aAAcoL,EACd,cAAeD,EACf,SAAU,KAAK,SAAS,OAAO,CACjC,CACF,EAQAnL,EAAK,MAAM,KAAO,SAAUsL,EAAiB,CAC3C,IAAItC,EAAQ,CAAC,EACToC,EAAe,CAAC,EAChBG,EAAoBD,EAAgB,aACpCH,EAAgB,OAAO,OAAO,IAAI,EAClCK,EAA0BF,EAAgB,cAC1CG,EAAkB,IAAIzL,EAAK,SAAS,QACpC0C,EAAW1C,EAAK,SAAS,KAAKsL,EAAgB,QAAQ,EAEtDA,EAAgB,SAAWtL,EAAK,SAClCA,EAAK,MAAM,KAAK,4EAA8EA,EAAK,QAAU,sCAAwCsL,EAAgB,QAAU,GAAG,EAGpL,QAASrK,EAAI,EAAGA,EAAIsK,EAAkB,OAAQtK,IAAK,CACjD,IAAIyK,EAAQH,EAAkBtK,GAC1BoK,EAAMK,EAAM,GACZ1K,EAAW0K,EAAM,GAErBN,EAAaC,GAAO,IAAIrL,EAAK,OAAOgB,CAAQ,CAC9C,CAEA,QAASC,EAAI,EAAGA,EAAIuK,EAAwB,OAAQvK,IAAK,CACvD,IAAIyK,EAAQF,EAAwBvK,GAChC2I,EAAO8B,EAAM,GACblK,EAAUkK,EAAM,GAEpBD,EAAgB,OAAO7B,CAAI,EAC3BuB,EAAcvB,GAAQpI,CACxB,CAEA,OAAAiK,EAAgB,OAAO,EAEvBzC,EAAM,OAASsC,EAAgB,OAE/BtC,EAAM,aAAeoC,EACrBpC,EAAM,cAAgBmC,EACtBnC,EAAM,SAAWyC,EAAgB,KACjCzC,EAAM,SAAWtG,EAEV,IAAI1C,EAAK,MAAMgJ,CAAK,CAC7B,EACA;AAAA;AAAA;AAAA,GA6BAhJ,EAAK,QAAU,UAAY,CACzB,KAAK,KAAO,KACZ,KAAK,QAAU,OAAO,OAAO,IAAI,EACjC,KAAK,WAAa,OAAO,OAAO,IAAI,EACpC,KAAK,cAAgB,OAAO,OAAO,IAAI,EACvC,KAAK,qBAAuB,CAAC,EAC7B,KAAK,aAAe,CAAC,EACrB,KAAK,UAAYA,EAAK,UACtB,KAAK,SAAW,IAAIA,EAAK,SACzB,KAAK,eAAiB,IAAIA,EAAK,SAC/B,KAAK,cAAgB,EACrB,KAAK,GAAK,IACV,KAAK,IAAM,IACX,KAAK,UAAY,EACjB,KAAK,kBAAoB,CAAC,CAC5B,EAcAA,EAAK,QAAQ,UAAU,IAAM,SAAUqL,EAAK,CAC1C,KAAK,KAAOA,CACd,EAkCArL,EAAK,QAAQ,UAAU,MAAQ,SAAUW,EAAWgL,EAAY,CAC9D,GAAI,KAAK,KAAKhL,CAAS,EACrB,MAAM,IAAI,WAAY,UAAYA,EAAY,kCAAkC,EAGlF,KAAK,QAAQA,GAAagL,GAAc,CAAC,CAC3C,EAUA3L,EAAK,QAAQ,UAAU,EAAI,SAAU4L,EAAQ,CACvCA,EAAS,EACX,KAAK,GAAK,EACDA,EAAS,EAClB,KAAK,GAAK,EAEV,KAAK,GAAKA,CAEd,EASA5L,EAAK,QAAQ,UAAU,GAAK,SAAU4L,EAAQ,CAC5C,KAAK,IAAMA,CACb,EAmBA5L,EAAK,QAAQ,UAAU,IAAM,SAAU6L,EAAKF,EAAY,CACtD,IAAIjL,EAASmL,EAAI,KAAK,MAClBC,EAAS,OAAO,KAAK,KAAK,OAAO,EAErC,KAAK,WAAWpL,GAAUiL,GAAc,CAAC,EACzC,KAAK,eAAiB,EAEtB,QAAS1K,EAAI,EAAGA,EAAI6K,EAAO,OAAQ7K,IAAK,CACtC,IAAIN,EAAYmL,EAAO7K,GACnB8K,EAAY,KAAK,QAAQpL,GAAW,UACpCoJ,EAAQgC,EAAYA,EAAUF,CAAG,EAAIA,EAAIlL,GACzCsB,EAAS,KAAK,UAAU8H,EAAO,CAC7B,OAAQ,CAACpJ,CAAS,CACpB,CAAC,EACD8I,EAAQ,KAAK,SAAS,IAAIxH,CAAM,EAChClB,EAAW,IAAIf,EAAK,SAAUU,EAAQC,CAAS,EAC/CqL,EAAa,OAAO,OAAO,IAAI,EAEnC,KAAK,qBAAqBjL,GAAYiL,EACtC,KAAK,aAAajL,GAAY,EAG9B,KAAK,aAAaA,IAAa0I,EAAM,OAGrC,QAASvG,EAAI,EAAGA,EAAIuG,EAAM,OAAQvG,IAAK,CACrC,IAAI0G,EAAOH,EAAMvG,GAUjB,GARI8I,EAAWpC,IAAS,OACtBoC,EAAWpC,GAAQ,GAGrBoC,EAAWpC,IAAS,EAIhB,KAAK,cAAcA,IAAS,KAAW,CACzC,IAAIpI,EAAU,OAAO,OAAO,IAAI,EAChCA,EAAQ,OAAY,KAAK,UACzB,KAAK,WAAa,EAElB,QAAS4B,EAAI,EAAGA,EAAI0I,EAAO,OAAQ1I,IACjC5B,EAAQsK,EAAO1I,IAAM,OAAO,OAAO,IAAI,EAGzC,KAAK,cAAcwG,GAAQpI,CAC7B,CAGI,KAAK,cAAcoI,GAAMjJ,GAAWD,IAAW,OACjD,KAAK,cAAckJ,GAAMjJ,GAAWD,GAAU,OAAO,OAAO,IAAI,GAKlE,QAAS4J,EAAI,EAAGA,EAAI,KAAK,kBAAkB,OAAQA,IAAK,CACtD,IAAI2B,EAAc,KAAK,kBAAkB3B,GACrCzI,EAAW+H,EAAK,SAASqC,GAEzB,KAAK,cAAcrC,GAAMjJ,GAAWD,GAAQuL,IAAgB,OAC9D,KAAK,cAAcrC,GAAMjJ,GAAWD,GAAQuL,GAAe,CAAC,GAG9D,KAAK,cAAcrC,GAAMjJ,GAAWD,GAAQuL,GAAa,KAAKpK,CAAQ,CACxE,CACF,CAEF,CACF,EAOA7B,EAAK,QAAQ,UAAU,6BAA+B,UAAY,CAOhE,QALIkM,EAAY,OAAO,KAAK,KAAK,YAAY,EACzCC,EAAiBD,EAAU,OAC3BE,EAAc,CAAC,EACfC,EAAqB,CAAC,EAEjBpL,EAAI,EAAGA,EAAIkL,EAAgBlL,IAAK,CACvC,IAAIF,EAAWf,EAAK,SAAS,WAAWkM,EAAUjL,EAAE,EAChD8I,EAAQhJ,EAAS,UAErBsL,EAAmBtC,KAAWsC,EAAmBtC,GAAS,GAC1DsC,EAAmBtC,IAAU,EAE7BqC,EAAYrC,KAAWqC,EAAYrC,GAAS,GAC5CqC,EAAYrC,IAAU,KAAK,aAAahJ,EAC1C,CAIA,QAFI+K,EAAS,OAAO,KAAK,KAAK,OAAO,EAE5B7K,EAAI,EAAGA,EAAI6K,EAAO
,OAAQ7K,IAAK,CACtC,IAAIN,EAAYmL,EAAO7K,GACvBmL,EAAYzL,GAAayL,EAAYzL,GAAa0L,EAAmB1L,EACvE,CAEA,KAAK,mBAAqByL,CAC5B,EAOApM,EAAK,QAAQ,UAAU,mBAAqB,UAAY,CAMtD,QALIoL,EAAe,CAAC,EAChBc,EAAY,OAAO,KAAK,KAAK,oBAAoB,EACjDI,EAAkBJ,EAAU,OAC5BK,EAAe,OAAO,OAAO,IAAI,EAE5BtL,EAAI,EAAGA,EAAIqL,EAAiBrL,IAAK,CAaxC,QAZIF,EAAWf,EAAK,SAAS,WAAWkM,EAAUjL,EAAE,EAChDN,EAAYI,EAAS,UACrByL,EAAc,KAAK,aAAazL,GAChCgK,EAAc,IAAI/K,EAAK,OACvByM,EAAkB,KAAK,qBAAqB1L,GAC5C0I,EAAQ,OAAO,KAAKgD,CAAe,EACnCC,EAAcjD,EAAM,OAGpBkD,EAAa,KAAK,QAAQhM,GAAW,OAAS,EAC9CiM,EAAW,KAAK,WAAW7L,EAAS,QAAQ,OAAS,EAEhDmC,EAAI,EAAGA,EAAIwJ,EAAaxJ,IAAK,CACpC,IAAI0G,EAAOH,EAAMvG,GACb2J,EAAKJ,EAAgB7C,GACrBK,EAAY,KAAK,cAAcL,GAAM,OACrCkD,EAAK9B,EAAO+B,EAEZR,EAAa3C,KAAU,QACzBkD,EAAM9M,EAAK,IAAI,KAAK,cAAc4J,GAAO,KAAK,aAAa,EAC3D2C,EAAa3C,GAAQkD,GAErBA,EAAMP,EAAa3C,GAGrBoB,EAAQ8B,IAAQ,KAAK,IAAM,GAAKD,IAAO,KAAK,KAAO,EAAI,KAAK,GAAK,KAAK,IAAML,EAAc,KAAK,mBAAmB7L,KAAekM,GACjI7B,GAAS2B,EACT3B,GAAS4B,EACTG,EAAqB,KAAK,MAAM/B,EAAQ,GAAI,EAAI,IAQhDD,EAAY,OAAOd,EAAW8C,CAAkB,CAClD,CAEA3B,EAAarK,GAAYgK,CAC3B,CAEA,KAAK,aAAeK,CACtB,EAOApL,EAAK,QAAQ,UAAU,eAAiB,UAAY,CAClD,KAAK,SAAWA,EAAK,SAAS,UAC5B,OAAO,KAAK,KAAK,aAAa,EAAE,KAAK,CACvC,CACF,EAUAA,EAAK,QAAQ,UAAU,MAAQ,UAAY,CACzC,YAAK,6BAA6B,EAClC,KAAK,mBAAmB,EACxB,KAAK,eAAe,EAEb,IAAIA,EAAK,MAAM,CACpB,cAAe,KAAK,cACpB,aAAc,KAAK,aACnB,SAAU,KAAK,SACf,OAAQ,OAAO,KAAK,KAAK,OAAO,EAChC,SAAU,KAAK,cACjB,CAAC,CACH,EAgBAA,EAAK,QAAQ,UAAU,IAAM,SAAU8B,EAAI,CACzC,IAAIkL,EAAO,MAAM,UAAU,MAAM,KAAK,UAAW,CAAC,EAClDA,EAAK,QAAQ,IAAI,EACjBlL,EAAG,MAAM,KAAMkL,CAAI,CACrB,EAaAhN,EAAK,UAAY,SAAU4J,EAAMG,EAAOlI,EAAU,CAShD,QARIoL,EAAiB,OAAO,OAAO,IAAI,EACnCC,EAAe,OAAO,KAAKrL,GAAY,CAAC,CAAC,EAOpCZ,EAAI,EAAGA,EAAIiM,EAAa,OAAQjM,IAAK,CAC5C,IAAIT,EAAM0M,EAAajM,GACvBgM,EAAezM,GAAOqB,EAASrB,GAAK,MAAM,CAC5C,CAEA,KAAK,SAAW,OAAO,OAAO,IAAI,EAE9BoJ,IAAS,SACX,KAAK,SAASA,GAAQ,OAAO,OAAO,IAAI,EACxC,KAAK,SAASA,GAAMG,GAASkD,EAEjC,EAWAjN,EAAK,UAAU,UAAU,QAAU,SAAUmN,EAAgB,CAG3D,QAFI1D,EAAQ,OAAO,KAAK0D,EAAe,QAAQ,EAEtClM,EAAI,EAAGA,EAAIwI,EAAM,OAAQxI,IAAK,CACrC,IAAI2I,EAAOH,EAAMxI,GACb6K,EAAS,OAAO,KAAKqB,EAAe,SAASvD,EAAK,EAElD,KAAK,SAASA,IAAS,OACzB,KAAK,SAASA,GAAQ,OAAO,OAAO,IAAI,GAG1C,QAAS1G,EAAI,EAAGA,EAAI4I,EAAO,OAAQ5I,IAAK,CACtC,IAAI6G,EAAQ+B,EAAO5I,GACf3C,EAAO,OAAO,KAAK4M,EAAe,SAASvD,GAAMG,EAAM,EAEvD,KAAK,SAASH,GAAMG,IAAU,OAChC,KAAK,SAASH,GAAMG,GAAS,OAAO,OAAO,IAAI,GAGjD,QAAS3G,EAAI,EAAGA,EAAI7C,EAAK,OAAQ6C,IAAK,CACpC,IAAI5C,EAAMD,EAAK6C,GAEX,KAAK,SAASwG,GAAMG,GAAOvJ,IAAQ,KACrC,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAO2M,EAAe,SAASvD,GAAMG,GAAOvJ,GAEvE,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAO,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAK,OAAO2M,EAAe,SAASvD,GAAMG,GAAOvJ,EAAI,CAGtH,CACF,CACF,CACF,EASAR,EAAK,UAAU,UAAU,IAAM,SAAU4J,EAAMG,EAAOlI,EAAU,CAC9D,GAAI,EAAE+H,KAAQ,KAAK,UAAW,CAC5B,KAAK,SAASA,GAAQ,OAAO,OAAO,IAAI,EACxC,KAAK,SAASA,GAAMG,GAASlI,EAC7B,MACF,CAEA,GAAI,EAAEkI,KAAS,KAAK,SAASH,IAAQ,CACnC,KAAK,SAASA,GAAMG,GAASlI,EAC7B,MACF,CAIA,QAFIqL,EAAe,OAAO,KAAKrL,CAAQ,EAE9BZ,EAAI,EAAGA,EAAIiM,EAAa,OAAQjM,IAAK,CAC5C,IAAIT,EAAM0M,EAAajM,GAEnBT,KAAO,KAAK,SAASoJ,GAAMG,GAC7B,KAAK,SAASH,GAAMG,GAAOvJ,GAAO,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAK,OAAOqB,EAASrB,EAAI,EAEtF,KAAK,SAASoJ,GAAMG,GAAOvJ,GAAOqB,EAASrB,EAE/C,CACF,EAYAR,EAAK,MAAQ,SAAUoN,EAAW,CAChC,KAAK,QAAU,CAAC,EAChB,KAAK,UAAYA,CACnB,EA0BApN,EAAK,MAAM,SAAW,IAAI,OAAQ,GAAG,EACrCA,EAAK,MAAM,SAAS,KAAO,EAC3BA,EAAK,MAAM,SAAS,QAAU,EAC9BA,EAAK,MAAM,SAAS,SAAW,EAa/BA,EAAK,MAAM,SAAW,CAIpB,SAAU,EAMV,SAAU,EAMV,WAAY,CACd,EAyBAA,EAAK,MAAM,UAAU,OAAS,SAAUkH,EAAQ,CAC9C,MAAM,WAAYA,IAChBA,EAAO,OAAS,KAAK,WAGjB,UAAWA,IACfA,EAAO,MAAQ,GAGX,gBAAiBA,IACrBA,EAAO,YAAc,IAGjB,aAAcA,IAClBA,EAAO,SAAWlH,EAAK,MAAM,SAAS,MAG
nCkH,EAAO,SAAWlH,EAAK,MAAM,SAAS,SAAakH,EAAO,KAAK,OAAO,CAAC,GAAKlH,EAAK,MAAM,WAC1FkH,EAAO,KAAO,IAAMA,EAAO,MAGxBA,EAAO,SAAWlH,EAAK,MAAM,SAAS,UAAckH,EAAO,KAAK,MAAM,EAAE,GAAKlH,EAAK,MAAM,WAC3FkH,EAAO,KAAO,GAAKA,EAAO,KAAO,KAG7B,aAAcA,IAClBA,EAAO,SAAWlH,EAAK,MAAM,SAAS,UAGxC,KAAK,QAAQ,KAAKkH,CAAM,EAEjB,IACT,EASAlH,EAAK,MAAM,UAAU,UAAY,UAAY,CAC3C,QAASiB,EAAI,EAAGA,EAAI,KAAK,QAAQ,OAAQA,IACvC,GAAI,KAAK,QAAQA,GAAG,UAAYjB,EAAK,MAAM,SAAS,WAClD,MAAO,GAIX,MAAO,EACT,EA4BAA,EAAK,MAAM,UAAU,KAAO,SAAU4J,EAAMyD,EAAS,CACnD,GAAI,MAAM,QAAQzD,CAAI,EACpB,OAAAA,EAAK,QAAQ,SAAU7H,EAAG,CAAE,KAAK,KAAKA,EAAG/B,EAAK,MAAM,MAAMqN,CAAO,CAAC,CAAE,EAAG,IAAI,EACpE,KAGT,IAAInG,EAASmG,GAAW,CAAC,EACzB,OAAAnG,EAAO,KAAO0C,EAAK,SAAS,EAE5B,KAAK,OAAO1C,CAAM,EAEX,IACT,EACAlH,EAAK,gBAAkB,SAAUI,EAASmD,EAAOC,EAAK,CACpD,KAAK,KAAO,kBACZ,KAAK,QAAUpD,EACf,KAAK,MAAQmD,EACb,KAAK,IAAMC,CACb,EAEAxD,EAAK,gBAAgB,UAAY,IAAI,MACrCA,EAAK,WAAa,SAAU4B,EAAK,CAC/B,KAAK,QAAU,CAAC,EAChB,KAAK,IAAMA,EACX,KAAK,OAASA,EAAI,OAClB,KAAK,IAAM,EACX,KAAK,MAAQ,EACb,KAAK,oBAAsB,CAAC,CAC9B,EAEA5B,EAAK,WAAW,UAAU,IAAM,UAAY,CAG1C,QAFIsN,EAAQtN,EAAK,WAAW,QAErBsN,GACLA,EAAQA,EAAM,IAAI,CAEtB,EAEAtN,EAAK,WAAW,UAAU,YAAc,UAAY,CAKlD,QAJIuN,EAAY,CAAC,EACbpL,EAAa,KAAK,MAClBD,EAAW,KAAK,IAEX,EAAI,EAAG,EAAI,KAAK,oBAAoB,OAAQ,IACnDA,EAAW,KAAK,oBAAoB,GACpCqL,EAAU,KAAK,KAAK,IAAI,MAAMpL,EAAYD,CAAQ,CAAC,EACnDC,EAAaD,EAAW,EAG1B,OAAAqL,EAAU,KAAK,KAAK,IAAI,MAAMpL,EAAY,KAAK,GAAG,CAAC,EACnD,KAAK,oBAAoB,OAAS,EAE3BoL,EAAU,KAAK,EAAE,CAC1B,EAEAvN,EAAK,WAAW,UAAU,KAAO,SAAUwN,EAAM,CAC/C,KAAK,QAAQ,KAAK,CAChB,KAAMA,EACN,IAAK,KAAK,YAAY,EACtB,MAAO,KAAK,MACZ,IAAK,KAAK,GACZ,CAAC,EAED,KAAK,MAAQ,KAAK,GACpB,EAEAxN,EAAK,WAAW,UAAU,gBAAkB,UAAY,CACtD,KAAK,oBAAoB,KAAK,KAAK,IAAM,CAAC,EAC1C,KAAK,KAAO,CACd,EAEAA,EAAK,WAAW,UAAU,KAAO,UAAY,CAC3C,GAAI,KAAK,KAAO,KAAK,OACnB,OAAOA,EAAK,WAAW,IAGzB,IAAIoC,EAAO,KAAK,IAAI,OAAO,KAAK,GAAG,EACnC,YAAK,KAAO,EACLA,CACT,EAEApC,EAAK,WAAW,UAAU,MAAQ,UAAY,CAC5C,OAAO,KAAK,IAAM,KAAK,KACzB,EAEAA,EAAK,WAAW,UAAU,OAAS,UAAY,CACzC,KAAK,OAAS,KAAK,MACrB,KAAK,KAAO,GAGd,KAAK,MAAQ,KAAK,GACpB,EAEAA,EAAK,WAAW,UAAU,OAAS,UAAY,CAC7C,KAAK,KAAO,CACd,EAEAA,EAAK,WAAW,UAAU,eAAiB,UAAY,CACrD,IAAIoC,EAAMqL,EAEV,GACErL,EAAO,KAAK,KAAK,EACjBqL,EAAWrL,EAAK,WAAW,CAAC,QACrBqL,EAAW,IAAMA,EAAW,IAEjCrL,GAAQpC,EAAK,WAAW,KAC1B,KAAK,OAAO,CAEhB,EAEAA,EAAK,WAAW,UAAU,KAAO,UAAY,CAC3C,OAAO,KAAK,IAAM,KAAK,MACzB,EAEAA,EAAK,WAAW,IAAM,MACtBA,EAAK,WAAW,MAAQ,QACxBA,EAAK,WAAW,KAAO,OACvBA,EAAK,WAAW,cAAgB,gBAChCA,EAAK,WAAW,MAAQ,QACxBA,EAAK,WAAW,SAAW,WAE3BA,EAAK,WAAW,SAAW,SAAU0N,EAAO,CAC1C,OAAAA,EAAM,OAAO,EACbA,EAAM,KAAK1N,EAAK,WAAW,KAAK,EAChC0N,EAAM,OAAO,EACN1N,EAAK,WAAW,OACzB,EAEAA,EAAK,WAAW,QAAU,SAAU0N,EAAO,CAQzC,GAPIA,EAAM,MAAM,EAAI,IAClBA,EAAM,OAAO,EACbA,EAAM,KAAK1N,EAAK,WAAW,IAAI,GAGjC0N,EAAM,OAAO,EAETA,EAAM,KAAK,EACb,OAAO1N,EAAK,WAAW,OAE3B,EAEAA,EAAK,WAAW,gBAAkB,SAAU0N,EAAO,CACjD,OAAAA,EAAM,OAAO,EACbA,EAAM,eAAe,EACrBA,EAAM,KAAK1N,EAAK,WAAW,aAAa,EACjCA,EAAK,WAAW,OACzB,EAEAA,EAAK,WAAW,SAAW,SAAU0N,EAAO,CAC1C,OAAAA,EAAM,OAAO,EACbA,EAAM,eAAe,EACrBA,EAAM,KAAK1N,EAAK,WAAW,KAAK,EACzBA,EAAK,WAAW,OACzB,EAEAA,EAAK,WAAW,OAAS,SAAU0N,EAAO,CACpCA,EAAM,MAAM,EAAI,GAClBA,EAAM,KAAK1N,EAAK,WAAW,IAAI,CAEnC,EAaAA,EAAK,WAAW,cAAgBA,EAAK,UAAU,UAE/CA,EAAK,WAAW,QAAU,SAAU0N,EAAO,CACzC,OAAa,CACX,IAAItL,EAAOsL,EAAM,KAAK,EAEtB,GAAItL,GAAQpC,EAAK,WAAW,IAC1B,OAAOA,EAAK,WAAW,OAIzB,GAAIoC,EAAK,WAAW,CAAC,GAAK,GAAI,CAC5BsL,EAAM,gBAAgB,EACtB,QACF,CAEA,GAAItL,GAAQ,IACV,OAAOpC,EAAK,WAAW,SAGzB,GAAIoC,GAAQ,IACV,OAAAsL,EAAM,OAAO,EACTA,EAAM,MAAM,EAAI,GAClBA,EAAM,KAAK1N,EAAK,WAAW,IAAI,EAE1BA,EAAK,WAAW,gBAGzB,GAAIoC,GAAQ,IACV,OAAAsL,EAAM,OAAO,EACTA,EAAM,MAAM,EAAI,GAClBA,EAAM,KAAK1N,EAA
K,WAAW,IAAI,EAE1BA,EAAK,WAAW,SAczB,GARIoC,GAAQ,KAAOsL,EAAM,MAAM,IAAM,GAQjCtL,GAAQ,KAAOsL,EAAM,MAAM,IAAM,EACnC,OAAAA,EAAM,KAAK1N,EAAK,WAAW,QAAQ,EAC5BA,EAAK,WAAW,QAGzB,GAAIoC,EAAK,MAAMpC,EAAK,WAAW,aAAa,EAC1C,OAAOA,EAAK,WAAW,OAE3B,CACF,EAEAA,EAAK,YAAc,SAAU4B,EAAKsH,EAAO,CACvC,KAAK,MAAQ,IAAIlJ,EAAK,WAAY4B,CAAG,EACrC,KAAK,MAAQsH,EACb,KAAK,cAAgB,CAAC,EACtB,KAAK,UAAY,CACnB,EAEAlJ,EAAK,YAAY,UAAU,MAAQ,UAAY,CAC7C,KAAK,MAAM,IAAI,EACf,KAAK,QAAU,KAAK,MAAM,QAI1B,QAFIsN,EAAQtN,EAAK,YAAY,YAEtBsN,GACLA,EAAQA,EAAM,IAAI,EAGpB,OAAO,KAAK,KACd,EAEAtN,EAAK,YAAY,UAAU,WAAa,UAAY,CAClD,OAAO,KAAK,QAAQ,KAAK,UAC3B,EAEAA,EAAK,YAAY,UAAU,cAAgB,UAAY,CACrD,IAAI2N,EAAS,KAAK,WAAW,EAC7B,YAAK,WAAa,EACXA,CACT,EAEA3N,EAAK,YAAY,UAAU,WAAa,UAAY,CAClD,IAAI4N,EAAkB,KAAK,cAC3B,KAAK,MAAM,OAAOA,CAAe,EACjC,KAAK,cAAgB,CAAC,CACxB,EAEA5N,EAAK,YAAY,YAAc,SAAUmJ,EAAQ,CAC/C,IAAIwE,EAASxE,EAAO,WAAW,EAE/B,GAAIwE,GAAU,KAId,OAAQA,EAAO,KAAM,CACnB,KAAK3N,EAAK,WAAW,SACnB,OAAOA,EAAK,YAAY,cAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,KACnB,OAAOA,EAAK,YAAY,UAC1B,QACE,IAAI6N,EAAe,4CAA8CF,EAAO,KAExE,MAAIA,EAAO,IAAI,QAAU,IACvBE,GAAgB,gBAAkBF,EAAO,IAAM,KAG3C,IAAI3N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CAC1E,CACF,EAEA3N,EAAK,YAAY,cAAgB,SAAUmJ,EAAQ,CACjD,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,QAAQA,EAAO,IAAK,CAClB,IAAK,IACHxE,EAAO,cAAc,SAAWnJ,EAAK,MAAM,SAAS,WACpD,MACF,IAAK,IACHmJ,EAAO,cAAc,SAAWnJ,EAAK,MAAM,SAAS,SACpD,MACF,QACE,IAAI6N,EAAe,kCAAoCF,EAAO,IAAM,IACpE,MAAM,IAAI3N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CAC1E,CAEA,IAAIG,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B,IAAID,EAAe,yCACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEA,OAAQG,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,KACnB,OAAOA,EAAK,YAAY,UAC1B,QACE,IAAI6N,EAAe,mCAAqCC,EAAW,KAAO,IAC1E,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,WAAa,SAAUmJ,EAAQ,CAC9C,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,IAAIxE,EAAO,MAAM,UAAU,QAAQwE,EAAO,GAAG,GAAK,GAAI,CACpD,IAAII,EAAiB5E,EAAO,MAAM,UAAU,IAAI,SAAU6E,EAAG,CAAE,MAAO,IAAMA,EAAI,GAAI,CAAC,EAAE,KAAK,IAAI,EAC5FH,EAAe,uBAAyBF,EAAO,IAAM,uBAAyBI,EAElF,MAAM,IAAI/N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEAxE,EAAO,cAAc,OAAS,CAACwE,EAAO,GAAG,EAEzC,IAAIG,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B,IAAID,EAAe,gCACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEA,OAAQG,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAOA,EAAK,YAAY,UAC1B,QACE,IAAI6N,EAAe,0BAA4BC,EAAW,KAAO,IACjE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,UAAY,SAAUmJ,EAAQ,CAC7C,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,CAAAxE,EAAO,cAAc,KAAOwE,EAAO,IAAI,YAAY,EAE/CA,EAAO,IAAI,QAAQ,GAAG,GAAK,KAC7BxE,EAAO,cAAc,YAAc,IAGrC,IAAI2E,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B3E,EAAO,WAAW,EAClB,MACF,CAEA,OAAQ2E,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,UAC1B,KAAKA,EAAK,WAAW,MACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,cACnB,OAAOA,EAAK,YAAY,kBAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,SACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,cAC1B,QACE,IAAI6N,EAAe,2BAA6BC,EAAW,KAAO,IAClE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,kBAAoB,SAAUmJ,EAAQ,CACrD,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,KAAIxG,EAAe,SAASwG,EAAO,IAAK,EAAE,EAE1C,GAAI,MAAMxG,CAAY,EAAG,CACvB,IAAI0G,EAAe,gCACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEAxE,EAAO,cAAc,aAAehC,EAEpC,IAAI2G,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B3E,EAAO,WAAW,EAClB
,MACF,CAEA,OAAQ2E,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,UAC1B,KAAKA,EAAK,WAAW,MACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,cACnB,OAAOA,EAAK,YAAY,kBAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,SACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,cAC1B,QACE,IAAI6N,EAAe,2BAA6BC,EAAW,KAAO,IAClE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAEA9N,EAAK,YAAY,WAAa,SAAUmJ,EAAQ,CAC9C,IAAIwE,EAASxE,EAAO,cAAc,EAElC,GAAIwE,GAAU,KAId,KAAIM,EAAQ,SAASN,EAAO,IAAK,EAAE,EAEnC,GAAI,MAAMM,CAAK,EAAG,CAChB,IAAIJ,EAAe,wBACnB,MAAM,IAAI7N,EAAK,gBAAiB6N,EAAcF,EAAO,MAAOA,EAAO,GAAG,CACxE,CAEAxE,EAAO,cAAc,MAAQ8E,EAE7B,IAAIH,EAAa3E,EAAO,WAAW,EAEnC,GAAI2E,GAAc,KAAW,CAC3B3E,EAAO,WAAW,EAClB,MACF,CAEA,OAAQ2E,EAAW,KAAM,CACvB,KAAK9N,EAAK,WAAW,KACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,UAC1B,KAAKA,EAAK,WAAW,MACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,cACnB,OAAOA,EAAK,YAAY,kBAC1B,KAAKA,EAAK,WAAW,MACnB,OAAOA,EAAK,YAAY,WAC1B,KAAKA,EAAK,WAAW,SACnB,OAAAmJ,EAAO,WAAW,EACXnJ,EAAK,YAAY,cAC1B,QACE,IAAI6N,EAAe,2BAA6BC,EAAW,KAAO,IAClE,MAAM,IAAI9N,EAAK,gBAAiB6N,EAAcC,EAAW,MAAOA,EAAW,GAAG,CAClF,EACF,EAMI,SAAU1G,EAAM8G,EAAS,CACrB,OAAO,QAAW,YAAc,OAAO,IAEzC,OAAOA,CAAO,EACL,OAAOpO,IAAY,SAM5BC,GAAO,QAAUmO,EAAQ,EAGzB9G,EAAK,KAAO8G,EAAQ,CAExB,EAAE,KAAM,UAAY,CAMlB,OAAOlO,CACT,CAAC,CACH,GAAG,ICl5GH,IAAAmO,EAAAC,EAAA,CAAAC,GAAAC,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAeA,IAAIC,GAAkB,UAOtBD,GAAO,QAAUE,GAUjB,SAASA,GAAWC,EAAQ,CAC1B,IAAIC,EAAM,GAAKD,EACXE,EAAQJ,GAAgB,KAAKG,CAAG,EAEpC,GAAI,CAACC,EACH,OAAOD,EAGT,IAAIE,EACAC,EAAO,GACPC,EAAQ,EACRC,EAAY,EAEhB,IAAKD,EAAQH,EAAM,MAAOG,EAAQJ,EAAI,OAAQI,IAAS,CACrD,OAAQJ,EAAI,WAAWI,CAAK,EAAG,CAC7B,IAAK,IACHF,EAAS,SACT,MACF,IAAK,IACHA,EAAS,QACT,MACF,IAAK,IACHA,EAAS,QACT,MACF,IAAK,IACHA,EAAS,OACT,MACF,IAAK,IACHA,EAAS,OACT,MACF,QACE,QACJ,CAEIG,IAAcD,IAChBD,GAAQH,EAAI,UAAUK,EAAWD,CAAK,GAGxCC,EAAYD,EAAQ,EACpBD,GAAQD,CACV,CAEA,OAAOG,IAAcD,EACjBD,EAAOH,EAAI,UAAUK,EAAWD,CAAK,EACrCD,CACN,ICvDA,IAAAG,GAAiB,QCKZ,OAAO,UACV,OAAO,QAAU,SAAUC,EAAa,CACtC,IAAMC,EAA2B,CAAC,EAClC,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAK,CAACC,EAAKF,EAAIE,EAAI,CAAC,EAG3B,OAAOD,CACT,GAGG,OAAO,SACV,OAAO,OAAS,SAAUD,EAAa,CACrC,IAAMC,EAAiB,CAAC,EACxB,QAAWC,KAAO,OAAO,KAAKF,CAAG,EAE/BC,EAAK,KAAKD,EAAIE,EAAI,EAGpB,OAAOD,CACT,GAKE,OAAO,SAAY,cAGhB,QAAQ,UAAU,WACrB,QAAQ,UAAU,SAAW,SAC3BE,EAA8BC,EACxB,CACF,OAAOD,GAAM,UACf,KAAK,WAAaA,EAAE,KACpB,KAAK,UAAYA,EAAE,MAEnB,KAAK,WAAaA,EAClB,KAAK,UAAYC,EAErB,GAGG,QAAQ,UAAU,cACrB,QAAQ,UAAU,YAAc,YAC3BC,EACG,CACN,IAAMC,EAAS,KAAK,WACpB,GAAIA,EAAQ,CACND,EAAM,SAAW,GACnBC,EAAO,YAAY,IAAI,EAGzB,QAASC,EAAIF,EAAM,OAAS,EAAGE,GAAK,EAAGA,IAAK,CAC1C,IAAIC,EAAOH,EAAME,GACb,OAAOC,GAAS,SAClBA,EAAO,SAAS,eAAeA,CAAI,EAC5BA,EAAK,YACZA,EAAK,WAAW,YAAYA,CAAI,EAG7BD,EAGHD,EAAO,aAAa,KAAK,gBAAkBE,CAAI,EAF/CF,EAAO,aAAaE,EAAM,IAAI,CAGlC,CACF,CACF,ICxEJ,IAAAC,GAAuB,OAiChB,SAASC,GACdC,EACmB,CACnB,IAAMC,EAAY,IAAI,IAChBC,EAAY,IAAI,IACtB,QAAWC,KAAOH,EAAM,CACtB,GAAM,CAACI,EAAMC,CAAI,EAAIF,EAAI,SAAS,MAAM,GAAG,EAGrCG,EAAWH,EAAI,SACfI,EAAWJ,EAAI,MACfK,EAAWL,EAAI,KAGfM,KAAO,GAAAC,SAAWP,EAAI,IAAI,EAC7B,QAAQ,mBAAoB,EAAE,EAC9B,QAAQ,OAAQ,GAAG,EAGtB,GAAIE,EAAM,CACR,IAAMM,EAASV,EAAU,IAAIG,CAAI,EAG5BF,EAAQ,IAAIS,CAAM,EASrBV,EAAU,IAAIK,EAAU,CACtB,SAAAA,EACA,MAAAC,EACA,KAAAE,EACA,OAAAE,CACF,CAAC,GAbDA,EAAO,MAAQR,EAAI,MACnBQ,EAAO,KAAQF,EAGfP,EAAQ,IAAIS,CAAM,EAatB,MACEV,EAAU,IAAIK,EAAUM,EAAA,CACtB,SAAAN,EACA,MAAAC,EACA,KAAAE,GACGD,GAAQ,CAAE,KAAAA,CAAK,EACnB,CAEL,CACA,OAAOP,CACT,CCpFA,IAAAY,GAAuB,OAsChB,SAASC,GACdC,EAA2BC,EACD,CAC1B,IAAMC,EAAY,IAAI,OAAOF,EAAO,UAAW,KAAK,EAC9CG,EAAY,CAACC
,EAAYC,EAAcC,IACpC,GAAGD,4BAA+BC,WAI3C,OAAQC,GAAkB,CACxBA,EAAQA,EACL,QAAQ,gBAAiB,GAAG,EAC5B,KAAK,EAGR,IAAMC,EAAQ,IAAI,OAAO,MAAMR,EAAO,cACpCO,EACG,QAAQ,uBAAwB,MAAM,EACtC,QAAQL,EAAW,GAAG,KACtB,KAAK,EAGV,OAAOO,IACLR,KACI,GAAAS,SAAWD,CAAK,EAChBA,GAED,QAAQD,EAAOL,CAAS,EACxB,QAAQ,8BAA+B,IAAI,CAClD,CACF,CCtCO,SAASQ,GACdC,EACqB,CACrB,IAAMC,EAAS,IAAK,KAAa,MAAM,CAAC,QAAS,MAAM,CAAC,EAIxD,OAHe,IAAK,KAAa,YAAYD,EAAOC,CAAK,EAGlD,MAAM,EACNA,EAAM,OACf,CAUO,SAASC,GACdD,EAA4BE,EACV,CAzEpB,IAAAC,EA0EE,IAAMC,EAAU,IAAI,IAAuBJ,CAAK,EAG1CK,EAA2B,CAAC,EAClC,QAASC,EAAI,EAAGA,EAAIJ,EAAM,OAAQI,IAChC,QAAWC,KAAUH,EACfF,EAAMI,GAAG,WAAWC,EAAO,IAAI,IACjCF,EAAOE,EAAO,MAAQ,GACtBH,EAAQ,OAAOG,CAAM,GAI3B,QAAWA,KAAUH,GACfD,EAAA,KAAK,iBAAL,MAAAA,EAAA,UAAsBI,EAAO,QAC/BF,EAAOE,EAAO,MAAQ,IAG1B,OAAOF,CACT,CC2BA,SAASG,GAAWC,EAAaC,EAAuB,CACtD,GAAM,CAACC,EAAGC,CAAC,EAAI,CAAC,IAAI,IAAIH,CAAC,EAAG,IAAI,IAAIC,CAAC,CAAC,EACtC,MAAO,CACL,GAAG,IAAI,IAAI,CAAC,GAAGC,CAAC,EAAE,OAAOE,GAAS,CAACD,EAAE,IAAIC,CAAK,CAAC,CAAC,CAClD,CACF,CASO,IAAMC,EAAN,KAAa,CAgCX,YAAY,CAAE,OAAAC,EAAQ,KAAAC,EAAM,QAAAC,CAAQ,EAAgB,CACzD,KAAK,QAAUA,EAGf,KAAK,UAAYC,GAAuBF,CAAI,EAC5C,KAAK,UAAYG,GAAuBJ,EAAQ,EAAK,EAGrD,KAAK,UAAU,UAAY,IAAI,OAAOA,EAAO,SAAS,EAGtD,KAAK,MAAQ,KAAK,UAAY,CAGxBA,EAAO,KAAK,SAAW,GAAKA,EAAO,KAAK,KAAO,KACjD,KAAK,IAAK,KAAaA,EAAO,KAAK,GAAG,EAC7BA,EAAO,KAAK,OAAS,GAC9B,KAAK,IAAK,KAAa,cAAc,GAAGA,EAAO,IAAI,CAAC,EAItD,IAAMK,EAAMZ,GAAW,CACrB,UAAW,iBAAkB,SAC/B,EAAGS,EAAQ,QAAQ,EAGnB,QAAWI,KAAQN,EAAO,KAAK,IAAIO,GACjCA,IAAa,KAAO,KAAQ,KAAaA,EAC1C,EACC,QAAWC,KAAMH,EACf,KAAK,SAAS,OAAOC,EAAKE,EAAG,EAC7B,KAAK,eAAe,OAAOF,EAAKE,EAAG,EAKvC,KAAK,IAAI,UAAU,EAGnB,KAAK,MAAM,QAAS,CAAE,MAAO,GAAI,CAAC,EAClC,KAAK,MAAM,MAAM,EACjB,KAAK,MAAM,OAAQ,CAAE,MAAO,IAAK,UAAWC,GAAO,CACjD,GAAM,CAAE,KAAAC,EAAO,CAAC,CAAE,EAAID,EACtB,OAAOC,EAAK,OAAO,CAACC,EAAMC,IAAQ,CAChC,GAAGD,EACH,GAAG,KAAK,UAAUC,CAAG,CACvB,EAAG,CAAC,CAAiB,CACvB,CAAE,CAAC,EAGH,QAAWH,KAAOR,EAChB,KAAK,IAAIQ,EAAK,CAAE,MAAOA,EAAI,KAAM,CAAC,CACtC,CAAC,CACH,CAkBO,OAAOI,EAA6B,CACzC,GAAIA,EACF,GAAI,CACF,IAAMC,EAAY,KAAK,UAAUD,CAAK,EAGhCE,EAAUC,GAAiBH,CAAK,EACnC,OAAOI,GACNA,EAAO,WAAa,KAAK,MAAM,SAAS,UACzC,EAGGC,EAAS,KAAK,MAAM,OAAO,GAAGL,IAAQ,EAGzC,OAAyB,CAACM,EAAM,CAAE,IAAAC,EAAK,MAAAC,EAAO,UAAAC,CAAU,IAAM,CAC7D,IAAMC,EAAW,KAAK,UAAU,IAAIH,CAAG,EACvC,GAAI,OAAOG,GAAa,YAAa,CACnC,GAAM,CAAE,SAAAC,EAAU,MAAAC,EAAO,KAAAC,EAAM,KAAAhB,EAAM,OAAAiB,CAAO,EAAIJ,EAG1CK,EAAQC,GACZd,EACA,OAAO,KAAKO,EAAU,QAAQ,CAChC,EAGMQ,EAAQ,CAAC,CAACH,GAAS,CAAC,OAAO,OAAOC,CAAK,EAAE,MAAMG,GAAKA,CAAC,EAC3DZ,EAAK,KAAKa,EAAAC,EAAA,CACR,SAAAT,EACA,MAAOV,EAAUW,CAAK,EACtB,KAAOX,EAAUY,CAAI,GAClBhB,GAAQ,CAAE,KAAMA,EAAK,IAAII,CAAS,CAAE,GAJ/B,CAKR,MAAOO,GAAS,EAAIS,GACpB,MAAAF,CACF,EAAC,CACH,CACA,OAAOT,CACT,EAAG,CAAC,CAAC,EAGJ,KAAK,CAACzB,EAAGC,IAAMA,EAAE,MAAQD,EAAE,KAAK,EAGhC,OAAO,CAACwC,EAAOC,IAAW,CACzB,IAAMZ,EAAW,KAAK,UAAU,IAAIY,EAAO,QAAQ,EACnD,GAAI,OAAOZ,GAAa,YAAa,CACnC,IAAMH,EAAM,WAAYG,EACpBA,EAAS,OAAQ,SACjBA,EAAS,SACbW,EAAM,IAAId,EAAK,CAAC,GAAGc,EAAM,IAAId,CAAG,GAAK,CAAC,EAAGe,CAAM,CAAC,CAClD,CACA,OAAOD,CACT,EAAG,IAAI,GAA+B,EAGpCE,EACJ,GAAI,KAAK,QAAQ,YAAa,CAC5B,IAAMC,EAAS,KAAK,MAAM,MAAMC,GAAW,CACzC,QAAWrB,KAAUF,EACnBuB,EAAQ,KAAKrB,EAAO,KAAM,CACxB,OAAQ,CAAC,OAAO,EAChB,SAAU,KAAK,MAAM,SAAS,SAC9B,SAAU,KAAK,MAAM,SAAS,QAChC,CAAC,CACL,CAAC,EAGDmB,EAAcC,EAAO,OACjB,OAAO,KAAKA,EAAO,GAAG,UAAU,QAAQ,EACxC,CAAC,CACP,CAGA,OAAOJ,EAAA,CACL,MAAO,CAAC,GAAGf,EAAO,OAAO,CAAC,GACvB,OAAOkB,GAAgB,aAAe,CAAE,YAAAA,CAAY,EAI3D,OAAQG,EAAN,CACA,QAAQ,KAAK,kBAAkB1B,qCAAoC,CACrE,CAIF,MAAO,CAAE,MAAO,CAAC,CAAE,CACrB,CACF,EL3QA,IAAI2B,EAqBJ,SAAeC,GACbC,EACe,QAAAC,EAAA,sBACf,IAAIC,EAAO,UAGX,GAAI,OAAO,QAAW,aAAe,iBAAkB,O
AAQ,CAC7D,IAAMC,EAAS,SAAS,cAAiC,aAAa,EAChE,CAACC,CAAI,EAAID,EAAO,IAAI,MAAM,SAAS,EAGzCD,EAAOA,EAAK,QAAQ,KAAME,CAAI,CAChC,CAGA,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAAQN,EAAO,KAAM,CAC9B,OAAQM,EAAM,CAGZ,IAAK,KACHD,EAAQ,KAAK,GAAGH,cAAiB,EACjC,MAGF,IAAK,KACL,IAAK,KACHG,EAAQ,KAAK,GAAGH,cAAiB,EACjC,KACJ,CAGII,IAAS,MACXD,EAAQ,KAAK,GAAGH,cAAiBI,UAAa,CAClD,CAGIN,EAAO,KAAK,OAAS,GACvBK,EAAQ,KAAK,GAAGH,yBAA4B,EAG1CG,EAAQ,SACV,MAAM,cACJ,GAAGH,oCACH,GAAGG,CACL,EACJ,GAaA,SAAsBE,GACpBC,EACwB,QAAAP,EAAA,sBACxB,OAAQO,EAAQ,KAAM,CAGpB,OACE,aAAMT,GAAqBS,EAAQ,KAAK,MAAM,EAC9CV,EAAQ,IAAIW,EAAOD,EAAQ,IAAI,EACxB,CACL,MACF,EAGF,OACE,MAAO,CACL,OACA,KAAMV,EAAQA,EAAM,OAAOU,EAAQ,IAAI,EAAI,CAAE,MAAO,CAAC,CAAE,CACzD,EAGF,QACE,MAAM,IAAI,UAAU,sBAAsB,CAC9C,CACF,GAOA,KAAK,KAAO,GAAAE,QAGZ,iBAAiB,UAAiBC,GAAMV,EAAA,wBACtC,YAAY,MAAMM,GAAQI,EAAG,IAAI,CAAC,CACpC,EAAC", + "names": ["require_lunr", "__commonJSMin", "exports", "module", "lunr", "config", "builder", "global", "message", "obj", "clone", "keys", "key", "val", "docRef", "fieldName", "stringValue", "s", "n", "fieldRef", "elements", "i", "other", "object", "a", "b", "intersection", "element", "posting", "documentCount", "documentsWithTerm", "x", "str", "metadata", "fn", "t", "len", "tokens", "sliceEnd", "sliceStart", "char", "sliceLength", "tokenMetadata", "label", "isRegistered", "serialised", "pipeline", "fnName", "fns", "existingFn", "newFn", "pos", "stackLength", "memo", "j", "result", "k", "token", "index", "start", "end", "pivotPoint", "pivotIndex", "insertIdx", "position", "sumOfSquares", "elementsLength", "otherVector", "dotProduct", "aLen", "bLen", "aVal", "bVal", "output", "step2list", "step3list", "c", "v", "C", "V", "mgr0", "meq1", "mgr1", "s_v", "re_mgr0", "re_mgr1", "re_meq1", "re_s_v", "re_1a", "re2_1a", "re_1b", "re2_1b", "re_1b_2", "re2_1b_2", "re3_1b_2", "re4_1b_2", "re_1c", "re_2", "re_3", "re_4", "re2_4", "re_5", "re_5_1", "re3_5", "porterStemmer", "w", "stem", "suffix", "firstch", "re", "re2", "re3", "re4", "fp", "stopWords", "words", "stopWord", "arr", "clause", "editDistance", "root", "stack", "frame", "noEditNode", "insertionNode", "substitutionNode", "charA", "charB", "transposeNode", "node", "final", "next", "edges", "edge", "labels", "qEdges", "qLen", "nEdges", "nLen", "q", "qEdge", "nEdge", "qNode", "word", "commonPrefix", "nextNode", "downTo", "childKey", "attrs", "queryString", "query", "parser", "matchingFields", "queryVectors", "termFieldCache", "requiredMatches", "prohibitedMatches", "terms", "clauseMatches", "m", "term", "termTokenSet", "expandedTerms", "field", "expandedTerm", "termIndex", "fieldPosting", "matchingDocumentRefs", "termField", "matchingDocumentsSet", "l", "matchingDocumentRef", "matchingFieldRef", "fieldMatch", "allRequiredMatches", "allProhibitedMatches", "matchingFieldRefs", "results", "matches", "fieldVector", "score", "docMatch", "match", "invertedIndex", "fieldVectors", "ref", "serializedIndex", "serializedVectors", "serializedInvertedIndex", "tokenSetBuilder", "tuple", "attributes", "number", "doc", "fields", "extractor", "fieldTerms", "metadataKey", "fieldRefs", "numberOfFields", "accumulator", "documentsWithField", "fieldRefsLength", "termIdfCache", "fieldLength", "termFrequencies", "termsLength", "fieldBoost", "docBoost", "tf", "idf", "scoreWithPrecision", "args", "clonedMetadata", "metadataKeys", "otherMatchData", "allFields", "options", "state", "subSlices", "type", "charCode", "lexer", "lexeme", "completedClause", "errorMessage", "nextLexeme", "possibleFields", "f", "boost", "factory", "require_escape_html", 
"__commonJSMin", "exports", "module", "matchHtmlRegExp", "escapeHtml", "string", "str", "match", "escape", "html", "index", "lastIndex", "import_lunr", "obj", "data", "key", "x", "y", "nodes", "parent", "i", "node", "import_escape_html", "setupSearchDocumentMap", "docs", "documents", "parents", "doc", "path", "hash", "location", "title", "tags", "text", "escapeHTML", "parent", "__spreadValues", "import_escape_html", "setupSearchHighlighter", "config", "escape", "separator", "highlight", "_", "data", "term", "query", "match", "value", "escapeHTML", "parseSearchQuery", "value", "query", "getSearchQueryTerms", "terms", "_a", "clauses", "result", "t", "clause", "difference", "a", "b", "x", "y", "value", "Search", "config", "docs", "options", "setupSearchDocumentMap", "setupSearchHighlighter", "fns", "lang", "language", "fn", "doc", "tags", "list", "tag", "query", "highlight", "clauses", "parseSearchQuery", "clause", "groups", "item", "ref", "score", "matchData", "document", "location", "title", "text", "parent", "terms", "getSearchQueryTerms", "boost", "t", "__spreadProps", "__spreadValues", "items", "result", "suggestions", "titles", "builder", "e", "index", "setupSearchLanguages", "config", "__async", "base", "worker", "path", "scripts", "lang", "handler", "message", "Search", "lunr", "ev"] +} diff --git a/2.0.0/assets/stylesheets/extra.0d2c79a8.min.css b/2.0.0/assets/stylesheets/extra.0d2c79a8.min.css new file mode 100644 index 00000000..6e23ef17 --- /dev/null +++ b/2.0.0/assets/stylesheets/extra.0d2c79a8.min.css @@ -0,0 +1 @@ +@charset "UTF-8";@keyframes ᴴₒᴴₒᴴₒ{0%{transform:translate3d(var(--left-start),0,0)}to{transform:translate3d(var(--left-end),110vh,0)}}.ᴴₒᴴₒᴴₒ{--size:1vw;background:#fff;border:1px solid #ddd;border-radius:50%;cursor:pointer;height:var(--size);opacity:1;position:fixed;top:-5vh;transition:opacity 1s;width:var(--size);z-index:10}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):first-child{--size:0.4vw;--left-start:7vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-4s;left:24vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(2){--size:0.4vw;--left-start:9vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-2s;left:68vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(3){--size:0.4vw;--left-start:1vw;--left-end:7vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-6s;left:10vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(4){--size:0.5vw;--left-start:8vw;--left-end:10vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-8s;left:63vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(5){--size:0.5vw;--left-start:5vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-4s;left:58vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(6){--size:0.1vw;--left-start:3vw;--left-end:10vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:55vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(7){--size:0.2vw;--left-start:-2vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-7s;left:50vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(8){--size:0.3vw;--left-start:7vw;--left-end:7vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-3s;left:65vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(9){--size:0.2vw;--left-start:4vw;--left-end:5vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-2s;left:1vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(10){--size:0.3vw;--left-start:2vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite 
both;animation-delay:-10s;left:92vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(11){--size:0.2vw;--left-start:1vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-6s;left:5vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(12){--size:0.4vw;--left-start:9vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-3s;left:77vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(13){--size:0.1vw;--left-start:-3vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-7s;left:93vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(14){--size:0.5vw;--left-start:0vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-4s;left:35vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(15){--size:0.1vw;--left-start:-9vw;--left-end:4vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-6s;left:15vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(16){--size:0.1vw;--left-start:1vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-6s;left:100vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(17){--size:0.1vw;--left-start:1vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-1s;left:44vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(18){--size:0.4vw;--left-start:-9vw;--left-end:-9vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-6s;left:69vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(19){--size:0.2vw;--left-start:3vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:32vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(20){--size:0.1vw;--left-start:-7vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-8s;left:59vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(21){--size:0.2vw;--left-start:-1vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-6s;left:96vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(22){--size:0.2vw;--left-start:9vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-7s;left:78vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(23){--size:0.4vw;--left-start:5vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-10s;left:29vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(24){--size:0.1vw;--left-start:-4vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-7s;left:83vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(25){--size:0.3vw;--left-start:-1vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-8s;left:95vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(26){--size:0.5vw;--left-start:-3vw;--left-end:-6vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-8s;left:74vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(27){--size:0.5vw;--left-start:9vw;--left-end:-9vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-2s;left:94vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(28){--size:0.1vw;--left-start:0vw;--left-end:-4vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-4s;left:95vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(29){--size:0.5vw;--left-start:8vw;--left-end:4vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-3s;left:42vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(30){--size:0.4vw;--left-start:-5vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-10s;left:8vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(31){--size:0.4vw;--left-start:-7vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-4s;left:77vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(32){--size:0.4vw;--left-start:8vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite 
both;animation-delay:-3s;left:80vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(33){--size:0.2vw;--left-start:-3vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-6s;left:15vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(34){--size:0.5vw;--left-start:5vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-1s;left:91vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(35){--size:0.3vw;--left-start:-6vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-5s;left:93vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(36){--size:0.1vw;--left-start:10vw;--left-end:10vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-10s;left:59vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(37){--size:0.3vw;--left-start:4vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-8s;left:35vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(38){--size:0.5vw;--left-start:8vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-6s;left:6vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(39){--size:0.2vw;--left-start:-6vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-7s;left:58vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(40){--size:0.4vw;--left-start:3vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-4s;left:15vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(41){--size:0.1vw;--left-start:2vw;--left-end:-7vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-7s;left:24vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(42){--size:0.3vw;--left-start:8vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-9s;left:36vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(43){--size:0.2vw;--left-start:-9vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-10s;left:23vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(44){--size:0.1vw;--left-start:4vw;--left-end:-6vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-6s;left:9vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(45){--size:0.1vw;--left-start:-3vw;--left-end:-5vw;animation:ᴴₒᴴₒᴴₒ 19s linear infinite both;animation-delay:-5s;left:62vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(46){--size:0.3vw;--left-start:0vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-4s;left:1vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(47){--size:0.4vw;--left-start:8vw;--left-end:-4vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:76vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(48){--size:0.2vw;--left-start:5vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-5s;left:19vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(49){--size:0.4vw;--left-start:1vw;--left-end:-1vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-4s;left:72vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(50){--size:0.4vw;--left-start:8vw;--left-end:-6vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-10s;left:25vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(51){--size:0.1vw;--left-start:-5vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-9s;left:71vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(52){--size:0.4vw;--left-start:-4vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-7s;left:30vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(53){--size:0.5vw;--left-start:-1vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-4s;left:37vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(54){--size:0.4vw;--left-start:-1vw;--left-end:-1vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite 
both;animation-delay:-9s;left:48vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(55){--size:0.5vw;--left-start:8vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-6s;left:65vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(56){--size:0.4vw;--left-start:9vw;--left-end:5vw;animation:ᴴₒᴴₒᴴₒ 18s linear infinite both;animation-delay:-6s;left:53vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(57){--size:0.4vw;--left-start:3vw;--left-end:-9vw;animation:ᴴₒᴴₒᴴₒ 12s linear infinite both;animation-delay:-1s;left:76vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(58){--size:0.2vw;--left-start:-7vw;--left-end:0vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-9s;left:54vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(59){--size:0.1vw;--left-start:-9vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-1s;left:66vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(60){--size:0.3vw;--left-start:-6vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-7s;left:91vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(61){--size:0.4vw;--left-start:6vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-7s;left:35vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(62){--size:0.4vw;--left-start:-6vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-3s;left:86vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(63){--size:0.5vw;--left-start:-7vw;--left-end:7vw;animation:ᴴₒᴴₒᴴₒ 20s linear infinite both;animation-delay:-5s;left:86vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(64){--size:0.2vw;--left-start:-9vw;--left-end:1vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-5s;left:53vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(65){--size:0.2vw;--left-start:-2vw;--left-end:3vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-6s;left:56vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(66){--size:0.5vw;--left-start:1vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite both;animation-delay:-5s;left:58vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(67){--size:0.5vw;--left-start:2vw;--left-end:9vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-5s;left:14vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(68){--size:0.3vw;--left-start:-1vw;--left-end:6vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-1s;left:100vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(69){--size:0.2vw;--left-start:9vw;--left-end:-2vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-7s;left:8vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(70){--size:0.4vw;--left-start:-5vw;--left-end:8vw;animation:ᴴₒᴴₒᴴₒ 11s linear infinite both;animation-delay:-4s;left:82vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(71){--size:0.4vw;--left-start:3vw;--left-end:-7vw;animation:ᴴₒᴴₒᴴₒ 13s linear infinite both;animation-delay:-6s;left:26vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(72){--size:0.2vw;--left-start:-2vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 15s linear infinite both;animation-delay:-3s;left:24vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(73){--size:0.3vw;--left-start:-7vw;--left-end:-8vw;animation:ᴴₒᴴₒᴴₒ 16s linear infinite both;animation-delay:-2s;left:2vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(74){--size:0.4vw;--left-start:-9vw;--left-end:-3vw;animation:ᴴₒᴴₒᴴₒ 14s linear infinite both;animation-delay:-10s;left:94vw}.ᴴₒᴴₒᴴₒ:not(.ᴴₒᴴₒᴴₒ--gotcha):nth-child(75){--size:0.3vw;--left-start:7vw;--left-end:2vw;animation:ᴴₒᴴₒᴴₒ 17s linear infinite 
both;animation-delay:-2s;left:26vw}.ᴴₒᴴₒᴴₒ:nth-child(5n){filter:blur(2px)}.ᴴₒᴴₒᴴₒ--ᵍₒᵗ꜀ᴴₐ{opacity:0}.ᴴₒᴴₒᴴₒ__button{display:block}.ᴴₒᴴₒᴴₒ__button:after{background-color:currentcolor;content:"";display:block;height:24px;margin:0 auto;-webkit-mask-image:url('data:image/svg+xml;charset=utf-8,');mask-image:url('data:image/svg+xml;charset=utf-8,');-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:24px}.ᴴₒᴴₒᴴₒ__button[hidden]:after{-webkit-mask-image:url('data:image/svg+xml;charset=utf-8,');mask-image:url('data:image/svg+xml;charset=utf-8,')} \ No newline at end of file diff --git a/2.0.0/assets/stylesheets/extra.0d2c79a8.min.css.map b/2.0.0/assets/stylesheets/extra.0d2c79a8.min.css.map new file mode 100644 index 00000000..cd262c03 --- /dev/null +++ b/2.0.0/assets/stylesheets/extra.0d2c79a8.min.css.map @@ -0,0 +1 @@ +{"version":3,"sources":["src/assets/stylesheets/extra.scss","../../../src/assets/stylesheets/extra.scss"],"names":[],"mappings":"AA6BA,gBCpBA,CDoBA,kBACE,GACE,4CC1BF,CD4BA,GACE,8CC1BF,CACF,CDkCA,QACE,UAAA,CAOA,eAAA,CACA,qBAAA,CACA,iBAAA,CACA,cAAA,CAJA,kBAAA,CAMA,SAAA,CAVA,cAAA,CACA,QAAA,CAQA,qBAAA,CANA,iBAAA,CADA,UCzBF,CDqCI,yCACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SClCN,CD6BI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC1BN,CDqBI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SClBN,CDaI,0CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCVN,CDKI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCFN,CDHI,0CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCMN,CDXI,0CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCcN,CDnBI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsBN,CD3BI,0CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8BN,CDnCI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsCN,CD3CI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8CN,CDnDI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsDN,CD3DI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8DN,CDnEI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsEN,CD3EI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8EN,CDnFI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,UCsFN,CD3FI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8FN,CDnGI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsGN,CD3GI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8GN,CDnHI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsHN,CD3HI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8HN,CDnII,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsIN,CD3II,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SC8IN,CDnJI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsJN,CD3JI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8JN,CDnKI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsKN,CD3KI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8KN,CDnLI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsLN,CD3LI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8LN,CDnMI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,QCsMN,CD3MI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8MN,CDnNI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsNN,CD3NI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yC
AAA,CACA,mBAAA,CAFA,SC8NN,CDnOI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsON,CD3OI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8ON,CDnPI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsPN,CD3PI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8PN,CDnQI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QCsQN,CD3QI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8QN,CDnRI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsRN,CD3RI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8RN,CDnSI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsSN,CD3SI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SC8SN,CDnTI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QCsTN,CD3TI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8TN,CDnUI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QCsUN,CD3UI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8UN,CDnVI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsVN,CD3VI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8VN,CDnWI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsWN,CD3WI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8WN,CDnXI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsXN,CD3XI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8XN,CDnYI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsYN,CD3YI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8YN,CDnZI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsZN,CD3ZI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8ZN,CDnaI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsaN,CD3aI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8aN,CDnbI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsbN,CD3bI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8bN,CDncI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCscN,CD3cI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8cN,CDndI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsdN,CD3dI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8dN,CDneI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCseN,CD3eI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8eN,CDnfI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,UCsfN,CD3fI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8fN,CDngBI,2CACE,YAAA,CACA,iBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCsgBN,CD3gBI,2CACE,YAAA,CACA,gBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8gBN,CDnhBI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SCshBN,CD3hBI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,QC8hBN,CDniBI,2CACE,YAAA,CACA,iBAAA,CACA,eAAA,CAGA,yCAAA,CACA,oBAAA,CAFA,SCsiBN,CD3iBI,2CACE,YAAA,CACA,gBAAA,CACA,cAAA,CAGA,yCAAA,CACA,mBAAA,CAFA,SC8iBN,CDviBE,sBACE,gBCyiBJ,CDriBE,gBACE,SCuiBJ,CDniBE,gBACE,aCqiBJ,CDjiBE,sBAKE,6BAAA,CAKA,UAAA,CATA,aAAA,CAEA,WAAA,CACA,aAAA,CAEA,ooBAAA,CAAA,4nBAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAPA,UC2iBJ,CD/hBE,8BACE,qqBAAA,CAAA,6pBCiiBJ","file":"extra.css"} \ No newline at end of file diff --git a/2.0.0/assets/stylesheets/main.975780f9.min.css b/2.0.0/assets/stylesheets/main.975780f9.min.css new file mode 100644 index 00000000..dac48ba7 --- /dev/null +++ b/2.0.0/assets/stylesheets/main.975780f9.min.css @@ -0,0 
+1 @@ +@charset "UTF-8";html{-webkit-text-size-adjust:none;-moz-text-size-adjust:none;text-size-adjust:none;box-sizing:border-box}*,:after,:before{box-sizing:inherit}@media (prefers-reduced-motion){*,:after,:before{transition:none!important}}body{margin:0}a,button,input,label{-webkit-tap-highlight-color:transparent}a{color:inherit;text-decoration:none}hr{border:0;box-sizing:initial;display:block;height:.05rem;overflow:visible;padding:0}small{font-size:80%}sub,sup{line-height:1em}img{border-style:none}table{border-collapse:initial;border-spacing:0}td,th{font-weight:400;vertical-align:top}button{background:#0000;border:0;font-family:inherit;font-size:inherit;margin:0;padding:0}input{border:0;outline:none}:root{--md-primary-fg-color:#4051b5;--md-primary-fg-color--light:#5d6cc0;--md-primary-fg-color--dark:#303fa1;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-accent-fg-color:#526cfe;--md-accent-fg-color--transparent:#526cfe1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}:root,[data-md-color-scheme=default]{--md-default-fg-color:#000000de;--md-default-fg-color--light:#0000008a;--md-default-fg-color--lighter:#00000052;--md-default-fg-color--lightest:#00000012;--md-default-bg-color:#fff;--md-default-bg-color--light:#ffffffb3;--md-default-bg-color--lighter:#ffffff4d;--md-default-bg-color--lightest:#ffffff1f;--md-code-fg-color:#36464e;--md-code-bg-color:#f5f5f5;--md-code-hl-color:#ffff0080;--md-code-hl-number-color:#d52a2a;--md-code-hl-special-color:#db1457;--md-code-hl-function-color:#a846b9;--md-code-hl-constant-color:#6e59d9;--md-code-hl-keyword-color:#3f6ec6;--md-code-hl-string-color:#1c7d4d;--md-code-hl-name-color:var(--md-code-fg-color);--md-code-hl-operator-color:var(--md-default-fg-color--light);--md-code-hl-punctuation-color:var(--md-default-fg-color--light);--md-code-hl-comment-color:var(--md-default-fg-color--light);--md-code-hl-generic-color:var(--md-default-fg-color--light);--md-code-hl-variable-color:var(--md-default-fg-color--light);--md-typeset-color:var(--md-default-fg-color);--md-typeset-a-color:var(--md-primary-fg-color);--md-typeset-mark-color:#ffff0080;--md-typeset-del-color:#f5503d26;--md-typeset-ins-color:#0bd57026;--md-typeset-kbd-color:#fafafa;--md-typeset-kbd-accent-color:#fff;--md-typeset-kbd-border-color:#b8b8b8;--md-typeset-table-color:#0000001f;--md-admonition-fg-color:var(--md-default-fg-color);--md-admonition-bg-color:var(--md-default-bg-color);--md-footer-fg-color:#fff;--md-footer-fg-color--light:#ffffffb3;--md-footer-fg-color--lighter:#ffffff4d;--md-footer-bg-color:#000000de;--md-footer-bg-color--dark:#00000052;--md-shadow-z1:0 0.2rem 0.5rem #0000000d,0 0 0.05rem #0000001a;--md-shadow-z2:0 0.2rem 0.5rem #0000001a,0 0 0.05rem #00000040;--md-shadow-z3:0 0.2rem 0.5rem #0003,0 0 0.05rem #00000059}.md-icon 
svg{fill:currentcolor;display:block;height:1.2rem;width:1.2rem}body{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;--md-text-font-family:var(--md-text-font,_),-apple-system,BlinkMacSystemFont,Helvetica,Arial,sans-serif;--md-code-font-family:var(--md-code-font,_),SFMono-Regular,Consolas,Menlo,monospace}body,input{font-feature-settings:"kern","liga";font-family:var(--md-text-font-family)}body,code,input,kbd,pre{color:var(--md-typeset-color)}code,kbd,pre{font-feature-settings:"kern";font-family:var(--md-code-font-family)}:root{--md-typeset-table-sort-icon:url('data:image/svg+xml;charset=utf-8,');--md-typeset-table-sort-icon--asc:url('data:image/svg+xml;charset=utf-8,');--md-typeset-table-sort-icon--desc:url('data:image/svg+xml;charset=utf-8,')}.md-typeset{-webkit-print-color-adjust:exact;color-adjust:exact;font-size:.8rem;line-height:1.6}@media print{.md-typeset{font-size:.68rem}}.md-typeset blockquote,.md-typeset dl,.md-typeset figure,.md-typeset ol,.md-typeset pre,.md-typeset ul{margin-bottom:1em;margin-top:1em}.md-typeset h1{color:var(--md-default-fg-color--light);font-size:2em;line-height:1.3;margin:0 0 1.25em}.md-typeset h1,.md-typeset h2{font-weight:300;letter-spacing:-.01em}.md-typeset h2{font-size:1.5625em;line-height:1.4;margin:1.6em 0 .64em}.md-typeset h3{font-size:1.25em;font-weight:400;letter-spacing:-.01em;line-height:1.5;margin:1.6em 0 .8em}.md-typeset h2+h3{margin-top:.8em}.md-typeset h4{font-weight:700;letter-spacing:-.01em;margin:1em 0}.md-typeset h5,.md-typeset h6{color:var(--md-default-fg-color--light);font-size:.8em;font-weight:700;letter-spacing:-.01em;margin:1.25em 0}.md-typeset h5{text-transform:uppercase}.md-typeset hr{border-bottom:.05rem solid var(--md-default-fg-color--lightest);display:flow-root;margin:1.5em 0}.md-typeset a{color:var(--md-typeset-a-color);word-break:break-word}.md-typeset a,.md-typeset a:before{transition:color 125ms}.md-typeset a:focus,.md-typeset a:hover{color:var(--md-accent-fg-color)}.md-typeset a:focus code,.md-typeset a:hover code{background-color:var(--md-accent-fg-color--transparent)}.md-typeset a code{color:currentcolor;transition:background-color 125ms}.md-typeset a.focus-visible{outline-color:var(--md-accent-fg-color);outline-offset:.2rem}.md-typeset code,.md-typeset kbd,.md-typeset pre{color:var(--md-code-fg-color);direction:ltr;font-variant-ligatures:none}@media print{.md-typeset code,.md-typeset kbd,.md-typeset pre{white-space:pre-wrap}}.md-typeset code{background-color:var(--md-code-bg-color);border-radius:.1rem;-webkit-box-decoration-break:clone;box-decoration-break:clone;font-size:.85em;padding:0 .2941176471em;word-break:break-word}.md-typeset code:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}.md-typeset pre{display:flow-root;line-height:1.4;position:relative}.md-typeset pre>code{-webkit-box-decoration-break:slice;box-decoration-break:slice;box-shadow:none;display:block;margin:0;outline-color:var(--md-accent-fg-color);overflow:auto;padding:.7720588235em 1.1764705882em;scrollbar-color:var(--md-default-fg-color--lighter) #0000;scrollbar-width:thin;touch-action:auto;word-break:normal}.md-typeset pre>code:hover{scrollbar-color:var(--md-accent-fg-color) #0000}.md-typeset pre>code::-webkit-scrollbar{height:.2rem;width:.2rem}.md-typeset pre>code::-webkit-scrollbar-thumb{background-color:var(--md-default-fg-color--lighter)}.md-typeset pre>code::-webkit-scrollbar-thumb:hover{background-color:var(--md-accent-fg-color)}.md-typeset 
kbd{background-color:var(--md-typeset-kbd-color);border-radius:.1rem;box-shadow:0 .1rem 0 .05rem var(--md-typeset-kbd-border-color),0 .1rem 0 var(--md-typeset-kbd-border-color),0 -.1rem .2rem var(--md-typeset-kbd-accent-color) inset;color:var(--md-default-fg-color);display:inline-block;font-size:.75em;padding:0 .6666666667em;vertical-align:text-top;word-break:break-word}.md-typeset mark{background-color:var(--md-typeset-mark-color);-webkit-box-decoration-break:clone;box-decoration-break:clone;color:inherit;word-break:break-word}.md-typeset abbr{border-bottom:.05rem dotted var(--md-default-fg-color--light);cursor:help;text-decoration:none}@media (hover:none){.md-typeset abbr{position:relative}.md-typeset abbr[title]:-webkit-any(:focus,:hover):after{background-color:var(--md-default-fg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z3);color:var(--md-default-bg-color);content:attr(title);display:inline-block;font-size:.7rem;margin-top:2em;max-width:80%;min-width:-webkit-max-content;min-width:max-content;padding:.2rem .3rem;position:absolute;width:auto}.md-typeset abbr[title]:-moz-any(:focus,:hover):after{background-color:var(--md-default-fg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z3);color:var(--md-default-bg-color);content:attr(title);display:inline-block;font-size:.7rem;margin-top:2em;max-width:80%;min-width:-moz-max-content;min-width:max-content;padding:.2rem .3rem;position:absolute;width:auto}[dir=ltr] .md-typeset abbr[title]:-webkit-any(:focus,:hover):after{left:0}[dir=ltr] .md-typeset abbr[title]:-moz-any(:focus,:hover):after{left:0}[dir=ltr] .md-typeset abbr[title]:is(:focus,:hover):after{left:0}[dir=rtl] .md-typeset abbr[title]:-webkit-any(:focus,:hover):after{right:0}[dir=rtl] .md-typeset abbr[title]:-moz-any(:focus,:hover):after{right:0}[dir=rtl] .md-typeset abbr[title]:is(:focus,:hover):after{right:0}.md-typeset abbr[title]:is(:focus,:hover):after{background-color:var(--md-default-fg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z3);color:var(--md-default-bg-color);content:attr(title);display:inline-block;font-size:.7rem;margin-top:2em;max-width:80%;min-width:-webkit-max-content;min-width:-moz-max-content;min-width:max-content;padding:.2rem .3rem;position:absolute;width:auto}}.md-typeset small{opacity:.75}[dir=ltr] .md-typeset sub,[dir=ltr] .md-typeset sup{margin-left:.078125em}[dir=rtl] .md-typeset sub,[dir=rtl] .md-typeset sup{margin-right:.078125em}[dir=ltr] .md-typeset blockquote{padding-left:.6rem}[dir=rtl] .md-typeset blockquote{padding-right:.6rem}[dir=ltr] .md-typeset blockquote{border-left:.2rem solid var(--md-default-fg-color--lighter)}[dir=rtl] .md-typeset blockquote{border-right:.2rem solid var(--md-default-fg-color--lighter)}.md-typeset blockquote{color:var(--md-default-fg-color--light);margin-left:0;margin-right:0}.md-typeset ul{list-style-type:disc}[dir=ltr] .md-typeset ol,[dir=ltr] .md-typeset ul{margin-left:.625em}[dir=rtl] .md-typeset ol,[dir=rtl] .md-typeset ul{margin-right:.625em}.md-typeset ol,.md-typeset ul{padding:0}.md-typeset ol:not([hidden]),.md-typeset ul:not([hidden]){display:flow-root}.md-typeset ol ol,.md-typeset ul ol{list-style-type:lower-alpha}.md-typeset ol ol ol,.md-typeset ul ol ol{list-style-type:lower-roman}[dir=ltr] .md-typeset ol li,[dir=ltr] .md-typeset ul li{margin-left:1.25em}[dir=rtl] .md-typeset ol li,[dir=rtl] .md-typeset ul li{margin-right:1.25em}.md-typeset ol li,.md-typeset ul li{margin-bottom:.5em}.md-typeset ol li blockquote,.md-typeset ol li p,.md-typeset ul li blockquote,.md-typeset ul li 
p{margin:.5em 0}.md-typeset ol li:last-child,.md-typeset ul li:last-child{margin-bottom:0}.md-typeset ol li :-webkit-any(ul,ol),.md-typeset ul li :-webkit-any(ul,ol){margin-bottom:.5em;margin-top:.5em}.md-typeset ol li :-moz-any(ul,ol),.md-typeset ul li :-moz-any(ul,ol){margin-bottom:.5em;margin-top:.5em}[dir=ltr] .md-typeset ol li :-webkit-any(ul,ol),[dir=ltr] .md-typeset ul li :-webkit-any(ul,ol){margin-left:.625em}[dir=ltr] .md-typeset ol li :-moz-any(ul,ol),[dir=ltr] .md-typeset ul li :-moz-any(ul,ol){margin-left:.625em}[dir=ltr] .md-typeset ol li :is(ul,ol),[dir=ltr] .md-typeset ul li :is(ul,ol){margin-left:.625em}[dir=rtl] .md-typeset ol li :-webkit-any(ul,ol),[dir=rtl] .md-typeset ul li :-webkit-any(ul,ol){margin-right:.625em}[dir=rtl] .md-typeset ol li :-moz-any(ul,ol),[dir=rtl] .md-typeset ul li :-moz-any(ul,ol){margin-right:.625em}[dir=rtl] .md-typeset ol li :is(ul,ol),[dir=rtl] .md-typeset ul li :is(ul,ol){margin-right:.625em}.md-typeset ol li :is(ul,ol),.md-typeset ul li :is(ul,ol){margin-bottom:.5em;margin-top:.5em}[dir=ltr] .md-typeset dd{margin-left:1.875em}[dir=rtl] .md-typeset dd{margin-right:1.875em}.md-typeset dd{margin-bottom:1.5em;margin-top:1em}.md-typeset img,.md-typeset svg,.md-typeset video{height:auto;max-width:100%}.md-typeset img[align=left]{margin:1em 1em 1em 0}.md-typeset img[align=right]{margin:1em 0 1em 1em}.md-typeset img[align]:only-child{margin-top:0}.md-typeset img[src$="#gh-dark-mode-only"],.md-typeset img[src$="#only-dark"]{display:none}.md-typeset figure{display:flow-root;margin:1em auto;max-width:100%;text-align:center;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.md-typeset figure img{display:block}.md-typeset figcaption{font-style:italic;margin:1em auto;max-width:24rem}.md-typeset iframe{max-width:100%}.md-typeset table:not([class]){background-color:var(--md-default-bg-color);border:.05rem solid var(--md-typeset-table-color);border-radius:.1rem;display:inline-block;font-size:.64rem;max-width:100%;overflow:auto;touch-action:auto}@media print{.md-typeset table:not([class]){display:table}}.md-typeset table:not([class])+*{margin-top:1.5em}.md-typeset table:not([class]) :-webkit-any(th,td)>:first-child{margin-top:0}.md-typeset table:not([class]) :-moz-any(th,td)>:first-child{margin-top:0}.md-typeset table:not([class]) :is(th,td)>:first-child{margin-top:0}.md-typeset table:not([class]) :-webkit-any(th,td)>:last-child{margin-bottom:0}.md-typeset table:not([class]) :-moz-any(th,td)>:last-child{margin-bottom:0}.md-typeset table:not([class]) :is(th,td)>:last-child{margin-bottom:0}.md-typeset table:not([class]) :-webkit-any(th,td):not([align]){text-align:left}.md-typeset table:not([class]) :-moz-any(th,td):not([align]){text-align:left}.md-typeset table:not([class]) :is(th,td):not([align]){text-align:left}[dir=rtl] .md-typeset table:not([class]) :-webkit-any(th,td):not([align]){text-align:right}[dir=rtl] .md-typeset table:not([class]) :-moz-any(th,td):not([align]){text-align:right}[dir=rtl] .md-typeset table:not([class]) :is(th,td):not([align]){text-align:right}.md-typeset table:not([class]) th{font-weight:700;min-width:5rem;padding:.9375em 1.25em;vertical-align:top}.md-typeset table:not([class]) td{border-top:.05rem solid var(--md-typeset-table-color);padding:.9375em 1.25em;vertical-align:top}.md-typeset table:not([class]) tbody tr{transition:background-color 125ms}.md-typeset table:not([class]) tbody tr:hover{background-color:rgba(0,0,0,.035);box-shadow:0 .05rem 0 var(--md-default-bg-color) inset}.md-typeset table:not([class]) 
a{word-break:normal}.md-typeset table th[role=columnheader]{cursor:pointer}[dir=ltr] .md-typeset table th[role=columnheader]:after{margin-left:.5em}[dir=rtl] .md-typeset table th[role=columnheader]:after{margin-right:.5em}.md-typeset table th[role=columnheader]:after{content:"";display:inline-block;height:1.2em;-webkit-mask-image:var(--md-typeset-table-sort-icon);mask-image:var(--md-typeset-table-sort-icon);-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;transition:background-color 125ms;vertical-align:text-bottom;width:1.2em}.md-typeset table th[role=columnheader]:hover:after{background-color:var(--md-default-fg-color--lighter)}.md-typeset table th[role=columnheader][aria-sort=ascending]:after{background-color:var(--md-default-fg-color--light);-webkit-mask-image:var(--md-typeset-table-sort-icon--asc);mask-image:var(--md-typeset-table-sort-icon--asc)}.md-typeset table th[role=columnheader][aria-sort=descending]:after{background-color:var(--md-default-fg-color--light);-webkit-mask-image:var(--md-typeset-table-sort-icon--desc);mask-image:var(--md-typeset-table-sort-icon--desc)}.md-typeset__scrollwrap{margin:1em -.8rem;overflow-x:auto;touch-action:auto}.md-typeset__table{display:inline-block;margin-bottom:.5em;padding:0 .8rem}@media print{.md-typeset__table{display:block}}html .md-typeset__table table{display:table;margin:0;overflow:hidden;width:100%}@media screen and (max-width:44.9375em){.md-content__inner>pre{margin:1em -.8rem}.md-content__inner>pre code{border-radius:0}}.md-banner{background-color:var(--md-footer-bg-color);color:var(--md-footer-fg-color);overflow:auto}@media print{.md-banner{display:none}}.md-banner--warning{background:var(--md-typeset-mark-color);color:var(--md-default-fg-color)}.md-banner__inner{font-size:.7rem;margin:.6rem auto;padding:0 .8rem}[dir=ltr] .md-banner__button{float:right}[dir=rtl] .md-banner__button{float:left}.md-banner__button{color:inherit;cursor:pointer;transition:opacity .25s}.md-banner__button:hover{opacity:.7}html{font-size:125%;height:100%;overflow-x:hidden}@media screen and (min-width:100em){html{font-size:137.5%}}@media screen and (min-width:125em){html{font-size:150%}}body{background-color:var(--md-default-bg-color);display:flex;flex-direction:column;font-size:.5rem;min-height:100%;position:relative;width:100%}@media print{body{display:block}}@media screen and (max-width:59.9375em){body[data-md-scrolllock]{position:fixed}}.md-grid{margin-left:auto;margin-right:auto;max-width:61rem}.md-container{display:flex;flex-direction:column;flex-grow:1}@media print{.md-container{display:block}}.md-main{flex-grow:1}.md-main__inner{display:flex;height:100%;margin-top:1.5rem}.md-ellipsis{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.md-toggle{display:none}.md-option{height:0;opacity:0;position:absolute;width:0}.md-option:checked+label:not([hidden]){display:block}.md-option.focus-visible+label{outline-color:var(--md-accent-fg-color);outline-style:auto}.md-skip{background-color:var(--md-default-fg-color);border-radius:.1rem;color:var(--md-default-bg-color);font-size:.64rem;margin:.5rem;opacity:0;outline-color:var(--md-accent-fg-color);padding:.3rem .5rem;position:fixed;transform:translateY(.4rem);z-index:-1}.md-skip:focus{opacity:1;transform:translateY(0);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity 175ms 
75ms;z-index:10}@page{margin:25mm}:root{--md-clipboard-icon:url('data:image/svg+xml;charset=utf-8,')}.md-clipboard{border-radius:.1rem;color:var(--md-default-fg-color--lightest);cursor:pointer;height:1.5em;outline-color:var(--md-accent-fg-color);outline-offset:.1rem;position:absolute;right:.5em;top:.5em;transition:color .25s;width:1.5em;z-index:1}@media print{.md-clipboard{display:none}}.md-clipboard:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}:hover>.md-clipboard{color:var(--md-default-fg-color--light)}.md-clipboard:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-clipboard:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-clipboard:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-clipboard:after{background-color:currentcolor;content:"";display:block;height:1.125em;margin:0 auto;-webkit-mask-image:var(--md-clipboard-icon);mask-image:var(--md-clipboard-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:1.125em}.md-clipboard--inline{cursor:pointer}.md-clipboard--inline code{transition:color .25s,background-color .25s}.md-clipboard--inline:-webkit-any(:focus,:hover) code{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-clipboard--inline:-moz-any(:focus,:hover) code{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-clipboard--inline:is(:focus,:hover) code{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}@keyframes consent{0%{opacity:0;transform:translateY(100%)}to{opacity:1;transform:translateY(0)}}@keyframes overlay{0%{opacity:0}to{opacity:1}}.md-consent__overlay{animation:overlay .25s both;-webkit-backdrop-filter:blur(.1rem);backdrop-filter:blur(.1rem);background-color:#0000008a;height:100%;opacity:1;position:fixed;top:0;width:100%;z-index:5}.md-consent__inner{animation:consent .5s cubic-bezier(.1,.7,.1,1) both;background-color:var(--md-default-bg-color);border:0;border-radius:.1rem;bottom:0;box-shadow:0 0 .2rem #0000001a,0 .2rem .4rem #0003;max-height:100%;overflow:auto;padding:0;position:fixed;width:100%;z-index:5}.md-consent__form{padding:.8rem}.md-consent__settings{display:none;margin:1em 0}input:checked+.md-consent__settings{display:block}.md-consent__controls{margin-bottom:.8rem}.md-typeset .md-consent__controls .md-button{display:inline}@media screen and (max-width:44.9375em){.md-typeset .md-consent__controls .md-button{display:block;margin-top:.4rem;text-align:center;width:100%}}.md-consent label{cursor:pointer}.md-content{flex-grow:1;min-width:0}.md-content__inner{margin:0 .8rem 1.2rem;padding-top:.6rem}@media screen and (min-width:76.25em){[dir=ltr] .md-sidebar--primary:not([hidden])~.md-content>.md-content__inner{margin-left:1.2rem}[dir=ltr] .md-sidebar--secondary:not([hidden])~.md-content>.md-content__inner,[dir=rtl] .md-sidebar--primary:not([hidden])~.md-content>.md-content__inner{margin-right:1.2rem}[dir=rtl] .md-sidebar--secondary:not([hidden])~.md-content>.md-content__inner{margin-left:1.2rem}}.md-content__inner:before{content:"";display:block;height:.4rem}.md-content__inner>:last-child{margin-bottom:0}[dir=ltr] .md-content__button{float:right}[dir=rtl] .md-content__button{float:left}[dir=ltr] .md-content__button{margin-left:.4rem}[dir=rtl] .md-content__button{margin-right:.4rem}.md-content__button{margin:.4rem 0;padding:0}@media print{.md-content__button{display:none}}.md-typeset 
.md-content__button{color:var(--md-default-fg-color--lighter)}.md-content__button svg{display:inline;vertical-align:top}[dir=rtl] .md-content__button svg{transform:scaleX(-1)}[dir=ltr] .md-dialog{right:.8rem}[dir=rtl] .md-dialog{left:.8rem}.md-dialog{background-color:var(--md-default-fg-color);border-radius:.1rem;bottom:.8rem;box-shadow:var(--md-shadow-z3);min-width:11.1rem;opacity:0;padding:.4rem .6rem;pointer-events:none;position:fixed;transform:translateY(100%);transition:transform 0ms .4s,opacity .4s;z-index:4}@media print{.md-dialog{display:none}}.md-dialog--active{opacity:1;pointer-events:auto;transform:translateY(0);transition:transform .4s cubic-bezier(.075,.85,.175,1),opacity .4s}.md-dialog__inner{color:var(--md-default-bg-color);font-size:.7rem}.md-feedback{margin:2em 0 1em;text-align:center}.md-feedback fieldset{border:none;margin:0;padding:0}.md-feedback__title{font-weight:700;margin:1em auto}.md-feedback__inner{position:relative}.md-feedback__list{align-content:baseline;display:flex;flex-wrap:wrap;justify-content:center;position:relative}.md-feedback__list:hover .md-icon:not(:disabled){color:var(--md-default-fg-color--lighter)}:disabled .md-feedback__list{min-height:1.8rem}.md-feedback__icon{color:var(--md-default-fg-color--light);cursor:pointer;flex-shrink:0;margin:0 .1rem;transition:color 125ms}.md-feedback__icon:not(:disabled).md-icon:hover{color:var(--md-accent-fg-color)}.md-feedback__icon:disabled{color:var(--md-default-fg-color--lightest);pointer-events:none}.md-feedback__note{opacity:0;position:relative;transform:translateY(.4rem);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s}.md-feedback__note>*{margin:0 auto;max-width:16rem}:disabled .md-feedback__note{opacity:1;transform:translateY(0)}.md-footer{background-color:var(--md-footer-bg-color);color:var(--md-footer-fg-color)}@media print{.md-footer{display:none}}.md-footer__inner{justify-content:space-between;overflow:auto;padding:.2rem}.md-footer__inner:not([hidden]){display:flex}.md-footer__link{display:flex;flex-grow:0.01;outline-color:var(--md-accent-fg-color);overflow:hidden;padding-bottom:.4rem;padding-top:1.4rem;transition:opacity .25s}.md-footer__link:-webkit-any(:focus,:hover){opacity:.7}.md-footer__link:-moz-any(:focus,:hover){opacity:.7}.md-footer__link:is(:focus,:hover){opacity:.7}[dir=rtl] .md-footer__link svg{transform:scaleX(-1)}@media screen and (max-width:44.9375em){.md-footer__link--prev .md-footer__title{display:none}}[dir=ltr] .md-footer__link--next{margin-left:auto}[dir=rtl] .md-footer__link--next{margin-right:auto}.md-footer__link--next{text-align:right}[dir=rtl] .md-footer__link--next{text-align:left}.md-footer__title{flex-grow:1;font-size:.9rem;line-height:2.4rem;max-width:calc(100% - 2.4rem);padding:0 1rem;position:relative;white-space:nowrap}.md-footer__button{margin:.2rem;padding:.4rem}.md-footer__direction{font-size:.64rem;left:0;margin-top:-1rem;opacity:.7;padding:0 1rem;position:absolute;right:0}.md-footer-meta{background-color:var(--md-footer-bg-color--dark)}.md-footer-meta__inner{display:flex;flex-wrap:wrap;justify-content:space-between;padding:.2rem}html .md-footer-meta.md-typeset a{color:var(--md-footer-fg-color--light)}html .md-footer-meta.md-typeset a:-webkit-any(:focus,:hover){color:var(--md-footer-fg-color)}html .md-footer-meta.md-typeset a:-moz-any(:focus,:hover){color:var(--md-footer-fg-color)}html .md-footer-meta.md-typeset a:is(:focus,:hover){color:var(--md-footer-fg-color)}.md-copyright{color:var(--md-footer-fg-color--lighter);font-size:.64rem;margin:auto 
.6rem;padding:.4rem 0;width:100%}@media screen and (min-width:45em){.md-copyright{width:auto}}.md-copyright__highlight{color:var(--md-footer-fg-color--light)}.md-social{margin:0 .4rem;padding:.2rem 0 .6rem}@media screen and (min-width:45em){.md-social{padding:.6rem 0}}.md-social__link{display:inline-block;height:1.6rem;text-align:center;width:1.6rem}.md-social__link:before{line-height:1.9}.md-social__link svg{fill:currentcolor;max-height:.8rem;vertical-align:-25%}.md-typeset .md-button{border:.1rem solid;border-radius:.1rem;color:var(--md-primary-fg-color);cursor:pointer;display:inline-block;font-weight:700;padding:.625em 2em;transition:color 125ms,background-color 125ms,border-color 125ms}.md-typeset .md-button--primary{background-color:var(--md-primary-fg-color);border-color:var(--md-primary-fg-color);color:var(--md-primary-bg-color)}.md-typeset .md-button:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color);border-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-typeset .md-button:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color);border-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-typeset .md-button:is(:focus,:hover){background-color:var(--md-accent-fg-color);border-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}[dir=ltr] .md-typeset .md-input{border-top-left-radius:.1rem}[dir=ltr] .md-typeset .md-input,[dir=rtl] .md-typeset .md-input{border-top-right-radius:.1rem}[dir=rtl] .md-typeset .md-input{border-top-left-radius:.1rem}.md-typeset .md-input{border-bottom:.1rem solid var(--md-default-fg-color--lighter);box-shadow:var(--md-shadow-z1);font-size:.8rem;height:1.8rem;padding:0 .6rem;transition:border .25s,box-shadow .25s}.md-typeset .md-input:-webkit-any(:focus,:hover){border-bottom-color:var(--md-accent-fg-color);box-shadow:var(--md-shadow-z2)}.md-typeset .md-input:-moz-any(:focus,:hover){border-bottom-color:var(--md-accent-fg-color);box-shadow:var(--md-shadow-z2)}.md-typeset .md-input:is(:focus,:hover){border-bottom-color:var(--md-accent-fg-color);box-shadow:var(--md-shadow-z2)}.md-typeset .md-input--stretch{width:100%}.md-header{background-color:var(--md-primary-fg-color);box-shadow:0 0 .2rem #0000,0 .2rem .4rem #0000;color:var(--md-primary-bg-color);display:block;left:0;position:-webkit-sticky;position:sticky;right:0;top:0;z-index:4}@media print{.md-header{display:none}}.md-header[hidden]{transform:translateY(-100%);transition:transform .25s cubic-bezier(.8,0,.6,1),box-shadow .25s}.md-header--shadow{box-shadow:0 0 .2rem #0000001a,0 .2rem .4rem #0003;transition:transform .25s cubic-bezier(.1,.7,.1,1),box-shadow .25s}.md-header__inner{align-items:center;display:flex;padding:0 .2rem}.md-header__button{color:currentcolor;cursor:pointer;margin:.2rem;outline-color:var(--md-accent-fg-color);padding:.4rem;position:relative;transition:opacity .25s;vertical-align:middle;z-index:1}.md-header__button:hover{opacity:.7}.md-header__button:not([hidden]){display:inline-block}.md-header__button:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}.md-header__button.md-logo{margin:.2rem;padding:.4rem}@media screen and (max-width:76.1875em){.md-header__button.md-logo{display:none}}.md-header__button.md-logo :-webkit-any(img,svg){fill:currentcolor;display:block;height:1.2rem;width:auto}.md-header__button.md-logo :-moz-any(img,svg){fill:currentcolor;display:block;height:1.2rem;width:auto}.md-header__button.md-logo :is(img,svg){fill:currentcolor;display:block;height:1.2rem;width:auto}@media 
screen and (min-width:60em){.md-header__button[for=__search]{display:none}}.no-js .md-header__button[for=__search]{display:none}[dir=rtl] .md-header__button[for=__search] svg{transform:scaleX(-1)}@media screen and (min-width:76.25em){.md-header__button[for=__drawer]{display:none}}.md-header__topic{display:flex;max-width:100%;position:absolute;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s;white-space:nowrap}.md-header__topic+.md-header__topic{opacity:0;pointer-events:none;transform:translateX(1.25rem);transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s;z-index:-1}[dir=rtl] .md-header__topic+.md-header__topic{transform:translateX(-1.25rem)}.md-header__topic:first-child{font-weight:700}[dir=ltr] .md-header__title{margin-right:.4rem}[dir=rtl] .md-header__title{margin-left:.4rem}[dir=ltr] .md-header__title{margin-left:1rem}[dir=rtl] .md-header__title{margin-right:1rem}.md-header__title{flex-grow:1;font-size:.9rem;height:2.4rem;line-height:2.4rem}.md-header__title--active .md-header__topic{opacity:0;pointer-events:none;transform:translateX(-1.25rem);transition:transform .4s cubic-bezier(1,.7,.1,.1),opacity .15s;z-index:-1}[dir=rtl] .md-header__title--active .md-header__topic{transform:translateX(1.25rem)}.md-header__title--active .md-header__topic+.md-header__topic{opacity:1;pointer-events:auto;transform:translateX(0);transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .15s;z-index:0}.md-header__title>.md-header__ellipsis{height:100%;position:relative;width:100%}.md-header__option{display:flex;flex-shrink:0;max-width:100%;transition:max-width 0ms .25s,opacity .25s .25s;white-space:nowrap}[data-md-toggle=search]:checked~.md-header .md-header__option{max-width:0;opacity:0;transition:max-width 0ms,opacity 0ms}.md-header__source{display:none}@media screen and (min-width:60em){[dir=ltr] .md-header__source{margin-left:1rem}[dir=rtl] .md-header__source{margin-right:1rem}.md-header__source{display:block;max-width:11.7rem;width:11.7rem}}@media screen and (min-width:76.25em){[dir=ltr] .md-header__source{margin-left:1.4rem}[dir=rtl] .md-header__source{margin-right:1.4rem}}:root{--md-nav-icon--prev:url('data:image/svg+xml;charset=utf-8,');--md-nav-icon--next:url('data:image/svg+xml;charset=utf-8,');--md-toc-icon:url('data:image/svg+xml;charset=utf-8,')}.md-nav{font-size:.7rem;line-height:1.3}.md-nav__title{display:block;font-weight:700;overflow:hidden;padding:0 .6rem;text-overflow:ellipsis}.md-nav__title .md-nav__button{display:none}.md-nav__title .md-nav__button img{height:100%;width:auto}.md-nav__title .md-nav__button.md-logo :-webkit-any(img,svg){fill:currentcolor;display:block;height:2.4rem;max-width:100%;object-fit:contain;width:auto}.md-nav__title .md-nav__button.md-logo :-moz-any(img,svg){fill:currentcolor;display:block;height:2.4rem;max-width:100%;object-fit:contain;width:auto}.md-nav__title .md-nav__button.md-logo :is(img,svg){fill:currentcolor;display:block;height:2.4rem;max-width:100%;object-fit:contain;width:auto}.md-nav__list{list-style:none;margin:0;padding:0}.md-nav__item{padding:0 .6rem}[dir=ltr] .md-nav__item .md-nav__item{padding-right:0}[dir=rtl] .md-nav__item .md-nav__item{padding-left:0}.md-nav__link{align-items:center;cursor:pointer;display:flex;justify-content:space-between;margin-top:.625em;overflow:hidden;scroll-snap-align:start;text-overflow:ellipsis;transition:color 125ms}.md-nav__link--passed{color:var(--md-default-fg-color--light)}.md-nav__item .md-nav__link--active{color:var(--md-typeset-a-color)}.md-nav__item .md-nav__link--index 
[href]{width:100%}.md-nav__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav__link.focus-visible{outline-color:var(--md-accent-fg-color);outline-offset:.2rem}.md-nav--primary .md-nav__link[for=__toc]{display:none}.md-nav--primary .md-nav__link[for=__toc] .md-icon:after{background-color:currentcolor;display:block;height:100%;-webkit-mask-image:var(--md-toc-icon);mask-image:var(--md-toc-icon);width:100%}.md-nav--primary .md-nav__link[for=__toc]~.md-nav{display:none}.md-nav__link>*{cursor:pointer;display:flex}.md-nav__icon{flex-shrink:0}.md-nav__source{display:none}@media screen and (max-width:76.1875em){.md-nav--primary,.md-nav--primary .md-nav{background-color:var(--md-default-bg-color);display:flex;flex-direction:column;height:100%;left:0;position:absolute;right:0;top:0;z-index:1}.md-nav--primary :-webkit-any(.md-nav__title,.md-nav__item){font-size:.8rem;line-height:1.5}.md-nav--primary :-moz-any(.md-nav__title,.md-nav__item){font-size:.8rem;line-height:1.5}.md-nav--primary :is(.md-nav__title,.md-nav__item){font-size:.8rem;line-height:1.5}.md-nav--primary .md-nav__title{background-color:var(--md-default-fg-color--lightest);color:var(--md-default-fg-color--light);cursor:pointer;height:5.6rem;line-height:2.4rem;padding:3rem .8rem .2rem;position:relative;white-space:nowrap}[dir=ltr] .md-nav--primary .md-nav__title .md-nav__icon{left:.4rem}[dir=rtl] .md-nav--primary .md-nav__title .md-nav__icon{right:.4rem}.md-nav--primary .md-nav__title .md-nav__icon{display:block;height:1.2rem;margin:.2rem;position:absolute;top:.4rem;width:1.2rem}.md-nav--primary .md-nav__title .md-nav__icon:after{background-color:currentcolor;content:"";display:block;height:100%;-webkit-mask-image:var(--md-nav-icon--prev);mask-image:var(--md-nav-icon--prev);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:100%}.md-nav--primary .md-nav__title~.md-nav__list{background-color:var(--md-default-bg-color);box-shadow:0 .05rem 0 var(--md-default-fg-color--lightest) inset;overflow-y:auto;scroll-snap-type:y mandatory;touch-action:pan-y}.md-nav--primary .md-nav__title~.md-nav__list>:first-child{border-top:0}.md-nav--primary .md-nav__title[for=__drawer]{background-color:var(--md-primary-fg-color);color:var(--md-primary-bg-color);font-weight:700}.md-nav--primary .md-nav__title .md-logo{display:block;left:.2rem;margin:.2rem;padding:.4rem;position:absolute;right:.2rem;top:.2rem}.md-nav--primary .md-nav__list{flex:1}.md-nav--primary .md-nav__item{border-top:.05rem solid var(--md-default-fg-color--lightest);padding:0}.md-nav--primary .md-nav__item--active>.md-nav__link{color:var(--md-typeset-a-color)}.md-nav--primary .md-nav__item--active>.md-nav__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav--primary .md-nav__item--active>.md-nav__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav--primary .md-nav__item--active>.md-nav__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-nav--primary .md-nav__link{margin-top:0;padding:.6rem .8rem}[dir=ltr] .md-nav--primary .md-nav__link .md-nav__icon{margin-right:-.2rem}[dir=rtl] .md-nav--primary .md-nav__link .md-nav__icon{margin-left:-.2rem}.md-nav--primary .md-nav__link .md-nav__icon{font-size:1.2rem;height:1.2rem;width:1.2rem}.md-nav--primary .md-nav__link 
.md-nav__icon:after{background-color:currentcolor;content:"";display:block;height:100%;-webkit-mask-image:var(--md-nav-icon--next);mask-image:var(--md-nav-icon--next);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:100%}[dir=rtl] .md-nav--primary .md-nav__icon:after{transform:scale(-1)}.md-nav--primary .md-nav--secondary .md-nav{background-color:initial;position:static}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav__link{padding-left:1.4rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav__link{padding-right:1.4rem}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link{padding-left:2rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link{padding-right:2rem}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link{padding-left:2.6rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link{padding-right:2.6rem}[dir=ltr] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link{padding-left:3.2rem}[dir=rtl] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link{padding-right:3.2rem}.md-nav--secondary{background-color:initial}.md-nav__toggle~.md-nav{display:flex;opacity:0;transform:translateX(100%);transition:transform .25s cubic-bezier(.8,0,.6,1),opacity 125ms 50ms}[dir=rtl] .md-nav__toggle~.md-nav{transform:translateX(-100%)}.md-nav__toggle:checked~.md-nav{opacity:1;transform:translateX(0);transition:transform .25s cubic-bezier(.4,0,.2,1),opacity 125ms 125ms}.md-nav__toggle:checked~.md-nav>.md-nav__list{-webkit-backface-visibility:hidden;backface-visibility:hidden}}@media screen and (max-width:59.9375em){.md-nav--primary .md-nav__link[for=__toc]{display:flex}.md-nav--primary .md-nav__link[for=__toc] .md-icon:after{content:""}.md-nav--primary .md-nav__link[for=__toc]+.md-nav__link{display:none}.md-nav--primary .md-nav__link[for=__toc]~.md-nav{display:flex}.md-nav__source{background-color:var(--md-primary-fg-color--dark);color:var(--md-primary-bg-color);display:block;padding:0 .2rem}}@media screen and (min-width:60em) and (max-width:76.1875em){.md-nav--integrated .md-nav__link[for=__toc]{display:flex}.md-nav--integrated .md-nav__link[for=__toc] .md-icon:after{content:""}.md-nav--integrated .md-nav__link[for=__toc]+.md-nav__link{display:none}.md-nav--integrated .md-nav__link[for=__toc]~.md-nav{display:flex}}@media screen and (min-width:60em){.md-nav--secondary .md-nav__title{background:var(--md-default-bg-color);box-shadow:0 0 .4rem .4rem var(--md-default-bg-color);position:-webkit-sticky;position:sticky;top:0;z-index:1}.md-nav--secondary .md-nav__title[for=__toc]{scroll-snap-align:start}.md-nav--secondary .md-nav__title .md-nav__icon{display:none}}@media screen and (min-width:76.25em){.md-nav{transition:max-height .25s cubic-bezier(.86,0,.07,1)}.md-nav--primary .md-nav__title{background:var(--md-default-bg-color);box-shadow:0 0 .4rem .4rem var(--md-default-bg-color);position:-webkit-sticky;position:sticky;top:0;z-index:1}.md-nav--primary .md-nav__title[for=__drawer]{scroll-snap-align:start}.md-nav--primary .md-nav__title 
.md-nav__icon,.md-nav__toggle~.md-nav{display:none}.md-nav__toggle:-webkit-any(:checked,:indeterminate)~.md-nav{display:block}.md-nav__toggle:-moz-any(:checked,:indeterminate)~.md-nav{display:block}.md-nav__toggle:is(:checked,:indeterminate)~.md-nav{display:block}.md-nav__item--nested>.md-nav>.md-nav__title{display:none}.md-nav__item--section{display:block;margin:1.25em 0}.md-nav__item--section:last-child{margin-bottom:0}.md-nav__item--section>.md-nav__link{font-weight:700;pointer-events:none}.md-nav__item--section>.md-nav__link--index [href]{pointer-events:auto}.md-nav__item--section>.md-nav__link .md-nav__icon{display:none}.md-nav__item--section>.md-nav{display:block}.md-nav__item--section>.md-nav>.md-nav__list>.md-nav__item{padding:0}.md-nav__icon{border-radius:100%;height:.9rem;transition:background-color .25s,transform .25s;width:.9rem}[dir=rtl] .md-nav__icon{transform:rotate(180deg)}.md-nav__icon:hover{background-color:var(--md-accent-fg-color--transparent)}.md-nav__icon:after{background-color:currentcolor;content:"";display:inline-block;height:100%;-webkit-mask-image:var(--md-nav-icon--next);mask-image:var(--md-nav-icon--next);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;vertical-align:-.1rem;width:100%}.md-nav__item--nested .md-nav__toggle:checked~.md-nav__link .md-nav__icon,.md-nav__item--nested .md-nav__toggle:indeterminate~.md-nav__link .md-nav__icon{transform:rotate(90deg)}.md-nav--lifted>.md-nav__list>.md-nav__item,.md-nav--lifted>.md-nav__list>.md-nav__item--nested,.md-nav--lifted>.md-nav__title{display:none}.md-nav--lifted>.md-nav__list>.md-nav__item--active{display:block;padding:0}.md-nav--lifted>.md-nav__list>.md-nav__item--active>.md-nav__link{background:var(--md-default-bg-color);box-shadow:0 0 .4rem .4rem var(--md-default-bg-color);font-weight:700;margin-top:0;padding:0 .6rem;position:-webkit-sticky;position:sticky;top:0;z-index:1}.md-nav--lifted>.md-nav__list>.md-nav__item--active>.md-nav__link:not(.md-nav__link--index){pointer-events:none}.md-nav--lifted>.md-nav__list>.md-nav__item--active>.md-nav__link .md-nav__icon{display:none}.md-nav--lifted .md-nav[data-md-level="1"]{display:block}[dir=ltr] .md-nav--lifted .md-nav[data-md-level="1"]>.md-nav__list>.md-nav__item{padding-right:.6rem}[dir=rtl] .md-nav--lifted .md-nav[data-md-level="1"]>.md-nav__list>.md-nav__item{padding-left:.6rem}.md-nav--integrated>.md-nav__list>.md-nav__item--active:not(.md-nav__item--nested){padding:0 .6rem}.md-nav--integrated>.md-nav__list>.md-nav__item--active:not(.md-nav__item--nested)>.md-nav__link{padding:0}[dir=ltr] .md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary{border-left:.05rem solid var(--md-primary-fg-color)}[dir=rtl] .md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary{border-right:.05rem solid var(--md-primary-fg-color)}.md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary{display:block;margin-bottom:1.25em}.md-nav--integrated>.md-nav__list>.md-nav__item--active .md-nav--secondary>.md-nav__title{display:none}}:root{--md-search-result-icon:url('data:image/svg+xml;charset=utf-8,')}.md-search{position:relative}@media screen and (min-width:60em){.md-search{padding:.2rem 0}}.no-js .md-search{display:none}.md-search__overlay{opacity:0;z-index:1}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__overlay{left:-2.2rem}[dir=rtl] 
.md-search__overlay{right:-2.2rem}.md-search__overlay{background-color:var(--md-default-bg-color);border-radius:1rem;height:2rem;overflow:hidden;pointer-events:none;position:absolute;top:-1rem;transform-origin:center;transition:transform .3s .1s,opacity .2s .2s;width:2rem}[data-md-toggle=search]:checked~.md-header .md-search__overlay{opacity:1;transition:transform .4s,opacity .1s}}@media screen and (min-width:60em){[dir=ltr] .md-search__overlay{left:0}[dir=rtl] .md-search__overlay{right:0}.md-search__overlay{background-color:#0000008a;cursor:pointer;height:0;position:fixed;top:0;transition:width 0ms .25s,height 0ms .25s,opacity .25s;width:0}[data-md-toggle=search]:checked~.md-header .md-search__overlay{height:200vh;opacity:1;transition:width 0ms,height 0ms,opacity .25s;width:100%}}@media screen and (max-width:29.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{transform:scale(45)}}@media screen and (min-width:30em) and (max-width:44.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{transform:scale(60)}}@media screen and (min-width:45em) and (max-width:59.9375em){[data-md-toggle=search]:checked~.md-header .md-search__overlay{transform:scale(75)}}.md-search__inner{-webkit-backface-visibility:hidden;backface-visibility:hidden}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__inner{left:0}[dir=rtl] .md-search__inner{right:0}.md-search__inner{height:0;opacity:0;overflow:hidden;position:fixed;top:0;transform:translateX(5%);transition:width 0ms .3s,height 0ms .3s,transform .15s cubic-bezier(.4,0,.2,1) .15s,opacity .15s .15s;width:0;z-index:2}[dir=rtl] .md-search__inner{transform:translateX(-5%)}[data-md-toggle=search]:checked~.md-header .md-search__inner{height:100%;opacity:1;transform:translateX(0);transition:width 0ms 0ms,height 0ms 0ms,transform .15s cubic-bezier(.1,.7,.1,1) .15s,opacity .15s .15s;width:100%}}@media screen and (min-width:60em){[dir=ltr] .md-search__inner{float:right}[dir=rtl] .md-search__inner{float:left}.md-search__inner{padding:.1rem 0;position:relative;transition:width .25s cubic-bezier(.1,.7,.1,1);width:11.7rem}}@media screen and (min-width:60em) and (max-width:76.1875em){[data-md-toggle=search]:checked~.md-header .md-search__inner{width:23.4rem}}@media screen and (min-width:76.25em){[data-md-toggle=search]:checked~.md-header .md-search__inner{width:34.4rem}}.md-search__form{background-color:var(--md-default-bg-color);box-shadow:0 0 .6rem #0000;height:2.4rem;position:relative;transition:color .25s,background-color .25s;z-index:2}@media screen and (min-width:60em){.md-search__form{background-color:#00000042;border-radius:.1rem;height:1.8rem}.md-search__form:hover{background-color:#ffffff1f}}[data-md-toggle=search]:checked~.md-header .md-search__form{background-color:var(--md-default-bg-color);border-radius:.1rem .1rem 0 0;box-shadow:0 0 .6rem #00000012;color:var(--md-default-fg-color)}[dir=ltr] .md-search__input{padding-left:3.6rem;padding-right:2.2rem}[dir=rtl] .md-search__input{padding-left:2.2rem;padding-right:3.6rem}.md-search__input{background:#0000;font-size:.9rem;height:100%;position:relative;text-overflow:ellipsis;width:100%;z-index:2}.md-search__input::placeholder{transition:color .25s}.md-search__input::placeholder,.md-search__input~.md-search__icon{color:var(--md-default-fg-color--light)}.md-search__input::-ms-clear{display:none}@media screen and (max-width:59.9375em){.md-search__input{font-size:.9rem;height:2.4rem;width:100%}}@media screen and (min-width:60em){[dir=ltr] 
.md-search__input{padding-left:2.2rem}[dir=rtl] .md-search__input{padding-right:2.2rem}.md-search__input{color:inherit;font-size:.8rem}.md-search__input::placeholder{color:var(--md-primary-bg-color--light)}.md-search__input+.md-search__icon{color:var(--md-primary-bg-color)}[data-md-toggle=search]:checked~.md-header .md-search__input{text-overflow:clip}[data-md-toggle=search]:checked~.md-header .md-search__input+.md-search__icon,[data-md-toggle=search]:checked~.md-header .md-search__input::placeholder{color:var(--md-default-fg-color--light)}}.md-search__icon{cursor:pointer;display:inline-block;height:1.2rem;transition:color .25s,opacity .25s;width:1.2rem}.md-search__icon:hover{opacity:.7}[dir=ltr] .md-search__icon[for=__search]{left:.5rem}[dir=rtl] .md-search__icon[for=__search]{right:.5rem}.md-search__icon[for=__search]{position:absolute;top:.3rem;z-index:2}[dir=rtl] .md-search__icon[for=__search] svg{transform:scaleX(-1)}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__icon[for=__search]{left:.8rem}[dir=rtl] .md-search__icon[for=__search]{right:.8rem}.md-search__icon[for=__search]{top:.6rem}.md-search__icon[for=__search] svg:first-child{display:none}}@media screen and (min-width:60em){.md-search__icon[for=__search]{pointer-events:none}.md-search__icon[for=__search] svg:last-child{display:none}}[dir=ltr] .md-search__options{right:.5rem}[dir=rtl] .md-search__options{left:.5rem}.md-search__options{pointer-events:none;position:absolute;top:.3rem;z-index:2}@media screen and (max-width:59.9375em){[dir=ltr] .md-search__options{right:.8rem}[dir=rtl] .md-search__options{left:.8rem}.md-search__options{top:.6rem}}[dir=ltr] .md-search__options>*{margin-left:.2rem}[dir=rtl] .md-search__options>*{margin-right:.2rem}.md-search__options>*{color:var(--md-default-fg-color--light);opacity:0;transform:scale(.75);transition:transform .15s cubic-bezier(.1,.7,.1,1),opacity .15s}.md-search__options>:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}[data-md-toggle=search]:checked~.md-header .md-search__input:valid~.md-search__options>*{opacity:1;pointer-events:auto;transform:scale(1)}[data-md-toggle=search]:checked~.md-header .md-search__input:valid~.md-search__options>:hover{opacity:.7}[dir=ltr] .md-search__suggest{padding-left:3.6rem;padding-right:2.2rem}[dir=rtl] .md-search__suggest{padding-left:2.2rem;padding-right:3.6rem}.md-search__suggest{align-items:center;color:var(--md-default-fg-color--lighter);display:flex;font-size:.9rem;height:100%;opacity:0;position:absolute;top:0;transition:opacity 50ms;white-space:nowrap;width:100%}@media screen and (min-width:60em){[dir=ltr] .md-search__suggest{padding-left:2.2rem}[dir=rtl] .md-search__suggest{padding-right:2.2rem}.md-search__suggest{font-size:.8rem}}[data-md-toggle=search]:checked~.md-header .md-search__suggest{opacity:1;transition:opacity .3s .1s}[dir=ltr] .md-search__output{border-bottom-left-radius:.1rem}[dir=ltr] .md-search__output,[dir=rtl] .md-search__output{border-bottom-right-radius:.1rem}[dir=rtl] .md-search__output{border-bottom-left-radius:.1rem}.md-search__output{overflow:hidden;position:absolute;width:100%;z-index:1}@media screen and (max-width:59.9375em){.md-search__output{bottom:0;top:2.4rem}}@media screen and (min-width:60em){.md-search__output{opacity:0;top:1.9rem;transition:opacity .4s}[data-md-toggle=search]:checked~.md-header 
.md-search__output{box-shadow:var(--md-shadow-z3);opacity:1}}.md-search__scrollwrap{-webkit-backface-visibility:hidden;backface-visibility:hidden;background-color:var(--md-default-bg-color);height:100%;overflow-y:auto;touch-action:pan-y}@media (-webkit-max-device-pixel-ratio:1),(max-resolution:1dppx){.md-search__scrollwrap{transform:translateZ(0)}}@media screen and (min-width:60em) and (max-width:76.1875em){.md-search__scrollwrap{width:23.4rem}}@media screen and (min-width:76.25em){.md-search__scrollwrap{width:34.4rem}}@media screen and (min-width:60em){.md-search__scrollwrap{max-height:0;scrollbar-color:var(--md-default-fg-color--lighter) #0000;scrollbar-width:thin}[data-md-toggle=search]:checked~.md-header .md-search__scrollwrap{max-height:75vh}.md-search__scrollwrap:hover{scrollbar-color:var(--md-accent-fg-color) #0000}.md-search__scrollwrap::-webkit-scrollbar{height:.2rem;width:.2rem}.md-search__scrollwrap::-webkit-scrollbar-thumb{background-color:var(--md-default-fg-color--lighter)}.md-search__scrollwrap::-webkit-scrollbar-thumb:hover{background-color:var(--md-accent-fg-color)}}.md-search-result{color:var(--md-default-fg-color);word-break:break-word}.md-search-result__meta{background-color:var(--md-default-fg-color--lightest);color:var(--md-default-fg-color--light);font-size:.64rem;line-height:1.8rem;padding:0 .8rem;scroll-snap-align:start}@media screen and (min-width:60em){[dir=ltr] .md-search-result__meta{padding-left:2.2rem}[dir=rtl] .md-search-result__meta{padding-right:2.2rem}}.md-search-result__list{list-style:none;margin:0;padding:0;-webkit-user-select:none;-moz-user-select:none;user-select:none}.md-search-result__item{box-shadow:0 -.05rem var(--md-default-fg-color--lightest)}.md-search-result__item:first-child{box-shadow:none}.md-search-result__link{display:block;outline:none;scroll-snap-align:start;transition:background-color .25s}.md-search-result__link:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent)}.md-search-result__link:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent)}.md-search-result__link:is(:focus,:hover){background-color:var(--md-accent-fg-color--transparent)}.md-search-result__link:last-child p:last-child{margin-bottom:.6rem}.md-search-result__more summary{color:var(--md-typeset-a-color);cursor:pointer;display:block;font-size:.64rem;outline:none;padding:.75em .8rem;scroll-snap-align:start;transition:color .25s,background-color .25s}@media screen and (min-width:60em){[dir=ltr] .md-search-result__more summary{padding-left:2.2rem}[dir=rtl] .md-search-result__more summary{padding-right:2.2rem}}.md-search-result__more summary:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-search-result__more summary:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-search-result__more summary:is(:focus,:hover){background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-search-result__more summary::marker{display:none}.md-search-result__more summary::-webkit-details-marker{display:none}.md-search-result__more summary~*>*{opacity:.65}.md-search-result__article{overflow:hidden;padding:0 .8rem;position:relative}@media screen and (min-width:60em){[dir=ltr] .md-search-result__article{padding-left:2.2rem}[dir=rtl] .md-search-result__article{padding-right:2.2rem}}.md-search-result__article--document 
.md-search-result__title{font-size:.8rem;font-weight:400;line-height:1.4;margin:.55rem 0}[dir=ltr] .md-search-result__icon{left:0}[dir=rtl] .md-search-result__icon{right:0}.md-search-result__icon{color:var(--md-default-fg-color--light);height:1.2rem;margin:.5rem;position:absolute;width:1.2rem}@media screen and (max-width:59.9375em){.md-search-result__icon{display:none}}.md-search-result__icon:after{background-color:currentcolor;content:"";display:inline-block;height:100%;-webkit-mask-image:var(--md-search-result-icon);mask-image:var(--md-search-result-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:100%}[dir=rtl] .md-search-result__icon:after{transform:scaleX(-1)}.md-search-result__title{font-size:.64rem;font-weight:700;line-height:1.6;margin:.5em 0}.md-search-result__teaser{-webkit-box-orient:vertical;-webkit-line-clamp:2;color:var(--md-default-fg-color--light);display:-webkit-box;font-size:.64rem;line-height:1.6;margin:.5em 0;max-height:2rem;overflow:hidden;text-overflow:ellipsis}@media screen and (max-width:44.9375em){.md-search-result__teaser{-webkit-line-clamp:3;max-height:3rem}}@media screen and (min-width:60em) and (max-width:76.1875em){.md-search-result__teaser{-webkit-line-clamp:3;max-height:3rem}}.md-search-result__teaser mark{background-color:initial;text-decoration:underline}.md-search-result__terms{font-size:.64rem;font-style:italic;margin:.5em 0}.md-search-result mark{background-color:initial;color:var(--md-accent-fg-color)}.md-select{position:relative;z-index:1}.md-select__inner{background-color:var(--md-default-bg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color);left:50%;margin-top:.2rem;max-height:0;opacity:0;position:absolute;top:calc(100% - .2rem);transform:translate3d(-50%,.3rem,0);transition:transform .25s 375ms,opacity .25s .25s,max-height 0ms .5s}.md-select:-webkit-any(:focus-within,:hover) .md-select__inner{max-height:10rem;opacity:1;transform:translate3d(-50%,0,0);-webkit-transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms;transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms}.md-select:-moz-any(:focus-within,:hover) .md-select__inner{max-height:10rem;opacity:1;transform:translate3d(-50%,0,0);-moz-transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms;transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms}.md-select:is(:focus-within,:hover) .md-select__inner{max-height:10rem;opacity:1;transform:translate3d(-50%,0,0);transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,max-height 0ms}.md-select__inner:after{border-bottom:.2rem solid #0000;border-bottom-color:var(--md-default-bg-color);border-left:.2rem solid #0000;border-right:.2rem solid #0000;border-top:0;content:"";height:0;left:50%;margin-left:-.2rem;margin-top:-.2rem;position:absolute;top:0;width:0}.md-select__list{border-radius:.1rem;font-size:.8rem;list-style-type:none;margin:0;max-height:inherit;overflow:auto;padding:0}.md-select__item{line-height:1.8rem}[dir=ltr] .md-select__link{padding-left:.6rem;padding-right:1.2rem}[dir=rtl] .md-select__link{padding-left:1.2rem;padding-right:.6rem}.md-select__link{cursor:pointer;display:block;outline:none;scroll-snap-align:start;transition:background-color .25s,color 
.25s;width:100%}.md-select__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-select__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-select__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-select__link:focus{background-color:var(--md-default-fg-color--lightest)}.md-sidebar{align-self:flex-start;flex-shrink:0;padding:1.2rem 0;position:-webkit-sticky;position:sticky;top:2.4rem;width:12.1rem}@media print{.md-sidebar{display:none}}@media screen and (max-width:76.1875em){[dir=ltr] .md-sidebar--primary{left:-12.1rem}[dir=rtl] .md-sidebar--primary{right:-12.1rem}.md-sidebar--primary{background-color:var(--md-default-bg-color);display:block;height:100%;position:fixed;top:0;transform:translateX(0);transition:transform .25s cubic-bezier(.4,0,.2,1),box-shadow .25s;width:12.1rem;z-index:5}[data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{box-shadow:var(--md-shadow-z3);transform:translateX(12.1rem)}[dir=rtl] [data-md-toggle=drawer]:checked~.md-container .md-sidebar--primary{transform:translateX(-12.1rem)}.md-sidebar--primary .md-sidebar__scrollwrap{bottom:0;left:0;margin:0;overflow:hidden;position:absolute;right:0;scroll-snap-type:none;top:0}}@media screen and (min-width:76.25em){.md-sidebar{height:0}.no-js .md-sidebar{height:auto}.md-header--lifted~.md-container .md-sidebar{top:4.8rem}}.md-sidebar--secondary{display:none;order:2}@media screen and (min-width:60em){.md-sidebar--secondary{height:0}.no-js .md-sidebar--secondary{height:auto}.md-sidebar--secondary:not([hidden]){display:block}.md-sidebar--secondary .md-sidebar__scrollwrap{touch-action:pan-y}}.md-sidebar__scrollwrap{scrollbar-gutter:stable;-webkit-backface-visibility:hidden;backface-visibility:hidden;margin:0 .2rem;overflow-y:auto;scrollbar-color:var(--md-default-fg-color--lighter) #0000;scrollbar-width:thin}.md-sidebar__scrollwrap:hover{scrollbar-color:var(--md-accent-fg-color) #0000}.md-sidebar__scrollwrap::-webkit-scrollbar{height:.2rem;width:.2rem}.md-sidebar__scrollwrap::-webkit-scrollbar-thumb{background-color:var(--md-default-fg-color--lighter)}.md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover{background-color:var(--md-accent-fg-color)}@supports selector(::-webkit-scrollbar){.md-sidebar__scrollwrap{scrollbar-gutter:auto}[dir=ltr] .md-sidebar__inner{padding-right:calc(100% - 11.5rem)}[dir=rtl] .md-sidebar__inner{padding-left:calc(100% - 11.5rem)}}@media screen and (max-width:76.1875em){.md-overlay{background-color:#0000008a;height:0;opacity:0;position:fixed;top:0;transition:width 0ms .25s,height 0ms .25s,opacity .25s;width:0;z-index:5}[data-md-toggle=drawer]:checked~.md-overlay{height:100%;opacity:1;transition:width 0ms,height 0ms,opacity .25s;width:100%}}@keyframes facts{0%{height:0}to{height:.65rem}}@keyframes fact{0%{opacity:0;transform:translateY(100%)}50%{opacity:0}to{opacity:1;transform:translateY(0)}}:root{--md-source-forks-icon:url('data:image/svg+xml;charset=utf-8,');--md-source-repositories-icon:url('data:image/svg+xml;charset=utf-8,');--md-source-stars-icon:url('data:image/svg+xml;charset=utf-8,');--md-source-version-icon:url('data:image/svg+xml;charset=utf-8,')}.md-source{-webkit-backface-visibility:hidden;backface-visibility:hidden;display:block;font-size:.65rem;line-height:1.2;outline-color:var(--md-accent-fg-color);transition:opacity .25s;white-space:nowrap}.md-source:hover{opacity:.7}.md-source__icon{display:inline-block;height:2.4rem;vertical-align:middle;width:2rem}[dir=ltr] .md-source__icon svg{margin-left:.6rem}[dir=rtl] .md-source__icon 
svg{margin-right:.6rem}.md-source__icon svg{margin-top:.6rem}[dir=ltr] .md-source__icon+.md-source__repository{margin-left:-2rem}[dir=rtl] .md-source__icon+.md-source__repository{margin-right:-2rem}[dir=ltr] .md-source__icon+.md-source__repository{padding-left:2rem}[dir=rtl] .md-source__icon+.md-source__repository{padding-right:2rem}[dir=ltr] .md-source__repository{margin-left:.6rem}[dir=rtl] .md-source__repository{margin-right:.6rem}.md-source__repository{display:inline-block;max-width:calc(100% - 1.2rem);overflow:hidden;text-overflow:ellipsis;vertical-align:middle}.md-source__facts{display:flex;font-size:.55rem;gap:.4rem;list-style-type:none;margin:.1rem 0 0;opacity:.75;overflow:hidden;padding:0;width:100%}.md-source__repository--active .md-source__facts{animation:facts .25s ease-in}.md-source__fact{overflow:hidden;text-overflow:ellipsis}.md-source__repository--active .md-source__fact{animation:fact .4s ease-out}[dir=ltr] .md-source__fact:before{margin-right:.1rem}[dir=rtl] .md-source__fact:before{margin-left:.1rem}.md-source__fact:before{background-color:currentcolor;content:"";display:inline-block;height:.6rem;-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;vertical-align:text-top;width:.6rem}.md-source__fact:nth-child(1n+2){flex-shrink:0}.md-source__fact--version:before{-webkit-mask-image:var(--md-source-version-icon);mask-image:var(--md-source-version-icon)}.md-source__fact--stars:before{-webkit-mask-image:var(--md-source-stars-icon);mask-image:var(--md-source-stars-icon)}.md-source__fact--forks:before{-webkit-mask-image:var(--md-source-forks-icon);mask-image:var(--md-source-forks-icon)}.md-source__fact--repositories:before{-webkit-mask-image:var(--md-source-repositories-icon);mask-image:var(--md-source-repositories-icon)}.md-tabs{background-color:var(--md-primary-fg-color);color:var(--md-primary-bg-color);display:block;line-height:1.3;overflow:auto;width:100%;z-index:3}@media print{.md-tabs{display:none}}@media screen and (max-width:76.1875em){.md-tabs{display:none}}.md-tabs[hidden]{pointer-events:none}[dir=ltr] .md-tabs__list{margin-left:.2rem}[dir=rtl] .md-tabs__list{margin-right:.2rem}.md-tabs__list{contain:content;list-style:none;margin:0;padding:0;white-space:nowrap}.md-tabs__item{display:inline-block;height:2.4rem;padding-left:.6rem;padding-right:.6rem}.md-tabs__link{-webkit-backface-visibility:hidden;backface-visibility:hidden;display:block;font-size:.7rem;margin-top:.8rem;opacity:.7;outline-color:var(--md-accent-fg-color);outline-offset:.2rem;transition:transform .4s cubic-bezier(.1,.7,.1,1),opacity .25s}.md-tabs__link--active,.md-tabs__link:-webkit-any(:focus,:hover){color:inherit;opacity:1}.md-tabs__link--active,.md-tabs__link:-moz-any(:focus,:hover){color:inherit;opacity:1}.md-tabs__link--active,.md-tabs__link:is(:focus,:hover){color:inherit;opacity:1}.md-tabs__item:nth-child(2) .md-tabs__link{transition-delay:20ms}.md-tabs__item:nth-child(3) .md-tabs__link{transition-delay:40ms}.md-tabs__item:nth-child(4) .md-tabs__link{transition-delay:60ms}.md-tabs__item:nth-child(5) .md-tabs__link{transition-delay:80ms}.md-tabs__item:nth-child(6) .md-tabs__link{transition-delay:.1s}.md-tabs__item:nth-child(7) .md-tabs__link{transition-delay:.12s}.md-tabs__item:nth-child(8) .md-tabs__link{transition-delay:.14s}.md-tabs__item:nth-child(9) .md-tabs__link{transition-delay:.16s}.md-tabs__item:nth-child(10) .md-tabs__link{transition-delay:.18s}.md-tabs__item:nth-child(11) 
.md-tabs__link{transition-delay:.2s}.md-tabs__item:nth-child(12) .md-tabs__link{transition-delay:.22s}.md-tabs__item:nth-child(13) .md-tabs__link{transition-delay:.24s}.md-tabs__item:nth-child(14) .md-tabs__link{transition-delay:.26s}.md-tabs__item:nth-child(15) .md-tabs__link{transition-delay:.28s}.md-tabs__item:nth-child(16) .md-tabs__link{transition-delay:.3s}.md-tabs[hidden] .md-tabs__link{opacity:0;transform:translateY(50%);transition:transform 0ms .1s,opacity .1s}:root{--md-tag-icon:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .md-tags{margin-bottom:.75em;margin-top:-.125em}[dir=ltr] .md-typeset .md-tag{margin-right:.5em}[dir=rtl] .md-typeset .md-tag{margin-left:.5em}.md-typeset .md-tag{background:var(--md-default-fg-color--lightest);border-radius:2.4rem;display:inline-block;font-size:.64rem;font-weight:700;letter-spacing:normal;line-height:1.6;margin-bottom:.5em;padding:.3125em .9375em;vertical-align:middle}.md-typeset .md-tag[href]{-webkit-tap-highlight-color:transparent;color:inherit;outline:none;transition:color 125ms,background-color 125ms}.md-typeset .md-tag[href]:focus,.md-typeset .md-tag[href]:hover{background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}[id]>.md-typeset .md-tag{vertical-align:text-top}.md-typeset .md-tag-icon:before{background-color:var(--md-default-fg-color--lighter);content:"";display:inline-block;height:1.2em;margin-right:.4em;-webkit-mask-image:var(--md-tag-icon);mask-image:var(--md-tag-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;transition:background-color 125ms;vertical-align:text-bottom;width:1.2em}.md-typeset .md-tag-icon:-webkit-any(a:focus,a:hover):before{background-color:var(--md-accent-bg-color)}.md-typeset .md-tag-icon:-moz-any(a:focus,a:hover):before{background-color:var(--md-accent-bg-color)}.md-typeset .md-tag-icon:is(a:focus,a:hover):before{background-color:var(--md-accent-bg-color)}@keyframes pulse{0%{box-shadow:0 0 0 0 var(--md-default-fg-color--lightest);transform:scale(.95)}75%{box-shadow:0 0 0 .625em #0000;transform:scale(1)}to{box-shadow:0 0 0 0 #0000;transform:scale(.95)}}:root{--md-tooltip-width:20rem}.md-tooltip{-webkit-backface-visibility:hidden;backface-visibility:hidden;background-color:var(--md-default-bg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color);font-family:var(--md-text-font-family);left:clamp(var(--md-tooltip-0,0rem) + .8rem,var(--md-tooltip-x),100vw + var(--md-tooltip-0,0rem) + .8rem - var(--md-tooltip-width) - 2 * .8rem);max-width:calc(100vw - 1.6rem);opacity:0;position:absolute;top:var(--md-tooltip-y);transform:translateY(-.4rem);transition:transform 0ms .25s,opacity .25s,z-index .25s;width:var(--md-tooltip-width);z-index:0}.md-tooltip--active{opacity:1;transform:translateY(0);transition:transform .25s cubic-bezier(.1,.7,.1,1),opacity .25s,z-index 0ms;z-index:2}:-webkit-any(.focus-visible>.md-tooltip,.md-tooltip:target){outline:var(--md-accent-fg-color) auto}:-moz-any(.focus-visible>.md-tooltip,.md-tooltip:target){outline:var(--md-accent-fg-color) auto}:is(.focus-visible>.md-tooltip,.md-tooltip:target){outline:var(--md-accent-fg-color) auto}.md-tooltip__inner{font-size:.64rem;padding:.8rem}.md-tooltip__inner.md-typeset>:first-child{margin-top:0}.md-tooltip__inner.md-typeset>:last-child{margin-bottom:0}.md-annotation{font-weight:400;outline:none;white-space:normal}[dir=rtl] 
.md-annotation{direction:rtl}.md-annotation:not([hidden]){display:inline-block;line-height:1.325}.md-annotation__index{cursor:pointer;font-family:var(--md-code-font-family);font-size:.85em;margin:0 1ch;outline:none;position:relative;-webkit-user-select:none;-moz-user-select:none;user-select:none;z-index:0}.md-annotation .md-annotation__index{color:#fff;transition:z-index .25s}.md-annotation .md-annotation__index:-webkit-any(:focus,:hover){color:#fff}.md-annotation .md-annotation__index:-moz-any(:focus,:hover){color:#fff}.md-annotation .md-annotation__index:is(:focus,:hover){color:#fff}.md-annotation__index:after{background-color:var(--md-default-fg-color--lighter);border-radius:2ch;content:"";height:2.2ch;left:-.125em;margin:0 -.4ch;padding:0 .4ch;position:absolute;top:0;transition:color .25s,background-color .25s;width:calc(100% + 1.2ch);width:max(2.2ch,100% + 1.2ch);z-index:-1}@media not all and (prefers-reduced-motion){[data-md-visible]>.md-annotation__index:after{animation:pulse 2s infinite}}.md-tooltip--active+.md-annotation__index:after{animation:none;transition:color .25s,background-color .25s}code .md-annotation__index{font-family:var(--md-code-font-family);font-size:inherit}:-webkit-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index){color:var(--md-accent-bg-color)}:-moz-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index){color:var(--md-accent-bg-color)}:is(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index){color:var(--md-accent-bg-color)}:-webkit-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index):after{background-color:var(--md-accent-fg-color)}:-moz-any(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index):after{background-color:var(--md-accent-fg-color)}:is(.md-tooltip--active+.md-annotation__index,:hover>.md-annotation__index):after{background-color:var(--md-accent-fg-color)}.md-tooltip--active+.md-annotation__index{animation:none;transition:none;z-index:2}.md-annotation__index [data-md-annotation-id]{display:inline-block;line-height:90%}.md-annotation__index [data-md-annotation-id]:before{content:attr(data-md-annotation-id);display:inline-block;padding-bottom:.1em;transform:scale(1.15);transition:transform .4s cubic-bezier(.1,.7,.1,1);vertical-align:.065em}@media not print{.md-annotation__index [data-md-annotation-id]:before{content:"+"}:focus-within>.md-annotation__index [data-md-annotation-id]:before{transform:scale(1.25) rotate(45deg)}}[dir=ltr] .md-top{margin-left:50%}[dir=rtl] .md-top{margin-right:50%}.md-top{background-color:var(--md-default-bg-color);border-radius:1.6rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color--light);display:block;font-size:.7rem;outline:none;padding:.4rem .8rem;position:fixed;top:3.2rem;transform:translate(-50%);transition:color 125ms,background-color 125ms,transform 125ms cubic-bezier(.4,0,.2,1),opacity 125ms;z-index:2}@media print{.md-top{display:none}}[dir=rtl] .md-top{transform:translate(50%)}.md-top[hidden]{opacity:0;pointer-events:none;transform:translate(-50%,.2rem);transition-duration:0ms}[dir=rtl] .md-top[hidden]{transform:translate(50%,.2rem)}.md-top:-webkit-any(:focus,:hover){background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-top:-moz-any(:focus,:hover){background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-top:is(:focus,:hover){background-color:var(--md-accent-fg-color);color:var(--md-accent-bg-color)}.md-top 
svg{display:inline-block;vertical-align:-.5em}@keyframes hoverfix{0%{pointer-events:none}}:root{--md-version-icon:url('data:image/svg+xml;charset=utf-8,')}.md-version{flex-shrink:0;font-size:.8rem;height:2.4rem}[dir=ltr] .md-version__current{margin-left:1.4rem;margin-right:.4rem}[dir=rtl] .md-version__current{margin-left:.4rem;margin-right:1.4rem}.md-version__current{color:inherit;cursor:pointer;outline:none;position:relative;top:.05rem}[dir=ltr] .md-version__current:after{margin-left:.4rem}[dir=rtl] .md-version__current:after{margin-right:.4rem}.md-version__current:after{background-color:currentcolor;content:"";display:inline-block;height:.6rem;-webkit-mask-image:var(--md-version-icon);mask-image:var(--md-version-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:.4rem}.md-version__list{background-color:var(--md-default-bg-color);border-radius:.1rem;box-shadow:var(--md-shadow-z2);color:var(--md-default-fg-color);list-style-type:none;margin:.2rem .8rem;max-height:0;opacity:0;overflow:auto;padding:0;position:absolute;scroll-snap-type:y mandatory;top:.15rem;transition:max-height 0ms .5s,opacity .25s .25s;z-index:3}.md-version:-webkit-any(:focus-within,:hover) .md-version__list{max-height:10rem;opacity:1;-webkit-transition:max-height 0ms,opacity .25s;transition:max-height 0ms,opacity .25s}.md-version:-moz-any(:focus-within,:hover) .md-version__list{max-height:10rem;opacity:1;-moz-transition:max-height 0ms,opacity .25s;transition:max-height 0ms,opacity .25s}.md-version:is(:focus-within,:hover) .md-version__list{max-height:10rem;opacity:1;transition:max-height 0ms,opacity .25s}@media (pointer:coarse){.md-version:hover .md-version__list{animation:hoverfix .25s forwards}.md-version:focus-within .md-version__list{animation:none}}.md-version__item{line-height:1.8rem}[dir=ltr] .md-version__link{padding-left:.6rem;padding-right:1.2rem}[dir=rtl] .md-version__link{padding-left:1.2rem;padding-right:.6rem}.md-version__link{cursor:pointer;display:block;outline:none;scroll-snap-align:start;transition:color .25s,background-color .25s;white-space:nowrap;width:100%}.md-version__link:-webkit-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-version__link:-moz-any(:focus,:hover){color:var(--md-accent-fg-color)}.md-version__link:is(:focus,:hover){color:var(--md-accent-fg-color)}.md-version__link:focus{background-color:var(--md-default-fg-color--lightest)}:root{--md-admonition-icon--note:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--abstract:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--info:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--tip:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--success:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--question:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--warning:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--failure:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--danger:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--bug:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--example:url('data:image/svg+xml;charset=utf-8,');--md-admonition-icon--quote:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .admonition,.md-typeset details{background-color:var(--md-admonition-bg-color);border:.05rem solid 
#448aff;border-radius:.2rem;box-shadow:var(--md-shadow-z1);color:var(--md-admonition-fg-color);display:flow-root;font-size:.64rem;margin:1.5625em 0;padding:0 .6rem;page-break-inside:avoid}@media print{.md-typeset .admonition,.md-typeset details{box-shadow:none}}.md-typeset .admonition>*,.md-typeset details>*{box-sizing:border-box}.md-typeset .admonition :-webkit-any(.admonition,details),.md-typeset details :-webkit-any(.admonition,details){margin-bottom:1em;margin-top:1em}.md-typeset .admonition :-moz-any(.admonition,details),.md-typeset details :-moz-any(.admonition,details){margin-bottom:1em;margin-top:1em}.md-typeset .admonition :is(.admonition,details),.md-typeset details :is(.admonition,details){margin-bottom:1em;margin-top:1em}.md-typeset .admonition .md-typeset__scrollwrap,.md-typeset details .md-typeset__scrollwrap{margin:1em -.6rem}.md-typeset .admonition .md-typeset__table,.md-typeset details .md-typeset__table{padding:0 .6rem}.md-typeset .admonition>.tabbed-set:only-child,.md-typeset details>.tabbed-set:only-child{margin-top:0}html .md-typeset .admonition>:last-child,html .md-typeset details>:last-child{margin-bottom:.6rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary{padding-left:2rem;padding-right:.6rem}[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{padding-left:.6rem;padding-right:2rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary{border-left-width:.2rem}[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{border-right-width:.2rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary{border-top-left-radius:.1rem}[dir=ltr] .md-typeset .admonition-title,[dir=ltr] .md-typeset summary,[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{border-top-right-radius:.1rem}[dir=rtl] .md-typeset .admonition-title,[dir=rtl] .md-typeset summary{border-top-left-radius:.1rem}.md-typeset .admonition-title,.md-typeset summary{background-color:#448aff1a;border:none;font-weight:700;margin:0 -.6rem;padding-bottom:.4rem;padding-top:.4rem;position:relative}html .md-typeset .admonition-title:last-child,html .md-typeset summary:last-child{margin-bottom:0}[dir=ltr] .md-typeset .admonition-title:before,[dir=ltr] .md-typeset summary:before{left:.6rem}[dir=rtl] .md-typeset .admonition-title:before,[dir=rtl] .md-typeset summary:before{right:.6rem}.md-typeset .admonition-title:before,.md-typeset summary:before{background-color:#448aff;content:"";height:1rem;-webkit-mask-image:var(--md-admonition-icon--note);mask-image:var(--md-admonition-icon--note);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;position:absolute;top:.625em;width:1rem}.md-typeset .admonition-title code,.md-typeset summary code{box-shadow:0 0 0 .05rem var(--md-default-fg-color--lightest)}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.note){border-color:#448aff}.md-typeset :-moz-any(.admonition,details):-moz-any(.note){border-color:#448aff}.md-typeset :is(.admonition,details):is(.note){border-color:#448aff}.md-typeset :-webkit-any(.note)>:-webkit-any(.admonition-title,summary){background-color:#448aff1a}.md-typeset :-moz-any(.note)>:-moz-any(.admonition-title,summary){background-color:#448aff1a}.md-typeset :is(.note)>:is(.admonition-title,summary){background-color:#448aff1a}.md-typeset 
:-webkit-any(.note)>:-webkit-any(.admonition-title,summary):before{background-color:#448aff;-webkit-mask-image:var(--md-admonition-icon--note);mask-image:var(--md-admonition-icon--note)}.md-typeset :-moz-any(.note)>:-moz-any(.admonition-title,summary):before{background-color:#448aff;mask-image:var(--md-admonition-icon--note)}.md-typeset :is(.note)>:is(.admonition-title,summary):before{background-color:#448aff;-webkit-mask-image:var(--md-admonition-icon--note);mask-image:var(--md-admonition-icon--note)}.md-typeset :-webkit-any(.note)>:-webkit-any(.admonition-title,summary):after{color:#448aff}.md-typeset :-moz-any(.note)>:-moz-any(.admonition-title,summary):after{color:#448aff}.md-typeset :is(.note)>:is(.admonition-title,summary):after{color:#448aff}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.abstract,.summary,.tldr){border-color:#00b0ff}.md-typeset :-moz-any(.admonition,details):-moz-any(.abstract,.summary,.tldr){border-color:#00b0ff}.md-typeset :is(.admonition,details):is(.abstract,.summary,.tldr){border-color:#00b0ff}.md-typeset :-webkit-any(.abstract,.summary,.tldr)>:-webkit-any(.admonition-title,summary){background-color:#00b0ff1a}.md-typeset :-moz-any(.abstract,.summary,.tldr)>:-moz-any(.admonition-title,summary){background-color:#00b0ff1a}.md-typeset :is(.abstract,.summary,.tldr)>:is(.admonition-title,summary){background-color:#00b0ff1a}.md-typeset :-webkit-any(.abstract,.summary,.tldr)>:-webkit-any(.admonition-title,summary):before{background-color:#00b0ff;-webkit-mask-image:var(--md-admonition-icon--abstract);mask-image:var(--md-admonition-icon--abstract)}.md-typeset :-moz-any(.abstract,.summary,.tldr)>:-moz-any(.admonition-title,summary):before{background-color:#00b0ff;mask-image:var(--md-admonition-icon--abstract)}.md-typeset :is(.abstract,.summary,.tldr)>:is(.admonition-title,summary):before{background-color:#00b0ff;-webkit-mask-image:var(--md-admonition-icon--abstract);mask-image:var(--md-admonition-icon--abstract)}.md-typeset :-webkit-any(.abstract,.summary,.tldr)>:-webkit-any(.admonition-title,summary):after{color:#00b0ff}.md-typeset :-moz-any(.abstract,.summary,.tldr)>:-moz-any(.admonition-title,summary):after{color:#00b0ff}.md-typeset :is(.abstract,.summary,.tldr)>:is(.admonition-title,summary):after{color:#00b0ff}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.info,.todo){border-color:#00b8d4}.md-typeset :-moz-any(.admonition,details):-moz-any(.info,.todo){border-color:#00b8d4}.md-typeset :is(.admonition,details):is(.info,.todo){border-color:#00b8d4}.md-typeset :-webkit-any(.info,.todo)>:-webkit-any(.admonition-title,summary){background-color:#00b8d41a}.md-typeset :-moz-any(.info,.todo)>:-moz-any(.admonition-title,summary){background-color:#00b8d41a}.md-typeset :is(.info,.todo)>:is(.admonition-title,summary){background-color:#00b8d41a}.md-typeset :-webkit-any(.info,.todo)>:-webkit-any(.admonition-title,summary):before{background-color:#00b8d4;-webkit-mask-image:var(--md-admonition-icon--info);mask-image:var(--md-admonition-icon--info)}.md-typeset :-moz-any(.info,.todo)>:-moz-any(.admonition-title,summary):before{background-color:#00b8d4;mask-image:var(--md-admonition-icon--info)}.md-typeset :is(.info,.todo)>:is(.admonition-title,summary):before{background-color:#00b8d4;-webkit-mask-image:var(--md-admonition-icon--info);mask-image:var(--md-admonition-icon--info)}.md-typeset :-webkit-any(.info,.todo)>:-webkit-any(.admonition-title,summary):after{color:#00b8d4}.md-typeset 
:-moz-any(.info,.todo)>:-moz-any(.admonition-title,summary):after{color:#00b8d4}.md-typeset :is(.info,.todo)>:is(.admonition-title,summary):after{color:#00b8d4}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.tip,.hint,.important){border-color:#00bfa5}.md-typeset :-moz-any(.admonition,details):-moz-any(.tip,.hint,.important){border-color:#00bfa5}.md-typeset :is(.admonition,details):is(.tip,.hint,.important){border-color:#00bfa5}.md-typeset :-webkit-any(.tip,.hint,.important)>:-webkit-any(.admonition-title,summary){background-color:#00bfa51a}.md-typeset :-moz-any(.tip,.hint,.important)>:-moz-any(.admonition-title,summary){background-color:#00bfa51a}.md-typeset :is(.tip,.hint,.important)>:is(.admonition-title,summary){background-color:#00bfa51a}.md-typeset :-webkit-any(.tip,.hint,.important)>:-webkit-any(.admonition-title,summary):before{background-color:#00bfa5;-webkit-mask-image:var(--md-admonition-icon--tip);mask-image:var(--md-admonition-icon--tip)}.md-typeset :-moz-any(.tip,.hint,.important)>:-moz-any(.admonition-title,summary):before{background-color:#00bfa5;mask-image:var(--md-admonition-icon--tip)}.md-typeset :is(.tip,.hint,.important)>:is(.admonition-title,summary):before{background-color:#00bfa5;-webkit-mask-image:var(--md-admonition-icon--tip);mask-image:var(--md-admonition-icon--tip)}.md-typeset :-webkit-any(.tip,.hint,.important)>:-webkit-any(.admonition-title,summary):after{color:#00bfa5}.md-typeset :-moz-any(.tip,.hint,.important)>:-moz-any(.admonition-title,summary):after{color:#00bfa5}.md-typeset :is(.tip,.hint,.important)>:is(.admonition-title,summary):after{color:#00bfa5}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.success,.check,.done){border-color:#00c853}.md-typeset :-moz-any(.admonition,details):-moz-any(.success,.check,.done){border-color:#00c853}.md-typeset :is(.admonition,details):is(.success,.check,.done){border-color:#00c853}.md-typeset :-webkit-any(.success,.check,.done)>:-webkit-any(.admonition-title,summary){background-color:#00c8531a}.md-typeset :-moz-any(.success,.check,.done)>:-moz-any(.admonition-title,summary){background-color:#00c8531a}.md-typeset :is(.success,.check,.done)>:is(.admonition-title,summary){background-color:#00c8531a}.md-typeset :-webkit-any(.success,.check,.done)>:-webkit-any(.admonition-title,summary):before{background-color:#00c853;-webkit-mask-image:var(--md-admonition-icon--success);mask-image:var(--md-admonition-icon--success)}.md-typeset :-moz-any(.success,.check,.done)>:-moz-any(.admonition-title,summary):before{background-color:#00c853;mask-image:var(--md-admonition-icon--success)}.md-typeset :is(.success,.check,.done)>:is(.admonition-title,summary):before{background-color:#00c853;-webkit-mask-image:var(--md-admonition-icon--success);mask-image:var(--md-admonition-icon--success)}.md-typeset :-webkit-any(.success,.check,.done)>:-webkit-any(.admonition-title,summary):after{color:#00c853}.md-typeset :-moz-any(.success,.check,.done)>:-moz-any(.admonition-title,summary):after{color:#00c853}.md-typeset :is(.success,.check,.done)>:is(.admonition-title,summary):after{color:#00c853}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.question,.help,.faq){border-color:#64dd17}.md-typeset :-moz-any(.admonition,details):-moz-any(.question,.help,.faq){border-color:#64dd17}.md-typeset :is(.admonition,details):is(.question,.help,.faq){border-color:#64dd17}.md-typeset :-webkit-any(.question,.help,.faq)>:-webkit-any(.admonition-title,summary){background-color:#64dd171a}.md-typeset 
:-moz-any(.question,.help,.faq)>:-moz-any(.admonition-title,summary){background-color:#64dd171a}.md-typeset :is(.question,.help,.faq)>:is(.admonition-title,summary){background-color:#64dd171a}.md-typeset :-webkit-any(.question,.help,.faq)>:-webkit-any(.admonition-title,summary):before{background-color:#64dd17;-webkit-mask-image:var(--md-admonition-icon--question);mask-image:var(--md-admonition-icon--question)}.md-typeset :-moz-any(.question,.help,.faq)>:-moz-any(.admonition-title,summary):before{background-color:#64dd17;mask-image:var(--md-admonition-icon--question)}.md-typeset :is(.question,.help,.faq)>:is(.admonition-title,summary):before{background-color:#64dd17;-webkit-mask-image:var(--md-admonition-icon--question);mask-image:var(--md-admonition-icon--question)}.md-typeset :-webkit-any(.question,.help,.faq)>:-webkit-any(.admonition-title,summary):after{color:#64dd17}.md-typeset :-moz-any(.question,.help,.faq)>:-moz-any(.admonition-title,summary):after{color:#64dd17}.md-typeset :is(.question,.help,.faq)>:is(.admonition-title,summary):after{color:#64dd17}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.warning,.caution,.attention){border-color:#ff9100}.md-typeset :-moz-any(.admonition,details):-moz-any(.warning,.caution,.attention){border-color:#ff9100}.md-typeset :is(.admonition,details):is(.warning,.caution,.attention){border-color:#ff9100}.md-typeset :-webkit-any(.warning,.caution,.attention)>:-webkit-any(.admonition-title,summary){background-color:#ff91001a}.md-typeset :-moz-any(.warning,.caution,.attention)>:-moz-any(.admonition-title,summary){background-color:#ff91001a}.md-typeset :is(.warning,.caution,.attention)>:is(.admonition-title,summary){background-color:#ff91001a}.md-typeset :-webkit-any(.warning,.caution,.attention)>:-webkit-any(.admonition-title,summary):before{background-color:#ff9100;-webkit-mask-image:var(--md-admonition-icon--warning);mask-image:var(--md-admonition-icon--warning)}.md-typeset :-moz-any(.warning,.caution,.attention)>:-moz-any(.admonition-title,summary):before{background-color:#ff9100;mask-image:var(--md-admonition-icon--warning)}.md-typeset :is(.warning,.caution,.attention)>:is(.admonition-title,summary):before{background-color:#ff9100;-webkit-mask-image:var(--md-admonition-icon--warning);mask-image:var(--md-admonition-icon--warning)}.md-typeset :-webkit-any(.warning,.caution,.attention)>:-webkit-any(.admonition-title,summary):after{color:#ff9100}.md-typeset :-moz-any(.warning,.caution,.attention)>:-moz-any(.admonition-title,summary):after{color:#ff9100}.md-typeset :is(.warning,.caution,.attention)>:is(.admonition-title,summary):after{color:#ff9100}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.failure,.fail,.missing){border-color:#ff5252}.md-typeset :-moz-any(.admonition,details):-moz-any(.failure,.fail,.missing){border-color:#ff5252}.md-typeset :is(.admonition,details):is(.failure,.fail,.missing){border-color:#ff5252}.md-typeset :-webkit-any(.failure,.fail,.missing)>:-webkit-any(.admonition-title,summary){background-color:#ff52521a}.md-typeset :-moz-any(.failure,.fail,.missing)>:-moz-any(.admonition-title,summary){background-color:#ff52521a}.md-typeset :is(.failure,.fail,.missing)>:is(.admonition-title,summary){background-color:#ff52521a}.md-typeset :-webkit-any(.failure,.fail,.missing)>:-webkit-any(.admonition-title,summary):before{background-color:#ff5252;-webkit-mask-image:var(--md-admonition-icon--failure);mask-image:var(--md-admonition-icon--failure)}.md-typeset 
:-moz-any(.failure,.fail,.missing)>:-moz-any(.admonition-title,summary):before{background-color:#ff5252;mask-image:var(--md-admonition-icon--failure)}.md-typeset :is(.failure,.fail,.missing)>:is(.admonition-title,summary):before{background-color:#ff5252;-webkit-mask-image:var(--md-admonition-icon--failure);mask-image:var(--md-admonition-icon--failure)}.md-typeset :-webkit-any(.failure,.fail,.missing)>:-webkit-any(.admonition-title,summary):after{color:#ff5252}.md-typeset :-moz-any(.failure,.fail,.missing)>:-moz-any(.admonition-title,summary):after{color:#ff5252}.md-typeset :is(.failure,.fail,.missing)>:is(.admonition-title,summary):after{color:#ff5252}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.danger,.error){border-color:#ff1744}.md-typeset :-moz-any(.admonition,details):-moz-any(.danger,.error){border-color:#ff1744}.md-typeset :is(.admonition,details):is(.danger,.error){border-color:#ff1744}.md-typeset :-webkit-any(.danger,.error)>:-webkit-any(.admonition-title,summary){background-color:#ff17441a}.md-typeset :-moz-any(.danger,.error)>:-moz-any(.admonition-title,summary){background-color:#ff17441a}.md-typeset :is(.danger,.error)>:is(.admonition-title,summary){background-color:#ff17441a}.md-typeset :-webkit-any(.danger,.error)>:-webkit-any(.admonition-title,summary):before{background-color:#ff1744;-webkit-mask-image:var(--md-admonition-icon--danger);mask-image:var(--md-admonition-icon--danger)}.md-typeset :-moz-any(.danger,.error)>:-moz-any(.admonition-title,summary):before{background-color:#ff1744;mask-image:var(--md-admonition-icon--danger)}.md-typeset :is(.danger,.error)>:is(.admonition-title,summary):before{background-color:#ff1744;-webkit-mask-image:var(--md-admonition-icon--danger);mask-image:var(--md-admonition-icon--danger)}.md-typeset :-webkit-any(.danger,.error)>:-webkit-any(.admonition-title,summary):after{color:#ff1744}.md-typeset :-moz-any(.danger,.error)>:-moz-any(.admonition-title,summary):after{color:#ff1744}.md-typeset :is(.danger,.error)>:is(.admonition-title,summary):after{color:#ff1744}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.bug){border-color:#f50057}.md-typeset :-moz-any(.admonition,details):-moz-any(.bug){border-color:#f50057}.md-typeset :is(.admonition,details):is(.bug){border-color:#f50057}.md-typeset :-webkit-any(.bug)>:-webkit-any(.admonition-title,summary){background-color:#f500571a}.md-typeset :-moz-any(.bug)>:-moz-any(.admonition-title,summary){background-color:#f500571a}.md-typeset :is(.bug)>:is(.admonition-title,summary){background-color:#f500571a}.md-typeset :-webkit-any(.bug)>:-webkit-any(.admonition-title,summary):before{background-color:#f50057;-webkit-mask-image:var(--md-admonition-icon--bug);mask-image:var(--md-admonition-icon--bug)}.md-typeset :-moz-any(.bug)>:-moz-any(.admonition-title,summary):before{background-color:#f50057;mask-image:var(--md-admonition-icon--bug)}.md-typeset :is(.bug)>:is(.admonition-title,summary):before{background-color:#f50057;-webkit-mask-image:var(--md-admonition-icon--bug);mask-image:var(--md-admonition-icon--bug)}.md-typeset :-webkit-any(.bug)>:-webkit-any(.admonition-title,summary):after{color:#f50057}.md-typeset :-moz-any(.bug)>:-moz-any(.admonition-title,summary):after{color:#f50057}.md-typeset :is(.bug)>:is(.admonition-title,summary):after{color:#f50057}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.example){border-color:#7c4dff}.md-typeset :-moz-any(.admonition,details):-moz-any(.example){border-color:#7c4dff}.md-typeset 
:is(.admonition,details):is(.example){border-color:#7c4dff}.md-typeset :-webkit-any(.example)>:-webkit-any(.admonition-title,summary){background-color:#7c4dff1a}.md-typeset :-moz-any(.example)>:-moz-any(.admonition-title,summary){background-color:#7c4dff1a}.md-typeset :is(.example)>:is(.admonition-title,summary){background-color:#7c4dff1a}.md-typeset :-webkit-any(.example)>:-webkit-any(.admonition-title,summary):before{background-color:#7c4dff;-webkit-mask-image:var(--md-admonition-icon--example);mask-image:var(--md-admonition-icon--example)}.md-typeset :-moz-any(.example)>:-moz-any(.admonition-title,summary):before{background-color:#7c4dff;mask-image:var(--md-admonition-icon--example)}.md-typeset :is(.example)>:is(.admonition-title,summary):before{background-color:#7c4dff;-webkit-mask-image:var(--md-admonition-icon--example);mask-image:var(--md-admonition-icon--example)}.md-typeset :-webkit-any(.example)>:-webkit-any(.admonition-title,summary):after{color:#7c4dff}.md-typeset :-moz-any(.example)>:-moz-any(.admonition-title,summary):after{color:#7c4dff}.md-typeset :is(.example)>:is(.admonition-title,summary):after{color:#7c4dff}.md-typeset :-webkit-any(.admonition,details):-webkit-any(.quote,.cite){border-color:#9e9e9e}.md-typeset :-moz-any(.admonition,details):-moz-any(.quote,.cite){border-color:#9e9e9e}.md-typeset :is(.admonition,details):is(.quote,.cite){border-color:#9e9e9e}.md-typeset :-webkit-any(.quote,.cite)>:-webkit-any(.admonition-title,summary){background-color:#9e9e9e1a}.md-typeset :-moz-any(.quote,.cite)>:-moz-any(.admonition-title,summary){background-color:#9e9e9e1a}.md-typeset :is(.quote,.cite)>:is(.admonition-title,summary){background-color:#9e9e9e1a}.md-typeset :-webkit-any(.quote,.cite)>:-webkit-any(.admonition-title,summary):before{background-color:#9e9e9e;-webkit-mask-image:var(--md-admonition-icon--quote);mask-image:var(--md-admonition-icon--quote)}.md-typeset :-moz-any(.quote,.cite)>:-moz-any(.admonition-title,summary):before{background-color:#9e9e9e;mask-image:var(--md-admonition-icon--quote)}.md-typeset :is(.quote,.cite)>:is(.admonition-title,summary):before{background-color:#9e9e9e;-webkit-mask-image:var(--md-admonition-icon--quote);mask-image:var(--md-admonition-icon--quote)}.md-typeset :-webkit-any(.quote,.cite)>:-webkit-any(.admonition-title,summary):after{color:#9e9e9e}.md-typeset :-moz-any(.quote,.cite)>:-moz-any(.admonition-title,summary):after{color:#9e9e9e}.md-typeset :is(.quote,.cite)>:is(.admonition-title,summary):after{color:#9e9e9e}:root{--md-footnotes-icon:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .footnote{color:var(--md-default-fg-color--light);font-size:.64rem}[dir=ltr] .md-typeset .footnote>ol{margin-left:0}[dir=rtl] .md-typeset .footnote>ol{margin-right:0}.md-typeset .footnote>ol>li{transition:color 125ms}.md-typeset .footnote>ol>li:target{color:var(--md-default-fg-color)}.md-typeset .footnote>ol>li:focus-within .footnote-backref{opacity:1;transform:translateX(0);transition:none}.md-typeset .footnote>ol>li:-webkit-any(:hover,:target) .footnote-backref{opacity:1;transform:translateX(0)}.md-typeset .footnote>ol>li:-moz-any(:hover,:target) .footnote-backref{opacity:1;transform:translateX(0)}.md-typeset .footnote>ol>li:is(:hover,:target) .footnote-backref{opacity:1;transform:translateX(0)}.md-typeset .footnote>ol>li>:first-child{margin-top:0}.md-typeset .footnote-ref{font-size:.75em;font-weight:700}html .md-typeset .footnote-ref{outline-offset:.1rem}.md-typeset [id^="fnref:"]:target>.footnote-ref{outline:auto}.md-typeset 
.footnote-backref{color:var(--md-typeset-a-color);display:inline-block;font-size:0;opacity:0;transform:translateX(.25rem);transition:color .25s,transform .25s .25s,opacity 125ms .25s;vertical-align:text-bottom}@media print{.md-typeset .footnote-backref{color:var(--md-typeset-a-color);opacity:1;transform:translateX(0)}}[dir=rtl] .md-typeset .footnote-backref{transform:translateX(-.25rem)}.md-typeset .footnote-backref:hover{color:var(--md-accent-fg-color)}.md-typeset .footnote-backref:before{background-color:currentcolor;content:"";display:inline-block;height:.8rem;-webkit-mask-image:var(--md-footnotes-icon);mask-image:var(--md-footnotes-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;width:.8rem}[dir=rtl] .md-typeset .footnote-backref:before svg{transform:scaleX(-1)}[dir=ltr] .md-typeset .headerlink{margin-left:.5rem}[dir=rtl] .md-typeset .headerlink{margin-right:.5rem}.md-typeset .headerlink{color:var(--md-default-fg-color--lighter);display:inline-block;opacity:0;transition:color .25s,opacity 125ms}@media print{.md-typeset .headerlink{display:none}}.md-typeset .headerlink:focus,.md-typeset :-webkit-any(:hover,:target)>.headerlink{opacity:1;-webkit-transition:color .25s,opacity 125ms;transition:color .25s,opacity 125ms}.md-typeset .headerlink:focus,.md-typeset :-moz-any(:hover,:target)>.headerlink{opacity:1;-moz-transition:color .25s,opacity 125ms;transition:color .25s,opacity 125ms}.md-typeset .headerlink:focus,.md-typeset :is(:hover,:target)>.headerlink{opacity:1;transition:color .25s,opacity 125ms}.md-typeset .headerlink:-webkit-any(:focus,:hover),.md-typeset :target>.headerlink{color:var(--md-accent-fg-color)}.md-typeset .headerlink:-moz-any(:focus,:hover),.md-typeset :target>.headerlink{color:var(--md-accent-fg-color)}.md-typeset .headerlink:is(:focus,:hover),.md-typeset :target>.headerlink{color:var(--md-accent-fg-color)}.md-typeset :target{--md-scroll-margin:3.6rem;--md-scroll-offset:0rem;scroll-margin-top:calc(var(--md-scroll-margin) - var(--md-scroll-offset))}@media screen and (min-width:76.25em){.md-header--lifted~.md-container .md-typeset :target{--md-scroll-margin:6rem}}.md-typeset :-webkit-any(h1,h2,h3):target{--md-scroll-offset:0.2rem}.md-typeset :-moz-any(h1,h2,h3):target{--md-scroll-offset:0.2rem}.md-typeset :is(h1,h2,h3):target{--md-scroll-offset:0.2rem}.md-typeset h4:target{--md-scroll-offset:0.15rem}.md-typeset div.arithmatex{overflow:auto}@media screen and (max-width:44.9375em){.md-typeset div.arithmatex{margin:0 -.8rem}}.md-typeset div.arithmatex>*{margin-left:auto!important;margin-right:auto!important;padding:0 .8rem;touch-action:auto;width:-webkit-min-content;width:-moz-min-content;width:min-content}.md-typeset div.arithmatex>* mjx-container{margin:0!important}.md-typeset :-webkit-any(del,ins,.comment).critic{-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset :-moz-any(del,ins,.comment).critic{box-decoration-break:clone}.md-typeset :is(del,ins,.comment).critic{-webkit-box-decoration-break:clone;box-decoration-break:clone}.md-typeset del.critic{background-color:var(--md-typeset-del-color)}.md-typeset ins.critic{background-color:var(--md-typeset-ins-color)}.md-typeset .critic.comment{color:var(--md-code-hl-comment-color)}.md-typeset .critic.comment:before{content:"/* "}.md-typeset .critic.comment:after{content:" */"}.md-typeset .critic.block{box-shadow:none;display:block;margin:1em 
0;overflow:auto;padding-left:.8rem;padding-right:.8rem}.md-typeset .critic.block>:first-child{margin-top:.5em}.md-typeset .critic.block>:last-child{margin-bottom:.5em}:root{--md-details-icon:url('data:image/svg+xml;charset=utf-8,')}.md-typeset details{display:flow-root;overflow:visible;padding-top:0}.md-typeset details[open]>summary:after{transform:rotate(90deg)}.md-typeset details:not([open]){box-shadow:none;padding-bottom:0}.md-typeset details:not([open])>summary{border-radius:.1rem}[dir=ltr] .md-typeset summary{padding-right:1.8rem}[dir=rtl] .md-typeset summary{padding-left:1.8rem}[dir=ltr] .md-typeset summary{border-top-left-radius:.1rem}[dir=ltr] .md-typeset summary,[dir=rtl] .md-typeset summary{border-top-right-radius:.1rem}[dir=rtl] .md-typeset summary{border-top-left-radius:.1rem}.md-typeset summary{cursor:pointer;display:block;min-height:1rem}.md-typeset summary.focus-visible{outline-color:var(--md-accent-fg-color);outline-offset:.2rem}.md-typeset summary:not(.focus-visible){-webkit-tap-highlight-color:transparent;outline:none}[dir=ltr] .md-typeset summary:after{right:.4rem}[dir=rtl] .md-typeset summary:after{left:.4rem}.md-typeset summary:after{background-color:currentcolor;content:"";height:1rem;-webkit-mask-image:var(--md-details-icon);mask-image:var(--md-details-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;position:absolute;top:.625em;transform:rotate(0deg);transition:transform .25s;width:1rem}[dir=rtl] .md-typeset summary:after{transform:rotate(180deg)}.md-typeset summary::marker{display:none}.md-typeset summary::-webkit-details-marker{display:none}.md-typeset :-webkit-any(.emojione,.twemoji,.gemoji){display:inline-flex;height:1.125em;vertical-align:text-top}.md-typeset :-moz-any(.emojione,.twemoji,.gemoji){display:inline-flex;height:1.125em;vertical-align:text-top}.md-typeset :is(.emojione,.twemoji,.gemoji){display:inline-flex;height:1.125em;vertical-align:text-top}.md-typeset :-webkit-any(.emojione,.twemoji,.gemoji) svg{fill:currentcolor;max-height:100%;width:1.125em}.md-typeset :-moz-any(.emojione,.twemoji,.gemoji) svg{fill:currentcolor;max-height:100%;width:1.125em}.md-typeset :is(.emojione,.twemoji,.gemoji) svg{fill:currentcolor;max-height:100%;width:1.125em}.highlight :-webkit-any(.o,.ow){color:var(--md-code-hl-operator-color)}.highlight :-moz-any(.o,.ow){color:var(--md-code-hl-operator-color)}.highlight :is(.o,.ow){color:var(--md-code-hl-operator-color)}.highlight .p{color:var(--md-code-hl-punctuation-color)}.highlight :-webkit-any(.cpf,.l,.s,.sb,.sc,.s2,.si,.s1,.ss){color:var(--md-code-hl-string-color)}.highlight :-moz-any(.cpf,.l,.s,.sb,.sc,.s2,.si,.s1,.ss){color:var(--md-code-hl-string-color)}.highlight :is(.cpf,.l,.s,.sb,.sc,.s2,.si,.s1,.ss){color:var(--md-code-hl-string-color)}.highlight :-webkit-any(.cp,.se,.sh,.sr,.sx){color:var(--md-code-hl-special-color)}.highlight :-moz-any(.cp,.se,.sh,.sr,.sx){color:var(--md-code-hl-special-color)}.highlight :is(.cp,.se,.sh,.sr,.sx){color:var(--md-code-hl-special-color)}.highlight :-webkit-any(.m,.mb,.mf,.mh,.mi,.il,.mo){color:var(--md-code-hl-number-color)}.highlight :-moz-any(.m,.mb,.mf,.mh,.mi,.il,.mo){color:var(--md-code-hl-number-color)}.highlight :is(.m,.mb,.mf,.mh,.mi,.il,.mo){color:var(--md-code-hl-number-color)}.highlight :-webkit-any(.k,.kd,.kn,.kp,.kr,.kt){color:var(--md-code-hl-keyword-color)}.highlight :-moz-any(.k,.kd,.kn,.kp,.kr,.kt){color:var(--md-code-hl-keyword-color)}.highlight 
:is(.k,.kd,.kn,.kp,.kr,.kt){color:var(--md-code-hl-keyword-color)}.highlight :-webkit-any(.kc,.n){color:var(--md-code-hl-name-color)}.highlight :-moz-any(.kc,.n){color:var(--md-code-hl-name-color)}.highlight :is(.kc,.n){color:var(--md-code-hl-name-color)}.highlight :-webkit-any(.no,.nb,.bp){color:var(--md-code-hl-constant-color)}.highlight :-moz-any(.no,.nb,.bp){color:var(--md-code-hl-constant-color)}.highlight :is(.no,.nb,.bp){color:var(--md-code-hl-constant-color)}.highlight :-webkit-any(.nc,.ne,.nf,.nn){color:var(--md-code-hl-function-color)}.highlight :-moz-any(.nc,.ne,.nf,.nn){color:var(--md-code-hl-function-color)}.highlight :is(.nc,.ne,.nf,.nn){color:var(--md-code-hl-function-color)}.highlight :-webkit-any(.nd,.ni,.nl,.nt){color:var(--md-code-hl-keyword-color)}.highlight :-moz-any(.nd,.ni,.nl,.nt){color:var(--md-code-hl-keyword-color)}.highlight :is(.nd,.ni,.nl,.nt){color:var(--md-code-hl-keyword-color)}.highlight :-webkit-any(.c,.cm,.c1,.ch,.cs,.sd){color:var(--md-code-hl-comment-color)}.highlight :-moz-any(.c,.cm,.c1,.ch,.cs,.sd){color:var(--md-code-hl-comment-color)}.highlight :is(.c,.cm,.c1,.ch,.cs,.sd){color:var(--md-code-hl-comment-color)}.highlight :-webkit-any(.na,.nv,.vc,.vg,.vi){color:var(--md-code-hl-variable-color)}.highlight :-moz-any(.na,.nv,.vc,.vg,.vi){color:var(--md-code-hl-variable-color)}.highlight :is(.na,.nv,.vc,.vg,.vi){color:var(--md-code-hl-variable-color)}.highlight :-webkit-any(.ge,.gr,.gh,.go,.gp,.gs,.gu,.gt){color:var(--md-code-hl-generic-color)}.highlight :-moz-any(.ge,.gr,.gh,.go,.gp,.gs,.gu,.gt){color:var(--md-code-hl-generic-color)}.highlight :is(.ge,.gr,.gh,.go,.gp,.gs,.gu,.gt){color:var(--md-code-hl-generic-color)}.highlight :-webkit-any(.gd,.gi){border-radius:.1rem;margin:0 -.125em;padding:0 .125em}.highlight :-moz-any(.gd,.gi){border-radius:.1rem;margin:0 -.125em;padding:0 .125em}.highlight :is(.gd,.gi){border-radius:.1rem;margin:0 -.125em;padding:0 .125em}.highlight .gd{background-color:var(--md-typeset-del-color)}.highlight .gi{background-color:var(--md-typeset-ins-color)}.highlight .hll{background-color:var(--md-code-hl-color);display:block;margin:0 -1.1764705882em;padding:0 1.1764705882em}.highlight span.filename{background-color:var(--md-code-bg-color);border-bottom:.05rem solid var(--md-default-fg-color--lightest);border-top-left-radius:.1rem;border-top-right-radius:.1rem;display:flow-root;font-size:.85em;font-weight:700;margin-top:1em;padding:.6617647059em 1.1764705882em;position:relative}.highlight span.filename+pre{margin-top:0}.highlight span.filename+pre>code{border-top-left-radius:0;border-top-right-radius:0}.highlight [data-linenos]:before{background-color:var(--md-code-bg-color);box-shadow:-.05rem 0 var(--md-default-fg-color--lightest) inset;color:var(--md-default-fg-color--light);content:attr(data-linenos);float:left;left:-1.1764705882em;margin-left:-1.1764705882em;margin-right:1.1764705882em;padding-left:1.1764705882em;position:-webkit-sticky;position:sticky;-webkit-user-select:none;-moz-user-select:none;user-select:none;z-index:3}.highlight code a[id]{position:absolute;visibility:hidden}.highlight code[data-md-copying] .hll{display:contents}.highlight code[data-md-copying] .md-annotation{display:none}.highlighttable{display:flow-root}.highlighttable :-webkit-any(tbody,td){display:block;padding:0}.highlighttable :-moz-any(tbody,td){display:block;padding:0}.highlighttable :is(tbody,td){display:block;padding:0}.highlighttable tr{display:flex}.highlighttable pre{margin:0}.highlighttable 
th.filename{flex-grow:1;padding:0;text-align:left}.highlighttable th.filename span.filename{margin-top:0}.highlighttable .linenos{background-color:var(--md-code-bg-color);border-bottom-left-radius:.1rem;border-top-left-radius:.1rem;font-size:.85em;padding:.7720588235em 0 .7720588235em 1.1764705882em;-webkit-user-select:none;-moz-user-select:none;user-select:none}.highlighttable .linenodiv{box-shadow:-.05rem 0 var(--md-default-fg-color--lightest) inset;padding-right:.5882352941em}.highlighttable .linenodiv pre{color:var(--md-default-fg-color--light);text-align:right}.highlighttable .code{flex:1;min-width:0}.linenodiv a{color:inherit}.md-typeset .highlighttable{direction:ltr;margin:1em 0}.md-typeset .highlighttable>tbody>tr>.code>div>pre>code{border-bottom-left-radius:0;border-top-left-radius:0}.md-typeset .highlight+.result{border:.05rem solid var(--md-code-bg-color);border-bottom-left-radius:.1rem;border-bottom-right-radius:.1rem;border-top-width:.1rem;margin-top:-1.125em;overflow:visible;padding:0 1em}.md-typeset .highlight+.result:after{clear:both;content:"";display:block}@media screen and (max-width:44.9375em){.md-content__inner>.highlight{margin:1em -.8rem}.md-content__inner>.highlight>.filename,.md-content__inner>.highlight>.highlighttable>tbody>tr>.code>div>pre>code,.md-content__inner>.highlight>.highlighttable>tbody>tr>.filename span.filename,.md-content__inner>.highlight>.highlighttable>tbody>tr>.linenos,.md-content__inner>.highlight>pre>code{border-radius:0}.md-content__inner>.highlight+.result{border-left-width:0;border-radius:0;border-right-width:0;margin-left:-.8rem;margin-right:-.8rem}}.md-typeset .keys kbd:-webkit-any(:before,:after){-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;color:inherit;margin:0;position:relative}.md-typeset .keys kbd:-moz-any(:before,:after){-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;color:inherit;margin:0;position:relative}.md-typeset .keys kbd:is(:before,:after){-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;color:inherit;margin:0;position:relative}.md-typeset .keys span{color:var(--md-default-fg-color--light);padding:0 .2em}.md-typeset .keys .key-alt:before,.md-typeset .keys .key-left-alt:before,.md-typeset .keys .key-right-alt:before{content:"⎇";padding-right:.4em}.md-typeset .keys .key-command:before,.md-typeset .keys .key-left-command:before,.md-typeset .keys .key-right-command:before{content:"⌘";padding-right:.4em}.md-typeset .keys .key-control:before,.md-typeset .keys .key-left-control:before,.md-typeset .keys .key-right-control:before{content:"⌃";padding-right:.4em}.md-typeset .keys .key-left-meta:before,.md-typeset .keys .key-meta:before,.md-typeset .keys .key-right-meta:before{content:"◆";padding-right:.4em}.md-typeset .keys .key-left-option:before,.md-typeset .keys .key-option:before,.md-typeset .keys .key-right-option:before{content:"⌥";padding-right:.4em}.md-typeset .keys .key-left-shift:before,.md-typeset .keys .key-right-shift:before,.md-typeset .keys .key-shift:before{content:"⇧";padding-right:.4em}.md-typeset .keys .key-left-super:before,.md-typeset .keys .key-right-super:before,.md-typeset .keys .key-super:before{content:"❖";padding-right:.4em}.md-typeset .keys .key-left-windows:before,.md-typeset .keys .key-right-windows:before,.md-typeset .keys .key-windows:before{content:"⊞";padding-right:.4em}.md-typeset .keys .key-arrow-down:before{content:"↓";padding-right:.4em}.md-typeset .keys .key-arrow-left:before{content:"←";padding-right:.4em}.md-typeset .keys 
.key-arrow-right:before{content:"→";padding-right:.4em}.md-typeset .keys .key-arrow-up:before{content:"↑";padding-right:.4em}.md-typeset .keys .key-backspace:before{content:"⌫";padding-right:.4em}.md-typeset .keys .key-backtab:before{content:"⇤";padding-right:.4em}.md-typeset .keys .key-caps-lock:before{content:"⇪";padding-right:.4em}.md-typeset .keys .key-clear:before{content:"⌧";padding-right:.4em}.md-typeset .keys .key-context-menu:before{content:"☰";padding-right:.4em}.md-typeset .keys .key-delete:before{content:"⌦";padding-right:.4em}.md-typeset .keys .key-eject:before{content:"⏏";padding-right:.4em}.md-typeset .keys .key-end:before{content:"⤓";padding-right:.4em}.md-typeset .keys .key-escape:before{content:"⎋";padding-right:.4em}.md-typeset .keys .key-home:before{content:"⤒";padding-right:.4em}.md-typeset .keys .key-insert:before{content:"⎀";padding-right:.4em}.md-typeset .keys .key-page-down:before{content:"⇟";padding-right:.4em}.md-typeset .keys .key-page-up:before{content:"⇞";padding-right:.4em}.md-typeset .keys .key-print-screen:before{content:"⎙";padding-right:.4em}.md-typeset .keys .key-tab:after{content:"⇥";padding-left:.4em}.md-typeset .keys .key-num-enter:after{content:"⌤";padding-left:.4em}.md-typeset .keys .key-enter:after{content:"⏎";padding-left:.4em}:root{--md-tabbed-icon--prev:url('data:image/svg+xml;charset=utf-8,');--md-tabbed-icon--next:url('data:image/svg+xml;charset=utf-8,')}.md-typeset .tabbed-set{border-radius:.1rem;display:flex;flex-flow:column wrap;margin:1em 0;position:relative}.md-typeset .tabbed-set>input{height:0;opacity:0;position:absolute;width:0}.md-typeset .tabbed-set>input:target{--md-scroll-offset:0.625em}.md-typeset .tabbed-labels{-ms-overflow-style:none;box-shadow:0 -.05rem var(--md-default-fg-color--lightest) inset;display:flex;max-width:100%;overflow:auto;scrollbar-width:none}@media print{.md-typeset .tabbed-labels{display:contents}}@media screen{.js .md-typeset .tabbed-labels{position:relative}.js .md-typeset .tabbed-labels:before{background:var(--md-accent-fg-color);bottom:0;content:"";display:block;height:2px;left:0;position:absolute;transform:translateX(var(--md-indicator-x));transition:width 225ms,transform .25s;transition-timing-function:cubic-bezier(.4,0,.2,1);width:var(--md-indicator-width)}}.md-typeset .tabbed-labels::-webkit-scrollbar{display:none}.md-typeset .tabbed-labels>label{border-bottom:.1rem solid #0000;border-radius:.1rem .1rem 0 0;color:var(--md-default-fg-color--light);cursor:pointer;flex-shrink:0;font-size:.64rem;font-weight:700;padding:.78125em 1.25em .625em;scroll-margin-inline-start:1rem;transition:background-color .25s,color .25s;white-space:nowrap;width:auto}@media print{.md-typeset .tabbed-labels>label:first-child{order:1}.md-typeset .tabbed-labels>label:nth-child(2){order:2}.md-typeset .tabbed-labels>label:nth-child(3){order:3}.md-typeset .tabbed-labels>label:nth-child(4){order:4}.md-typeset .tabbed-labels>label:nth-child(5){order:5}.md-typeset .tabbed-labels>label:nth-child(6){order:6}.md-typeset .tabbed-labels>label:nth-child(7){order:7}.md-typeset .tabbed-labels>label:nth-child(8){order:8}.md-typeset .tabbed-labels>label:nth-child(9){order:9}.md-typeset .tabbed-labels>label:nth-child(10){order:10}.md-typeset .tabbed-labels>label:nth-child(11){order:11}.md-typeset .tabbed-labels>label:nth-child(12){order:12}.md-typeset .tabbed-labels>label:nth-child(13){order:13}.md-typeset .tabbed-labels>label:nth-child(14){order:14}.md-typeset .tabbed-labels>label:nth-child(15){order:15}.md-typeset 
.tabbed-labels>label:nth-child(16){order:16}.md-typeset .tabbed-labels>label:nth-child(17){order:17}.md-typeset .tabbed-labels>label:nth-child(18){order:18}.md-typeset .tabbed-labels>label:nth-child(19){order:19}.md-typeset .tabbed-labels>label:nth-child(20){order:20}}.md-typeset .tabbed-labels>label:hover{color:var(--md-accent-fg-color)}.md-typeset .tabbed-content{width:100%}@media print{.md-typeset .tabbed-content{display:contents}}.md-typeset .tabbed-block{display:none}@media print{.md-typeset .tabbed-block{display:block}.md-typeset .tabbed-block:first-child{order:1}.md-typeset .tabbed-block:nth-child(2){order:2}.md-typeset .tabbed-block:nth-child(3){order:3}.md-typeset .tabbed-block:nth-child(4){order:4}.md-typeset .tabbed-block:nth-child(5){order:5}.md-typeset .tabbed-block:nth-child(6){order:6}.md-typeset .tabbed-block:nth-child(7){order:7}.md-typeset .tabbed-block:nth-child(8){order:8}.md-typeset .tabbed-block:nth-child(9){order:9}.md-typeset .tabbed-block:nth-child(10){order:10}.md-typeset .tabbed-block:nth-child(11){order:11}.md-typeset .tabbed-block:nth-child(12){order:12}.md-typeset .tabbed-block:nth-child(13){order:13}.md-typeset .tabbed-block:nth-child(14){order:14}.md-typeset .tabbed-block:nth-child(15){order:15}.md-typeset .tabbed-block:nth-child(16){order:16}.md-typeset .tabbed-block:nth-child(17){order:17}.md-typeset .tabbed-block:nth-child(18){order:18}.md-typeset .tabbed-block:nth-child(19){order:19}.md-typeset .tabbed-block:nth-child(20){order:20}}.md-typeset .tabbed-block>.highlight:first-child>pre,.md-typeset .tabbed-block>pre:first-child{margin:0}.md-typeset .tabbed-block>.highlight:first-child>pre>code,.md-typeset .tabbed-block>pre:first-child>code{border-top-left-radius:0;border-top-right-radius:0}.md-typeset .tabbed-block>.highlight:first-child>.filename{border-top-left-radius:0;border-top-right-radius:0;margin:0}.md-typeset .tabbed-block>.highlight:first-child>.highlighttable{margin:0}.md-typeset .tabbed-block>.highlight:first-child>.highlighttable>tbody>tr>.filename span.filename,.md-typeset .tabbed-block>.highlight:first-child>.highlighttable>tbody>tr>.linenos{border-top-left-radius:0;border-top-right-radius:0;margin:0}.md-typeset .tabbed-block>.highlight:first-child>.highlighttable>tbody>tr>.code>div>pre>code{border-top-left-radius:0;border-top-right-radius:0}.md-typeset .tabbed-block>.highlight:first-child+.result{margin-top:-.125em}.md-typeset .tabbed-block>.tabbed-set{margin:0}.md-typeset .tabbed-button{align-self:center;border-radius:100%;color:var(--md-default-fg-color--light);cursor:pointer;display:block;height:.9rem;margin-top:.1rem;pointer-events:auto;transition:background-color .25s;width:.9rem}.md-typeset .tabbed-button:hover{background-color:var(--md-accent-fg-color--transparent);color:var(--md-accent-fg-color)}.md-typeset .tabbed-button:after{background-color:currentcolor;content:"";display:block;height:100%;-webkit-mask-image:var(--md-tabbed-icon--prev);mask-image:var(--md-tabbed-icon--prev);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;transition:background-color .25s,transform .25s;width:100%}.md-typeset .tabbed-control{background:linear-gradient(to right,var(--md-default-bg-color) 60%,#0000);display:flex;height:1.9rem;justify-content:start;pointer-events:none;position:absolute;transition:opacity 125ms;width:1.2rem}[dir=rtl] .md-typeset .tabbed-control{transform:rotate(180deg)}.md-typeset .tabbed-control[hidden]{opacity:0}.md-typeset 
.tabbed-control--next{background:linear-gradient(to left,var(--md-default-bg-color) 60%,#0000);justify-content:end;right:0}.md-typeset .tabbed-control--next .tabbed-button:after{-webkit-mask-image:var(--md-tabbed-icon--next);mask-image:var(--md-tabbed-icon--next)}@media screen and (max-width:44.9375em){[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels{padding-left:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels{padding-right:.8rem}.md-content__inner>.tabbed-set .tabbed-labels{margin:0 -.8rem;max-width:100vw;scroll-padding-inline-start:.8rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels:after{padding-right:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels:after{padding-left:.8rem}.md-content__inner>.tabbed-set .tabbed-labels:after{content:""}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{margin-left:-.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{margin-right:-.8rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{padding-left:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{padding-right:.8rem}.md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--prev{width:2rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{margin-right:-.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{margin-left:-.8rem}[dir=ltr] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{padding-right:.8rem}[dir=rtl] .md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{padding-left:.8rem}.md-content__inner>.tabbed-set .tabbed-labels~.tabbed-control--next{width:2rem}}@media screen{.md-typeset .tabbed-set>input:first-child:checked~.tabbed-labels>:first-child,.md-typeset .tabbed-set>input:nth-child(10):checked~.tabbed-labels>:nth-child(10),.md-typeset .tabbed-set>input:nth-child(11):checked~.tabbed-labels>:nth-child(11),.md-typeset .tabbed-set>input:nth-child(12):checked~.tabbed-labels>:nth-child(12),.md-typeset .tabbed-set>input:nth-child(13):checked~.tabbed-labels>:nth-child(13),.md-typeset .tabbed-set>input:nth-child(14):checked~.tabbed-labels>:nth-child(14),.md-typeset .tabbed-set>input:nth-child(15):checked~.tabbed-labels>:nth-child(15),.md-typeset .tabbed-set>input:nth-child(16):checked~.tabbed-labels>:nth-child(16),.md-typeset .tabbed-set>input:nth-child(17):checked~.tabbed-labels>:nth-child(17),.md-typeset .tabbed-set>input:nth-child(18):checked~.tabbed-labels>:nth-child(18),.md-typeset .tabbed-set>input:nth-child(19):checked~.tabbed-labels>:nth-child(19),.md-typeset .tabbed-set>input:nth-child(2):checked~.tabbed-labels>:nth-child(2),.md-typeset .tabbed-set>input:nth-child(20):checked~.tabbed-labels>:nth-child(20),.md-typeset .tabbed-set>input:nth-child(3):checked~.tabbed-labels>:nth-child(3),.md-typeset .tabbed-set>input:nth-child(4):checked~.tabbed-labels>:nth-child(4),.md-typeset .tabbed-set>input:nth-child(5):checked~.tabbed-labels>:nth-child(5),.md-typeset .tabbed-set>input:nth-child(6):checked~.tabbed-labels>:nth-child(6),.md-typeset .tabbed-set>input:nth-child(7):checked~.tabbed-labels>:nth-child(7),.md-typeset .tabbed-set>input:nth-child(8):checked~.tabbed-labels>:nth-child(8),.md-typeset .tabbed-set>input:nth-child(9):checked~.tabbed-labels>:nth-child(9){color:var(--md-accent-fg-color)}.md-typeset .no-js .tabbed-set>input:first-child:checked~.tabbed-labels>:first-child,.md-typeset .no-js 
.tabbed-set>input:nth-child(10):checked~.tabbed-labels>:nth-child(10),.md-typeset .no-js .tabbed-set>input:nth-child(11):checked~.tabbed-labels>:nth-child(11),.md-typeset .no-js .tabbed-set>input:nth-child(12):checked~.tabbed-labels>:nth-child(12),.md-typeset .no-js .tabbed-set>input:nth-child(13):checked~.tabbed-labels>:nth-child(13),.md-typeset .no-js .tabbed-set>input:nth-child(14):checked~.tabbed-labels>:nth-child(14),.md-typeset .no-js .tabbed-set>input:nth-child(15):checked~.tabbed-labels>:nth-child(15),.md-typeset .no-js .tabbed-set>input:nth-child(16):checked~.tabbed-labels>:nth-child(16),.md-typeset .no-js .tabbed-set>input:nth-child(17):checked~.tabbed-labels>:nth-child(17),.md-typeset .no-js .tabbed-set>input:nth-child(18):checked~.tabbed-labels>:nth-child(18),.md-typeset .no-js .tabbed-set>input:nth-child(19):checked~.tabbed-labels>:nth-child(19),.md-typeset .no-js .tabbed-set>input:nth-child(2):checked~.tabbed-labels>:nth-child(2),.md-typeset .no-js .tabbed-set>input:nth-child(20):checked~.tabbed-labels>:nth-child(20),.md-typeset .no-js .tabbed-set>input:nth-child(3):checked~.tabbed-labels>:nth-child(3),.md-typeset .no-js .tabbed-set>input:nth-child(4):checked~.tabbed-labels>:nth-child(4),.md-typeset .no-js .tabbed-set>input:nth-child(5):checked~.tabbed-labels>:nth-child(5),.md-typeset .no-js .tabbed-set>input:nth-child(6):checked~.tabbed-labels>:nth-child(6),.md-typeset .no-js .tabbed-set>input:nth-child(7):checked~.tabbed-labels>:nth-child(7),.md-typeset .no-js .tabbed-set>input:nth-child(8):checked~.tabbed-labels>:nth-child(8),.md-typeset .no-js .tabbed-set>input:nth-child(9):checked~.tabbed-labels>:nth-child(9),.no-js .md-typeset .tabbed-set>input:first-child:checked~.tabbed-labels>:first-child,.no-js .md-typeset .tabbed-set>input:nth-child(10):checked~.tabbed-labels>:nth-child(10),.no-js .md-typeset .tabbed-set>input:nth-child(11):checked~.tabbed-labels>:nth-child(11),.no-js .md-typeset .tabbed-set>input:nth-child(12):checked~.tabbed-labels>:nth-child(12),.no-js .md-typeset .tabbed-set>input:nth-child(13):checked~.tabbed-labels>:nth-child(13),.no-js .md-typeset .tabbed-set>input:nth-child(14):checked~.tabbed-labels>:nth-child(14),.no-js .md-typeset .tabbed-set>input:nth-child(15):checked~.tabbed-labels>:nth-child(15),.no-js .md-typeset .tabbed-set>input:nth-child(16):checked~.tabbed-labels>:nth-child(16),.no-js .md-typeset .tabbed-set>input:nth-child(17):checked~.tabbed-labels>:nth-child(17),.no-js .md-typeset .tabbed-set>input:nth-child(18):checked~.tabbed-labels>:nth-child(18),.no-js .md-typeset .tabbed-set>input:nth-child(19):checked~.tabbed-labels>:nth-child(19),.no-js .md-typeset .tabbed-set>input:nth-child(2):checked~.tabbed-labels>:nth-child(2),.no-js .md-typeset .tabbed-set>input:nth-child(20):checked~.tabbed-labels>:nth-child(20),.no-js .md-typeset .tabbed-set>input:nth-child(3):checked~.tabbed-labels>:nth-child(3),.no-js .md-typeset .tabbed-set>input:nth-child(4):checked~.tabbed-labels>:nth-child(4),.no-js .md-typeset .tabbed-set>input:nth-child(5):checked~.tabbed-labels>:nth-child(5),.no-js .md-typeset .tabbed-set>input:nth-child(6):checked~.tabbed-labels>:nth-child(6),.no-js .md-typeset .tabbed-set>input:nth-child(7):checked~.tabbed-labels>:nth-child(7),.no-js .md-typeset .tabbed-set>input:nth-child(8):checked~.tabbed-labels>:nth-child(8),.no-js .md-typeset .tabbed-set>input:nth-child(9):checked~.tabbed-labels>:nth-child(9){border-color:var(--md-accent-fg-color)}}.md-typeset .tabbed-set>input:first-child.focus-visible~.tabbed-labels>:first-child,.md-typeset 
.tabbed-set>input:nth-child(10).focus-visible~.tabbed-labels>:nth-child(10),.md-typeset .tabbed-set>input:nth-child(11).focus-visible~.tabbed-labels>:nth-child(11),.md-typeset .tabbed-set>input:nth-child(12).focus-visible~.tabbed-labels>:nth-child(12),.md-typeset .tabbed-set>input:nth-child(13).focus-visible~.tabbed-labels>:nth-child(13),.md-typeset .tabbed-set>input:nth-child(14).focus-visible~.tabbed-labels>:nth-child(14),.md-typeset .tabbed-set>input:nth-child(15).focus-visible~.tabbed-labels>:nth-child(15),.md-typeset .tabbed-set>input:nth-child(16).focus-visible~.tabbed-labels>:nth-child(16),.md-typeset .tabbed-set>input:nth-child(17).focus-visible~.tabbed-labels>:nth-child(17),.md-typeset .tabbed-set>input:nth-child(18).focus-visible~.tabbed-labels>:nth-child(18),.md-typeset .tabbed-set>input:nth-child(19).focus-visible~.tabbed-labels>:nth-child(19),.md-typeset .tabbed-set>input:nth-child(2).focus-visible~.tabbed-labels>:nth-child(2),.md-typeset .tabbed-set>input:nth-child(20).focus-visible~.tabbed-labels>:nth-child(20),.md-typeset .tabbed-set>input:nth-child(3).focus-visible~.tabbed-labels>:nth-child(3),.md-typeset .tabbed-set>input:nth-child(4).focus-visible~.tabbed-labels>:nth-child(4),.md-typeset .tabbed-set>input:nth-child(5).focus-visible~.tabbed-labels>:nth-child(5),.md-typeset .tabbed-set>input:nth-child(6).focus-visible~.tabbed-labels>:nth-child(6),.md-typeset .tabbed-set>input:nth-child(7).focus-visible~.tabbed-labels>:nth-child(7),.md-typeset .tabbed-set>input:nth-child(8).focus-visible~.tabbed-labels>:nth-child(8),.md-typeset .tabbed-set>input:nth-child(9).focus-visible~.tabbed-labels>:nth-child(9){background-color:var(--md-accent-fg-color--transparent)}.md-typeset .tabbed-set>input:first-child:checked~.tabbed-content>:first-child,.md-typeset .tabbed-set>input:nth-child(10):checked~.tabbed-content>:nth-child(10),.md-typeset .tabbed-set>input:nth-child(11):checked~.tabbed-content>:nth-child(11),.md-typeset .tabbed-set>input:nth-child(12):checked~.tabbed-content>:nth-child(12),.md-typeset .tabbed-set>input:nth-child(13):checked~.tabbed-content>:nth-child(13),.md-typeset .tabbed-set>input:nth-child(14):checked~.tabbed-content>:nth-child(14),.md-typeset .tabbed-set>input:nth-child(15):checked~.tabbed-content>:nth-child(15),.md-typeset .tabbed-set>input:nth-child(16):checked~.tabbed-content>:nth-child(16),.md-typeset .tabbed-set>input:nth-child(17):checked~.tabbed-content>:nth-child(17),.md-typeset .tabbed-set>input:nth-child(18):checked~.tabbed-content>:nth-child(18),.md-typeset .tabbed-set>input:nth-child(19):checked~.tabbed-content>:nth-child(19),.md-typeset .tabbed-set>input:nth-child(2):checked~.tabbed-content>:nth-child(2),.md-typeset .tabbed-set>input:nth-child(20):checked~.tabbed-content>:nth-child(20),.md-typeset .tabbed-set>input:nth-child(3):checked~.tabbed-content>:nth-child(3),.md-typeset .tabbed-set>input:nth-child(4):checked~.tabbed-content>:nth-child(4),.md-typeset .tabbed-set>input:nth-child(5):checked~.tabbed-content>:nth-child(5),.md-typeset .tabbed-set>input:nth-child(6):checked~.tabbed-content>:nth-child(6),.md-typeset .tabbed-set>input:nth-child(7):checked~.tabbed-content>:nth-child(7),.md-typeset .tabbed-set>input:nth-child(8):checked~.tabbed-content>:nth-child(8),.md-typeset .tabbed-set>input:nth-child(9):checked~.tabbed-content>:nth-child(9){display:block}:root{--md-tasklist-icon:url('data:image/svg+xml;charset=utf-8,');--md-tasklist-icon--checked:url('data:image/svg+xml;charset=utf-8,')}.md-typeset 
.task-list-item{list-style-type:none;position:relative}[dir=ltr] .md-typeset .task-list-item [type=checkbox]{left:-2em}[dir=rtl] .md-typeset .task-list-item [type=checkbox]{right:-2em}.md-typeset .task-list-item [type=checkbox]{position:absolute;top:.45em}.md-typeset .task-list-control [type=checkbox]{opacity:0;z-index:-1}[dir=ltr] .md-typeset .task-list-indicator:before{left:-1.5em}[dir=rtl] .md-typeset .task-list-indicator:before{right:-1.5em}.md-typeset .task-list-indicator:before{background-color:var(--md-default-fg-color--lightest);content:"";height:1.25em;-webkit-mask-image:var(--md-tasklist-icon);mask-image:var(--md-tasklist-icon);-webkit-mask-position:center;mask-position:center;-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat;-webkit-mask-size:contain;mask-size:contain;position:absolute;top:.15em;width:1.25em}.md-typeset [type=checkbox]:checked+.task-list-indicator:before{background-color:#00e676;-webkit-mask-image:var(--md-tasklist-icon--checked);mask-image:var(--md-tasklist-icon--checked)}:root>*{--md-mermaid-font-family:var(--md-text-font-family),sans-serif;--md-mermaid-edge-color:var(--md-code-fg-color);--md-mermaid-node-bg-color:var(--md-accent-fg-color--transparent);--md-mermaid-node-fg-color:var(--md-accent-fg-color);--md-mermaid-label-bg-color:var(--md-default-bg-color);--md-mermaid-label-fg-color:var(--md-code-fg-color)}.mermaid{line-height:normal;margin:1em 0}@media screen and (min-width:45em){[dir=ltr] .md-typeset .inline{float:left}[dir=rtl] .md-typeset .inline{float:right}[dir=ltr] .md-typeset .inline{margin-right:.8rem}[dir=rtl] .md-typeset .inline{margin-left:.8rem}.md-typeset .inline{margin-bottom:.8rem;margin-top:0;width:11.7rem}[dir=ltr] .md-typeset .inline.end{float:right}[dir=rtl] .md-typeset .inline.end{float:left}[dir=ltr] .md-typeset .inline.end{margin-left:.8rem;margin-right:0}[dir=rtl] .md-typeset .inline.end{margin-left:0;margin-right:.8rem}} \ No newline at end of file diff --git a/2.0.0/assets/stylesheets/main.975780f9.min.css.map b/2.0.0/assets/stylesheets/main.975780f9.min.css.map new file mode 100644 index 00000000..5e13ffb9 --- /dev/null +++ b/2.0.0/assets/stylesheets/main.975780f9.min.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["src/assets/stylesheets/main/extensions/pymdownx/_keys.scss","../../../src/assets/stylesheets/main.scss","src/assets/stylesheets/main/_resets.scss","src/assets/stylesheets/main/_colors.scss","src/assets/stylesheets/main/_icons.scss","src/assets/stylesheets/main/_typeset.scss","src/assets/stylesheets/utilities/_break.scss","src/assets/stylesheets/main/layout/_banner.scss","src/assets/stylesheets/main/layout/_base.scss","src/assets/stylesheets/main/layout/_clipboard.scss","src/assets/stylesheets/main/layout/_consent.scss","src/assets/stylesheets/main/layout/_content.scss","src/assets/stylesheets/main/layout/_dialog.scss","src/assets/stylesheets/main/layout/_feedback.scss","src/assets/stylesheets/main/layout/_footer.scss","src/assets/stylesheets/main/layout/_form.scss","src/assets/stylesheets/main/layout/_header.scss","src/assets/stylesheets/main/layout/_nav.scss","src/assets/stylesheets/main/layout/_search.scss","src/assets/stylesheets/main/layout/_select.scss","src/assets/stylesheets/main/layout/_sidebar.scss","src/assets/stylesheets/main/layout/_source.scss","src/assets/stylesheets/main/layout/_tabs.scss","src/assets/stylesheets/main/layout/_tag.scss","src/assets/stylesheets/main/layout/_tooltip.scss","src/assets/stylesheets/main/layout/_top.scss","src/assets/stylesheets/main/layout/_version.scss","src/assets/stylesheets/main/extensions/markdown/_admonition.scss","node_modules/material-design-color/material-color.scss","src/assets/stylesheets/main/extensions/markdown/_footnotes.scss","src/assets/stylesheets/main/extensions/markdown/_toc.scss","src/assets/stylesheets/main/extensions/pymdownx/_arithmatex.scss","src/assets/stylesheets/main/extensions/pymdownx/_critic.scss","src/assets/stylesheets/main/extensions/pymdownx/_details.scss","src/assets/stylesheets/main/extensions/pymdownx/_emoji.scss","src/assets/stylesheets/main/extensions/pymdownx/_highlight.scss","src/assets/stylesheets/main/extensions/pymdownx/_tabbed.scss","src/assets/stylesheets/main/extensions/pymdownx/_tasklist.scss","src/assets/stylesheets/main/integrations/_mermaid.scss","src/assets/stylesheets/main/_modifiers.scss"],"names":[],"mappings":"AAgGM,gBCo+GN,CCxiHA,KAEE,6BAAA,CAAA,0BAAA,CAAA,qBAAA,CADA,qBDzBF,CC8BA,iBAGE,kBD3BF,CC8BE,gCANF,iBAOI,yBDzBF,CACF,CC6BA,KACE,QD1BF,CC8BA,qBAIE,uCD3BF,CC+BA,EACE,aAAA,CACA,oBD5BF,CCgCA,GAME,QAAA,CAJA,kBAAA,CADA,aAAA,CAEA,aAAA,CAEA,gBAAA,CADA,SD3BF,CCiCA,MACE,aD9BF,CCkCA,QAEE,eD/BF,CCmCA,IACE,iBDhCF,CCoCA,MACE,uBAAA,CACA,gBDjCF,CCqCA,MAEE,eAAA,CACA,kBDlCF,CCsCA,OAKE,gBAAA,CACA,QAAA,CAFA,mBAAA,CADA,iBAAA,CAFA,QAAA,CACA,SD/BF,CCuCA,MACE,QAAA,CACA,YDpCF,CErDA,MAIE,6BAAA,CACA,oCAAA,CACA,mCAAA,CACA,0BAAA,CACA,sCAAA,CAGA,4BAAA,CACA,2CAAA,CACA,yBAAA,CACA,qCFmDF,CEpCA,qCAGE,+BAAA,CACA,sCAAA,CACA,wCAAA,CACA,yCAAA,CACA,0BAAA,CACA,sCAAA,CACA,wCAAA,CACA,yCAAA,CAGA,0BAAA,CACA,0BAAA,CAGA,4BAAA,CACA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,iCAAA,CAGA,gCAAA,CACA,gCAAA,CAGA,8BAAA,CACA,kCAAA,CACA,qCAAA,CAGA,kCAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,yBAAA,CACA,qCAAA,CACA,uCAAA,CACA,8BAAA,CACA,oCAAA,CAGA,8DAAA,CAKA,8DAAA,CAKA,0DFaF,CGjHE,aAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,YHsHJ,CI3HA,KACE,kCAAA,CACA,iCAAA,CAGA,uGAAA,CAKA,mFJ4HF,CItHA,WAGE,mCAAA,CACA,sCJyHF,CIrHA,wBANE,6BJmIF,CI7HA,aAIE,4BAAA,CACA,sCJwHF,CIhHA,MACE,0NAAA,CACA,mNAAA,CACA,oNJmHF,CI5GA,YAGE,gCAAA,CAAA,kBAAA,CAFA,eAAA,CACA,eJgHF,CI3GE,aAPF,YAQI,gBJ8GF,CACF,CI3GE,uGAME,iBAAA,CAAA,cJ6GJ,CIzGE,eAEE,uCAAA,CAEA,aAAA,CACA,e
AAA,CAJA,iBJgHJ,CIvGE,8BAPE,eAAA,CAGA,qBJkHJ,CI9GE,eAGE,kBAAA,CACA,eAAA,CAHA,oBJ6GJ,CIrGE,eAGE,gBAAA,CADA,eAAA,CAGA,qBAAA,CADA,eAAA,CAHA,mBJ2GJ,CInGE,kBACE,eJqGJ,CIjGE,eAEE,eAAA,CACA,qBAAA,CAFA,YJqGJ,CI/FE,8BAGE,uCAAA,CAEA,cAAA,CADA,eAAA,CAEA,qBAAA,CAJA,eJqGJ,CI7FE,eACE,wBJ+FJ,CI3FE,eAGE,+DAAA,CAFA,iBAAA,CACA,cJ8FJ,CIzFE,cACE,+BAAA,CACA,qBJ2FJ,CIxFI,mCAEE,sBJyFN,CIrFI,wCAEE,+BJsFN,CInFM,kDACE,uDJqFR,CIhFI,mBACE,kBAAA,CACA,iCJkFN,CI9EI,4BACE,uCAAA,CACA,oBJgFN,CI3EE,iDAGE,6BAAA,CACA,aAAA,CACA,2BJ6EJ,CI1EI,aARF,iDASI,oBJ+EJ,CACF,CI3EE,iBAIE,wCAAA,CACA,mBAAA,CACA,kCAAA,CAAA,0BAAA,CAJA,eAAA,CADA,uBAAA,CAEA,qBJgFJ,CI1EI,qCAEE,uCAAA,CADA,YJ6EN,CIvEE,gBAEE,iBAAA,CACA,eAAA,CAFA,iBJ2EJ,CItEI,qBAQE,kCAAA,CAAA,0BAAA,CADA,eAAA,CANA,aAAA,CACA,QAAA,CAIA,uCAAA,CAFA,aAAA,CADA,oCAAA,CAQA,yDAAA,CADA,oBAAA,CADA,iBAAA,CAJA,iBJ8EN,CIrEM,2BACE,+CJuER,CInEM,wCAEE,YAAA,CADA,WJsER,CIjEM,8CACE,oDJmER,CIhEQ,oDACE,0CJkEV,CI3DE,gBAOE,4CAAA,CACA,mBAAA,CACA,mKACE,CAPF,gCAAA,CAFA,oBAAA,CAGA,eAAA,CAFA,uBAAA,CAGA,uBAAA,CACA,qBJgEJ,CItDE,iBAGE,6CAAA,CACA,kCAAA,CAAA,0BAAA,CAHA,aAAA,CACA,qBJ0DJ,CIpDE,iBAEE,6DAAA,CACA,WAAA,CAFA,oBJwDJ,CInDI,oBANF,iBAOI,iBJsDJ,CInDI,yDAWE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CAKA,mBAAA,CAXA,oBAAA,CAOA,eAAA,CAHA,cAAA,CADA,aAAA,CADA,6BAAA,CAAA,qBAAA,CAGA,mBAAA,CAPA,iBAAA,CAGA,UJ+DN,CInEI,sDAWE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CAKA,mBAAA,CAXA,oBAAA,CAOA,eAAA,CAHA,cAAA,CADA,aAAA,CADA,0BAAA,CAAA,qBAAA,CAGA,mBAAA,CAPA,iBAAA,CAGA,UJ+DN,CInEI,mEAEE,MJiEN,CInEI,gEAEE,MJiEN,CInEI,0DAEE,MJiEN,CInEI,mEAEE,OJiEN,CInEI,gEAEE,OJiEN,CInEI,0DAEE,OJiEN,CInEI,gDAWE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CAKA,mBAAA,CAXA,oBAAA,CAOA,eAAA,CAHA,cAAA,CADA,aAAA,CADA,6BAAA,CAAA,0BAAA,CAAA,qBAAA,CAGA,mBAAA,CAPA,iBAAA,CAGA,UJ+DN,CACF,CIhDE,kBACE,WJkDJ,CI9CE,oDAEE,qBJgDJ,CIlDE,oDAEE,sBJgDJ,CI5CE,iCACE,kBJiDJ,CIlDE,iCACE,mBJiDJ,CIlDE,iCAIE,2DJ8CJ,CIlDE,iCAIE,4DJ8CJ,CIlDE,uBAGE,uCAAA,CADA,aAAA,CAAA,cJgDJ,CI1CE,eACE,oBJ4CJ,CIxCE,kDAEE,kBJ2CJ,CI7CE,kDAEE,mBJ2CJ,CI7CE,8BAGE,SJ0CJ,CIvCI,0DACE,iBJ0CN,CItCI,oCACE,2BJyCN,CItCM,0CACE,2BJyCR,CIpCI,wDAEE,kBJuCN,CIzCI,wDAEE,mBJuCN,CIzCI,oCACE,kBJwCN,CIpCM,kGAEE,aJwCR,CIpCM,0DACE,eJuCR,CInCM,4EACE,kBAAA,CAAA,eJuCR,CIxCM,sEACE,kBAAA,CAAA,eJuCR,CIxCM,gGAEE,kBJsCR,CIxCM,0FAEE,kBJsCR,CIxCM,8EAEE,kBJsCR,CIxCM,gGAEE,mBJsCR,CIxCM,0FAEE,mBJsCR,CIxCM,8EAEE,mBJsCR,CIxCM,0DACE,kBAAA,CAAA,eJuCR,CIhCE,yBAEE,mBJkCJ,CIpCE,yBAEE,oBJkCJ,CIpCE,eACE,mBAAA,CAAA,cJmCJ,CI9BE,kDAIE,WAAA,CADA,cJiCJ,CIzBI,4BAEE,oBJ2BN,CIvBI,6BAEE,oBJyBN,CIrBI,kCACE,YJuBN,CInBI,8EAEE,YJoBN,CIfE,mBACE,iBAAA,CAGA,eAAA,CADA,cAAA,CAEA,iBAAA,CAHA,yBAAA,CAAA,sBAAA,CAAA,iBJoBJ,CIdI,uBACE,aJgBN,CIXE,uBAGE,iBAAA,CADA,eAAA,CADA,eJeJ,CITE,mBACE,cJWJ,CIPE,+BAKE,2CAAA,CACA,iDAAA,CACA,mBAAA,CANA,oBAAA,CAGA,gBAAA,CAFA,cAAA,CACA,aAAA,CAKA,iBJSJ,CINI,aAXF,+BAYI,aJSJ,CACF,CIJI,iCACE,gBJMN,CICM,gEACE,YJCR,CIFM,6DACE,YJCR,CIFM,uDACE,YJCR,CIGM,+DACE,eJDR,CIAM,4DACE,eJDR,CIAM,sDACE,eJDR,CIMI,gEACE,eJJN,CIGI,6DACE,eJJN,CIGI,uDACE,eJJN,CIOM,0EACE,gBJLR,CIIM,uEACE,gBJLR,CIIM,iEACE,gBJLR,CIUI,kCAGE,eAAA,CAFA,cAAA,CACA,sBAAA,CAEA,kBJRN,CIYI,kCAGE,qDAAA,CAFA,sBAAA,CACA,kBJTN,CIcI,wCACE,iCJZN,CIeM,8CACE,iCAAA,CACA,sDJbR,CIkBI,iCACE,iBJhBN,CIqBE,wCACE,cJnBJ,CIsBI,wDAIE,gBJdN,CIUI,wDAIE,iBJdN,CIUI,8CAUE,UAAA,CATA,oBAAA,CAEA,YAAA,CAGA,oDAAA,CAAA,4CAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CACA,iCAAA,CAJA,0BAAA,CAHA,WJZN,CIwBI,oDACE,oDJtBN,CI0BI,mEACE,kDAAA,CACA,yDAAA,CAAA,iDJxBN,CI4BI,oEACE,kDAAA,CACA,0DAAA,CAAA,kDJ1BN,CI+BE,wBACE,iBAAA,CACA,eAAA,CACA,iBJ7BJ,CIiCE,mBACE,oBAAA,CACA,kBAAA,CACA,eJ/BJ,CIkCI,aANF,mBAOI,aJ/BJ,CACF,CIkCI,8BACE,aAAA,CAEA,QA
AA,CACA,eAAA,CAFA,UJ9BN,CK7VI,wCD0YF,uBACE,iBJzCF,CI4CE,4BACE,eJ1CJ,CACF,CM/hBA,WAGE,0CAAA,CADA,+BAAA,CADA,aNmiBF,CM9hBE,aANF,WAOI,YNiiBF,CACF,CM9hBE,oBAEE,uCAAA,CADA,gCNiiBJ,CM5hBE,kBAGE,eAAA,CAFA,iBAAA,CACA,eN+hBJ,CM1hBE,6BACE,WN+hBJ,CMhiBE,6BACE,UN+hBJ,CMhiBE,mBAEE,aAAA,CACA,cAAA,CACA,uBN4hBJ,CMzhBI,yBACE,UN2hBN,CO3jBA,KASE,cAAA,CARA,WAAA,CACA,iBP+jBF,CK3ZI,oCEtKJ,KAaI,gBPwjBF,CACF,CKhaI,oCEtKJ,KAkBI,cPwjBF,CACF,COnjBA,KASE,2CAAA,CAPA,YAAA,CACA,qBAAA,CAKA,eAAA,CAHA,eAAA,CAJA,iBAAA,CAGA,UPyjBF,COjjBE,aAZF,KAaI,aPojBF,CACF,CKjaI,wCEhJF,yBAII,cPijBJ,CACF,COxiBA,SAEE,gBAAA,CAAA,iBAAA,CADA,eP4iBF,COviBA,cACE,YAAA,CACA,qBAAA,CACA,WP0iBF,COviBE,aANF,cAOI,aP0iBF,CACF,COtiBA,SACE,WPyiBF,COtiBE,gBACE,YAAA,CACA,WAAA,CACA,iBPwiBJ,COniBA,aACE,eAAA,CAEA,sBAAA,CADA,kBPuiBF,CO7hBA,WACE,YPgiBF,CO3hBA,WAGE,QAAA,CACA,SAAA,CAHA,iBAAA,CACA,OPgiBF,CO3hBE,uCACE,aP6hBJ,COzhBE,+BAEE,uCAAA,CADA,kBP4hBJ,COthBA,SASE,2CAAA,CACA,mBAAA,CAHA,gCAAA,CACA,gBAAA,CAHA,YAAA,CAQA,SAAA,CAFA,uCAAA,CALA,mBAAA,CALA,cAAA,CAWA,2BAAA,CARA,UPgiBF,COphBE,eAGE,SAAA,CADA,uBAAA,CAEA,oEACE,CAJF,UPyhBJ,CO3gBA,MACE,WP8gBF,CQxqBA,MACE,+PR0qBF,CQpqBA,cAQE,mBAAA,CADA,0CAAA,CAIA,cAAA,CALA,YAAA,CAGA,uCAAA,CACA,oBAAA,CATA,iBAAA,CAEA,UAAA,CADA,QAAA,CAUA,qBAAA,CAPA,WAAA,CADA,SR+qBF,CQpqBE,aAfF,cAgBI,YRuqBF,CACF,CQpqBE,kCAEE,uCAAA,CADA,YRuqBJ,CQlqBE,qBACE,uCRoqBJ,CQhqBE,yCACE,+BRkqBJ,CQnqBE,sCACE,+BRkqBJ,CQnqBE,gCACE,+BRkqBJ,CQ7pBE,oBAKE,6BAAA,CAKA,UAAA,CATA,aAAA,CAEA,cAAA,CACA,aAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAPA,aRuqBJ,CQ3pBE,sBACE,cR6pBJ,CQ1pBI,2BACE,2CR4pBN,CQtpBI,sDAEE,uDAAA,CADA,+BRypBN,CQ1pBI,mDAEE,uDAAA,CADA,+BRypBN,CQ1pBI,6CAEE,uDAAA,CADA,+BRypBN,CS/tBA,mBACE,GAEE,SAAA,CADA,0BTmuBF,CS/tBA,GAEE,SAAA,CADA,uBTkuBF,CACF,CS7tBA,mBACE,GACE,ST+tBF,CS5tBA,GACE,ST8tBF,CACF,CSntBE,qBASE,2BAAA,CADA,mCAAA,CAAA,2BAAA,CAFA,0BAAA,CADA,WAAA,CAEA,SAAA,CANA,cAAA,CACA,KAAA,CAEA,UAAA,CADA,ST2tBJ,CSjtBE,mBAcE,mDAAA,CANA,2CAAA,CACA,QAAA,CACA,mBAAA,CARA,QAAA,CASA,kDACE,CAPF,eAAA,CAEA,aAAA,CADA,SAAA,CALA,cAAA,CAGA,UAAA,CADA,ST4tBJ,CS7sBE,kBACE,aT+sBJ,CS3sBE,sBACE,YAAA,CACA,YT6sBJ,CS1sBI,oCACE,aT4sBN,CSvsBE,sBACE,mBTysBJ,CStsBI,6CACE,cTwsBN,CKlmBI,wCIvGA,6CAKI,aAAA,CAEA,gBAAA,CACA,iBAAA,CAFA,UT0sBN,CACF,CSnsBE,kBACE,cTqsBJ,CUtyBA,YACE,WAAA,CAIA,WVsyBF,CUnyBE,mBACE,qBAAA,CACA,iBVqyBJ,CKzoBI,sCKtJE,4EACE,kBVkyBN,CU9xBI,0JACE,mBVgyBN,CUjyBI,8EACE,kBVgyBN,CACF,CU3xBI,0BAGE,UAAA,CAFA,aAAA,CACA,YV8xBN,CUzxBI,+BACE,eV2xBN,CUrxBE,8BACE,WV0xBJ,CU3xBE,8BACE,UV0xBJ,CU3xBE,8BAGE,iBVwxBJ,CU3xBE,8BAGE,kBVwxBJ,CU3xBE,oBAEE,cAAA,CAEA,SVuxBJ,CUpxBI,aAPF,oBAQI,YVuxBJ,CACF,CUpxBI,gCACE,yCVsxBN,CUlxBI,wBACE,cAAA,CACA,kBVoxBN,CUjxBM,kCACE,oBVmxBR,CWp1BA,qBAEE,WXk2BF,CWp2BA,qBAEE,UXk2BF,CWp2BA,WAOE,2CAAA,CACA,mBAAA,CALA,YAAA,CAMA,8BAAA,CAJA,iBAAA,CAMA,SAAA,CALA,mBAAA,CASA,mBAAA,CAdA,cAAA,CASA,0BAAA,CAEA,wCACE,CATF,SXg2BF,CWl1BE,aAlBF,WAmBI,YXq1BF,CACF,CWl1BE,mBAEE,SAAA,CAIA,mBAAA,CALA,uBAAA,CAEA,kEXq1BJ,CW90BE,kBACE,gCAAA,CACA,eXg1BJ,CYn3BA,aACE,gBAAA,CACA,iBZs3BF,CYn3BE,sBAGE,WAAA,CAFA,QAAA,CACA,SZs3BJ,CYj3BE,oBAEE,eAAA,CADA,eZo3BJ,CY/2BE,oBACE,iBZi3BJ,CY72BE,mBAIE,sBAAA,CAFA,YAAA,CACA,cAAA,CAEA,sBAAA,CAJA,iBZm3BJ,CY52BI,iDACE,yCZ82BN,CY12BI,6BACE,iBZ42BN,CYv2BE,mBAGE,uCAAA,CACA,cAAA,CAHA,aAAA,CACA,cAAA,CAGA,sBZy2BJ,CYt2BI,gDACE,+BZw2BN,CYp2BI,4BACE,0CAAA,CACA,mBZs2BN,CYj2BE,mBAGE,SAAA,CAFA,iBAAA,CACA,2BAAA,CAEA,8DZm2BJ,CY91BI,qBAEE,aAAA,CADA,eZi2BN,CY51BI,6BAEE,SAAA,CADA,uBZ+1BN,Ca76BA,WAEE,0CAAA,CADA,+Bbi7BF,Ca76BE,aALF,WAMI,Ybg7BF,CACF,Ca76BE,kBACE,6BAAA,CAEA,aAAA,CADA,abg7BJ,Ca56BI,gCACE,Yb86BN,Caz6BE,iBACE,YAAA,CAKA,cAAA
,CAIA,uCAAA,CADA,eAAA,CADA,oBAAA,CADA,kBAAA,CAIA,uBbu6BJ,Cap6BI,4CACE,Ubs6BN,Cav6BI,yCACE,Ubs6BN,Cav6BI,mCACE,Ubs6BN,Cal6BI,+BACE,oBbo6BN,CKrxBI,wCQrII,yCACE,Yb65BR,CACF,Cax5BI,iCACE,gBb25BN,Ca55BI,iCACE,iBb25BN,Ca55BI,uBAEE,gBb05BN,Cav5BM,iCACE,eby5BR,Can5BE,kBAEE,WAAA,CAGA,eAAA,CACA,kBAAA,CAHA,6BAAA,CACA,cAAA,CAHA,iBAAA,CAMA,kBbq5BJ,Caj5BE,mBACE,YAAA,CACA,abm5BJ,Ca/4BE,sBAKE,gBAAA,CAHA,MAAA,CACA,gBAAA,CAGA,UAAA,CAFA,cAAA,CAHA,iBAAA,CACA,Obq5BJ,Ca54BA,gBACE,gDb+4BF,Ca54BE,uBACE,YAAA,CACA,cAAA,CACA,6BAAA,CACA,ab84BJ,Ca14BE,kCACE,sCb44BJ,Caz4BI,6DACE,+Bb24BN,Ca54BI,0DACE,+Bb24BN,Ca54BI,oDACE,+Bb24BN,Can4BA,cAIE,wCAAA,CACA,gBAAA,CAHA,iBAAA,CACA,eAAA,CAFA,Ub04BF,CKj2BI,mCQ1CJ,cASI,Ubs4BF,CACF,Cal4BE,yBACE,sCbo4BJ,Ca73BA,WACE,cAAA,CACA,qBbg4BF,CK92BI,mCQpBJ,WAMI,ebg4BF,CACF,Ca73BE,iBACE,oBAAA,CAEA,aAAA,CACA,iBAAA,CAFA,Ybi4BJ,Ca53BI,wBACE,eb83BN,Ca13BI,qBAGE,iBAAA,CAFA,gBAAA,CACA,mBb63BN,CcpiCE,uBAKE,kBAAA,CACA,mBAAA,CAHA,gCAAA,CAIA,cAAA,CANA,oBAAA,CAGA,eAAA,CAFA,kBAAA,CAMA,gEduiCJ,CcjiCI,gCAEE,2CAAA,CACA,uCAAA,CAFA,gCdqiCN,Cc/hCI,kDAEE,0CAAA,CACA,sCAAA,CAFA,+BdmiCN,CcpiCI,+CAEE,0CAAA,CACA,sCAAA,CAFA,+BdmiCN,CcpiCI,yCAEE,0CAAA,CACA,sCAAA,CAFA,+BdmiCN,Cc5hCE,gCAKE,4BdiiCJ,CctiCE,gEAME,6BdgiCJ,CctiCE,gCAME,4BdgiCJ,CctiCE,sBAIE,6DAAA,CAGA,8BAAA,CAJA,eAAA,CAFA,aAAA,CACA,eAAA,CAMA,sCd8hCJ,CczhCI,iDACE,6CAAA,CACA,8Bd2hCN,Cc7hCI,8CACE,6CAAA,CACA,8Bd2hCN,Cc7hCI,wCACE,6CAAA,CACA,8Bd2hCN,CcvhCI,+BACE,UdyhCN,Ce5kCA,WAOE,2CAAA,CAGA,8CACE,CALF,gCAAA,CADA,aAAA,CAFA,MAAA,CAFA,uBAAA,CAAA,eAAA,CAEA,OAAA,CADA,KAAA,CAEA,SfmlCF,CexkCE,aAfF,WAgBI,Yf2kCF,CACF,CexkCE,mBACE,2BAAA,CACA,iEf0kCJ,CepkCE,mBACE,kDACE,CAEF,kEfokCJ,Ce9jCE,kBAEE,kBAAA,CADA,YAAA,CAEA,efgkCJ,Ce5jCE,mBAKE,kBAAA,CAGA,cAAA,CALA,YAAA,CAIA,uCAAA,CAHA,aAAA,CAHA,iBAAA,CAQA,uBAAA,CAHA,qBAAA,CAJA,SfqkCJ,Ce3jCI,yBACE,Uf6jCN,CezjCI,iCACE,oBf2jCN,CevjCI,uCAEE,uCAAA,CADA,Yf0jCN,CerjCI,2BACE,YAAA,CACA,afujCN,CK18BI,wCU/GA,2BAMI,YfujCN,CACF,CepjCM,iDAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,UfwjCR,Ce1jCM,8CAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,UfwjCR,Ce1jCM,wCAIE,iBAAA,CAHA,aAAA,CAEA,aAAA,CADA,UfwjCR,CKx+BI,mCUzEA,iCAII,YfijCN,CACF,Ce9iCM,wCACE,YfgjCR,Ce5iCM,+CACE,oBf8iCR,CKn/BI,sCUtDA,iCAII,YfyiCN,CACF,CepiCE,kBAEE,YAAA,CACA,cAAA,CAFA,iBAAA,CAIA,8DACE,CAFF,kBfuiCJ,CejiCI,oCAGE,SAAA,CAIA,mBAAA,CALA,6BAAA,CAEA,8DACE,CAJF,UfuiCN,Ce9hCM,8CACE,8BfgiCR,Ce3hCI,8BACE,ef6hCN,CexhCE,4BAGE,kBf6hCJ,CehiCE,4BAGE,iBf6hCJ,CehiCE,4BAIE,gBf4hCJ,CehiCE,4BAIE,iBf4hCJ,CehiCE,kBACE,WAAA,CAIA,eAAA,CAHA,aAAA,CAIA,kBf0hCJ,CevhCI,4CAGE,SAAA,CAIA,mBAAA,CALA,8BAAA,CAEA,8DACE,CAJF,Uf6hCN,CephCM,sDACE,6BfshCR,CelhCM,8DAGE,SAAA,CAIA,mBAAA,CALA,uBAAA,CAEA,8DACE,CAJF,SfwhCR,Ce7gCI,uCAGE,WAAA,CAFA,iBAAA,CACA,UfghCN,Ce1gCE,mBACE,YAAA,CACA,aAAA,CACA,cAAA,CAEA,+CACE,CAFF,kBf6gCJ,CevgCI,8DACE,WAAA,CACA,SAAA,CACA,oCfygCN,CelgCE,mBACE,YfogCJ,CKzjCI,mCUoDF,6BAQI,gBfogCJ,Ce5gCA,6BAQI,iBfogCJ,Ce5gCA,mBAKI,aAAA,CAEA,iBAAA,CADA,afsgCJ,CACF,CKjkCI,sCUoDF,6BAaI,kBfogCJ,CejhCA,6BAaI,mBfogCJ,CACF,CgB5uCA,MACE,0MAAA,CACA,gMAAA,CACA,yNhB+uCF,CgBzuCA,QACE,eAAA,CACA,ehB4uCF,CgBzuCE,eACE,aAAA,CAGA,eAAA,CADA,eAAA,CADA,eAAA,CAGA,sBhB2uCJ,CgBxuCI,+BACE,YhB0uCN,CgBvuCM,mCAEE,WAAA,CADA,UhB0uCR,CgBluCQ,6DAME,iBAAA,CALA,aAAA,CAGA,aAAA,CADA,cAAA,CAEA,kBAAA,CAHA,UhBwuCV,CgB1uCQ,0DAME,iBAAA,CALA,aAAA,CAGA,aAAA,CADA,cAAA,CAEA,kBAAA,CAHA,UhBwuCV,CgB1uCQ,oDAME,iBAAA,CALA,aAAA,CAGA,aAAA,CADA,cAAA,CAEA,kBAAA,CAHA,UhBwuCV,CgB7tCE,cAGE,eAAA,CAFA,QAAA,CACA,ShBguCJ,CgB3tCE,cACE,ehB6tCJ,CgB1tCI,sCACE,ehB4tCN,CgB7tCI,sCACE,chB4tCN,CgBvtCE,cAEE,kBAAA,CAKA,cAAA,CANA,YAAA,CAEA,6BAAA,CACA,iBAAA,CACA,eAAA,CAIA,uBAAA,CAHA,sBAAA,CAEA,sBhB0tCJ,CgBttC
I,sBACE,uChBwtCN,CgBptCI,oCACE,+BhBstCN,CgBltCI,0CACE,UhBotCN,CgBhtCI,yCACE,+BhBktCN,CgBntCI,sCACE,+BhBktCN,CgBntCI,gCACE,+BhBktCN,CgB9sCI,4BACE,uCAAA,CACA,oBhBgtCN,CgB5sCI,0CACE,YhB8sCN,CgB3sCM,yDAKE,6BAAA,CAJA,aAAA,CAEA,WAAA,CACA,qCAAA,CAAA,6BAAA,CAFA,UhBgtCR,CgBzsCM,kDACE,YhB2sCR,CgBtsCI,gBAEE,cAAA,CADA,YhBysCN,CgBnsCE,cACE,ahBqsCJ,CgBjsCE,gBACE,YhBmsCJ,CKjpCI,wCW3CA,0CASE,2CAAA,CAHA,YAAA,CACA,qBAAA,CACA,WAAA,CAJA,MAAA,CAFA,iBAAA,CAEA,OAAA,CADA,KAAA,CAEA,ShBksCJ,CgBvrCI,4DACE,eAAA,CACA,ehByrCN,CgB3rCI,yDACE,eAAA,CACA,ehByrCN,CgB3rCI,mDACE,eAAA,CACA,ehByrCN,CgBrrCI,gCAOE,qDAAA,CAHA,uCAAA,CAIA,cAAA,CANA,aAAA,CAGA,kBAAA,CAFA,wBAAA,CAFA,iBAAA,CAKA,kBhByrCN,CgBprCM,wDAGE,UhB0rCR,CgB7rCM,wDAGE,WhB0rCR,CgB7rCM,8CAIE,aAAA,CAEA,aAAA,CACA,YAAA,CANA,iBAAA,CACA,SAAA,CAGA,YhBwrCR,CgBnrCQ,oDAIE,6BAAA,CAKA,UAAA,CARA,aAAA,CAEA,WAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,UhB4rCV,CgBhrCM,8CAEE,2CAAA,CACA,gEACE,CAHF,eAAA,CAIA,4BAAA,CACA,kBhBirCR,CgB9qCQ,2DACE,YhBgrCV,CgB3qCM,8CAGE,2CAAA,CAFA,gCAAA,CACA,ehB8qCR,CgBzqCM,yCAIE,aAAA,CADA,UAAA,CAEA,YAAA,CACA,aAAA,CALA,iBAAA,CAEA,WAAA,CADA,ShB+qCR,CgBtqCI,+BACE,MhBwqCN,CgBpqCI,+BAEE,4DAAA,CADA,ShBuqCN,CgBnqCM,qDACE,+BhBqqCR,CgBlqCQ,gFACE,+BhBoqCV,CgBrqCQ,6EACE,+BhBoqCV,CgBrqCQ,uEACE,+BhBoqCV,CgB9pCI,+BACE,YAAA,CACA,mBhBgqCN,CgB7pCM,uDAGE,mBhBgqCR,CgBnqCM,uDAGE,kBhBgqCR,CgBnqCM,6CAIE,gBAAA,CAFA,aAAA,CADA,YhBkqCR,CgB5pCQ,mDAIE,6BAAA,CAKA,UAAA,CARA,aAAA,CAEA,WAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,UhBqqCV,CgBrpCM,+CACE,mBhBupCR,CgB/oCM,4CAEE,wBAAA,CADA,ehBkpCR,CgB9oCQ,oEACE,mBhBgpCV,CgBjpCQ,oEACE,oBhBgpCV,CgB5oCQ,4EACE,iBhB8oCV,CgB/oCQ,4EACE,kBhB8oCV,CgB1oCQ,oFACE,mBhB4oCV,CgB7oCQ,oFACE,oBhB4oCV,CgBxoCQ,4FACE,mBhB0oCV,CgB3oCQ,4FACE,oBhB0oCV,CgBnoCE,mBACE,wBhBqoCJ,CgBjoCE,wBACE,YAAA,CAEA,SAAA,CADA,0BAAA,CAEA,oEhBmoCJ,CgB9nCI,kCACE,2BhBgoCN,CgB3nCE,gCAEE,SAAA,CADA,uBAAA,CAEA,qEhB6nCJ,CgBxnCI,8CAEE,kCAAA,CAAA,0BhBynCN,CACF,CK/xCI,wCW8KA,0CACE,YhBonCJ,CgBjnCI,yDACE,UhBmnCN,CgB/mCI,wDACE,YhBinCN,CgB7mCI,kDACE,YhB+mCN,CgB1mCE,gBAIE,iDAAA,CADA,gCAAA,CAFA,aAAA,CACA,ehB8mCJ,CACF,CK51CM,6DWuPF,6CACE,YhBwmCJ,CgBrmCI,4DACE,UhBumCN,CgBnmCI,2DACE,YhBqmCN,CgBjmCI,qDACE,YhBmmCN,CACF,CKp1CI,mCWyPA,kCAME,qCAAA,CACA,qDAAA,CANA,uBAAA,CAAA,eAAA,CACA,KAAA,CAGA,ShB8lCJ,CgBzlCI,6CACE,uBhB2lCN,CgBvlCI,gDACE,YhBylCN,CACF,CKn2CI,sCW7JJ,QA6aI,oDhBulCF,CgBplCE,gCAME,qCAAA,CACA,qDAAA,CANA,uBAAA,CAAA,eAAA,CACA,KAAA,CAGA,ShBslCJ,CgBjlCI,8CACE,uBhBmlCN,CgBzkCE,sEACE,YhB8kCJ,CgB1kCE,6DACE,ahB4kCJ,CgB7kCE,0DACE,ahB4kCJ,CgB7kCE,oDACE,ahB4kCJ,CgBxkCE,6CACE,YhB0kCJ,CgBtkCE,uBACE,aAAA,CACA,ehBwkCJ,CgBrkCI,kCACE,ehBukCN,CgBnkCI,qCACE,eAAA,CACA,mBhBqkCN,CgBlkCM,mDACE,mBhBokCR,CgBhkCM,mDACE,YhBkkCR,CgB7jCI,+BACE,ahB+jCN,CgB5jCM,2DACE,ShB8jCR,CgBxjCE,cAGE,kBAAA,CADA,YAAA,CAEA,+CACE,CAJF,WhB6jCJ,CgBrjCI,wBACE,wBhBujCN,CgBnjCI,oBACE,uDhBqjCN,CgBjjCI,oBAKE,6BAAA,CAKA,UAAA,CATA,oBAAA,CAEA,WAAA,CAGA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CALA,qBAAA,CAFA,UhB2jCN,CgB/iCI,0JAEE,uBhBgjCN,CgBliCI,+HACE,YhBwiCN,CgBriCM,oDACE,aAAA,CACA,ShBuiCR,CgBpiCQ,kEAOE,qCAAA,CACA,qDAAA,CAFA,eAAA,CAFA,YAAA,CACA,eAAA,CAJA,uBAAA,CAAA,eAAA,CACA,KAAA,CACA,ShB2iCV,CgBniCU,4FACE,mBhBqiCZ,CgBjiCU,gFACE,YhBmiCZ,CgB3hCI,2CACE,ahB6hCN,CgB1hCM,iFACE,mBhB4hCR,CgB7hCM,iFACE,kBhB4hCR,CgBnhCI,mFACE,ehBqhCN,CgBlhCM,iGACE,ShBohCR,CgB/gCI,qFAGE,mDhBihCN,CgBphCI,qFAGE,oDhBihCN,CgBphCI,2EACE,aAAA,CACA,oBhBkhCN,CgB9gCM,0FACE,YhBghCR,CACF,CiBroDA,MACE,igBjBwoDF,CiBloDA,WACE,iBjBqoDF,CKv+CI,mCY/JJ,WAK
I,ejBqoDF,CACF,CiBloDE,kBACE,YjBooDJ,CiBhoDE,oBAEE,SAAA,CADA,SjBmoDJ,CKh+CI,wCYpKF,8BAQI,YjB0oDJ,CiBlpDA,8BAQI,ajB0oDJ,CiBlpDA,oBAYI,2CAAA,CACA,kBAAA,CAHA,WAAA,CACA,eAAA,CAOA,mBAAA,CAZA,iBAAA,CACA,SAAA,CAOA,uBAAA,CACA,4CACE,CAPF,UjByoDJ,CiB7nDI,+DACE,SAAA,CACA,oCjB+nDN,CACF,CKtgDI,mCYjJF,8BAiCI,MjBioDJ,CiBlqDA,8BAiCI,OjBioDJ,CiBlqDA,oBAoCI,0BAAA,CACA,cAAA,CAFA,QAAA,CAJA,cAAA,CACA,KAAA,CAMA,sDACE,CALF,OjBgoDJ,CiBtnDI,+DAME,YAAA,CACA,SAAA,CACA,4CACE,CARF,UjB2nDN,CACF,CKrgDI,wCYxGA,+DAII,mBjB6mDN,CACF,CKnjDM,6DY/DF,+DASI,mBjB6mDN,CACF,CKxjDM,6DY/DF,+DAcI,mBjB6mDN,CACF,CiBxmDE,kBAEE,kCAAA,CAAA,0BjBymDJ,CKvhDI,wCYpFF,4BAQI,MjBgnDJ,CiBxnDA,4BAQI,OjBgnDJ,CiBxnDA,kBAWI,QAAA,CAGA,SAAA,CAFA,eAAA,CANA,cAAA,CACA,KAAA,CAMA,wBAAA,CAEA,qGACE,CANF,OAAA,CADA,SjB+mDJ,CiBlmDI,4BACE,yBjBomDN,CiBhmDI,6DAEE,WAAA,CAEA,SAAA,CADA,uBAAA,CAEA,sGACE,CALF,UjBsmDN,CACF,CKlkDI,mCYjEF,4BA2CI,WjBgmDJ,CiB3oDA,4BA2CI,UjBgmDJ,CiB3oDA,kBA6CI,eAAA,CAHA,iBAAA,CAIA,8CAAA,CAFA,ajB+lDJ,CACF,CKjmDM,6DYOF,6DAII,ajB0lDN,CACF,CKhlDI,sCYfA,6DASI,ajB0lDN,CACF,CiBrlDE,iBAIE,2CAAA,CACA,0BAAA,CAFA,aAAA,CAFA,iBAAA,CAKA,2CACE,CALF,SjB2lDJ,CK7lDI,mCYAF,iBAaI,0BAAA,CACA,mBAAA,CAFA,ajBulDJ,CiBllDI,uBACE,0BjBolDN,CACF,CiBhlDI,4DAEE,2CAAA,CACA,6BAAA,CACA,8BAAA,CAHA,gCjBqlDN,CiB7kDE,4BAKE,mBAAA,CAAA,oBjBklDJ,CiBvlDE,4BAKE,mBAAA,CAAA,oBjBklDJ,CiBvlDE,kBAQE,gBAAA,CAFA,eAAA,CAFA,WAAA,CAHA,iBAAA,CAMA,sBAAA,CAJA,UAAA,CADA,SjBqlDJ,CiB5kDI,+BACE,qBjB8kDN,CiB1kDI,kEAEE,uCjB2kDN,CiBvkDI,6BACE,YjBykDN,CK7mDI,wCYaF,kBA8BI,eAAA,CADA,aAAA,CADA,UjB0kDJ,CACF,CKvoDI,mCYgCF,4BAmCI,mBjB0kDJ,CiB7mDA,4BAmCI,oBjB0kDJ,CiB7mDA,kBAoCI,aAAA,CACA,ejBwkDJ,CiBrkDI,+BACE,uCjBukDN,CiBnkDI,mCACE,gCjBqkDN,CiBjkDI,6DACE,kBjBmkDN,CiBhkDM,wJAEE,uCjBikDR,CACF,CiB3jDE,iBAIE,cAAA,CAHA,oBAAA,CAEA,aAAA,CAEA,kCACE,CAJF,YjBgkDJ,CiBxjDI,uBACE,UjB0jDN,CiBtjDI,yCAGE,UjByjDN,CiB5jDI,yCAGE,WjByjDN,CiB5jDI,+BACE,iBAAA,CACA,SAAA,CAEA,SjBwjDN,CiBrjDM,6CACE,oBjBujDR,CK1pDI,wCY2FA,yCAcI,UjBsjDN,CiBpkDE,yCAcI,WjBsjDN,CiBpkDE,+BAaI,SjBujDN,CiBnjDM,+CACE,YjBqjDR,CACF,CKtrDI,mCY8GA,+BAwBI,mBjBojDN,CiBjjDM,8CACE,YjBmjDR,CACF,CiB7iDE,8BAGE,WjBijDJ,CiBpjDE,8BAGE,UjBijDJ,CiBpjDE,oBAKE,mBAAA,CAJA,iBAAA,CACA,SAAA,CAEA,SjBgjDJ,CKlrDI,wCY8HF,8BAUI,WjB+iDJ,CiBzjDA,8BAUI,UjB+iDJ,CiBzjDA,oBASI,SjBgjDJ,CACF,CiB5iDI,gCACE,iBjBkjDN,CiBnjDI,gCACE,kBjBkjDN,CiBnjDI,sBAEE,uCAAA,CAEA,SAAA,CADA,oBAAA,CAEA,+DjB8iDN,CiBziDM,yCAEE,uCAAA,CADA,YjB4iDR,CiBviDM,yFAGE,SAAA,CACA,mBAAA,CAFA,kBjB0iDR,CiBriDQ,8FACE,UjBuiDV,CiBhiDE,8BAOE,mBAAA,CAAA,oBjBuiDJ,CiB9iDE,8BAOE,mBAAA,CAAA,oBjBuiDJ,CiB9iDE,oBAIE,kBAAA,CAIA,yCAAA,CALA,YAAA,CAMA,eAAA,CAHA,WAAA,CAKA,SAAA,CAVA,iBAAA,CACA,KAAA,CAUA,uBAAA,CAFA,kBAAA,CALA,UjByiDJ,CK5uDI,mCY8LF,8BAgBI,mBjBmiDJ,CiBnjDA,8BAgBI,oBjBmiDJ,CiBnjDA,oBAiBI,ejBkiDJ,CACF,CiB/hDI,+DACE,SAAA,CACA,0BjBiiDN,CiB5hDE,6BAKE,+BjB+hDJ,CiBpiDE,0DAME,gCjB8hDJ,CiBpiDE,6BAME,+BjB8hDJ,CiBpiDE,mBAIE,eAAA,CAHA,iBAAA,CAEA,UAAA,CADA,SjBkiDJ,CK3uDI,wCYuMF,mBAWI,QAAA,CADA,UjB+hDJ,CACF,CKpwDI,mCY0NF,mBAiBI,SAAA,CADA,UAAA,CAEA,sBjB8hDJ,CiB3hDI,8DACE,8BAAA,CACA,SjB6hDN,CACF,CiBxhDE,uBAKE,kCAAA,CAAA,0BAAA,CAFA,2CAAA,CAFA,WAAA,CACA,eAAA,CAOA,kBjBshDJ,CiBnhDI,iEAZF,uBAaI,uBjBshDJ,CACF,CKjzDM,6DY6QJ,uBAkBI,ajBshDJ,CACF,CKhyDI,sCYuPF,uBAuBI,ajBshDJ,CACF,CKryDI,mCYuPF,uBA4BI,YAAA,CAEA,yDAAA,CADA,oBjBuhDJ,CiBnhDI,kEACE,ejBqhDN,CiBjhDI,6BACE,+CjBmhDN,CiB/gDI,0CAEE,YAAA,CADA,WjBkhDN,CiB7gDI,gDACE,oDjB+gDN,CiB5gDM,sDACE,0CjB8gDR,CACF,CiBvgDA,kBACE,gCAAA,CACA,qBjB0gDF,CiBvgDE,wBAKE,qDAAA,CAHA,uCAAA,CACA,gBAAA,CACA,kBAAA,CAHA,eAAA,CAKA,uBjBygDJ,CKz0DI,mCY0TF,kCAUI,mBjBygDJ,CiBnhDA,kCAUI,oBjBygDJ,CACF,CiBrgDE,wBAGE,eAAA,CAFA,QAAA,CACA,SAAA,CAGA,w
BAAA,CAAA,qBAAA,CAAA,gBjBsgDJ,CiBlgDE,wBACE,yDjBogDJ,CiBjgDI,oCACE,ejBmgDN,CiB9/CE,wBACE,aAAA,CACA,YAAA,CAEA,uBAAA,CADA,gCjBigDJ,CiB7/CI,mDACE,uDjB+/CN,CiBhgDI,gDACE,uDjB+/CN,CiBhgDI,0CACE,uDjB+/CN,CiB3/CI,gDACE,mBjB6/CN,CiBx/CE,gCAGE,+BAAA,CAGA,cAAA,CALA,aAAA,CAGA,gBAAA,CACA,YAAA,CAHA,mBAAA,CAQA,uBAAA,CAHA,2CjB2/CJ,CKh3DI,mCY8WF,0CAcI,mBjBw/CJ,CiBtgDA,0CAcI,oBjBw/CJ,CACF,CiBr/CI,2DAEE,uDAAA,CADA,+BjBw/CN,CiBz/CI,wDAEE,uDAAA,CADA,+BjBw/CN,CiBz/CI,kDAEE,uDAAA,CADA,+BjBw/CN,CiBn/CI,wCACE,YjBq/CN,CiBh/CI,wDACE,YjBk/CN,CiB9+CI,oCACE,WjBg/CN,CiB3+CE,2BAGE,eAAA,CADA,eAAA,CADA,iBjB++CJ,CKv4DI,mCYuZF,qCAOI,mBjB6+CJ,CiBp/CA,qCAOI,oBjB6+CJ,CACF,CiBv+CM,8DAGE,eAAA,CADA,eAAA,CAEA,eAAA,CAHA,ejB4+CR,CiBn+CE,kCAEE,MjBy+CJ,CiB3+CE,kCAEE,OjBy+CJ,CiB3+CE,wBAME,uCAAA,CAFA,aAAA,CACA,YAAA,CAJA,iBAAA,CAEA,YjBw+CJ,CKv4DI,wCY4ZF,wBAUI,YjBq+CJ,CACF,CiBl+CI,8BAIE,6BAAA,CAKA,UAAA,CARA,oBAAA,CAEA,WAAA,CAEA,+CAAA,CAAA,uCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,UjB2+CN,CiBj+CM,wCACE,oBjBm+CR,CiB79CE,yBAGE,gBAAA,CADA,eAAA,CAEA,eAAA,CAHA,ajBk+CJ,CiB39CE,0BASE,2BAAA,CACA,oBAAA,CALA,uCAAA,CAJA,mBAAA,CAKA,gBAAA,CACA,eAAA,CAJA,aAAA,CADA,eAAA,CAEA,eAAA,CAIA,sBjB+9CJ,CK56DI,wCYqcF,0BAeI,oBAAA,CADA,ejB89CJ,CACF,CK39DM,6DY8eJ,0BAqBI,oBAAA,CADA,ejB89CJ,CACF,CiB19CI,+BAEE,wBAAA,CADA,yBjB69CN,CiBv9CE,yBAEE,gBAAA,CACA,iBAAA,CAFA,ajB29CJ,CiBr9CE,uBAEE,wBAAA,CADA,+BjBw9CJ,CkB9nEA,WACE,iBAAA,CACA,SlBioEF,CkB9nEE,kBAOE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAHA,gCAAA,CAHA,QAAA,CAEA,gBAAA,CADA,YAAA,CAOA,SAAA,CAVA,iBAAA,CACA,sBAAA,CAQA,mCAAA,CAEA,oElBgoEJ,CkB1nEI,+DACE,gBAAA,CAEA,SAAA,CADA,+BAAA,CAEA,sFACE,CADF,8ElB4nEN,CkBhoEI,4DACE,gBAAA,CAEA,SAAA,CADA,+BAAA,CAEA,mFACE,CADF,8ElB4nEN,CkBhoEI,sDACE,gBAAA,CAEA,SAAA,CADA,+BAAA,CAEA,8ElB4nEN,CkBrnEI,wBAUE,+BAAA,CAAA,8CAAA,CAFA,6BAAA,CAAA,8BAAA,CACA,YAAA,CAEA,UAAA,CANA,QAAA,CAFA,QAAA,CAIA,kBAAA,CADA,iBAAA,CALA,iBAAA,CACA,KAAA,CAEA,OlB8nEN,CkBlnEE,iBAOE,mBAAA,CAFA,eAAA,CACA,oBAAA,CAJA,QAAA,CADA,kBAAA,CAGA,aAAA,CADA,SlBwnEJ,CkBhnEE,iBACE,kBlBknEJ,CkB9mEE,2BAGE,kBAAA,CAAA,oBlBonEJ,CkBvnEE,2BAGE,mBAAA,CAAA,mBlBonEJ,CkBvnEE,iBAKE,cAAA,CAJA,aAAA,CAGA,YAAA,CAKA,uBAAA,CAHA,2CACE,CALF,UlBqnEJ,CkB3mEI,4CACE,+BlB6mEN,CkB9mEI,yCACE,+BlB6mEN,CkB9mEI,mCACE,+BlB6mEN,CkBzmEI,uBACE,qDlB2mEN,CmB/rEA,YAIE,qBAAA,CADA,aAAA,CAGA,gBAAA,CALA,uBAAA,CAAA,eAAA,CACA,UAAA,CAGA,anBmsEF,CmB/rEE,aATF,YAUI,YnBksEF,CACF,CKphEI,wCc3KF,+BAMI,anBssEJ,CmB5sEA,+BAMI,cnBssEJ,CmB5sEA,qBAWI,2CAAA,CAHA,aAAA,CAEA,WAAA,CANA,cAAA,CACA,KAAA,CAOA,uBAAA,CACA,iEACE,CALF,aAAA,CAFA,SnBqsEJ,CmB1rEI,mEACE,8BAAA,CACA,6BnB4rEN,CmBzrEM,6EACE,8BnB2rER,CmBtrEI,6CAEE,QAAA,CAAA,MAAA,CACA,QAAA,CAEA,eAAA,CAJA,iBAAA,CACA,OAAA,CAEA,qBAAA,CAFA,KnB2rEN,CACF,CKnkEI,sCctKJ,YAuDI,QnBsrEF,CmBnrEE,mBACE,WnBqrEJ,CmBjrEE,6CACE,UnBmrEJ,CACF,CmB/qEE,uBACE,YAAA,CACA,OnBirEJ,CKllEI,mCcjGF,uBAMI,QnBirEJ,CmB9qEI,8BACE,WnBgrEN,CmB5qEI,qCACE,anB8qEN,CmB1qEI,+CACE,kBnB4qEN,CACF,CmBvqEE,wBAUE,uBAAA,CANA,kCAAA,CAAA,0BAAA,CAHA,cAAA,CACA,eAAA,CASA,yDAAA,CAFA,oBnBsqEJ,CmBjqEI,8BACE,+CnBmqEN,CmB/pEI,2CAEE,YAAA,CADA,WnBkqEN,CmB7pEI,iDACE,oDnB+pEN,CmB5pEM,uDACE,0CnB8pER,CmBhpEE,wCAGE,wBACE,qBnBgpEJ,CmB5oEE,6BACE,kCnB8oEJ,CmB/oEE,6BACE,iCnB8oEJ,CACF,CK1mEI,wCc5BF,YAME,0BAAA,CADA,QAAA,CAEA,SAAA,CANA,cAAA,CACA,KAAA,CAMA,sDACE,CALF,OAAA,CADA,SnB+oEF,CmBpoEE,4CAEE,WAAA,CACA,SAAA,CACA,4CACE,CAJF,UnByoEJ,CACF,CoBtzEA,iBACE,GACE,QpBwzEF,CoBrzEA,GACE,apBuzEF,CACF,CoBnzEA,gBACE,GAEE,SAAA,CADA,0BpBszEF,CoBlzEA,IACE,SpBozEF,CoBjzEA,GAEE,SAAA,CADA,uBpBozEF,CACF,CoB3yEA,MACE,mgBAAA,CACA,oiBAAA,CACA,0nBAAA,CACA,mhBpB6yEF,CoBvyEA,WAOE,kCAAA,CAAA,0BAAA,CANA,aAAA,CACA,gBAAA,CA
CA,eAAA,CAEA,uCAAA,CAGA,uBAAA,CAJA,kBpB6yEF,CoBtyEE,iBACE,UpBwyEJ,CoBpyEE,iBACE,oBAAA,CAEA,aAAA,CACA,qBAAA,CAFA,UpBwyEJ,CoBnyEI,+BAEE,iBpBqyEN,CoBvyEI,+BAEE,kBpBqyEN,CoBvyEI,qBACE,gBpBsyEN,CoBjyEI,kDACE,iBpBoyEN,CoBryEI,kDACE,kBpBoyEN,CoBryEI,kDAEE,iBpBmyEN,CoBryEI,kDAEE,kBpBmyEN,CoB9xEE,iCAGE,iBpBmyEJ,CoBtyEE,iCAGE,kBpBmyEJ,CoBtyEE,uBACE,oBAAA,CACA,6BAAA,CAEA,eAAA,CACA,sBAAA,CACA,qBpBgyEJ,CoB5xEE,kBACE,YAAA,CAMA,gBAAA,CALA,SAAA,CAMA,oBAAA,CAJA,gBAAA,CAKA,WAAA,CAHA,eAAA,CADA,SAAA,CAFA,UpBoyEJ,CoB3xEI,iDACE,4BpB6xEN,CoBxxEE,iBACE,eAAA,CACA,sBpB0xEJ,CoBvxEI,gDACE,2BpByxEN,CoBrxEI,kCAIE,kBpB6xEN,CoBjyEI,kCAIE,iBpB6xEN,CoBjyEI,wBAME,6BAAA,CAIA,UAAA,CATA,oBAAA,CAEA,YAAA,CAIA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAJA,uBAAA,CAHA,WpB+xEN,CoBnxEI,iCACE,apBqxEN,CoBjxEI,iCACE,gDAAA,CAAA,wCpBmxEN,CoB/wEI,+BACE,8CAAA,CAAA,sCpBixEN,CoB7wEI,+BACE,8CAAA,CAAA,sCpB+wEN,CoB3wEI,sCACE,qDAAA,CAAA,6CpB6wEN,CqBp6EA,SASE,2CAAA,CAFA,gCAAA,CAHA,aAAA,CAIA,eAAA,CAFA,aAAA,CADA,UAAA,CAFA,SrB26EF,CqBl6EE,aAZF,SAaI,YrBq6EF,CACF,CK1vEI,wCgBzLJ,SAkBI,YrBq6EF,CACF,CqBl6EE,iBACE,mBrBo6EJ,CqBh6EE,yBAEE,iBrBs6EJ,CqBx6EE,yBAEE,kBrBs6EJ,CqBx6EE,eAME,eAAA,CADA,eAAA,CAJA,QAAA,CAEA,SAAA,CACA,kBrBo6EJ,CqB95EE,eACE,oBAAA,CACA,aAAA,CACA,kBAAA,CAAA,mBrBg6EJ,CqB35EE,eAOE,kCAAA,CAAA,0BAAA,CANA,aAAA,CAEA,eAAA,CADA,gBAAA,CAMA,UAAA,CAJA,uCAAA,CACA,oBAAA,CAIA,8DrB45EJ,CqBv5EI,iEAEE,aAAA,CACA,SrBw5EN,CqB35EI,8DAEE,aAAA,CACA,SrBw5EN,CqB35EI,wDAEE,aAAA,CACA,SrBw5EN,CqBn5EM,2CACE,qBrBq5ER,CqBt5EM,2CACE,qBrBw5ER,CqBz5EM,2CACE,qBrB25ER,CqB55EM,2CACE,qBrB85ER,CqB/5EM,2CACE,oBrBi6ER,CqBl6EM,2CACE,qBrBo6ER,CqBr6EM,2CACE,qBrBu6ER,CqBx6EM,2CACE,qBrB06ER,CqB36EM,4CACE,qBrB66ER,CqB96EM,4CACE,oBrBg7ER,CqBj7EM,4CACE,qBrBm7ER,CqBp7EM,4CACE,qBrBs7ER,CqBv7EM,4CACE,qBrBy7ER,CqB17EM,4CACE,qBrB47ER,CqB77EM,4CACE,oBrB+7ER,CqBz7EI,gCAEE,SAAA,CADA,yBAAA,CAEA,wCrB27EN,CsBxgFA,MACE,wStB2gFF,CsBlgFE,qBAEE,mBAAA,CADA,kBtBsgFJ,CsBjgFE,8BAEE,iBtB4gFJ,CsB9gFE,8BAEE,gBtB4gFJ,CsB9gFE,oBAUE,+CAAA,CACA,oBAAA,CAVA,oBAAA,CAKA,gBAAA,CADA,eAAA,CAGA,qBAAA,CADA,eAAA,CAJA,kBAAA,CACA,uBAAA,CAKA,qBtBqgFJ,CsBhgFI,0BAGE,uCAAA,CAFA,aAAA,CACA,YAAA,CAEA,6CtBkgFN,CsB7/EM,gEAGE,0CAAA,CADA,+BtB+/ER,CsBz/EI,yBACE,uBtB2/EN,CsBn/EI,gCAME,oDAAA,CAMA,UAAA,CAXA,oBAAA,CAEA,YAAA,CACA,iBAAA,CAGA,qCAAA,CAAA,6BAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CACA,iCAAA,CANA,0BAAA,CAHA,WtB+/EN,CsBj/EI,6DACE,0CtBm/EN,CsBp/EI,0DACE,0CtBm/EN,CsBp/EI,oDACE,0CtBm/EN,CuB5jFA,iBACE,GACE,uDAAA,CACA,oBvB+jFF,CuB5jFA,IACE,6BAAA,CACA,kBvB8jFF,CuB3jFA,GACE,wBAAA,CACA,oBvB6jFF,CACF,CuBrjFA,MACE,wBvBujFF,CuBjjFA,YAwBE,kCAAA,CAAA,0BAAA,CALA,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CACA,sCAAA,CAfA,+IACE,CAYF,8BAAA,CASA,SAAA,CAxBA,iBAAA,CACA,uBAAA,CAoBA,4BAAA,CAIA,uDACE,CAZF,6BAAA,CADA,SvB4jFF,CuB1iFE,oBAGE,SAAA,CADA,uBAAA,CAEA,2EACE,CAJF,SvB+iFJ,CuBriFE,4DACE,sCvBuiFJ,CuBxiFE,yDACE,sCvBuiFJ,CuBxiFE,mDACE,sCvBuiFJ,CuBniFE,mBAEE,gBAAA,CADA,avBsiFJ,CuBliFI,2CACE,YvBoiFN,CuBhiFI,0CACE,evBkiFN,CuB1hFA,eACE,eAAA,CAEA,YAAA,CADA,kBvB8hFF,CuB1hFE,yBACE,avB4hFJ,CuBxhFE,6BACE,oBAAA,CAGA,iBvBwhFJ,CuBphFE,sBAOE,cAAA,CAFA,sCAAA,CADA,eAAA,CADA,YAAA,CAGA,YAAA,CALA,iBAAA,CAOA,wBAAA,CAAA,qBAAA,CAAA,gBAAA,CANA,SvB4hFJ,CuBnhFI,qCACE,UAAA,CACA,uBvBqhFN,CuBlhFM,gEACE,UvBohFR,CuBrhFM,6DACE,UvBohFR,CuBrhFM,uDACE,UvBohFR,CuB5gFI,4BAYE,oDAAA,CACA,iBAAA,CAIA,UAAA,CARA,YAAA,CANA,YAAA,CAOA,cAAA,CACA,cAAA,CAVA,iBAAA,CACA,KAAA,CAYA,2CACE,CARF,wBAAA,CACA,6BAAA,CAJA,UvBuhFN,CuBvgFM,4CAGE,8CACE,2BvBugFR,CACF,CuBngFM,gDAIE,cAAA,CAHA,2CvBsgFR,CuB9/EI,2BAEE,sCAAA,CADA,iBvBigFN,CuB5/EI,qFACE,+BvB8/EN,CuB//EI,kFACE,+
BvB8/EN,CuB//EI,4EACE,+BvB8/EN,CuB3/EM,2FACE,0CvB6/ER,CuB9/EM,wFACE,0CvB6/ER,CuB9/EM,kFACE,0CvB6/ER,CuBx/EI,0CAGE,cAAA,CADA,eAAA,CADA,SvB4/EN,CuBt/EI,8CACE,oBAAA,CACA,evBw/EN,CuBr/EM,qDAME,mCAAA,CALA,oBAAA,CACA,mBAAA,CAEA,qBAAA,CACA,iDAAA,CAFA,qBvB0/ER,CuBn/EQ,iBAVF,qDAWI,WvBs/ER,CuBn/EQ,mEACE,mCvBq/EV,CACF,CwBntFA,kBAKE,exB+tFF,CwBpuFA,kBAKE,gBxB+tFF,CwBpuFA,QASE,2CAAA,CACA,oBAAA,CAEA,8BAAA,CALA,uCAAA,CAHA,aAAA,CAIA,eAAA,CAGA,YAAA,CALA,mBAAA,CALA,cAAA,CACA,UAAA,CAWA,yBAAA,CACA,mGACE,CAZF,SxBiuFF,CwB/sFE,aArBF,QAsBI,YxBktFF,CACF,CwB/sFE,kBACE,wBxBitFJ,CwB7sFE,gBAEE,SAAA,CAEA,mBAAA,CAHA,+BAAA,CAEA,uBxBgtFJ,CwB5sFI,0BACE,8BxB8sFN,CwBzsFE,mCAEE,0CAAA,CADA,+BxB4sFJ,CwB7sFE,gCAEE,0CAAA,CADA,+BxB4sFJ,CwB7sFE,0BAEE,0CAAA,CADA,+BxB4sFJ,CwBvsFE,YACE,oBAAA,CACA,oBxBysFJ,CyB7vFA,oBACE,GACE,mBzBgwFF,CACF,CyBxvFA,MACE,wfzB0vFF,CyBpvFA,YACE,aAAA,CAEA,eAAA,CADA,azBwvFF,CyBpvFE,+BAOE,kBAAA,CAAA,kBzBqvFJ,CyB5vFE,+BAOE,iBAAA,CAAA,mBzBqvFJ,CyB5vFE,qBAQE,aAAA,CAEA,cAAA,CADA,YAAA,CARA,iBAAA,CAKA,UzBsvFJ,CyB/uFI,qCAIE,iBzBuvFN,CyB3vFI,qCAIE,kBzBuvFN,CyB3vFI,2BAKE,6BAAA,CAKA,UAAA,CATA,oBAAA,CAEA,YAAA,CAGA,yCAAA,CAAA,iCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CAPA,WzByvFN,CyB5uFE,kBAUE,2CAAA,CACA,mBAAA,CACA,8BAAA,CAJA,gCAAA,CACA,oBAAA,CAJA,kBAAA,CADA,YAAA,CASA,SAAA,CANA,aAAA,CADA,SAAA,CALA,iBAAA,CAgBA,4BAAA,CAfA,UAAA,CAYA,+CACE,CAZF,SzB0vFJ,CyBzuFI,gEACE,gBAAA,CACA,SAAA,CACA,8CACE,CADF,sCzB2uFN,CyB9uFI,6DACE,gBAAA,CACA,SAAA,CACA,2CACE,CADF,sCzB2uFN,CyB9uFI,uDACE,gBAAA,CACA,SAAA,CACA,sCzB2uFN,CyBruFI,wBAGE,oCACE,gCzBquFN,CyBjuFI,2CACE,czBmuFN,CACF,CyB9tFE,kBACE,kBzBguFJ,CyB5tFE,4BAGE,kBAAA,CAAA,oBzBmuFJ,CyBtuFE,4BAGE,mBAAA,CAAA,mBzBmuFJ,CyBtuFE,kBAME,cAAA,CALA,aAAA,CAIA,YAAA,CAKA,uBAAA,CAHA,2CACE,CAJF,kBAAA,CAFA,UzBouFJ,CyBztFI,6CACE,+BzB2tFN,CyB5tFI,0CACE,+BzB2tFN,CyB5tFI,oCACE,+BzB2tFN,CyBvtFI,wBACE,qDzBytFN,C0B1zFA,MAEI,uWAAA,CAAA,8WAAA,CAAA,sPAAA,CAAA,8xBAAA,CAAA,0MAAA,CAAA,gbAAA,CAAA,gMAAA,CAAA,iQAAA,CAAA,0VAAA,CAAA,6aAAA,CAAA,8SAAA,CAAA,gM1Bm1FJ,C0Bv0FE,4CAQE,8CAAA,CACA,2BAAA,CACA,mBAAA,CACA,8BAAA,CANA,mCAAA,CAHA,iBAAA,CAIA,gBAAA,CAHA,iBAAA,CACA,eAAA,CAGA,uB1B80FJ,C0Bv0FI,aAdF,4CAeI,e1B20FJ,CACF,C0Bv0FI,gDACE,qB1B00FN,C0Bt0FI,gHAEE,iBAAA,CADA,c1B00FN,C0B30FI,0GAEE,iBAAA,CADA,c1B00FN,C0B30FI,8FAEE,iBAAA,CADA,c1B00FN,C0Br0FI,4FACE,iB1Bw0FN,C0Bp0FI,kFACE,e1Bu0FN,C0Bn0FI,0FACE,Y1Bs0FN,C0Bl0FI,8EACE,mB1Bq0FN,C0Bh0FE,sEAME,iBAAA,CAAA,mB1Bw0FJ,C0B90FE,sEAME,kBAAA,CAAA,kB1Bw0FJ,C0B90FE,sEAUE,uB1Bo0FJ,C0B90FE,sEAUE,wB1Bo0FJ,C0B90FE,sEAWE,4B1Bm0FJ,C0B90FE,4IAYE,6B1Bk0FJ,C0B90FE,sEAYE,4B1Bk0FJ,C0B90FE,kDAQE,0BAAA,CACA,WAAA,CAFA,eAAA,CAHA,eAAA,CACA,oBAAA,CAAA,iBAAA,CAHA,iB1B40FJ,C0B/zFI,kFACE,e1Bk0FN,C0B9zFI,oFAGE,U1By0FN,C0B50FI,oFAGE,W1By0FN,C0B50FI,gEAME,wBCsIU,CDjIV,UAAA,CANA,WAAA,CAEA,kDAAA,CAAA,0CAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CATA,iBAAA,CACA,UAAA,CAEA,U1Bw0FN,C0B7zFI,4DACE,4D1Bg0FN,C0B3yFE,iEACE,oB1B8yFJ,C0B/yFE,2DACE,oB1B8yFJ,C0B/yFE,+CACE,oB1B8yFJ,C0B1yFE,wEACE,0B1B6yFJ,C0B9yFE,kEACE,0B1B6yFJ,C0B9yFE,sDACE,0B1B6yFJ,C0B1yFI,+EACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B4yFN,C0B9yFI,yEACE,wBAnBG,CAoBH,0C1B4yFN,C0B9yFI,6DACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B4yFN,C0BxyFI,8EACE,a1B0yFN,C0B3yFI,wEACE,a1B0yFN,C0B3yFI,4DACE,a1B0yFN,C0B1zFE,oFACE,oB1B6zFJ,C0B9zFE,8EACE,oB1B6zFJ,C0B9zFE,kEACE,oB1B6zFJ,C0BzzFE,2FACE,0B1B4zFJ,C0B7zFE,qFACE,0B1B4zFJ,C0B7zFE,yEACE,0B1B4zFJ,C0BzzFI,kGACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1B2zFN,C0B7zFI,4FACE,wBAnBG,CAoBH,8C1B2zFN,C0B7zFI,gFACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1B2zFN,C0BvzFI,iGACE,a1ByzFN,C0B1zFI,2FACE,a1ByzFN,C0B1zFI,+EACE,a1ByzFN,C0Bz0FE,uE
ACE,oB1B40FJ,C0B70FE,iEACE,oB1B40FJ,C0B70FE,qDACE,oB1B40FJ,C0Bx0FE,8EACE,0B1B20FJ,C0B50FE,wEACE,0B1B20FJ,C0B50FE,4DACE,0B1B20FJ,C0Bx0FI,qFACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B00FN,C0B50FI,+EACE,wBAnBG,CAoBH,0C1B00FN,C0B50FI,mEACE,wBAnBG,CAoBH,kDAAA,CAAA,0C1B00FN,C0Bt0FI,oFACE,a1Bw0FN,C0Bz0FI,8EACE,a1Bw0FN,C0Bz0FI,kEACE,a1Bw0FN,C0Bx1FE,iFACE,oB1B21FJ,C0B51FE,2EACE,oB1B21FJ,C0B51FE,+DACE,oB1B21FJ,C0Bv1FE,wFACE,0B1B01FJ,C0B31FE,kFACE,0B1B01FJ,C0B31FE,sEACE,0B1B01FJ,C0Bv1FI,+FACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1By1FN,C0B31FI,yFACE,wBAnBG,CAoBH,yC1By1FN,C0B31FI,6EACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1By1FN,C0Br1FI,8FACE,a1Bu1FN,C0Bx1FI,wFACE,a1Bu1FN,C0Bx1FI,4EACE,a1Bu1FN,C0Bv2FE,iFACE,oB1B02FJ,C0B32FE,2EACE,oB1B02FJ,C0B32FE,+DACE,oB1B02FJ,C0Bt2FE,wFACE,0B1By2FJ,C0B12FE,kFACE,0B1By2FJ,C0B12FE,sEACE,0B1By2FJ,C0Bt2FI,+FACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bw2FN,C0B12FI,yFACE,wBAnBG,CAoBH,6C1Bw2FN,C0B12FI,6EACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bw2FN,C0Bp2FI,8FACE,a1Bs2FN,C0Bv2FI,wFACE,a1Bs2FN,C0Bv2FI,4EACE,a1Bs2FN,C0Bt3FE,gFACE,oB1By3FJ,C0B13FE,0EACE,oB1By3FJ,C0B13FE,8DACE,oB1By3FJ,C0Br3FE,uFACE,0B1Bw3FJ,C0Bz3FE,iFACE,0B1Bw3FJ,C0Bz3FE,qEACE,0B1Bw3FJ,C0Br3FI,8FACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1Bu3FN,C0Bz3FI,wFACE,wBAnBG,CAoBH,8C1Bu3FN,C0Bz3FI,4EACE,wBAnBG,CAoBH,sDAAA,CAAA,8C1Bu3FN,C0Bn3FI,6FACE,a1Bq3FN,C0Bt3FI,uFACE,a1Bq3FN,C0Bt3FI,2EACE,a1Bq3FN,C0Br4FE,wFACE,oB1Bw4FJ,C0Bz4FE,kFACE,oB1Bw4FJ,C0Bz4FE,sEACE,oB1Bw4FJ,C0Bp4FE,+FACE,0B1Bu4FJ,C0Bx4FE,yFACE,0B1Bu4FJ,C0Bx4FE,6EACE,0B1Bu4FJ,C0Bp4FI,sGACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bs4FN,C0Bx4FI,gGACE,wBAnBG,CAoBH,6C1Bs4FN,C0Bx4FI,oFACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bs4FN,C0Bl4FI,qGACE,a1Bo4FN,C0Br4FI,+FACE,a1Bo4FN,C0Br4FI,mFACE,a1Bo4FN,C0Bp5FE,mFACE,oB1Bu5FJ,C0Bx5FE,6EACE,oB1Bu5FJ,C0Bx5FE,iEACE,oB1Bu5FJ,C0Bn5FE,0FACE,0B1Bs5FJ,C0Bv5FE,oFACE,0B1Bs5FJ,C0Bv5FE,wEACE,0B1Bs5FJ,C0Bn5FI,iGACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bq5FN,C0Bv5FI,2FACE,wBAnBG,CAoBH,6C1Bq5FN,C0Bv5FI,+EACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bq5FN,C0Bj5FI,gGACE,a1Bm5FN,C0Bp5FI,0FACE,a1Bm5FN,C0Bp5FI,8EACE,a1Bm5FN,C0Bn6FE,0EACE,oB1Bs6FJ,C0Bv6FE,oEACE,oB1Bs6FJ,C0Bv6FE,wDACE,oB1Bs6FJ,C0Bl6FE,iFACE,0B1Bq6FJ,C0Bt6FE,2EACE,0B1Bq6FJ,C0Bt6FE,+DACE,0B1Bq6FJ,C0Bl6FI,wFACE,wBAnBG,CAoBH,oDAAA,CAAA,4C1Bo6FN,C0Bt6FI,kFACE,wBAnBG,CAoBH,4C1Bo6FN,C0Bt6FI,sEACE,wBAnBG,CAoBH,oDAAA,CAAA,4C1Bo6FN,C0Bh6FI,uFACE,a1Bk6FN,C0Bn6FI,iFACE,a1Bk6FN,C0Bn6FI,qEACE,a1Bk6FN,C0Bl7FE,gEACE,oB1Bq7FJ,C0Bt7FE,0DACE,oB1Bq7FJ,C0Bt7FE,8CACE,oB1Bq7FJ,C0Bj7FE,uEACE,0B1Bo7FJ,C0Br7FE,iEACE,0B1Bo7FJ,C0Br7FE,qDACE,0B1Bo7FJ,C0Bj7FI,8EACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1Bm7FN,C0Br7FI,wEACE,wBAnBG,CAoBH,yC1Bm7FN,C0Br7FI,4DACE,wBAnBG,CAoBH,iDAAA,CAAA,yC1Bm7FN,C0B/6FI,6EACE,a1Bi7FN,C0Bl7FI,uEACE,a1Bi7FN,C0Bl7FI,2DACE,a1Bi7FN,C0Bj8FE,oEACE,oB1Bo8FJ,C0Br8FE,8DACE,oB1Bo8FJ,C0Br8FE,kDACE,oB1Bo8FJ,C0Bh8FE,2EACE,0B1Bm8FJ,C0Bp8FE,qEACE,0B1Bm8FJ,C0Bp8FE,yDACE,0B1Bm8FJ,C0Bh8FI,kFACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bk8FN,C0Bp8FI,4EACE,wBAnBG,CAoBH,6C1Bk8FN,C0Bp8FI,gEACE,wBAnBG,CAoBH,qDAAA,CAAA,6C1Bk8FN,C0B97FI,iFACE,a1Bg8FN,C0Bj8FI,2EACE,a1Bg8FN,C0Bj8FI,+DACE,a1Bg8FN,C0Bh9FE,wEACE,oB1Bm9FJ,C0Bp9FE,kEACE,oB1Bm9FJ,C0Bp9FE,sDACE,oB1Bm9FJ,C0B/8FE,+EACE,0B1Bk9FJ,C0Bn9FE,yEACE,0B1Bk9FJ,C0Bn9FE,6DACE,0B1Bk9FJ,C0B/8FI,sFACE,wBAnBG,CAoBH,mDAAA,CAAA,2C1Bi9FN,C0Bn9FI,gFACE,wBAnBG,CAoBH,2C1Bi9FN,C0Bn9FI,oEACE,wBAnBG,CAoBH,mDAAA,CAAA,2C1Bi9FN,C0B78FI,qFACE,a1B+8FN,C0Bh9FI,+EACE,a1B+8FN,C0Bh9FI,mEACE,a1B+8FN,C4BjnGA,MACE,wM5BonGF,C4B3mGE,sBACE,uCAAA,CACA,gB5B8mGJ,C4B3mGI,mCACE,a5B6mGN,C4B9mGI,mCACE,c5B6mGN,C4BzmGM,4BACE,sB5B2mGR,C4BxmGQ,mCACE,gC5B0mGV,C4BtmGQ,2DAEE,SAAA,CADA,uBAAA,CAEA,e5BwmGV,C4BpmGQ,0EAEE,SAAA,CADA,uB5BumGV,C4B
xmGQ,uEAEE,SAAA,CADA,uB5BumGV,C4BxmGQ,iEAEE,SAAA,CADA,uB5BumGV,C4BlmGQ,yCACE,Y5BomGV,C4B7lGE,0BAEE,eAAA,CADA,e5BgmGJ,C4B5lGI,+BACE,oB5B8lGN,C4BzlGE,gDACE,Y5B2lGJ,C4BvlGE,8BAEE,+BAAA,CADA,oBAAA,CAGA,WAAA,CAGA,SAAA,CADA,4BAAA,CAEA,4DACE,CAJF,0B5B2lGJ,C4BllGI,aAdF,8BAeI,+BAAA,CAEA,SAAA,CADA,uB5BslGJ,CACF,C4BllGI,wCACE,6B5BolGN,C4BhlGI,oCACE,+B5BklGN,C4B9kGI,qCAIE,6BAAA,CAKA,UAAA,CARA,oBAAA,CAEA,YAAA,CAEA,2CAAA,CAAA,mCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,W5BulGN,C4B1kGQ,mDACE,oB5B4kGV,C6B1rGE,kCAEE,iB7BgsGJ,C6BlsGE,kCAEE,kB7BgsGJ,C6BlsGE,wBAGE,yCAAA,CAFA,oBAAA,CAGA,SAAA,CACA,mC7B6rGJ,C6BxrGI,aAVF,wBAWI,Y7B2rGJ,CACF,C6BvrGE,mFAEE,SAAA,CACA,2CACE,CADF,mC7ByrGJ,C6B5rGE,gFAEE,SAAA,CACA,wCACE,CADF,mC7ByrGJ,C6B5rGE,0EAEE,SAAA,CACA,mC7ByrGJ,C6BnrGE,mFAEE,+B7BqrGJ,C6BvrGE,gFAEE,+B7BqrGJ,C6BvrGE,0EAEE,+B7BqrGJ,C6BjrGE,oBACE,yBAAA,CACA,uBAAA,CAGA,yE7BirGJ,CKljGI,sCwBrHE,qDACE,uB7B0qGN,CACF,C6BrqGE,0CACE,yB7BuqGJ,C6BxqGE,uCACE,yB7BuqGJ,C6BxqGE,iCACE,yB7BuqGJ,C6BnqGE,sBACE,0B7BqqGJ,C8BhuGE,2BACE,a9BmuGJ,CK9iGI,wCyBtLF,2BAKI,e9BmuGJ,CACF,C8BhuGI,6BAEE,0BAAA,CAAA,2BAAA,CACA,eAAA,CACA,iBAAA,CAHA,yBAAA,CAAA,sBAAA,CAAA,iB9BquGN,C8B/tGM,2CACE,kB9BiuGR,C+BlvGE,kDACE,kCAAA,CAAA,0B/BqvGJ,C+BtvGE,+CACE,0B/BqvGJ,C+BtvGE,yCACE,kCAAA,CAAA,0B/BqvGJ,C+BjvGE,uBACE,4C/BmvGJ,C+B/uGE,uBACE,4C/BivGJ,C+B7uGE,4BACE,qC/B+uGJ,C+B5uGI,mCACE,a/B8uGN,C+B1uGI,kCACE,a/B4uGN,C+BvuGE,0BAKE,eAAA,CAJA,aAAA,CACA,YAAA,CAEA,aAAA,CADA,kBAAA,CAAA,mB/B2uGJ,C+BtuGI,uCACE,e/BwuGN,C+BpuGI,sCACE,kB/BsuGN,CgCrxGA,MACE,8LhCwxGF,CgC/wGE,oBACE,iBAAA,CAEA,gBAAA,CADA,ahCmxGJ,CgC/wGI,wCACE,uBhCixGN,CgC7wGI,gCAEE,eAAA,CADA,gBhCgxGN,CgCzwGM,wCACE,mBhC2wGR,CgCrwGE,8BAGE,oBhC0wGJ,CgC7wGE,8BAGE,mBhC0wGJ,CgC7wGE,8BAIE,4BhCywGJ,CgC7wGE,4DAKE,6BhCwwGJ,CgC7wGE,8BAKE,4BhCwwGJ,CgC7wGE,oBAME,cAAA,CALA,aAAA,CACA,ehC2wGJ,CgCpwGI,kCACE,uCAAA,CACA,oBhCswGN,CgClwGI,wCAEE,uCAAA,CADA,YhCqwGN,CgChwGI,oCAGE,WhC4wGN,CgC/wGI,oCAGE,UhC4wGN,CgC/wGI,0BAME,6BAAA,CAOA,UAAA,CARA,WAAA,CAEA,yCAAA,CAAA,iCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CATA,iBAAA,CACA,UAAA,CASA,sBAAA,CACA,yBAAA,CARA,UhC2wGN,CgC/vGM,oCACE,wBhCiwGR,CgC5vGI,4BACE,YhC8vGN,CgCzvGI,4CACE,YhC2vGN,CiC90GE,qDACE,mBAAA,CACA,cAAA,CACA,uBjCi1GJ,CiCp1GE,kDACE,mBAAA,CACA,cAAA,CACA,uBjCi1GJ,CiCp1GE,4CACE,mBAAA,CACA,cAAA,CACA,uBjCi1GJ,CiC90GI,yDAGE,iBAAA,CADA,eAAA,CADA,ajCk1GN,CiCn1GI,sDAGE,iBAAA,CADA,eAAA,CADA,ajCk1GN,CiCn1GI,gDAGE,iBAAA,CADA,eAAA,CADA,ajCk1GN,CkCx1GE,gCACE,sClC21GJ,CkC51GE,6BACE,sClC21GJ,CkC51GE,uBACE,sClC21GJ,CkCx1GE,cACE,yClC01GJ,CkC90GE,4DACE,oClCg1GJ,CkCj1GE,yDACE,oClCg1GJ,CkCj1GE,mDACE,oClCg1GJ,CkCx0GE,6CACE,qClC00GJ,CkC30GE,0CACE,qClC00GJ,CkC30GE,oCACE,qClC00GJ,CkCh0GE,oDACE,oClCk0GJ,CkCn0GE,iDACE,oClCk0GJ,CkCn0GE,2CACE,oClCk0GJ,CkCzzGE,gDACE,qClC2zGJ,CkC5zGE,6CACE,qClC2zGJ,CkC5zGE,uCACE,qClC2zGJ,CkCtzGE,gCACE,kClCwzGJ,CkCzzGE,6BACE,kClCwzGJ,CkCzzGE,uBACE,kClCwzGJ,CkClzGE,qCACE,sClCozGJ,CkCrzGE,kCACE,sClCozGJ,CkCrzGE,4BACE,sClCozGJ,CkC7yGE,yCACE,sClC+yGJ,CkChzGE,sCACE,sClC+yGJ,CkChzGE,gCACE,sClC+yGJ,CkCxyGE,yCACE,qClC0yGJ,CkC3yGE,sCACE,qClC0yGJ,CkC3yGE,gCACE,qClC0yGJ,CkCjyGE,gDACE,qClCmyGJ,CkCpyGE,6CACE,qClCmyGJ,CkCpyGE,uCACE,qClCmyGJ,CkC3xGE,6CACE,sClC6xGJ,CkC9xGE,0CACE,sClC6xGJ,CkC9xGE,oCACE,sClC6xGJ,CkClxGE,yDACE,qClCoxGJ,CkCrxGE,sDACE,qClCoxGJ,CkCrxGE,gDACE,qClCoxGJ,CkC/wGE,iCAGE,mBAAA,CAFA,gBAAA,CACA,gBlCkxGJ,CkCpxGE,8BAGE,mBAAA,CAFA,gBAAA,CACA,gBlCkxGJ,CkCpxGE,wBAGE,mBAAA,CAFA,gBAAA,CACA,gBlCkxGJ,CkC9wGE,eACE,4ClCgxGJ,CkC7wGE,eACE,4ClC+wGJ,CkC3wGE,gBAIE,wCAAA,CAHA,aAAA,CACA,wBAAA,CACA,wBlC8wGJ,CkCzwGE,yBAOE,wCAAA,CACA,+DAAA,CACA,4
BAAA,CACA,6BAAA,CARA,iBAAA,CAIA,eAAA,CADA,eAAA,CAFA,cAAA,CACA,oCAAA,CAHA,iBlCoxGJ,CkCxwGI,6BACE,YlC0wGN,CkCvwGM,kCACE,wBAAA,CACA,yBlCywGR,CkCnwGE,iCAWE,wCAAA,CACA,+DAAA,CAFA,uCAAA,CAGA,0BAAA,CAPA,UAAA,CAJA,oBAAA,CAMA,2BAAA,CADA,2BAAA,CAEA,2BAAA,CARA,uBAAA,CAAA,eAAA,CAaA,wBAAA,CAAA,qBAAA,CAAA,gBAAA,CATA,SlC4wGJ,CkC1vGE,sBACE,iBAAA,CACA,iBlC4vGJ,CkCpvGI,sCACE,gBlCsvGN,CkClvGI,gDACE,YlCovGN,CkC1uGA,gBACE,iBlC6uGF,CkCzuGE,uCACE,aAAA,CACA,SlC2uGJ,CkC7uGE,oCACE,aAAA,CACA,SlC2uGJ,CkC7uGE,8BACE,aAAA,CACA,SlC2uGJ,CkCtuGE,mBACE,YlCwuGJ,CkCnuGE,oBACE,QlCquGJ,CkCjuGE,4BACE,WAAA,CACA,SAAA,CACA,elCmuGJ,CkChuGI,0CACE,YlCkuGN,CkC5tGE,yBAIE,wCAAA,CAEA,+BAAA,CADA,4BAAA,CAFA,eAAA,CADA,oDAAA,CAKA,wBAAA,CAAA,qBAAA,CAAA,gBlC8tGJ,CkC1tGE,2BAEE,+DAAA,CADA,2BlC6tGJ,CkCztGI,+BACE,uCAAA,CACA,gBlC2tGN,CkCttGE,sBACE,MAAA,CACA,WlCwtGJ,CkCntGA,aACE,alCstGF,CkC5sGE,4BAEE,aAAA,CADA,YlCgtGJ,CkC5sGI,wDAEE,2BAAA,CADA,wBlC+sGN,CkCzsGE,+BAKE,2CAAA,CAEA,+BAAA,CADA,gCAAA,CADA,sBAAA,CAJA,mBAAA,CAEA,gBAAA,CADA,alCgtGJ,CkCxsGI,qCAEE,UAAA,CACA,UAAA,CAFA,alC4sGN,CK70GI,wC6BgJF,8BACE,iBlCisGF,CkCvrGE,wSAGE,elC6rGJ,CkCzrGE,sCAEE,mBAAA,CACA,eAAA,CADA,oBAAA,CADA,kBAAA,CAAA,mBlC6rGJ,CACF,CDphHI,kDAIE,+BAAA,CACA,8BAAA,CAFA,aAAA,CADA,QAAA,CADA,iBC0hHN,CD3hHI,+CAIE,+BAAA,CACA,8BAAA,CAFA,aAAA,CADA,QAAA,CADA,iBC0hHN,CD3hHI,yCAIE,+BAAA,CACA,8BAAA,CAFA,aAAA,CADA,QAAA,CADA,iBC0hHN,CDlhHI,uBAEE,uCAAA,CADA,cCqhHN,CDh+GM,iHAEE,WAlDkB,CAiDlB,kBC2+GR,CD5+GM,6HAEE,WAlDkB,CAiDlB,kBCu/GR,CDx/GM,6HAEE,WAlDkB,CAiDlB,kBCmgHR,CDpgHM,oHAEE,WAlDkB,CAiDlB,kBC+gHR,CDhhHM,0HAEE,WAlDkB,CAiDlB,kBC2hHR,CD5hHM,uHAEE,WAlDkB,CAiDlB,kBCuiHR,CDxiHM,uHAEE,WAlDkB,CAiDlB,kBCmjHR,CDpjHM,6HAEE,WAlDkB,CAiDlB,kBC+jHR,CDhkHM,yCAEE,WAlDkB,CAiDlB,kBCmkHR,CDpkHM,yCAEE,WAlDkB,CAiDlB,kBCukHR,CDxkHM,0CAEE,WAlDkB,CAiDlB,kBC2kHR,CD5kHM,uCAEE,WAlDkB,CAiDlB,kBC+kHR,CDhlHM,wCAEE,WAlDkB,CAiDlB,kBCmlHR,CDplHM,sCAEE,WAlDkB,CAiDlB,kBCulHR,CDxlHM,wCAEE,WAlDkB,CAiDlB,kBC2lHR,CD5lHM,oCAEE,WAlDkB,CAiDlB,kBC+lHR,CDhmHM,2CAEE,WAlDkB,CAiDlB,kBCmmHR,CDpmHM,qCAEE,WAlDkB,CAiDlB,kBCumHR,CDxmHM,oCAEE,WAlDkB,CAiDlB,kBC2mHR,CD5mHM,kCAEE,WAlDkB,CAiDlB,kBC+mHR,CDhnHM,qCAEE,WAlDkB,CAiDlB,kBCmnHR,CDpnHM,mCAEE,WAlDkB,CAiDlB,kBCunHR,CDxnHM,qCAEE,WAlDkB,CAiDlB,kBC2nHR,CD5nHM,wCAEE,WAlDkB,CAiDlB,kBC+nHR,CDhoHM,sCAEE,WAlDkB,CAiDlB,kBCmoHR,CDpoHM,2CAEE,WAlDkB,CAiDlB,kBCuoHR,CD5nHM,iCAEE,WAPkB,CAMlB,iBC+nHR,CDhoHM,uCAEE,WAPkB,CAMlB,iBCmoHR,CDpoHM,mCAEE,WAPkB,CAMlB,iBCuoHR,CmCztHA,MACE,qMAAA,CACA,mMnC4tHF,CmCntHE,wBAKE,mBAAA,CAHA,YAAA,CACA,qBAAA,CACA,YAAA,CAHA,iBnC0tHJ,CmChtHI,8BAGE,QAAA,CACA,SAAA,CAHA,iBAAA,CACA,OnCotHN,CmC/sHM,qCACE,0BnCitHR,CmClrHE,2BAKE,uBAAA,CADA,+DAAA,CAHA,YAAA,CACA,cAAA,CACA,aAAA,CAGA,oBnCorHJ,CmCjrHI,aATF,2BAUI,gBnCorHJ,CACF,CmCjrHI,cAGE,+BACE,iBnCirHN,CmC9qHM,sCAOE,oCAAA,CALA,QAAA,CAWA,UAAA,CATA,aAAA,CAEA,UAAA,CAHA,MAAA,CAFA,iBAAA,CAOA,2CAAA,CACA,qCACE,CAEF,kDAAA,CAPA,+BnCsrHR,CACF,CmCzqHI,8CACE,YnC2qHN,CmCvqHI,iCAQE,+BAAA,CACA,6BAAA,CALA,uCAAA,CAMA,cAAA,CATA,aAAA,CAKA,gBAAA,CADA,eAAA,CAFA,8BAAA,CAWA,+BAAA,CAHA,2CACE,CALF,kBAAA,CALA,UnCmrHN,CmCpqHM,aAII,6CACE,OnCmqHV,CmCpqHQ,8CACE,OnCsqHV,CmCvqHQ,8CACE,OnCyqHV,CmC1qHQ,8CACE,OnC4qHV,CmC7qHQ,8CACE,OnC+qHV,CmChrHQ,8CACE,OnCkrHV,CmCnrHQ,8CACE,OnCqrHV,CmCtrHQ,8CACE,OnCwrHV,CmCzrHQ,8CACE,OnC2rHV,CmC5rHQ,+CACE,QnC8rHV,CmC/rHQ,+CACE,QnCisHV,CmClsHQ,+CACE,QnCosHV,CmCrsHQ,+CACE,QnCusHV,CmCxsHQ,+CACE,QnC0sHV,CmC3sHQ,+CACE,QnC6sHV,CmC9sHQ,+CACE,QnCgtHV,CmCjtHQ,+CACE,QnCmtHV,CmCptHQ,+CACE,QnCstHV,CmCvtHQ,+CACE,QnCytHV,CmC1tHQ,+CACE,QnC4tHV,CACF,CmCvtHM,uCACE,+BnCytHR,CmCntHE,4BACE,UnCqtHJ,CmCltHI,aAJF,4BAKI,gBnCqtHJ,CACF,CmCjtHE,0BACE,YnCmtHJ,CmChtHI,aAJF,0
BAKI,anCmtHJ,CmC/sHM,sCACE,OnCitHR,CmCltHM,uCACE,OnCotHR,CmCrtHM,uCACE,OnCutHR,CmCxtHM,uCACE,OnC0tHR,CmC3tHM,uCACE,OnC6tHR,CmC9tHM,uCACE,OnCguHR,CmCjuHM,uCACE,OnCmuHR,CmCpuHM,uCACE,OnCsuHR,CmCvuHM,uCACE,OnCyuHR,CmC1uHM,wCACE,QnC4uHR,CmC7uHM,wCACE,QnC+uHR,CmChvHM,wCACE,QnCkvHR,CmCnvHM,wCACE,QnCqvHR,CmCtvHM,wCACE,QnCwvHR,CmCzvHM,wCACE,QnC2vHR,CmC5vHM,wCACE,QnC8vHR,CmC/vHM,wCACE,QnCiwHR,CmClwHM,wCACE,QnCowHR,CmCrwHM,wCACE,QnCuwHR,CmCxwHM,wCACE,QnC0wHR,CACF,CmCpwHI,+FAEE,QnCswHN,CmCnwHM,yGACE,wBAAA,CACA,yBnCswHR,CmC7vHM,2DAEE,wBAAA,CACA,yBAAA,CAFA,QnCiwHR,CmC1vHM,iEACE,QnC4vHR,CmCzvHQ,qLAGE,wBAAA,CACA,yBAAA,CAFA,QnC6vHV,CmCvvHQ,6FACE,wBAAA,CACA,yBnCyvHV,CmCpvHM,yDACE,kBnCsvHR,CmCjvHI,sCACE,QnCmvHN,CmC9uHE,2BAEE,iBAAA,CAKA,kBAAA,CADA,uCAAA,CAEA,cAAA,CAPA,aAAA,CAGA,YAAA,CACA,gBAAA,CAKA,mBAAA,CADA,gCAAA,CANA,WnCuvHJ,CmC7uHI,iCAEE,uDAAA,CADA,+BnCgvHN,CmC3uHI,iCAIE,6BAAA,CAQA,UAAA,CAXA,aAAA,CAEA,WAAA,CAKA,8CAAA,CAAA,sCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CANA,+CACE,CAJF,UnCqvHN,CmCtuHE,4BAME,yEACE,CALF,YAAA,CAGA,aAAA,CAFA,qBAAA,CAUA,mBAAA,CAZA,iBAAA,CAWA,wBAAA,CARA,YnC4uHJ,CmChuHI,sCACE,wBnCkuHN,CmC9tHI,oCACE,SnCguHN,CmC5tHI,kCAGE,wEACE,CAFF,mBAAA,CADA,OnCguHN,CmCttHM,uDACE,8CAAA,CAAA,sCnCwtHR,CKx0HI,wC8B8HF,wDAGE,kBnC+sHF,CmCltHA,wDAGE,mBnC+sHF,CmCltHA,8CAEE,eAAA,CADA,eAAA,CAGA,iCnC8sHF,CmC1sHE,8DACE,mBnC6sHJ,CmC9sHE,8DACE,kBnC6sHJ,CmC9sHE,oDAEE,UnC4sHJ,CmCxsHE,8EAEE,kBnC2sHJ,CmC7sHE,8EAEE,mBnC2sHJ,CmC7sHE,8EAGE,kBnC0sHJ,CmC7sHE,8EAGE,mBnC0sHJ,CmC7sHE,oEACE,UnC4sHJ,CmCtsHE,8EAEE,mBnCysHJ,CmC3sHE,8EAEE,kBnCysHJ,CmC3sHE,8EAGE,mBnCwsHJ,CmC3sHE,8EAGE,kBnCwsHJ,CmC3sHE,oEACE,UnC0sHJ,CACF,CmC5rHE,cAHF,olDAII,+BnC+rHF,CmC5rHE,g8GACE,sCnC8rHJ,CACF,CmCzrHA,4sDACE,uDnC4rHF,CmCxrHA,wmDACE,anC2rHF,CoCxiIA,MACE,mVAAA,CAEA,4VpC4iIF,CoCliIE,4BAEE,oBAAA,CADA,iBpCsiIJ,CoCjiII,sDAGE,SpCmiIN,CoCtiII,sDAGE,UpCmiIN,CoCtiII,4CACE,iBAAA,CACA,SpCoiIN,CoC9hIE,+CAEE,SAAA,CADA,UpCiiIJ,CoC5hIE,kDAGE,WpCsiIJ,CoCziIE,kDAGE,YpCsiIJ,CoCziIE,wCAME,qDAAA,CAKA,UAAA,CANA,aAAA,CAEA,0CAAA,CAAA,kCAAA,CACA,4BAAA,CAAA,oBAAA,CACA,6BAAA,CAAA,qBAAA,CACA,yBAAA,CAAA,iBAAA,CATA,iBAAA,CACA,SAAA,CAEA,YpCqiIJ,CoC1hIE,gEACE,wBTyWa,CSxWb,mDAAA,CAAA,2CpC4hIJ,CqC9kIA,QACE,8DAAA,CAGA,+CAAA,CACA,iEAAA,CACA,oDAAA,CACA,sDAAA,CACA,mDrC+kIF,CqC3kIA,SAEE,kBAAA,CADA,YrC+kIF,CKt7HI,mCiChKA,8BACE,UtC8lIJ,CsC/lIE,8BACE,WtC8lIJ,CsC/lIE,8BAIE,kBtC2lIJ,CsC/lIE,8BAIE,iBtC2lIJ,CsC/lIE,oBAKE,mBAAA,CAFA,YAAA,CADA,atC6lIJ,CsCvlII,kCACE,WtC0lIN,CsC3lII,kCACE,UtC0lIN,CsC3lII,kCAEE,iBAAA,CAAA,ctCylIN,CsC3lII,kCAEE,aAAA,CAAA,kBtCylIN,CACF","file":"main.css"} \ No newline at end of file diff --git a/2.0.0/assets/stylesheets/palette.2505c338.min.css b/2.0.0/assets/stylesheets/palette.2505c338.min.css new file mode 100644 index 00000000..3c005dd6 --- /dev/null +++ b/2.0.0/assets/stylesheets/palette.2505c338.min.css @@ -0,0 +1 @@ +@media 
screen{[data-md-color-scheme=slate]{--md-hue:232;--md-default-fg-color:hsla(var(--md-hue),75%,95%,1);--md-default-fg-color--light:hsla(var(--md-hue),75%,90%,0.62);--md-default-fg-color--lighter:hsla(var(--md-hue),75%,90%,0.32);--md-default-fg-color--lightest:hsla(var(--md-hue),75%,90%,0.12);--md-default-bg-color:hsla(var(--md-hue),15%,21%,1);--md-default-bg-color--light:hsla(var(--md-hue),15%,21%,0.54);--md-default-bg-color--lighter:hsla(var(--md-hue),15%,21%,0.26);--md-default-bg-color--lightest:hsla(var(--md-hue),15%,21%,0.07);--md-code-fg-color:hsla(var(--md-hue),18%,86%,1);--md-code-bg-color:hsla(var(--md-hue),15%,15%,1);--md-code-hl-color:#4287ff26;--md-code-hl-number-color:#e6695b;--md-code-hl-special-color:#f06090;--md-code-hl-function-color:#c973d9;--md-code-hl-constant-color:#9383e2;--md-code-hl-keyword-color:#6791e0;--md-code-hl-string-color:#2fb170;--md-code-hl-name-color:var(--md-code-fg-color);--md-code-hl-operator-color:var(--md-default-fg-color--light);--md-code-hl-punctuation-color:var(--md-default-fg-color--light);--md-code-hl-comment-color:var(--md-default-fg-color--light);--md-code-hl-generic-color:var(--md-default-fg-color--light);--md-code-hl-variable-color:var(--md-default-fg-color--light);--md-typeset-color:var(--md-default-fg-color);--md-typeset-a-color:var(--md-primary-fg-color);--md-typeset-mark-color:#4287ff4d;--md-typeset-kbd-color:hsla(var(--md-hue),15%,94%,0.12);--md-typeset-kbd-accent-color:hsla(var(--md-hue),15%,94%,0.2);--md-typeset-kbd-border-color:hsla(var(--md-hue),15%,14%,1);--md-typeset-table-color:hsla(var(--md-hue),75%,95%,0.12);--md-admonition-fg-color:var(--md-default-fg-color);--md-admonition-bg-color:var(--md-default-bg-color);--md-footer-bg-color:hsla(var(--md-hue),15%,12%,0.87);--md-footer-bg-color--dark:hsla(var(--md-hue),15%,10%,1);--md-shadow-z1:0 0.2rem 0.5rem #0003,0 0 0.05rem #0000001a;--md-shadow-z2:0 0.2rem 0.5rem #0000004d,0 0 0.05rem #00000040;--md-shadow-z3:0 0.2rem 0.5rem #0006,0 0 0.05rem #00000059}[data-md-color-scheme=slate] img[src$="#gh-light-mode-only"],[data-md-color-scheme=slate] img[src$="#only-light"]{display:none}[data-md-color-scheme=slate] img[src$="#gh-dark-mode-only"],[data-md-color-scheme=slate] img[src$="#only-dark"]{display:initial}[data-md-color-scheme=slate][data-md-color-primary=pink]{--md-typeset-a-color:#ed5487}[data-md-color-scheme=slate][data-md-color-primary=purple]{--md-typeset-a-color:#bd78c9}[data-md-color-scheme=slate][data-md-color-primary=deep-purple]{--md-typeset-a-color:#a682e3}[data-md-color-scheme=slate][data-md-color-primary=indigo]{--md-typeset-a-color:#6c91d5}[data-md-color-scheme=slate][data-md-color-primary=teal]{--md-typeset-a-color:#00ccb8}[data-md-color-scheme=slate][data-md-color-primary=green]{--md-typeset-a-color:#71c174}[data-md-color-scheme=slate][data-md-color-primary=deep-orange]{--md-typeset-a-color:#ff9575}[data-md-color-scheme=slate][data-md-color-primary=brown]{--md-typeset-a-color:#c7846b}[data-md-color-scheme=slate][data-md-color-primary=black],[data-md-color-scheme=slate][data-md-color-primary=blue-grey],[data-md-color-scheme=slate][data-md-color-primary=grey],[data-md-color-scheme=slate][data-md-color-primary=white]{--md-typeset-a-color:#6c91d5}[data-md-color-switching] *,[data-md-color-switching] :after,[data-md-color-switching] 
:before{transition-duration:0ms!important}}[data-md-color-accent=red]{--md-accent-fg-color:#ff1947;--md-accent-fg-color--transparent:#ff19471a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=pink]{--md-accent-fg-color:#f50056;--md-accent-fg-color--transparent:#f500561a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=purple]{--md-accent-fg-color:#df41fb;--md-accent-fg-color--transparent:#df41fb1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=deep-purple]{--md-accent-fg-color:#7c4dff;--md-accent-fg-color--transparent:#7c4dff1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=indigo]{--md-accent-fg-color:#526cfe;--md-accent-fg-color--transparent:#526cfe1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=blue]{--md-accent-fg-color:#4287ff;--md-accent-fg-color--transparent:#4287ff1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=light-blue]{--md-accent-fg-color:#0091eb;--md-accent-fg-color--transparent:#0091eb1a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=cyan]{--md-accent-fg-color:#00bad6;--md-accent-fg-color--transparent:#00bad61a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=teal]{--md-accent-fg-color:#00bda4;--md-accent-fg-color--transparent:#00bda41a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=green]{--md-accent-fg-color:#00c753;--md-accent-fg-color--transparent:#00c7531a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=light-green]{--md-accent-fg-color:#63de17;--md-accent-fg-color--transparent:#63de171a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-accent=lime]{--md-accent-fg-color:#b0eb00;--md-accent-fg-color--transparent:#b0eb001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=yellow]{--md-accent-fg-color:#ffd500;--md-accent-fg-color--transparent:#ffd5001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=amber]{--md-accent-fg-color:#fa0;--md-accent-fg-color--transparent:#ffaa001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=orange]{--md-accent-fg-color:#ff9100;--md-accent-fg-color--transparent:#ff91001a;--md-accent-bg-color:#000000de;--md-accent-bg-color--light:#0000008a}[data-md-color-accent=deep-orange]{--md-accent-fg-color:#ff6e42;--md-accent-fg-color--transparent:#ff6e421a;--md-accent-bg-color:#fff;--md-accent-bg-color--light:#ffffffb3}[data-md-color-primary=red]{--md-primary-fg-color:#ef5552;--md-primary-fg-color--light:#e57171;--md-primary-fg-color--dark:#e53734;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=pink]{--md-primary-fg-color:#e92063;--md-primary-fg-color--light:#ec417a;--md-primary-fg-color--dark:#c3185d;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=purple]{--md-primary-fg-color:#ab47bd;--md-primary-fg-color--light:#bb69c9;--md-primary-fg-color--dark:#8c24a8;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=deep-purple]{--md-primary-fg-color:#7e56c2;--md-primary-fg-color--light:#9574cd;--md-primary-fg-color--dark:#673ab6;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-prim
ary=indigo]{--md-primary-fg-color:#4051b5;--md-primary-fg-color--light:#5d6cc0;--md-primary-fg-color--dark:#303fa1;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=blue]{--md-primary-fg-color:#2094f3;--md-primary-fg-color--light:#42a5f5;--md-primary-fg-color--dark:#1975d2;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=light-blue]{--md-primary-fg-color:#02a6f2;--md-primary-fg-color--light:#28b5f6;--md-primary-fg-color--dark:#0287cf;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=cyan]{--md-primary-fg-color:#00bdd6;--md-primary-fg-color--light:#25c5da;--md-primary-fg-color--dark:#0097a8;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=teal]{--md-primary-fg-color:#009485;--md-primary-fg-color--light:#26a699;--md-primary-fg-color--dark:#007a6c;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=green]{--md-primary-fg-color:#4cae4f;--md-primary-fg-color--light:#68bb6c;--md-primary-fg-color--dark:#398e3d;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=light-green]{--md-primary-fg-color:#8bc34b;--md-primary-fg-color--light:#9ccc66;--md-primary-fg-color--dark:#689f38;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=lime]{--md-primary-fg-color:#cbdc38;--md-primary-fg-color--light:#d3e156;--md-primary-fg-color--dark:#b0b52c;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=yellow]{--md-primary-fg-color:#ffec3d;--md-primary-fg-color--light:#ffee57;--md-primary-fg-color--dark:#fbc02d;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=amber]{--md-primary-fg-color:#ffc105;--md-primary-fg-color--light:#ffc929;--md-primary-fg-color--dark:#ffa200;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=orange]{--md-primary-fg-color:#ffa724;--md-primary-fg-color--light:#ffa724;--md-primary-fg-color--dark:#fa8900;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a}[data-md-color-primary=deep-orange]{--md-primary-fg-color:#ff6e42;--md-primary-fg-color--light:#ff8a66;--md-primary-fg-color--dark:#f4511f;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=brown]{--md-primary-fg-color:#795649;--md-primary-fg-color--light:#8d6e62;--md-primary-fg-color--dark:#5d4037;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3}[data-md-color-primary=grey]{--md-primary-fg-color:#757575;--md-primary-fg-color--light:#9e9e9e;--md-primary-fg-color--dark:#616161;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-typeset-a-color:#4051b5}[data-md-color-primary=blue-grey]{--md-primary-fg-color:#546d78;--md-primary-fg-color--light:#607c8a;--md-primary-fg-color--dark:#455a63;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-typeset-a-color:#4051b5}[data-md-color-primary=light-green]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#72ad2e}[data-md-color-primary=lime]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#8b990a}[data-md-color-primary=yellow]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#b8a500}[data-md-color-primary=amber]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#d19d00}[data-md-color-primary=orange]:not([data-md-color-scheme=slate]){--md-typeset-a-color:#e68a00}[data-md-color-prima
ry=white]{--md-primary-fg-color:#fff;--md-primary-fg-color--light:#ffffffb3;--md-primary-fg-color--dark:#00000012;--md-primary-bg-color:#000000de;--md-primary-bg-color--light:#0000008a;--md-typeset-a-color:#4051b5}[data-md-color-primary=white] .md-button{color:var(--md-typeset-a-color)}[data-md-color-primary=white] .md-button--primary{background-color:var(--md-typeset-a-color);border-color:var(--md-typeset-a-color);color:#fff}@media screen and (min-width:60em){[data-md-color-primary=white] .md-search__form{background-color:#00000012}[data-md-color-primary=white] .md-search__form:hover{background-color:#00000052}[data-md-color-primary=white] .md-search__input+.md-search__icon{color:#000000de}}@media screen and (min-width:76.25em){[data-md-color-primary=white] .md-tabs{border-bottom:.05rem solid #00000012}}[data-md-color-primary=black]{--md-primary-fg-color:#000;--md-primary-fg-color--light:#0000008a;--md-primary-fg-color--dark:#000;--md-primary-bg-color:#fff;--md-primary-bg-color--light:#ffffffb3;--md-typeset-a-color:#4051b5}[data-md-color-primary=black] .md-button{color:var(--md-typeset-a-color)}[data-md-color-primary=black] .md-button--primary{background-color:var(--md-typeset-a-color);border-color:var(--md-typeset-a-color);color:#fff}[data-md-color-primary=black] .md-header{background-color:#000}@media screen and (max-width:59.9375em){[data-md-color-primary=black] .md-nav__source{background-color:#000000de}}@media screen and (min-width:60em){[data-md-color-primary=black] .md-search__form{background-color:#ffffff1f}[data-md-color-primary=black] .md-search__form:hover{background-color:#ffffff4d}}@media screen and (max-width:76.1875em){html [data-md-color-primary=black] .md-nav--primary .md-nav__title[for=__drawer]{background-color:#000}}@media screen and (min-width:76.25em){[data-md-color-primary=black] .md-tabs{background-color:#000}} \ No newline at end of file diff --git a/2.0.0/assets/stylesheets/palette.2505c338.min.css.map b/2.0.0/assets/stylesheets/palette.2505c338.min.css.map new file mode 100644 index 00000000..3aec1903 --- /dev/null +++ b/2.0.0/assets/stylesheets/palette.2505c338.min.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["src/assets/stylesheets/palette/_scheme.scss","../../../src/assets/stylesheets/palette.scss","src/assets/stylesheets/palette/_accent.scss","src/assets/stylesheets/palette/_primary.scss","src/assets/stylesheets/utilities/_break.scss"],"names":[],"mappings":"AA2BA,cAGE,6BAKE,YAAA,CAGA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CACA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CAGA,gDAAA,CACA,gDAAA,CAGA,4BAAA,CACA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,iCAAA,CAGA,uDAAA,CACA,6DAAA,CACA,2DAAA,CAGA,yDAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,qDAAA,CACA,wDAAA,CAGA,0DAAA,CAKA,8DAAA,CAKA,0DCxDF,CD6DE,kHAEE,YC3DJ,CD+DE,gHAEE,eC7DJ,CDoFE,yDACE,4BClFJ,CDiFE,2DACE,4BC/EJ,CD8EE,gEACE,4BC5EJ,CD2EE,2DACE,4BCzEJ,CDwEE,yDACE,4BCtEJ,CDqEE,0DACE,4BCnEJ,CDkEE,gEACE,4BChEJ,CD+DE,0DACE,4BC7DJ,CD4DE,2OACE,4BCjDJ,CDwDA,+FAGE,iCCtDF,CACF,CCjDE,2BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD6CN,CCvDE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDoDN,CC9DE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD2DN,CCrEE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDkEN,CC5EE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDyEN,CCnFE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDgFN,CC1FE,kCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDuFN,CCjGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD8FN,CCxGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDqGN,CC/GE,6BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD4GN,CCtHE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDmHN,CC7HE,4BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD6HN,CCpIE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDoIN,CC3IE,6BACE,yBAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD2IN,CClJE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDkJN,CCzJE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDsJN,CE3JE,4BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwJN,CEnKE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgKN,CE3KE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwKN,CEnLE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgLN,CE3LE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwLN,CEnME,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgMN,CE3ME,mCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwMN,CEnNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgNN,CE3NE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwNN,CEnOE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgON,CE3OE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwON,CEnPE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFmPN,CE3PE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCF2PN,CEnQE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFmQN,CE3QE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCF2QN,CEnRE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgRN,CE3RE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwRN,CEnSE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BF4RN,CE5SE,kCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BFqSN,CEtRE,sEACE,4BFyRJ,CE1RE,+DACE,4BF6RJ,CE9RE,iEACE,4BFiSJ,CElSE,gEACE,4BFqSJ,CEtSE,iEACE,4BFySJ,CEhSA,8BACE,0BAAA,CACA,sCAAA,CACA,qCAAA,CACA,+BAAA,CACA,sCAAA,CAGA,4BFiSF,CE9RE,yCACE,+BFgSJ,CE7RI,kDAEE,0CAAA,CACA,sCAAA,CAFA,UFiSN,CG7MI,mCD1EA,+CACE,0BF0RJ,CEvRI,qDACE,0BFyRN,CEpRE,iEACE,eFsRJ,CACF,CGxNI,sCDvDA,uCACE,oCFkRJ,CACF,CEzQA,8BACE,0BAAA,CACA,sCAAA,CACA,gCAAA,CACA,0BAAA,CACA,sCAAA,CAGA,4BF0QF,CEvQE,yCACE,+BFyQJ,CEtQI,kDAEE,0CAAA,CACA,sCAAA,CAFA,UF0QN,CEnQE,yCACE,qBFqQJ,CG9NI,wCDhCA,8CACE,0BFiQJ,CACF,CGtPI,mCDJA,+CACE,0BF6PJ,CE1PI,qDACE,0BF4PN,CACF,CG
3OI,wCDTA,iFACE,qBFuPJ,CACF,CGnQI,sCDmBA,uCACE,qBFmPJ,CACF","file":"palette.css"} \ No newline at end of file diff --git a/2.0.0/css/extra.css b/2.0.0/css/extra.css new file mode 100644 index 00000000..58759313 --- /dev/null +++ b/2.0.0/css/extra.css @@ -0,0 +1,45 @@ +/* Indentation. */ +div.doc-contents:not(.first) { + padding-left: 25px; + border-left: 4px solid rgba(230, 230, 230); + margin-bottom: 60px; +} + +/* Don't use vertical space on hidden ToC entries. */ +.hidden-toc::before { + margin-top: 0 !important; + padding-top: 0 !important; +} + +/* Don't show permalink of hidden ToC entries. */ +.hidden-toc a.headerlink { + display: none; +} + +/* Avoid breaking parameters name, etc. in table cells. */ +td code { + word-break: normal !important; +} + +/* For pieces of Markdown rendered in table cells. */ +td p { + margin-top: 0 !important; + margin-bottom: 0 !important; +} + +/* Avoid breaking code headings. */ +.doc-heading code { + white-space: normal; +} + +/* Improve rendering of parameters, returns and exceptions. */ +.field-name { + min-width: 100px; +} +.field-name, .field-body { + border: none !important; + padding: 0 !important; +} +.field-list { + margin: 0 !important; +} diff --git a/2.0.0/developer/index.html b/2.0.0/developer/index.html new file mode 100644 index 00000000..8a84ac80 --- /dev/null +++ b/2.0.0/developer/index.html @@ -0,0 +1,939 @@ + + + + + + + + + + + + + + + + 开发者文档 - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + +
    + +
    + + + + + + +
    +
    + + + +
    +
    +
    + + + + +
    +
    +
    + + + + + + + +
    +
    + + + + + + + + + +

    开发者文档

    + +

    Omicron的开发流程

    +

    Omicron遵循ppw定义的开发流程和代码规范。您可以阅读tutorial来了解更多。

    +

    简单来说,通过ppw构建的工程,具有以下能力:

    +

    基于poetry进行依赖管理

    +
      +
    1. 通过poetry add给项目增加新的依赖。如果依赖项仅在开发环境下使用,请增加为Extra项,并正确归类为dev, doc和test中的一类。
    2. +
    3. 使用poetry lock来锁定依赖的版本。
    4. +
    5. 使用poetry update更新依赖项。
    6. +
    +

    flake8, isort, black

    +

omicron使用flake8, isort和black进行语法检查和代码格式化。

    +

    pre-commit

    +

    使用pre-commit来确保提交的代码都符合规范。如果是刚下载代码,请运行pre-commit install安装钩子。

    +

    TODO: 将通用部分转换到大富翁的开发者指南中

    +

    如何进行单元测试?

    +

    设置环境变量

    +

    Omicron在notify包中提供了发送邮件和钉钉消息的功能。在进行单元测试前,需要设置相关的环境变量:

    +
    1
    +2
    +3
    +4
    +5
    +6
    +7
    DINGTALK_ACCESS_TOKEN=?
    +DINGTALK_SECRET=?
    +
    +export MAIL_FROM=?
    +export MAIL_SERVER=?
    +export MAIL_TO=?
    +export MAIL_PASSWORD=?
    +
    +

    上述环境变量已在gh://zillionare/omicron中设置。如果您fork了omicron并且想通过github actions进行测试,请在您的repo中设置相应的secrets。

    +
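下面是一段示意性的测试片段(并非omicron的实际测试代码,仅演示如何读取上述环境变量,并在变量缺失时跳过依赖它们的用例):

```python
# 示意代码:读取上文所列环境变量;若缺失则跳过依赖它们的测试用例
import os

import pytest

REQUIRED_VARS = [
    "DINGTALK_ACCESS_TOKEN",
    "DINGTALK_SECRET",
    "MAIL_FROM",
    "MAIL_SERVER",
    "MAIL_TO",
    "MAIL_PASSWORD",
]

missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]


@pytest.mark.skipif(bool(missing), reason=f"缺少环境变量: {missing}")
def test_notify_env_configured():
    # 仅验证配置可读;真实测试会进一步调用notify包发送邮件/钉钉消息
    assert os.environ["MAIL_FROM"]
```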

    启动测试

    +

    通过tox来运行测试。tox将启动必要的测试环境(通过 stop_service.sh 和 start_service.sh)。

    +

    文档

    +

    文档由两部分组成:一部分是项目文档,存放在docs目录下;另一部分是API文档,从源代码的注释中提取。生成文档的工具是mkdocs,API文档的提取由mkdocs插件mkdocstrings完成。
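下面是一个示意性的docstring片段(函数为虚构,风格按Google风格假设),mkdocstrings即从这类源代码注释中提取API文档:

```python
def moving_average(values: list, window: int) -> list:
    """计算简单移动平均(示例函数,仅用于演示docstring风格,并非omicron的实际API)。

    Args:
        values: 输入的数值序列。
        window: 窗口长度。

    Returns:
        长度为 len(values) - window + 1 的移动平均列表。
    """
    return [
        sum(values[i : i + window]) / window
        for i in range(len(values) - window + 1)
    ]
```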

    + + + + + + +
    +
    + + +
    + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/history/index.html b/2.0.0/history/index.html new file mode 100644 index 00000000..ff9ce0ee --- /dev/null +++ b/2.0.0/history/index.html @@ -0,0 +1,1048 @@ + + + + + + + + + + + + + + + + 版本历史 - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + +
    + +
    + + + + + + +
    +
    + + + +
    +
    +
    + + + + +
    +
    +
    + + + + + + + +
    +
    + + + + + + + + + +

    版本历史

    + + +

    History

    +

    +

    2.0.0-alpha78

    +
      +
    • backtest中捕获异常时,如果是TradeError类型,打印该对象自带的stack
    • +
    +

    2.0.0-alpha77

    +
      +
    • strategy增加lifecycle
    • +
    • 保留最后一个回测周期仅供交易使用,不调用predict
    • +
    • Security获取证券列表时,如果不调用types,将只获取股票列表;调用types()且不传参数,将获取包含指数和股票在内的列表。
    • +
    +

    2.0.0-alpha76

    +
      +
    • 增加backtestlog模块,用于输出回测日志时,将时间替换为回测时间
    • +
    • 增加行情预取功能
    • +
    • 增加回测报告中绘制自定义指标功能(仅支持Scatter)
    • +
    +

    2.0.0-alpha.69

    +
      +
    • BaseStrategy增加available_shares方法
    • +
    +

    2.0.0-alpha.68

    +
      +
    • 增加了MetricsGraph
    • +
    • 增加Strategy基类
    • +
    • Candlestick增加了布林带指标
    • +
    +

    2.0.0-alpha.49 (2022-09-16)

    +
      +
    • 修订了安装文档。
    • +
    • 移除了windows下对ta-lib的依赖。请参考安装指南以获取在windows下安装ta-lib的方法。
    • +
    • 更新了poetry.lock文件。在上一版中,该文件与pyproject.toml不同步,导致安装时进行版本锁定,延长了安装时间。
    • +
    • 修复了k线图标记顶和底时,标记离被标注的点太远的问题。
    • +
    +

    2.0.0-alpha.46 (2022-09-10)

    +
      +
    • #40 增加k线图绘制功能。
    • +
    • 本次修订增加了对plotly, ckwrap, ta-lib的依赖。
    • +
    • 将原属于omicron.talib包中的bars_since, find_runs等跟数组相关的操作,移入omicron.extensions.np中。
    • +
    +

    2.0.0-alpha.45 (2022-09-08)

    +
      +
    • #39 fixed.
    • +
    • removed dependency of postgres
    • +
    • removed funds
    • +
    • update arrow's version to be greater than 1.2
    • +
    • lock aiohttp's version to >3.8, <4.0
    • +
    +

    2.0.0-alpha.35 (2022-07-13)

    +
      +
    • fix an issue in security exit date comparison (Security.eval()).
    • +
    +

    2.0.0-alpha.34 (2022-07-13)

    +
      +
    • change Security.select() to a synchronous call
    • +
    • date parameter of Security.select(): if date >= today, the data in cache is used; otherwise, the database is queried.
    • +
    +

    0.3.1 (2020-12-11)

    +

    This version introduced no new features; it is an internal amendment release as we migrate to the poetry build system.

    +

    0.3.0 (2020-11-22)

    +
      +
    • Calendar, Triggers and time frame calculation
    • +
    • Security list
    • +
    • Bars with turnover
    • +
    • Valuation
    • +
    +

    0.1.0 (2020-04-28)

    +
      +
    • First release on PyPI.
    • +
    + + + + + + + +
    +
    + + +
    + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/index.html b/2.0.0/index.html new file mode 100644 index 00000000..0e0e8c72 --- /dev/null +++ b/2.0.0/index.html @@ -0,0 +1,824 @@ + + + + + + + + + + + + + + + + Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + +
    + +
    + + + + + + +
    +
    + + + +
    +
    +
    + + + + +
    +
    +
    + + + +
    +
    +
    + + + +
    +
    +
    + + + +
    +
    + + + + + + + + + + + +

    +

    Omicron - Core Library for Zillionare

    + +

    Version +CI Status +Code Coverage +Downloads +License +Style

    +

    Contents

    +

    简介

    +

    Omicron是Zillionare的核心模块,提供以下功能:

    +
      +
    1. 行情数据读取(需要启动zillionare-omega服务)。
    2. +
    3. 概念板块数据,也需要启动zillionare-omega服务。
    4. +
    5. 交易日历及时间帧相关操作
    6. +
    7. 证券列表及相关查询操作
    8. +
    9. numpy数组功能扩展
    10. +
    11. 技术指标及形态分析功能
    12. +
    13. 各种均线、曲线拟合、直线斜率和夹角计算、曲线平滑函数等。
    14. +
    15. 形态分析功能,如交叉、顶底搜索、平台检测、RSI背离等。
    16. +
    17. 策略编写框架,不修改代码即可同时用于实盘与回测。
    18. +
    19. 绘图功能。提供了交互式k线图及回测报告。
    20. +
    21. 其它
    22. +
    23. 修正Python的round函数错误,改用math_round
    24. +
    25. 判断价格是否相等的函数:price_equal(math_round与price_equal的示意用法见本列表之后)
    26. +
    +
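下面是math_round与price_equal的示意用法(导入路径与参数形式为假设,具体请以API文档为准):

```python
# 示意代码:导入路径为假设,具体请参考API文档
from omicron.extensions.decimals import math_round, price_equal

# 内置round受浮点表示影响:round(0.3 / 2, 1) 得到 0.1,而期望是 0.2
# math_round此处假设按 (值, 小数位数) 的形式调用,采用四舍五入
print(round(0.3 / 2, 1), math_round(0.3 / 2, 1))

# 比较两个价格是否相等时,用price_equal代替直接使用==,以容忍浮点误差
print(price_equal(9.99, 9.99))
```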

    Omicron是大富翁量化框架的一部分。您必须至少安装并运行Omega,然后才能利用omicron来访问上述数据。

    +

    使用文档

    +

    Credits

    +

    Zillionare-Omicron采用Python Project Wizard构建。

    + + + + + + + +
    +
    + + +
    + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/installation/index.html b/2.0.0/installation/index.html new file mode 100644 index 00000000..50496743 --- /dev/null +++ b/2.0.0/installation/index.html @@ -0,0 +1,857 @@ + + + + + + + + + + + + + + + + 安装 - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    + + + + + + +
    + + +
    + +
    + + + + + + +
    +
    + + + +
    +
    +
    + + + + +
    +
    +
    + + + +
    +
    +
    + + + +
    +
    +
    + + + +
    +
    + + + + + + + + + +

    安装

    + +

    1. 安装

    +

    要使用Omicron来获取行情数据,请先安装Omega,并按说明文档要求完成初始化配置。

    +

    然后在开发机上,运行下面的命令安装Omicron:

    +
    1
        pip install zillionare-omicron
    +
    +

    omicron依赖numpy, pandas, scipy, sklearn。这些库的体积比较大,因此在安装omicron时,请保持网络连接畅通,必要时,请添加阿里或者清华的PyPI镜像。

    +

    omicron还依赖于talib, zigzag, ciso8601等高性能的C/C++库。安装这些库往往需要在您本机执行一个编译过程。请遵循以下步骤完成:

    +
    +

    安装原生库

    +
    +
    +
    +

    注意:我们不支持32位Windows。

    +

    请跟随windows下安装omicron来完成安装。

    +
    +
    +
      +
    1. 请执行下面的脚本以完成ta-lib的安装 +
      1
      +2
      +3
      +4
      +5
      +6
      +7
      sudo apt update && sudo apt upgrade -y && sudo apt autoremove -y
      +sudo apt-get install build-essential -y
      +curl -L http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz | tar -xzv -C /tmp/
      +cd /tmp/ta-lib
      +./configure --prefix=/usr
      +make
      +sudo make install
      +
    2. +
    3. 现在安装omicron,所有其它依赖的安装将自动完成。
    4. +
    +
    +
    +
      +
    1. 请通过brew install ta-lib来完成ta-lib的安装
    2. +
    3. 现在安装omicron,所有其它依赖的安装都将自动完成。
    4. +
    +
    +
    +
    +
    +
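安装完成后,可以用下面的Python片段做一个简单验证(示意代码,版本查询使用标准库importlib.metadata):

```python
# 示意代码:验证omicron及主要依赖已正确安装
from importlib.metadata import version

import omicron  # 若ta-lib等原生库未装好,此处import可能失败

print("zillionare-omicron:", version("zillionare-omicron"))
for dep in ("numpy", "pandas", "scipy"):
    print(dep, version(dep))
```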

    2. 常见问题

    +

    无法访问aka.ms

    +

    如果遇到aka.ms无法访问的问题,有可能是IP地址解析的问题。请以管理员权限,打开并编辑位于c:\windows\system32\drivers\etc\下的hosts文件,将此行加入到文件中: +

    1
    23.41.86.106 aka.ms
    +
    +

    + + + + + + +
    +
    + + +
    + +
    + + + +
    +
    +
    +
    + + + + + + + + + + \ No newline at end of file diff --git a/2.0.0/objects.inv b/2.0.0/objects.inv new file mode 100644 index 00000000..cd814dfb Binary files /dev/null and b/2.0.0/objects.inv differ diff --git a/2.0.0/search/search_index.json b/2.0.0/search/search_index.json new file mode 100644 index 00000000..91dfffad --- /dev/null +++ b/2.0.0/search/search_index.json @@ -0,0 +1 @@ +{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"Omicron - Core Library for Zillionare Contents \u00b6 \u7b80\u4ecb \u00b6 Omicron\u662fZillionare\u7684\u6838\u5fc3\u6a21\u5757\uff0c\u63d0\u4f9b\u4ee5\u4e0b\u529f\u80fd\uff1a \u884c\u60c5\u6570\u636e\u8bfb\u53d6\uff08\u9700\u8981\u542f\u52a8 zillionare-omega \u670d\u52a1\u3002 \u6982\u5ff5\u677f\u5757\u6570\u636e \uff0c\u4e5f\u9700\u8981\u542f\u52a8 zillionare-omega \u670d\u52a1\u3002 \u4ea4\u6613\u65e5\u5386\u53ca\u65f6\u95f4\u5e27\u76f8\u5173\u64cd\u4f5c \u8bc1\u5238\u5217\u8868\u53ca\u76f8\u5173\u67e5\u8be2\u64cd\u4f5c numpy\u6570\u7ec4\u529f\u80fd\u6269\u5c55 \u6280\u672f\u6307\u6807\u53ca\u5f62\u6001\u5206\u6790\u529f\u80fd \u5404\u79cd\u5747\u7ebf\u3001\u66f2\u7ebf\u62df\u5408\u3001\u76f4\u7ebf\u659c\u7387\u548c\u5939\u89e3\u8ba1\u7b97\u3001\u66f2\u7ebf\u5e73\u6ed1\u51fd\u6570\u7b49\u3002 \u5f62\u6001\u5206\u6790\u529f\u80fd\uff0c\u5982\u4ea4\u53c9\u3001\u9876\u5e95\u641c\u7d22\u3001\u5e73\u53f0\u68c0\u6d4b\u3001RSI\u80cc\u79bb\u7b49\u3002 \u7b56\u7565\u7f16\u5199\u6846\u67b6 \uff0c\u4e0d\u4fee\u6539\u4ee3\u7801\u5373\u53ef\u540c\u65f6\u7528\u4e8e\u5b9e\u76d8\u4e0e\u56de\u6d4b\u3002 \u7ed8\u56fe\u529f\u80fd\u3002\u63d0\u4f9b\u4e86 \u4ea4\u4e92\u5f0fk\u7ebf\u56fe \u53ca \u56de\u6d4b\u62a5\u544a \u3002 \u5176\u5b83 \u4fee\u6b63Python\u7684round\u51fd\u6570\u9519\u8bef\uff0c\u6539\u7528 math_round \u5224\u65ad\u4ef7\u683c\u662f\u5426\u76f8\u7b49\u7684\u51fd\u6570\uff1a price_equal Omicron\u662f\u5927\u5bcc\u7fc1\u91cf\u5316\u6846\u67b6\u7684\u4e00\u90e8\u5206\u3002\u60a8\u5fc5\u987b\u81f3\u5c11\u5b89\u88c5\u5e76\u8fd0\u884c Omega \uff0c\u7136\u540e\u624d\u80fd\u5229\u7528omicron\u6765\u8bbf\u95ee\u4e0a\u8ff0\u6570\u636e\u3002 \u4f7f\u7528\u6587\u6863 Credits \u00b6 Zillionare-Omicron\u91c7\u7528 Python Project Wizard \u6784\u5efa\u3002","title":"\u7b80\u4ecb"},{"location":"#contents","text":"","title":"Contents"},{"location":"#\u7b80\u4ecb","text":"Omicron\u662fZillionare\u7684\u6838\u5fc3\u6a21\u5757\uff0c\u63d0\u4f9b\u4ee5\u4e0b\u529f\u80fd\uff1a \u884c\u60c5\u6570\u636e\u8bfb\u53d6\uff08\u9700\u8981\u542f\u52a8 zillionare-omega \u670d\u52a1\u3002 \u6982\u5ff5\u677f\u5757\u6570\u636e \uff0c\u4e5f\u9700\u8981\u542f\u52a8 zillionare-omega \u670d\u52a1\u3002 \u4ea4\u6613\u65e5\u5386\u53ca\u65f6\u95f4\u5e27\u76f8\u5173\u64cd\u4f5c \u8bc1\u5238\u5217\u8868\u53ca\u76f8\u5173\u67e5\u8be2\u64cd\u4f5c numpy\u6570\u7ec4\u529f\u80fd\u6269\u5c55 \u6280\u672f\u6307\u6807\u53ca\u5f62\u6001\u5206\u6790\u529f\u80fd \u5404\u79cd\u5747\u7ebf\u3001\u66f2\u7ebf\u62df\u5408\u3001\u76f4\u7ebf\u659c\u7387\u548c\u5939\u89e3\u8ba1\u7b97\u3001\u66f2\u7ebf\u5e73\u6ed1\u51fd\u6570\u7b49\u3002 \u5f62\u6001\u5206\u6790\u529f\u80fd\uff0c\u5982\u4ea4\u53c9\u3001\u9876\u5e95\u641c\u7d22\u3001\u5e73\u53f0\u68c0\u6d4b\u3001RSI\u80cc\u79bb\u7b49\u3002 \u7b56\u7565\u7f16\u5199\u6846\u67b6 \uff0c\u4e0d\u4fee\u6539\u4ee3\u7801\u5373\u53ef\u540c\u65f6\u7528\u4e8e\u5b9e\u76d8\u4e0e\u56de\u6d4b\u3002 \u7ed8\u56fe\u529f\u80fd\u3002\u63d0\u4f9b\u4e86 \u4ea4\u4e92\u5f0fk\u7ebf\u56fe \u53ca 
\u56de\u6d4b\u62a5\u544a \u3002 \u5176\u5b83 \u4fee\u6b63Python\u7684round\u51fd\u6570\u9519\u8bef\uff0c\u6539\u7528 math_round \u5224\u65ad\u4ef7\u683c\u662f\u5426\u76f8\u7b49\u7684\u51fd\u6570\uff1a price_equal Omicron\u662f\u5927\u5bcc\u7fc1\u91cf\u5316\u6846\u67b6\u7684\u4e00\u90e8\u5206\u3002\u60a8\u5fc5\u987b\u81f3\u5c11\u5b89\u88c5\u5e76\u8fd0\u884c Omega \uff0c\u7136\u540e\u624d\u80fd\u5229\u7528omicron\u6765\u8bbf\u95ee\u4e0a\u8ff0\u6570\u636e\u3002 \u4f7f\u7528\u6587\u6863","title":"\u7b80\u4ecb"},{"location":"#credits","text":"Zillionare-Omicron\u91c7\u7528 Python Project Wizard \u6784\u5efa\u3002","title":"Credits"},{"location":"developer/","text":"Omicron\u7684\u5f00\u53d1\u6d41\u7a0b \u00b6 Omicron\u9075\u5faa ppw \u5b9a\u4e49\u7684\u5f00\u53d1\u6d41\u7a0b\u548c\u4ee3\u7801\u89c4\u8303\u3002\u60a8\u53ef\u4ee5\u9605\u8bfb tutorial \u6765\u4e86\u89e3\u66f4\u591a\u3002 \u7b80\u5355\u6765\u8bf4\uff0c\u901a\u8fc7ppw\u6784\u5efa\u7684\u5de5\u7a0b\uff0c\u5177\u6709\u4ee5\u4e0b\u80fd\u529b\uff1a \u57fa\u4e8epoetry\u8fdb\u884c\u4f9d\u8d56\u7ba1\u7406 \u00b6 \u901a\u8fc7poetry add\u7ed9\u9879\u76ee\u589e\u52a0\u65b0\u7684\u4f9d\u8d56\u3002\u5982\u679c\u4f9d\u8d56\u9879\u4ec5\u5728\u5f00\u53d1\u73af\u5883\u4e0b\u4f7f\u7528\uff0c\u8bf7\u589e\u52a0\u4e3aExtra\u9879\uff0c\u5e76\u6b63\u786e\u5f52\u7c7b\u4e3adev, doc\u548ctest\u4e2d\u7684\u4e00\u7c7b\u3002 \u4f7f\u7528poetry lock\u6765\u9501\u5b9a\u4f9d\u8d56\u7684\u7248\u672c\u3002 \u4f7f\u7528poetry update\u66f4\u65b0\u4f9d\u8d56\u9879\u3002 flake8, isort, black \u00b6 omicron\u4f7f\u7528flake8, isort\u548cblack\u8fdb\u884c\u8bed\u6cd5\u68c0\u67e5\u548c\u4ee3\u7801\u683c\u5f0f\u5316 pre-commit \u00b6 \u4f7f\u7528pre-commit\u6765\u786e\u4fdd\u63d0\u4ea4\u7684\u4ee3\u7801\u90fd\u7b26\u5408\u89c4\u8303\u3002\u5982\u679c\u662f\u521a\u4e0b\u8f7d\u4ee3\u7801\uff0c\u8bf7\u8fd0\u884cpre-commit install\u5b89\u88c5\u94a9\u5b50\u3002 TODO: \u5c06\u901a\u7528\u90e8\u5206\u8f6c\u6362\u5230\u5927\u5bcc\u7fc1\u7684\u5f00\u53d1\u8005\u6307\u5357\u4e2d \u00b6 \u5982\u4f55\u8fdb\u884c\u5355\u5143\u6d4b\u8bd5\uff1f \u00b6 \u8bbe\u7f6e\u73af\u5883\u53d8\u91cf \u00b6 Omicron\u5728notify\u5305\u4e2d\u63d0\u4f9b\u4e86\u53d1\u9001\u90ae\u4ef6\u548c\u9489\u9489\u6d88\u606f\u7684\u529f\u80fd\u3002\u5728\u8fdb\u884c\u5355\u5143\u6d4b\u8bd5\u524d\uff0c\u9700\u8981\u8bbe\u7f6e\u76f8\u5173\u7684\u73af\u5883\u53d8\u91cf\uff1a 1 2 3 4 5 6 7 DINGTALK_ACCESS_TOKEN = ? DINGTALK_SECRET = ? export MAIL_FROM = ? export MAIL_SERVER = ? export MAIL_TO = ? export MAIL_PASSWORD = ? 
\u4e0a\u8ff0\u73af\u5883\u53d8\u91cf\u5df2\u5728gh://zillionare/omicron\u4e2d\u8bbe\u7f6e\u3002\u5982\u679c\u60a8fork\u4e86omicron\u5e76\u4e14\u60f3\u901a\u8fc7github actions\u8fdb\u884c\u6d4b\u8bd5\uff0c\u8bf7\u5728\u60a8\u7684repo\u4e2d\u8bbe\u7f6e\u76f8\u5e94\u7684secrets\u3002 \u542f\u52a8\u6d4b\u8bd5 \u00b6 \u901a\u8fc7tox\u6765\u8fd0\u884c\u6d4b\u8bd5\u3002tox\u5c06\u542f\u52a8\u5fc5\u8981\u7684\u6d4b\u8bd5\u73af\u5883\uff08\u901a\u8fc7 stop_service.sh \u548c start_service.sh \uff09\u3002 \u6587\u6863 \u00b6 \u6587\u6863\u7531\u4e24\u90e8\u5206\u7ec4\u6210\u3002\u4e00\u90e8\u5206\u662f\u9879\u76ee\u6587\u6863\uff0c\u5b58\u653e\u5728docs\u76ee\u5f55\u4e0b\u3002\u53e6\u4e00\u90e8\u5206\u662fAPI\u6587\u6863\uff0c\u5b83\u4eec\u4ece\u6e90\u4ee3\u7801\u7684\u6ce8\u91ca\u4e2d\u63d0\u53d6\u3002\u751f\u6210\u6587\u6863\u7684\u5de5\u5177\u662fmkdocs\u3002API\u6587\u6863\u7684\u63d0\u53d6\u5219\u7531mkdocs\u7684\u63d2\u4ef6mkdocstrings\u63d0\u53d6\u3002","title":"\u5f00\u53d1\u8005\u6587\u6863"},{"location":"developer/#omicron\u7684\u5f00\u53d1\u6d41\u7a0b","text":"Omicron\u9075\u5faa ppw \u5b9a\u4e49\u7684\u5f00\u53d1\u6d41\u7a0b\u548c\u4ee3\u7801\u89c4\u8303\u3002\u60a8\u53ef\u4ee5\u9605\u8bfb tutorial \u6765\u4e86\u89e3\u66f4\u591a\u3002 \u7b80\u5355\u6765\u8bf4\uff0c\u901a\u8fc7ppw\u6784\u5efa\u7684\u5de5\u7a0b\uff0c\u5177\u6709\u4ee5\u4e0b\u80fd\u529b\uff1a","title":"Omicron\u7684\u5f00\u53d1\u6d41\u7a0b"},{"location":"developer/#\u57fa\u4e8epoetry\u8fdb\u884c\u4f9d\u8d56\u7ba1\u7406","text":"\u901a\u8fc7poetry add\u7ed9\u9879\u76ee\u589e\u52a0\u65b0\u7684\u4f9d\u8d56\u3002\u5982\u679c\u4f9d\u8d56\u9879\u4ec5\u5728\u5f00\u53d1\u73af\u5883\u4e0b\u4f7f\u7528\uff0c\u8bf7\u589e\u52a0\u4e3aExtra\u9879\uff0c\u5e76\u6b63\u786e\u5f52\u7c7b\u4e3adev, doc\u548ctest\u4e2d\u7684\u4e00\u7c7b\u3002 \u4f7f\u7528poetry lock\u6765\u9501\u5b9a\u4f9d\u8d56\u7684\u7248\u672c\u3002 \u4f7f\u7528poetry update\u66f4\u65b0\u4f9d\u8d56\u9879\u3002","title":"\u57fa\u4e8epoetry\u8fdb\u884c\u4f9d\u8d56\u7ba1\u7406"},{"location":"developer/#flake8-isort-black","text":"omicron\u4f7f\u7528flake8, isort\u548cblack\u8fdb\u884c\u8bed\u6cd5\u68c0\u67e5\u548c\u4ee3\u7801\u683c\u5f0f\u5316","title":"flake8, isort, black"},{"location":"developer/#pre-commit","text":"\u4f7f\u7528pre-commit\u6765\u786e\u4fdd\u63d0\u4ea4\u7684\u4ee3\u7801\u90fd\u7b26\u5408\u89c4\u8303\u3002\u5982\u679c\u662f\u521a\u4e0b\u8f7d\u4ee3\u7801\uff0c\u8bf7\u8fd0\u884cpre-commit install\u5b89\u88c5\u94a9\u5b50\u3002","title":"pre-commit"},{"location":"developer/#todo-\u5c06\u901a\u7528\u90e8\u5206\u8f6c\u6362\u5230\u5927\u5bcc\u7fc1\u7684\u5f00\u53d1\u8005\u6307\u5357\u4e2d","text":"","title":"TODO: \u5c06\u901a\u7528\u90e8\u5206\u8f6c\u6362\u5230\u5927\u5bcc\u7fc1\u7684\u5f00\u53d1\u8005\u6307\u5357\u4e2d"},{"location":"developer/#\u5982\u4f55\u8fdb\u884c\u5355\u5143\u6d4b\u8bd5","text":"","title":"\u5982\u4f55\u8fdb\u884c\u5355\u5143\u6d4b\u8bd5\uff1f"},{"location":"developer/#\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf","text":"Omicron\u5728notify\u5305\u4e2d\u63d0\u4f9b\u4e86\u53d1\u9001\u90ae\u4ef6\u548c\u9489\u9489\u6d88\u606f\u7684\u529f\u80fd\u3002\u5728\u8fdb\u884c\u5355\u5143\u6d4b\u8bd5\u524d\uff0c\u9700\u8981\u8bbe\u7f6e\u76f8\u5173\u7684\u73af\u5883\u53d8\u91cf\uff1a 1 2 3 4 5 6 7 DINGTALK_ACCESS_TOKEN = ? DINGTALK_SECRET = ? export MAIL_FROM = ? export MAIL_SERVER = ? export MAIL_TO = ? export MAIL_PASSWORD = ? 
\u4e0a\u8ff0\u73af\u5883\u53d8\u91cf\u5df2\u5728gh://zillionare/omicron\u4e2d\u8bbe\u7f6e\u3002\u5982\u679c\u60a8fork\u4e86omicron\u5e76\u4e14\u60f3\u901a\u8fc7github actions\u8fdb\u884c\u6d4b\u8bd5\uff0c\u8bf7\u5728\u60a8\u7684repo\u4e2d\u8bbe\u7f6e\u76f8\u5e94\u7684secrets\u3002","title":"\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf"},{"location":"developer/#\u542f\u52a8\u6d4b\u8bd5","text":"\u901a\u8fc7tox\u6765\u8fd0\u884c\u6d4b\u8bd5\u3002tox\u5c06\u542f\u52a8\u5fc5\u8981\u7684\u6d4b\u8bd5\u73af\u5883\uff08\u901a\u8fc7 stop_service.sh \u548c start_service.sh \uff09\u3002","title":"\u542f\u52a8\u6d4b\u8bd5"},{"location":"developer/#\u6587\u6863","text":"\u6587\u6863\u7531\u4e24\u90e8\u5206\u7ec4\u6210\u3002\u4e00\u90e8\u5206\u662f\u9879\u76ee\u6587\u6863\uff0c\u5b58\u653e\u5728docs\u76ee\u5f55\u4e0b\u3002\u53e6\u4e00\u90e8\u5206\u662fAPI\u6587\u6863\uff0c\u5b83\u4eec\u4ece\u6e90\u4ee3\u7801\u7684\u6ce8\u91ca\u4e2d\u63d0\u53d6\u3002\u751f\u6210\u6587\u6863\u7684\u5de5\u5177\u662fmkdocs\u3002API\u6587\u6863\u7684\u63d0\u53d6\u5219\u7531mkdocs\u7684\u63d2\u4ef6mkdocstrings\u63d0\u53d6\u3002","title":"\u6587\u6863"},{"location":"history/","text":"History \u00b6 \u00b6 2.0.0-alpha78 \u00b6 backtest\u4e2d\u6355\u83b7\u5f02\u5e38\u65f6\uff0c\u5982\u679c\u662fTradeError\u7c7b\u578b\uff0c\u6253\u5370\u8be5\u5bf9\u8c61\u81ea\u5e26\u7684stack 2.0.0-alpha77 \u00b6 strategy\u589e\u52a0lifecycle \u4fdd\u7559\u6700\u540e\u4e00\u4e2a\u56de\u6d4b\u5468\u671f\u4ec5\u4f9b\u4ea4\u6613\u4f7f\u7528\uff0c\u4e0d\u8c03\u7528 predict Security\u83b7\u53d6\u80a1\u7968\u5217\u8868\u65f6\uff0c\u5982\u679c\u4e0d\u8c03\u7528 types \uff0c\u5c06\u83b7\u53d6\u80a1\u7968\u5217\u8868\uff0c\u8c03\u7528 types() \u4e0d\u4f20\u53c2\u6570\u5c06\u83b7\u53d6\u5e26\u6307\u6570\u3001\u80a1\u7968\u7684\u5217\u8868\u3002 2.0.0-alpha76 \u00b6 \u589e\u52a0backtestlog\u6a21\u5757\uff0c\u7528\u4e8e\u8f93\u51fa\u56de\u6d4b\u65e5\u5fd7\u65f6\uff0c\u5c06\u65f6\u95f4\u66ff\u6362\u4e3a\u56de\u6d4b\u65f6\u95f4 \u589e\u52a0\u884c\u60c5\u9884\u53d6\u529f\u80fd \u589e\u52a0\u56de\u6d4b\u62a5\u544a\u4e2d\u7ed8\u5236\u81ea\u5b9a\u4e49\u6307\u6807\u529f\u80fd\uff08\u4ec5\u652f\u6301Scatter) 2.0.0-alpha.69 \u00b6 BaseStrategy\u589e\u52a0 available_shares \u65b9\u6cd5 2.0.0-alpha.68 \u00b6 \u589e\u52a0\u4e86MetricsGraph \u589e\u52a0Strategy\u57fa\u7c7b Candlestick\u589e\u52a0\u4e86\u5e03\u6797\u5e26\u6307\u6807 2.0.0-alpha.49 (2022-09-16) \u00b6 \u4fee\u8ba2\u4e86\u5b89\u88c5\u6587\u6863\u3002 \u79fb\u9664\u4e86windows\u4e0b\u5bf9ta-lib\u7684\u4f9d\u8d56\u3002\u8bf7\u53c2\u8003 \u5b89\u88c5\u6307\u5357 \u4ee5\u83b7\u53d6\u5728windows\u4e0b\u5b89\u88c5ta-lib\u7684\u65b9\u6cd5\u3002 \u66f4\u65b0\u4e86poetry.lock\u6587\u4ef6\u3002\u5728\u4e0a\u4e00\u7248\u4e2d\uff0c\u8be5\u6587\u4ef6\u4e0epyproject.toml\u4e0d\u540c\u6b65\uff0c\u5bfc\u81f4\u5b89\u88c5\u65f6\u8fdb\u884c\u7248\u672c\u9501\u5b9a\uff0c\u5ef6\u957f\u4e86\u5b89\u88c5\u65f6\u95f4\u3002 \u4fee\u590d\u4e86k\u7ebf\u56fe\u6807\u8bb0\u9876\u548c\u5e95\u65f6\uff0c\u6807\u8bb0\u79bb\u88ab\u6807\u6ce8\u7684\u70b9\u592a\u8fdc\u7684\u95ee\u9898\u3002 2.0.0-alpha.46 (2022-09-10) \u00b6 #40 \u589e\u52a0k\u7ebf\u56fe\u7ed8\u5236\u529f\u80fd\u3002 \u672c\u6b21\u4fee\u8ba2\u589e\u52a0\u4e86\u5bf9plotly, ckwrap, ta-lib\u7684\u4f9d\u8d56\u3002 \u5c06\u539f\u5c5e\u4e8eomicron.talib\u5305\u4e2d\u7684bars_since, find_runs\u7b49\u8ddf\u6570\u7ec4\u76f8\u5173\u7684\u64cd\u4f5c\uff0c\u79fb\u5165omicron.extensions.np\u4e2d\u3002 2.0.0-alpha.45 (2022-09-08) \u00b6 #39 fixed. 
removed dependency of postgres removed funds update arrow's version to be great than 1.2 lock aiohttp's version to >3.8, <4.0> 2.0.0-alpha.35 (2022-07-13) \u00b6 fix issue in security exit date comparison, Security.eval(). 2.0.0-alpha.34 (2022-07-13) \u00b6 change to sync call for Security.select() date parameter of Security.select(): if date >= today, it will use the data in cache, otherwise, query from database. 0.3.1 (2020-12-11) \u00b6 this version introduced no features, just a internal amendment release, we're migrating to poetry build system. 0.3.0 (2020-11-22) \u00b6 Calendar, Triggers and time frame calculation Security list Bars with turnover Valuation 0.1.0 (2020-04-28) \u00b6 First release on PyPI.","title":"\u7248\u672c\u5386\u53f2"},{"location":"history/#history","text":"","title":"History"},{"location":"history/#_1","text":"","title":""},{"location":"history/#200-alpha78","text":"backtest\u4e2d\u6355\u83b7\u5f02\u5e38\u65f6\uff0c\u5982\u679c\u662fTradeError\u7c7b\u578b\uff0c\u6253\u5370\u8be5\u5bf9\u8c61\u81ea\u5e26\u7684stack","title":"2.0.0-alpha78"},{"location":"history/#200-alpha77","text":"strategy\u589e\u52a0lifecycle \u4fdd\u7559\u6700\u540e\u4e00\u4e2a\u56de\u6d4b\u5468\u671f\u4ec5\u4f9b\u4ea4\u6613\u4f7f\u7528\uff0c\u4e0d\u8c03\u7528 predict Security\u83b7\u53d6\u80a1\u7968\u5217\u8868\u65f6\uff0c\u5982\u679c\u4e0d\u8c03\u7528 types \uff0c\u5c06\u83b7\u53d6\u80a1\u7968\u5217\u8868\uff0c\u8c03\u7528 types() \u4e0d\u4f20\u53c2\u6570\u5c06\u83b7\u53d6\u5e26\u6307\u6570\u3001\u80a1\u7968\u7684\u5217\u8868\u3002","title":"2.0.0-alpha77"},{"location":"history/#200-alpha76","text":"\u589e\u52a0backtestlog\u6a21\u5757\uff0c\u7528\u4e8e\u8f93\u51fa\u56de\u6d4b\u65e5\u5fd7\u65f6\uff0c\u5c06\u65f6\u95f4\u66ff\u6362\u4e3a\u56de\u6d4b\u65f6\u95f4 \u589e\u52a0\u884c\u60c5\u9884\u53d6\u529f\u80fd \u589e\u52a0\u56de\u6d4b\u62a5\u544a\u4e2d\u7ed8\u5236\u81ea\u5b9a\u4e49\u6307\u6807\u529f\u80fd\uff08\u4ec5\u652f\u6301Scatter)","title":"2.0.0-alpha76"},{"location":"history/#200-alpha69","text":"BaseStrategy\u589e\u52a0 available_shares \u65b9\u6cd5","title":"2.0.0-alpha.69"},{"location":"history/#200-alpha68","text":"\u589e\u52a0\u4e86MetricsGraph \u589e\u52a0Strategy\u57fa\u7c7b Candlestick\u589e\u52a0\u4e86\u5e03\u6797\u5e26\u6307\u6807","title":"2.0.0-alpha.68"},{"location":"history/#200-alpha49-2022-09-16","text":"\u4fee\u8ba2\u4e86\u5b89\u88c5\u6587\u6863\u3002 \u79fb\u9664\u4e86windows\u4e0b\u5bf9ta-lib\u7684\u4f9d\u8d56\u3002\u8bf7\u53c2\u8003 \u5b89\u88c5\u6307\u5357 \u4ee5\u83b7\u53d6\u5728windows\u4e0b\u5b89\u88c5ta-lib\u7684\u65b9\u6cd5\u3002 \u66f4\u65b0\u4e86poetry.lock\u6587\u4ef6\u3002\u5728\u4e0a\u4e00\u7248\u4e2d\uff0c\u8be5\u6587\u4ef6\u4e0epyproject.toml\u4e0d\u540c\u6b65\uff0c\u5bfc\u81f4\u5b89\u88c5\u65f6\u8fdb\u884c\u7248\u672c\u9501\u5b9a\uff0c\u5ef6\u957f\u4e86\u5b89\u88c5\u65f6\u95f4\u3002 \u4fee\u590d\u4e86k\u7ebf\u56fe\u6807\u8bb0\u9876\u548c\u5e95\u65f6\uff0c\u6807\u8bb0\u79bb\u88ab\u6807\u6ce8\u7684\u70b9\u592a\u8fdc\u7684\u95ee\u9898\u3002","title":"2.0.0-alpha.49 (2022-09-16)"},{"location":"history/#200-alpha46-2022-09-10","text":"#40 \u589e\u52a0k\u7ebf\u56fe\u7ed8\u5236\u529f\u80fd\u3002 \u672c\u6b21\u4fee\u8ba2\u589e\u52a0\u4e86\u5bf9plotly, ckwrap, ta-lib\u7684\u4f9d\u8d56\u3002 \u5c06\u539f\u5c5e\u4e8eomicron.talib\u5305\u4e2d\u7684bars_since, find_runs\u7b49\u8ddf\u6570\u7ec4\u76f8\u5173\u7684\u64cd\u4f5c\uff0c\u79fb\u5165omicron.extensions.np\u4e2d\u3002","title":"2.0.0-alpha.46 
(2022-09-10)"},{"location":"history/#200-alpha45-2022-09-08","text":"#39 fixed. removed dependency of postgres removed funds update arrow's version to be great than 1.2 lock aiohttp's version to >3.8, <4.0>","title":"2.0.0-alpha.45 (2022-09-08)"},{"location":"history/#200-alpha35-2022-07-13","text":"fix issue in security exit date comparison, Security.eval().","title":"2.0.0-alpha.35 (2022-07-13)"},{"location":"history/#200-alpha34-2022-07-13","text":"change to sync call for Security.select() date parameter of Security.select(): if date >= today, it will use the data in cache, otherwise, query from database.","title":"2.0.0-alpha.34 (2022-07-13)"},{"location":"history/#031-2020-12-11","text":"this version introduced no features, just a internal amendment release, we're migrating to poetry build system.","title":"0.3.1 (2020-12-11)"},{"location":"history/#030-2020-11-22","text":"Calendar, Triggers and time frame calculation Security list Bars with turnover Valuation","title":"0.3.0 (2020-11-22)"},{"location":"history/#010-2020-04-28","text":"First release on PyPI.","title":"0.1.0 (2020-04-28)"},{"location":"installation/","text":"1. \u5b89\u88c5 \u00b6 \u8981\u4f7f\u7528Omicron\u6765\u83b7\u53d6\u884c\u60c5\u6570\u636e\uff0c\u8bf7\u5148\u5b89\u88c5 Omega \uff0c\u5e76\u6309\u8bf4\u660e\u6587\u6863\u8981\u6c42\u5b8c\u6210\u521d\u59cb\u5316\u914d\u7f6e\u3002 \u7136\u540e\u5728\u5f00\u53d1\u673a\u4e0a\uff0c\u8fd0\u884c\u4e0b\u9762\u7684\u547d\u4ee4\u5b89\u88c5Omicron: 1 pip install zillionare-omicron omicron\u4f9d\u8d56numpy, pandas, scipy, sklearn\u3002\u8fd9\u4e9b\u5e93\u7684\u4f53\u79ef\u6bd4\u8f83\u5927\uff0c\u56e0\u6b64\u5728\u5b89\u88c5omicron\u65f6\uff0c\u8bf7\u4fdd\u6301\u7f51\u7edc\u8fde\u63a5\u7545\u901a\uff0c\u5fc5\u8981\u65f6\uff0c\u8bf7\u6dfb\u52a0\u963f\u91cc\u6216\u8005\u6e05\u534e\u7684PyPI\u955c\u50cf\u3002 omicron\u8fd8\u4f9d\u8d56\u4e8etalib, zigzag, ciso8601\u7b49\u9ad8\u6027\u80fd\u7684C/C++\u5e93\u3002\u5b89\u88c5\u8fd9\u4e9b\u5e93\u5f80\u5f80\u9700\u8981\u5728\u60a8\u672c\u673a\u6267\u884c\u4e00\u4e2a\u7f16\u8bd1\u8fc7\u7a0b\u3002\u8bf7\u9075\u5faa\u4ee5\u4e0b\u6b65\u9aa4\u5b8c\u6210\uff1a \u5b89\u88c5\u539f\u751f\u5e93 Windows Linux MacOS \u6ce8\u610f\u6211\u4eec\u4e0d\u652f\u630132\u4f4dwindows \u8bf7\u8ddf\u968f windows\u4e0b\u5b89\u88c5omicron \u6765\u5b8c\u6210\u5b89\u88c5\u3002 \u8bf7\u6267\u884c\u4e0b\u9762\u7684\u811a\u672c\u4ee5\u5b8c\u6210ta-lib\u7684\u5b89\u88c5 1 2 3 4 5 6 7 sudo apt update && sudo apt upgrade -y && sudo apt autoremove -y sudo apt-get install build-essential -y curl -L http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz | tar -xzv -C /tmp/ cd /tmp/ta-lib ./configure --prefix = /usr make sudo make install \u73b0\u5728\u5b89\u88c5omicron\uff0c\u6240\u6709\u5176\u5b83\u4f9d\u8d56\u7684\u5b89\u88c5\u5c06\u81ea\u52a8\u5b8c\u6210\u3002 \u8bf7\u901a\u8fc7 brew install ta-lib \u6765\u5b8c\u6210ta-lib\u7684\u5b89\u88c5 \u73b0\u5728\u5b89\u88c5omicron\uff0c\u6240\u6709\u5176\u5b83\u4f9d\u8d56\u7684\u5b89\u88c5\u90fd\u5c06\u81ea\u52a8\u5b8c\u6210\u3002 2. 
\u5e38\u89c1\u95ee\u9898 \u00b6 \u65e0\u6cd5\u8bbf\u95eeaka.ms \u00b6 \u5982\u679c\u9047\u5230aka.ms\u65e0\u6cd5\u8bbf\u95ee\u7684\u95ee\u9898\uff0c\u6709\u53ef\u80fd\u662fIP\u5730\u5740\u89e3\u6790\u7684\u95ee\u9898\u3002\u8bf7\u4ee5\u7ba1\u7406\u5458\u6743\u9650\uff0c\u6253\u5f00\u5e76\u7f16\u8f91\u4f4d\u4e8ec:\\windows\\system32\\drivers\\etc\\\u4e0b\u7684hosts\u6587\u4ef6\uff0c\u5c06\u6b64\u884c\u52a0\u5165\u5230\u6587\u4ef6\u4e2d\uff1a 1 23.41.86.106 aka.ms","title":"\u5b89\u88c5"},{"location":"installation/#1-\u5b89\u88c5","text":"\u8981\u4f7f\u7528Omicron\u6765\u83b7\u53d6\u884c\u60c5\u6570\u636e\uff0c\u8bf7\u5148\u5b89\u88c5 Omega \uff0c\u5e76\u6309\u8bf4\u660e\u6587\u6863\u8981\u6c42\u5b8c\u6210\u521d\u59cb\u5316\u914d\u7f6e\u3002 \u7136\u540e\u5728\u5f00\u53d1\u673a\u4e0a\uff0c\u8fd0\u884c\u4e0b\u9762\u7684\u547d\u4ee4\u5b89\u88c5Omicron: 1 pip install zillionare-omicron omicron\u4f9d\u8d56numpy, pandas, scipy, sklearn\u3002\u8fd9\u4e9b\u5e93\u7684\u4f53\u79ef\u6bd4\u8f83\u5927\uff0c\u56e0\u6b64\u5728\u5b89\u88c5omicron\u65f6\uff0c\u8bf7\u4fdd\u6301\u7f51\u7edc\u8fde\u63a5\u7545\u901a\uff0c\u5fc5\u8981\u65f6\uff0c\u8bf7\u6dfb\u52a0\u963f\u91cc\u6216\u8005\u6e05\u534e\u7684PyPI\u955c\u50cf\u3002 omicron\u8fd8\u4f9d\u8d56\u4e8etalib, zigzag, ciso8601\u7b49\u9ad8\u6027\u80fd\u7684C/C++\u5e93\u3002\u5b89\u88c5\u8fd9\u4e9b\u5e93\u5f80\u5f80\u9700\u8981\u5728\u60a8\u672c\u673a\u6267\u884c\u4e00\u4e2a\u7f16\u8bd1\u8fc7\u7a0b\u3002\u8bf7\u9075\u5faa\u4ee5\u4e0b\u6b65\u9aa4\u5b8c\u6210\uff1a \u5b89\u88c5\u539f\u751f\u5e93 Windows Linux MacOS \u6ce8\u610f\u6211\u4eec\u4e0d\u652f\u630132\u4f4dwindows \u8bf7\u8ddf\u968f windows\u4e0b\u5b89\u88c5omicron \u6765\u5b8c\u6210\u5b89\u88c5\u3002 \u8bf7\u6267\u884c\u4e0b\u9762\u7684\u811a\u672c\u4ee5\u5b8c\u6210ta-lib\u7684\u5b89\u88c5 1 2 3 4 5 6 7 sudo apt update && sudo apt upgrade -y && sudo apt autoremove -y sudo apt-get install build-essential -y curl -L http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz | tar -xzv -C /tmp/ cd /tmp/ta-lib ./configure --prefix = /usr make sudo make install \u73b0\u5728\u5b89\u88c5omicron\uff0c\u6240\u6709\u5176\u5b83\u4f9d\u8d56\u7684\u5b89\u88c5\u5c06\u81ea\u52a8\u5b8c\u6210\u3002 \u8bf7\u901a\u8fc7 brew install ta-lib \u6765\u5b8c\u6210ta-lib\u7684\u5b89\u88c5 \u73b0\u5728\u5b89\u88c5omicron\uff0c\u6240\u6709\u5176\u5b83\u4f9d\u8d56\u7684\u5b89\u88c5\u90fd\u5c06\u81ea\u52a8\u5b8c\u6210\u3002","title":"1. \u5b89\u88c5"},{"location":"installation/#2-\u5e38\u89c1\u95ee\u9898","text":"","title":"2. \u5e38\u89c1\u95ee\u9898"},{"location":"installation/#\u65e0\u6cd5\u8bbf\u95eeakams","text":"\u5982\u679c\u9047\u5230aka.ms\u65e0\u6cd5\u8bbf\u95ee\u7684\u95ee\u9898\uff0c\u6709\u53ef\u80fd\u662fIP\u5730\u5740\u89e3\u6790\u7684\u95ee\u9898\u3002\u8bf7\u4ee5\u7ba1\u7406\u5458\u6743\u9650\uff0c\u6253\u5f00\u5e76\u7f16\u8f91\u4f4d\u4e8ec:\\windows\\system32\\drivers\\etc\\\u4e0b\u7684hosts\u6587\u4ef6\uff0c\u5c06\u6b64\u884c\u52a0\u5165\u5230\u6587\u4ef6\u4e2d\uff1a 1 23.41.86.106 aka.ms","title":"\u65e0\u6cd5\u8bbf\u95eeaka.ms"},{"location":"usage/","text":"1. 
\u914d\u7f6e\u3001\u521d\u59cb\u5316\u548c\u5173\u95ed OMICRON \u00b6 Omicron \u4f9d\u8d56\u4e8e zillionare-omega \u670d\u52a1\u6765\u83b7\u53d6\u6570\u636e\u3002\u4f46\u5b83\u5e76\u4e0d\u76f4\u63a5\u4e0e Omega \u670d\u52a1\u901a\u8baf\uff0c\u76f8\u53cd\uff0c\u5b83\u76f4\u63a5\u8bfb\u53d6 Omega \u670d\u52a1\u5668\u4f1a\u5199\u5165\u6570\u636e\u7684 Influxdb \u548c redis \u6570\u636e\u5e93\u3002\u56e0\u6b64\uff0c\u5728\u4f7f\u7528 Omicron \u4e4b\u524d\uff0c\u6211\u4eec\u9700\u8981\u63d0\u4f9b\u8fd9\u4e24\u4e2a\u670d\u52a1\u5668\u7684\u8fde\u63a5\u5730\u5740\uff0c\u5e76\u8fdb\u884c\u521d\u59cb\u5316\u3002 1.1. \u914d\u7f6e\u548c\u521d\u59cb\u5316 \u00b6 Omicron \u4f7f\u7528 cfg4py \u6765\u7ba1\u7406\u914d\u7f6e\u3002 cfg4py \u4f7f\u7528 yaml \u6587\u4ef6\u6765\u4fdd\u5b58\u914d\u7f6e\u9879\u3002\u5728\u4f7f\u7528 cfg4py \u4e4b\u524d\uff0c\u60a8\u9700\u8981\u5728\u67d0\u5904\u521d\u59cb\u5316 cfg4py\uff0c\u7136\u540e\u518d\u521d\u59cb\u5316 omicron: Tip \u4e3a\u4e86\u7b80\u6d01\u8d77\u89c1\uff0c\u6211\u4eec\u5728\u9876\u5c42\u4ee3\u7801\u4e2d\u76f4\u63a5\u4f7f\u7528\u4e86 async/await\u3002\u901a\u5e38\uff0c\u8fd9\u4e9b\u4ee3\u7801\u80fd\u591f\u76f4\u63a5\u5728 notebook \u4e2d\u8fd0\u884c\uff0c\u4f46\u5982\u679c\u9700\u8981\u5728\u666e\u901a\u7684 python \u811a\u672c\u4e2d\u8fd0\u884c\u8fd9\u4e9b\u4ee3\u7801\uff0c\u60a8\u901a\u5e38\u9700\u8981\u5c06\u5176\u5c01\u88c5\u5230\u4e00\u4e2a\u5f02\u6b65\u51fd\u6570\u4e2d\uff0c\u518d\u901a\u8fc7 asyncio.run \u6765\u8fd0\u884c\u5b83\u3002 1 2 3 4 5 6 7 8 9 import asyncio import cfg4py import omicron async def main (): cfg4py . init ( 'path/to/your/config/dir' ) await omicron . init () # DO YOUR GREAT JOB WITH OMICRON asyncio . run ( main ()) 1 2 3 4 5 import cfg4py import omicron cfg4py . init ( 'path/to/your/config/dir' ) await omicron . init () \u6ce8\u610f\u521d\u59cb\u5316 cfg4py \u65f6\uff0c\u9700\u8981\u63d0\u4f9b\u5305\u542b\u914d\u7f6e\u6587\u4ef6\u7684 \u6587\u4ef6\u5939 \u7684\u8def\u5f84\uff0c\u800c \u4e0d\u662f\u914d\u7f6e\u6587\u4ef6 \u7684\u8def\u5f84\u3002\u914d\u7f6e\u6587\u4ef6\u540d\u5fc5\u987b\u4e3a defaults.yml\u3002 \u60a8\u81f3\u5c11\u5e94\u8be5\u4e3a omicron \u914d\u7f6e Redis \u8fde\u63a5\u4e32\u548c influxdb \u8fde\u63a5\u4e32\u3002\u4e0b\u9762\u662f\u5e38\u7528\u914d\u7f6e\u793a\u4f8b\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 # DEFAULTS.YAML redis : dsn : redis://${REDIS_HOST}:${REDIS_PORT} influxdb : url : http://${INFLUXDB_HOST}:${INFLUXDB_PORT} token : ${INFLUXDB_TOKEN} org : ${INFLUXDB_ORG} bucket_name : ${INFLUXDB_BUCKET_NAME} enable_compress : true max_query_size : 150000 notify : mail_from : ${MAIL_FROM} mail_to : - ${MAIL_TO} mail_server : ${MAIL_SERVER} dingtalk_access_token : ${DINGTALK_ACCESS_TOKEN} dingtalk_secret : ${DINGTALK_SECRET} \u8bf7\u6839\u636e\u60a8\u5b9e\u9645\u73af\u5883\u914d\u7f6e\u6765\u66f4\u6539\u4e0a\u8ff0\u6587\u4ef6\u3002\u4e0a\u8ff0\u914d\u7f6e\u4e2d\uff0c${{REDIS_HOST}}\u610f\u5473\u7740\u73af\u5883\u53d8\u91cf\u3002\u5982\u679c\u662f windows\uff0c\u60a8\u9700\u8981\u5728\u7cfb\u7edf > \u73af\u5883\u53d8\u91cf\u4e2d\u8fdb\u884c\u8bbe\u7f6e\u3002\u5982\u679c\u662f Linux \u6216\u8005 Mac\uff0c\u60a8\u9700\u8981\u4fee\u6539.bashrc\uff0c\u4f8b\u5982\uff1a 1 export REDIS_HOST=localhost 1.2. \u5173\u95ed omicron \u00b6 \u5728\u60a8\u7684\u8fdb\u7a0b\u5373\u5c06\u9000\u51fa\u4e4b\u524d\uff0c\u8bf7\u8bb0\u5f97\u5173\u95ed omicron\u3002\u5982\u679c\u60a8\u662f\u5728 notebook \u4e2d\u4f7f\u7528 omicron, \u5219\u53ef\u4ee5\u5ffd\u7565\u6b64\u6b65\u805a\u3002 1 await omicron . close () 2. 
\u6570\u636e\u8bfb\u53d6 \u00b6 2.1. \u8bc1\u5238\u5217\u8868 \u00b6 Security \u548c Query \u63d0\u4f9b\u4e86\u8bc1\u5238\u5217\u8868\u548c\u67e5\u8be2\u64cd\u4f5c\u3002\u67e5\u8be2\u88ab\u8bbe\u8ba1\u6210\u4e3a\u94fe\u5f0f API\u3002\u901a\u5e38\uff0c\u6211\u4eec\u901a\u8fc7\u8c03\u7528 Security.select() \u6765\u751f\u6210\u4e00\u4e2a Query \u5bf9\u8c61\uff0c\u7136\u540e\u53ef\u4ee5\u9488\u5bf9\u6b64\u5bf9\u8c61\uff0c\u8fdb\u884c\u5404\u79cd\u8fc7\u67e5\u8be2\u8fc7\u6ee4\uff0c\u6700\u540e\uff0c\u6211\u4eec\u8c03\u7528 query.eval() \u65b9\u6cd5\u7ed3\u675f\u94fe\u5f0f\u8c03\u7528\uff0c\u5e76\u8fd4\u56de\u7ed3\u679c\u3002 2.1.1. \u67e5\u8be2\u6240\u6709\u8bc1\u5238\u4ee3\u7801 \u00b6 \u60a8\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u65b9\u6cd5\u6765\u83b7\u53d6\u67d0\u4e00\u5929\u7684\u8bc1\u5238\u5217\u8868\uff1a 1 2 3 4 5 6 7 # 4. ASSUME YOU HAVE OMICRON INIT dt = datetime . date ( 2022 , 5 , 20 ) query = Security . select ( dt ) codes = await query . eval () print ( codes ) # THE OUTPUTS IS LIKE [\"000001.XSHE\", \"000004.XSHE\", ...] \u8fd9\u91cc\u7684 dt \u5982\u679c\u6ca1\u6709\u63d0\u4f9b\u7684\u8bdd\uff0c\u5c06\u4f7f\u7528\u6700\u65b0\u7684\u8bc1\u5238\u5217\u8868\u3002\u4f46\u5728\u56de\u6d4b\u4e2d\uff0c\u60a8\u901a\u5e38\u4e0d\u540c\u65f6\u95f4\u7684\u8bc1\u5238\u5217\u8868\uff0c\u56e0\u6b64\uff0c dt \u5728\u8fd9\u79cd\u60c5\u51b5\u4e0b\u662f\u5fc5\u987b\u7684\uff0c\u5426\u5219\uff0c\u60a8\u5c06\u5f15\u5165\u672a\u6765\u6570\u636e\u3002 2.1.2. \u8fd4\u56de\u6240\u6709\u80a1\u7968\u6216\u8005\u6307\u6570 \u00b6 1 2 3 query = Security . select ( dt ) codes = await query . types ([ \"stock\" ]) . eval () print ( codes ) 2.1.3. \u6392\u9664\u67d0\u79cd\u80a1\u7968\uff08\u8bc1\u5238\uff09 \u00b6 1 2 3 query = Security . select ( dt ) codes = await query . exclude_st () . exclude_kcb () . exclude_cyb () . eval () print ( codes ) 2.1.4. \u5982\u679c\u53ea\u8981\u6c42\u67d0\u79cd\u80a1\u7968\uff08\u8bc1\u5238\uff09 \u00b6 1 2 3 4 query = Security . select ( dt ) codes = await query . only_kcb () . only_st () . only_cyb () . eval () print ( codes ) #\u5f97\u5230\u7a7a\u5217\u8868 2.1.5. \u6309\u522b\u540d\u8fdb\u884c\u6a21\u7cca\u67e5\u8be2 \u00b6 A \u80a1\u7684\u8bc1\u5238\u5728\u6807\u8bc6\u4e0a\uff0c\u4e00\u822c\u6709\u4ee3\u7801\uff08code \u6216\u8005 symbol)\u3001\u62fc\u97f3\u7b80\u5199 (name) \u548c\u6c49\u5b57\u8868\u793a\u540d (display_name) \u4e09\u79cd\u6807\u8bc6\u3002\u6bd4\u5982\u4e2d\u56fd\u5e73\u5b89\uff0c\u5176\u4ee3\u7801\u4e3a 601318.XSHG; \u5176\u62fc\u97f3\u7b80\u5199\u4e3a ZGPA\uff1b\u800c\u4e2d\u56fd\u5e73\u5b89\u88ab\u79f0\u4e3a\u5b83\u7684\u522b\u540d ( alias )\u3002 \u5982\u679c\u8981\u67e5\u8be2\u6240\u6709\u4e2d\u5b57\u5934\u7684\u80a1\u7968\uff1a 1 2 3 query = Security . select ( dt ) codes = await query . alias_like ( \"\u4e2d\" ) . eval () print ( codes ) 2.1.6. \u901a\u8fc7\u4ee3\u7801\u67e5\u8be2\u5176\u5b83\u4fe1\u606f \u00b6 \u901a\u8fc7\u524d\u9762\u7684\u67e5\u8be2\u6211\u4eec\u53ef\u4ee5\u5f97\u5230\u4e00\u4e2a\u8bc1\u5238\u5217\u8868\uff0c\u5982\u679c\u8981\u5f97\u5230\u5177\u4f53\u7684\u4fe1\u606f\uff0c\u53ef\u4ee5\u901a\u8fc7 info \u63a5\u53e3\u6765\u67e5\u8be2\uff1a 1 2 3 dt = datetime . date ( 2022 , 5 , 20 ) info = await Security . info ( \"688001.XSHG\" , dt ) print ( info ) \u8f93\u51fa\u4e3a\uff1a 1 2 3 4 5 6 7 8 { ' t ype' : 's t ock' , 'display_ na me' : '\u534e\u5174\u6e90\u521b' , 'alias' : '\u534e\u5174\u6e90\u521b' , 'e n d' : da tet ime.da te ( 2200 , 1 , 1 ) , 's tart ' : da tet ime.da te ( 2019 , 7 , 22 ) , ' na me' : 'HXYC' } 2.2. 
\u4ea4\u6613\u65e5\u5386\u53ca\u65f6\u95f4\u5e27\u8ba1\u7b97 \u00b6 Omicron \u4e0d\u4ec5\u63d0\u4f9b\u4e86\u4ea4\u6613\u65e5\u5386\uff0c\u4e0e\u5176\u5b83\u91cf\u5316\u6846\u67b6\u76f8\u6bd4\uff0c\u6211\u4eec\u8fd8\u63d0\u4f9b\u4e86\u4e30\u5bcc\u7684\u4e0e\u65f6\u95f4\u76f8\u5173\u7684\u8fd0\u7b97\u64cd\u4f5c\u3002\u8fd9\u4e9b\u64cd\u4f5c\u90fd\u6709\u8be6\u7ec6\u7684\u6587\u6863\u548c\u793a\u4f8b\uff0c\u60a8\u53ef\u4ee5\u901a\u8fc7 TimeFrame \u6765\u8fdb\u4e00\u6b65\u9605\u8bfb\u3002 omicron \u4e2d\uff0c\u5e38\u5e38\u4f1a\u9047\u5230\u65f6\u95f4\u5e27 (Time Frame) \u8fd9\u4e2a\u6982\u5ff5\u3002\u56e0\u4e3a\u884c\u60c5\u6570\u636e\u90fd\u662f\u6309\u4e00\u5b9a\u7684\u65f6\u95f4\u957f\u5ea6\u7ec4\u7ec7\u7684\uff0c\u6bd4\u5982 5 \u5206\u949f\uff0c1 \u5929\uff0c\u7b49\u7b49\u3002\u56e0\u6b64\uff0c\u5728 omicron \u4e2d\uff0c\u6211\u4eec\u7ecf\u5e38\u4f7f\u7528\u67d0\u4e2a\u65f6\u95f4\u7247\u7ed3\u675f\u7684\u65f6\u95f4\uff0c\u6765\u6807\u8bc6\u8fd9\u4e2a\u65f6\u95f4\u7247\uff0c\u5e76\u5c06\u5176\u79f0\u4e4b\u4e3a\u5e27 (Time Frame)\u3002 omicron \u4e2d\uff0c\u6211\u4eec\u652f\u6301\u7684\u65f6\u95f4\u5e27\u5305\u62ec\u65e5\u5185\u7684\u5206\u949f\u5e27 (FrameType.MIN1), 5 \u5206\u949f\u5e27 (FrameType.MIN5), 15 \u5206\u949f\u5e27\u300130 \u5206\u949f\u5e27\u548c 60 \u5206\u949f\u5e27\uff0c\u4ee5\u53ca\u65e5\u7ebf\u7ea7\u522b\u7684 FrameType.DAY, FrameType.WEEK \u7b49\u3002\u5173\u4e8e\u8be6\u7ec6\u7684\u7c7b\u578b\u8bf4\u660e\uff0c\u8bf7\u53c2\u89c1 coretypes omicron \u63d0\u4f9b\u7684\u4ea4\u6613\u65e5\u5386\u8d77\u59cb\u4e8e 2005 \u5e74 1 \u6708 4 \u65e5\u3002\u63d0\u4f9b\u7684\u884c\u60c5\u6570\u636e\uff0c\u6700\u65e9\u4ece\u8fd9\u4e00\u5929\u8d77\u3002 \u5927\u81f4\u4e0a\uff0comicron \u63d0\u4f9b\u4e86\u4ee5\u4e0b\u65f6\u95f4\u5e27\u64cd\u4f5c\uff1a 2.2.1. \u4ea4\u6613\u65f6\u95f4\u7684\u504f\u79fb \u00b6 \u5982\u679c\u4eca\u5929\u662f 2022 \u5e74 5 \u6708 20 \u65e5\uff0c\u60a8\u60f3\u5f97\u5230 100 \u5929\u524d\u7684\u4ea4\u6613\u65e5\uff0c\u5219\u53ef\u4ee5\u4f7f\u7528 day_shift: 1 2 3 4 from omicron import tf dt = datetime . date ( 2022 , 5 , 20 ) tf . day_shift ( dt , - 100 ) \u8f93\u51fa\u662f datetime.date(2021, 12, 16)\u3002\u5728\u8fd9\u91cc\uff0cday_shift \u7684\u7b2c\u4e8c\u4e2a\u53c2\u6570 n \u662f\u504f\u79fb\u91cf\uff0c\u5f53\u5b83\u5c0f\u4e8e\u96f6\u65f6\uff0c\u662f\u627e dt \u524d n \u4e2a\u4ea4\u6613\u65e5\uff1b\u5f53\u5b83\u5927\u4e8e\u96f6\u65f6\uff0c\u662f\u627e dt \u4e4b\u540e\u7684 n \u4e2a\u4ea4\u6613\u65e5\u3002 \u6bd4\u5982\u6709\u610f\u601d\u7684\u662f n == 0 \u7684\u65f6\u5019\u3002\u5bf9\u4e0a\u8ff0 dt \uff0cday_shift(dt, 0) \u5f97\u5230\u7684\u4ecd\u7136\u662f\u540c\u4e00\u5929\uff0c\u4f46\u5982\u679c dt \u662f 2022 \u5e74 5 \u6708 21 \u65e5\u662f\u5468\u516d\uff0c\u5219 day_shift(datetime.date(2022, 5, 21)) \u5c06\u8fd4\u56de 2022 \u5e74 5 \u6708 20 \u65e5\u3002\u56e0\u4e3a 5 \u6708 21 \u65e5\u8fd9\u4e00\u5929\u662f\u5468\u516d\uff0c\u4e0d\u662f\u4ea4\u6613\u65e5\uff0cday_shift \u5c06\u8fd4\u56de\u5176\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff0c\u8fd9\u5728\u591a\u6570\u60c5\u51b5\u4e0b\u4f1a\u975e\u5e38\u65b9\u4fbf\u3002 \u9664\u4e86 day_shift \u5916\uff0ctimeframe \u8fd8\u63d0\u4f9b\u4e86\u7c7b\u4f3c\u51fd\u6570\u6bd4\u5982 week_shift \u7b49\u3002\u4e00\u822c\u5730\uff0c\u60a8\u53ef\u4ee5\u7528 shift(dt, n, frame_type) \u6765\u5bf9\u4efb\u610f\u652f\u6301\u7684\u65f6\u95f4\u8fdb\u884c\u504f\u79fb\u3002 2.2.2. 
\u8fb9\u754c\u64cd\u4f5c ceiling \u548c floor \u00b6 \u5f88\u591a\u65f6\u5019\u6211\u4eec\u9700\u8981\u77e5\u9053\u5177\u4f53\u7684\u67d0\u4e2a\u65f6\u95f4\u70b9 (moment) \u6240\u5c5e\u7684\u5e27\u3002\u5982\u679c\u8981\u53d6\u5176\u4e0a\u4e00\u5e27\uff0c\u5219\u53ef\u4ee5\u7528 floor \u64cd\u4f5c\uff0c\u53cd\u4e4b\uff0c\u4f7f\u7528 ceiling\u3002 1 2 tf . ceiling ( datetime . date ( 2005 , 1 , 4 ), FrameType . WEEK ) # OUTPUT IS DATETIME.DATE(2005, 1, 7) 2.2.3. \u65f6\u95f4\u8f6c\u6362 \u00b6 \u4e3a\u4e86\u52a0\u5feb\u901f\u5ea6\uff0c\u4ee5\u53ca\u65b9\u4fbf\u6301\u4e45\u5316\u5b58\u50a8\uff0c\u5728 timeframe \u5185\u90e8\uff0c\u6709\u65f6\u5019\u4f7f\u7528\u6574\u6570\u6765\u8868\u793a\u65f6\u95f4\u3002\u6bd4\u5982 20220502 \u8868\u793a\u7684\u662f 2022 \u5e74 5 \u6708 20 \u65e5\uff0c\u800c 202205220931 \u5219\u8868\u793a 2022 \u5e74 5 \u6708 20 \u65e5 9 \u65f6 31 \u5206\u949f\u3002 \u8fd9\u79cd\u8868\u793a\u6cd5\uff0c\u6709\u65f6\u5019\u8981\u6c42\u6211\u4eec\u8fdb\u884c\u4e00\u4e9b\u8f6c\u6362\uff1a 1 2 3 4 5 6 7 8 9 10 # \u5c06\u6574\u6570\u8868\u793a\u7684\u65e5\u671f\u8f6c\u6362\u4e3a\u65e5\u671f tf . int2date ( 20220522 ) # datetime.date(2022, 5, 22) # \u5c06\u6574\u6570\u8868\u793a\u7684\u65f6\u95f4\u8f6c\u6362\u4e3a\u65f6\u95f4 tf . int2time ( 202205220931 ) # datetime.datetime(2022, 5, 22, 9, 31) # \u5c06\u65e5\u671f\u8f6c\u6362\u6210\u4e3a\u6574\u6570 tf . date2int ( datetime . date ( 2022 , 5 , 22 )) # 20220520 # \u5c06\u65f6\u95f4\u8f6c\u6362\u6210\u4e3a\u65f6\u95f4 tf . date2time ( datetime . datetime ( 2022 , 5 , 22 , 9 , 21 )) # 202205220921 2.2.4. \u5217\u51fa\u533a\u95f4\u5185\u7684\u6240\u6709\u65f6\u95f4\u5e27 \u00b6 \u6709\u65f6\u5019\u6211\u4eec\u9700\u8981\u5f97\u5230 start \u548c end \u4e4b\u95f4\u67d0\u4e2a\u65f6\u95f4\u5e27\u7c7b\u578b\u7684\u6240\u6709\u65f6\u95f4\u5e27\uff1a 1 2 3 4 start = arrow . get ( '2020-1-13 10:00' ) . naive end = arrow . get ( '2020-1-13 13:30' ) . naive tf . get_frames ( start , end , FrameType . MIN30 ) [ 202001131000 , 202001131030 , 202001131100 , 202001131130 , 202001131330 ] Important \u4e0a\u9762\u7684\u793a\u4f8b\u4e2d\uff0c\u51fa\u73b0\u4e86\u53ef\u80fd\u60a8\u4e0d\u592a\u719f\u6089\u7684 naive \u5c5e\u6027\u3002\u5b83\u6307\u7684\u662f\u53d6\u4e0d\u5e26\u65f6\u533a\u7684\u65f6\u95f4\u3002\u5728 python \u4e2d\uff0c\u65f6\u95f4\u53ef\u4ee5\u5e26\u65f6\u533a\uff08timezone-aware) \u548c\u4e0d\u5e26\u65f6\u533a (naive)\u3002 \u5982\u679c\u60a8\u4f7f\u7528 datetime.datetime(2022, 5, 20)\uff0c\u5b83\u5c31\u662f\u4e0d\u5e26\u65f6\u533a\u7684\uff0c\u9664\u975e\u60a8\u4e13\u95e8\u6307\u5b9a\u65f6\u533a\u3002 \u5728 omicron \u4e2d\uff0c\u6211\u4eec\u5728\u7edd\u5927\u591a\u6570\u60c5\u51b5\u4e0b\uff0c\u4ec5\u4f7f\u7528 naive \u8868\u793a\u7684\u65f6\u95f4\uff0c\u5373\u4e0d\u5e26\u65f6\u533a\uff0c\u5e76\u4e14\u5047\u5b9a\u65f6\u533a\u4e3a\u4e1c\u516b\u533a\uff08\u5373\u5317\u4eac\u65f6\u95f4\uff09\u3002 \u5982\u679c\u60a8\u53ea\u77e5\u9053\u7ed3\u675f\u65f6\u95f4\uff0c\u9700\u8981\u5411\u524d\u53d6 n \u4e2a\u65f6\u95f4\u5e27\uff0c\u5219\u53ef\u4ee5\u4f7f\u7528 get_frames_by_count \u3002 \u5982\u679c\u60a8\u53ea\u662f\u9700\u8981\u77e5\u9053\u5728 start \u548c end \u4e4b\u95f4\uff0c\u603b\u5171\u6709\u591a\u5c11\u4e2a\u5e27\uff0c\u8bf7\u4f7f\u7528 count_frames : 1 2 3 start = datetime . date ( 2019 , 12 , 21 ) end = datetime . date ( 2019 , 12 , 21 ) tf . count_frames ( start , end , FrameType . 
DAY ) \u8f93\u51fa\u5c06\u662f 1\u3002\u4e0a\u8ff0\u65b9\u6cd5\u8fd8\u6709\u4e00\u4e2a\u5feb\u6377\u65b9\u6cd5\uff0c\u5373 count_day_frames \uff0c\u5e76\u4e14\uff0c\u5bf9 week, month, quaters \u4e5f\u662f\u4e00\u6837\u3002 2.3. \u8bfb\u53d6\u884c\u60c5\u6570\u636e \u00b6 \u73b0\u5728\uff0c\u8ba9\u6211\u4eec\u6765\u83b7\u53d6\u4e00\u6bb5\u884c\u60c5\u6570\u636e\uff1a 1 2 3 4 code = \"000001.XSHE\" end = datetime . date ( 2022 , 5 , 20 ) bars = await Stock . get_bars ( code , 10 , FrameType . DAY , end ) \u8fd4\u56de\u7684 bars \u5c06\u662f\u4e00\u4e2a numpy structured array, \u5176\u7c7b\u578b\u4e3a bars_dtype \u3002\u4e00\u822c\u5730\uff0c\u5b83\u5305\u62ec\u4e86\u4ee5\u4e0b\u5b57\u6bb5\uff1a 1 2 3 4 5 6 7 8 * frame\uff08\u5e27\uff09 * open\uff08\u5f00\u76d8\u4ef7\uff09 * high\uff08\u6700\u9ad8\u4ef7\uff09 * low\uff08\u6700\u4f4e\u4ef7\uff09 * close\uff08\u6536\u76d8\u4ef7\uff09 * volume\uff08\u6210\u4ea4\u91cf\uff0c\u80a1\u6570\uff09 * amount\uff08\u6210\u4ea4\u989d\uff09 * factor\uff08\u590d\u6743\u56e0\u5b50\uff09 \u7f3a\u7701\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u7684\u6570\u636e\u662f\u5230 end \u4e3a\u6b62\u7684\u524d\u590d\u6743\u6570\u636e\u3002\u4f60\u53ef\u4ee5\u901a\u53c2\u6570 fq = False \u5173\u95ed\u5b83\uff0c\u6765\u83b7\u5f97\u4e0d\u590d\u6743\u6570\u636e\uff0c\u5e76\u4ee5\u6b64\u81ea\u884c\u8ba1\u7b97\u540e\u590d\u6743\u6570\u636e\u3002 \u5982\u679c\u8981\u83b7\u53d6\u67d0\u4e2a\u65f6\u95f4\u6bb5\u7684\u6570\u636e\uff0c\u53ef\u4ee5\u4f7f\u7528 get_bars_in_range \u3002 \u4e0a\u8ff0\u65b9\u6cd5\u603b\u662f\u5c3d\u6700\u5927\u53ef\u80fd\u8fd4\u56de\u5b9e\u65f6\u6570\u636e\uff0c\u5982\u679c end \u4e3a\u5f53\u524d\u65f6\u95f4\u7684\u8bdd\uff0c\u4f46\u7531\u4e8e omega \u540c\u6b65\u5ef6\u65f6\u662f\u4e00\u5206\u949f\uff0c\u6240\u4ee5\u884c\u60c5\u6570\u636e\u6700\u591a\u53ef\u80fd\u6162\u4e00\u5206\u949f\u3002\u5982\u679c\u8981\u83b7\u53d6\u66f4\u5b9e\u65f6\u7684\u6570\u636e\uff0c\u53ef\u4ee5\u901a\u8fc7 get_latest_price \u65b9\u6cd5\u3002 \u8981\u83b7\u6da8\u8dcc\u505c\u4ef7\u683c\u548c\u6807\u5fd7\uff0c\u8bf7\u4f7f\u7528: get_trade_price_limits trade_price_limits_flags trade_price_limit_flags_ex 2.4. \u677f\u5757\u6570\u636e \u00b6 \u63d0\u4f9b\u540c\u82b1\u987a\u677f\u5757\u884c\u4e1a\u677f\u5757\u548c\u6982\u5ff5\u677f\u5757\u6570\u636e\u3002\u5728\u4f7f\u7528\u672c\u6a21\u5757\u4e4b\u524d\uff0c\u9700\u8981\u8fdb\u884c\u521d\u59cb\u5316\uff1a 1 2 3 # \u8bf7\u5148\u8fdb\u884comicron\u521d\u59cb\u5316\uff0c\u7565 from omicron.models.board import Board , BoardType Board . init ( '192.168.100.101' ) \u6b64\u5904\u7684IP\u4e3a\u5b89\u88c5omega\u670d\u52a1\u5668\u7684ip\u3002 \u901a\u8fc7 board_list \u6765\u67e5\u8be2\u6240\u6709\u7684\u677f\u5757\u3002 \u5176\u5b83\u65b9\u6cd5\u8bf7\u53c2\u770b API\u6587\u6863 3. 
\u7b56\u7565\u7f16\u5199 \u00b6 omicron \u901a\u8fc7 strategy \u6765\u63d0\u4f9b\u7b56\u7565\u6846\u67b6\u3002\u901a\u8fc7\u8be5\u6846\u67b6\u7f16\u5199\u7684\u7b56\u7565\uff0c\u53ef\u4ee5\u5728\u5b9e\u76d8\u548c\u56de\u6d4b\u4e4b\u95f4\u65e0\u7f1d\u8f6c\u6362 -- \u6839\u636e\u521d\u59cb\u5316\u65f6\u4f20\u5165\u7684\u670d\u52a1\u5668\u4e0d\u540c\u800c\u81ea\u52a8\u5207\u6362\u3002 omicron \u63d0\u4f9b\u4e86\u4e00\u4e2a\u7b80\u5355\u7684 \u53cc\u5747\u7ebf\u7b56\u7565 \u4f5c\u4e3a\u7b56\u7565\u7f16\u5199\u7684\u793a\u8303\uff0c\u53ef\u7ed3\u5408\u5176\u6e90\u7801\uff0c\u4ee5\u53ca\u672c\u6587\u6863\u4e2d\u7684 \u5b8c\u6574\u7b56\u7565\u793a\u4f8b \u5728notebook\u4e2d\u8fd0\u884c\u67e5\u770b\u3002 \u7b56\u7565\u6846\u67b6\u63d0\u4f9b\u4e86\u56de\u6d4b\u9a71\u52a8\u903b\u8f91\u53ca\u4e00\u4e9b\u57fa\u672c\u51fd\u6570\u3002\u8981\u7f16\u5199\u81ea\u5df1\u7684\u7b56\u7565\uff0c\u60a8\u9700\u8981\u4ece\u57fa\u7c7b BaseStrategy \u6d3e\u751f\u51fa\u81ea\u5df1\u7684\u5b50\u7c7b\uff0c\u5e76\u6539\u5199\u5b83\u7684 predict \u65b9\u6cd5\u6765\u5b9e\u73b0\u8c03\u4ed3\u6362\u80a1\u3002 \u7b56\u7565\u6846\u67b6\u4f9d\u8d56\u4e8e zillionare-trader-client \uff0c\u5728\u56de\u6d4b\u65f6\uff0c\u9700\u8981\u6709 zillionare-backtesting \u63d0\u4f9b\u56de\u6d4b\u670d\u52a1\u3002\u5728\u5b9e\u76d8\u65f6\uff0c\u9700\u8981 zilllionare-gm-adaptor \u6216\u8005\u5176\u5b83\u5b9e\u76d8\u4ea4\u6613\u7f51\u5173\u63d0\u4f9b\u670d\u52a1\u3002 \u7b56\u7565\u4ee3\u7801\u53ef\u4ee5\u4e0d\u52a0\u4fee\u6539\uff0c\u5373\u53ef\u4f7f\u7528\u4e8e\u56de\u6d4b\u548c\u5b9e\u76d8\u4e24\u79cd\u573a\u666f\u3002 3.1. \u56de\u6d4b\u573a\u666f \u00b6 \u5b9e\u73b0\u7b56\u7565\u56de\u6d4b\uff0c\u4e00\u822c\u9700\u8981\u8fdb\u884c\u4ee5\u4e0b\u6b65\u9aa4\uff1a 1. \u4ece\u6b64\u57fa\u7c7b\u6d3e\u751f\u51fa\u4e00\u4e2a\u7b56\u7565\u5b50\u7c7b\uff0c\u6bd4\u5982sma.py 2. \u5b50\u7c7b\u9700\u8981\u91cd\u8f7d predict \u65b9\u6cd5\uff0c\u6839\u636e\u5f53\u524d\u4f20\u5165\u7684\u65f6\u95f4\u5e27\u548c\u5e27\u7c7b\u578b\u53c2\u6570\uff0c\u83b7\u53d6\u6570\u636e\u5e76\u8fdb\u884c\u5904\u7406\uff0c\u8bc4\u4f30\u51fa\u4ea4\u6613\u4fe1\u53f7\u3002 3. \u5b50\u7c7b\u6839\u636e\u4ea4\u6613\u4fe1\u53f7\uff0c\u5728 predict \u65b9\u6cd5\u91cc\uff0c\u8c03\u7528\u57fa\u7c7b\u7684 buy \u548c sell \u65b9\u6cd5\u6765\u8fdb\u884c\u4ea4\u6613 4. \u751f\u6210\u7b56\u7565\u5b9e\u4f8b\uff0c\u901a\u8fc7\u5b9e\u4f8b\u8c03\u7528 backtest \u65b9\u6cd5\u6765\u8fdb\u884c\u56de\u6d4b\uff0c\u8be5\u65b9\u6cd5\u5c06\u6839\u636e\u7b56\u7565\u6784\u5efa\u65f6\u6307\u5b9a\u7684\u56de\u6d4b\u8d77\u59cb\u65f6\u95f4\u3001\u7ec8\u6b62\u65f6\u95f4\u3001\u5e27\u7c7b\u578b\uff0c\u9010\u5e27\u751f\u6210\u5404\u4e2a\u65f6\u95f4\u5e27\uff0c\u5e76\u8c03\u7528\u5b50\u7c7b\u7684 predict \u65b9\u6cd5\u3002\u5982\u679c\u8c03\u7528\u65f6\u6307\u5b9a\u4e86 prefetch_stocks \u53c2\u6570\uff0c backtest \u8fd8\u5c06\u8fdb\u884c\u6570\u636e\u9884\u53d6\uff08\u9884\u53d6\u7684\u6570\u636e\u957f\u5ea6\u7531 warmup_peroid \u51b3\u5b9a\uff09\uff0c\u5e76\u5c06\u622a\u6b62\u5230\u5f53\u524d\u56de\u6d4b\u5e27\u65f6\u7684\u6570\u636e\u4f20\u5165\u3002 5. \u5728\u4ea4\u6613\u7ed3\u675f\u65f6\uff0c\u8c03\u7528 plot_metrics \u65b9\u6cd5\u6765\u83b7\u53d6\u5982\u4e0b\u6240\u793a\u7684\u56de\u6d4b\u6307\u6807\u56fe \u5982\u4f55\u6d3e\u751f\u5b50\u7c7b\uff0c\u53ef\u4ee5\u53c2\u8003 sma \u6e90\u4ee3\u7801\u3002 1 2 3 4 5 6 7 8 9 10 11 from omicron.strategy.sma import SMAStrategy sma = SMAStrategy ( url = \"\" , # the url of either backtest server, or trade server is_backtest = True , start = datetime . 
date ( 2023 , 2 , 3 ), end = datetime . date ( 2023 , 4 , 28 ), frame_type = FrameType . DAY , warmup_period = 20 ) await sma . backtest ( prefetch_stocks = [ \"600000.XSHG\" ]) \u5728\u56de\u6d4b\u65f6\uff0c\u5fc5\u987b\u8981\u6307\u5b9a is_backtest=True \u548c start , end \u53c2\u6570\u3002 3.2. \u56de\u6d4b\u62a5\u544a \u00b6 \u5728\u56de\u6d4b\u7ed3\u675f\u540e\uff0c\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u65b9\u6cd5\uff0c\u5728notebook\u4e2d\u7ed8\u5236\u56de\u6d4b\u62a5\u544a\uff1a 1 await sma . plot_metrics () \u8fd9\u5c06\u7ed8\u5236\u51fa\u7c7b\u4f3c\u4ee5\u4e0b\u56fe\uff1a 3.2.1. \u5728\u56de\u6d4b\u62a5\u544a\u4e2d\u6dfb\u52a0\u6280\u672f\u6307\u6807 \u00b6 Info Since 2.0.0.a76 \u9996\u5148\uff0c\u6211\u4eec\u53ef\u4ee5\u5728\u7b56\u7565\u7c7b\u7684predict\u65b9\u6cd5\u4e2d\u8ba1\u7b97\u51fa\u6280\u672f\u6307\u6807\uff0c\u5e76\u4fdd\u5b58\u5230\u6210\u5458\u53d8\u91cf\u4e2d\u3002\u5728\u4e0b\u9762\u7684\u793a\u4f8b\u4ee3\u7801\u4e2d\uff0c\u6211\u4eec\u5c06\u6280\u672f\u6307\u6807\u53ca\u5f53\u65f6\u7684\u65f6\u95f4\u4fdd\u5b58\u5230\u4e86\u4e00\u4e2aindicators\u6570\u7ec4\u4e2d\uff08\u6ce8\u610f\u987a\u5e8f\uff01\uff09\uff0c\u7136\u540e\u5728\u56de\u6d4b\u7ed3\u675f\u540e\uff0c\u5728\u8c03\u7528 plot_metrics\u65f6\uff0c\u5c06\u5176\u4f20\u5165\u5373\u53ef\u3002 1 2 3 4 5 6 7 indicators = [ (datetime.date(2021, 2, 3), 20.1), (datetime.date(2021, 2, 4), 20.2), ..., (datetime.date(2021, 4, 1), 20.3) ] await sma.plot_metrics(indicator) \u65f6\u95f4\u53ea\u80fd\u4f7f\u7528\u4e3b\u5468\u671f\u7684\u65f6\u95f4\uff0c\u5426\u5219\u53ef\u80fd\u4ea7\u751f\u65e0\u6cd5\u4e0e\u5750\u6807\u8f74\u5bf9\u9f50\u7684\u60c5\u51b5\u3002 \u52a0\u5165\u7684\u6307\u6807\u9ed8\u8ba4\u53ea\u663e\u793a\u5728legend\u4e2d\uff0c\u5982\u679c\u8981\u663e\u793a\u5728\u4e3b\u56fe\u4e0a\uff0c\u9700\u8981\u70b9\u51fblegend\u8fdb\u884c\u663e\u793a\u3002 \u6307\u6807\u9664\u53ef\u4ee5\u53e0\u52a0\u5728\u4e3b\u56fe\u4e0a\u4e4b\u5916\uff0c\u8fd8\u4f1a\u51fa\u73b0\u5728\u57fa\u51c6\u7ebf\u7684hoverinfo\u4e2d\uff08\u5373\u4f7f\u6307\u6807\u7684\u8ba1\u7b97\u4e0e\u57fa\u51c6\u7ebf\u65e0\u5173\uff09\uff0c\u53c2\u89c1\u4e0a\u56fe\u4e2d\u7684\u201c\u6307\u6807\u201d\u884c\u3002 3.3. \u4f7f\u7528\u6570\u636e\u9884\u53d6 \u00b6 Info since version 2.0.0-alpha76 \u5728\u56de\u6d4b\u4e2d\uff0c\u53ef\u4ee5\u4f7f\u7528\u4e3b\u5468\u671f\u7684\u6570\u636e\u9884\u53d6\uff0c\u4ee5\u52a0\u5feb\u56de\u6d4b\u901f\u5ea6\u3002\u5de5\u4f5c\u539f\u7406\u5982\u4e0b\uff1a \u5982\u679c\u7b56\u7565\u6307\u5b9a\u4e86 warmup_period \uff0c\u5e76\u5728\u8c03\u7528 backtest \u65f6\u4f20\u5165\u4e86 prefetch_stocks \u53c2\u6570\uff0c\u5219 backtest \u5c06\u4f1a\u5728\u56de\u6d4b\u4e4b\u524d\uff0c\u9884\u53d6\u4ece[start - warmup_period * frame_type, end]\u95f4\u7684portfolio\u884c\u60c5\u6570\u636e\uff0c\u5e76\u5728\u6bcf\u6b21\u8c03\u7528 predict \u65b9\u6cd5\u65f6\uff0c\u901a\u8fc7 barss \u53c2\u6570\uff0c\u5c06[start - warmup_period * frame_type, start + i * frame_type]\u95f4\u7684\u6570\u636e\u4f20\u7ed9 predict \u65b9\u6cd5\u3002\u4f20\u5165\u7684\u6570\u636e\u5df2\u8fdb\u884c\u524d\u590d\u6743\u3002 \u5982\u679c\u5728\u56de\u6d4b\u8fc7\u7a0b\u4e2d\uff0c\u9700\u8981\u5077\u770b\u672a\u6765\u6570\u636e\uff0c\u53ef\u4ee5\u4f7f\u7528peek\u65b9\u6cd5\u3002 3.4. 
\u5b8c\u6574SMA\u56de\u6d4b\u793a\u4f8b \u00b6 \u4ee5\u4e0b\u7b56\u7565\u9700\u8981\u5728notebook\u4e2d\u8fd0\u884c\uff0c\u5e76\u4e14\u9700\u8981\u4e8b\u5148\u5b89\u88c5omega\u670d\u52a1\u5668\u540c\u6b65\u6570\u636e\uff0c\u5e76\u6b63\u786e\u914d\u7f6eomicron\u3002 \u8be5\u793a\u4f8b\u5728\u300a\u5927\u5bcc\u7fc1\u91cf\u5316\u8bfe\u7a0b\u300b\u8bfe\u4ef6\u73af\u5883\u4e0b\u53ef\u8fd0\u884c\u3002 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 import cfg4py import omicron import datetime from omicron.strategy.sma import SMAStrategy from coretypes import FrameType cfg = cfg4py . init ( \"/etc/zillionare\" ) await omicron . init () sec = \"600000.XSHG\" start = datetime . date ( 2022 , 1 , 4 ) end = datetime . date ( 2023 , 1 , 1 ) sma = SMAStrategy ( sec , url = cfg . backtest . url , is_backtest = True , start = start , end = end , frame_type = FrameType . DAY , warmup_period = 10 ) await sma . backtest ( portfolio = [ sec ], stop_on_error = False ) await sma . plot_metrics ( sma . indicators ) 3.5. \u5b9e\u76d8 \u00b6 \u5728\u5b9e\u76d8\u73af\u5883\u4e0b\uff0c\u4f60\u8fd8\u9700\u8981\u5728\u5b50\u7c7b\u4e2d\u52a0\u5165\u5468\u671f\u6027\u4efb\u52a1(\u6bd4\u5982\u6bcf\u5206\u949f\u6267\u884c\u4e00\u6b21\uff09\uff0c\u5728\u8be5\u4efb\u52a1\u4e2d\u8c03\u7528 predict \u65b9\u6cd5\u6765\u5b8c\u6210\u4ea4\u6613\uff0c\u5982\u4ee5\u4e0b\u793a\u4f8b\u6240\u793a\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 import cfg4py import omicron import datetime from omicron.strategy.sma import SMAStrategy from coretypes import FrameType from apscheduler.schedulers.asyncio import AsyncIOScheduler cfg = cfg4py . init ( \"/etc/zillionare\" ) await omicron . init () async def daily_job (): sma = SMAStrategy ( sec , url = cfg . traderserver . url , is_backtest = False , frame_type = FrameType . DAY ) bars = await Stock . get_bars ( sma . _sec , 20 , FrameType . DAY ) await sma . predict ( barss = { sma . _sec : bars }) async def main (): scheduler = AsyncIOScheduler () scheduler . add_job ( daily_job , 'cron' , hour = 14 , minute = 55 ) scheduler . start () \u7b56\u7565\u4ee3\u7801\u65e0\u987b\u4fee\u6539\u3002 \u8be5\u7b56\u7565\u5c06\u81ea\u52a8\u5728\u6bcf\u5929\u768414\uff1a55\u8fd0\u884c\uff0c\u4ee5\u5224\u65ad\u662f\u5426\u8981\u8fdb\u884c\u8c03\u4ed3\u6362\u80a1\u3002\u60a8\u9700\u8981\u989d\u5916\u5224\u65ad\u5f53\u5929\u662f\u5426\u4e3a\u4ea4\u6613\u65e5\u3002 4. \u7ed8\u56fe \u00b6 omicron \u901a\u8fc7 Candlestick \u63d0\u4f9b\u4e86 k \u7ebf\u7ed8\u5236\u529f\u80fd\u3002\u9ed8\u8ba4\u5730\uff0c\u5b83\u5c06\u7ed8\u5236\u4e00\u5e45\u663e\u793a 120 \u4e2a bar\uff0c\u53ef\u62d6\u52a8\uff08\u4ee5\u52a0\u8f7d\u66f4\u591a bar)\uff0c\u5e76\u4e14\u53ef\u4ee5\u53e0\u52a0\u526f\u56fe\u3001\u4e3b\u56fe\u53e0\u52a0\u5404\u79cd\u6307\u6807\u7684 k \u7ebf\u56fe\uff1a \u4e0a\u56fe\u663e\u793a\u4e86\u81ea\u52a8\u68c0\u6d4b\u51fa\u6765\u7684\u5e73\u53f0\u3002\u6b64\u5916\uff0c\u8fd8\u53ef\u4ee5\u8fdb\u884c\u9876\u5e95\u81ea\u52a8\u68c0\u6d4b\u548c\u6807\u6ce8\u3002 Note \u901a\u8fc7\u6307\u5b9a width \u53c2\u6570\uff0c\u53ef\u4ee5\u5f71\u54cd\u521d\u59cb\u52a0\u8f7d\u7684bar\u7684\u6570\u91cf\u3002 omicron \u901a\u8fc7 metris \u63d0\u4f9b\u56de\u6d4b\u62a5\u544a\u3002\u8be5\u62a5\u544a\u7c7b\u4f3c\u4e8e\uff1a \u5b83\u540c\u6837\u63d0\u4f9b\u53ef\u62d6\u52a8\u7684\u7ed8\u56fe\uff0c\u5e76\u4e14\u5728\u4e70\u5356\u70b9\u4e0a\u53ef\u4ee5\u901a\u8fc7\u9f20\u6807\u60ac\u505c\uff0c\u663e\u793a\u4e70\u5356\u70b9\u4fe1\u606f\u3002 omicron \u7684\u7ed8\u56fe\u529f\u80fd\u53ea\u80fd\u5728 notebook \u4e2d\u4f7f\u7528\u3002 5. 
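As a rough usage sketch for the candlestick plotting described above (the import paths, the constructor arguments, and whether `plot()` must be awaited are assumptions here; consult the Candlestick API page for the exact interface):

```python
from coretypes import FrameType
from omicron.models.stock import Stock      # import path assumed
from omicron.plotting import Candlestick    # import path assumed

# load 250 daily bars and draw them; `width` controls how many bars are
# shown before dragging loads more, as noted in the section above
bars = await Stock.get_bars("000001.XSHE", 250, FrameType.DAY)
cs = Candlestick(bars, title="000001.XSHE", width=120)
cs.plot()  # in some versions this may be a coroutine; if so, await it
```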
\u8bc4\u4f30\u6307\u6807 \u00b6 omicron \u63d0\u4f9b\u4e86 mean_absolute_error \u51fd\u6570\u548c pct_error \u51fd\u6570\u3002\u5b83\u4eec\u5728 scipy \u6216\u8005\u5176\u5b83\u5e93\u4e2d\u4e5f\u80fd\u627e\u5230\uff0c\u4e3a\u4e86\u65b9\u4fbf\u4e0d\u719f\u6089\u8fd9\u4e9b\u7b2c\u4e09\u65b9\u5e93\u7684\u4f7f\u7528\u8005\uff0c\u6211\u4eec\u5185\u7f6e\u4e86\u8fd9\u4e2a\u5e38\u6307\u6807\u3002 \u5bf9\u4e00\u4e9b\u5e38\u89c1\u7684\u7b56\u7565\u8bc4\u4f30\u51fd\u6570\uff0c\u6211\u4eec\u5f15\u7528\u4e86 empyrical \u4e2d\u7684\u76f8\u5173\u51fd\u6570\uff0c\u6bd4\u5982 alpha, beta, shapre_ratio\uff0c calmar_ratio \u7b49\u3002 6. TALIB \u5e93 \u00b6 \u60a8\u5e94\u8be5\u628a\u8fd9\u91cc\u63d0\u4f9b\u7684\u51fd\u6570\u5f53\u6210\u5b9e\u9a8c\u6027\u7684\u3002\u8fd9\u4e9b API \u4e5f\u53ef\u80fd\u5728\u67d0\u5929\u88ab\u5e9f\u5f03\u3001\u91cd\u547d\u540d\u3001\u4fee\u6539\uff0c\u6216\u8005\u8fd9\u4e9b API \u5e76\u6ca1\u6709\u591a\u5927\u4f5c\u7528\uff0c\u6216\u8005\u5b83\u4eec\u7684\u5b9e\u73b0\u5b58\u5728\u9519\u8bef\u3002 \u4f46\u662f\uff0c\u5982\u679c\u6211\u4eec\u5c06\u6765\u4f1a\u629b\u5f03\u8fd9\u4e9b API \u7684\u8bdd\uff0c\u6211\u4eec\u4e00\u5b9a\u4f1a\u901a\u8fc7 depracted \u65b9\u6cd5\u63d0\u524d\u8fdb\u884c\u8b66\u544a\u3002 7. \u6269\u5c55 \u00b6 Python\u5f53\u4e2d\u7684\u56db\u820d\u4e94\u5165\u7528\u4e8e\u8bc1\u5238\u6295\u8d44\uff0c\u4f1a\u5e26\u6765\u4e25\u91cd\u7684\u95ee\u9898\uff0c\u6bd4\u5982\uff0c\u50cf round(0.3/2) \uff0c\u6211\u4eec\u671f\u671b\u5f97\u5230 0.2 \uff0c\u4f46\u5b9e\u9645\u4e0a\u4f1a\u5f97\u5230 0.1 \u3002\u8fd9\u79cd\u8bef\u5dee\u4e00\u65e6\u53d1\u751f\u6210\u5728\u4e00\u4e9b\u4f4e\u4ef7\u80a1\u8eab\u4e0a\uff0c\u5c06\u4f1a\u5e26\u6765\u975e\u5e38\u5927\u7684\u4e0d\u786e\u5b9a\u6027\u3002\u6bd4\u5982\uff0c1.945\u4fdd\u7559\u4e24\u4f4d\u5c0f\u6570\uff0c\u672c\u6765\u5e94\u8be5\u662f1.95\uff0c\u5982\u679c\u88ab\u8bef\u820d\u5165\u4e3a1.94\uff0c\u5219\u8bef\u5dee\u63a5\u8fd10.5%\uff0c\u8fd9\u5bf9\u6295\u8d44\u6765\u8bf4\u662f\u96be\u4ee5\u63a5\u53d7\u7684\u3002 Info \u5982\u679c\u4e00\u5929\u53ea\u8fdb\u884c\u4e00\u6b21\u4ea4\u6613\uff0c\u4e00\u6b21\u4ea4\u6613\u8bef\u5dee\u4e3a0.5%\uff0c\u4e00\u5e74\u7d2f\u79ef\u4e0b\u6765\uff0c\u8bef\u5dee\u5c06\u8fbe\u52302.5\u500d\u3002 \u6211\u4eec\u5728 decimals \u4e2d\u63d0\u4f9b\u4e86\u9002\u7528\u4e8e\u8bc1\u5238\u4ea4\u6613\u9886\u57df\u7684\u7248\u672c\uff0c math_round \u548c\u4ef7\u683c\u6bd4\u8f83\u51fd\u6570 price_equal \u3002 \u6211\u4eec\u8fd8\u5728 np \u4e2d\uff0c\u5bf9numpy\u4e2d\u7f3a\u5931\u7684\u4e00\u4e9b\u529f\u80fd\u8fdb\u884c\u4e86\u8865\u5145\uff0c\u6bd4\u5982 numpy_append_fields , fill_nan \u7b49\u3002","title":"\u4f7f\u7528\u6559\u7a0b"},{"location":"usage/#1-\u914d\u7f6e\u521d\u59cb\u5316\u548c\u5173\u95ed-omicron","text":"Omicron \u4f9d\u8d56\u4e8e zillionare-omega \u670d\u52a1\u6765\u83b7\u53d6\u6570\u636e\u3002\u4f46\u5b83\u5e76\u4e0d\u76f4\u63a5\u4e0e Omega \u670d\u52a1\u901a\u8baf\uff0c\u76f8\u53cd\uff0c\u5b83\u76f4\u63a5\u8bfb\u53d6 Omega \u670d\u52a1\u5668\u4f1a\u5199\u5165\u6570\u636e\u7684 Influxdb \u548c redis \u6570\u636e\u5e93\u3002\u56e0\u6b64\uff0c\u5728\u4f7f\u7528 Omicron \u4e4b\u524d\uff0c\u6211\u4eec\u9700\u8981\u63d0\u4f9b\u8fd9\u4e24\u4e2a\u670d\u52a1\u5668\u7684\u8fde\u63a5\u5730\u5740\uff0c\u5e76\u8fdb\u884c\u521d\u59cb\u5316\u3002","title":"1. 
\u914d\u7f6e\u3001\u521d\u59cb\u5316\u548c\u5173\u95ed OMICRON"},{"location":"usage/#11-\u914d\u7f6e\u548c\u521d\u59cb\u5316","text":"Omicron \u4f7f\u7528 cfg4py \u6765\u7ba1\u7406\u914d\u7f6e\u3002 cfg4py \u4f7f\u7528 yaml \u6587\u4ef6\u6765\u4fdd\u5b58\u914d\u7f6e\u9879\u3002\u5728\u4f7f\u7528 cfg4py \u4e4b\u524d\uff0c\u60a8\u9700\u8981\u5728\u67d0\u5904\u521d\u59cb\u5316 cfg4py\uff0c\u7136\u540e\u518d\u521d\u59cb\u5316 omicron: Tip \u4e3a\u4e86\u7b80\u6d01\u8d77\u89c1\uff0c\u6211\u4eec\u5728\u9876\u5c42\u4ee3\u7801\u4e2d\u76f4\u63a5\u4f7f\u7528\u4e86 async/await\u3002\u901a\u5e38\uff0c\u8fd9\u4e9b\u4ee3\u7801\u80fd\u591f\u76f4\u63a5\u5728 notebook \u4e2d\u8fd0\u884c\uff0c\u4f46\u5982\u679c\u9700\u8981\u5728\u666e\u901a\u7684 python \u811a\u672c\u4e2d\u8fd0\u884c\u8fd9\u4e9b\u4ee3\u7801\uff0c\u60a8\u901a\u5e38\u9700\u8981\u5c06\u5176\u5c01\u88c5\u5230\u4e00\u4e2a\u5f02\u6b65\u51fd\u6570\u4e2d\uff0c\u518d\u901a\u8fc7 asyncio.run \u6765\u8fd0\u884c\u5b83\u3002 1 2 3 4 5 6 7 8 9 import asyncio import cfg4py import omicron async def main (): cfg4py . init ( 'path/to/your/config/dir' ) await omicron . init () # DO YOUR GREAT JOB WITH OMICRON asyncio . run ( main ()) 1 2 3 4 5 import cfg4py import omicron cfg4py . init ( 'path/to/your/config/dir' ) await omicron . init () \u6ce8\u610f\u521d\u59cb\u5316 cfg4py \u65f6\uff0c\u9700\u8981\u63d0\u4f9b\u5305\u542b\u914d\u7f6e\u6587\u4ef6\u7684 \u6587\u4ef6\u5939 \u7684\u8def\u5f84\uff0c\u800c \u4e0d\u662f\u914d\u7f6e\u6587\u4ef6 \u7684\u8def\u5f84\u3002\u914d\u7f6e\u6587\u4ef6\u540d\u5fc5\u987b\u4e3a defaults.yml\u3002 \u60a8\u81f3\u5c11\u5e94\u8be5\u4e3a omicron \u914d\u7f6e Redis \u8fde\u63a5\u4e32\u548c influxdb \u8fde\u63a5\u4e32\u3002\u4e0b\u9762\u662f\u5e38\u7528\u914d\u7f6e\u793a\u4f8b\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 # DEFAULTS.YAML redis : dsn : redis://${REDIS_HOST}:${REDIS_PORT} influxdb : url : http://${INFLUXDB_HOST}:${INFLUXDB_PORT} token : ${INFLUXDB_TOKEN} org : ${INFLUXDB_ORG} bucket_name : ${INFLUXDB_BUCKET_NAME} enable_compress : true max_query_size : 150000 notify : mail_from : ${MAIL_FROM} mail_to : - ${MAIL_TO} mail_server : ${MAIL_SERVER} dingtalk_access_token : ${DINGTALK_ACCESS_TOKEN} dingtalk_secret : ${DINGTALK_SECRET} \u8bf7\u6839\u636e\u60a8\u5b9e\u9645\u73af\u5883\u914d\u7f6e\u6765\u66f4\u6539\u4e0a\u8ff0\u6587\u4ef6\u3002\u4e0a\u8ff0\u914d\u7f6e\u4e2d\uff0c${{REDIS_HOST}}\u610f\u5473\u7740\u73af\u5883\u53d8\u91cf\u3002\u5982\u679c\u662f windows\uff0c\u60a8\u9700\u8981\u5728\u7cfb\u7edf > \u73af\u5883\u53d8\u91cf\u4e2d\u8fdb\u884c\u8bbe\u7f6e\u3002\u5982\u679c\u662f Linux \u6216\u8005 Mac\uff0c\u60a8\u9700\u8981\u4fee\u6539.bashrc\uff0c\u4f8b\u5982\uff1a 1 export REDIS_HOST=localhost","title":"1.1. \u914d\u7f6e\u548c\u521d\u59cb\u5316"},{"location":"usage/#12-\u5173\u95ed-omicron","text":"\u5728\u60a8\u7684\u8fdb\u7a0b\u5373\u5c06\u9000\u51fa\u4e4b\u524d\uff0c\u8bf7\u8bb0\u5f97\u5173\u95ed omicron\u3002\u5982\u679c\u60a8\u662f\u5728 notebook \u4e2d\u4f7f\u7528 omicron, \u5219\u53ef\u4ee5\u5ffd\u7565\u6b64\u6b65\u805a\u3002 1 await omicron . close ()","title":"1.2. \u5173\u95ed omicron"},{"location":"usage/#2-\u6570\u636e\u8bfb\u53d6","text":"","title":"2. 
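Putting sections 1.1 and 1.2 together, a plain Python script (outside a notebook) would normally pair `omicron.init()` with `omicron.close()` as below; the config directory is a placeholder and must contain your `defaults.yaml`:

```python
import asyncio

import cfg4py
import omicron


async def main():
    cfg4py.init("path/to/your/config/dir")  # the folder holding defaults.yaml, not the file
    await omicron.init()
    try:
        ...  # your code using omicron goes here
    finally:
        # always close omicron before the process exits
        await omicron.close()


asyncio.run(main())
```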
\u6570\u636e\u8bfb\u53d6"},{"location":"usage/#21-\u8bc1\u5238\u5217\u8868","text":"Security \u548c Query \u63d0\u4f9b\u4e86\u8bc1\u5238\u5217\u8868\u548c\u67e5\u8be2\u64cd\u4f5c\u3002\u67e5\u8be2\u88ab\u8bbe\u8ba1\u6210\u4e3a\u94fe\u5f0f API\u3002\u901a\u5e38\uff0c\u6211\u4eec\u901a\u8fc7\u8c03\u7528 Security.select() \u6765\u751f\u6210\u4e00\u4e2a Query \u5bf9\u8c61\uff0c\u7136\u540e\u53ef\u4ee5\u9488\u5bf9\u6b64\u5bf9\u8c61\uff0c\u8fdb\u884c\u5404\u79cd\u8fc7\u67e5\u8be2\u8fc7\u6ee4\uff0c\u6700\u540e\uff0c\u6211\u4eec\u8c03\u7528 query.eval() \u65b9\u6cd5\u7ed3\u675f\u94fe\u5f0f\u8c03\u7528\uff0c\u5e76\u8fd4\u56de\u7ed3\u679c\u3002","title":"2.1. \u8bc1\u5238\u5217\u8868"},{"location":"usage/#211-\u67e5\u8be2\u6240\u6709\u8bc1\u5238\u4ee3\u7801","text":"\u60a8\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u65b9\u6cd5\u6765\u83b7\u53d6\u67d0\u4e00\u5929\u7684\u8bc1\u5238\u5217\u8868\uff1a 1 2 3 4 5 6 7 # 4. ASSUME YOU HAVE OMICRON INIT dt = datetime . date ( 2022 , 5 , 20 ) query = Security . select ( dt ) codes = await query . eval () print ( codes ) # THE OUTPUTS IS LIKE [\"000001.XSHE\", \"000004.XSHE\", ...] \u8fd9\u91cc\u7684 dt \u5982\u679c\u6ca1\u6709\u63d0\u4f9b\u7684\u8bdd\uff0c\u5c06\u4f7f\u7528\u6700\u65b0\u7684\u8bc1\u5238\u5217\u8868\u3002\u4f46\u5728\u56de\u6d4b\u4e2d\uff0c\u60a8\u901a\u5e38\u4e0d\u540c\u65f6\u95f4\u7684\u8bc1\u5238\u5217\u8868\uff0c\u56e0\u6b64\uff0c dt \u5728\u8fd9\u79cd\u60c5\u51b5\u4e0b\u662f\u5fc5\u987b\u7684\uff0c\u5426\u5219\uff0c\u60a8\u5c06\u5f15\u5165\u672a\u6765\u6570\u636e\u3002","title":"2.1.1. \u67e5\u8be2\u6240\u6709\u8bc1\u5238\u4ee3\u7801"},{"location":"usage/#212-\u8fd4\u56de\u6240\u6709\u80a1\u7968\u6216\u8005\u6307\u6570","text":"1 2 3 query = Security . select ( dt ) codes = await query . types ([ \"stock\" ]) . eval () print ( codes )","title":"2.1.2. \u8fd4\u56de\u6240\u6709\u80a1\u7968\u6216\u8005\u6307\u6570"},{"location":"usage/#213-\u6392\u9664\u67d0\u79cd\u80a1\u7968\u8bc1\u5238","text":"1 2 3 query = Security . select ( dt ) codes = await query . exclude_st () . exclude_kcb () . exclude_cyb () . eval () print ( codes )","title":"2.1.3. \u6392\u9664\u67d0\u79cd\u80a1\u7968\uff08\u8bc1\u5238\uff09"},{"location":"usage/#214-\u5982\u679c\u53ea\u8981\u6c42\u67d0\u79cd\u80a1\u7968\u8bc1\u5238","text":"1 2 3 4 query = Security . select ( dt ) codes = await query . only_kcb () . only_st () . only_cyb () . eval () print ( codes ) #\u5f97\u5230\u7a7a\u5217\u8868","title":"2.1.4. \u5982\u679c\u53ea\u8981\u6c42\u67d0\u79cd\u80a1\u7968\uff08\u8bc1\u5238\uff09"},{"location":"usage/#215-\u6309\u522b\u540d\u8fdb\u884c\u6a21\u7cca\u67e5\u8be2","text":"A \u80a1\u7684\u8bc1\u5238\u5728\u6807\u8bc6\u4e0a\uff0c\u4e00\u822c\u6709\u4ee3\u7801\uff08code \u6216\u8005 symbol)\u3001\u62fc\u97f3\u7b80\u5199 (name) \u548c\u6c49\u5b57\u8868\u793a\u540d (display_name) \u4e09\u79cd\u6807\u8bc6\u3002\u6bd4\u5982\u4e2d\u56fd\u5e73\u5b89\uff0c\u5176\u4ee3\u7801\u4e3a 601318.XSHG; \u5176\u62fc\u97f3\u7b80\u5199\u4e3a ZGPA\uff1b\u800c\u4e2d\u56fd\u5e73\u5b89\u88ab\u79f0\u4e3a\u5b83\u7684\u522b\u540d ( alias )\u3002 \u5982\u679c\u8981\u67e5\u8be2\u6240\u6709\u4e2d\u5b57\u5934\u7684\u80a1\u7968\uff1a 1 2 3 query = Security . select ( dt ) codes = await query . alias_like ( \"\u4e2d\" ) . eval () print ( codes )","title":"2.1.5. 
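Since the query API is chainable, the filters shown in 2.1.1 through 2.1.5 can be combined in a single expression. The sketch below only reuses methods demonstrated above; the `Security` import path is an assumption based on the module layout of `omicron.models`:

```python
import datetime

from omicron.models.security import Security  # import path assumed

dt = datetime.date(2022, 5, 20)
# stocks only, excluding ST / STAR-market names, whose alias contains "中"
codes = (
    await Security.select(dt)
    .types(["stock"])
    .exclude_st()
    .exclude_kcb()
    .alias_like("中")
    .eval()
)
print(codes)
```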
\u6309\u522b\u540d\u8fdb\u884c\u6a21\u7cca\u67e5\u8be2"},{"location":"usage/#216-\u901a\u8fc7\u4ee3\u7801\u67e5\u8be2\u5176\u5b83\u4fe1\u606f","text":"\u901a\u8fc7\u524d\u9762\u7684\u67e5\u8be2\u6211\u4eec\u53ef\u4ee5\u5f97\u5230\u4e00\u4e2a\u8bc1\u5238\u5217\u8868\uff0c\u5982\u679c\u8981\u5f97\u5230\u5177\u4f53\u7684\u4fe1\u606f\uff0c\u53ef\u4ee5\u901a\u8fc7 info \u63a5\u53e3\u6765\u67e5\u8be2\uff1a 1 2 3 dt = datetime . date ( 2022 , 5 , 20 ) info = await Security . info ( \"688001.XSHG\" , dt ) print ( info ) \u8f93\u51fa\u4e3a\uff1a 1 2 3 4 5 6 7 8 { ' t ype' : 's t ock' , 'display_ na me' : '\u534e\u5174\u6e90\u521b' , 'alias' : '\u534e\u5174\u6e90\u521b' , 'e n d' : da tet ime.da te ( 2200 , 1 , 1 ) , 's tart ' : da tet ime.da te ( 2019 , 7 , 22 ) , ' na me' : 'HXYC' }","title":"2.1.6. \u901a\u8fc7\u4ee3\u7801\u67e5\u8be2\u5176\u5b83\u4fe1\u606f"},{"location":"usage/#22-\u4ea4\u6613\u65e5\u5386\u53ca\u65f6\u95f4\u5e27\u8ba1\u7b97","text":"Omicron \u4e0d\u4ec5\u63d0\u4f9b\u4e86\u4ea4\u6613\u65e5\u5386\uff0c\u4e0e\u5176\u5b83\u91cf\u5316\u6846\u67b6\u76f8\u6bd4\uff0c\u6211\u4eec\u8fd8\u63d0\u4f9b\u4e86\u4e30\u5bcc\u7684\u4e0e\u65f6\u95f4\u76f8\u5173\u7684\u8fd0\u7b97\u64cd\u4f5c\u3002\u8fd9\u4e9b\u64cd\u4f5c\u90fd\u6709\u8be6\u7ec6\u7684\u6587\u6863\u548c\u793a\u4f8b\uff0c\u60a8\u53ef\u4ee5\u901a\u8fc7 TimeFrame \u6765\u8fdb\u4e00\u6b65\u9605\u8bfb\u3002 omicron \u4e2d\uff0c\u5e38\u5e38\u4f1a\u9047\u5230\u65f6\u95f4\u5e27 (Time Frame) \u8fd9\u4e2a\u6982\u5ff5\u3002\u56e0\u4e3a\u884c\u60c5\u6570\u636e\u90fd\u662f\u6309\u4e00\u5b9a\u7684\u65f6\u95f4\u957f\u5ea6\u7ec4\u7ec7\u7684\uff0c\u6bd4\u5982 5 \u5206\u949f\uff0c1 \u5929\uff0c\u7b49\u7b49\u3002\u56e0\u6b64\uff0c\u5728 omicron \u4e2d\uff0c\u6211\u4eec\u7ecf\u5e38\u4f7f\u7528\u67d0\u4e2a\u65f6\u95f4\u7247\u7ed3\u675f\u7684\u65f6\u95f4\uff0c\u6765\u6807\u8bc6\u8fd9\u4e2a\u65f6\u95f4\u7247\uff0c\u5e76\u5c06\u5176\u79f0\u4e4b\u4e3a\u5e27 (Time Frame)\u3002 omicron \u4e2d\uff0c\u6211\u4eec\u652f\u6301\u7684\u65f6\u95f4\u5e27\u5305\u62ec\u65e5\u5185\u7684\u5206\u949f\u5e27 (FrameType.MIN1), 5 \u5206\u949f\u5e27 (FrameType.MIN5), 15 \u5206\u949f\u5e27\u300130 \u5206\u949f\u5e27\u548c 60 \u5206\u949f\u5e27\uff0c\u4ee5\u53ca\u65e5\u7ebf\u7ea7\u522b\u7684 FrameType.DAY, FrameType.WEEK \u7b49\u3002\u5173\u4e8e\u8be6\u7ec6\u7684\u7c7b\u578b\u8bf4\u660e\uff0c\u8bf7\u53c2\u89c1 coretypes omicron \u63d0\u4f9b\u7684\u4ea4\u6613\u65e5\u5386\u8d77\u59cb\u4e8e 2005 \u5e74 1 \u6708 4 \u65e5\u3002\u63d0\u4f9b\u7684\u884c\u60c5\u6570\u636e\uff0c\u6700\u65e9\u4ece\u8fd9\u4e00\u5929\u8d77\u3002 \u5927\u81f4\u4e0a\uff0comicron \u63d0\u4f9b\u4e86\u4ee5\u4e0b\u65f6\u95f4\u5e27\u64cd\u4f5c\uff1a","title":"2.2. \u4ea4\u6613\u65e5\u5386\u53ca\u65f6\u95f4\u5e27\u8ba1\u7b97"},{"location":"usage/#221-\u4ea4\u6613\u65f6\u95f4\u7684\u504f\u79fb","text":"\u5982\u679c\u4eca\u5929\u662f 2022 \u5e74 5 \u6708 20 \u65e5\uff0c\u60a8\u60f3\u5f97\u5230 100 \u5929\u524d\u7684\u4ea4\u6613\u65e5\uff0c\u5219\u53ef\u4ee5\u4f7f\u7528 day_shift: 1 2 3 4 from omicron import tf dt = datetime . date ( 2022 , 5 , 20 ) tf . 
day_shift ( dt , - 100 ) \u8f93\u51fa\u662f datetime.date(2021, 12, 16)\u3002\u5728\u8fd9\u91cc\uff0cday_shift \u7684\u7b2c\u4e8c\u4e2a\u53c2\u6570 n \u662f\u504f\u79fb\u91cf\uff0c\u5f53\u5b83\u5c0f\u4e8e\u96f6\u65f6\uff0c\u662f\u627e dt \u524d n \u4e2a\u4ea4\u6613\u65e5\uff1b\u5f53\u5b83\u5927\u4e8e\u96f6\u65f6\uff0c\u662f\u627e dt \u4e4b\u540e\u7684 n \u4e2a\u4ea4\u6613\u65e5\u3002 \u6bd4\u5982\u6709\u610f\u601d\u7684\u662f n == 0 \u7684\u65f6\u5019\u3002\u5bf9\u4e0a\u8ff0 dt \uff0cday_shift(dt, 0) \u5f97\u5230\u7684\u4ecd\u7136\u662f\u540c\u4e00\u5929\uff0c\u4f46\u5982\u679c dt \u662f 2022 \u5e74 5 \u6708 21 \u65e5\u662f\u5468\u516d\uff0c\u5219 day_shift(datetime.date(2022, 5, 21)) \u5c06\u8fd4\u56de 2022 \u5e74 5 \u6708 20 \u65e5\u3002\u56e0\u4e3a 5 \u6708 21 \u65e5\u8fd9\u4e00\u5929\u662f\u5468\u516d\uff0c\u4e0d\u662f\u4ea4\u6613\u65e5\uff0cday_shift \u5c06\u8fd4\u56de\u5176\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff0c\u8fd9\u5728\u591a\u6570\u60c5\u51b5\u4e0b\u4f1a\u975e\u5e38\u65b9\u4fbf\u3002 \u9664\u4e86 day_shift \u5916\uff0ctimeframe \u8fd8\u63d0\u4f9b\u4e86\u7c7b\u4f3c\u51fd\u6570\u6bd4\u5982 week_shift \u7b49\u3002\u4e00\u822c\u5730\uff0c\u60a8\u53ef\u4ee5\u7528 shift(dt, n, frame_type) \u6765\u5bf9\u4efb\u610f\u652f\u6301\u7684\u65f6\u95f4\u8fdb\u884c\u504f\u79fb\u3002","title":"2.2.1. \u4ea4\u6613\u65f6\u95f4\u7684\u504f\u79fb"},{"location":"usage/#222-\u8fb9\u754c\u64cd\u4f5c-ceiling-\u548c-floor","text":"\u5f88\u591a\u65f6\u5019\u6211\u4eec\u9700\u8981\u77e5\u9053\u5177\u4f53\u7684\u67d0\u4e2a\u65f6\u95f4\u70b9 (moment) \u6240\u5c5e\u7684\u5e27\u3002\u5982\u679c\u8981\u53d6\u5176\u4e0a\u4e00\u5e27\uff0c\u5219\u53ef\u4ee5\u7528 floor \u64cd\u4f5c\uff0c\u53cd\u4e4b\uff0c\u4f7f\u7528 ceiling\u3002 1 2 tf . ceiling ( datetime . date ( 2005 , 1 , 4 ), FrameType . WEEK ) # OUTPUT IS DATETIME.DATE(2005, 1, 7)","title":"2.2.2. \u8fb9\u754c\u64cd\u4f5c ceiling \u548c floor"},{"location":"usage/#223-\u65f6\u95f4\u8f6c\u6362","text":"\u4e3a\u4e86\u52a0\u5feb\u901f\u5ea6\uff0c\u4ee5\u53ca\u65b9\u4fbf\u6301\u4e45\u5316\u5b58\u50a8\uff0c\u5728 timeframe \u5185\u90e8\uff0c\u6709\u65f6\u5019\u4f7f\u7528\u6574\u6570\u6765\u8868\u793a\u65f6\u95f4\u3002\u6bd4\u5982 20220502 \u8868\u793a\u7684\u662f 2022 \u5e74 5 \u6708 20 \u65e5\uff0c\u800c 202205220931 \u5219\u8868\u793a 2022 \u5e74 5 \u6708 20 \u65e5 9 \u65f6 31 \u5206\u949f\u3002 \u8fd9\u79cd\u8868\u793a\u6cd5\uff0c\u6709\u65f6\u5019\u8981\u6c42\u6211\u4eec\u8fdb\u884c\u4e00\u4e9b\u8f6c\u6362\uff1a 1 2 3 4 5 6 7 8 9 10 # \u5c06\u6574\u6570\u8868\u793a\u7684\u65e5\u671f\u8f6c\u6362\u4e3a\u65e5\u671f tf . int2date ( 20220522 ) # datetime.date(2022, 5, 22) # \u5c06\u6574\u6570\u8868\u793a\u7684\u65f6\u95f4\u8f6c\u6362\u4e3a\u65f6\u95f4 tf . int2time ( 202205220931 ) # datetime.datetime(2022, 5, 22, 9, 31) # \u5c06\u65e5\u671f\u8f6c\u6362\u6210\u4e3a\u6574\u6570 tf . date2int ( datetime . date ( 2022 , 5 , 22 )) # 20220520 # \u5c06\u65f6\u95f4\u8f6c\u6362\u6210\u4e3a\u65f6\u95f4 tf . date2time ( datetime . datetime ( 2022 , 5 , 22 , 9 , 21 )) # 202205220921","title":"2.2.3. \u65f6\u95f4\u8f6c\u6362"},{"location":"usage/#224-\u5217\u51fa\u533a\u95f4\u5185\u7684\u6240\u6709\u65f6\u95f4\u5e27","text":"\u6709\u65f6\u5019\u6211\u4eec\u9700\u8981\u5f97\u5230 start \u548c end \u4e4b\u95f4\u67d0\u4e2a\u65f6\u95f4\u5e27\u7c7b\u578b\u7684\u6240\u6709\u65f6\u95f4\u5e27\uff1a 1 2 3 4 start = arrow . get ( '2020-1-13 10:00' ) . naive end = arrow . get ( '2020-1-13 13:30' ) . naive tf . get_frames ( start , end , FrameType . 
MIN30 ) [ 202001131000 , 202001131030 , 202001131100 , 202001131130 , 202001131330 ] Important \u4e0a\u9762\u7684\u793a\u4f8b\u4e2d\uff0c\u51fa\u73b0\u4e86\u53ef\u80fd\u60a8\u4e0d\u592a\u719f\u6089\u7684 naive \u5c5e\u6027\u3002\u5b83\u6307\u7684\u662f\u53d6\u4e0d\u5e26\u65f6\u533a\u7684\u65f6\u95f4\u3002\u5728 python \u4e2d\uff0c\u65f6\u95f4\u53ef\u4ee5\u5e26\u65f6\u533a\uff08timezone-aware) \u548c\u4e0d\u5e26\u65f6\u533a (naive)\u3002 \u5982\u679c\u60a8\u4f7f\u7528 datetime.datetime(2022, 5, 20)\uff0c\u5b83\u5c31\u662f\u4e0d\u5e26\u65f6\u533a\u7684\uff0c\u9664\u975e\u60a8\u4e13\u95e8\u6307\u5b9a\u65f6\u533a\u3002 \u5728 omicron \u4e2d\uff0c\u6211\u4eec\u5728\u7edd\u5927\u591a\u6570\u60c5\u51b5\u4e0b\uff0c\u4ec5\u4f7f\u7528 naive \u8868\u793a\u7684\u65f6\u95f4\uff0c\u5373\u4e0d\u5e26\u65f6\u533a\uff0c\u5e76\u4e14\u5047\u5b9a\u65f6\u533a\u4e3a\u4e1c\u516b\u533a\uff08\u5373\u5317\u4eac\u65f6\u95f4\uff09\u3002 \u5982\u679c\u60a8\u53ea\u77e5\u9053\u7ed3\u675f\u65f6\u95f4\uff0c\u9700\u8981\u5411\u524d\u53d6 n \u4e2a\u65f6\u95f4\u5e27\uff0c\u5219\u53ef\u4ee5\u4f7f\u7528 get_frames_by_count \u3002 \u5982\u679c\u60a8\u53ea\u662f\u9700\u8981\u77e5\u9053\u5728 start \u548c end \u4e4b\u95f4\uff0c\u603b\u5171\u6709\u591a\u5c11\u4e2a\u5e27\uff0c\u8bf7\u4f7f\u7528 count_frames : 1 2 3 start = datetime . date ( 2019 , 12 , 21 ) end = datetime . date ( 2019 , 12 , 21 ) tf . count_frames ( start , end , FrameType . DAY ) \u8f93\u51fa\u5c06\u662f 1\u3002\u4e0a\u8ff0\u65b9\u6cd5\u8fd8\u6709\u4e00\u4e2a\u5feb\u6377\u65b9\u6cd5\uff0c\u5373 count_day_frames \uff0c\u5e76\u4e14\uff0c\u5bf9 week, month, quaters \u4e5f\u662f\u4e00\u6837\u3002","title":"2.2.4. \u5217\u51fa\u533a\u95f4\u5185\u7684\u6240\u6709\u65f6\u95f4\u5e27"},{"location":"usage/#23-\u8bfb\u53d6\u884c\u60c5\u6570\u636e","text":"\u73b0\u5728\uff0c\u8ba9\u6211\u4eec\u6765\u83b7\u53d6\u4e00\u6bb5\u884c\u60c5\u6570\u636e\uff1a 1 2 3 4 code = \"000001.XSHE\" end = datetime . date ( 2022 , 5 , 20 ) bars = await Stock . get_bars ( code , 10 , FrameType . 
DAY , end ) \u8fd4\u56de\u7684 bars \u5c06\u662f\u4e00\u4e2a numpy structured array, \u5176\u7c7b\u578b\u4e3a bars_dtype \u3002\u4e00\u822c\u5730\uff0c\u5b83\u5305\u62ec\u4e86\u4ee5\u4e0b\u5b57\u6bb5\uff1a 1 2 3 4 5 6 7 8 * frame\uff08\u5e27\uff09 * open\uff08\u5f00\u76d8\u4ef7\uff09 * high\uff08\u6700\u9ad8\u4ef7\uff09 * low\uff08\u6700\u4f4e\u4ef7\uff09 * close\uff08\u6536\u76d8\u4ef7\uff09 * volume\uff08\u6210\u4ea4\u91cf\uff0c\u80a1\u6570\uff09 * amount\uff08\u6210\u4ea4\u989d\uff09 * factor\uff08\u590d\u6743\u56e0\u5b50\uff09 \u7f3a\u7701\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u7684\u6570\u636e\u662f\u5230 end \u4e3a\u6b62\u7684\u524d\u590d\u6743\u6570\u636e\u3002\u4f60\u53ef\u4ee5\u901a\u53c2\u6570 fq = False \u5173\u95ed\u5b83\uff0c\u6765\u83b7\u5f97\u4e0d\u590d\u6743\u6570\u636e\uff0c\u5e76\u4ee5\u6b64\u81ea\u884c\u8ba1\u7b97\u540e\u590d\u6743\u6570\u636e\u3002 \u5982\u679c\u8981\u83b7\u53d6\u67d0\u4e2a\u65f6\u95f4\u6bb5\u7684\u6570\u636e\uff0c\u53ef\u4ee5\u4f7f\u7528 get_bars_in_range \u3002 \u4e0a\u8ff0\u65b9\u6cd5\u603b\u662f\u5c3d\u6700\u5927\u53ef\u80fd\u8fd4\u56de\u5b9e\u65f6\u6570\u636e\uff0c\u5982\u679c end \u4e3a\u5f53\u524d\u65f6\u95f4\u7684\u8bdd\uff0c\u4f46\u7531\u4e8e omega \u540c\u6b65\u5ef6\u65f6\u662f\u4e00\u5206\u949f\uff0c\u6240\u4ee5\u884c\u60c5\u6570\u636e\u6700\u591a\u53ef\u80fd\u6162\u4e00\u5206\u949f\u3002\u5982\u679c\u8981\u83b7\u53d6\u66f4\u5b9e\u65f6\u7684\u6570\u636e\uff0c\u53ef\u4ee5\u901a\u8fc7 get_latest_price \u65b9\u6cd5\u3002 \u8981\u83b7\u6da8\u8dcc\u505c\u4ef7\u683c\u548c\u6807\u5fd7\uff0c\u8bf7\u4f7f\u7528: get_trade_price_limits trade_price_limits_flags trade_price_limit_flags_ex","title":"2.3. \u8bfb\u53d6\u884c\u60c5\u6570\u636e"},{"location":"usage/#24-\u677f\u5757\u6570\u636e","text":"\u63d0\u4f9b\u540c\u82b1\u987a\u677f\u5757\u884c\u4e1a\u677f\u5757\u548c\u6982\u5ff5\u677f\u5757\u6570\u636e\u3002\u5728\u4f7f\u7528\u672c\u6a21\u5757\u4e4b\u524d\uff0c\u9700\u8981\u8fdb\u884c\u521d\u59cb\u5316\uff1a 1 2 3 # \u8bf7\u5148\u8fdb\u884comicron\u521d\u59cb\u5316\uff0c\u7565 from omicron.models.board import Board , BoardType Board . init ( '192.168.100.101' ) \u6b64\u5904\u7684IP\u4e3a\u5b89\u88c5omega\u670d\u52a1\u5668\u7684ip\u3002 \u901a\u8fc7 board_list \u6765\u67e5\u8be2\u6240\u6709\u7684\u677f\u5757\u3002 \u5176\u5b83\u65b9\u6cd5\u8bf7\u53c2\u770b API\u6587\u6863","title":"2.4. 
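A minimal end-to-end use of the board module, combining the `Board.init` call above with `board_list` (the IP is the host running the omega server, and the printed output is illustrative):

```python
from omicron.models.board import Board, BoardType

Board.init("192.168.100.101")  # IP of the omega server
boards = await Board.board_list(BoardType.INDUSTRY)
print(boards[:3])
# e.g. [['881101', '种植业与林业', 24], ['881102', '养殖业', 27], ['881103', '农产品加工', 41]]
```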
\u677f\u5757\u6570\u636e"},{"location":"usage/#3-\u7b56\u7565\u7f16\u5199","text":"omicron \u901a\u8fc7 strategy \u6765\u63d0\u4f9b\u7b56\u7565\u6846\u67b6\u3002\u901a\u8fc7\u8be5\u6846\u67b6\u7f16\u5199\u7684\u7b56\u7565\uff0c\u53ef\u4ee5\u5728\u5b9e\u76d8\u548c\u56de\u6d4b\u4e4b\u95f4\u65e0\u7f1d\u8f6c\u6362 -- \u6839\u636e\u521d\u59cb\u5316\u65f6\u4f20\u5165\u7684\u670d\u52a1\u5668\u4e0d\u540c\u800c\u81ea\u52a8\u5207\u6362\u3002 omicron \u63d0\u4f9b\u4e86\u4e00\u4e2a\u7b80\u5355\u7684 \u53cc\u5747\u7ebf\u7b56\u7565 \u4f5c\u4e3a\u7b56\u7565\u7f16\u5199\u7684\u793a\u8303\uff0c\u53ef\u7ed3\u5408\u5176\u6e90\u7801\uff0c\u4ee5\u53ca\u672c\u6587\u6863\u4e2d\u7684 \u5b8c\u6574\u7b56\u7565\u793a\u4f8b \u5728notebook\u4e2d\u8fd0\u884c\u67e5\u770b\u3002 \u7b56\u7565\u6846\u67b6\u63d0\u4f9b\u4e86\u56de\u6d4b\u9a71\u52a8\u903b\u8f91\u53ca\u4e00\u4e9b\u57fa\u672c\u51fd\u6570\u3002\u8981\u7f16\u5199\u81ea\u5df1\u7684\u7b56\u7565\uff0c\u60a8\u9700\u8981\u4ece\u57fa\u7c7b BaseStrategy \u6d3e\u751f\u51fa\u81ea\u5df1\u7684\u5b50\u7c7b\uff0c\u5e76\u6539\u5199\u5b83\u7684 predict \u65b9\u6cd5\u6765\u5b9e\u73b0\u8c03\u4ed3\u6362\u80a1\u3002 \u7b56\u7565\u6846\u67b6\u4f9d\u8d56\u4e8e zillionare-trader-client \uff0c\u5728\u56de\u6d4b\u65f6\uff0c\u9700\u8981\u6709 zillionare-backtesting \u63d0\u4f9b\u56de\u6d4b\u670d\u52a1\u3002\u5728\u5b9e\u76d8\u65f6\uff0c\u9700\u8981 zilllionare-gm-adaptor \u6216\u8005\u5176\u5b83\u5b9e\u76d8\u4ea4\u6613\u7f51\u5173\u63d0\u4f9b\u670d\u52a1\u3002 \u7b56\u7565\u4ee3\u7801\u53ef\u4ee5\u4e0d\u52a0\u4fee\u6539\uff0c\u5373\u53ef\u4f7f\u7528\u4e8e\u56de\u6d4b\u548c\u5b9e\u76d8\u4e24\u79cd\u573a\u666f\u3002","title":"3. \u7b56\u7565\u7f16\u5199"},{"location":"usage/#31-\u56de\u6d4b\u573a\u666f","text":"\u5b9e\u73b0\u7b56\u7565\u56de\u6d4b\uff0c\u4e00\u822c\u9700\u8981\u8fdb\u884c\u4ee5\u4e0b\u6b65\u9aa4\uff1a 1. \u4ece\u6b64\u57fa\u7c7b\u6d3e\u751f\u51fa\u4e00\u4e2a\u7b56\u7565\u5b50\u7c7b\uff0c\u6bd4\u5982sma.py 2. \u5b50\u7c7b\u9700\u8981\u91cd\u8f7d predict \u65b9\u6cd5\uff0c\u6839\u636e\u5f53\u524d\u4f20\u5165\u7684\u65f6\u95f4\u5e27\u548c\u5e27\u7c7b\u578b\u53c2\u6570\uff0c\u83b7\u53d6\u6570\u636e\u5e76\u8fdb\u884c\u5904\u7406\uff0c\u8bc4\u4f30\u51fa\u4ea4\u6613\u4fe1\u53f7\u3002 3. \u5b50\u7c7b\u6839\u636e\u4ea4\u6613\u4fe1\u53f7\uff0c\u5728 predict \u65b9\u6cd5\u91cc\uff0c\u8c03\u7528\u57fa\u7c7b\u7684 buy \u548c sell \u65b9\u6cd5\u6765\u8fdb\u884c\u4ea4\u6613 4. \u751f\u6210\u7b56\u7565\u5b9e\u4f8b\uff0c\u901a\u8fc7\u5b9e\u4f8b\u8c03\u7528 backtest \u65b9\u6cd5\u6765\u8fdb\u884c\u56de\u6d4b\uff0c\u8be5\u65b9\u6cd5\u5c06\u6839\u636e\u7b56\u7565\u6784\u5efa\u65f6\u6307\u5b9a\u7684\u56de\u6d4b\u8d77\u59cb\u65f6\u95f4\u3001\u7ec8\u6b62\u65f6\u95f4\u3001\u5e27\u7c7b\u578b\uff0c\u9010\u5e27\u751f\u6210\u5404\u4e2a\u65f6\u95f4\u5e27\uff0c\u5e76\u8c03\u7528\u5b50\u7c7b\u7684 predict \u65b9\u6cd5\u3002\u5982\u679c\u8c03\u7528\u65f6\u6307\u5b9a\u4e86 prefetch_stocks \u53c2\u6570\uff0c backtest \u8fd8\u5c06\u8fdb\u884c\u6570\u636e\u9884\u53d6\uff08\u9884\u53d6\u7684\u6570\u636e\u957f\u5ea6\u7531 warmup_peroid \u51b3\u5b9a\uff09\uff0c\u5e76\u5c06\u622a\u6b62\u5230\u5f53\u524d\u56de\u6d4b\u5e27\u65f6\u7684\u6570\u636e\u4f20\u5165\u3002 5. 
\u5728\u4ea4\u6613\u7ed3\u675f\u65f6\uff0c\u8c03\u7528 plot_metrics \u65b9\u6cd5\u6765\u83b7\u53d6\u5982\u4e0b\u6240\u793a\u7684\u56de\u6d4b\u6307\u6807\u56fe \u5982\u4f55\u6d3e\u751f\u5b50\u7c7b\uff0c\u53ef\u4ee5\u53c2\u8003 sma \u6e90\u4ee3\u7801\u3002 1 2 3 4 5 6 7 8 9 10 11 from omicron.strategy.sma import SMAStrategy sma = SMAStrategy ( url = \"\" , # the url of either backtest server, or trade server is_backtest = True , start = datetime . date ( 2023 , 2 , 3 ), end = datetime . date ( 2023 , 4 , 28 ), frame_type = FrameType . DAY , warmup_period = 20 ) await sma . backtest ( prefetch_stocks = [ \"600000.XSHG\" ]) \u5728\u56de\u6d4b\u65f6\uff0c\u5fc5\u987b\u8981\u6307\u5b9a is_backtest=True \u548c start , end \u53c2\u6570\u3002","title":"3.1. \u56de\u6d4b\u573a\u666f"},{"location":"usage/#32-\u56de\u6d4b\u62a5\u544a","text":"\u5728\u56de\u6d4b\u7ed3\u675f\u540e\uff0c\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u65b9\u6cd5\uff0c\u5728notebook\u4e2d\u7ed8\u5236\u56de\u6d4b\u62a5\u544a\uff1a 1 await sma . plot_metrics () \u8fd9\u5c06\u7ed8\u5236\u51fa\u7c7b\u4f3c\u4ee5\u4e0b\u56fe\uff1a","title":"3.2. \u56de\u6d4b\u62a5\u544a"},{"location":"usage/#321-\u5728\u56de\u6d4b\u62a5\u544a\u4e2d\u6dfb\u52a0\u6280\u672f\u6307\u6807","text":"Info Since 2.0.0.a76 \u9996\u5148\uff0c\u6211\u4eec\u53ef\u4ee5\u5728\u7b56\u7565\u7c7b\u7684predict\u65b9\u6cd5\u4e2d\u8ba1\u7b97\u51fa\u6280\u672f\u6307\u6807\uff0c\u5e76\u4fdd\u5b58\u5230\u6210\u5458\u53d8\u91cf\u4e2d\u3002\u5728\u4e0b\u9762\u7684\u793a\u4f8b\u4ee3\u7801\u4e2d\uff0c\u6211\u4eec\u5c06\u6280\u672f\u6307\u6807\u53ca\u5f53\u65f6\u7684\u65f6\u95f4\u4fdd\u5b58\u5230\u4e86\u4e00\u4e2aindicators\u6570\u7ec4\u4e2d\uff08\u6ce8\u610f\u987a\u5e8f\uff01\uff09\uff0c\u7136\u540e\u5728\u56de\u6d4b\u7ed3\u675f\u540e\uff0c\u5728\u8c03\u7528 plot_metrics\u65f6\uff0c\u5c06\u5176\u4f20\u5165\u5373\u53ef\u3002 1 2 3 4 5 6 7 indicators = [ (datetime.date(2021, 2, 3), 20.1), (datetime.date(2021, 2, 4), 20.2), ..., (datetime.date(2021, 4, 1), 20.3) ] await sma.plot_metrics(indicator) \u65f6\u95f4\u53ea\u80fd\u4f7f\u7528\u4e3b\u5468\u671f\u7684\u65f6\u95f4\uff0c\u5426\u5219\u53ef\u80fd\u4ea7\u751f\u65e0\u6cd5\u4e0e\u5750\u6807\u8f74\u5bf9\u9f50\u7684\u60c5\u51b5\u3002 \u52a0\u5165\u7684\u6307\u6807\u9ed8\u8ba4\u53ea\u663e\u793a\u5728legend\u4e2d\uff0c\u5982\u679c\u8981\u663e\u793a\u5728\u4e3b\u56fe\u4e0a\uff0c\u9700\u8981\u70b9\u51fblegend\u8fdb\u884c\u663e\u793a\u3002 \u6307\u6807\u9664\u53ef\u4ee5\u53e0\u52a0\u5728\u4e3b\u56fe\u4e0a\u4e4b\u5916\uff0c\u8fd8\u4f1a\u51fa\u73b0\u5728\u57fa\u51c6\u7ebf\u7684hoverinfo\u4e2d\uff08\u5373\u4f7f\u6307\u6807\u7684\u8ba1\u7b97\u4e0e\u57fa\u51c6\u7ebf\u65e0\u5173\uff09\uff0c\u53c2\u89c1\u4e0a\u56fe\u4e2d\u7684\u201c\u6307\u6807\u201d\u884c\u3002","title":"3.2.1. 
\u5728\u56de\u6d4b\u62a5\u544a\u4e2d\u6dfb\u52a0\u6280\u672f\u6307\u6807"},{"location":"usage/#33-\u4f7f\u7528\u6570\u636e\u9884\u53d6","text":"Info since version 2.0.0-alpha76 \u5728\u56de\u6d4b\u4e2d\uff0c\u53ef\u4ee5\u4f7f\u7528\u4e3b\u5468\u671f\u7684\u6570\u636e\u9884\u53d6\uff0c\u4ee5\u52a0\u5feb\u56de\u6d4b\u901f\u5ea6\u3002\u5de5\u4f5c\u539f\u7406\u5982\u4e0b\uff1a \u5982\u679c\u7b56\u7565\u6307\u5b9a\u4e86 warmup_period \uff0c\u5e76\u5728\u8c03\u7528 backtest \u65f6\u4f20\u5165\u4e86 prefetch_stocks \u53c2\u6570\uff0c\u5219 backtest \u5c06\u4f1a\u5728\u56de\u6d4b\u4e4b\u524d\uff0c\u9884\u53d6\u4ece[start - warmup_period * frame_type, end]\u95f4\u7684portfolio\u884c\u60c5\u6570\u636e\uff0c\u5e76\u5728\u6bcf\u6b21\u8c03\u7528 predict \u65b9\u6cd5\u65f6\uff0c\u901a\u8fc7 barss \u53c2\u6570\uff0c\u5c06[start - warmup_period * frame_type, start + i * frame_type]\u95f4\u7684\u6570\u636e\u4f20\u7ed9 predict \u65b9\u6cd5\u3002\u4f20\u5165\u7684\u6570\u636e\u5df2\u8fdb\u884c\u524d\u590d\u6743\u3002 \u5982\u679c\u5728\u56de\u6d4b\u8fc7\u7a0b\u4e2d\uff0c\u9700\u8981\u5077\u770b\u672a\u6765\u6570\u636e\uff0c\u53ef\u4ee5\u4f7f\u7528peek\u65b9\u6cd5\u3002","title":"3.3. \u4f7f\u7528\u6570\u636e\u9884\u53d6"},{"location":"usage/#34-\u5b8c\u6574sma\u56de\u6d4b\u793a\u4f8b","text":"\u4ee5\u4e0b\u7b56\u7565\u9700\u8981\u5728notebook\u4e2d\u8fd0\u884c\uff0c\u5e76\u4e14\u9700\u8981\u4e8b\u5148\u5b89\u88c5omega\u670d\u52a1\u5668\u540c\u6b65\u6570\u636e\uff0c\u5e76\u6b63\u786e\u914d\u7f6eomicron\u3002 \u8be5\u793a\u4f8b\u5728\u300a\u5927\u5bcc\u7fc1\u91cf\u5316\u8bfe\u7a0b\u300b\u8bfe\u4ef6\u73af\u5883\u4e0b\u53ef\u8fd0\u884c\u3002 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 import cfg4py import omicron import datetime from omicron.strategy.sma import SMAStrategy from coretypes import FrameType cfg = cfg4py . init ( \"/etc/zillionare\" ) await omicron . init () sec = \"600000.XSHG\" start = datetime . date ( 2022 , 1 , 4 ) end = datetime . date ( 2023 , 1 , 1 ) sma = SMAStrategy ( sec , url = cfg . backtest . url , is_backtest = True , start = start , end = end , frame_type = FrameType . DAY , warmup_period = 10 ) await sma . backtest ( portfolio = [ sec ], stop_on_error = False ) await sma . plot_metrics ( sma . indicators )","title":"3.4. \u5b8c\u6574SMA\u56de\u6d4b\u793a\u4f8b"},{"location":"usage/#35-\u5b9e\u76d8","text":"\u5728\u5b9e\u76d8\u73af\u5883\u4e0b\uff0c\u4f60\u8fd8\u9700\u8981\u5728\u5b50\u7c7b\u4e2d\u52a0\u5165\u5468\u671f\u6027\u4efb\u52a1(\u6bd4\u5982\u6bcf\u5206\u949f\u6267\u884c\u4e00\u6b21\uff09\uff0c\u5728\u8be5\u4efb\u52a1\u4e2d\u8c03\u7528 predict \u65b9\u6cd5\u6765\u5b8c\u6210\u4ea4\u6613\uff0c\u5982\u4ee5\u4e0b\u793a\u4f8b\u6240\u793a\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 import cfg4py import omicron import datetime from omicron.strategy.sma import SMAStrategy from coretypes import FrameType from apscheduler.schedulers.asyncio import AsyncIOScheduler cfg = cfg4py . init ( \"/etc/zillionare\" ) await omicron . init () async def daily_job (): sma = SMAStrategy ( sec , url = cfg . traderserver . url , is_backtest = False , frame_type = FrameType . DAY ) bars = await Stock . get_bars ( sma . _sec , 20 , FrameType . DAY ) await sma . predict ( barss = { sma . _sec : bars }) async def main (): scheduler = AsyncIOScheduler () scheduler . add_job ( daily_job , 'cron' , hour = 14 , minute = 55 ) scheduler . 
start () \u7b56\u7565\u4ee3\u7801\u65e0\u987b\u4fee\u6539\u3002 \u8be5\u7b56\u7565\u5c06\u81ea\u52a8\u5728\u6bcf\u5929\u768414\uff1a55\u8fd0\u884c\uff0c\u4ee5\u5224\u65ad\u662f\u5426\u8981\u8fdb\u884c\u8c03\u4ed3\u6362\u80a1\u3002\u60a8\u9700\u8981\u989d\u5916\u5224\u65ad\u5f53\u5929\u662f\u5426\u4e3a\u4ea4\u6613\u65e5\u3002","title":"3.5. \u5b9e\u76d8"},{"location":"usage/#4-\u7ed8\u56fe","text":"omicron \u901a\u8fc7 Candlestick \u63d0\u4f9b\u4e86 k \u7ebf\u7ed8\u5236\u529f\u80fd\u3002\u9ed8\u8ba4\u5730\uff0c\u5b83\u5c06\u7ed8\u5236\u4e00\u5e45\u663e\u793a 120 \u4e2a bar\uff0c\u53ef\u62d6\u52a8\uff08\u4ee5\u52a0\u8f7d\u66f4\u591a bar)\uff0c\u5e76\u4e14\u53ef\u4ee5\u53e0\u52a0\u526f\u56fe\u3001\u4e3b\u56fe\u53e0\u52a0\u5404\u79cd\u6307\u6807\u7684 k \u7ebf\u56fe\uff1a \u4e0a\u56fe\u663e\u793a\u4e86\u81ea\u52a8\u68c0\u6d4b\u51fa\u6765\u7684\u5e73\u53f0\u3002\u6b64\u5916\uff0c\u8fd8\u53ef\u4ee5\u8fdb\u884c\u9876\u5e95\u81ea\u52a8\u68c0\u6d4b\u548c\u6807\u6ce8\u3002 Note \u901a\u8fc7\u6307\u5b9a width \u53c2\u6570\uff0c\u53ef\u4ee5\u5f71\u54cd\u521d\u59cb\u52a0\u8f7d\u7684bar\u7684\u6570\u91cf\u3002 omicron \u901a\u8fc7 metris \u63d0\u4f9b\u56de\u6d4b\u62a5\u544a\u3002\u8be5\u62a5\u544a\u7c7b\u4f3c\u4e8e\uff1a \u5b83\u540c\u6837\u63d0\u4f9b\u53ef\u62d6\u52a8\u7684\u7ed8\u56fe\uff0c\u5e76\u4e14\u5728\u4e70\u5356\u70b9\u4e0a\u53ef\u4ee5\u901a\u8fc7\u9f20\u6807\u60ac\u505c\uff0c\u663e\u793a\u4e70\u5356\u70b9\u4fe1\u606f\u3002 omicron \u7684\u7ed8\u56fe\u529f\u80fd\u53ea\u80fd\u5728 notebook \u4e2d\u4f7f\u7528\u3002","title":"4. \u7ed8\u56fe"},{"location":"usage/#5-\u8bc4\u4f30\u6307\u6807","text":"omicron \u63d0\u4f9b\u4e86 mean_absolute_error \u51fd\u6570\u548c pct_error \u51fd\u6570\u3002\u5b83\u4eec\u5728 scipy \u6216\u8005\u5176\u5b83\u5e93\u4e2d\u4e5f\u80fd\u627e\u5230\uff0c\u4e3a\u4e86\u65b9\u4fbf\u4e0d\u719f\u6089\u8fd9\u4e9b\u7b2c\u4e09\u65b9\u5e93\u7684\u4f7f\u7528\u8005\uff0c\u6211\u4eec\u5185\u7f6e\u4e86\u8fd9\u4e2a\u5e38\u6307\u6807\u3002 \u5bf9\u4e00\u4e9b\u5e38\u89c1\u7684\u7b56\u7565\u8bc4\u4f30\u51fd\u6570\uff0c\u6211\u4eec\u5f15\u7528\u4e86 empyrical \u4e2d\u7684\u76f8\u5173\u51fd\u6570\uff0c\u6bd4\u5982 alpha, beta, shapre_ratio\uff0c calmar_ratio \u7b49\u3002","title":"5. \u8bc4\u4f30\u6307\u6807"},{"location":"usage/#6-talib-\u5e93","text":"\u60a8\u5e94\u8be5\u628a\u8fd9\u91cc\u63d0\u4f9b\u7684\u51fd\u6570\u5f53\u6210\u5b9e\u9a8c\u6027\u7684\u3002\u8fd9\u4e9b API \u4e5f\u53ef\u80fd\u5728\u67d0\u5929\u88ab\u5e9f\u5f03\u3001\u91cd\u547d\u540d\u3001\u4fee\u6539\uff0c\u6216\u8005\u8fd9\u4e9b API \u5e76\u6ca1\u6709\u591a\u5927\u4f5c\u7528\uff0c\u6216\u8005\u5b83\u4eec\u7684\u5b9e\u73b0\u5b58\u5728\u9519\u8bef\u3002 \u4f46\u662f\uff0c\u5982\u679c\u6211\u4eec\u5c06\u6765\u4f1a\u629b\u5f03\u8fd9\u4e9b API \u7684\u8bdd\uff0c\u6211\u4eec\u4e00\u5b9a\u4f1a\u901a\u8fc7 depracted \u65b9\u6cd5\u63d0\u524d\u8fdb\u884c\u8b66\u544a\u3002","title":"6. 
TALIB \u5e93"},{"location":"usage/#7-\u6269\u5c55","text":"Python\u5f53\u4e2d\u7684\u56db\u820d\u4e94\u5165\u7528\u4e8e\u8bc1\u5238\u6295\u8d44\uff0c\u4f1a\u5e26\u6765\u4e25\u91cd\u7684\u95ee\u9898\uff0c\u6bd4\u5982\uff0c\u50cf round(0.3/2) \uff0c\u6211\u4eec\u671f\u671b\u5f97\u5230 0.2 \uff0c\u4f46\u5b9e\u9645\u4e0a\u4f1a\u5f97\u5230 0.1 \u3002\u8fd9\u79cd\u8bef\u5dee\u4e00\u65e6\u53d1\u751f\u6210\u5728\u4e00\u4e9b\u4f4e\u4ef7\u80a1\u8eab\u4e0a\uff0c\u5c06\u4f1a\u5e26\u6765\u975e\u5e38\u5927\u7684\u4e0d\u786e\u5b9a\u6027\u3002\u6bd4\u5982\uff0c1.945\u4fdd\u7559\u4e24\u4f4d\u5c0f\u6570\uff0c\u672c\u6765\u5e94\u8be5\u662f1.95\uff0c\u5982\u679c\u88ab\u8bef\u820d\u5165\u4e3a1.94\uff0c\u5219\u8bef\u5dee\u63a5\u8fd10.5%\uff0c\u8fd9\u5bf9\u6295\u8d44\u6765\u8bf4\u662f\u96be\u4ee5\u63a5\u53d7\u7684\u3002 Info \u5982\u679c\u4e00\u5929\u53ea\u8fdb\u884c\u4e00\u6b21\u4ea4\u6613\uff0c\u4e00\u6b21\u4ea4\u6613\u8bef\u5dee\u4e3a0.5%\uff0c\u4e00\u5e74\u7d2f\u79ef\u4e0b\u6765\uff0c\u8bef\u5dee\u5c06\u8fbe\u52302.5\u500d\u3002 \u6211\u4eec\u5728 decimals \u4e2d\u63d0\u4f9b\u4e86\u9002\u7528\u4e8e\u8bc1\u5238\u4ea4\u6613\u9886\u57df\u7684\u7248\u672c\uff0c math_round \u548c\u4ef7\u683c\u6bd4\u8f83\u51fd\u6570 price_equal \u3002 \u6211\u4eec\u8fd8\u5728 np \u4e2d\uff0c\u5bf9numpy\u4e2d\u7f3a\u5931\u7684\u4e00\u4e9b\u529f\u80fd\u8fdb\u884c\u4e86\u8865\u5145\uff0c\u6bd4\u5982 numpy_append_fields , fill_nan \u7b49\u3002","title":"7. \u6269\u5c55"},{"location":"api/board/","text":"Board \u00b6 Source code in omicron/models/board.py class Board : server_ip : str server_port : int measurement = \"board_bars_1d\" @classmethod def init ( cls , ip : str , port : int = 3180 ): cls . server_ip = ip cls . server_port = port @classmethod async def _rpc_call ( cls , url : str , param : str ): _url = f \"http:// { cls . server_ip } : { cls . server_port } /api/board/ { url } \" async with httpx . AsyncClient () as client : r = await client . post ( _url , json = param , timeout = 10 ) if r . status_code != 200 : logger . error ( f \"failed to post RPC call, { _url } : { param } , response: { r . content . decode () } \" ) return { \"rc\" : r . status_code } rsp = json . loads ( r . content ) return { \"rc\" : 200 , \"data\" : rsp } @classmethod async def board_list ( cls , _btype : BoardType = BoardType . CONCEPT ) -> List [ List ]: \"\"\"\u83b7\u53d6\u677f\u5757\u5217\u8868 Args: _btype: \u677f\u5757\u7c7b\u522b\uff0c\u53ef\u9009\u503c`BoardType.CONCEPT`\u548c`BoardType.INDUSTRY`. Returns: \u677f\u5757\u5217\u8868\u3002\u6bcf\u4e00\u4e2a\u5b50\u5143\u7d20\u4ecd\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u7531\u677f\u5757\u4ee3\u7801(str), \u677f\u5757\u540d\u79f0(str)\u548c\u6210\u5458\u6570\u7ec4\u6210\u3002\u793a\u4f8b\uff1a ``` [ ['881101', '\u79cd\u690d\u4e1a\u4e0e\u6797\u4e1a', 24], ['881102', '\u517b\u6b96\u4e1a', 27], ['881103', '\u519c\u4ea7\u54c1\u52a0\u5de5', 41], ['881104', '\u519c\u4e1a\u670d\u52a1', 16], ] ``` \"\"\" rsp = await cls . _rpc_call ( \"board_list\" , { \"board_type\" : _btype . value }) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] @classmethod async def fuzzy_match_board_name ( cls , pattern : str , _btype : BoardType = BoardType . 
CONCEPT ) -> dict : \"\"\"\u6a21\u7cca\u67e5\u8be2\u677f\u5757\u4ee3\u7801\u7684\u540d\u5b57 Examples: ```python await Board.fuzzy_match_board_name(\"\u6c7d\u8f66\", BoardType.INDUSTRY) # returns: [ '881125 \u6c7d\u8f66\u6574\u8f66', '881126 \u6c7d\u8f66\u96f6\u90e8\u4ef6', '881127 \u975e\u6c7d\u8f66\u4ea4\u8fd0', '881128 \u6c7d\u8f66\u670d\u52a1', '884107 \u6c7d\u8f66\u670d\u52a1\u2162', '884194 \u6c7d\u8f66\u96f6\u90e8\u4ef6\u2162' ] ``` Args: pattern: \u5f85\u67e5\u8be2\u6a21\u5f0f\u4e32 _btype: \u67e5\u8be2\u7c7b\u578b Returns: \u5305\u542b\u4ee5\u4e0bkey\u7684dict: code(\u677f\u5757\u4ee3\u7801), name\uff08\u677f\u5757\u540d\uff09, stocks(\u80a1\u7968\u6570) \"\"\" if not pattern : return [] rsp = await cls . _rpc_call ( \"fuzzy_match_name\" , { \"board_type\" : _btype . value , \"pattern\" : pattern } ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] @classmethod async def board_info_by_id ( cls , board_id : str , full_mode : bool = False ) -> dict : \"\"\"\u901a\u8fc7\u677f\u5757\u4ee3\u7801\u67e5\u8be2\u677f\u5757\u4fe1\u606f\uff08\u540d\u5b57\uff0c\u6210\u5458\u6570\u76ee\u6216\u6e05\u5355\uff09 Examples: ```python board_code = '881128' # \u6c7d\u8f66\u670d\u52a1 \u53ef\u81ea\u884c\u4fee\u6539 board_info = await Board.board_info_by_id(board_code) print(board_info) # \u5b57\u5178\u5f62\u5f0f # returns {'code': '881128', 'name': '\u6c7d\u8f66\u670d\u52a1', 'stocks': 14} ``` Returns: {'code': '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5', 'stocks': 242} or {'code': '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5', 'stocks': [['300916', '\u6717\u7279\u667a\u80fd'], ['300760', '\u8fc8\u745e\u533b\u7597']]} \"\"\" if not board_id : return {} if board_id [ 0 ] == \"3\" : _btype = BoardType . CONCEPT else : _btype = BoardType . INDUSTRY _mode = 0 if full_mode : # \u8f6c\u6362bool\u7c7b\u578b _mode = 1 rsp = await cls . _rpc_call ( \"info\" , { \"board_type\" : _btype . value , \"board_id\" : board_id , \"fullmode\" : _mode }, ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] @classmethod async def board_info_by_security ( cls , security : str , _btype : BoardType = BoardType . CONCEPT ) -> List [ dict ]: \"\"\"\u83b7\u53d6\u80a1\u7968\u6240\u5728\u677f\u5757\u4fe1\u606f\uff1a\u540d\u79f0\uff0c\u4ee3\u7801 Examples: ```python stock_code = '002236' # \u5927\u534e\u80a1\u4efd\uff0c\u80a1\u7968\u4ee3\u7801\u4e0d\u5e26\u5b57\u6bcd\u540e\u7f00 stock_in_board = await Board.board_info_by_security(stock_code, _btype=BoardType.CONCEPT) print(stock_in_board) # returns: [ {'code': '301715', 'name': '\u8bc1\u91d1\u6301\u80a1', 'stocks': 208}, {'code': '308870', 'name': '\u6570\u5b57\u7ecf\u6d4e', 'stocks': 195}, {'code': '308642', 'name': '\u6570\u636e\u4e2d\u5fc3', 'stocks': 188}, ..., {'code': '300008', 'name': '\u65b0\u80fd\u6e90\u6c7d\u8f66', 'stocks': 603} ] ``` Returns: [{'code': '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5'}] \"\"\" if not security : return [] rsp = await cls . _rpc_call ( \"info_by_sec\" , { \"board_type\" : _btype . value , \"security\" : security } ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] @classmethod async def board_filter_members ( cls , included : List [ str ], excluded : List [ str ] = [], _btype : BoardType = BoardType . 
CONCEPT , ) -> List : \"\"\"\u6839\u636e\u677f\u5757\u540d\u7b5b\u9009\u80a1\u7968\uff0c\u53c2\u6570\u4e3ainclude, exclude Fixme: this function doesn't work Raise status 500 Returns: [['300181', '\u4f50\u529b\u836f\u4e1a'], ['600056', '\u4e2d\u56fd\u533b\u836f']] \"\"\" if not included : return [] if excluded is None : excluded = [] rsp = await cls . _rpc_call ( \"board_filter_members\" , { \"board_type\" : _btype . value , \"include_boards\" : included , \"exclude_boards\" : excluded , }, ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] @classmethod async def new_concept_boards ( cls , days : int = 10 ): raise NotImplementedError ( \"not ready\" ) @classmethod async def latest_concept_boards ( n : int = 3 ): raise NotImplementedError ( \"not ready\" ) @classmethod async def new_concept_members ( days : int = 10 , prot : int = None ): raise NotImplementedError ( \"not ready\" ) @classmethod async def board_filter ( cls , industry = None , with_concepts : Optional [ List [ str ]] = None , without = [] ): raise NotImplementedError ( \"not ready\" ) @classmethod async def save_bars ( cls , bars ): client = get_influx_client () logger . info ( \"persisting bars to influxdb: %s , %d secs\" , cls . measurement , len ( bars ) ) await client . save ( bars , cls . measurement , tag_keys = [ \"code\" ], time_key = \"frame\" ) return True @classmethod async def get_last_date_of_bars ( cls , code : str ): # \u884c\u4e1a\u677f\u5757\u56de\u6eaf1\u5e74\u7684\u6570\u636e\uff0c\u6982\u5ff5\u677f\u5757\u53ea\u53d6\u5f53\u5e74\u7684\u6570\u636e code = f \" { code } .THS\" client = get_influx_client () now = datetime . datetime . now () dt_end = tf . day_shift ( now , 0 ) # 250 + 60: \u53ef\u4ee5\u5f97\u523060\u4e2aMA250\u7684\u70b9, \u9ed8\u8ba4K\u7ebf\u56fe120\u4e2a\u8282\u70b9 dt_start = tf . day_shift ( now , - 310 ) flux = ( Flux () . measurement ( cls . measurement ) . range ( dt_start , dt_end ) . bucket ( client . _bucket ) . tags ({ \"code\" : code }) ) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return dt_start ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" ], time_col = \"_time\" , engine = \"c\" ) bars = ds ( data ) secs = bars . to_records ( index = False ) . astype ( \"datetime64[s]\" ) _dt = secs [ - 1 ] . item () return _dt . date () @classmethod async def get_bars_in_range ( cls , code : str , start : Frame , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u4ecb\u4e8e[`start`, `end`]\u95f4\u7684\u884c\u60c5\u8bb0\u5f55 Examples: ```python start = datetime.date(2022, 9, 1) # \u8d77\u59cb\u65f6\u95f4\uff0c \u53ef\u4fee\u6539 end = datetime.date(2023, 3, 1) # \u622a\u6b62\u65f6\u95f4\uff0c \u53ef\u4fee\u6539 board_code = '881128' # \u6c7d\u8f66\u670d\u52a1\uff0c \u53ef\u4fee\u6539 bars = await Board.get_bars_in_range(board_code, start, end) bars[-3:] # \u6253\u5370\u540e3\u6761\u6570\u636e # prints: rec.array([ ('2023-02-27T00:00:00', 1117.748, 1124.364, 1108.741, 1109.525, 1.77208600e+08, 1.13933095e+09, 1.), ('2023-02-28T00:00:00', 1112.246, 1119.568, 1109.827, 1113.43 , 1.32828124e+08, 6.65160380e+08, 1.), ('2023-03-01T00:00:00', 1122.233, 1123.493, 1116.62 , 1123.274, 7.21718910e+07, 3.71172850e+08, 1.) 
], dtype=[('frame', ' ) async classmethod \u00b6 \u6839\u636e\u677f\u5757\u540d\u7b5b\u9009\u80a1\u7968\uff0c\u53c2\u6570\u4e3ainclude, exclude Fixme this function doesn't work Raise status 500 Returns: Type Description List [['300181', '\u4f50\u529b\u836f\u4e1a'], ['600056', '\u4e2d\u56fd\u533b\u836f']] Source code in omicron/models/board.py @classmethod async def board_filter_members ( cls , included : List [ str ], excluded : List [ str ] = [], _btype : BoardType = BoardType . CONCEPT , ) -> List : \"\"\"\u6839\u636e\u677f\u5757\u540d\u7b5b\u9009\u80a1\u7968\uff0c\u53c2\u6570\u4e3ainclude, exclude Fixme: this function doesn't work Raise status 500 Returns: [['300181', '\u4f50\u529b\u836f\u4e1a'], ['600056', '\u4e2d\u56fd\u533b\u836f']] \"\"\" if not included : return [] if excluded is None : excluded = [] rsp = await cls . _rpc_call ( \"board_filter_members\" , { \"board_type\" : _btype . value , \"include_boards\" : included , \"exclude_boards\" : excluded , }, ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] board_info_by_id ( board_id , full_mode = False ) async classmethod \u00b6 \u901a\u8fc7\u677f\u5757\u4ee3\u7801\u67e5\u8be2\u677f\u5757\u4fe1\u606f\uff08\u540d\u5b57\uff0c\u6210\u5458\u6570\u76ee\u6216\u6e05\u5355\uff09 Examples: 1 2 3 4 5 6 board_code = '881128' # \u6c7d\u8f66\u670d\u52a1 \u53ef\u81ea\u884c\u4fee\u6539 board_info = await Board . board_info_by_id ( board_code ) print ( board_info ) # \u5b57\u5178\u5f62\u5f0f # returns { 'code' : '881128' , 'name' : '\u6c7d\u8f66\u670d\u52a1' , 'stocks' : 14 } Returns: Type Description {'code' '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5', 'stocks': 242} or Source code in omicron/models/board.py @classmethod async def board_info_by_id ( cls , board_id : str , full_mode : bool = False ) -> dict : \"\"\"\u901a\u8fc7\u677f\u5757\u4ee3\u7801\u67e5\u8be2\u677f\u5757\u4fe1\u606f\uff08\u540d\u5b57\uff0c\u6210\u5458\u6570\u76ee\u6216\u6e05\u5355\uff09 Examples: ```python board_code = '881128' # \u6c7d\u8f66\u670d\u52a1 \u53ef\u81ea\u884c\u4fee\u6539 board_info = await Board.board_info_by_id(board_code) print(board_info) # \u5b57\u5178\u5f62\u5f0f # returns {'code': '881128', 'name': '\u6c7d\u8f66\u670d\u52a1', 'stocks': 14} ``` Returns: {'code': '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5', 'stocks': 242} or {'code': '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5', 'stocks': [['300916', '\u6717\u7279\u667a\u80fd'], ['300760', '\u8fc8\u745e\u533b\u7597']]} \"\"\" if not board_id : return {} if board_id [ 0 ] == \"3\" : _btype = BoardType . CONCEPT else : _btype = BoardType . INDUSTRY _mode = 0 if full_mode : # \u8f6c\u6362bool\u7c7b\u578b _mode = 1 rsp = await cls . _rpc_call ( \"info\" , { \"board_type\" : _btype . value , \"board_id\" : board_id , \"fullmode\" : _mode }, ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] board_info_by_security ( security , _btype =< BoardType . CONCEPT : 'concept' > ) async classmethod \u00b6 \u83b7\u53d6\u80a1\u7968\u6240\u5728\u677f\u5757\u4fe1\u606f\uff1a\u540d\u79f0\uff0c\u4ee3\u7801 Examples: 1 2 3 4 5 6 7 8 9 10 11 12 stock_code = '002236' # \u5927\u534e\u80a1\u4efd\uff0c\u80a1\u7968\u4ee3\u7801\u4e0d\u5e26\u5b57\u6bcd\u540e\u7f00 stock_in_board = await Board . board_info_by_security ( stock_code , _btype = BoardType . 
CONCEPT ) print ( stock_in_board ) # returns: [ { 'code' : '301715' , 'name' : '\u8bc1\u91d1\u6301\u80a1' , 'stocks' : 208 }, { 'code' : '308870' , 'name' : '\u6570\u5b57\u7ecf\u6d4e' , 'stocks' : 195 }, { 'code' : '308642' , 'name' : '\u6570\u636e\u4e2d\u5fc3' , 'stocks' : 188 }, ... , { 'code' : '300008' , 'name' : '\u65b0\u80fd\u6e90\u6c7d\u8f66' , 'stocks' : 603 } ] Returns: Type Description [{'code' '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5'}] Source code in omicron/models/board.py @classmethod async def board_info_by_security ( cls , security : str , _btype : BoardType = BoardType . CONCEPT ) -> List [ dict ]: \"\"\"\u83b7\u53d6\u80a1\u7968\u6240\u5728\u677f\u5757\u4fe1\u606f\uff1a\u540d\u79f0\uff0c\u4ee3\u7801 Examples: ```python stock_code = '002236' # \u5927\u534e\u80a1\u4efd\uff0c\u80a1\u7968\u4ee3\u7801\u4e0d\u5e26\u5b57\u6bcd\u540e\u7f00 stock_in_board = await Board.board_info_by_security(stock_code, _btype=BoardType.CONCEPT) print(stock_in_board) # returns: [ {'code': '301715', 'name': '\u8bc1\u91d1\u6301\u80a1', 'stocks': 208}, {'code': '308870', 'name': '\u6570\u5b57\u7ecf\u6d4e', 'stocks': 195}, {'code': '308642', 'name': '\u6570\u636e\u4e2d\u5fc3', 'stocks': 188}, ..., {'code': '300008', 'name': '\u65b0\u80fd\u6e90\u6c7d\u8f66', 'stocks': 603} ] ``` Returns: [{'code': '301505', 'name': '\u533b\u7597\u5668\u68b0\u6982\u5ff5'}] \"\"\" if not security : return [] rsp = await cls . _rpc_call ( \"info_by_sec\" , { \"board_type\" : _btype . value , \"security\" : security } ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] board_list ( _btype =< BoardType . CONCEPT : 'concept' > ) async classmethod \u00b6 \u83b7\u53d6\u677f\u5757\u5217\u8868 Parameters: Name Type Description Default _btype BoardType \u677f\u5757\u7c7b\u522b\uff0c\u53ef\u9009\u503c BoardType.CONCEPT \u548c BoardType.INDUSTRY . Returns: Type Description List[List] \u677f\u5757\u5217\u8868\u3002\u6bcf\u4e00\u4e2a\u5b50\u5143\u7d20\u4ecd\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u7531\u677f\u5757\u4ee3\u7801(str), \u677f\u5757\u540d\u79f0(str)\u548c\u6210\u5458\u6570\u7ec4\u6210\u3002\u793a\u4f8b\uff1a 1 2 3 4 5 6 [ ['881101', '\u79cd\u690d\u4e1a\u4e0e\u6797\u4e1a', 24], ['881102', '\u517b\u6b96\u4e1a', 27], ['881103', '\u519c\u4ea7\u54c1\u52a0\u5de5', 41], ['881104', '\u519c\u4e1a\u670d\u52a1', 16], ] Source code in omicron/models/board.py @classmethod async def board_list ( cls , _btype : BoardType = BoardType . CONCEPT ) -> List [ List ]: \"\"\"\u83b7\u53d6\u677f\u5757\u5217\u8868 Args: _btype: \u677f\u5757\u7c7b\u522b\uff0c\u53ef\u9009\u503c`BoardType.CONCEPT`\u548c`BoardType.INDUSTRY`. Returns: \u677f\u5757\u5217\u8868\u3002\u6bcf\u4e00\u4e2a\u5b50\u5143\u7d20\u4ecd\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u7531\u677f\u5757\u4ee3\u7801(str), \u677f\u5757\u540d\u79f0(str)\u548c\u6210\u5458\u6570\u7ec4\u6210\u3002\u793a\u4f8b\uff1a ``` [ ['881101', '\u79cd\u690d\u4e1a\u4e0e\u6797\u4e1a', 24], ['881102', '\u517b\u6b96\u4e1a', 27], ['881103', '\u519c\u4ea7\u54c1\u52a0\u5de5', 41], ['881104', '\u519c\u4e1a\u670d\u52a1', 16], ] ``` \"\"\" rsp = await cls . _rpc_call ( \"board_list\" , { \"board_type\" : _btype . value }) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] fuzzy_match_board_name ( pattern , _btype =< BoardType . 
CONCEPT : 'concept' > ) async classmethod \u00b6 \u6a21\u7cca\u67e5\u8be2\u677f\u5757\u4ee3\u7801\u7684\u540d\u5b57 Examples: 1 2 3 4 5 6 7 8 9 10 11 await Board . fuzzy_match_board_name ( \"\u6c7d\u8f66\" , BoardType . INDUSTRY ) # returns: [ '881125 \u6c7d\u8f66\u6574\u8f66' , '881126 \u6c7d\u8f66\u96f6\u90e8\u4ef6' , '881127 \u975e\u6c7d\u8f66\u4ea4\u8fd0' , '881128 \u6c7d\u8f66\u670d\u52a1' , '884107 \u6c7d\u8f66\u670d\u52a1\u2162' , '884194 \u6c7d\u8f66\u96f6\u90e8\u4ef6\u2162' ] Parameters: Name Type Description Default pattern str \u5f85\u67e5\u8be2\u6a21\u5f0f\u4e32 required _btype BoardType \u67e5\u8be2\u7c7b\u578b Returns: Type Description \u5305\u542b\u4ee5\u4e0bkey\u7684dict code(\u677f\u5757\u4ee3\u7801), name\uff08\u677f\u5757\u540d\uff09, stocks(\u80a1\u7968\u6570) Source code in omicron/models/board.py @classmethod async def fuzzy_match_board_name ( cls , pattern : str , _btype : BoardType = BoardType . CONCEPT ) -> dict : \"\"\"\u6a21\u7cca\u67e5\u8be2\u677f\u5757\u4ee3\u7801\u7684\u540d\u5b57 Examples: ```python await Board.fuzzy_match_board_name(\"\u6c7d\u8f66\", BoardType.INDUSTRY) # returns: [ '881125 \u6c7d\u8f66\u6574\u8f66', '881126 \u6c7d\u8f66\u96f6\u90e8\u4ef6', '881127 \u975e\u6c7d\u8f66\u4ea4\u8fd0', '881128 \u6c7d\u8f66\u670d\u52a1', '884107 \u6c7d\u8f66\u670d\u52a1\u2162', '884194 \u6c7d\u8f66\u96f6\u90e8\u4ef6\u2162' ] ``` Args: pattern: \u5f85\u67e5\u8be2\u6a21\u5f0f\u4e32 _btype: \u67e5\u8be2\u7c7b\u578b Returns: \u5305\u542b\u4ee5\u4e0bkey\u7684dict: code(\u677f\u5757\u4ee3\u7801), name\uff08\u677f\u5757\u540d\uff09, stocks(\u80a1\u7968\u6570) \"\"\" if not pattern : return [] rsp = await cls . _rpc_call ( \"fuzzy_match_name\" , { \"board_type\" : _btype . value , \"pattern\" : pattern } ) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] get_bars_in_range ( code , start , end = None ) async classmethod \u00b6 \u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u4ecb\u4e8e[ start , end ]\u95f4\u7684\u884c\u60c5\u8bb0\u5f55 Examples: 1 2 3 4 5 6 7 8 9 10 11 12 13 start = datetime . date ( 2022 , 9 , 1 ) # \u8d77\u59cb\u65f6\u95f4\uff0c \u53ef\u4fee\u6539 end = datetime . date ( 2023 , 3 , 1 ) # \u622a\u6b62\u65f6\u95f4\uff0c \u53ef\u4fee\u6539 board_code = '881128' # \u6c7d\u8f66\u670d\u52a1\uff0c \u53ef\u4fee\u6539 bars = await Board . get_bars_in_range ( board_code , start , end ) bars [ - 3 :] # \u6253\u5370\u540e3\u6761\u6570\u636e # prints: rec . array ([ ( '2023-02-27T00:00:00' , 1117.748 , 1124.364 , 1108.741 , 1109.525 , 1.77208600e+08 , 1.13933095e+09 , 1. ), ( '2023-02-28T00:00:00' , 1112.246 , 1119.568 , 1109.827 , 1113.43 , 1.32828124e+08 , 6.65160380e+08 , 1. ), ( '2023-03-01T00:00:00' , 1122.233 , 1123.493 , 1116.62 , 1123.274 , 7.21718910e+07 , 3.71172850e+08 , 1. 
) ], dtype = [( 'frame' , ' BarsArray : \"\"\"\u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u4ecb\u4e8e[`start`, `end`]\u95f4\u7684\u884c\u60c5\u8bb0\u5f55 Examples: ```python start = datetime.date(2022, 9, 1) # \u8d77\u59cb\u65f6\u95f4\uff0c \u53ef\u4fee\u6539 end = datetime.date(2023, 3, 1) # \u622a\u6b62\u65f6\u95f4\uff0c \u53ef\u4fee\u6539 board_code = '881128' # \u6c7d\u8f66\u670d\u52a1\uff0c \u53ef\u4fee\u6539 bars = await Board.get_bars_in_range(board_code, start, end) bars[-3:] # \u6253\u5370\u540e3\u6761\u6570\u636e # prints: rec.array([ ('2023-02-27T00:00:00', 1117.748, 1124.364, 1108.741, 1109.525, 1.77208600e+08, 1.13933095e+09, 1.), ('2023-02-28T00:00:00', 1112.246, 1119.568, 1109.827, 1113.43 , 1.32828124e+08, 6.65160380e+08, 1.), ('2023-03-01T00:00:00', 1122.233, 1123.493, 1116.62 , 1123.274, 7.21718910e+07, 3.71172850e+08, 1.) ], dtype=[('frame', ' List [ List ]: \"\"\"\u83b7\u53d6\u677f\u5757\u5217\u8868 Args: _btype: \u677f\u5757\u7c7b\u522b\uff0c\u53ef\u9009\u503c`BoardType.CONCEPT`\u548c`BoardType.INDUSTRY`. Returns: \u677f\u5757\u5217\u8868\u3002\u6bcf\u4e00\u4e2a\u5b50\u5143\u7d20\u4ecd\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u7531\u677f\u5757\u4ee3\u7801(str), \u677f\u5757\u540d\u79f0(str)\u548c\u6210\u5458\u6570\u7ec4\u6210\u3002\u793a\u4f8b\uff1a ``` [ ['881101', '\u79cd\u690d\u4e1a\u4e0e\u6797\u4e1a', 24], ['881102', '\u517b\u6b96\u4e1a', 27], ['881103', '\u519c\u4ea7\u54c1\u52a0\u5de5', 41], ['881104', '\u519c\u4e1a\u670d\u52a1', 16], ] ``` \"\"\" rsp = await cls . _rpc_call ( \"board_list\" , { \"board_type\" : _btype . value }) if rsp [ \"rc\" ] != 200 : return { \"status\" : 500 , \"msg\" : \"httpx RPC call failed\" } return rsp [ \"data\" ] @classmethod async def fuzzy_match_board_name ( cls , pattern : str , _btype : BoardType = BoardType . CONCEPT ) -> dict : \"\"\"\u6a21\u7cca\u67e5\u8be2\u677f\u5757\u4ee3\u7801\u7684\u540d\u5b57 Examples: ```python await Board.fuzzy_match_board_name(\"\u6c7d\u8f66\", BoardType.INDUSTRY) # returns: [ '881125 \u6c7d\u8f66\u6574\u8f66', '881126 \u6c7d\u8f66\u96f6\u90e8\u4ef6', '881127 \u975e\u6c7d\u8f66\u4ea4\u8fd0', '881128 \u6c7d\u8f66\u670d\u52a1', '884107 \u6c7d\u8f66\u670d\u52a1\u2162', '884194 \u6c7d\u8f66\u96f6\u90e8\u4ef6\u2162' ] ``` Args: pattern: \u5f85\u67e5\u8be2\u6a21\u5f0f\u4e32 _btype: \u67e5\u8be2\u7c7b\u578b Returns: \u5305\u542b\u4ee5\u4e0bkey\u7684dict: code(\u677f\u5757\u4ee3\u7801), name\uff08\u677f\u5757\u540d\uff09, stocks(\u80a1\u7968\u6570) \"\"\" if not pattern : return [] rsp = await cls . _rpc_call ( \"fuzzy_match_name\" , { \"board_type\" : _btype . 
`board_info_by_id(board_id, full_mode=False)` async classmethod

Look up a board's information (its name plus either the member count or the full member list) by board code. Codes starting with `"3"` are treated as concept boards; all other codes are treated as industry boards.

Examples:

```python
board_code = '881128'  # 汽车服务, adjust as needed
board_info = await Board.board_info_by_id(board_code)
print(board_info)  # a dict
# returns {'code': '881128', 'name': '汽车服务', 'stocks': 14}
```

Returns: with `full_mode=False`, a dict such as `{'code': '301505', 'name': '医疗器械概念', 'stocks': 242}`; with `full_mode=True`, `stocks` holds the member list instead, e.g. `{'code': '301505', 'name': '医疗器械概念', 'stocks': [['300916', '朗特智能'], ['300760', '迈瑞医疗']]}`. An empty dict is returned when `board_id` is empty.

Source code in omicron/models/board.py.

`board_info_by_security(security, _btype=BoardType.CONCEPT)` async classmethod

Get the boards (code and name) that a stock belongs to.

Examples:

```python
stock_code = '002236'  # 大华股份; stock codes are passed without the exchange suffix
stock_in_board = await Board.board_info_by_security(stock_code, _btype=BoardType.CONCEPT)
print(stock_in_board)

# returns:
# [{'code': '301715', 'name': '证金持股', 'stocks': 208},
#  {'code': '308870', 'name': '数字经济', 'stocks': 195},
#  {'code': '308642', 'name': '数据中心', 'stocks': 188},
#  ...,
#  {'code': '300008', 'name': '新能源汽车', 'stocks': 603}]
```

Returns: a list of dicts, e.g. `[{'code': '301505', 'name': '医疗器械概念'}]`. An empty list is returned when `security` is empty.

Source code in omicron/models/board.py.
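A hedged sketch combining the two lookups above to answer "which concept boards do two stocks share?" and "who are a board's members?". The stock codes are illustrative, and `Board.init`/`omicron.init()` are assumed to have been called already.

```python
from omicron.models.board import Board, BoardType  # BoardType import path is an assumption


async def shared_concepts(code_a: str, code_b: str) -> set:
    """Names of the concept boards that both stocks belong to (sketch)."""
    boards_a = await Board.board_info_by_security(code_a, _btype=BoardType.CONCEPT)
    boards_b = await Board.board_info_by_security(code_b, _btype=BoardType.CONCEPT)
    return {b["name"] for b in boards_a} & {b["name"] for b in boards_b}


async def members_of(board_code: str) -> list:
    """[code, name] pairs of a board's members, using full_mode (sketch)."""
    info = await Board.board_info_by_id(board_code, full_mode=True)
    return info.get("stocks", [])

# e.g. inside an event loop:
#   common = await shared_concepts("002236", "300760")
#   members = await members_of("881128")
```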
`board_filter_members(included, excluded=[], _btype=BoardType.CONCEPT)` async classmethod

Filter stocks by board membership: keep members of the boards listed in `included` and drop members of the boards listed in `excluded`.

Fixme: this function doesn't work at the moment; the server raises status 500.

Returns: a list of `[code, name]` pairs, e.g. `[['300181', '佐力药业'], ['600056', '中国医药']]`. An empty list is returned when `included` is empty.

Source code in omicron/models/board.py.

Other members

- `new_concept_boards(days=10)`, `latest_concept_boards(n=3)`, `new_concept_members(days=10, prot=None)` and `board_filter(industry=None, with_concepts=None, without=[])` are placeholders that currently raise `NotImplementedError("not ready")`.
- `save_bars(bars)` async classmethod: persists board bars to InfluxDB under the `board_bars_1d` measurement (tag key `code`, time key `frame`) and returns `True`.
- `get_last_date_of_bars(code)` async classmethod: returns the date of the last persisted bar for board `{code}.THS`. It queries roughly the last 310 trading days (250 + 60 days yields 60 MA250 points for a default 120-node candlestick chart; industry boards are backfilled for about a year, concept boards only for the current year) and returns the start of that window when nothing has been stored yet.

Source code in omicron/models/board.py.
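The two persistence helpers above suggest a simple incremental-update loop. The sketch below assumes a hypothetical `fetch_board_bars()` data source (not part of omicron) and an already-initialized InfluxDB connection.

```python
import datetime

from omicron.models.board import Board


async def sync_board(code: str):
    """Incrementally persist daily bars for one board (sketch, assumptions noted above)."""
    last = await Board.get_last_date_of_bars(code)
    start = last + datetime.timedelta(days=1)
    today = datetime.date.today()
    if start > today:
        return  # already up to date

    bars = await fetch_board_bars(code, start, today)  # hypothetical fetcher, not part of omicron
    if len(bars):
        await Board.save_bars(bars)
```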
decimals

`math_round(x, digits)`

Because of how floating-point numbers are represented, the `round` function of many languages does not match mathematical rounding; this helper does. It is implemented as `int(x * 10**digits + copysign(0.5, x)) / 10**digits`.

Parameters:

- `x` (float): the number to round. Required.
- `digits` (int): number of decimal places to keep. Required.

Source code in omicron/extensions/decimals.py.

`price_equal(x, y) -> bool`

Tell whether two prices are equal: both are rounded to two decimal places with `math_round` and considered equal when they differ by less than 1e-2.

Parameters:

- `x`: price 1. Required.
- `y`: price 2. Required.

Returns: True if the prices are equal, otherwise False.

Source code in omicron/extensions/decimals.py.

np

Extension functions related to numpy.

`array_math_round(arr, digits) -> np.ndarray`

Round a one-dimensional array element-wise using mathematical rounding. `numpy.around` is not mathematical rounding: rounding 1.5 and 2.5 both yields 2, while financial calculations need true half-up rounding. A bare float is delegated to `decimals.math_round`; arrays are handled through `np.vectorize`.

Parameters:

- `arr` (ArrayLike): the input array. Required.
- `digits` (int): number of decimal places to keep. Required.

Returns: np.ndarray: the rounded array.

Source code in omicron/extensions/np.py.

`array_price_equal(price1, price2) -> np.ndarray`

Element-wise check of whether two price arrays are equal: both are rounded to two decimals with `array_math_round` and compared with a 1e-2 tolerance.

Parameters:

- `price1` (ArrayLike): price array. Required.
- `price2` (ArrayLike): price array. Required.

Returns: np.ndarray: the boolean comparison result.

Source code in omicron/extensions/np.py.

`bars_since(condition, default=None) -> int`

Return the number of bars since the `condition` sequence was last `True`, or `default` if it never was.

```python
>>> condition = [True, True, False]
>>> bars_since(condition)
1
```

Source code in omicron/extensions/np.py.
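A small illustration of the rounding helpers above; the import paths mirror the module names shown here and are an assumption about the public API surface.

```python
import numpy as np

from omicron.extensions.decimals import math_round, price_equal
from omicron.extensions.np import array_math_round, array_price_equal

# Built-in round() rounds half to even; math_round rounds half away from zero.
print(round(0.125, 2), math_round(0.125, 2))       # 0.12 0.13

# Same contrast element-wise: np.around vs array_math_round.
arr = np.array([1.5, 2.5, 3.5])
print(np.around(arr))                              # [2. 2. 4.]
print(array_math_round(arr, 0))                    # [2. 3. 4.]

# Prices are compared after rounding to two decimals.
print(price_equal(10.0, 10.004))                   # True
print(array_price_equal(np.array([10.0]), np.array([10.02])))  # [False]
```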
`bin_cut(arr, n)`

Split the array `arr` into `n` parts, dealing elements round-robin; empty parts are dropped. (Todo in the source: use padding + reshape to boost performance.)

Parameters:

- `arr` (list): the array to split. Required.
- `n` (int): number of parts. Required.

Returns: a list of sub-lists.

Source code in omicron/extensions/np.py.

`count_between(arr, start, end)`

Count how many elements of `arr` lie between the element `start` and the element `end`. `arr` must be sorted; both boundary points are included in the count.

Examples:

```python
>>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
>>> count_between(arr, 20050104, 20050111)
6
>>> count_between(arr, 20050104, 20050109)
4
```

Source code in omicron/extensions/np.py.

`dataframe_to_structured_array(df, dtypes=None) -> ArrayLike`

Convert a DataFrame (all columns, and possibly the index) to a numpy structured array. `len(dtypes)` should equal either `len(df.columns)` or `len(df.columns) + 1`; in the latter case `df.index` is included in the converted array. If `dtypes` is `None`, the dtypes of `df` are used and the index is not converted.

Parameters:

- `df` (DataFrame): the frame to convert. Required.
- `dtypes` (List[Tuple]): target field dtypes. Defaults to None.

Returns: ArrayLike: the converted structured array.

Source code in omicron/extensions/np.py.
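A hedged usage sketch for `bin_cut`: splitting a list of security codes into batches, e.g. before fanning out concurrent requests. The codes are illustrative.

```python
from omicron.extensions.np import bin_cut

codes = ["000001", "000002", "600000", "600519", "300750"]

# Deal the codes round-robin into two batches.
print(bin_cut(codes, 2))
# [['000001', '600000', '300750'], ['000002', '600519']]
```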
`dict_to_numpy_array(d, dtype) -> np.array`

Convert a dictionary to a numpy structured array.

Examples:

```python
>>> d = {"aaron": 5, "jack": 6}
>>> dtype = [("name", "S8"), ("score", "<i4")]
>>> dict_to_numpy_array(d, dtype)
array([(b'aaron', 5), (b'jack', 6)], dtype=[('name', 'S8'), ('score', '<i4')])
```

Source code in omicron/extensions/np.py.

`fill_nan(ts)`

Replace NaNs in `ts` with the previous value. If `ts` starts with NaN, the first non-NaN value is used instead; if every element is NaN, a ValueError is raised.

Examples:

```python
>>> arr = np.arange(6, dtype=np.float32)
>>> arr[3:5] = np.NaN
>>> fill_nan(arr)
array([0., 1., 2., 2., 2., 5.], dtype=float32)

>>> arr = np.arange(6, dtype=np.float32)
>>> arr[0:2] = np.nan
>>> fill_nan(arr)
array([2., 2., 2., 3., 4., 5.], dtype=float32)
```

Parameters:

- `ts` (np.array): the input array. Required.

Source code in omicron/extensions/np.py.

`find_runs(x) -> Tuple[np.ndarray, np.ndarray, np.ndarray]`

Find runs of consecutive identical items in an array. Only 1-D arrays are supported.

Parameters:

- `x` (ArrayLike): the sequence to find runs in. Required.

Returns: a tuple of unique values, start indices, and lengths of the runs.

Source code in omicron/extensions/np.py.
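`find_runs` has no example in its docstring, so here is a hedged one: the up/down labelling is made up, but the return layout follows the description above.

```python
import numpy as np

from omicron.extensions.np import find_runs

# 1 marks an up day, 0 a down day; find the streaks.
direction = np.array([1, 1, 1, 0, 0, 1, 1])
values, starts, lengths = find_runs(direction)

print(values)   # [1 0 1]
print(starts)   # [0 3 5]
print(lengths)  # [3 2 2]
```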
`floor(arr, item)`

In the sorted array `arr`, find the value that is less than or equal to `item`. If `item` is smaller than every element, `arr[0]` is returned; if it is larger than every element, `arr[-1]` is returned. Unlike `minute_frames_floor`, this function does no wrap-around or carrying.

Examples:

```python
>>> a = [3, 6, 9]
>>> floor(a, -1)
3
>>> floor(a, 9)
9
>>> floor(a, 10)
9
>>> floor(a, 4)
3
```

Source code in omicron/extensions/np.py.

`join_by_left(key, r1, r2, mask=True)`

Left-join `r1` and `r2` by `key`. If `r1` contains rows that have no match in `r2`, the fields coming from `r2` are masked (or filled with random values when `mask=False`). Same as `numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter')`, but allows `r1` to have duplicate keys. [Reference: stackoverflow](https://stackoverflow.com/a/53261882/13395693)

Examples:

```python
>>> # to join the following
>>> # [[ 1, 2],
>>> #  [ 1, 3],    x   [[1, 5],
>>> #  [ 2, 3]]         [4, 7]]
>>> # only the first two rows on the left have a match
>>> r1 = np.array([(1, 2), (1, 3), (2, 3)], dtype=[('seq', 'i4'), ('score', 'i4')])
>>> r2 = np.array([(1, 5), (4, 7)], dtype=[('seq', 'i4'), ('age', 'i4')])
>>> joined = join_by_left('seq', r1, r2)
>>> print(joined)
[(1, 2, 5) (1, 3, 5) (2, 3, --)]
>>> joined[2][2]
masked
>>> joined.tolist()[2][2] == None
True
```

Parameters:

- `key`: the join key. Required.
- `r1`: data set 1. Required.
- `r2`: data set 2. Required.

Returns: a numpy (masked) array.

Source code in omicron/extensions/np.py.
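A hedged, more domain-flavoured use of `join_by_left`: attaching a sparse per-day flag table to a bars-like record array. Field names and values are invented for the illustration.

```python
import numpy as np

from omicron.extensions.np import join_by_left

bars = np.array(
    [(20230301, 10.0), (20230302, 10.5), (20230303, 11.0)],
    dtype=[("frame", "i8"), ("close", "f4")],
)
flags = np.array(
    [(20230301, 1), (20230303, 0)],
    dtype=[("frame", "i8"), ("limit_up", "i4")],
)

joined = join_by_left("frame", bars, flags)
# The 20230302 row has no match, so its limit_up field comes back masked:
print(joined["limit_up"])   # roughly: [1 -- 0]
```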
`numpy_append_fields(base, names, data, dtypes) -> np.ndarray`

Add new fields to the existing structured array `base`. This re-implements `numpy.lib.recfunctions.rec_append_fields`, which cannot handle `data` whose elements are of Object type. The new columns are appended to the right of the existing ones.

Examples:

```python
>>> # add a single field
>>> import numpy
>>> old = np.array([i for i in range(3)], dtype=[('col1', '<f4')])
>>> new_list = [2 * i for i in range(3)]
>>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '<f4')])
>>> print(res)
[(0., 0.) (1., 2.) (2., 4.)]

>>> # add several fields at once
>>> data = [res['col1'].tolist(), res['new_col'].tolist()]
>>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', '<f4'), ('col4', '<f4')]))
[(0., 0., 0.) (1., 1., 2.) (2., 2., 4.)]
```

Parameters:

- `base`: the structured array to extend. Required.
- `names`: name(s) of the new field(s). Required.
- `data` (list): the new columns' data. Required.
- `dtypes`: dtypes of the new fields. Required.

Returns: np.ndarray with the appended fields.

Source code in omicron/extensions/np.py.
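A hedged sketch of the typical use in this codebase's domain: attaching a derived indicator column to a bars-like structured array. The moving-average values are made up.

```python
import numpy as np

from omicron.extensions.np import numpy_append_fields

bars = np.array([(10.0,), (10.5,), (11.0,)], dtype=[("close", "<f4")])
ma2 = [np.nan, 10.25, 10.75]  # pretend 2-period moving average

with_ma = numpy_append_fields(bars, "ma2", ma2, [("ma2", "<f8")])
print(with_ma.dtype.names)  # ('close', 'ma2')
print(with_ma["ma2"])       # [  nan 10.25 10.75]
```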
`remove_nan(ts) -> np.ndarray`

Remove NaNs from `ts`.

Parameters:

- `ts` (np.array): the input array. Required.

Returns: the array without NaNs.

Source code in omicron/extensions/np.py.

`replace_zero(ts, replacement=None) -> np.ndarray`

Replace zeros in `ts` with the previous value; this is commonly needed when handling volume data. If `replacement` is given, zeros are replaced with it instead. If `ts` starts with zero, the first non-zero value is used; if every element is zero, a ValueError is raised.

Source code in omicron/extensions/np.py.

`rolling(x, win, func)`

Apply `func` over a sliding window of length `win` across the sequence `x`. If `func` is one of argmax, argmin, max, mean, median, min, rank, std, sum or var, prefer bottleneck's move_argmax, move_argmin, move_max, move_mean, move_median, move_min, move_rank, move_std, move_sum and move_var instead; those perform much better.

Parameters:

- `x`: the input sequence. Required.
- `win`: window length. Required.
- `func`: the function applied to each window. Required.

Returns: an np.ndarray of the per-window results.

Source code in omicron/extensions/np.py.
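A hedged example of `rolling` with a custom reducer (for the common statistics named above, bottleneck's move_* functions are the better choice, as the docstring says).

```python
import numpy as np

from omicron.extensions.np import rolling

closes = np.array([1.0, 2.0, 4.0, 8.0, 16.0])

# Range (max - min) of each 3-bar window -- a reducer bottleneck does not ship.
print(rolling(closes, 3, np.ptp))   # [ 3.  6. 12.]
```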
`shift(arr, start, offset)`

In the sorted numpy array `arr`, locate `start` (or the element closest to it) and return the element `offset` positions away. A positive `offset` moves forward, a negative one moves backward; if the starting point already falls beyond the right edge and the shift goes further right, `start` itself is returned.

Examples:

```python
>>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111]
>>> shift(arr, 20050104, 1)
20050105
>>> shift(arr, 20050105, -1)
20050104
>>> # start is already beyond the right edge and we shift right: return start
>>> shift(arr, 20050120, 1)
20050120
```

Parameters:

- `arr`: a sorted array. Required.
- `start`: any value numpy can compare against `arr`. Required.
- `offset` (int): number of positions to move. Required.

Returns: the element found after shifting.

Source code in omicron/extensions/np.py.

`smallest_n_argpos(ts, n) -> np.array`

Get the positions of the smallest `n` elements, ordered from smallest to largest.

Examples:

```python
>>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
array([8, 7])
```

Parameters:

- `ts` (np.array): the input array. Required.
- `n` (int): how many of the smallest elements to take. Required.

Returns: np.array of positions.

Source code in omicron/extensions/np.py.
`to_pydatetime(tm) -> datetime.datetime`

Convert a numpy.datetime64 object into a Python datetime object.

`numpy.ndarray.item()` can convert any numpy object to the corresponding Python object; prefer `.item()` wherever it applies rather than this function. For example:

```python
arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]')
arr.item(0)   # output is datetime.datetime(2022, 9, 8, 0, 0)
arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0)
```

Parameters:

- `tm`: the input numpy datetime object. Required.

Returns: the corresponding Python datetime object.

Deprecated since 2.0.0: use `tm.item()` instead.

Source code in omicron/extensions/np.py.

`top_n_argpos(ts, n) -> np.array`

Get the positions of the largest `n` elements, ordered from largest to smallest (NaNs are treated as negative infinity).

Examples:

```python
>>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2)
array([3, 4])
```

Parameters:

- `ts` (np.array): the input array. Required.
- `n` (int): how many of the largest elements to take. Required.

Returns: np.array of positions.

Source code in omicron/extensions/np.py.
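A hedged sketch tying the two helpers above together: locate the two highest closes in a small series and turn their numpy timestamps into plain datetimes with `.item()`, as the deprecation note recommends. All values are made up.

```python
import numpy as np

from omicron.extensions.np import top_n_argpos

closes = np.array([9.8, np.nan, 10.4, 10.1, 10.9, 10.2])
frames = np.array(
    ["2023-03-01", "2023-03-02", "2023-03-03", "2023-03-06", "2023-03-07", "2023-03-08"],
    dtype="datetime64[s]",
)

for pos in top_n_argpos(closes, 2):          # positions of the two highest closes
    print(frames[pos].item(), closes[pos])   # .item() yields datetime.datetime
# 2023-03-07 00:00:00 10.9
# 2023-03-03 00:00:00 10.4
```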
argsort ( ts )[: n ]","title":"smallest_n_argpos()"},{"location":"api/extensions/#omicron.extensions.np.to_pydatetime","text":"\u5c06numpy.datetime64\u5bf9\u8c61\u8f6c\u6362\u6210\u4e3apython\u7684datetime\u5bf9\u8c61 numpy.ndarray.item()\u65b9\u6cd5\u53ef\u7528\u4ee5\u5c06\u4efb\u4f55numpy\u5bf9\u8c61\u8f6c\u6362\u6210python\u5bf9\u8c61\uff0c\u63a8\u8350\u5728\u4efb\u4f55\u9002\u7528\u7684\u5730\u65b9\u4f7f\u7528.item()\u65b9\u6cd5\uff0c\u800c\u4e0d\u662f\u672c\u65b9\u6cd5\u3002\u793a\u4f8b: 1 2 3 4 arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]') arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0) arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0) Parameters: Name Type Description Default tm the input numpy datetime object required Returns: Type Description datetime.datetime python datetime object .. deprecated:: 2.0.0 use tm.item() instead Source code in omicron/extensions/np.py @deprecated ( \"2.0.0\" , details = \"use `tm.item()` instead\" ) def to_pydatetime ( tm : np . datetime64 ) -> datetime . datetime : \"\"\"\u5c06numpy.datetime64\u5bf9\u8c61\u8f6c\u6362\u6210\u4e3apython\u7684datetime\u5bf9\u8c61 numpy.ndarray.item()\u65b9\u6cd5\u53ef\u7528\u4ee5\u5c06\u4efb\u4f55numpy\u5bf9\u8c61\u8f6c\u6362\u6210python\u5bf9\u8c61\uff0c\u63a8\u8350\u5728\u4efb\u4f55\u9002\u7528\u7684\u5730\u65b9\u4f7f\u7528.item()\u65b9\u6cd5\uff0c\u800c\u4e0d\u662f\u672c\u65b9\u6cd5\u3002\u793a\u4f8b: ``` arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]') arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0) arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0) ``` Args: tm : the input numpy datetime object Returns: python datetime object \"\"\" unix_epoch = np . datetime64 ( 0 , \"s\" ) one_second = np . timedelta64 ( 1 , \"s\" ) seconds_since_epoch = ( tm - unix_epoch ) / one_second return datetime . datetime . utcfromtimestamp ( seconds_since_epoch )","title":"to_pydatetime()"},{"location":"api/extensions/#omicron.extensions.np.top_n_argpos","text":"get top n (max->min) elements and return argpos which its value ordered in descent Examples: >>> top_n_argpos ([ np . nan , 4 , 3 , 9 , 8 , 5 , 2 , 1 , 0 , 6 , 7 ], 2 ) array ([ 3 , 4 ]) Parameters: Name Type Description Default ts np.array [description] required n int [description] required Returns: Type Description np.array [description] Source code in omicron/extensions/np.py def top_n_argpos ( ts : np . array , n : int ) -> np . array : \"\"\"get top n (max->min) elements and return argpos which its value ordered in descent Example: >>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2) array([3, 4]) Args: ts (np.array): [description] n (int): [description] Returns: np.array: [description] \"\"\" ts_ = np . copy ( ts ) ts_ [ np . isnan ( ts_ )] = - np . inf return np . 
argsort ( ts_ )[ - n :][:: - 1 ]","title":"top_n_argpos()"},{"location":"api/metrics/","text":"\u4ee5\u4e0b\u529f\u80fd\u8bf7\u4f7f\u7528empyrical\u5305\u4e2d\u76f8\u5173\u7684\u51fd\u6570\u3002 \u00b6 usage : 1 from empyrical import aggregate_returns aggregate_returns \u00b6 external link alpha \u00b6 external link alpha_aligned \u00b6 external link alpha_beta \u00b6 external link alpha_beta_aligned \u00b6 external link annual_return \u00b6 external link annual_volatility \u00b6 external link beta \u00b6 external link beta_aligned \u00b6 external link beta_fragility_heuristic \u00b6 external link beta_fragility_heuristic_aligned \u00b6 external link cagr \u00b6 external link calmar_ratio \u00b6 external link capture \u00b6 external link compute_exposures \u00b6 external link conditional_value_at_risk \u00b6 external link cum_returns \u00b6 external link cum_returns_final \u00b6 external link down_alpha_beta \u00b6 external link down_capture \u00b6 external link downside_risk \u00b6 external link excess_sharpe \u00b6 external link gpd_risk_estimates \u00b6 external link gpd_risk_estimates_aligned \u00b6 external link max_drawdown \u00b6 external link omega_ratio \u00b6 external link perf_attrib \u00b6 external link periods \u00b6 external link roll_alpha \u00b6 external link roll_alpha_aligned \u00b6 external link roll_alpha_beta \u00b6 external link roll_alpha_beta_aligned \u00b6 external link roll_annual_volatility \u00b6 external link roll_beta \u00b6 external link roll_beta_aligned \u00b6 external link roll_down_capture \u00b6 external link roll_max_drawdown \u00b6 external link roll_sharpe_ratio \u00b6 external link roll_sortino_ratio \u00b6 external link roll_up_capture \u00b6 external link roll_up_down_capture \u00b6 external link sharpe_ratio \u00b6 external link simple_returns \u00b6 external link sortino_ratio \u00b6 external link stability_of_timeseries \u00b6 external link stats \u00b6 external link tail_ratio \u00b6 external link up_alpha_beta \u00b6 external link up_capture \u00b6 external link up_down_capture \u00b6 external link utils \u00b6 external link value_at_risk \u00b6 external link","title":"metrics"},{"location":"api/metrics/#\u4ee5\u4e0b\u529f\u80fd\u8bf7\u4f7f\u7528empyrical\u5305\u4e2d\u76f8\u5173\u7684\u51fd\u6570","text":"usage : 1 from empyrical import aggregate_returns","title":"\u4ee5\u4e0b\u529f\u80fd\u8bf7\u4f7f\u7528empyrical\u5305\u4e2d\u76f8\u5173\u7684\u51fd\u6570\u3002"},{"location":"api/metrics/#aggregate_returns","text":"external link","title":"aggregate_returns"},{"location":"api/metrics/#alpha","text":"external link","title":"alpha"},{"location":"api/metrics/#alpha_aligned","text":"external link","title":"alpha_aligned"},{"location":"api/metrics/#alpha_beta","text":"external link","title":"alpha_beta"},{"location":"api/metrics/#alpha_beta_aligned","text":"external link","title":"alpha_beta_aligned"},{"location":"api/metrics/#annual_return","text":"external link","title":"annual_return"},{"location":"api/metrics/#annual_volatility","text":"external link","title":"annual_volatility"},{"location":"api/metrics/#beta","text":"external link","title":"beta"},{"location":"api/metrics/#beta_aligned","text":"external link","title":"beta_aligned"},{"location":"api/metrics/#beta_fragility_heuristic","text":"external link","title":"beta_fragility_heuristic"},{"location":"api/metrics/#beta_fragility_heuristic_aligned","text":"external link","title":"beta_fragility_heuristic_aligned"},{"location":"api/metrics/#cagr","text":"external 
link","title":"cagr"},{"location":"api/metrics/#calmar_ratio","text":"external link","title":"calmar_ratio"},{"location":"api/metrics/#capture","text":"external link","title":"capture"},{"location":"api/metrics/#compute_exposures","text":"external link","title":"compute_exposures"},{"location":"api/metrics/#conditional_value_at_risk","text":"external link","title":"conditional_value_at_risk"},{"location":"api/metrics/#cum_returns","text":"external link","title":"cum_returns"},{"location":"api/metrics/#cum_returns_final","text":"external link","title":"cum_returns_final"},{"location":"api/metrics/#down_alpha_beta","text":"external link","title":"down_alpha_beta"},{"location":"api/metrics/#down_capture","text":"external link","title":"down_capture"},{"location":"api/metrics/#downside_risk","text":"external link","title":"downside_risk"},{"location":"api/metrics/#excess_sharpe","text":"external link","title":"excess_sharpe"},{"location":"api/metrics/#gpd_risk_estimates","text":"external link","title":"gpd_risk_estimates"},{"location":"api/metrics/#gpd_risk_estimates_aligned","text":"external link","title":"gpd_risk_estimates_aligned"},{"location":"api/metrics/#max_drawdown","text":"external link","title":"max_drawdown"},{"location":"api/metrics/#omega_ratio","text":"external link","title":"omega_ratio"},{"location":"api/metrics/#perf_attrib","text":"external link","title":"perf_attrib"},{"location":"api/metrics/#periods","text":"external link","title":"periods"},{"location":"api/metrics/#roll_alpha","text":"external link","title":"roll_alpha"},{"location":"api/metrics/#roll_alpha_aligned","text":"external link","title":"roll_alpha_aligned"},{"location":"api/metrics/#roll_alpha_beta","text":"external link","title":"roll_alpha_beta"},{"location":"api/metrics/#roll_alpha_beta_aligned","text":"external link","title":"roll_alpha_beta_aligned"},{"location":"api/metrics/#roll_annual_volatility","text":"external link","title":"roll_annual_volatility"},{"location":"api/metrics/#roll_beta","text":"external link","title":"roll_beta"},{"location":"api/metrics/#roll_beta_aligned","text":"external link","title":"roll_beta_aligned"},{"location":"api/metrics/#roll_down_capture","text":"external link","title":"roll_down_capture"},{"location":"api/metrics/#roll_max_drawdown","text":"external link","title":"roll_max_drawdown"},{"location":"api/metrics/#roll_sharpe_ratio","text":"external link","title":"roll_sharpe_ratio"},{"location":"api/metrics/#roll_sortino_ratio","text":"external link","title":"roll_sortino_ratio"},{"location":"api/metrics/#roll_up_capture","text":"external link","title":"roll_up_capture"},{"location":"api/metrics/#roll_up_down_capture","text":"external link","title":"roll_up_down_capture"},{"location":"api/metrics/#sharpe_ratio","text":"external link","title":"sharpe_ratio"},{"location":"api/metrics/#simple_returns","text":"external link","title":"simple_returns"},{"location":"api/metrics/#sortino_ratio","text":"external link","title":"sortino_ratio"},{"location":"api/metrics/#stability_of_timeseries","text":"external link","title":"stability_of_timeseries"},{"location":"api/metrics/#stats","text":"external link","title":"stats"},{"location":"api/metrics/#tail_ratio","text":"external link","title":"tail_ratio"},{"location":"api/metrics/#up_alpha_beta","text":"external link","title":"up_alpha_beta"},{"location":"api/metrics/#up_capture","text":"external link","title":"up_capture"},{"location":"api/metrics/#up_down_capture","text":"external 
link","title":"up_down_capture"},{"location":"api/metrics/#utils","text":"external link","title":"utils"},{"location":"api/metrics/#value_at_risk","text":"external link","title":"value_at_risk"},{"location":"api/omicron/","text":"Omicron\u63d0\u4f9b\u6570\u636e\u6301\u4e45\u5316\u3001\u65f6\u95f4\uff08\u65e5\u5386\u3001triggers)\u3001\u884c\u60c5\u6570\u636emodel\u3001\u57fa\u7840\u8fd0\u7b97\u548c\u57fa\u7840\u91cf\u5316\u56e0\u5b50 close () async \u00b6 \u5173\u95ed\u4e0e\u7f13\u5b58\u7684\u8fde\u63a5 Source code in omicron/__init__.py async def close (): \"\"\"\u5173\u95ed\u4e0e\u7f13\u5b58\u7684\u8fde\u63a5\"\"\" try : await cache . close () except Exception as e : # noqa pass init ( app_cache = 5 ) async \u00b6 \u521d\u59cb\u5316Omicron \u521d\u59cb\u5316influxDB, \u7f13\u5b58\u7b49\u8fde\u63a5\uff0c \u5e76\u52a0\u8f7d\u65e5\u5386\u548c\u8bc1\u5238\u5217\u8868 \u4e0a\u8ff0\u521d\u59cb\u5316\u7684\u8fde\u63a5\uff0c\u5e94\u8be5\u5728\u7a0b\u5e8f\u9000\u51fa\u65f6\uff0c\u901a\u8fc7\u8c03\u7528 close() \u5173\u95ed Source code in omicron/__init__.py async def init ( app_cache : int = 5 ): \"\"\"\u521d\u59cb\u5316Omicron \u521d\u59cb\u5316influxDB, \u7f13\u5b58\u7b49\u8fde\u63a5\uff0c \u5e76\u52a0\u8f7d\u65e5\u5386\u548c\u8bc1\u5238\u5217\u8868 \u4e0a\u8ff0\u521d\u59cb\u5316\u7684\u8fde\u63a5\uff0c\u5e94\u8be5\u5728\u7a0b\u5e8f\u9000\u51fa\u65f6\uff0c\u901a\u8fc7\u8c03\u7528`close()`\u5173\u95ed \"\"\" global cache await cache . init ( app = app_cache ) await tf . init () from omicron.models.security import Security await Security . init () Extensions package \u00b6 decimals \u00b6 math_round ( x , digits ) \u00b6 \u7531\u4e8e\u6d6e\u70b9\u6570\u7684\u8868\u793a\u95ee\u9898\uff0c\u5f88\u591a\u8bed\u8a00\u7684round\u51fd\u6570\u4e0e\u6570\u5b66\u4e0a\u7684round\u51fd\u6570\u4e0d\u4e00\u81f4\u3002\u4e0b\u9762\u7684\u51fd\u6570\u7ed3\u679c\u4e0e\u6570\u5b66\u4e0a\u7684\u4e00\u81f4\u3002 Parameters: Name Type Description Default x float \u8981\u8fdb\u884c\u56db\u820d\u4e94\u5165\u7684\u6570\u5b57 required digits int \u5c0f\u6570\u70b9\u540e\u4fdd\u7559\u7684\u4f4d\u6570 required Source code in omicron/extensions/decimals.py def math_round ( x : float , digits : int ): \"\"\"\u7531\u4e8e\u6d6e\u70b9\u6570\u7684\u8868\u793a\u95ee\u9898\uff0c\u5f88\u591a\u8bed\u8a00\u7684round\u51fd\u6570\u4e0e\u6570\u5b66\u4e0a\u7684round\u51fd\u6570\u4e0d\u4e00\u81f4\u3002\u4e0b\u9762\u7684\u51fd\u6570\u7ed3\u679c\u4e0e\u6570\u5b66\u4e0a\u7684\u4e00\u81f4\u3002 Args: x: \u8981\u8fdb\u884c\u56db\u820d\u4e94\u5165\u7684\u6570\u5b57 digits: \u5c0f\u6570\u70b9\u540e\u4fdd\u7559\u7684\u4f4d\u6570 \"\"\" return int ( x * ( 10 ** digits ) + copysign ( 0.5 , x )) / ( 10 ** digits ) price_equal ( x , y ) \u00b6 \u5224\u65ad\u80a1\u4ef7\u662f\u5426\u76f8\u7b49 Parameters: Name Type Description Default x \u4ef7\u683c1 required y \u4ef7\u683c2 required Returns: Type Description bool \u5982\u679c\u76f8\u7b49\u5219\u8fd4\u56deTrue\uff0c\u5426\u5219\u8fd4\u56deFalse Source code in omicron/extensions/decimals.py def price_equal ( x : float , y : float ) -> bool : \"\"\"\u5224\u65ad\u80a1\u4ef7\u662f\u5426\u76f8\u7b49 Args: x : \u4ef7\u683c1 y : \u4ef7\u683c2 Returns: \u5982\u679c\u76f8\u7b49\u5219\u8fd4\u56deTrue\uff0c\u5426\u5219\u8fd4\u56deFalse \"\"\" return abs ( math_round ( x , 2 ) - math_round ( y , 2 )) < 1e-2 np \u00b6 Extension function related to numpy array_math_round ( arr , digits ) \u00b6 \u5c06\u4e00\u7ef4\u6570\u7ec4arr\u7684\u6570\u636e\u8fdb\u884c\u56db\u820d\u4e94\u5165 
numpy.around\u7684\u51fd\u6570\u5e76\u4e0d\u662f\u6570\u5b66\u4e0a\u7684\u56db\u820d\u4e94\u5165\uff0c\u5bf91.5\u548c2.5\u8fdb\u884cround\u7684\u7ed3\u679c\u90fd\u4f1a\u53d8\u62102\uff0c\u5728\u91d1\u878d\u9886\u57df\u8ba1\u7b97\u4e2d\uff0c\u6211\u4eec\u5fc5\u987b\u4f7f\u7528\u6570\u5b66\u610f\u4e49\u4e0a\u7684\u56db\u820d\u4e94\u5165\u3002 Parameters: Name Type Description Default arr ArrayLike \u8f93\u5165\u6570\u7ec4 required digits int required Returns: Type Description np.ndarray \u56db\u820d\u4e94\u5165\u540e\u7684\u4e00\u7ef4\u6570\u7ec4 Source code in omicron/extensions/np.py def array_math_round ( arr : Union [ float , ArrayLike ], digits : int ) -> np . ndarray : \"\"\"\u5c06\u4e00\u7ef4\u6570\u7ec4arr\u7684\u6570\u636e\u8fdb\u884c\u56db\u820d\u4e94\u5165 numpy.around\u7684\u51fd\u6570\u5e76\u4e0d\u662f\u6570\u5b66\u4e0a\u7684\u56db\u820d\u4e94\u5165\uff0c\u5bf91.5\u548c2.5\u8fdb\u884cround\u7684\u7ed3\u679c\u90fd\u4f1a\u53d8\u62102\uff0c\u5728\u91d1\u878d\u9886\u57df\u8ba1\u7b97\u4e2d\uff0c\u6211\u4eec\u5fc5\u987b\u4f7f\u7528\u6570\u5b66\u610f\u4e49\u4e0a\u7684\u56db\u820d\u4e94\u5165\u3002 Args: arr (ArrayLike): \u8f93\u5165\u6570\u7ec4 digits (int): Returns: np.ndarray: \u56db\u820d\u4e94\u5165\u540e\u7684\u4e00\u7ef4\u6570\u7ec4 \"\"\" # \u5982\u679c\u662f\u5355\u4e2a\u5143\u7d20\uff0c\u5219\u76f4\u63a5\u8fd4\u56de if isinstance ( arr , float ): return decimals . math_round ( arr , digits ) f = np . vectorize ( lambda x : decimals . math_round ( x , digits )) return f ( arr ) array_price_equal ( price1 , price2 ) \u00b6 \u5224\u65ad\u4e24\u4e2a\u4ef7\u683c\u6570\u7ec4\u662f\u5426\u76f8\u7b49 Parameters: Name Type Description Default price1 ArrayLike \u4ef7\u683c\u6570\u7ec4 required price2 ArrayLike \u4ef7\u683c\u6570\u7ec4 required Returns: Type Description np.ndarray \u5224\u65ad\u7ed3\u679c Source code in omicron/extensions/np.py def array_price_equal ( price1 : ArrayLike , price2 : ArrayLike ) -> np . ndarray : \"\"\"\u5224\u65ad\u4e24\u4e2a\u4ef7\u683c\u6570\u7ec4\u662f\u5426\u76f8\u7b49 Args: price1 (ArrayLike): \u4ef7\u683c\u6570\u7ec4 price2 (ArrayLike): \u4ef7\u683c\u6570\u7ec4 Returns: np.ndarray: \u5224\u65ad\u7ed3\u679c \"\"\" price1 = array_math_round ( price1 , 2 ) price2 = array_math_round ( price2 , 2 ) return abs ( price1 - price2 ) < 1e-2 bars_since ( condition , default = None ) \u00b6 Return the number of bars since condition sequence was last True , or if never, return default . 1 2 3 >>> condition = [True, True, False] >>> bars_since(condition) 1 Source code in omicron/extensions/np.py def bars_since ( condition : Sequence [ bool ], default = None ) -> int : \"\"\" Return the number of bars since `condition` sequence was last `True`, or if never, return `default`. >>> condition = [True, True, False] >>> bars_since(condition) 1 \"\"\" return next ( compress ( range ( len ( condition )), reversed ( condition )), default ) bin_cut ( arr , n ) \u00b6 \u5c06\u6570\u7ec4arr\u5207\u5206\u6210n\u4efd todo: use padding + reshape to boost performance Parameters: Name Type Description Default arr [type] [description] required n [type] [description] required Returns: Type Description [type] [description] Source code in omicron/extensions/np.py def bin_cut ( arr : list , n : int ): \"\"\"\u5c06\u6570\u7ec4arr\u5207\u5206\u6210n\u4efd todo: use padding + reshape to boost performance Args: arr ([type]): [description] n ([type]): [description] Returns: [type]: [description] \"\"\" result = [[] for i in range ( n )] for i , e in enumerate ( arr ): result [ i % n ] . 
append ( e ) return [ e for e in result if len ( e )] count_between ( arr , start , end ) \u00b6 \u8ba1\u7b97\u6570\u7ec4\u4e2d\uff0c start \u5143\u7d20\u4e0e end \u5143\u7d20\u4e4b\u95f4\u5171\u6709\u591a\u5c11\u4e2a\u5143\u7d20 \u8981\u6c42arr\u5fc5\u987b\u662f\u5df2\u6392\u5e8f\u3002\u8ba1\u7b97\u7ed3\u679c\u4f1a\u5305\u542b\u533a\u95f4\u8fb9\u754c\u70b9\u3002 Examples: >>> arr = [ 20050104 , 20050105 , 20050106 , 20050107 , 20050110 , 20050111 ] >>> count_between ( arr , 20050104 , 20050111 ) 6 >>> count_between ( arr , 20050104 , 20050109 ) 4 Source code in omicron/extensions/np.py def count_between ( arr , start , end ): \"\"\"\u8ba1\u7b97\u6570\u7ec4\u4e2d\uff0c`start`\u5143\u7d20\u4e0e`end`\u5143\u7d20\u4e4b\u95f4\u5171\u6709\u591a\u5c11\u4e2a\u5143\u7d20 \u8981\u6c42arr\u5fc5\u987b\u662f\u5df2\u6392\u5e8f\u3002\u8ba1\u7b97\u7ed3\u679c\u4f1a\u5305\u542b\u533a\u95f4\u8fb9\u754c\u70b9\u3002 Examples: >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111] >>> count_between(arr, 20050104, 20050111) 6 >>> count_between(arr, 20050104, 20050109) 4 \"\"\" pos_start = np . searchsorted ( arr , start , side = \"right\" ) pos_end = np . searchsorted ( arr , end , side = \"right\" ) counter = pos_end - pos_start + 1 if start < arr [ 0 ]: counter -= 1 if end > arr [ - 1 ]: counter -= 1 return counter dataframe_to_structured_array ( df , dtypes = None ) \u00b6 convert dataframe (with all columns, and index possibly) to numpy structured arrays len(dtypes) should be either equal to len(df.columns) or len(df.columns) + 1 . In the later case, it implies to include df.index into converted array. Parameters: Name Type Description Default df DataFrame the one needs to be converted required dtypes List[Tuple] Defaults to None. If it's None , then dtypes of df is used, in such case, the index of df will not be converted. None Returns: Type Description ArrayLike [description] Source code in omicron/extensions/np.py def dataframe_to_structured_array ( df : DataFrame , dtypes : List [ Tuple ] = None ) -> ArrayLike : \"\"\"convert dataframe (with all columns, and index possibly) to numpy structured arrays `len(dtypes)` should be either equal to `len(df.columns)` or `len(df.columns) + 1`. In the later case, it implies to include `df.index` into converted array. Args: df: the one needs to be converted dtypes: Defaults to None. If it's `None`, then dtypes of `df` is used, in such case, the `index` of `df` will not be converted. Returns: ArrayLike: [description] \"\"\" v = df if dtypes is not None : dtypes_in_dict = { key : value for key , value in dtypes } col_len = len ( df . columns ) if len ( dtypes ) == col_len + 1 : v = df . reset_index () rename_index_to = set ( dtypes_in_dict . keys ()) . difference ( set ( df . columns )) v . rename ( columns = { \"index\" : list ( rename_index_to )[ 0 ]}, inplace = True ) elif col_len != len ( dtypes ): raise ValueError ( f \"length of dtypes should be either { col_len } or { col_len + 1 } , is { len ( dtypes ) } \" ) # re-arrange order of dtypes, in order to align with df.columns dtypes = [] for name in v . columns : dtypes . append (( name , dtypes_in_dict [ name ])) else : dtypes = df . dtypes return np . array ( np . rec . fromrecords ( v . values ), dtype = dtypes ) dict_to_numpy_array ( d , dtype ) \u00b6 convert dictionary to numpy array Examples: d = {\"aaron\": 5, \"jack\": 6} dtype = [(\"name\", \"S8\"), (\"score\", \" np . 
array : \"\"\"convert dictionary to numpy array Examples: >>> d = {\"aaron\": 5, \"jack\": 6} >>> dtype = [(\"name\", \"S8\"), (\"score\", \">> dict_to_numpy_array(d, dtype) array([(b'aaron', 5), (b'jack', 6)], dtype=[('name', 'S8'), ('score', '>> arr = np . arange ( 6 , dtype = np . float32 ) >>> arr [ 3 : 5 ] = np . NaN >>> fill_nan ( arr ) ... array ([ 0. , 1. , 2. , 2. , 2. , 5. ], dtype = float32 ) >>> arr = np . arange ( 6 , dtype = np . float32 ) >>> arr [ 0 : 2 ] = np . nan >>> fill_nan ( arr ) ... array ([ 2. , 2. , 2. , 3. , 4. , 5. ], dtype = float32 ) Parameters: Name Type Description Default ts np.array [description] required Source code in omicron/extensions/np.py def fill_nan ( ts : np . ndarray ): \"\"\"\u5c06ts\u4e2d\u7684NaN\u66ff\u6362\u4e3a\u5176\u524d\u503c \u5982\u679cts\u8d77\u5934\u7684\u5143\u7d20\u4e3aNaN\uff0c\u5219\u7528\u7b2c\u4e00\u4e2a\u975eNaN\u5143\u7d20\u66ff\u6362\u3002 \u5982\u679c\u6240\u6709\u5143\u7d20\u90fd\u4e3aNaN\uff0c\u5219\u65e0\u6cd5\u66ff\u6362\u3002 Example: >>> arr = np.arange(6, dtype=np.float32) >>> arr[3:5] = np.NaN >>> fill_nan(arr) ... # doctest: +NORMALIZE_WHITESPACE array([0., 1., 2., 2., 2., 5.], dtype=float32) >>> arr = np.arange(6, dtype=np.float32) >>> arr[0:2] = np.nan >>> fill_nan(arr) ... # doctest: +NORMALIZE_WHITESPACE array([2., 2., 2., 3., 4., 5.], dtype=float32) Args: ts (np.array): [description] \"\"\" if np . all ( np . isnan ( ts )): raise ValueError ( \"all of ts are NaN\" ) if ts [ 0 ] is None or math . isnan ( ts [ 0 ]): idx = np . argwhere ( ~ np . isnan ( ts ))[ 0 ] ts [ 0 ] = ts [ idx ] mask = np . isnan ( ts ) idx = np . where ( ~ mask , np . arange ( mask . size ), 0 ) np . maximum . accumulate ( idx , out = idx ) return ts [ idx ] find_runs ( x ) \u00b6 Find runs of consecutive items in an array. Parameters: Name Type Description Default x ArrayLike the sequence to find runs in required Returns: Type Description Tuple[np.ndarray, np.ndarray, np.ndarray] A tuple of unique values, start indices, and length of runs Source code in omicron/extensions/np.py def find_runs ( x : ArrayLike ) -> Tuple [ np . ndarray , np . ndarray , np . ndarray ]: \"\"\"Find runs of consecutive items in an array. Args: x: the sequence to find runs in Returns: A tuple of unique values, start indices, and length of runs \"\"\" # ensure array x = np . asanyarray ( x ) if x . ndim != 1 : raise ValueError ( \"only 1D array supported\" ) n = x . shape [ 0 ] # handle empty array if n == 0 : return np . array ([]), np . array ([]), np . array ([]) else : # find run starts loc_run_start = np . empty ( n , dtype = bool ) loc_run_start [ 0 ] = True np . not_equal ( x [: - 1 ], x [ 1 :], out = loc_run_start [ 1 :]) run_starts = np . nonzero ( loc_run_start )[ 0 ] # find run values run_values = x [ loc_run_start ] # find run lengths run_lengths = np . diff ( np . append ( run_starts , n )) return run_values , run_starts , run_lengths floor ( arr , item ) \u00b6 \u5728\u6570\u636earr\u4e2d\uff0c\u627e\u5230\u5c0f\u4e8e\u7b49\u4e8eitem\u7684\u90a3\u4e00\u4e2a\u503c\u3002\u5982\u679citem\u5c0f\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[0];\u5982\u679citem \u5927\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[-1] \u4e0e minute_frames_floor \u4e0d\u540c\u7684\u662f\uff0c\u672c\u51fd\u6570\u4e0d\u505a\u56de\u7ed5\u4e0e\u8fdb\u4f4d. 
Examples: >>> a = [ 3 , 6 , 9 ] >>> floor ( a , - 1 ) 3 >>> floor ( a , 9 ) 9 >>> floor ( a , 10 ) 9 >>> floor ( a , 4 ) 3 >>> floor ( a , 10 ) 9 Parameters: Name Type Description Default arr required item required Source code in omicron/extensions/np.py def floor ( arr , item ): \"\"\" \u5728\u6570\u636earr\u4e2d\uff0c\u627e\u5230\u5c0f\u4e8e\u7b49\u4e8eitem\u7684\u90a3\u4e00\u4e2a\u503c\u3002\u5982\u679citem\u5c0f\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[0];\u5982\u679citem \u5927\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[-1] \u4e0e`minute_frames_floor`\u4e0d\u540c\u7684\u662f\uff0c\u672c\u51fd\u6570\u4e0d\u505a\u56de\u7ed5\u4e0e\u8fdb\u4f4d. Examples: >>> a = [3, 6, 9] >>> floor(a, -1) 3 >>> floor(a, 9) 9 >>> floor(a, 10) 9 >>> floor(a, 4) 3 >>> floor(a,10) 9 Args: arr: item: Returns: \"\"\" if item < arr [ 0 ]: return arr [ 0 ] index = np . searchsorted ( arr , item , side = \"right\" ) return arr [ index - 1 ] join_by_left ( key , r1 , r2 , mask = True ) \u00b6 \u5de6\u8fde\u63a5 r1 , r2 by key \u5982\u679c r1 \u4e2d\u5b58\u5728 r2 \u4e2d\u6ca1\u6709\u7684\u884c\uff0c\u5219\u8be5\u884c\u5bf9\u5e94\u7684 r2 \u4e2d\u7684\u90a3\u4e9b\u5b57\u6bb5\u5c06\u88abmask\uff0c\u6216\u8005\u586b\u5145\u968f\u673a\u6570\u3002 same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys Reference: stackoverflow Examples: >>> # to join the following >>> # [[ 1, 2], >>> # [ 1, 3], x [[1, 5], >>> # [ 2, 3]] [4, 7]] >>> # only first two rows in left will be joined >>> r1 = np . array ([( 1 , 2 ), ( 1 , 3 ), ( 2 , 3 )], dtype = [( 'seq' , 'i4' ), ( 'score' , 'i4' )]) >>> r2 = np . array ([( 1 , 5 ), ( 4 , 7 )], dtype = [( 'seq' , 'i4' ), ( 'age' , 'i4' )]) >>> joined = join_by_left ( 'seq' , r1 , r2 ) >>> print ( joined ) [( 1 , 2 , 5 ) ( 1 , 3 , 5 ) ( 2 , 3 , -- )] >>> print ( joined . dtype ) ( numpy . record , [( 'seq' , '>> joined [ 2 ][ 2 ] masked >>> joined . tolist ()[ 2 ][ 2 ] == None True Parameters: Name Type Description Default key join\u5173\u952e\u5b57 required r1 \u6570\u636e\u96c61 required r2 \u6570\u636e\u96c62 required Returns: Type Description a numpy array Source code in omicron/extensions/np.py def join_by_left ( key , r1 , r2 , mask = True ): \"\"\"\u5de6\u8fde\u63a5 `r1`, `r2` by `key` \u5982\u679c`r1`\u4e2d\u5b58\u5728`r2`\u4e2d\u6ca1\u6709\u7684\u884c\uff0c\u5219\u8be5\u884c\u5bf9\u5e94\u7684`r2`\u4e2d\u7684\u90a3\u4e9b\u5b57\u6bb5\u5c06\u88abmask\uff0c\u6216\u8005\u586b\u5145\u968f\u673a\u6570\u3002 same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys [Reference: stackoverflow](https://stackoverflow.com/a/53261882/13395693) Examples: >>> # to join the following >>> # [[ 1, 2], >>> # [ 1, 3], x [[1, 5], >>> # [ 2, 3]] [4, 7]] >>> # only first two rows in left will be joined >>> r1 = np.array([(1, 2), (1,3), (2,3)], dtype=[('seq', 'i4'), ('score', 'i4')]) >>> r2 = np.array([(1, 5), (4,7)], dtype=[('seq', 'i4'), ('age', 'i4')]) >>> joined = join_by_left('seq', r1, r2) >>> print(joined) [(1, 2, 5) (1, 3, 5) (2, 3, --)] >>> print(joined.dtype) (numpy.record, [('seq', '>> joined[2][2] masked >>> joined.tolist()[2][2] == None True Args: key : join\u5173\u952e\u5b57 r1 : \u6570\u636e\u96c61 r2 : \u6570\u636e\u96c62 Returns: a numpy array \"\"\" # figure out the dtype of the result array descr1 = r1 . dtype . descr descr2 = [ d for d in r2 . dtype . descr if d [ 0 ] not in r1 . dtype . 
names ] descrm = descr1 + descr2 # figure out the fields we'll need from each array f1 = [ d [ 0 ] for d in descr1 ] f2 = [ d [ 0 ] for d in descr2 ] # cache the number of columns in f1 ncol1 = len ( f1 ) # get a dict of the rows of r2 grouped by key rows2 = {} for row2 in r2 : rows2 . setdefault ( row2 [ key ], []) . append ( row2 ) # figure out how many rows will be in the result nrowm = 0 for k1 in r1 [ key ]: if k1 in rows2 : nrowm += len ( rows2 [ k1 ]) else : nrowm += 1 # allocate the return array # ret = np.full((nrowm, ), fill, dtype=descrm) _ret = np . recarray ( nrowm , dtype = descrm ) if mask : ret = np . ma . array ( _ret , mask = True ) else : ret = _ret # merge the data into the return array i = 0 for row1 in r1 : if row1 [ key ] in rows2 : for row2 in rows2 [ row1 [ key ]]: ret [ i ] = tuple ( row1 [ f1 ]) + tuple ( row2 [ f2 ]) i += 1 else : for j in range ( ncol1 ): ret [ i ][ j ] = row1 [ j ] i += 1 return ret numpy_append_fields ( base , names , data , dtypes ) \u00b6 \u7ed9\u73b0\u6709\u7684\u6570\u7ec4 base \u589e\u52a0\u65b0\u7684\u5b57\u6bb5 \u5b9e\u73b0\u4e86 numpy.lib.recfunctions.rec_append_fields \u7684\u529f\u80fd\u3002\u63d0\u4f9b\u8fd9\u4e2a\u529f\u80fd\uff0c\u662f\u56e0\u4e3a rec_append_fields \u4e0d\u80fd\u5904\u7406 data \u5143\u7d20\u7684\u7c7b\u578b\u4e3aObject\u7684\u60c5\u51b5\u3002 \u65b0\u589e\u7684\u6570\u636e\u5217\u5c06\u987a\u5e8f\u6392\u5217\u5728\u5176\u5b83\u5217\u7684\u53f3\u8fb9\u3002 Examples: >>> # \u65b0\u589e\u5355\u4e2a\u5b57\u6bb5 >>> import numpy >>> old = np . array ([ i for i in range ( 3 )], dtype = [( 'col1' , '>> new_list = [ 2 * i for i in range ( 3 )] >>> res = numpy_append_fields ( old , 'new_col' , new_list , [( 'new_col' , '>> print ( res ) ... [( 0. , 0. ) ( 1. , 2. ) ( 2. , 4. )] >>> # \u65b0\u589e\u591a\u4e2a\u5b57\u6bb5 >>> data = [ res [ 'col1' ] . tolist (), res [ 'new_col' ] . tolist ()] >>> print ( numpy_append_fields ( old , ( 'col3' , 'col4' ), data , [( 'col3' , ' np . ndarray : \"\"\"\u7ed9\u73b0\u6709\u7684\u6570\u7ec4`base`\u589e\u52a0\u65b0\u7684\u5b57\u6bb5 \u5b9e\u73b0\u4e86`numpy.lib.recfunctions.rec_append_fields`\u7684\u529f\u80fd\u3002\u63d0\u4f9b\u8fd9\u4e2a\u529f\u80fd\uff0c\u662f\u56e0\u4e3a`rec_append_fields`\u4e0d\u80fd\u5904\u7406`data`\u5143\u7d20\u7684\u7c7b\u578b\u4e3aObject\u7684\u60c5\u51b5\u3002 \u65b0\u589e\u7684\u6570\u636e\u5217\u5c06\u987a\u5e8f\u6392\u5217\u5728\u5176\u5b83\u5217\u7684\u53f3\u8fb9\u3002 Example: >>> # \u65b0\u589e\u5355\u4e2a\u5b57\u6bb5 >>> import numpy >>> old = np.array([i for i in range(3)], dtype=[('col1', '>> new_list = [2 * i for i in range(3)] >>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '>> print(res) ... # doctest: +NORMALIZE_WHITESPACE [(0., 0.) (1., 2.) (2., 4.)] >>> # \u65b0\u589e\u591a\u4e2a\u5b57\u6bb5 >>> data = [res['col1'].tolist(), res['new_col'].tolist()] >>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', ' np . ndarray : \"\"\"\u4ece`ts`\u4e2d\u53bb\u9664NaN Args: ts (np.array): [description] Returns: np.array: [description] \"\"\" return ts [ ~ np . isnan ( ts . astype ( float ))] replace_zero ( ts , replacement = None ) \u00b6 \u5c06ts\u4e2d\u76840\u66ff\u6362\u4e3a\u524d\u503c, \u5904\u7406volume\u6570\u636e\u65f6\u5e38\u7528\u7528\u5230 \u5982\u679c\u63d0\u4f9b\u4e86replacement, \u5219\u66ff\u6362\u4e3areplacement Source code in omicron/extensions/np.py def replace_zero ( ts : np . ndarray , replacement = None ) -> np . 
ndarray : \"\"\"\u5c06ts\u4e2d\u76840\u66ff\u6362\u4e3a\u524d\u503c, \u5904\u7406volume\u6570\u636e\u65f6\u5e38\u7528\u7528\u5230 \u5982\u679c\u63d0\u4f9b\u4e86replacement, \u5219\u66ff\u6362\u4e3areplacement \"\"\" if replacement is not None : return np . where ( ts == 0 , replacement , ts ) if np . all ( ts == 0 ): raise ValueError ( \"all of ts are 0\" ) if ts [ 0 ] == 0 : idx = np . argwhere ( ts != 0 )[ 0 ] ts [ 0 ] = ts [ idx ] mask = ts == 0 idx = np . where ( ~ mask , np . arange ( mask . size ), 0 ) np . maximum . accumulate ( idx , out = idx ) return ts [ idx ] rolling ( x , win , func ) \u00b6 \u5bf9\u5e8f\u5217 x \u8fdb\u884c\u7a97\u53e3\u6ed1\u52a8\u8ba1\u7b97\u3002 \u5982\u679c func \u8981\u5b9e\u73b0\u7684\u529f\u80fd\u662fargmax, argmin, max, mean, median, min, rank, std, sum, var\u7b49\uff0cmove_argmax\uff0c\u8bf7\u4f7f\u7528bottleneck\u4e2d\u7684move_argmin, move_max, move_mean, move_median, move_min move_rank, move_std, move_sum, move_var\u3002\u8fd9\u4e9b\u51fd\u6570\u7684\u6027\u80fd\u66f4\u597d\u3002 Parameters: Name Type Description Default x [type] [description] required win [type] [description] required func [type] [description] required Returns: Type Description [type] [description] Source code in omicron/extensions/np.py def rolling ( x , win , func ): \"\"\"\u5bf9\u5e8f\u5217`x`\u8fdb\u884c\u7a97\u53e3\u6ed1\u52a8\u8ba1\u7b97\u3002 \u5982\u679c`func`\u8981\u5b9e\u73b0\u7684\u529f\u80fd\u662fargmax, argmin, max, mean, median, min, rank, std, sum, var\u7b49\uff0cmove_argmax\uff0c\u8bf7\u4f7f\u7528bottleneck\u4e2d\u7684move_argmin, move_max, move_mean, move_median, move_min move_rank, move_std, move_sum, move_var\u3002\u8fd9\u4e9b\u51fd\u6570\u7684\u6027\u80fd\u66f4\u597d\u3002 Args: x ([type]): [description] win ([type]): [description] func ([type]): [description] Returns: [type]: [description] \"\"\" results = [] for subarray in sliding_window_view ( x , window_shape = win ): results . append ( func ( subarray )) return np . 
array ( results ) shift ( arr , start , offset ) \u00b6 \u5728numpy\u6570\u7ec4arr\u4e2d\uff0c\u627e\u5230start(\u6216\u8005\u6700\u63a5\u8fd1\u7684\u4e00\u4e2a\uff09\uff0c\u53d6offset\u5bf9\u5e94\u7684\u5143\u7d20\u3002 \u8981\u6c42 arr \u5df2\u6392\u5e8f\u3002 offset \u4e3a\u6b63\uff0c\u8868\u660e\u5411\u540e\u79fb\u4f4d\uff1b offset \u4e3a\u8d1f\uff0c\u8868\u660e\u5411\u524d\u79fb\u4f4d Examples: >>> arr = [ 20050104 , 20050105 , 20050106 , 20050107 , 20050110 , 20050111 ] >>> shift ( arr , 20050104 , 1 ) 20050105 >>> shift ( arr , 20050105 , - 1 ) 20050104 >>> # \u8d77\u59cb\u70b9\u5df2\u53f3\u8d8a\u754c\uff0c\u4e14\u5411\u53f3shift\uff0c\u8fd4\u56de\u8d77\u59cb\u70b9 >>> shift ( arr , 20050120 , 1 ) 20050120 Parameters: Name Type Description Default arr \u5df2\u6392\u5e8f\u7684\u6570\u7ec4 required start numpy\u53ef\u63a5\u53d7\u7684\u6570\u636e\u7c7b\u578b required offset int [description] required Returns: Type Description \u79fb\u4f4d\u540e\u5f97\u5230\u7684\u5143\u7d20\u503c Source code in omicron/extensions/np.py def shift ( arr , start , offset ): \"\"\"\u5728numpy\u6570\u7ec4arr\u4e2d\uff0c\u627e\u5230start(\u6216\u8005\u6700\u63a5\u8fd1\u7684\u4e00\u4e2a\uff09\uff0c\u53d6offset\u5bf9\u5e94\u7684\u5143\u7d20\u3002 \u8981\u6c42`arr`\u5df2\u6392\u5e8f\u3002`offset`\u4e3a\u6b63\uff0c\u8868\u660e\u5411\u540e\u79fb\u4f4d\uff1b`offset`\u4e3a\u8d1f\uff0c\u8868\u660e\u5411\u524d\u79fb\u4f4d Examples: >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111] >>> shift(arr, 20050104, 1) 20050105 >>> shift(arr, 20050105, -1) 20050104 >>> # \u8d77\u59cb\u70b9\u5df2\u53f3\u8d8a\u754c\uff0c\u4e14\u5411\u53f3shift\uff0c\u8fd4\u56de\u8d77\u59cb\u70b9 >>> shift(arr, 20050120, 1) 20050120 Args: arr : \u5df2\u6392\u5e8f\u7684\u6570\u7ec4 start : numpy\u53ef\u63a5\u53d7\u7684\u6570\u636e\u7c7b\u578b offset (int): [description] Returns: \u79fb\u4f4d\u540e\u5f97\u5230\u7684\u5143\u7d20\u503c \"\"\" pos = np . searchsorted ( arr , start , side = \"right\" ) if pos + offset - 1 >= len ( arr ): return start else : return arr [ pos + offset - 1 ] smallest_n_argpos ( ts , n ) \u00b6 get smallest n (min->max) elements and return argpos which its value ordered in ascent Examples: >>> smallest_n_argpos ([ np . nan , 4 , 3 , 9 , 8 , 5 , 2 , 1 , 0 , 6 , 7 ], 2 ) array ([ 8 , 7 ]) Parameters: Name Type Description Default ts np.array \u8f93\u5165\u7684\u6570\u7ec4 required n int \u53d6\u6700\u5c0f\u7684n\u4e2a\u5143\u7d20 required Returns: Type Description np.array [description] Source code in omicron/extensions/np.py def smallest_n_argpos ( ts : np . array , n : int ) -> np . array : \"\"\"get smallest n (min->max) elements and return argpos which its value ordered in ascent Example: >>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2) array([8, 7]) Args: ts (np.array): \u8f93\u5165\u7684\u6570\u7ec4 n (int): \u53d6\u6700\u5c0f\u7684n\u4e2a\u5143\u7d20 Returns: np.array: [description] \"\"\" return np . 
argsort ( ts )[: n ] to_pydatetime ( tm ) \u00b6 \u5c06numpy.datetime64\u5bf9\u8c61\u8f6c\u6362\u6210\u4e3apython\u7684datetime\u5bf9\u8c61 numpy.ndarray.item()\u65b9\u6cd5\u53ef\u7528\u4ee5\u5c06\u4efb\u4f55numpy\u5bf9\u8c61\u8f6c\u6362\u6210python\u5bf9\u8c61\uff0c\u63a8\u8350\u5728\u4efb\u4f55\u9002\u7528\u7684\u5730\u65b9\u4f7f\u7528.item()\u65b9\u6cd5\uff0c\u800c\u4e0d\u662f\u672c\u65b9\u6cd5\u3002\u793a\u4f8b: 1 2 3 4 arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]') arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0) arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0) Parameters: Name Type Description Default tm the input numpy datetime object required Returns: Type Description datetime.datetime python datetime object .. deprecated:: 2.0.0 use tm.item() instead Source code in omicron/extensions/np.py @deprecated ( \"2.0.0\" , details = \"use `tm.item()` instead\" ) def to_pydatetime ( tm : np . datetime64 ) -> datetime . datetime : \"\"\"\u5c06numpy.datetime64\u5bf9\u8c61\u8f6c\u6362\u6210\u4e3apython\u7684datetime\u5bf9\u8c61 numpy.ndarray.item()\u65b9\u6cd5\u53ef\u7528\u4ee5\u5c06\u4efb\u4f55numpy\u5bf9\u8c61\u8f6c\u6362\u6210python\u5bf9\u8c61\uff0c\u63a8\u8350\u5728\u4efb\u4f55\u9002\u7528\u7684\u5730\u65b9\u4f7f\u7528.item()\u65b9\u6cd5\uff0c\u800c\u4e0d\u662f\u672c\u65b9\u6cd5\u3002\u793a\u4f8b: ``` arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]') arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0) arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0) ``` Args: tm : the input numpy datetime object Returns: python datetime object \"\"\" unix_epoch = np . datetime64 ( 0 , \"s\" ) one_second = np . timedelta64 ( 1 , \"s\" ) seconds_since_epoch = ( tm - unix_epoch ) / one_second return datetime . datetime . utcfromtimestamp ( seconds_since_epoch ) top_n_argpos ( ts , n ) \u00b6 get top n (max->min) elements and return argpos which its value ordered in descent Examples: >>> top_n_argpos ([ np . nan , 4 , 3 , 9 , 8 , 5 , 2 , 1 , 0 , 6 , 7 ], 2 ) array ([ 3 , 4 ]) Parameters: Name Type Description Default ts np.array [description] required n int [description] required Returns: Type Description np.array [description] Source code in omicron/extensions/np.py def top_n_argpos ( ts : np . array , n : int ) -> np . array : \"\"\"get top n (max->min) elements and return argpos which its value ordered in descent Example: >>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2) array([3, 4]) Args: ts (np.array): [description] n (int): [description] Returns: np.array: [description] \"\"\" ts_ = np . copy ( ts ) ts_ [ np . isnan ( ts_ )] = - np . inf return np . 
argsort ( ts_ )[ - n :][:: - 1 ] Notify package \u00b6 dingtalk \u00b6 DingTalkMessage \u00b6 \u9489\u9489\u7684\u673a\u5668\u4eba\u6d88\u606f\u63a8\u9001\u7c7b\uff0c\u5c01\u88c5\u4e86\u5e38\u7528\u7684\u6d88\u606f\u7c7b\u578b\u4ee5\u53ca\u52a0\u5bc6\u7b97\u6cd5 \u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684access_token \u5982\u679c\u914d\u7f6e\u4e86\u52a0\u7b7e\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684secret \u5982\u679c\u914d\u7f6e\u4e86\u81ea\u5b9a\u4e49\u5173\u952e\u8bcd\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684keyword\uff0c\u591a\u4e2a\u5173\u952e\u8bcd\u7528\u82f1\u6587\u9017\u53f7\u5206\u9694 \u5168\u90e8\u7684\u914d\u7f6e\u6587\u4ef6\u793a\u4f8b\u5982\u4e0b, \u5176\u4e2dsecret\u548ckeyword\u53ef\u4ee5\u4e0d\u914d\u7f6e, access_token\u5fc5\u987b\u914d\u7f6e notify: dingtalk_access_token: xxxx dingtalk_secret: xxxx Source code in omicron/notify/dingtalk.py class DingTalkMessage : \"\"\" \u9489\u9489\u7684\u673a\u5668\u4eba\u6d88\u606f\u63a8\u9001\u7c7b\uff0c\u5c01\u88c5\u4e86\u5e38\u7528\u7684\u6d88\u606f\u7c7b\u578b\u4ee5\u53ca\u52a0\u5bc6\u7b97\u6cd5 \u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684access_token \u5982\u679c\u914d\u7f6e\u4e86\u52a0\u7b7e\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684secret \u5982\u679c\u914d\u7f6e\u4e86\u81ea\u5b9a\u4e49\u5173\u952e\u8bcd\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684keyword\uff0c\u591a\u4e2a\u5173\u952e\u8bcd\u7528\u82f1\u6587\u9017\u53f7\u5206\u9694 \u5168\u90e8\u7684\u914d\u7f6e\u6587\u4ef6\u793a\u4f8b\u5982\u4e0b, \u5176\u4e2dsecret\u548ckeyword\u53ef\u4ee5\u4e0d\u914d\u7f6e, access_token\u5fc5\u987b\u914d\u7f6e notify: dingtalk_access_token: xxxx dingtalk_secret: xxxx \"\"\" url = \"https://oapi.dingtalk.com/robot/send\" @classmethod def _get_access_token ( cls ): \"\"\"\u83b7\u53d6\u9489\u9489\u673a\u5668\u4eba\u7684access_token\"\"\" if hasattr ( cfg . notify , \"dingtalk_access_token\" ): return cfg . notify . dingtalk_access_token else : logger . error ( \"Dingtalk not configured, please add the following items: \\n \" \"notify: \\n \" \" dingtalk_access_token: xxxx \\n \" \" dingtalk_secret: xxxx \\n \" ) raise ConfigError ( \"dingtalk_access_token not found\" ) @classmethod def _get_secret ( cls ): \"\"\"\u83b7\u53d6\u9489\u9489\u673a\u5668\u4eba\u7684secret\"\"\" if hasattr ( cfg . notify , \"dingtalk_secret\" ): return cfg . notify . dingtalk_secret else : return None @classmethod def _get_url ( cls ): \"\"\"\u83b7\u53d6\u9489\u9489\u673a\u5668\u4eba\u7684\u6d88\u606f\u63a8\u9001\u5730\u5740\uff0c\u5c06\u7b7e\u540d\u548c\u65f6\u95f4\u6233\u62fc\u63a5\u5728url\u540e\u9762\"\"\" access_token = cls . _get_access_token () url = f \" { cls . url } ?access_token= { access_token } \" secret = cls . _get_secret () if secret : timestamp , sign = cls . _get_sign ( secret ) url = f \" { url } ×tamp= { timestamp } &sign= { sign } \" return url @classmethod def _get_sign ( cls , secret : str ): \"\"\"\u83b7\u53d6\u7b7e\u540d\u53d1\u9001\u7ed9\u9489\u9489\u673a\u5668\u4eba\"\"\" timestamp = str ( round ( time . time () * 1000 )) secret_enc = secret . encode ( \"utf-8\" ) string_to_sign = \" {} \\n {} \" . format ( timestamp , secret ) string_to_sign_enc = string_to_sign . 
encode ( \"utf-8\" ) hmac_code = hmac . new ( secret_enc , string_to_sign_enc , digestmod = hashlib . sha256 ) . digest () sign = urllib . parse . quote_plus ( base64 . b64encode ( hmac_code )) return timestamp , sign @classmethod def _send ( cls , msg ): \"\"\"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba\"\"\" url = cls . _get_url () response = httpx . post ( url , json = msg , timeout = 30 ) if response . status_code != 200 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { response . content . decode () } \" ) return rsp = json . loads ( response . content ) if rsp . get ( \"errcode\" ) != 0 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { rsp } \" ) return response . content . decode () @classmethod async def _send_async ( cls , msg ): \"\"\"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba\"\"\" url = cls . _get_url () async with httpx . AsyncClient () as client : r = await client . post ( url , json = msg , timeout = 30 ) if r . status_code != 200 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { r . content . decode () } \" ) return rsp = json . loads ( r . content ) if rsp . get ( \"errcode\" ) != 0 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { rsp } \" ) return r . content . decode () @classmethod @deprecated ( \"2.0.0\" , details = \"use function `ding` instead\" ) def text ( cls , content ): msg = { \"text\" : { \"content\" : content }, \"msgtype\" : \"text\" } return cls . _send ( msg ) text ( cls , content ) classmethod \u00b6 .. deprecated:: 2.0.0 use function ding instead Source code in omicron/notify/dingtalk.py @classmethod @deprecated ( \"2.0.0\" , details = \"use function `ding` instead\" ) def text ( cls , content ): msg = { \"text\" : { \"content\" : content }, \"msgtype\" : \"text\" } return cls . 
_send ( msg ) ding ( msg ) \u00b6 \u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba \u652f\u6301\u53d1\u9001\u7eaf\u6587\u672c\u6d88\u606f\u548cmarkdown\u683c\u5f0f\u7684\u6587\u672c\u6d88\u606f\u3002\u5982\u679c\u8981\u53d1\u9001markdown\u683c\u5f0f\u7684\u6d88\u606f\uff0c\u8bf7\u901a\u8fc7\u5b57\u5178\u4f20\u5165\uff0c\u5fc5\u987b\u5305\u542b\u5305\u542b\"title\"\u548c\"text\"\u4e24\u4e2a\u5b57\u6bb5\u3002\u66f4\u8be6\u7ec6\u4fe1\u606f\uff0c\u8bf7\u89c1 \u9489\u9489\u5f00\u653e\u5e73\u53f0\u6587\u6863 Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Parameters: Name Type Description Default msg Union[str, dict] \u5f85\u53d1\u9001\u6d88\u606f\u3002 required Returns: Type Description Awaitable \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 Source code in omicron/notify/dingtalk.py def ding ( msg : Union [ str , dict ]) -> Awaitable : \"\"\"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba \u652f\u6301\u53d1\u9001\u7eaf\u6587\u672c\u6d88\u606f\u548cmarkdown\u683c\u5f0f\u7684\u6587\u672c\u6d88\u606f\u3002\u5982\u679c\u8981\u53d1\u9001markdown\u683c\u5f0f\u7684\u6d88\u606f\uff0c\u8bf7\u901a\u8fc7\u5b57\u5178\u4f20\u5165\uff0c\u5fc5\u987b\u5305\u542b\u5305\u542b\"title\"\u548c\"text\"\u4e24\u4e2a\u5b57\u6bb5\u3002\u66f4\u8be6\u7ec6\u4fe1\u606f\uff0c\u8bf7\u89c1[\u9489\u9489\u5f00\u653e\u5e73\u53f0\u6587\u6863](https://open.dingtalk.com/document/orgapp-server/message-type) ???+ Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Args: msg: \u5f85\u53d1\u9001\u6d88\u606f\u3002 Returns: \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 \"\"\" if isinstance ( msg , str ): msg_ = { \"text\" : { \"content\" : msg }, \"msgtype\" : \"text\" } elif isinstance ( msg , dict ): msg_ = { \"msgtype\" : \"markdown\" , \"markdown\" : { \"title\" : msg [ \"title\" ], \"text\" : msg [ \"text\" ]}, } else : raise TypeError task = asyncio . create_task ( DingTalkMessage . _send_async ( msg_ )) return task mail \u00b6 compose ( subject , plain_txt = None , html = None , attachment = None ) \u00b6 \u7f16\u5199MIME\u90ae\u4ef6\u3002 Parameters: Name Type Description Default subject str \u90ae\u4ef6\u4e3b\u9898 required plain_txt str \u7eaf\u6587\u672c\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9 None html str html\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9. Defaults to None. 
None attachment str \u9644\u4ef6\u6587\u4ef6\u540d None Returns: Type Description EmailMessage MIME mail Source code in omicron/notify/mail.py def compose ( subject : str , plain_txt : str = None , html : str = None , attachment : str = None ) -> EmailMessage : \"\"\"\u7f16\u5199MIME\u90ae\u4ef6\u3002 Args: subject (str): \u90ae\u4ef6\u4e3b\u9898 plain_txt (str): \u7eaf\u6587\u672c\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9 html (str, optional): html\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9. Defaults to None. attachment (str, optional): \u9644\u4ef6\u6587\u4ef6\u540d Returns: MIME mail \"\"\" msg = EmailMessage () msg [ \"Subject\" ] = subject if html : msg . preamble = plain_txt or \"\" msg . set_content ( html , subtype = \"html\" ) else : assert plain_txt , \"Either plain_txt or html is required.\" msg . set_content ( plain_txt ) if attachment : ctype , encoding = mimetypes . guess_type ( attachment ) if ctype is None or encoding is not None : ctype = \"application/octet-stream\" maintype , subtype = ctype . split ( \"/\" , 1 ) with open ( attachment , \"rb\" ) as f : msg . add_attachment ( f . read (), maintype = maintype , subtype = subtype , filename = attachment ) return msg mail_notify ( subject = None , body = None , msg = None , html = False , receivers = None ) \u00b6 \u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u53d1\u9001\u8005\u3001\u63a5\u6536\u8005\u53ca\u90ae\u4ef6\u670d\u52a1\u5668\u7b49\u914d\u7f6e\u8bf7\u901a\u8fc7cfg4py\u914d\u7f6e\uff1a 1 2 3 4 5 notify: mail_from: aaron_yang@jieyu.ai mail_to: - code@jieyu.ai mail_server: smtp.ym.163.com \u9a8c\u8bc1\u5bc6\u7801\u8bf7\u901a\u8fc7\u73af\u5883\u53d8\u91cf MAIL_PASSWORD \u6765\u914d\u7f6e\u3002 subject/body\u4e0emsg\u5fc5\u987b\u63d0\u4f9b\u5176\u4e00\u3002 Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Parameters: Name Type Description Default msg EmailMessage [description]. Defaults to None. None subject str [description]. Defaults to None. None body str [description]. Defaults to None. None html bool body\u662f\u5426\u6309html\u683c\u5f0f\u5904\u7406\uff1f Defaults to False. 
False receivers List[str], Optional \u63a5\u6536\u8005\u4fe1\u606f\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4f7f\u7528\u9884\u5148\u914d\u7f6e\u7684\u63a5\u6536\u8005\u4fe1\u606f\u3002 None Returns: Type Description Awaitable \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 Source code in omicron/notify/mail.py def mail_notify ( subject : str = None , body : str = None , msg : EmailMessage = None , html = False , receivers = None , ) -> Awaitable : \"\"\"\u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u53d1\u9001\u8005\u3001\u63a5\u6536\u8005\u53ca\u90ae\u4ef6\u670d\u52a1\u5668\u7b49\u914d\u7f6e\u8bf7\u901a\u8fc7cfg4py\u914d\u7f6e\uff1a ``` notify: mail_from: aaron_yang@jieyu.ai mail_to: - code@jieyu.ai mail_server: smtp.ym.163.com ``` \u9a8c\u8bc1\u5bc6\u7801\u8bf7\u901a\u8fc7\u73af\u5883\u53d8\u91cf`MAIL_PASSWORD`\u6765\u914d\u7f6e\u3002 subject/body\u4e0emsg\u5fc5\u987b\u63d0\u4f9b\u5176\u4e00\u3002 ???+ Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Args: msg (EmailMessage, optional): [description]. Defaults to None. subject (str, optional): [description]. Defaults to None. body (str, optional): [description]. Defaults to None. html (bool, optional): body\u662f\u5426\u6309html\u683c\u5f0f\u5904\u7406\uff1f Defaults to False. receivers (List[str], Optional): \u63a5\u6536\u8005\u4fe1\u606f\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4f7f\u7528\u9884\u5148\u914d\u7f6e\u7684\u63a5\u6536\u8005\u4fe1\u606f\u3002 Returns: \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 \"\"\" if all ([ msg is not None , subject or body ]): raise TypeError ( \"msg\u53c2\u6570\u4e0esubject/body\u53ea\u80fd\u63d0\u4f9b\u5176\u4e2d\u4e4b\u4e00\" ) elif all ([ msg is None , subject is None , body is None ]): raise TypeError ( \"\u5fc5\u987b\u63d0\u4f9bmsg\u53c2\u6570\u6216\u8005subjecdt/body\u53c2\u6570\" ) if msg is None : if html : msg = compose ( subject , html = body ) else : msg = compose ( subject , plain_txt = body ) cfg = cfg4py . get_instance () if not receivers : receivers = cfg . notify . mail_to password = os . environ . get ( \"MAIL_PASSWORD\" ) return send_mail ( cfg . notify . mail_from , receivers , password , msg , host = cfg . notify . 
mail_server ) send_mail ( sender , receivers , password , msg = None , host = None , port = 25 , cc = None , bcc = None , subject = None , body = None , username = None ) \u00b6 \u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u5982\u679c\u53ea\u53d1\u9001\u7b80\u5355\u7684\u6587\u672c\u90ae\u4ef6\uff0c\u8bf7\u4f7f\u7528 send_mail(sender, receivers, subject=subject, plain=plain)\u3002\u5982\u679c\u8981\u53d1\u9001\u8f83\u590d\u6742\u7684\u5e26html\u548c\u9644\u4ef6\u7684\u90ae\u4ef6\uff0c\u8bf7\u5148\u8c03\u7528compose()\u751f\u6210\u4e00\u4e2aEmailMessage,\u7136\u540e\u518d\u8c03\u7528send_mail(sender, receivers, msg)\u6765\u53d1\u9001\u90ae\u4ef6\u3002 Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Parameters: Name Type Description Default sender str [description] required receivers List[str] [description] required msg EmailMessage [description]. Defaults to None. None host str [description]. Defaults to None. None port int [description]. Defaults to 25. 25 cc List[str] [description]. Defaults to None. None bcc List[str] [description]. Defaults to None. None subject str [description]. Defaults to None. None plain str [description]. Defaults to None. required username str the username used to logon to mail server. if not provided, then sender is used. None Returns: Type Description Awaitable \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 Source code in omicron/notify/mail.py @retry ( aiosmtplib . errors . SMTPConnectError , tries = 3 , backoff = 2 , delay = 30 , logger = logger ) def send_mail ( sender : str , receivers : List [ str ], password : str , msg : EmailMessage = None , host : str = None , port : int = 25 , cc : List [ str ] = None , bcc : List [ str ] = None , subject : str = None , body : str = None , username : str = None , ) -> Awaitable : \"\"\"\u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u5982\u679c\u53ea\u53d1\u9001\u7b80\u5355\u7684\u6587\u672c\u90ae\u4ef6\uff0c\u8bf7\u4f7f\u7528 send_mail(sender, receivers, subject=subject, plain=plain)\u3002\u5982\u679c\u8981\u53d1\u9001\u8f83\u590d\u6742\u7684\u5e26html\u548c\u9644\u4ef6\u7684\u90ae\u4ef6\uff0c\u8bf7\u5148\u8c03\u7528compose()\u751f\u6210\u4e00\u4e2aEmailMessage,\u7136\u540e\u518d\u8c03\u7528send_mail(sender, receivers, msg)\u6765\u53d1\u9001\u90ae\u4ef6\u3002 ???+ Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Args: sender (str): [description] receivers (List[str]): [description] msg (EmailMessage, optional): [description]. Defaults to None. host (str, optional): [description]. Defaults to None. 
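For richer mail (HTML body plus attachment) the documentation above recommends building the message with `compose()` and passing it to `send_mail()`. A hedged sketch; the addresses, SMTP host and `report.xlsx` path are made-up examples.

```python
import asyncio
import os
from omicron.notify.mail import compose, send_mail

async def main():
    msg = compose(
        subject="daily report",
        html="<h1>Daily Report</h1><p>see attachment</p>",
        attachment="report.xlsx",  # hypothetical file path
    )
    # send_mail returns an asyncio Task; await it or keep the handle to cancel later
    await send_mail(
        "sender@example.com",
        ["receiver@example.com"],
        os.environ.get("MAIL_PASSWORD"),
        msg=msg,
        host="smtp.example.com",
    )

asyncio.run(main())
```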
port (int, optional): [description]. Defaults to 25. cc (List[str], optional): [description]. Defaults to None. bcc (List[str], optional): [description]. Defaults to None. subject (str, optional): [description]. Defaults to None. plain (str, optional): [description]. Defaults to None. username (str, optional): the username used to logon to mail server. if not provided, then `sender` is used. Returns: \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 \"\"\" if all ([ msg is not None , subject is not None or body is not None ]): raise TypeError ( \"msg\u53c2\u6570\u4e0esubject/body\u53ea\u80fd\u63d0\u4f9b\u5176\u4e2d\u4e4b\u4e00\" ) elif all ([ msg is None , subject is None , body is None ]): raise TypeError ( \"\u5fc5\u987b\u63d0\u4f9bmsg\u53c2\u6570\u6216\u8005subjecdt/body\u53c2\u6570\" ) msg = msg or EmailMessage () if isinstance ( receivers , str ): receivers = [ receivers ] msg [ \"From\" ] = sender msg [ \"To\" ] = \", \" . join ( receivers ) if subject : msg [ \"subject\" ] = subject if body : msg . set_content ( body ) if cc : msg [ \"Cc\" ] = \", \" . join ( cc ) if bcc : msg [ \"Bcc\" ] = \", \" . join ( bcc ) username = username or sender if host is None : host = sender . split ( \"@\" )[ - 1 ] task = asyncio . create_task ( aiosmtplib . send ( msg , hostname = host , port = port , username = sender , password = password ) ) return task Backtesting Log Facility \u00b6 Info Since 2.0.0.a76 \u56de\u6d4b\u65f6\uff0c\u6253\u5370\u65f6\u95f4\u4e00\u822c\u8981\u6c42\u4e3a\u56de\u6d4b\u5f53\u65f6\u7684\u65f6\u95f4\uff0c\u800c\u975e\u7cfb\u7edf\u65f6\u95f4\u3002\u8fd9\u4e2a\u6a21\u5757\u63d0\u4f9b\u4e86\u6539\u5199\u65e5\u5fd7\u65f6\u95f4\u7684\u529f\u80fd\u3002 \u4f7f\u7528\u65b9\u6cd5\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 from omicron.core.backtestlog import BacktestLogger logger = BacktestLogger . getLogger ( __name__ ) logger . setLevel ( logging . INFO ) handler = logging . StreamHandler () # \u901a\u8fc7bt_date\u57df\u6765\u8bbe\u7f6e\u65e5\u671f\uff0c\u800c\u4e0d\u662fasctime handler . setFormatter ( Formatter ( \" %(bt_date)s %(message)s \" )) logging . basicConfig ( level = logging . INFO , handlers = [ handler ]) # \u8c03\u7528\u65f6\u4e0e\u666e\u901a\u65e5\u5fd7\u4e00\u6837\uff0c\u4f46\u8981\u589e\u52a0\u4e00\u4e2adate\u53c2\u6570 logger . info ( \"this is info\" , date = datetime . date ( 2022 , 3 , 1 )) \u4e0a\u8ff0\u4ee3\u7801\u5c06\u8f93\u51fa\uff1a 1 2022-03-01 this is info \u4f7f\u7528\u672c\u65e5\u5fd7\u7684\u6838\u5fc3\u662f\u4e0a\u8ff0\u4ee3\u7801\u4e2d\u7684\u7b2c3\u884c\u548c\u7b2c9\u884c\uff0c\u6700\u540e\uff0c\u5728\u8f93\u51fa\u65e5\u5fd7\u65f6\u52a0\u4e0a date=... 
\uff0c\u5982\u7b2c15\u884c\u6240\u793a\u3002 \u6ce8\u610f\u5728\u7b2c9\u884c\uff0c\u901a\u5e38\u662f logging.getLogger(__nam__) \uff0c\u800c\u8fd9\u91cc\u662f BacktestLogger.getLogger(__name__) \u5982\u679c\u4e0a\u8ff0\u8c03\u7528\u4e2d\u6ca1\u6709\u4f20\u5165 date \uff0c\u5219\u5c06\u4f7f\u7528\u8c03\u7528\u65f6\u95f4\uff0c\u6b64\u65f6\u884c\u4e3a\u8ddf\u539f\u65e5\u5fd7\u7cfb\u7edf\u4e00\u81f4\u3002 Warning \u5f53\u8c03\u7528logger.exception\u65f6\uff0c\u4e0d\u80fd\u4f20\u5165date\u53c2\u6570\u3002 \u914d\u7f6e\u6587\u4ef6\u793a\u4f8b \u00b6 \u5982\u679c\u8981\u901a\u8fc7\u914d\u7f6e\u6587\u4ef6\u6765\u914d\u7f6e\uff0c\u53ef\u4f7f\u7528\u4ee5\u4e0b\u793a\u4f8b\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 formatters : backtest : format : '%(bt_date)s | %(message)s' handlers : backtest : class : logging.StreamHandler formatter : backtest omicron.base.strategy : level : INFO handlers : [ backtest ] propagate : false loggers : omicron.base.strategy : level : INFO handlers : [ backtest ] propagate : false","title":"omicron"},{"location":"api/omicron/#omicron.close","text":"\u5173\u95ed\u4e0e\u7f13\u5b58\u7684\u8fde\u63a5 Source code in omicron/__init__.py async def close (): \"\"\"\u5173\u95ed\u4e0e\u7f13\u5b58\u7684\u8fde\u63a5\"\"\" try : await cache . close () except Exception as e : # noqa pass","title":"close()"},{"location":"api/omicron/#omicron.init","text":"\u521d\u59cb\u5316Omicron \u521d\u59cb\u5316influxDB, \u7f13\u5b58\u7b49\u8fde\u63a5\uff0c \u5e76\u52a0\u8f7d\u65e5\u5386\u548c\u8bc1\u5238\u5217\u8868 \u4e0a\u8ff0\u521d\u59cb\u5316\u7684\u8fde\u63a5\uff0c\u5e94\u8be5\u5728\u7a0b\u5e8f\u9000\u51fa\u65f6\uff0c\u901a\u8fc7\u8c03\u7528 close() \u5173\u95ed Source code in omicron/__init__.py async def init ( app_cache : int = 5 ): \"\"\"\u521d\u59cb\u5316Omicron \u521d\u59cb\u5316influxDB, \u7f13\u5b58\u7b49\u8fde\u63a5\uff0c \u5e76\u52a0\u8f7d\u65e5\u5386\u548c\u8bc1\u5238\u5217\u8868 \u4e0a\u8ff0\u521d\u59cb\u5316\u7684\u8fde\u63a5\uff0c\u5e94\u8be5\u5728\u7a0b\u5e8f\u9000\u51fa\u65f6\uff0c\u901a\u8fc7\u8c03\u7528`close()`\u5173\u95ed \"\"\" global cache await cache . init ( app = app_cache ) await tf . init () from omicron.models.security import Security await Security . 
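The `init()`/`close()` pair documented above is normally wrapped around the whole program. A minimal sketch, assuming cfg4py has already been initialized with the cache/influxdb settings omicron expects:

```python
import asyncio
import omicron

async def main():
    await omicron.init()       # connect cache/influxdb, load calendar and security list
    try:
        ...                    # your strategy / analysis code goes here
    finally:
        await omicron.close()  # release the cache connection on exit

asyncio.run(main())
```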
init ()","title":"init()"},{"location":"api/omicron/#extensions-package","text":"","title":"Extensions package"},{"location":"api/omicron/#omicron.extensions.decimals","text":"","title":"decimals"},{"location":"api/omicron/#omicron.extensions.decimals.math_round","text":"\u7531\u4e8e\u6d6e\u70b9\u6570\u7684\u8868\u793a\u95ee\u9898\uff0c\u5f88\u591a\u8bed\u8a00\u7684round\u51fd\u6570\u4e0e\u6570\u5b66\u4e0a\u7684round\u51fd\u6570\u4e0d\u4e00\u81f4\u3002\u4e0b\u9762\u7684\u51fd\u6570\u7ed3\u679c\u4e0e\u6570\u5b66\u4e0a\u7684\u4e00\u81f4\u3002 Parameters: Name Type Description Default x float \u8981\u8fdb\u884c\u56db\u820d\u4e94\u5165\u7684\u6570\u5b57 required digits int \u5c0f\u6570\u70b9\u540e\u4fdd\u7559\u7684\u4f4d\u6570 required Source code in omicron/extensions/decimals.py def math_round ( x : float , digits : int ): \"\"\"\u7531\u4e8e\u6d6e\u70b9\u6570\u7684\u8868\u793a\u95ee\u9898\uff0c\u5f88\u591a\u8bed\u8a00\u7684round\u51fd\u6570\u4e0e\u6570\u5b66\u4e0a\u7684round\u51fd\u6570\u4e0d\u4e00\u81f4\u3002\u4e0b\u9762\u7684\u51fd\u6570\u7ed3\u679c\u4e0e\u6570\u5b66\u4e0a\u7684\u4e00\u81f4\u3002 Args: x: \u8981\u8fdb\u884c\u56db\u820d\u4e94\u5165\u7684\u6570\u5b57 digits: \u5c0f\u6570\u70b9\u540e\u4fdd\u7559\u7684\u4f4d\u6570 \"\"\" return int ( x * ( 10 ** digits ) + copysign ( 0.5 , x )) / ( 10 ** digits )","title":"math_round()"},{"location":"api/omicron/#omicron.extensions.decimals.price_equal","text":"\u5224\u65ad\u80a1\u4ef7\u662f\u5426\u76f8\u7b49 Parameters: Name Type Description Default x \u4ef7\u683c1 required y \u4ef7\u683c2 required Returns: Type Description bool \u5982\u679c\u76f8\u7b49\u5219\u8fd4\u56deTrue\uff0c\u5426\u5219\u8fd4\u56deFalse Source code in omicron/extensions/decimals.py def price_equal ( x : float , y : float ) -> bool : \"\"\"\u5224\u65ad\u80a1\u4ef7\u662f\u5426\u76f8\u7b49 Args: x : \u4ef7\u683c1 y : \u4ef7\u683c2 Returns: \u5982\u679c\u76f8\u7b49\u5219\u8fd4\u56deTrue\uff0c\u5426\u5219\u8fd4\u56deFalse \"\"\" return abs ( math_round ( x , 2 ) - math_round ( y , 2 )) < 1e-2","title":"price_equal()"},{"location":"api/omicron/#omicron.extensions.np","text":"Extension function related to numpy","title":"np"},{"location":"api/omicron/#omicron.extensions.np.array_math_round","text":"\u5c06\u4e00\u7ef4\u6570\u7ec4arr\u7684\u6570\u636e\u8fdb\u884c\u56db\u820d\u4e94\u5165 numpy.around\u7684\u51fd\u6570\u5e76\u4e0d\u662f\u6570\u5b66\u4e0a\u7684\u56db\u820d\u4e94\u5165\uff0c\u5bf91.5\u548c2.5\u8fdb\u884cround\u7684\u7ed3\u679c\u90fd\u4f1a\u53d8\u62102\uff0c\u5728\u91d1\u878d\u9886\u57df\u8ba1\u7b97\u4e2d\uff0c\u6211\u4eec\u5fc5\u987b\u4f7f\u7528\u6570\u5b66\u610f\u4e49\u4e0a\u7684\u56db\u820d\u4e94\u5165\u3002 Parameters: Name Type Description Default arr ArrayLike \u8f93\u5165\u6570\u7ec4 required digits int required Returns: Type Description np.ndarray \u56db\u820d\u4e94\u5165\u540e\u7684\u4e00\u7ef4\u6570\u7ec4 Source code in omicron/extensions/np.py def array_math_round ( arr : Union [ float , ArrayLike ], digits : int ) -> np . 
ndarray : \"\"\"\u5c06\u4e00\u7ef4\u6570\u7ec4arr\u7684\u6570\u636e\u8fdb\u884c\u56db\u820d\u4e94\u5165 numpy.around\u7684\u51fd\u6570\u5e76\u4e0d\u662f\u6570\u5b66\u4e0a\u7684\u56db\u820d\u4e94\u5165\uff0c\u5bf91.5\u548c2.5\u8fdb\u884cround\u7684\u7ed3\u679c\u90fd\u4f1a\u53d8\u62102\uff0c\u5728\u91d1\u878d\u9886\u57df\u8ba1\u7b97\u4e2d\uff0c\u6211\u4eec\u5fc5\u987b\u4f7f\u7528\u6570\u5b66\u610f\u4e49\u4e0a\u7684\u56db\u820d\u4e94\u5165\u3002 Args: arr (ArrayLike): \u8f93\u5165\u6570\u7ec4 digits (int): Returns: np.ndarray: \u56db\u820d\u4e94\u5165\u540e\u7684\u4e00\u7ef4\u6570\u7ec4 \"\"\" # \u5982\u679c\u662f\u5355\u4e2a\u5143\u7d20\uff0c\u5219\u76f4\u63a5\u8fd4\u56de if isinstance ( arr , float ): return decimals . math_round ( arr , digits ) f = np . vectorize ( lambda x : decimals . math_round ( x , digits )) return f ( arr )","title":"array_math_round()"},{"location":"api/omicron/#omicron.extensions.np.array_price_equal","text":"\u5224\u65ad\u4e24\u4e2a\u4ef7\u683c\u6570\u7ec4\u662f\u5426\u76f8\u7b49 Parameters: Name Type Description Default price1 ArrayLike \u4ef7\u683c\u6570\u7ec4 required price2 ArrayLike \u4ef7\u683c\u6570\u7ec4 required Returns: Type Description np.ndarray \u5224\u65ad\u7ed3\u679c Source code in omicron/extensions/np.py def array_price_equal ( price1 : ArrayLike , price2 : ArrayLike ) -> np . ndarray : \"\"\"\u5224\u65ad\u4e24\u4e2a\u4ef7\u683c\u6570\u7ec4\u662f\u5426\u76f8\u7b49 Args: price1 (ArrayLike): \u4ef7\u683c\u6570\u7ec4 price2 (ArrayLike): \u4ef7\u683c\u6570\u7ec4 Returns: np.ndarray: \u5224\u65ad\u7ed3\u679c \"\"\" price1 = array_math_round ( price1 , 2 ) price2 = array_math_round ( price2 , 2 ) return abs ( price1 - price2 ) < 1e-2","title":"array_price_equal()"},{"location":"api/omicron/#omicron.extensions.np.bars_since","text":"Return the number of bars since condition sequence was last True , or if never, return default . 1 2 3 >>> condition = [True, True, False] >>> bars_since(condition) 1 Source code in omicron/extensions/np.py def bars_since ( condition : Sequence [ bool ], default = None ) -> int : \"\"\" Return the number of bars since `condition` sequence was last `True`, or if never, return `default`. >>> condition = [True, True, False] >>> bars_since(condition) 1 \"\"\" return next ( compress ( range ( len ( condition )), reversed ( condition )), default )","title":"bars_since()"},{"location":"api/omicron/#omicron.extensions.np.bin_cut","text":"\u5c06\u6570\u7ec4arr\u5207\u5206\u6210n\u4efd todo: use padding + reshape to boost performance Parameters: Name Type Description Default arr [type] [description] required n [type] [description] required Returns: Type Description [type] [description] Source code in omicron/extensions/np.py def bin_cut ( arr : list , n : int ): \"\"\"\u5c06\u6570\u7ec4arr\u5207\u5206\u6210n\u4efd todo: use padding + reshape to boost performance Args: arr ([type]): [description] n ([type]): [description] Returns: [type]: [description] \"\"\" result = [[] for i in range ( n )] for i , e in enumerate ( arr ): result [ i % n ] . 
append ( e ) return [ e for e in result if len ( e )]","title":"bin_cut()"},{"location":"api/omicron/#omicron.extensions.np.count_between","text":"\u8ba1\u7b97\u6570\u7ec4\u4e2d\uff0c start \u5143\u7d20\u4e0e end \u5143\u7d20\u4e4b\u95f4\u5171\u6709\u591a\u5c11\u4e2a\u5143\u7d20 \u8981\u6c42arr\u5fc5\u987b\u662f\u5df2\u6392\u5e8f\u3002\u8ba1\u7b97\u7ed3\u679c\u4f1a\u5305\u542b\u533a\u95f4\u8fb9\u754c\u70b9\u3002 Examples: >>> arr = [ 20050104 , 20050105 , 20050106 , 20050107 , 20050110 , 20050111 ] >>> count_between ( arr , 20050104 , 20050111 ) 6 >>> count_between ( arr , 20050104 , 20050109 ) 4 Source code in omicron/extensions/np.py def count_between ( arr , start , end ): \"\"\"\u8ba1\u7b97\u6570\u7ec4\u4e2d\uff0c`start`\u5143\u7d20\u4e0e`end`\u5143\u7d20\u4e4b\u95f4\u5171\u6709\u591a\u5c11\u4e2a\u5143\u7d20 \u8981\u6c42arr\u5fc5\u987b\u662f\u5df2\u6392\u5e8f\u3002\u8ba1\u7b97\u7ed3\u679c\u4f1a\u5305\u542b\u533a\u95f4\u8fb9\u754c\u70b9\u3002 Examples: >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111] >>> count_between(arr, 20050104, 20050111) 6 >>> count_between(arr, 20050104, 20050109) 4 \"\"\" pos_start = np . searchsorted ( arr , start , side = \"right\" ) pos_end = np . searchsorted ( arr , end , side = \"right\" ) counter = pos_end - pos_start + 1 if start < arr [ 0 ]: counter -= 1 if end > arr [ - 1 ]: counter -= 1 return counter","title":"count_between()"},{"location":"api/omicron/#omicron.extensions.np.dataframe_to_structured_array","text":"convert dataframe (with all columns, and index possibly) to numpy structured arrays len(dtypes) should be either equal to len(df.columns) or len(df.columns) + 1 . In the later case, it implies to include df.index into converted array. Parameters: Name Type Description Default df DataFrame the one needs to be converted required dtypes List[Tuple] Defaults to None. If it's None , then dtypes of df is used, in such case, the index of df will not be converted. None Returns: Type Description ArrayLike [description] Source code in omicron/extensions/np.py def dataframe_to_structured_array ( df : DataFrame , dtypes : List [ Tuple ] = None ) -> ArrayLike : \"\"\"convert dataframe (with all columns, and index possibly) to numpy structured arrays `len(dtypes)` should be either equal to `len(df.columns)` or `len(df.columns) + 1`. In the later case, it implies to include `df.index` into converted array. Args: df: the one needs to be converted dtypes: Defaults to None. If it's `None`, then dtypes of `df` is used, in such case, the `index` of `df` will not be converted. Returns: ArrayLike: [description] \"\"\" v = df if dtypes is not None : dtypes_in_dict = { key : value for key , value in dtypes } col_len = len ( df . columns ) if len ( dtypes ) == col_len + 1 : v = df . reset_index () rename_index_to = set ( dtypes_in_dict . keys ()) . difference ( set ( df . columns )) v . rename ( columns = { \"index\" : list ( rename_index_to )[ 0 ]}, inplace = True ) elif col_len != len ( dtypes ): raise ValueError ( f \"length of dtypes should be either { col_len } or { col_len + 1 } , is { len ( dtypes ) } \" ) # re-arrange order of dtypes, in order to align with df.columns dtypes = [] for name in v . columns : dtypes . append (( name , dtypes_in_dict [ name ])) else : dtypes = df . dtypes return np . array ( np . rec . fromrecords ( v . 
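`bin_cut`, documented above, deals the elements round-robin into `n` buckets and drops the empty buckets; for example:

```python
from omicron.extensions.np import bin_cut

bin_cut([1, 2, 3, 4, 5], 3)    # -> [[1, 4], [2, 5], [3]]
bin_cut([1, 2], 3)             # -> [[1], [2]], empty buckets are dropped
```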
values ), dtype = dtypes )","title":"dataframe_to_structured_array()"},{"location":"api/omicron/#omicron.extensions.np.dict_to_numpy_array","text":"convert dictionary to numpy array Examples: d = {\"aaron\": 5, \"jack\": 6} dtype = [(\"name\", \"S8\"), (\"score\", \" np . array : \"\"\"convert dictionary to numpy array Examples: >>> d = {\"aaron\": 5, \"jack\": 6} >>> dtype = [(\"name\", \"S8\"), (\"score\", \">> dict_to_numpy_array(d, dtype) array([(b'aaron', 5), (b'jack', 6)], dtype=[('name', 'S8'), ('score', '>> arr = np . arange ( 6 , dtype = np . float32 ) >>> arr [ 3 : 5 ] = np . NaN >>> fill_nan ( arr ) ... array ([ 0. , 1. , 2. , 2. , 2. , 5. ], dtype = float32 ) >>> arr = np . arange ( 6 , dtype = np . float32 ) >>> arr [ 0 : 2 ] = np . nan >>> fill_nan ( arr ) ... array ([ 2. , 2. , 2. , 3. , 4. , 5. ], dtype = float32 ) Parameters: Name Type Description Default ts np.array [description] required Source code in omicron/extensions/np.py def fill_nan ( ts : np . ndarray ): \"\"\"\u5c06ts\u4e2d\u7684NaN\u66ff\u6362\u4e3a\u5176\u524d\u503c \u5982\u679cts\u8d77\u5934\u7684\u5143\u7d20\u4e3aNaN\uff0c\u5219\u7528\u7b2c\u4e00\u4e2a\u975eNaN\u5143\u7d20\u66ff\u6362\u3002 \u5982\u679c\u6240\u6709\u5143\u7d20\u90fd\u4e3aNaN\uff0c\u5219\u65e0\u6cd5\u66ff\u6362\u3002 Example: >>> arr = np.arange(6, dtype=np.float32) >>> arr[3:5] = np.NaN >>> fill_nan(arr) ... # doctest: +NORMALIZE_WHITESPACE array([0., 1., 2., 2., 2., 5.], dtype=float32) >>> arr = np.arange(6, dtype=np.float32) >>> arr[0:2] = np.nan >>> fill_nan(arr) ... # doctest: +NORMALIZE_WHITESPACE array([2., 2., 2., 3., 4., 5.], dtype=float32) Args: ts (np.array): [description] \"\"\" if np . all ( np . isnan ( ts )): raise ValueError ( \"all of ts are NaN\" ) if ts [ 0 ] is None or math . isnan ( ts [ 0 ]): idx = np . argwhere ( ~ np . isnan ( ts ))[ 0 ] ts [ 0 ] = ts [ idx ] mask = np . isnan ( ts ) idx = np . where ( ~ mask , np . arange ( mask . size ), 0 ) np . maximum . accumulate ( idx , out = idx ) return ts [ idx ]","title":"fill_nan()"},{"location":"api/omicron/#omicron.extensions.np.find_runs","text":"Find runs of consecutive items in an array. Parameters: Name Type Description Default x ArrayLike the sequence to find runs in required Returns: Type Description Tuple[np.ndarray, np.ndarray, np.ndarray] A tuple of unique values, start indices, and length of runs Source code in omicron/extensions/np.py def find_runs ( x : ArrayLike ) -> Tuple [ np . ndarray , np . ndarray , np . ndarray ]: \"\"\"Find runs of consecutive items in an array. Args: x: the sequence to find runs in Returns: A tuple of unique values, start indices, and length of runs \"\"\" # ensure array x = np . asanyarray ( x ) if x . ndim != 1 : raise ValueError ( \"only 1D array supported\" ) n = x . shape [ 0 ] # handle empty array if n == 0 : return np . array ([]), np . array ([]), np . array ([]) else : # find run starts loc_run_start = np . empty ( n , dtype = bool ) loc_run_start [ 0 ] = True np . not_equal ( x [: - 1 ], x [ 1 :], out = loc_run_start [ 1 :]) run_starts = np . nonzero ( loc_run_start )[ 0 ] # find run values run_values = x [ loc_run_start ] # find run lengths run_lengths = np . diff ( np . 
append ( run_starts , n )) return run_values , run_starts , run_lengths","title":"find_runs()"},{"location":"api/omicron/#omicron.extensions.np.floor","text":"\u5728\u6570\u636earr\u4e2d\uff0c\u627e\u5230\u5c0f\u4e8e\u7b49\u4e8eitem\u7684\u90a3\u4e00\u4e2a\u503c\u3002\u5982\u679citem\u5c0f\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[0];\u5982\u679citem \u5927\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[-1] \u4e0e minute_frames_floor \u4e0d\u540c\u7684\u662f\uff0c\u672c\u51fd\u6570\u4e0d\u505a\u56de\u7ed5\u4e0e\u8fdb\u4f4d. Examples: >>> a = [ 3 , 6 , 9 ] >>> floor ( a , - 1 ) 3 >>> floor ( a , 9 ) 9 >>> floor ( a , 10 ) 9 >>> floor ( a , 4 ) 3 >>> floor ( a , 10 ) 9 Parameters: Name Type Description Default arr required item required Source code in omicron/extensions/np.py def floor ( arr , item ): \"\"\" \u5728\u6570\u636earr\u4e2d\uff0c\u627e\u5230\u5c0f\u4e8e\u7b49\u4e8eitem\u7684\u90a3\u4e00\u4e2a\u503c\u3002\u5982\u679citem\u5c0f\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[0];\u5982\u679citem \u5927\u4e8e\u6240\u6709arr\u5143\u7d20\u7684\u503c\uff0c\u8fd4\u56dearr[-1] \u4e0e`minute_frames_floor`\u4e0d\u540c\u7684\u662f\uff0c\u672c\u51fd\u6570\u4e0d\u505a\u56de\u7ed5\u4e0e\u8fdb\u4f4d. Examples: >>> a = [3, 6, 9] >>> floor(a, -1) 3 >>> floor(a, 9) 9 >>> floor(a, 10) 9 >>> floor(a, 4) 3 >>> floor(a,10) 9 Args: arr: item: Returns: \"\"\" if item < arr [ 0 ]: return arr [ 0 ] index = np . searchsorted ( arr , item , side = \"right\" ) return arr [ index - 1 ]","title":"floor()"},{"location":"api/omicron/#omicron.extensions.np.join_by_left","text":"\u5de6\u8fde\u63a5 r1 , r2 by key \u5982\u679c r1 \u4e2d\u5b58\u5728 r2 \u4e2d\u6ca1\u6709\u7684\u884c\uff0c\u5219\u8be5\u884c\u5bf9\u5e94\u7684 r2 \u4e2d\u7684\u90a3\u4e9b\u5b57\u6bb5\u5c06\u88abmask\uff0c\u6216\u8005\u586b\u5145\u968f\u673a\u6570\u3002 same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys Reference: stackoverflow Examples: >>> # to join the following >>> # [[ 1, 2], >>> # [ 1, 3], x [[1, 5], >>> # [ 2, 3]] [4, 7]] >>> # only first two rows in left will be joined >>> r1 = np . array ([( 1 , 2 ), ( 1 , 3 ), ( 2 , 3 )], dtype = [( 'seq' , 'i4' ), ( 'score' , 'i4' )]) >>> r2 = np . array ([( 1 , 5 ), ( 4 , 7 )], dtype = [( 'seq' , 'i4' ), ( 'age' , 'i4' )]) >>> joined = join_by_left ( 'seq' , r1 , r2 ) >>> print ( joined ) [( 1 , 2 , 5 ) ( 1 , 3 , 5 ) ( 2 , 3 , -- )] >>> print ( joined . dtype ) ( numpy . record , [( 'seq' , '>> joined [ 2 ][ 2 ] masked >>> joined . 
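`find_runs`, documented a little earlier, returns the run values, the start index of each run and the run lengths; for instance:

```python
import numpy as np
from omicron.extensions.np import find_runs

values, starts, lengths = find_runs([1, 1, 2, 2, 2, 3])
# values  -> array([1, 2, 3])
# starts  -> array([0, 2, 5])
# lengths -> array([2, 3, 1])
```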
tolist ()[ 2 ][ 2 ] == None True Parameters: Name Type Description Default key join\u5173\u952e\u5b57 required r1 \u6570\u636e\u96c61 required r2 \u6570\u636e\u96c62 required Returns: Type Description a numpy array Source code in omicron/extensions/np.py def join_by_left ( key , r1 , r2 , mask = True ): \"\"\"\u5de6\u8fde\u63a5 `r1`, `r2` by `key` \u5982\u679c`r1`\u4e2d\u5b58\u5728`r2`\u4e2d\u6ca1\u6709\u7684\u884c\uff0c\u5219\u8be5\u884c\u5bf9\u5e94\u7684`r2`\u4e2d\u7684\u90a3\u4e9b\u5b57\u6bb5\u5c06\u88abmask\uff0c\u6216\u8005\u586b\u5145\u968f\u673a\u6570\u3002 same as numpy.lib.recfunctions.join_by(key, r1, r2, jointype='leftouter'), but allows r1 have duplicate keys [Reference: stackoverflow](https://stackoverflow.com/a/53261882/13395693) Examples: >>> # to join the following >>> # [[ 1, 2], >>> # [ 1, 3], x [[1, 5], >>> # [ 2, 3]] [4, 7]] >>> # only first two rows in left will be joined >>> r1 = np.array([(1, 2), (1,3), (2,3)], dtype=[('seq', 'i4'), ('score', 'i4')]) >>> r2 = np.array([(1, 5), (4,7)], dtype=[('seq', 'i4'), ('age', 'i4')]) >>> joined = join_by_left('seq', r1, r2) >>> print(joined) [(1, 2, 5) (1, 3, 5) (2, 3, --)] >>> print(joined.dtype) (numpy.record, [('seq', '>> joined[2][2] masked >>> joined.tolist()[2][2] == None True Args: key : join\u5173\u952e\u5b57 r1 : \u6570\u636e\u96c61 r2 : \u6570\u636e\u96c62 Returns: a numpy array \"\"\" # figure out the dtype of the result array descr1 = r1 . dtype . descr descr2 = [ d for d in r2 . dtype . descr if d [ 0 ] not in r1 . dtype . names ] descrm = descr1 + descr2 # figure out the fields we'll need from each array f1 = [ d [ 0 ] for d in descr1 ] f2 = [ d [ 0 ] for d in descr2 ] # cache the number of columns in f1 ncol1 = len ( f1 ) # get a dict of the rows of r2 grouped by key rows2 = {} for row2 in r2 : rows2 . setdefault ( row2 [ key ], []) . append ( row2 ) # figure out how many rows will be in the result nrowm = 0 for k1 in r1 [ key ]: if k1 in rows2 : nrowm += len ( rows2 [ k1 ]) else : nrowm += 1 # allocate the return array # ret = np.full((nrowm, ), fill, dtype=descrm) _ret = np . recarray ( nrowm , dtype = descrm ) if mask : ret = np . ma . array ( _ret , mask = True ) else : ret = _ret # merge the data into the return array i = 0 for row1 in r1 : if row1 [ key ] in rows2 : for row2 in rows2 [ row1 [ key ]]: ret [ i ] = tuple ( row1 [ f1 ]) + tuple ( row2 [ f2 ]) i += 1 else : for j in range ( ncol1 ): ret [ i ][ j ] = row1 [ j ] i += 1 return ret","title":"join_by_left()"},{"location":"api/omicron/#omicron.extensions.np.numpy_append_fields","text":"\u7ed9\u73b0\u6709\u7684\u6570\u7ec4 base \u589e\u52a0\u65b0\u7684\u5b57\u6bb5 \u5b9e\u73b0\u4e86 numpy.lib.recfunctions.rec_append_fields \u7684\u529f\u80fd\u3002\u63d0\u4f9b\u8fd9\u4e2a\u529f\u80fd\uff0c\u662f\u56e0\u4e3a rec_append_fields \u4e0d\u80fd\u5904\u7406 data \u5143\u7d20\u7684\u7c7b\u578b\u4e3aObject\u7684\u60c5\u51b5\u3002 \u65b0\u589e\u7684\u6570\u636e\u5217\u5c06\u987a\u5e8f\u6392\u5217\u5728\u5176\u5b83\u5217\u7684\u53f3\u8fb9\u3002 Examples: >>> # \u65b0\u589e\u5355\u4e2a\u5b57\u6bb5 >>> import numpy >>> old = np . array ([ i for i in range ( 3 )], dtype = [( 'col1' , '>> new_list = [ 2 * i for i in range ( 3 )] >>> res = numpy_append_fields ( old , 'new_col' , new_list , [( 'new_col' , '>> print ( res ) ... [( 0. , 0. ) ( 1. , 2. ) ( 2. , 4. )] >>> # \u65b0\u589e\u591a\u4e2a\u5b57\u6bb5 >>> data = [ res [ 'col1' ] . tolist (), res [ 'new_col' ] . tolist ()] >>> print ( numpy_append_fields ( old , ( 'col3' , 'col4' ), data , [( 'col3' , ' np . 
ndarray : \"\"\"\u7ed9\u73b0\u6709\u7684\u6570\u7ec4`base`\u589e\u52a0\u65b0\u7684\u5b57\u6bb5 \u5b9e\u73b0\u4e86`numpy.lib.recfunctions.rec_append_fields`\u7684\u529f\u80fd\u3002\u63d0\u4f9b\u8fd9\u4e2a\u529f\u80fd\uff0c\u662f\u56e0\u4e3a`rec_append_fields`\u4e0d\u80fd\u5904\u7406`data`\u5143\u7d20\u7684\u7c7b\u578b\u4e3aObject\u7684\u60c5\u51b5\u3002 \u65b0\u589e\u7684\u6570\u636e\u5217\u5c06\u987a\u5e8f\u6392\u5217\u5728\u5176\u5b83\u5217\u7684\u53f3\u8fb9\u3002 Example: >>> # \u65b0\u589e\u5355\u4e2a\u5b57\u6bb5 >>> import numpy >>> old = np.array([i for i in range(3)], dtype=[('col1', '>> new_list = [2 * i for i in range(3)] >>> res = numpy_append_fields(old, 'new_col', new_list, [('new_col', '>> print(res) ... # doctest: +NORMALIZE_WHITESPACE [(0., 0.) (1., 2.) (2., 4.)] >>> # \u65b0\u589e\u591a\u4e2a\u5b57\u6bb5 >>> data = [res['col1'].tolist(), res['new_col'].tolist()] >>> print(numpy_append_fields(old, ('col3', 'col4'), data, [('col3', ' np . ndarray : \"\"\"\u4ece`ts`\u4e2d\u53bb\u9664NaN Args: ts (np.array): [description] Returns: np.array: [description] \"\"\" return ts [ ~ np . isnan ( ts . astype ( float ))]","title":"remove_nan()"},{"location":"api/omicron/#omicron.extensions.np.replace_zero","text":"\u5c06ts\u4e2d\u76840\u66ff\u6362\u4e3a\u524d\u503c, \u5904\u7406volume\u6570\u636e\u65f6\u5e38\u7528\u7528\u5230 \u5982\u679c\u63d0\u4f9b\u4e86replacement, \u5219\u66ff\u6362\u4e3areplacement Source code in omicron/extensions/np.py def replace_zero ( ts : np . ndarray , replacement = None ) -> np . ndarray : \"\"\"\u5c06ts\u4e2d\u76840\u66ff\u6362\u4e3a\u524d\u503c, \u5904\u7406volume\u6570\u636e\u65f6\u5e38\u7528\u7528\u5230 \u5982\u679c\u63d0\u4f9b\u4e86replacement, \u5219\u66ff\u6362\u4e3areplacement \"\"\" if replacement is not None : return np . where ( ts == 0 , replacement , ts ) if np . all ( ts == 0 ): raise ValueError ( \"all of ts are 0\" ) if ts [ 0 ] == 0 : idx = np . argwhere ( ts != 0 )[ 0 ] ts [ 0 ] = ts [ idx ] mask = ts == 0 idx = np . where ( ~ mask , np . arange ( mask . size ), 0 ) np . maximum . accumulate ( idx , out = idx ) return ts [ idx ]","title":"replace_zero()"},{"location":"api/omicron/#omicron.extensions.np.rolling","text":"\u5bf9\u5e8f\u5217 x \u8fdb\u884c\u7a97\u53e3\u6ed1\u52a8\u8ba1\u7b97\u3002 \u5982\u679c func \u8981\u5b9e\u73b0\u7684\u529f\u80fd\u662fargmax, argmin, max, mean, median, min, rank, std, sum, var\u7b49\uff0cmove_argmax\uff0c\u8bf7\u4f7f\u7528bottleneck\u4e2d\u7684move_argmin, move_max, move_mean, move_median, move_min move_rank, move_std, move_sum, move_var\u3002\u8fd9\u4e9b\u51fd\u6570\u7684\u6027\u80fd\u66f4\u597d\u3002 Parameters: Name Type Description Default x [type] [description] required win [type] [description] required func [type] [description] required Returns: Type Description [type] [description] Source code in omicron/extensions/np.py def rolling ( x , win , func ): \"\"\"\u5bf9\u5e8f\u5217`x`\u8fdb\u884c\u7a97\u53e3\u6ed1\u52a8\u8ba1\u7b97\u3002 \u5982\u679c`func`\u8981\u5b9e\u73b0\u7684\u529f\u80fd\u662fargmax, argmin, max, mean, median, min, rank, std, sum, var\u7b49\uff0cmove_argmax\uff0c\u8bf7\u4f7f\u7528bottleneck\u4e2d\u7684move_argmin, move_max, move_mean, move_median, move_min move_rank, move_std, move_sum, move_var\u3002\u8fd9\u4e9b\u51fd\u6570\u7684\u6027\u80fd\u66f4\u597d\u3002 Args: x ([type]): [description] win ([type]): [description] func ([type]): [description] Returns: [type]: [description] \"\"\" results = [] for subarray in sliding_window_view ( x , window_shape = win ): results . 
append ( func ( subarray )) return np . array ( results )","title":"rolling()"},{"location":"api/omicron/#omicron.extensions.np.shift","text":"\u5728numpy\u6570\u7ec4arr\u4e2d\uff0c\u627e\u5230start(\u6216\u8005\u6700\u63a5\u8fd1\u7684\u4e00\u4e2a\uff09\uff0c\u53d6offset\u5bf9\u5e94\u7684\u5143\u7d20\u3002 \u8981\u6c42 arr \u5df2\u6392\u5e8f\u3002 offset \u4e3a\u6b63\uff0c\u8868\u660e\u5411\u540e\u79fb\u4f4d\uff1b offset \u4e3a\u8d1f\uff0c\u8868\u660e\u5411\u524d\u79fb\u4f4d Examples: >>> arr = [ 20050104 , 20050105 , 20050106 , 20050107 , 20050110 , 20050111 ] >>> shift ( arr , 20050104 , 1 ) 20050105 >>> shift ( arr , 20050105 , - 1 ) 20050104 >>> # \u8d77\u59cb\u70b9\u5df2\u53f3\u8d8a\u754c\uff0c\u4e14\u5411\u53f3shift\uff0c\u8fd4\u56de\u8d77\u59cb\u70b9 >>> shift ( arr , 20050120 , 1 ) 20050120 Parameters: Name Type Description Default arr \u5df2\u6392\u5e8f\u7684\u6570\u7ec4 required start numpy\u53ef\u63a5\u53d7\u7684\u6570\u636e\u7c7b\u578b required offset int [description] required Returns: Type Description \u79fb\u4f4d\u540e\u5f97\u5230\u7684\u5143\u7d20\u503c Source code in omicron/extensions/np.py def shift ( arr , start , offset ): \"\"\"\u5728numpy\u6570\u7ec4arr\u4e2d\uff0c\u627e\u5230start(\u6216\u8005\u6700\u63a5\u8fd1\u7684\u4e00\u4e2a\uff09\uff0c\u53d6offset\u5bf9\u5e94\u7684\u5143\u7d20\u3002 \u8981\u6c42`arr`\u5df2\u6392\u5e8f\u3002`offset`\u4e3a\u6b63\uff0c\u8868\u660e\u5411\u540e\u79fb\u4f4d\uff1b`offset`\u4e3a\u8d1f\uff0c\u8868\u660e\u5411\u524d\u79fb\u4f4d Examples: >>> arr = [20050104, 20050105, 20050106, 20050107, 20050110, 20050111] >>> shift(arr, 20050104, 1) 20050105 >>> shift(arr, 20050105, -1) 20050104 >>> # \u8d77\u59cb\u70b9\u5df2\u53f3\u8d8a\u754c\uff0c\u4e14\u5411\u53f3shift\uff0c\u8fd4\u56de\u8d77\u59cb\u70b9 >>> shift(arr, 20050120, 1) 20050120 Args: arr : \u5df2\u6392\u5e8f\u7684\u6570\u7ec4 start : numpy\u53ef\u63a5\u53d7\u7684\u6570\u636e\u7c7b\u578b offset (int): [description] Returns: \u79fb\u4f4d\u540e\u5f97\u5230\u7684\u5143\u7d20\u503c \"\"\" pos = np . searchsorted ( arr , start , side = \"right\" ) if pos + offset - 1 >= len ( arr ): return start else : return arr [ pos + offset - 1 ]","title":"shift()"},{"location":"api/omicron/#omicron.extensions.np.smallest_n_argpos","text":"get smallest n (min->max) elements and return argpos which its value ordered in ascent Examples: >>> smallest_n_argpos ([ np . nan , 4 , 3 , 9 , 8 , 5 , 2 , 1 , 0 , 6 , 7 ], 2 ) array ([ 8 , 7 ]) Parameters: Name Type Description Default ts np.array \u8f93\u5165\u7684\u6570\u7ec4 required n int \u53d6\u6700\u5c0f\u7684n\u4e2a\u5143\u7d20 required Returns: Type Description np.array [description] Source code in omicron/extensions/np.py def smallest_n_argpos ( ts : np . array , n : int ) -> np . array : \"\"\"get smallest n (min->max) elements and return argpos which its value ordered in ascent Example: >>> smallest_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2) array([8, 7]) Args: ts (np.array): \u8f93\u5165\u7684\u6570\u7ec4 n (int): \u53d6\u6700\u5c0f\u7684n\u4e2a\u5143\u7d20 Returns: np.array: [description] \"\"\" return np . 
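`rolling`, shown above, applies an arbitrary function over a sliding window; for the common aggregations its docstring recommends bottleneck's `move_*` functions instead. A toy 3-bar moving average:

```python
import numpy as np
from omicron.extensions.np import rolling

close = np.array([10.0, 11.0, 12.0, 13.0, 14.0])
rolling(close, 3, np.mean)   # -> array([11., 12., 13.])
```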
argsort ( ts )[: n ]","title":"smallest_n_argpos()"},{"location":"api/omicron/#omicron.extensions.np.to_pydatetime","text":"\u5c06numpy.datetime64\u5bf9\u8c61\u8f6c\u6362\u6210\u4e3apython\u7684datetime\u5bf9\u8c61 numpy.ndarray.item()\u65b9\u6cd5\u53ef\u7528\u4ee5\u5c06\u4efb\u4f55numpy\u5bf9\u8c61\u8f6c\u6362\u6210python\u5bf9\u8c61\uff0c\u63a8\u8350\u5728\u4efb\u4f55\u9002\u7528\u7684\u5730\u65b9\u4f7f\u7528.item()\u65b9\u6cd5\uff0c\u800c\u4e0d\u662f\u672c\u65b9\u6cd5\u3002\u793a\u4f8b: 1 2 3 4 arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]') arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0) arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0) Parameters: Name Type Description Default tm the input numpy datetime object required Returns: Type Description datetime.datetime python datetime object .. deprecated:: 2.0.0 use tm.item() instead Source code in omicron/extensions/np.py @deprecated ( \"2.0.0\" , details = \"use `tm.item()` instead\" ) def to_pydatetime ( tm : np . datetime64 ) -> datetime . datetime : \"\"\"\u5c06numpy.datetime64\u5bf9\u8c61\u8f6c\u6362\u6210\u4e3apython\u7684datetime\u5bf9\u8c61 numpy.ndarray.item()\u65b9\u6cd5\u53ef\u7528\u4ee5\u5c06\u4efb\u4f55numpy\u5bf9\u8c61\u8f6c\u6362\u6210python\u5bf9\u8c61\uff0c\u63a8\u8350\u5728\u4efb\u4f55\u9002\u7528\u7684\u5730\u65b9\u4f7f\u7528.item()\u65b9\u6cd5\uff0c\u800c\u4e0d\u662f\u672c\u65b9\u6cd5\u3002\u793a\u4f8b: ``` arr = np.array(['2022-09-08', '2022-09-09'], dtype='datetime64[s]') arr.item(0) # output is datetime.datetime(2022, 9, 8, 0, 0) arr[1].item() # output is datetime.datetime(2022, 9, 9, 0, 0) ``` Args: tm : the input numpy datetime object Returns: python datetime object \"\"\" unix_epoch = np . datetime64 ( 0 , \"s\" ) one_second = np . timedelta64 ( 1 , \"s\" ) seconds_since_epoch = ( tm - unix_epoch ) / one_second return datetime . datetime . utcfromtimestamp ( seconds_since_epoch )","title":"to_pydatetime()"},{"location":"api/omicron/#omicron.extensions.np.top_n_argpos","text":"get top n (max->min) elements and return argpos which its value ordered in descent Examples: >>> top_n_argpos ([ np . nan , 4 , 3 , 9 , 8 , 5 , 2 , 1 , 0 , 6 , 7 ], 2 ) array ([ 3 , 4 ]) Parameters: Name Type Description Default ts np.array [description] required n int [description] required Returns: Type Description np.array [description] Source code in omicron/extensions/np.py def top_n_argpos ( ts : np . array , n : int ) -> np . array : \"\"\"get top n (max->min) elements and return argpos which its value ordered in descent Example: >>> top_n_argpos([np.nan, 4, 3, 9, 8, 5, 2, 1, 0, 6, 7], 2) array([3, 4]) Args: ts (np.array): [description] n (int): [description] Returns: np.array: [description] \"\"\" ts_ = np . copy ( ts ) ts_ [ np . isnan ( ts_ )] = - np . inf return np . 
argsort ( ts_ )[ - n :][:: - 1 ]","title":"top_n_argpos()"},{"location":"api/omicron/#notify-package","text":"","title":"Notify package"},{"location":"api/omicron/#omicron.notify.dingtalk","text":"","title":"dingtalk"},{"location":"api/omicron/#omicron.notify.dingtalk.DingTalkMessage","text":"\u9489\u9489\u7684\u673a\u5668\u4eba\u6d88\u606f\u63a8\u9001\u7c7b\uff0c\u5c01\u88c5\u4e86\u5e38\u7528\u7684\u6d88\u606f\u7c7b\u578b\u4ee5\u53ca\u52a0\u5bc6\u7b97\u6cd5 \u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684access_token \u5982\u679c\u914d\u7f6e\u4e86\u52a0\u7b7e\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684secret \u5982\u679c\u914d\u7f6e\u4e86\u81ea\u5b9a\u4e49\u5173\u952e\u8bcd\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684keyword\uff0c\u591a\u4e2a\u5173\u952e\u8bcd\u7528\u82f1\u6587\u9017\u53f7\u5206\u9694 \u5168\u90e8\u7684\u914d\u7f6e\u6587\u4ef6\u793a\u4f8b\u5982\u4e0b, \u5176\u4e2dsecret\u548ckeyword\u53ef\u4ee5\u4e0d\u914d\u7f6e, access_token\u5fc5\u987b\u914d\u7f6e notify: dingtalk_access_token: xxxx dingtalk_secret: xxxx Source code in omicron/notify/dingtalk.py class DingTalkMessage : \"\"\" \u9489\u9489\u7684\u673a\u5668\u4eba\u6d88\u606f\u63a8\u9001\u7c7b\uff0c\u5c01\u88c5\u4e86\u5e38\u7528\u7684\u6d88\u606f\u7c7b\u578b\u4ee5\u53ca\u52a0\u5bc6\u7b97\u6cd5 \u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684access_token \u5982\u679c\u914d\u7f6e\u4e86\u52a0\u7b7e\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684secret \u5982\u679c\u914d\u7f6e\u4e86\u81ea\u5b9a\u4e49\u5173\u952e\u8bcd\uff0c\u9700\u8981\u5728\u914d\u7f6e\u6587\u4ef6\u4e2d\u914d\u7f6e\u9489\u9489\u7684\u673a\u5668\u4eba\u7684keyword\uff0c\u591a\u4e2a\u5173\u952e\u8bcd\u7528\u82f1\u6587\u9017\u53f7\u5206\u9694 \u5168\u90e8\u7684\u914d\u7f6e\u6587\u4ef6\u793a\u4f8b\u5982\u4e0b, \u5176\u4e2dsecret\u548ckeyword\u53ef\u4ee5\u4e0d\u914d\u7f6e, access_token\u5fc5\u987b\u914d\u7f6e notify: dingtalk_access_token: xxxx dingtalk_secret: xxxx \"\"\" url = \"https://oapi.dingtalk.com/robot/send\" @classmethod def _get_access_token ( cls ): \"\"\"\u83b7\u53d6\u9489\u9489\u673a\u5668\u4eba\u7684access_token\"\"\" if hasattr ( cfg . notify , \"dingtalk_access_token\" ): return cfg . notify . dingtalk_access_token else : logger . error ( \"Dingtalk not configured, please add the following items: \\n \" \"notify: \\n \" \" dingtalk_access_token: xxxx \\n \" \" dingtalk_secret: xxxx \\n \" ) raise ConfigError ( \"dingtalk_access_token not found\" ) @classmethod def _get_secret ( cls ): \"\"\"\u83b7\u53d6\u9489\u9489\u673a\u5668\u4eba\u7684secret\"\"\" if hasattr ( cfg . notify , \"dingtalk_secret\" ): return cfg . notify . dingtalk_secret else : return None @classmethod def _get_url ( cls ): \"\"\"\u83b7\u53d6\u9489\u9489\u673a\u5668\u4eba\u7684\u6d88\u606f\u63a8\u9001\u5730\u5740\uff0c\u5c06\u7b7e\u540d\u548c\u65f6\u95f4\u6233\u62fc\u63a5\u5728url\u540e\u9762\"\"\" access_token = cls . _get_access_token () url = f \" { cls . url } ?access_token= { access_token } \" secret = cls . _get_secret () if secret : timestamp , sign = cls . 
_get_sign ( secret ) url = f \" { url } ×tamp= { timestamp } &sign= { sign } \" return url @classmethod def _get_sign ( cls , secret : str ): \"\"\"\u83b7\u53d6\u7b7e\u540d\u53d1\u9001\u7ed9\u9489\u9489\u673a\u5668\u4eba\"\"\" timestamp = str ( round ( time . time () * 1000 )) secret_enc = secret . encode ( \"utf-8\" ) string_to_sign = \" {} \\n {} \" . format ( timestamp , secret ) string_to_sign_enc = string_to_sign . encode ( \"utf-8\" ) hmac_code = hmac . new ( secret_enc , string_to_sign_enc , digestmod = hashlib . sha256 ) . digest () sign = urllib . parse . quote_plus ( base64 . b64encode ( hmac_code )) return timestamp , sign @classmethod def _send ( cls , msg ): \"\"\"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba\"\"\" url = cls . _get_url () response = httpx . post ( url , json = msg , timeout = 30 ) if response . status_code != 200 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { response . content . decode () } \" ) return rsp = json . loads ( response . content ) if rsp . get ( \"errcode\" ) != 0 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { rsp } \" ) return response . content . decode () @classmethod async def _send_async ( cls , msg ): \"\"\"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba\"\"\" url = cls . _get_url () async with httpx . AsyncClient () as client : r = await client . post ( url , json = msg , timeout = 30 ) if r . status_code != 200 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { r . content . decode () } \" ) return rsp = json . loads ( r . content ) if rsp . get ( \"errcode\" ) != 0 : logger . error ( f \"failed to send message, content: { msg } , response from Dingtalk: { rsp } \" ) return r . content . decode () @classmethod @deprecated ( \"2.0.0\" , details = \"use function `ding` instead\" ) def text ( cls , content ): msg = { \"text\" : { \"content\" : content }, \"msgtype\" : \"text\" } return cls . _send ( msg )","title":"DingTalkMessage"},{"location":"api/omicron/#omicron.notify.dingtalk.DingTalkMessage.text","text":".. deprecated:: 2.0.0 use function ding instead Source code in omicron/notify/dingtalk.py @classmethod @deprecated ( \"2.0.0\" , details = \"use function `ding` instead\" ) def text ( cls , content ): msg = { \"text\" : { \"content\" : content }, \"msgtype\" : \"text\" } return cls . 
_send ( msg )","title":"text()"},{"location":"api/omicron/#omicron.notify.dingtalk.ding","text":"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba \u652f\u6301\u53d1\u9001\u7eaf\u6587\u672c\u6d88\u606f\u548cmarkdown\u683c\u5f0f\u7684\u6587\u672c\u6d88\u606f\u3002\u5982\u679c\u8981\u53d1\u9001markdown\u683c\u5f0f\u7684\u6d88\u606f\uff0c\u8bf7\u901a\u8fc7\u5b57\u5178\u4f20\u5165\uff0c\u5fc5\u987b\u5305\u542b\u5305\u542b\"title\"\u548c\"text\"\u4e24\u4e2a\u5b57\u6bb5\u3002\u66f4\u8be6\u7ec6\u4fe1\u606f\uff0c\u8bf7\u89c1 \u9489\u9489\u5f00\u653e\u5e73\u53f0\u6587\u6863 Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Parameters: Name Type Description Default msg Union[str, dict] \u5f85\u53d1\u9001\u6d88\u606f\u3002 required Returns: Type Description Awaitable \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 Source code in omicron/notify/dingtalk.py def ding ( msg : Union [ str , dict ]) -> Awaitable : \"\"\"\u53d1\u9001\u6d88\u606f\u5230\u9489\u9489\u673a\u5668\u4eba \u652f\u6301\u53d1\u9001\u7eaf\u6587\u672c\u6d88\u606f\u548cmarkdown\u683c\u5f0f\u7684\u6587\u672c\u6d88\u606f\u3002\u5982\u679c\u8981\u53d1\u9001markdown\u683c\u5f0f\u7684\u6d88\u606f\uff0c\u8bf7\u901a\u8fc7\u5b57\u5178\u4f20\u5165\uff0c\u5fc5\u987b\u5305\u542b\u5305\u542b\"title\"\u548c\"text\"\u4e24\u4e2a\u5b57\u6bb5\u3002\u66f4\u8be6\u7ec6\u4fe1\u606f\uff0c\u8bf7\u89c1[\u9489\u9489\u5f00\u653e\u5e73\u53f0\u6587\u6863](https://open.dingtalk.com/document/orgapp-server/message-type) ???+ Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Args: msg: \u5f85\u53d1\u9001\u6d88\u606f\u3002 Returns: \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 \"\"\" if isinstance ( msg , str ): msg_ = { \"text\" : { \"content\" : msg }, \"msgtype\" : \"text\" } elif isinstance ( msg , dict ): msg_ = { \"msgtype\" : \"markdown\" , \"markdown\" : { \"title\" : msg [ \"title\" ], \"text\" : msg [ \"text\" ]}, } else : raise TypeError task = asyncio . create_task ( DingTalkMessage . 
_send_async ( msg_ )) return task","title":"ding()"},{"location":"api/omicron/#omicron.notify.mail","text":"","title":"mail"},{"location":"api/omicron/#omicron.notify.mail.compose","text":"\u7f16\u5199MIME\u90ae\u4ef6\u3002 Parameters: Name Type Description Default subject str \u90ae\u4ef6\u4e3b\u9898 required plain_txt str \u7eaf\u6587\u672c\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9 None html str html\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9. Defaults to None. None attachment str \u9644\u4ef6\u6587\u4ef6\u540d None Returns: Type Description EmailMessage MIME mail Source code in omicron/notify/mail.py def compose ( subject : str , plain_txt : str = None , html : str = None , attachment : str = None ) -> EmailMessage : \"\"\"\u7f16\u5199MIME\u90ae\u4ef6\u3002 Args: subject (str): \u90ae\u4ef6\u4e3b\u9898 plain_txt (str): \u7eaf\u6587\u672c\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9 html (str, optional): html\u683c\u5f0f\u7684\u90ae\u4ef6\u5185\u5bb9. Defaults to None. attachment (str, optional): \u9644\u4ef6\u6587\u4ef6\u540d Returns: MIME mail \"\"\" msg = EmailMessage () msg [ \"Subject\" ] = subject if html : msg . preamble = plain_txt or \"\" msg . set_content ( html , subtype = \"html\" ) else : assert plain_txt , \"Either plain_txt or html is required.\" msg . set_content ( plain_txt ) if attachment : ctype , encoding = mimetypes . guess_type ( attachment ) if ctype is None or encoding is not None : ctype = \"application/octet-stream\" maintype , subtype = ctype . split ( \"/\" , 1 ) with open ( attachment , \"rb\" ) as f : msg . add_attachment ( f . read (), maintype = maintype , subtype = subtype , filename = attachment ) return msg","title":"compose()"},{"location":"api/omicron/#omicron.notify.mail.mail_notify","text":"\u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u53d1\u9001\u8005\u3001\u63a5\u6536\u8005\u53ca\u90ae\u4ef6\u670d\u52a1\u5668\u7b49\u914d\u7f6e\u8bf7\u901a\u8fc7cfg4py\u914d\u7f6e\uff1a 1 2 3 4 5 notify: mail_from: aaron_yang@jieyu.ai mail_to: - code@jieyu.ai mail_server: smtp.ym.163.com \u9a8c\u8bc1\u5bc6\u7801\u8bf7\u901a\u8fc7\u73af\u5883\u53d8\u91cf MAIL_PASSWORD \u6765\u914d\u7f6e\u3002 subject/body\u4e0emsg\u5fc5\u987b\u63d0\u4f9b\u5176\u4e00\u3002 Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Parameters: Name Type Description Default msg EmailMessage [description]. Defaults to None. None subject str [description]. Defaults to None. None body str [description]. Defaults to None. None html bool body\u662f\u5426\u6309html\u683c\u5f0f\u5904\u7406\uff1f Defaults to False. 
False receivers List[str], Optional \u63a5\u6536\u8005\u4fe1\u606f\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4f7f\u7528\u9884\u5148\u914d\u7f6e\u7684\u63a5\u6536\u8005\u4fe1\u606f\u3002 None Returns: Type Description Awaitable \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 Source code in omicron/notify/mail.py def mail_notify ( subject : str = None , body : str = None , msg : EmailMessage = None , html = False , receivers = None , ) -> Awaitable : \"\"\"\u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u53d1\u9001\u8005\u3001\u63a5\u6536\u8005\u53ca\u90ae\u4ef6\u670d\u52a1\u5668\u7b49\u914d\u7f6e\u8bf7\u901a\u8fc7cfg4py\u914d\u7f6e\uff1a ``` notify: mail_from: aaron_yang@jieyu.ai mail_to: - code@jieyu.ai mail_server: smtp.ym.163.com ``` \u9a8c\u8bc1\u5bc6\u7801\u8bf7\u901a\u8fc7\u73af\u5883\u53d8\u91cf`MAIL_PASSWORD`\u6765\u914d\u7f6e\u3002 subject/body\u4e0emsg\u5fc5\u987b\u63d0\u4f9b\u5176\u4e00\u3002 ???+ Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Args: msg (EmailMessage, optional): [description]. Defaults to None. subject (str, optional): [description]. Defaults to None. body (str, optional): [description]. Defaults to None. html (bool, optional): body\u662f\u5426\u6309html\u683c\u5f0f\u5904\u7406\uff1f Defaults to False. receivers (List[str], Optional): \u63a5\u6536\u8005\u4fe1\u606f\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4f7f\u7528\u9884\u5148\u914d\u7f6e\u7684\u63a5\u6536\u8005\u4fe1\u606f\u3002 Returns: \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 \"\"\" if all ([ msg is not None , subject or body ]): raise TypeError ( \"msg\u53c2\u6570\u4e0esubject/body\u53ea\u80fd\u63d0\u4f9b\u5176\u4e2d\u4e4b\u4e00\" ) elif all ([ msg is None , subject is None , body is None ]): raise TypeError ( \"\u5fc5\u987b\u63d0\u4f9bmsg\u53c2\u6570\u6216\u8005subjecdt/body\u53c2\u6570\" ) if msg is None : if html : msg = compose ( subject , html = body ) else : msg = compose ( subject , plain_txt = body ) cfg = cfg4py . get_instance () if not receivers : receivers = cfg . notify . mail_to password = os . environ . get ( \"MAIL_PASSWORD\" ) return send_mail ( cfg . notify . mail_from , receivers , password , msg , host = cfg . notify . 
mail_server )","title":"mail_notify()"},{"location":"api/omicron/#omicron.notify.mail.send_mail","text":"\u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u5982\u679c\u53ea\u53d1\u9001\u7b80\u5355\u7684\u6587\u672c\u90ae\u4ef6\uff0c\u8bf7\u4f7f\u7528 send_mail(sender, receivers, subject=subject, plain=plain)\u3002\u5982\u679c\u8981\u53d1\u9001\u8f83\u590d\u6742\u7684\u5e26html\u548c\u9644\u4ef6\u7684\u90ae\u4ef6\uff0c\u8bf7\u5148\u8c03\u7528compose()\u751f\u6210\u4e00\u4e2aEmailMessage,\u7136\u540e\u518d\u8c03\u7528send_mail(sender, receivers, msg)\u6765\u53d1\u9001\u90ae\u4ef6\u3002 Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Parameters: Name Type Description Default sender str [description] required receivers List[str] [description] required msg EmailMessage [description]. Defaults to None. None host str [description]. Defaults to None. None port int [description]. Defaults to 25. 25 cc List[str] [description]. Defaults to None. None bcc List[str] [description]. Defaults to None. None subject str [description]. Defaults to None. None plain str [description]. Defaults to None. required username str the username used to logon to mail server. if not provided, then sender is used. None Returns: Type Description Awaitable \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 Source code in omicron/notify/mail.py @retry ( aiosmtplib . errors . SMTPConnectError , tries = 3 , backoff = 2 , delay = 30 , logger = logger ) def send_mail ( sender : str , receivers : List [ str ], password : str , msg : EmailMessage = None , host : str = None , port : int = 25 , cc : List [ str ] = None , bcc : List [ str ] = None , subject : str = None , body : str = None , username : str = None , ) -> Awaitable : \"\"\"\u53d1\u9001\u90ae\u4ef6\u901a\u77e5\u3002 \u5982\u679c\u53ea\u53d1\u9001\u7b80\u5355\u7684\u6587\u672c\u90ae\u4ef6\uff0c\u8bf7\u4f7f\u7528 send_mail(sender, receivers, subject=subject, plain=plain)\u3002\u5982\u679c\u8981\u53d1\u9001\u8f83\u590d\u6742\u7684\u5e26html\u548c\u9644\u4ef6\u7684\u90ae\u4ef6\uff0c\u8bf7\u5148\u8c03\u7528compose()\u751f\u6210\u4e00\u4e2aEmailMessage,\u7136\u540e\u518d\u8c03\u7528send_mail(sender, receivers, msg)\u6765\u53d1\u9001\u90ae\u4ef6\u3002 ???+ Important \u5fc5\u987b\u5728\u5f02\u6b65\u7ebf\u7a0b(\u5373\u8fd0\u884casyncio loop\u7684\u7ebf\u7a0b\uff09\u4e2d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5426\u5219\u4f1a\u629b\u51fa\u5f02\u5e38\u3002 \u6b64\u65b9\u6cd5\u8fd4\u56de\u4e00\u4e2aAwaitable\uff0c\u60a8\u53ef\u4ee5\u7b49\u5f85\u5b83\u5b8c\u6210\uff0c\u4e5f\u53ef\u4ee5\u5ffd\u7565\u8fd4\u56de\u503c\uff0c\u6b64\u65f6\u5b83\u5c06\u4f5c\u4e3a\u4e00\u4e2a\u540e\u53f0\u4efb\u52a1\u6267\u884c\uff0c\u4f46\u5b8c\u6210\u7684\u65f6\u95f4\u4e0d\u786e\u5b9a\u3002 Args: sender (str): [description] receivers (List[str]): [description] msg (EmailMessage, optional): [description]. Defaults to None. host (str, optional): [description]. Defaults to None. port (int, optional): [description]. Defaults to 25. 
cc (List[str], optional): [description]. Defaults to None. bcc (List[str], optional): [description]. Defaults to None. subject (str, optional): [description]. Defaults to None. plain (str, optional): [description]. Defaults to None. username (str, optional): the username used to logon to mail server. if not provided, then `sender` is used. Returns: \u53d1\u9001\u6d88\u606f\u7684\u540e\u53f0\u4efb\u52a1\u3002\u60a8\u53ef\u4ee5\u4f7f\u7528\u6b64\u8fd4\u56de\u53e5\u67c4\u6765\u53d6\u6d88\u4efb\u52a1\u3002 \"\"\" if all ([ msg is not None , subject is not None or body is not None ]): raise TypeError ( \"msg\u53c2\u6570\u4e0esubject/body\u53ea\u80fd\u63d0\u4f9b\u5176\u4e2d\u4e4b\u4e00\" ) elif all ([ msg is None , subject is None , body is None ]): raise TypeError ( \"\u5fc5\u987b\u63d0\u4f9bmsg\u53c2\u6570\u6216\u8005subjecdt/body\u53c2\u6570\" ) msg = msg or EmailMessage () if isinstance ( receivers , str ): receivers = [ receivers ] msg [ \"From\" ] = sender msg [ \"To\" ] = \", \" . join ( receivers ) if subject : msg [ \"subject\" ] = subject if body : msg . set_content ( body ) if cc : msg [ \"Cc\" ] = \", \" . join ( cc ) if bcc : msg [ \"Bcc\" ] = \", \" . join ( bcc ) username = username or sender if host is None : host = sender . split ( \"@\" )[ - 1 ] task = asyncio . create_task ( aiosmtplib . send ( msg , hostname = host , port = port , username = sender , password = password ) ) return task","title":"send_mail()"},{"location":"api/omicron/#backtesting-log-facility","text":"Info Since 2.0.0.a76 \u56de\u6d4b\u65f6\uff0c\u6253\u5370\u65f6\u95f4\u4e00\u822c\u8981\u6c42\u4e3a\u56de\u6d4b\u5f53\u65f6\u7684\u65f6\u95f4\uff0c\u800c\u975e\u7cfb\u7edf\u65f6\u95f4\u3002\u8fd9\u4e2a\u6a21\u5757\u63d0\u4f9b\u4e86\u6539\u5199\u65e5\u5fd7\u65f6\u95f4\u7684\u529f\u80fd\u3002 \u4f7f\u7528\u65b9\u6cd5\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 from omicron.core.backtestlog import BacktestLogger logger = BacktestLogger . getLogger ( __name__ ) logger . setLevel ( logging . INFO ) handler = logging . StreamHandler () # \u901a\u8fc7bt_date\u57df\u6765\u8bbe\u7f6e\u65e5\u671f\uff0c\u800c\u4e0d\u662fasctime handler . setFormatter ( Formatter ( \" %(bt_date)s %(message)s \" )) logging . basicConfig ( level = logging . INFO , handlers = [ handler ]) # \u8c03\u7528\u65f6\u4e0e\u666e\u901a\u65e5\u5fd7\u4e00\u6837\uff0c\u4f46\u8981\u589e\u52a0\u4e00\u4e2adate\u53c2\u6570 logger . info ( \"this is info\" , date = datetime . date ( 2022 , 3 , 1 )) \u4e0a\u8ff0\u4ee3\u7801\u5c06\u8f93\u51fa\uff1a 1 2022-03-01 this is info \u4f7f\u7528\u672c\u65e5\u5fd7\u7684\u6838\u5fc3\u662f\u4e0a\u8ff0\u4ee3\u7801\u4e2d\u7684\u7b2c3\u884c\u548c\u7b2c9\u884c\uff0c\u6700\u540e\uff0c\u5728\u8f93\u51fa\u65e5\u5fd7\u65f6\u52a0\u4e0a date=... 
\uff0c\u5982\u7b2c15\u884c\u6240\u793a\u3002 \u6ce8\u610f\u5728\u7b2c9\u884c\uff0c\u901a\u5e38\u662f logging.getLogger(__nam__) \uff0c\u800c\u8fd9\u91cc\u662f BacktestLogger.getLogger(__name__) \u5982\u679c\u4e0a\u8ff0\u8c03\u7528\u4e2d\u6ca1\u6709\u4f20\u5165 date \uff0c\u5219\u5c06\u4f7f\u7528\u8c03\u7528\u65f6\u95f4\uff0c\u6b64\u65f6\u884c\u4e3a\u8ddf\u539f\u65e5\u5fd7\u7cfb\u7edf\u4e00\u81f4\u3002 Warning \u5f53\u8c03\u7528logger.exception\u65f6\uff0c\u4e0d\u80fd\u4f20\u5165date\u53c2\u6570\u3002","title":"Backtesting Log Facility"},{"location":"api/omicron/#omicron.core.backtestlog--\u914d\u7f6e\u6587\u4ef6\u793a\u4f8b","text":"\u5982\u679c\u8981\u901a\u8fc7\u914d\u7f6e\u6587\u4ef6\u6765\u914d\u7f6e\uff0c\u53ef\u4f7f\u7528\u4ee5\u4e0b\u793a\u4f8b\uff1a 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 formatters : backtest : format : '%(bt_date)s | %(message)s' handlers : backtest : class : logging.StreamHandler formatter : backtest omicron.base.strategy : level : INFO handlers : [ backtest ] propagate : false loggers : omicron.base.strategy : level : INFO handlers : [ backtest ] propagate : false","title":"\u914d\u7f6e\u6587\u4ef6\u793a\u4f8b"},{"location":"api/security/","text":"Query \u00b6 \u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61 \u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61\uff0c\u7531 Security.select() \u65b9\u6cd5\u751f\u6210\uff0c\u652f\u6301\u94fe\u5f0f\u67e5\u8be2\u3002\u901a\u8fc7 eval \u51fd\u6570\u7ed3\u675f\u94fe\u5f0f\u8c03\u7528\u5e76\u751f\u6210\u67e5\u8be2\u7ed3\u679c\u3002 Source code in omicron/models/security.py class Query : \"\"\"\u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61 \u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61\uff0c\u7531`Security.select()`\u65b9\u6cd5\u751f\u6210\uff0c\u652f\u6301\u94fe\u5f0f\u67e5\u8be2\u3002\u901a\u8fc7`eval`\u51fd\u6570\u7ed3\u675f\u94fe\u5f0f\u8c03\u7528\u5e76\u751f\u6210\u67e5\u8be2\u7ed3\u679c\u3002 \"\"\" def __init__ ( self , target_date : datetime . date = None ): if target_date is None : # \u805a\u5bbd\u4e0d\u4e00\u5b9a\u4f1a\u53ca\u65f6\u66f4\u65b0\u6570\u636e\uff0c\u56e0\u6b64db\u4e2d\u4e0d\u5b58\u653e\u5f53\u5929\u7684\u6570\u636e\uff0c\u5982\u679c\u4f20\u7a7a\uff0c\u67e5cache self . target_date = None else : # \u5982\u679c\u662f\u4ea4\u6613\u65e5\uff0c\u53d6\u5f53\u5929\uff0c\u5426\u5219\u53d6\u524d\u4e00\u5929 self . target_date = tf . day_shift ( target_date , 0 ) # \u540d\u5b57\uff0c\u663e\u793a\u540d\uff0c\u7c7b\u578b\u8fc7\u6ee4\u5668 self . _name_pattern = None # \u5b57\u6bcd\u540d\u5b57 self . _alias_pattern = None # \u663e\u793a\u540d self . _type_pattern = None # \u4e0d\u6307\u5b9a\u5219\u9ed8\u8ba4\u4e3a\u5168\u90e8\uff0c\u5982\u679c\u4f20\u5165\u7a7a\u503c\u5219\u53ea\u9009\u62e9\u80a1\u7968\u548c\u6307\u6570 # \u5f00\u5173\u9009\u9879 self . _exclude_kcb = False # \u79d1\u521b\u677f self . _exclude_cyb = False # \u521b\u4e1a\u677f self . _exclude_st = False # ST self . _include_exit = False # \u662f\u5426\u5305\u542b\u5df2\u9000\u5e02\u8bc1\u5238(\u9ed8\u8ba4\u4e0d\u5305\u62ec\u5f53\u5929\u9000\u5e02\u7684) # \u4e0b\u5217\u5f00\u5173\u4f18\u5148\u7ea7\u9ad8\u4e8e\u4e0a\u9762\u7684 self . _only_kcb = False self . _only_cyb = False self . _only_st = False def only_cyb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u521b\u4e1a\u677f\u80a1\u7968\"\"\" self . _only_cyb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_cyb = False self . _only_kcb = False self . 
_only_st = False return self def only_st ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542bST\u7c7b\u578b\u7684\u8bc1\u5238\"\"\" self . _only_st = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_st = False self . _only_kcb = False self . _only_cyb = False return self def only_kcb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u79d1\u521b\u677f\u80a1\u7968\"\"\" self . _only_kcb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_kcb = False self . _only_cyb = False self . _only_st = False return self def exclude_st ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664ST\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_st = True self . _only_st = False return self def exclude_cyb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u521b\u4e1a\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_cyb = True self . _only_cyb = False return self def exclude_kcb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u79d1\u521b\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_kcb = True self . _only_kcb = False return self def include_exit ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u5305\u542b\u5df2\u9000\u5e02\u7684\u8bc1\u5238\"\"\" self . _include_exit = True return self def types ( self , types : List [ str ]) -> \"Query\" : \"\"\"\u9009\u62e9\u7c7b\u578b\u5728`types`\u4e2d\u7684\u8bc1\u5238\u54c1\u79cd \u5982\u679c\u4e0d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u9ed8\u8ba4\u9009\u62e9\u6240\u6709\u80a1\u7968\u7c7b\u578b\u3002 \u5982\u679c\u8c03\u7528\u6b64\u65b9\u6cd5\u4f46\u4e0d\u4f20\u5165\u53c2\u6570\uff0c\u9ed8\u8ba4\u9009\u62e9\u6307\u6570+\u80a1\u7968 Args: types: \u6709\u6548\u7684\u7c7b\u578b\u5305\u62ec: \u5bf9\u80a1\u7968\u6307\u6570\u800c\u8a00\u662f\uff08'index', 'stock'\uff09\uff0c\u5bf9\u57fa\u91d1\u800c\u8a00\u5219\u662f\uff08'etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof'\uff09 \"\"\" if types is None or isinstance ( types , List ) is False : return self if len ( types ) == 0 : self . _type_pattern = [ \"index\" , \"stock\" ] else : tmp = set ( types ) self . _type_pattern = list ( tmp ) return self def name_like ( self , name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u540d\u79f0\u4e2d\u51fa\u73b0`name`\u7684\u54c1\u79cd \u6ce8\u610f\u8fd9\u91cc\u7684\u8bc1\u5238\u540d\u79f0\u5e76\u4e0d\u662f\u5176\u663e\u793a\u540d\u3002\u6bd4\u5982\u5bf9\u4e2d\u56fd\u5e73\u5b89000001.XSHE\u6765\u8bf4\uff0c\u5b83\u7684\u540d\u79f0\u662fZGPA\uff0c\u800c\u4e0d\u662f\u201c\u4e2d\u56fd\u5e73\u5b89\u201d\u3002 Args: name: \u5f85\u67e5\u627e\u7684\u540d\u5b57\uff0c\u6bd4\u5982\"ZGPA\" \"\"\" if name is None or len ( name ) == 0 : self . _name_pattern = None else : self . _name_pattern = name return self def alias_like ( self , display_name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u663e\u793a\u540d\u4e2d\u51fa\u73b0`display_name\u7684\u54c1\u79cd Args: display_name: \u663e\u793a\u540d\uff0c\u6bd4\u5982\u201c\u4e2d\u56fd\u5e73\u5b89\" \"\"\" if display_name is None or len ( display_name ) == 0 : self . _alias_pattern = None else : self . _alias_pattern = display_name return self async def eval ( self ) -> List [ str ]: \"\"\"\u5bf9\u67e5\u8be2\u7ed3\u679c\u8fdb\u884c\u6c42\u503c\uff0c\u8fd4\u56decode\u5217\u8868 Returns: \u4ee3\u7801\u5217\u8868 \"\"\" logger . debug ( \"eval, date: %s \" , self . target_date ) logger . debug ( \"eval, names and types: %s , %s , %s \" , self . 
_name_pattern , self . _alias_pattern , self . _type_pattern , ) logger . debug ( \"eval, exclude and include: %s , %s , %s , %s \" , self . _exclude_cyb , self . _exclude_st , self . _exclude_kcb , self . _include_exit , ) logger . debug ( \"eval, only: %s , %s , %s \" , self . _only_cyb , self . _only_st , self . _only_kcb ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache : # \u65e0\u6b64\u6570\u636e\u8bf4\u660eomega\u6709\u67d0\u4e9b\u95ee\u9898\uff0c\u4e0d\u5904\u7406 _date = arrow . get ( date_in_cache ) . date () else : now = datetime . datetime . now () _date = tf . day_shift ( now , 0 ) # \u786e\u5b9a\u6570\u636e\u6e90\uff0ccache\u4e3a\u5f53\u59298\u70b9\u4e4b\u540e\u83b7\u53d6\u7684\u6570\u636e\uff0c\u6570\u636e\u5e93\u5b58\u653e\u524d\u4e00\u65e5\u548c\u66f4\u65e9\u7684\u6570\u636e if not self . target_date or self . target_date >= _date : self . target_date = _date records = None if self . target_date == _date : # \u4ece\u5185\u5b58\u4e2d\u67e5\u627e\uff0c\u5982\u679c\u7f13\u5b58\u4e2d\u7684\u6570\u636e\u5df2\u66f4\u65b0\uff0c\u91cd\u65b0\u52a0\u8f7d\u5230\u5185\u5b58 secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] records = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) else : records = await Security . load_securities_from_db ( self . target_date ) if records is None : return None results = [] self . _type_pattern = self . _type_pattern or SecurityType . STOCK . value for record in records : if self . _type_pattern is not None : if record [ \"type\" ] not in self . _type_pattern : continue if self . _name_pattern is not None : if record [ \"name\" ] . find ( self . _name_pattern ) == - 1 : continue if self . _alias_pattern is not None : if record [ \"alias\" ] . find ( self . _alias_pattern ) == - 1 : continue # \u521b\u4e1a\u677f\uff0c\u79d1\u521b\u677f\uff0cST\u6682\u65f6\u9650\u5b9a\u4e3a\u80a1\u7968\u7c7b\u578b if self . _only_cyb : if record [ \"type\" ] != SecurityType . STOCK . value or not ( record [ \"code\" ][: 3 ] in ( \"300\" , \"301\" ) ): continue if self . _only_kcb : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"code\" ] . startswith ( \"688\" ) is False ): continue if self . _only_st : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"alias\" ] . find ( \"ST\" ) == - 1 ): continue if self . _exclude_cyb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ][ : 3 ] in ( \"300\" , \"301\" ): continue if self . _exclude_st : if ( record [ \"type\" ] == SecurityType . STOCK . value and record [ \"alias\" ] . find ( \"ST\" ) != - 1 ): continue if self . _exclude_kcb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ] . startswith ( \"688\" ): continue # \u9000\u5e02\u6682\u4e0d\u9650\u5b9a\u662f\u5426\u4e3a\u80a1\u7968 if self . _include_exit is False : d1 = convert_nptime_to_datetime ( record [ \"end\" ]) . date () if d1 < self . target_date : continue results . 
append ( record [ \"code\" ]) # \u8fd4\u56de\u6240\u6709\u67e5\u8be2\u5230\u7684\u7ed3\u679c return results alias_like ( self , display_name ) \u00b6 \u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u663e\u793a\u540d\u4e2d\u51fa\u73b0`display_name\u7684\u54c1\u79cd Parameters: Name Type Description Default display_name str \u663e\u793a\u540d\uff0c\u6bd4\u5982\u201c\u4e2d\u56fd\u5e73\u5b89\" required Source code in omicron/models/security.py def alias_like ( self , display_name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u663e\u793a\u540d\u4e2d\u51fa\u73b0`display_name\u7684\u54c1\u79cd Args: display_name: \u663e\u793a\u540d\uff0c\u6bd4\u5982\u201c\u4e2d\u56fd\u5e73\u5b89\" \"\"\" if display_name is None or len ( display_name ) == 0 : self . _alias_pattern = None else : self . _alias_pattern = display_name return self eval ( self ) async \u00b6 \u5bf9\u67e5\u8be2\u7ed3\u679c\u8fdb\u884c\u6c42\u503c\uff0c\u8fd4\u56decode\u5217\u8868 Returns: Type Description List[str] \u4ee3\u7801\u5217\u8868 Source code in omicron/models/security.py async def eval ( self ) -> List [ str ]: \"\"\"\u5bf9\u67e5\u8be2\u7ed3\u679c\u8fdb\u884c\u6c42\u503c\uff0c\u8fd4\u56decode\u5217\u8868 Returns: \u4ee3\u7801\u5217\u8868 \"\"\" logger . debug ( \"eval, date: %s \" , self . target_date ) logger . debug ( \"eval, names and types: %s , %s , %s \" , self . _name_pattern , self . _alias_pattern , self . _type_pattern , ) logger . debug ( \"eval, exclude and include: %s , %s , %s , %s \" , self . _exclude_cyb , self . _exclude_st , self . _exclude_kcb , self . _include_exit , ) logger . debug ( \"eval, only: %s , %s , %s \" , self . _only_cyb , self . _only_st , self . _only_kcb ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache : # \u65e0\u6b64\u6570\u636e\u8bf4\u660eomega\u6709\u67d0\u4e9b\u95ee\u9898\uff0c\u4e0d\u5904\u7406 _date = arrow . get ( date_in_cache ) . date () else : now = datetime . datetime . now () _date = tf . day_shift ( now , 0 ) # \u786e\u5b9a\u6570\u636e\u6e90\uff0ccache\u4e3a\u5f53\u59298\u70b9\u4e4b\u540e\u83b7\u53d6\u7684\u6570\u636e\uff0c\u6570\u636e\u5e93\u5b58\u653e\u524d\u4e00\u65e5\u548c\u66f4\u65e9\u7684\u6570\u636e if not self . target_date or self . target_date >= _date : self . target_date = _date records = None if self . target_date == _date : # \u4ece\u5185\u5b58\u4e2d\u67e5\u627e\uff0c\u5982\u679c\u7f13\u5b58\u4e2d\u7684\u6570\u636e\u5df2\u66f4\u65b0\uff0c\u91cd\u65b0\u52a0\u8f7d\u5230\u5185\u5b58 secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] records = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) else : records = await Security . load_securities_from_db ( self . target_date ) if records is None : return None results = [] self . _type_pattern = self . _type_pattern or SecurityType . STOCK . value for record in records : if self . _type_pattern is not None : if record [ \"type\" ] not in self . _type_pattern : continue if self . _name_pattern is not None : if record [ \"name\" ] . find ( self . _name_pattern ) == - 1 : continue if self . _alias_pattern is not None : if record [ \"alias\" ] . find ( self . _alias_pattern ) == - 1 : continue # \u521b\u4e1a\u677f\uff0c\u79d1\u521b\u677f\uff0cST\u6682\u65f6\u9650\u5b9a\u4e3a\u80a1\u7968\u7c7b\u578b if self . _only_cyb : if record [ \"type\" ] != SecurityType . STOCK . value or not ( record [ \"code\" ][: 3 ] in ( \"300\" , \"301\" ) ): continue if self . 
_only_kcb : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"code\" ] . startswith ( \"688\" ) is False ): continue if self . _only_st : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"alias\" ] . find ( \"ST\" ) == - 1 ): continue if self . _exclude_cyb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ][ : 3 ] in ( \"300\" , \"301\" ): continue if self . _exclude_st : if ( record [ \"type\" ] == SecurityType . STOCK . value and record [ \"alias\" ] . find ( \"ST\" ) != - 1 ): continue if self . _exclude_kcb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ] . startswith ( \"688\" ): continue # \u9000\u5e02\u6682\u4e0d\u9650\u5b9a\u662f\u5426\u4e3a\u80a1\u7968 if self . _include_exit is False : d1 = convert_nptime_to_datetime ( record [ \"end\" ]) . date () if d1 < self . target_date : continue results . append ( record [ \"code\" ]) # \u8fd4\u56de\u6240\u6709\u67e5\u8be2\u5230\u7684\u7ed3\u679c return results exclude_cyb ( self ) \u00b6 \u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u521b\u4e1a\u677f\u7c7b\u578b\u7684\u80a1\u7968 Source code in omicron/models/security.py def exclude_cyb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u521b\u4e1a\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_cyb = True self . _only_cyb = False return self exclude_kcb ( self ) \u00b6 \u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u79d1\u521b\u677f\u7c7b\u578b\u7684\u80a1\u7968 Source code in omicron/models/security.py def exclude_kcb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u79d1\u521b\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_kcb = True self . _only_kcb = False return self exclude_st ( self ) \u00b6 \u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664ST\u7c7b\u578b\u7684\u80a1\u7968 Source code in omicron/models/security.py def exclude_st ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664ST\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_st = True self . _only_st = False return self include_exit ( self ) \u00b6 \u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u5305\u542b\u5df2\u9000\u5e02\u7684\u8bc1\u5238 Source code in omicron/models/security.py def include_exit ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u5305\u542b\u5df2\u9000\u5e02\u7684\u8bc1\u5238\"\"\" self . 
_include_exit = True return self name_like ( self , name ) \u00b6 \u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u540d\u79f0\u4e2d\u51fa\u73b0 name \u7684\u54c1\u79cd \u6ce8\u610f\u8fd9\u91cc\u7684\u8bc1\u5238\u540d\u79f0\u5e76\u4e0d\u662f\u5176\u663e\u793a\u540d\u3002\u6bd4\u5982\u5bf9\u4e2d\u56fd\u5e73\u5b89000001.XSHE\u6765\u8bf4\uff0c\u5b83\u7684\u540d\u79f0\u662fZGPA\uff0c\u800c\u4e0d\u662f\u201c\u4e2d\u56fd\u5e73\u5b89\u201d\u3002 Parameters: Name Type Description Default name str \u5f85\u67e5\u627e\u7684\u540d\u5b57\uff0c\u6bd4\u5982\"ZGPA\" required Source code in omicron/models/security.py def name_like ( self , name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u540d\u79f0\u4e2d\u51fa\u73b0`name`\u7684\u54c1\u79cd \u6ce8\u610f\u8fd9\u91cc\u7684\u8bc1\u5238\u540d\u79f0\u5e76\u4e0d\u662f\u5176\u663e\u793a\u540d\u3002\u6bd4\u5982\u5bf9\u4e2d\u56fd\u5e73\u5b89000001.XSHE\u6765\u8bf4\uff0c\u5b83\u7684\u540d\u79f0\u662fZGPA\uff0c\u800c\u4e0d\u662f\u201c\u4e2d\u56fd\u5e73\u5b89\u201d\u3002 Args: name: \u5f85\u67e5\u627e\u7684\u540d\u5b57\uff0c\u6bd4\u5982\"ZGPA\" \"\"\" if name is None or len ( name ) == 0 : self . _name_pattern = None else : self . _name_pattern = name return self only_cyb ( self ) \u00b6 \u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u521b\u4e1a\u677f\u80a1\u7968 Source code in omicron/models/security.py def only_cyb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u521b\u4e1a\u677f\u80a1\u7968\"\"\" self . _only_cyb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_cyb = False self . _only_kcb = False self . _only_st = False return self only_kcb ( self ) \u00b6 \u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u79d1\u521b\u677f\u80a1\u7968 Source code in omicron/models/security.py def only_kcb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u79d1\u521b\u677f\u80a1\u7968\"\"\" self . _only_kcb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_kcb = False self . _only_cyb = False self . _only_st = False return self only_st ( self ) \u00b6 \u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542bST\u7c7b\u578b\u7684\u8bc1\u5238 Source code in omicron/models/security.py def only_st ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542bST\u7c7b\u578b\u7684\u8bc1\u5238\"\"\" self . _only_st = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_st = False self . _only_kcb = False self . 
_only_cyb = False return self types ( self , types ) \u00b6 \u9009\u62e9\u7c7b\u578b\u5728 types \u4e2d\u7684\u8bc1\u5238\u54c1\u79cd \u5982\u679c\u4e0d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u9ed8\u8ba4\u9009\u62e9\u6240\u6709\u80a1\u7968\u7c7b\u578b\u3002 \u5982\u679c\u8c03\u7528\u6b64\u65b9\u6cd5\u4f46\u4e0d\u4f20\u5165\u53c2\u6570\uff0c\u9ed8\u8ba4\u9009\u62e9\u6307\u6570+\u80a1\u7968 Parameters: Name Type Description Default types List[str] \u6709\u6548\u7684\u7c7b\u578b\u5305\u62ec: \u5bf9\u80a1\u7968\u6307\u6570\u800c\u8a00\u662f\uff08'index', 'stock'\uff09\uff0c\u5bf9\u57fa\u91d1\u800c\u8a00\u5219\u662f\uff08'etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof'\uff09 required Source code in omicron/models/security.py def types ( self , types : List [ str ]) -> \"Query\" : \"\"\"\u9009\u62e9\u7c7b\u578b\u5728`types`\u4e2d\u7684\u8bc1\u5238\u54c1\u79cd \u5982\u679c\u4e0d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u9ed8\u8ba4\u9009\u62e9\u6240\u6709\u80a1\u7968\u7c7b\u578b\u3002 \u5982\u679c\u8c03\u7528\u6b64\u65b9\u6cd5\u4f46\u4e0d\u4f20\u5165\u53c2\u6570\uff0c\u9ed8\u8ba4\u9009\u62e9\u6307\u6570+\u80a1\u7968 Args: types: \u6709\u6548\u7684\u7c7b\u578b\u5305\u62ec: \u5bf9\u80a1\u7968\u6307\u6570\u800c\u8a00\u662f\uff08'index', 'stock'\uff09\uff0c\u5bf9\u57fa\u91d1\u800c\u8a00\u5219\u662f\uff08'etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof'\uff09 \"\"\" if types is None or isinstance ( types , List ) is False : return self if len ( types ) == 0 : self . _type_pattern = [ \"index\" , \"stock\" ] else : tmp = set ( types ) self . _type_pattern = list ( tmp ) return self Security \u00b6 Source code in omicron/models/security.py class Security : _securities = [] _securities_date = None _security_types = set () _stocks = [] @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316Security. \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Raises: DataNotReadyError: \u5982\u679comicron\u672a\u521d\u59cb\u5316\uff0c\u6216\u8005cache\u4e2d\u672a\u52a0\u8f7d\u6700\u65b0\u8bc1\u5238\u5217\u8868\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38\u3002 \"\"\" # read all securities from redis, 7111 records now # {'index', 'stock'} # {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if len ( cls . _securities ) > 100 : return True secs = await cls . load_securities () if secs is None or len ( secs ) == 0 : # pragma: no cover raise DataNotReadyError ( \"No securities in cache, make sure you have called omicron.init() first.\" ) print ( \"init securities done\" ) return True @classmethod async def load_securities ( cls ): \"\"\"\u52a0\u8f7d\u6240\u6709\u8bc1\u5238\u7684\u4fe1\u606f\uff0c\u5e76\u7f13\u5b58\u5230\u5185\u5b58\u4e2d \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 \"\"\" secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] _securities = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) # \u66f4\u65b0\u8bc1\u5238\u7c7b\u578b\u5217\u8868 cls . _securities = _securities cls . _security_types = set ( _securities [ \"type\" ]) cls . 
_stocks = _securities [ ( _securities [ \"type\" ] == \"stock\" ) | ( _securities [ \"type\" ] == \"index\" ) ] logger . info ( \" %d securities loaded, types: %s \" , len ( _securities ), cls . _security_types ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache is not None : cls . _securities_date = arrow . get ( date_in_cache ) . date () else : cls . _securities_date = datetime . date . today () return _securities else : # pragma: no cover return None @classmethod async def get_security_types ( cls ): if cls . _security_types : return list ( cls . _security_types ) else : return None @classmethod def get_stock ( cls , code ) -> NDArray [ security_info_dtype ]: \"\"\"\u6839\u636e`code`\u6765\u67e5\u627e\u5bf9\u5e94\u7684\u80a1\u7968\uff08\u542b\u6307\u6570\uff09\u5bf9\u8c61\u4fe1\u606f\u3002 \u5982\u679c\u60a8\u53ea\u6709\u80a1\u7968\u4ee3\u7801\uff0c\u60f3\u77e5\u9053\u8be5\u4ee3\u7801\u5bf9\u5e94\u7684\u80a1\u7968\u540d\u79f0\u3001\u522b\u540d\uff08\u663e\u793a\u540d\uff09\u3001\u4e0a\u5e02\u65e5\u671f\u7b49\u4fe1\u606f\uff0c\u5c31\u53ef\u4ee5\u4f7f\u7528\u6b64\u65b9\u6cd5\u6765\u83b7\u53d6\u76f8\u5173\u4fe1\u606f\u3002 \u8fd4\u56de\u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002\u60a8\u53ef\u4ee5\u8c61\u5b57\u5178\u4e00\u6837\u5b58\u53d6\u5b83\uff0c\u6bd4\u5982 ```python item = Security.get_stock(\"000001.XSHE\") print(item[\"alias\"]) ``` \u663e\u793a\u4e3a\"\u5e73\u5b89\u94f6\u884c\" Args: code: \u5f85\u67e5\u8be2\u7684\u80a1\u7968/\u6307\u6570\u4ee3\u7801 Returns: \u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20 \"\"\" if len ( cls . _securities ) == 0 : return None tmp = cls . _securities [ cls . _securities [ \"code\" ] == code ] if len ( tmp ) > 0 : if tmp [ \"type\" ] in [ \"stock\" , \"index\" ]: return tmp [ 0 ] return None @classmethod def fuzzy_match_ex ( cls , query : str ) -> Dict [ str , Tuple ]: # fixme: \u6b64\u65b9\u6cd5\u4e0eStock.fuzzy_match\u91cd\u590d\uff0c\u5e76\u4e14\u8fdb\u884c\u4e86\u7c7b\u578b\u9650\u5236\uff0c\u4f7f\u5f97\u5176\u4e0d\u9002\u5408\u653e\u5728Security\u91cc\uff0c\u4ee5\u53ca\u4f5c\u4e3a\u4e00\u4e2a\u901a\u7528\u65b9\u6cd5 query = query . upper () if re . match ( r \"\\d+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _securities if sec [ \"code\" ] . find ( query ) != - 1 and sec [ \"type\" ] == \"stock\" } elif re . match ( r \"[A-Z]+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _securities if sec [ \"name\" ] . startswith ( query ) and sec [ \"type\" ] == \"stock\" } else : return { sec [ \"code\" ]: sec . tolist () for sec in cls . _securities if sec [ \"alias\" ] . find ( query ) != - 1 and sec [ \"type\" ] == \"stock\" } @classmethod async def info ( cls , code , date = None ): _obj = await cls . query_security_via_date ( code , date ) if _obj is None : return None # \"_time\", \"code\", \"type\", \"alias\", \"end\", \"ipo\", \"name\" d1 = convert_nptime_to_datetime ( _obj [ \"ipo\" ]) . date () d2 = convert_nptime_to_datetime ( _obj [ \"end\" ]) . date () return { \"type\" : _obj [ \"type\" ], \"display_name\" : _obj [ \"alias\" ], \"alias\" : _obj [ \"alias\" ], \"end\" : d2 , \"start\" : d1 , \"name\" : _obj [ \"name\" ], } @classmethod async def name ( cls , code , date = None ): _security = await cls . 
query_security_via_date ( code , date ) if _security is None : return None return _security [ \"name\" ] @classmethod async def alias ( cls , code , date = None ): return await cls . display_name ( code , date ) @classmethod async def display_name ( cls , code , date = None ): _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return _security [ \"alias\" ] @classmethod async def start_date ( cls , code , date = None ): _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return convert_nptime_to_datetime ( _security [ \"ipo\" ]) . date () @classmethod async def end_date ( cls , code , date = None ): _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return convert_nptime_to_datetime ( _security [ \"end\" ]) . date () @classmethod async def security_type ( cls , code , date = None ) -> SecurityType : _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return _security [ \"type\" ] @classmethod async def query_security_via_date ( cls , code : str , date : datetime . date = None ): if date is None : # \u4ece\u5185\u5b58\u4e2d\u67e5\u627e\uff0c\u5982\u679c\u7f13\u5b58\u4e2d\u7684\u6570\u636e\u5df2\u66f4\u65b0\uff0c\u91cd\u65b0\u52a0\u8f7d\u5230\u5185\u5b58 date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache is not None : date = arrow . get ( date_in_cache ) . date () if date > cls . _securities_date : await cls . load_securities () results = cls . _securities [ cls . _securities [ \"code\" ] == code ] else : # \u4eceinfluxdb\u67e5\u627e date = tf . day_shift ( date , 0 ) results = await cls . load_securities_from_db ( date , code ) if results is not None and len ( results ) > 0 : return results [ 0 ] else : return None @classmethod def select ( cls , date : datetime . date = None ) -> Query : if date is None : return Query ( target_date = None ) else : return Query ( target_date = date ) @classmethod async def update_secs_cache ( cls , dt : datetime . date , securities : List [ Tuple ]): \"\"\"\u66f4\u65b0\u8bc1\u5238\u5217\u8868\u5230\u7f13\u5b58\u6570\u636e\u5e93\u4e2d Args: dt: \u8bc1\u5238\u5217\u8868\u5f52\u5c5e\u7684\u65e5\u671f securities: \u8bc1\u5238\u5217\u8868, \u5143\u7d20\u4e3a\u5143\u7ec4\uff0c\u5206\u522b\u4e3a\u4ee3\u7801\u3001\u522b\u540d\u3001\u540d\u79f0\u3001IPO\u65e5\u671f\u3001\u9000\u5e02\u65e5\u548c\u8bc1\u5238\u7c7b\u578b \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} key = \"security:all\" pipeline = cache . security . pipeline () pipeline . delete ( key ) for code , alias , name , start , end , _type in securities : pipeline . rpush ( key , f \" { code } , { alias } , { name } , { start } ,\" f \" { end } , { _type } \" ) await pipeline . execute () logger . info ( \"all securities saved to cache %s , %d secs\" , key , len ( securities )) # update latest date info await cache . security . set ( \"security:latest_date\" , dt . strftime ( \"%Y-%m- %d \" )) @classmethod async def save_securities ( cls , securities : List [ str ], dt : datetime . 
date ): \"\"\"\u4fdd\u5b58\u6307\u5b9a\u7684\u8bc1\u5238\u4fe1\u606f\u5230\u7f13\u5b58\u4e2d\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: securities: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if dt is None or len ( securities ) == 0 : return measurement = \"security_list\" client = get_influx_client () # code, alias, name, start, end, type security_list = np . array ( [ ( dt , x [ 0 ], f \" { x [ 0 ] } , { x [ 1 ] } , { x [ 2 ] } , { x [ 3 ] } , { x [ 4 ] } , { x [ 5 ] } \" ) for x in securities ], dtype = security_db_dtype , ) await client . save ( security_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ] ) @classmethod async def load_securities_from_db ( cls , target_date : datetime . date , code : str = None ): if target_date is None : return None client = get_influx_client () measurement = \"security_list\" flux = ( Flux () . measurement ( measurement ) . range ( target_date , target_date ) . bucket ( client . _bucket ) . fields ([ \"info\" ]) ) if code is not None and len ( code ) > 0 : flux . tags ({ \"code\" : code }) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return None ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" , \"code\" , \"info\" ], time_col = \"_time\" , engine = \"c\" , ) actual = ds ( data ) secs = actual . to_records ( index = False ) if len ( secs ) != 0 : # \"_time\", \"code\", \"code, alias, name, start, end, type\" _securities = np . array ( [ tuple ( x [ \"info\" ] . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) return _securities else : return None @classmethod async def get_datescope_from_db ( cls ): # fixme: \u51fd\u6570\u540d\u65e0\u6cd5\u53cd\u6620\u7528\u9014\uff0c\u9700\u8981\u589e\u52a0\u6587\u6863\u6ce8\u91ca\uff0c\u8bf4\u660e\u8be5\u51fd\u6570\u7684\u4f5c\u7528,\u6216\u8005\u4e0d\u5e94\u8be5\u51fa\u73b0\u5728\u6b64\u7c7b\u4e2d\uff1f client = get_influx_client () measurement = \"security_list\" date1 = arrow . get ( \"2005-01-01\" ) . date () date2 = arrow . now () . naive . date () flux = ( Flux () . measurement ( measurement ) . range ( date1 , date2 ) . bucket ( client . _bucket ) . tags ({ \"code\" : \"000001.XSHE\" }) ) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return None , None ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" ], time_col = \"_time\" , engine = \"c\" ) actual = ds ( data ) secs = actual . to_records ( index = False ) if len ( secs ) != 0 : d1 = convert_nptime_to_datetime ( secs [ 0 ][ \"_time\" ]) d2 = convert_nptime_to_datetime ( secs [ len ( secs ) - 1 ][ \"_time\" ]) return d1 . date (), d2 . date () else : return None , None @classmethod async def _notify_special_bonusnote ( cls , code , note , cancel_date ): # fixme: \u8fd9\u4e2a\u51fd\u6570\u5e94\u8be5\u51fa\u73b0\u5728omega\u4e2d\uff1f default_cancel_date = datetime . date ( 2099 , 1 , 1 ) # \u9ed8\u8ba4\u65e0\u53d6\u6d88\u516c\u544a # report this special event to notify user if cancel_date != default_cancel_date : ding ( \"security %s , bonus_cancel_pub_date %s \" % ( code , cancel_date )) if note . find ( \"\u6d41\u901a\" ) != - 1 : # \u68c0\u67e5\u662f\u5426\u6709\u201c\u6d41\u901a\u80a1\u201d\u6587\u5b57 ding ( \"security %s , special xrxd note: %s \" % ( code , note )) @classmethod async def save_xrxd_reports ( cls , reports : List [ str ], dt : datetime . 
date ): # fixme: \u6b64\u51fd\u6570\u5e94\u8be5\u5c5e\u4e8eomega? \"\"\"\u4fdd\u5b581\u5e74\u5185\u7684\u5206\u7ea2\u9001\u80a1\u4fe1\u606f\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: reports: \u5206\u7ea2\u9001\u80a1\u516c\u544a \"\"\" # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5), # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10) if len ( reports ) == 0 or dt is None : return # read reports from db and convert to dict map reports_in_db = {} dt_start = dt - datetime . timedelta ( days = 366 ) # \u5f80\u524d\u56de\u6eaf366\u5929 dt_end = dt + datetime . timedelta ( days = 366 ) # \u5f80\u540e\u5ef6\u957f366\u5929 existing_records = await cls . _load_xrxd_from_db ( None , dt_start , dt_end ) for record in existing_records : code = record [ 0 ] if code not in reports_in_db : reports_in_db [ code ] = [ record ] else : reports_in_db [ code ] . append ( record ) records = [] # \u51c6\u5907\u5199\u5165db for x in reports : code = x [ 0 ] note = x [ 2 ] cancel_date = x [ 10 ] existing_items = reports_in_db . get ( code , None ) if existing_items is None : # \u65b0\u8bb0\u5f55 record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) else : new_record = True for item in existing_items : existing_date = convert_nptime_to_datetime ( item [ 1 ]) . date () if existing_date == x [ 1 ]: # \u5982\u679cxr_date\u76f8\u540c\uff0c\u4e0d\u66f4\u65b0 new_record = False continue if new_record : record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) logger . info ( \"save_xrxd_reports, %d records to be saved\" , len ( records )) if len ( records ) == 0 : return measurement = \"security_xrxd_reports\" client = get_influx_client () # a_xr_date(_time), code(tag), info report_list = np . array ( records , dtype = security_db_dtype ) await client . save ( report_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ]) @classmethod async def _load_xrxd_from_db ( cls , code , dt_start : datetime . date , dt_end : datetime . date ): if dt_start is None or dt_end is None : return [] client = get_influx_client () measurement = \"security_xrxd_reports\" flux = ( Flux () . measurement ( measurement ) . range ( dt_start , dt_end ) . bucket ( client . _bucket ) . fields ([ \"info\" ]) ) if code is not None and len ( code ) > 0 : flux . tags ({ \"code\" : code }) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return [] ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" , \"code\" , \"info\" ], time_col = \"_time\" , engine = \"c\" , ) actual = ds ( data ) secs = actual . to_records ( index = False ) if len ( secs ) != 0 : _reports = np . array ( [ tuple ( x [ \"info\" ] . split ( \"|\" )) for x in secs ], dtype = xrxd_info_dtype ) return _reports else : return [] @classmethod async def get_xrxd_info ( cls , dt : datetime . 
date , code : str = None ): if dt is None : return None # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5), # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10) reports = await cls . _load_xrxd_from_db ( code , dt , dt ) if len ( reports ) == 0 : return None readable_reports = [] for report in reports : xr_date = convert_nptime_to_datetime ( report [ 1 ]) . date () readable_reports . append ( { \"code\" : report [ 0 ], \"xr_date\" : xr_date , \"bonus\" : report [ 3 ], \"dividend\" : report [ 4 ], \"transfer\" : report [ 5 ], \"bonusnote\" : report [ 2 ], } ) return readable_reports get_stock ( code ) classmethod \u00b6 \u6839\u636e code \u6765\u67e5\u627e\u5bf9\u5e94\u7684\u80a1\u7968\uff08\u542b\u6307\u6570\uff09\u5bf9\u8c61\u4fe1\u606f\u3002 \u5982\u679c\u60a8\u53ea\u6709\u80a1\u7968\u4ee3\u7801\uff0c\u60f3\u77e5\u9053\u8be5\u4ee3\u7801\u5bf9\u5e94\u7684\u80a1\u7968\u540d\u79f0\u3001\u522b\u540d\uff08\u663e\u793a\u540d\uff09\u3001\u4e0a\u5e02\u65e5\u671f\u7b49\u4fe1\u606f\uff0c\u5c31\u53ef\u4ee5\u4f7f\u7528\u6b64\u65b9\u6cd5\u6765\u83b7\u53d6\u76f8\u5173\u4fe1\u606f\u3002 \u8fd4\u56de\u7c7b\u578b\u4e3a security_info_dtype \u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002\u60a8\u53ef\u4ee5\u8c61\u5b57\u5178\u4e00\u6837\u5b58\u53d6\u5b83\uff0c\u6bd4\u5982 1 2 item = Security . get_stock ( \"000001.XSHE\" ) print ( item [ \"alias\" ]) \u663e\u793a\u4e3a\"\u5e73\u5b89\u94f6\u884c\" Parameters: Name Type Description Default code \u5f85\u67e5\u8be2\u7684\u80a1\u7968/\u6307\u6570\u4ee3\u7801 required Returns: Type Description numpy.ndarray[Any, numpy.dtype[[('code', 'O'), ('alias', 'O'), ('name', 'O'), ('ipo', 'datetime64[s]'), ('end', 'datetime64[s]'), ('type', 'O')]]] \u7c7b\u578b\u4e3a security_info_dtype \u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20 Source code in omicron/models/security.py @classmethod def get_stock ( cls , code ) -> NDArray [ security_info_dtype ]: \"\"\"\u6839\u636e`code`\u6765\u67e5\u627e\u5bf9\u5e94\u7684\u80a1\u7968\uff08\u542b\u6307\u6570\uff09\u5bf9\u8c61\u4fe1\u606f\u3002 \u5982\u679c\u60a8\u53ea\u6709\u80a1\u7968\u4ee3\u7801\uff0c\u60f3\u77e5\u9053\u8be5\u4ee3\u7801\u5bf9\u5e94\u7684\u80a1\u7968\u540d\u79f0\u3001\u522b\u540d\uff08\u663e\u793a\u540d\uff09\u3001\u4e0a\u5e02\u65e5\u671f\u7b49\u4fe1\u606f\uff0c\u5c31\u53ef\u4ee5\u4f7f\u7528\u6b64\u65b9\u6cd5\u6765\u83b7\u53d6\u76f8\u5173\u4fe1\u606f\u3002 \u8fd4\u56de\u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002\u60a8\u53ef\u4ee5\u8c61\u5b57\u5178\u4e00\u6837\u5b58\u53d6\u5b83\uff0c\u6bd4\u5982 ```python item = Security.get_stock(\"000001.XSHE\") print(item[\"alias\"]) ``` \u663e\u793a\u4e3a\"\u5e73\u5b89\u94f6\u884c\" Args: code: \u5f85\u67e5\u8be2\u7684\u80a1\u7968/\u6307\u6570\u4ee3\u7801 Returns: \u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20 \"\"\" if len ( cls . _securities ) == 0 : return None tmp = cls . _securities [ cls . _securities [ \"code\" ] == code ] if len ( tmp ) > 0 : if tmp [ \"type\" ] in [ \"stock\" , \"index\" ]: return tmp [ 0 ] return None init () async classmethod \u00b6 \u521d\u59cb\u5316Security. 
\u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7 omicron.init \uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Exceptions: Type Description DataNotReadyError \u5982\u679comicron\u672a\u521d\u59cb\u5316\uff0c\u6216\u8005cache\u4e2d\u672a\u52a0\u8f7d\u6700\u65b0\u8bc1\u5238\u5217\u8868\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Source code in omicron/models/security.py @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316Security. \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Raises: DataNotReadyError: \u5982\u679comicron\u672a\u521d\u59cb\u5316\uff0c\u6216\u8005cache\u4e2d\u672a\u52a0\u8f7d\u6700\u65b0\u8bc1\u5238\u5217\u8868\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38\u3002 \"\"\" # read all securities from redis, 7111 records now # {'index', 'stock'} # {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if len ( cls . _securities ) > 100 : return True secs = await cls . load_securities () if secs is None or len ( secs ) == 0 : # pragma: no cover raise DataNotReadyError ( \"No securities in cache, make sure you have called omicron.init() first.\" ) print ( \"init securities done\" ) return True load_securities () async classmethod \u00b6 \u52a0\u8f7d\u6240\u6709\u8bc1\u5238\u7684\u4fe1\u606f\uff0c\u5e76\u7f13\u5b58\u5230\u5185\u5b58\u4e2d \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7 omicron.init \uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Source code in omicron/models/security.py @classmethod async def load_securities ( cls ): \"\"\"\u52a0\u8f7d\u6240\u6709\u8bc1\u5238\u7684\u4fe1\u606f\uff0c\u5e76\u7f13\u5b58\u5230\u5185\u5b58\u4e2d \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 \"\"\" secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] _securities = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) # \u66f4\u65b0\u8bc1\u5238\u7c7b\u578b\u5217\u8868 cls . _securities = _securities cls . _security_types = set ( _securities [ \"type\" ]) cls . _stocks = _securities [ ( _securities [ \"type\" ] == \"stock\" ) | ( _securities [ \"type\" ] == \"index\" ) ] logger . info ( \" %d securities loaded, types: %s \" , len ( _securities ), cls . _security_types ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache is not None : cls . _securities_date = arrow . get ( date_in_cache ) . date () else : cls . _securities_date = datetime . date . 
today () return _securities else : # pragma: no cover return None save_securities ( securities , dt ) async classmethod \u00b6 \u4fdd\u5b58\u6307\u5b9a\u7684\u8bc1\u5238\u4fe1\u606f\u5230\u7f13\u5b58\u4e2d\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Parameters: Name Type Description Default securities List[str] \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 required Source code in omicron/models/security.py @classmethod async def save_securities ( cls , securities : List [ str ], dt : datetime . date ): \"\"\"\u4fdd\u5b58\u6307\u5b9a\u7684\u8bc1\u5238\u4fe1\u606f\u5230\u7f13\u5b58\u4e2d\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: securities: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if dt is None or len ( securities ) == 0 : return measurement = \"security_list\" client = get_influx_client () # code, alias, name, start, end, type security_list = np . array ( [ ( dt , x [ 0 ], f \" { x [ 0 ] } , { x [ 1 ] } , { x [ 2 ] } , { x [ 3 ] } , { x [ 4 ] } , { x [ 5 ] } \" ) for x in securities ], dtype = security_db_dtype , ) await client . save ( security_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ] ) save_xrxd_reports ( reports , dt ) async classmethod \u00b6 \u4fdd\u5b581\u5e74\u5185\u7684\u5206\u7ea2\u9001\u80a1\u4fe1\u606f\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Parameters: Name Type Description Default reports List[str] \u5206\u7ea2\u9001\u80a1\u516c\u544a required Source code in omicron/models/security.py @classmethod async def save_xrxd_reports ( cls , reports : List [ str ], dt : datetime . date ): # fixme: \u6b64\u51fd\u6570\u5e94\u8be5\u5c5e\u4e8eomega? \"\"\"\u4fdd\u5b581\u5e74\u5185\u7684\u5206\u7ea2\u9001\u80a1\u4fe1\u606f\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: reports: \u5206\u7ea2\u9001\u80a1\u516c\u544a \"\"\" # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5), # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10) if len ( reports ) == 0 or dt is None : return # read reports from db and convert to dict map reports_in_db = {} dt_start = dt - datetime . timedelta ( days = 366 ) # \u5f80\u524d\u56de\u6eaf366\u5929 dt_end = dt + datetime . timedelta ( days = 366 ) # \u5f80\u540e\u5ef6\u957f366\u5929 existing_records = await cls . _load_xrxd_from_db ( None , dt_start , dt_end ) for record in existing_records : code = record [ 0 ] if code not in reports_in_db : reports_in_db [ code ] = [ record ] else : reports_in_db [ code ] . append ( record ) records = [] # \u51c6\u5907\u5199\u5165db for x in reports : code = x [ 0 ] note = x [ 2 ] cancel_date = x [ 10 ] existing_items = reports_in_db . get ( code , None ) if existing_items is None : # \u65b0\u8bb0\u5f55 record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) else : new_record = True for item in existing_items : existing_date = convert_nptime_to_datetime ( item [ 1 ]) . 
date () if existing_date == x [ 1 ]: # \u5982\u679cxr_date\u76f8\u540c\uff0c\u4e0d\u66f4\u65b0 new_record = False continue if new_record : record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) logger . info ( \"save_xrxd_reports, %d records to be saved\" , len ( records )) if len ( records ) == 0 : return measurement = \"security_xrxd_reports\" client = get_influx_client () # a_xr_date(_time), code(tag), info report_list = np . array ( records , dtype = security_db_dtype ) await client . save ( report_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ]) update_secs_cache ( dt , securities ) async classmethod \u00b6 \u66f4\u65b0\u8bc1\u5238\u5217\u8868\u5230\u7f13\u5b58\u6570\u636e\u5e93\u4e2d Parameters: Name Type Description Default dt date \u8bc1\u5238\u5217\u8868\u5f52\u5c5e\u7684\u65e5\u671f required securities List[Tuple] \u8bc1\u5238\u5217\u8868, \u5143\u7d20\u4e3a\u5143\u7ec4\uff0c\u5206\u522b\u4e3a\u4ee3\u7801\u3001\u522b\u540d\u3001\u540d\u79f0\u3001IPO\u65e5\u671f\u3001\u9000\u5e02\u65e5\u548c\u8bc1\u5238\u7c7b\u578b required Source code in omicron/models/security.py @classmethod async def update_secs_cache ( cls , dt : datetime . date , securities : List [ Tuple ]): \"\"\"\u66f4\u65b0\u8bc1\u5238\u5217\u8868\u5230\u7f13\u5b58\u6570\u636e\u5e93\u4e2d Args: dt: \u8bc1\u5238\u5217\u8868\u5f52\u5c5e\u7684\u65e5\u671f securities: \u8bc1\u5238\u5217\u8868, \u5143\u7d20\u4e3a\u5143\u7ec4\uff0c\u5206\u522b\u4e3a\u4ee3\u7801\u3001\u522b\u540d\u3001\u540d\u79f0\u3001IPO\u65e5\u671f\u3001\u9000\u5e02\u65e5\u548c\u8bc1\u5238\u7c7b\u578b \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} key = \"security:all\" pipeline = cache . security . pipeline () pipeline . delete ( key ) for code , alias , name , start , end , _type in securities : pipeline . rpush ( key , f \" { code } , { alias } , { name } , { start } ,\" f \" { end } , { _type } \" ) await pipeline . execute () logger . info ( \"all securities saved to cache %s , %d secs\" , key , len ( securities )) # update latest date info await cache . security . set ( \"security:latest_date\" , dt . strftime ( \"%Y-%m- %d \" ))","title":"security"},{"location":"api/security/#omicron.models.security.Query","text":"\u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61 \u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61\uff0c\u7531 Security.select() \u65b9\u6cd5\u751f\u6210\uff0c\u652f\u6301\u94fe\u5f0f\u67e5\u8be2\u3002\u901a\u8fc7 eval \u51fd\u6570\u7ed3\u675f\u94fe\u5f0f\u8c03\u7528\u5e76\u751f\u6210\u67e5\u8be2\u7ed3\u679c\u3002 Source code in omicron/models/security.py class Query : \"\"\"\u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61 \u8bc1\u5238\u4fe1\u606f\u67e5\u8be2\u5bf9\u8c61\uff0c\u7531`Security.select()`\u65b9\u6cd5\u751f\u6210\uff0c\u652f\u6301\u94fe\u5f0f\u67e5\u8be2\u3002\u901a\u8fc7`eval`\u51fd\u6570\u7ed3\u675f\u94fe\u5f0f\u8c03\u7528\u5e76\u751f\u6210\u67e5\u8be2\u7ed3\u679c\u3002 \"\"\" def __init__ ( self , target_date : datetime . date = None ): if target_date is None : # \u805a\u5bbd\u4e0d\u4e00\u5b9a\u4f1a\u53ca\u65f6\u66f4\u65b0\u6570\u636e\uff0c\u56e0\u6b64db\u4e2d\u4e0d\u5b58\u653e\u5f53\u5929\u7684\u6570\u636e\uff0c\u5982\u679c\u4f20\u7a7a\uff0c\u67e5cache self . 
target_date = None else : # \u5982\u679c\u662f\u4ea4\u6613\u65e5\uff0c\u53d6\u5f53\u5929\uff0c\u5426\u5219\u53d6\u524d\u4e00\u5929 self . target_date = tf . day_shift ( target_date , 0 ) # \u540d\u5b57\uff0c\u663e\u793a\u540d\uff0c\u7c7b\u578b\u8fc7\u6ee4\u5668 self . _name_pattern = None # \u5b57\u6bcd\u540d\u5b57 self . _alias_pattern = None # \u663e\u793a\u540d self . _type_pattern = None # \u4e0d\u6307\u5b9a\u5219\u9ed8\u8ba4\u4e3a\u5168\u90e8\uff0c\u5982\u679c\u4f20\u5165\u7a7a\u503c\u5219\u53ea\u9009\u62e9\u80a1\u7968\u548c\u6307\u6570 # \u5f00\u5173\u9009\u9879 self . _exclude_kcb = False # \u79d1\u521b\u677f self . _exclude_cyb = False # \u521b\u4e1a\u677f self . _exclude_st = False # ST self . _include_exit = False # \u662f\u5426\u5305\u542b\u5df2\u9000\u5e02\u8bc1\u5238(\u9ed8\u8ba4\u4e0d\u5305\u62ec\u5f53\u5929\u9000\u5e02\u7684) # \u4e0b\u5217\u5f00\u5173\u4f18\u5148\u7ea7\u9ad8\u4e8e\u4e0a\u9762\u7684 self . _only_kcb = False self . _only_cyb = False self . _only_st = False def only_cyb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u521b\u4e1a\u677f\u80a1\u7968\"\"\" self . _only_cyb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_cyb = False self . _only_kcb = False self . _only_st = False return self def only_st ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542bST\u7c7b\u578b\u7684\u8bc1\u5238\"\"\" self . _only_st = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_st = False self . _only_kcb = False self . _only_cyb = False return self def only_kcb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u79d1\u521b\u677f\u80a1\u7968\"\"\" self . _only_kcb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_kcb = False self . _only_cyb = False self . _only_st = False return self def exclude_st ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664ST\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_st = True self . _only_st = False return self def exclude_cyb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u521b\u4e1a\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_cyb = True self . _only_cyb = False return self def exclude_kcb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u79d1\u521b\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_kcb = True self . _only_kcb = False return self def include_exit ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u5305\u542b\u5df2\u9000\u5e02\u7684\u8bc1\u5238\"\"\" self . _include_exit = True return self def types ( self , types : List [ str ]) -> \"Query\" : \"\"\"\u9009\u62e9\u7c7b\u578b\u5728`types`\u4e2d\u7684\u8bc1\u5238\u54c1\u79cd \u5982\u679c\u4e0d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u9ed8\u8ba4\u9009\u62e9\u6240\u6709\u80a1\u7968\u7c7b\u578b\u3002 \u5982\u679c\u8c03\u7528\u6b64\u65b9\u6cd5\u4f46\u4e0d\u4f20\u5165\u53c2\u6570\uff0c\u9ed8\u8ba4\u9009\u62e9\u6307\u6570+\u80a1\u7968 Args: types: \u6709\u6548\u7684\u7c7b\u578b\u5305\u62ec: \u5bf9\u80a1\u7968\u6307\u6570\u800c\u8a00\u662f\uff08'index', 'stock'\uff09\uff0c\u5bf9\u57fa\u91d1\u800c\u8a00\u5219\u662f\uff08'etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof'\uff09 \"\"\" if types is None or isinstance ( types , List ) is False : return self if len ( types ) == 0 : self . _type_pattern = [ \"index\" , \"stock\" ] else : tmp = set ( types ) self . 
_type_pattern = list ( tmp ) return self def name_like ( self , name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u540d\u79f0\u4e2d\u51fa\u73b0`name`\u7684\u54c1\u79cd \u6ce8\u610f\u8fd9\u91cc\u7684\u8bc1\u5238\u540d\u79f0\u5e76\u4e0d\u662f\u5176\u663e\u793a\u540d\u3002\u6bd4\u5982\u5bf9\u4e2d\u56fd\u5e73\u5b89000001.XSHE\u6765\u8bf4\uff0c\u5b83\u7684\u540d\u79f0\u662fZGPA\uff0c\u800c\u4e0d\u662f\u201c\u4e2d\u56fd\u5e73\u5b89\u201d\u3002 Args: name: \u5f85\u67e5\u627e\u7684\u540d\u5b57\uff0c\u6bd4\u5982\"ZGPA\" \"\"\" if name is None or len ( name ) == 0 : self . _name_pattern = None else : self . _name_pattern = name return self def alias_like ( self , display_name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u663e\u793a\u540d\u4e2d\u51fa\u73b0`display_name\u7684\u54c1\u79cd Args: display_name: \u663e\u793a\u540d\uff0c\u6bd4\u5982\u201c\u4e2d\u56fd\u5e73\u5b89\" \"\"\" if display_name is None or len ( display_name ) == 0 : self . _alias_pattern = None else : self . _alias_pattern = display_name return self async def eval ( self ) -> List [ str ]: \"\"\"\u5bf9\u67e5\u8be2\u7ed3\u679c\u8fdb\u884c\u6c42\u503c\uff0c\u8fd4\u56decode\u5217\u8868 Returns: \u4ee3\u7801\u5217\u8868 \"\"\" logger . debug ( \"eval, date: %s \" , self . target_date ) logger . debug ( \"eval, names and types: %s , %s , %s \" , self . _name_pattern , self . _alias_pattern , self . _type_pattern , ) logger . debug ( \"eval, exclude and include: %s , %s , %s , %s \" , self . _exclude_cyb , self . _exclude_st , self . _exclude_kcb , self . _include_exit , ) logger . debug ( \"eval, only: %s , %s , %s \" , self . _only_cyb , self . _only_st , self . _only_kcb ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache : # \u65e0\u6b64\u6570\u636e\u8bf4\u660eomega\u6709\u67d0\u4e9b\u95ee\u9898\uff0c\u4e0d\u5904\u7406 _date = arrow . get ( date_in_cache ) . date () else : now = datetime . datetime . now () _date = tf . day_shift ( now , 0 ) # \u786e\u5b9a\u6570\u636e\u6e90\uff0ccache\u4e3a\u5f53\u59298\u70b9\u4e4b\u540e\u83b7\u53d6\u7684\u6570\u636e\uff0c\u6570\u636e\u5e93\u5b58\u653e\u524d\u4e00\u65e5\u548c\u66f4\u65e9\u7684\u6570\u636e if not self . target_date or self . target_date >= _date : self . target_date = _date records = None if self . target_date == _date : # \u4ece\u5185\u5b58\u4e2d\u67e5\u627e\uff0c\u5982\u679c\u7f13\u5b58\u4e2d\u7684\u6570\u636e\u5df2\u66f4\u65b0\uff0c\u91cd\u65b0\u52a0\u8f7d\u5230\u5185\u5b58 secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] records = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) else : records = await Security . load_securities_from_db ( self . target_date ) if records is None : return None results = [] self . _type_pattern = self . _type_pattern or SecurityType . STOCK . value for record in records : if self . _type_pattern is not None : if record [ \"type\" ] not in self . _type_pattern : continue if self . _name_pattern is not None : if record [ \"name\" ] . find ( self . _name_pattern ) == - 1 : continue if self . _alias_pattern is not None : if record [ \"alias\" ] . find ( self . _alias_pattern ) == - 1 : continue # \u521b\u4e1a\u677f\uff0c\u79d1\u521b\u677f\uff0cST\u6682\u65f6\u9650\u5b9a\u4e3a\u80a1\u7968\u7c7b\u578b if self . _only_cyb : if record [ \"type\" ] != SecurityType . STOCK . value or not ( record [ \"code\" ][: 3 ] in ( \"300\" , \"301\" ) ): continue if self . 
_only_kcb : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"code\" ] . startswith ( \"688\" ) is False ): continue if self . _only_st : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"alias\" ] . find ( \"ST\" ) == - 1 ): continue if self . _exclude_cyb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ][ : 3 ] in ( \"300\" , \"301\" ): continue if self . _exclude_st : if ( record [ \"type\" ] == SecurityType . STOCK . value and record [ \"alias\" ] . find ( \"ST\" ) != - 1 ): continue if self . _exclude_kcb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ] . startswith ( \"688\" ): continue # \u9000\u5e02\u6682\u4e0d\u9650\u5b9a\u662f\u5426\u4e3a\u80a1\u7968 if self . _include_exit is False : d1 = convert_nptime_to_datetime ( record [ \"end\" ]) . date () if d1 < self . target_date : continue results . append ( record [ \"code\" ]) # \u8fd4\u56de\u6240\u6709\u67e5\u8be2\u5230\u7684\u7ed3\u679c return results","title":"Query"},{"location":"api/security/#omicron.models.security.Query.alias_like","text":"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u663e\u793a\u540d\u4e2d\u51fa\u73b0`display_name\u7684\u54c1\u79cd Parameters: Name Type Description Default display_name str \u663e\u793a\u540d\uff0c\u6bd4\u5982\u201c\u4e2d\u56fd\u5e73\u5b89\" required Source code in omicron/models/security.py def alias_like ( self , display_name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u663e\u793a\u540d\u4e2d\u51fa\u73b0`display_name\u7684\u54c1\u79cd Args: display_name: \u663e\u793a\u540d\uff0c\u6bd4\u5982\u201c\u4e2d\u56fd\u5e73\u5b89\" \"\"\" if display_name is None or len ( display_name ) == 0 : self . _alias_pattern = None else : self . _alias_pattern = display_name return self","title":"alias_like()"},{"location":"api/security/#omicron.models.security.Query.eval","text":"\u5bf9\u67e5\u8be2\u7ed3\u679c\u8fdb\u884c\u6c42\u503c\uff0c\u8fd4\u56decode\u5217\u8868 Returns: Type Description List[str] \u4ee3\u7801\u5217\u8868 Source code in omicron/models/security.py async def eval ( self ) -> List [ str ]: \"\"\"\u5bf9\u67e5\u8be2\u7ed3\u679c\u8fdb\u884c\u6c42\u503c\uff0c\u8fd4\u56decode\u5217\u8868 Returns: \u4ee3\u7801\u5217\u8868 \"\"\" logger . debug ( \"eval, date: %s \" , self . target_date ) logger . debug ( \"eval, names and types: %s , %s , %s \" , self . _name_pattern , self . _alias_pattern , self . _type_pattern , ) logger . debug ( \"eval, exclude and include: %s , %s , %s , %s \" , self . _exclude_cyb , self . _exclude_st , self . _exclude_kcb , self . _include_exit , ) logger . debug ( \"eval, only: %s , %s , %s \" , self . _only_cyb , self . _only_st , self . _only_kcb ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache : # \u65e0\u6b64\u6570\u636e\u8bf4\u660eomega\u6709\u67d0\u4e9b\u95ee\u9898\uff0c\u4e0d\u5904\u7406 _date = arrow . get ( date_in_cache ) . date () else : now = datetime . datetime . now () _date = tf . day_shift ( now , 0 ) # \u786e\u5b9a\u6570\u636e\u6e90\uff0ccache\u4e3a\u5f53\u59298\u70b9\u4e4b\u540e\u83b7\u53d6\u7684\u6570\u636e\uff0c\u6570\u636e\u5e93\u5b58\u653e\u524d\u4e00\u65e5\u548c\u66f4\u65e9\u7684\u6570\u636e if not self . target_date or self . target_date >= _date : self . target_date = _date records = None if self . 
target_date == _date : # \u4ece\u5185\u5b58\u4e2d\u67e5\u627e\uff0c\u5982\u679c\u7f13\u5b58\u4e2d\u7684\u6570\u636e\u5df2\u66f4\u65b0\uff0c\u91cd\u65b0\u52a0\u8f7d\u5230\u5185\u5b58 secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] records = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) else : records = await Security . load_securities_from_db ( self . target_date ) if records is None : return None results = [] self . _type_pattern = self . _type_pattern or SecurityType . STOCK . value for record in records : if self . _type_pattern is not None : if record [ \"type\" ] not in self . _type_pattern : continue if self . _name_pattern is not None : if record [ \"name\" ] . find ( self . _name_pattern ) == - 1 : continue if self . _alias_pattern is not None : if record [ \"alias\" ] . find ( self . _alias_pattern ) == - 1 : continue # \u521b\u4e1a\u677f\uff0c\u79d1\u521b\u677f\uff0cST\u6682\u65f6\u9650\u5b9a\u4e3a\u80a1\u7968\u7c7b\u578b if self . _only_cyb : if record [ \"type\" ] != SecurityType . STOCK . value or not ( record [ \"code\" ][: 3 ] in ( \"300\" , \"301\" ) ): continue if self . _only_kcb : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"code\" ] . startswith ( \"688\" ) is False ): continue if self . _only_st : if ( record [ \"type\" ] != SecurityType . STOCK . value or record [ \"alias\" ] . find ( \"ST\" ) == - 1 ): continue if self . _exclude_cyb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ][ : 3 ] in ( \"300\" , \"301\" ): continue if self . _exclude_st : if ( record [ \"type\" ] == SecurityType . STOCK . value and record [ \"alias\" ] . find ( \"ST\" ) != - 1 ): continue if self . _exclude_kcb : if record [ \"type\" ] == SecurityType . STOCK . value and record [ \"code\" ] . startswith ( \"688\" ): continue # \u9000\u5e02\u6682\u4e0d\u9650\u5b9a\u662f\u5426\u4e3a\u80a1\u7968 if self . _include_exit is False : d1 = convert_nptime_to_datetime ( record [ \"end\" ]) . date () if d1 < self . target_date : continue results . append ( record [ \"code\" ]) # \u8fd4\u56de\u6240\u6709\u67e5\u8be2\u5230\u7684\u7ed3\u679c return results","title":"eval()"},{"location":"api/security/#omicron.models.security.Query.exclude_cyb","text":"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u521b\u4e1a\u677f\u7c7b\u578b\u7684\u80a1\u7968 Source code in omicron/models/security.py def exclude_cyb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u521b\u4e1a\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_cyb = True self . _only_cyb = False return self","title":"exclude_cyb()"},{"location":"api/security/#omicron.models.security.Query.exclude_kcb","text":"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u79d1\u521b\u677f\u7c7b\u578b\u7684\u80a1\u7968 Source code in omicron/models/security.py def exclude_kcb ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664\u79d1\u521b\u677f\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_kcb = True self . _only_kcb = False return self","title":"exclude_kcb()"},{"location":"api/security/#omicron.models.security.Query.exclude_st","text":"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664ST\u7c7b\u578b\u7684\u80a1\u7968 Source code in omicron/models/security.py def exclude_st ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u6392\u9664ST\u7c7b\u578b\u7684\u80a1\u7968\"\"\" self . _exclude_st = True self . 
_only_st = False return self","title":"exclude_st()"},{"location":"api/security/#omicron.models.security.Query.include_exit","text":"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u5305\u542b\u5df2\u9000\u5e02\u7684\u8bc1\u5238 Source code in omicron/models/security.py def include_exit ( self ) -> \"Query\" : \"\"\"\u4ece\u8fd4\u56de\u7ed3\u679c\u4e2d\u5305\u542b\u5df2\u9000\u5e02\u7684\u8bc1\u5238\"\"\" self . _include_exit = True return self","title":"include_exit()"},{"location":"api/security/#omicron.models.security.Query.name_like","text":"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u540d\u79f0\u4e2d\u51fa\u73b0 name \u7684\u54c1\u79cd \u6ce8\u610f\u8fd9\u91cc\u7684\u8bc1\u5238\u540d\u79f0\u5e76\u4e0d\u662f\u5176\u663e\u793a\u540d\u3002\u6bd4\u5982\u5bf9\u4e2d\u56fd\u5e73\u5b89000001.XSHE\u6765\u8bf4\uff0c\u5b83\u7684\u540d\u79f0\u662fZGPA\uff0c\u800c\u4e0d\u662f\u201c\u4e2d\u56fd\u5e73\u5b89\u201d\u3002 Parameters: Name Type Description Default name str \u5f85\u67e5\u627e\u7684\u540d\u5b57\uff0c\u6bd4\u5982\"ZGPA\" required Source code in omicron/models/security.py def name_like ( self , name : str ) -> \"Query\" : \"\"\"\u67e5\u627e\u80a1\u7968/\u8bc1\u5238\u540d\u79f0\u4e2d\u51fa\u73b0`name`\u7684\u54c1\u79cd \u6ce8\u610f\u8fd9\u91cc\u7684\u8bc1\u5238\u540d\u79f0\u5e76\u4e0d\u662f\u5176\u663e\u793a\u540d\u3002\u6bd4\u5982\u5bf9\u4e2d\u56fd\u5e73\u5b89000001.XSHE\u6765\u8bf4\uff0c\u5b83\u7684\u540d\u79f0\u662fZGPA\uff0c\u800c\u4e0d\u662f\u201c\u4e2d\u56fd\u5e73\u5b89\u201d\u3002 Args: name: \u5f85\u67e5\u627e\u7684\u540d\u5b57\uff0c\u6bd4\u5982\"ZGPA\" \"\"\" if name is None or len ( name ) == 0 : self . _name_pattern = None else : self . _name_pattern = name return self","title":"name_like()"},{"location":"api/security/#omicron.models.security.Query.only_cyb","text":"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u521b\u4e1a\u677f\u80a1\u7968 Source code in omicron/models/security.py def only_cyb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u521b\u4e1a\u677f\u80a1\u7968\"\"\" self . _only_cyb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_cyb = False self . _only_kcb = False self . _only_st = False return self","title":"only_cyb()"},{"location":"api/security/#omicron.models.security.Query.only_kcb","text":"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u79d1\u521b\u677f\u80a1\u7968 Source code in omicron/models/security.py def only_kcb ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542b\u79d1\u521b\u677f\u80a1\u7968\"\"\" self . _only_kcb = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_kcb = False self . _only_cyb = False self . _only_st = False return self","title":"only_kcb()"},{"location":"api/security/#omicron.models.security.Query.only_st","text":"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542bST\u7c7b\u578b\u7684\u8bc1\u5238 Source code in omicron/models/security.py def only_st ( self ) -> \"Query\" : \"\"\"\u8fd4\u56de\u7ed3\u679c\u4e2d\u53ea\u5305\u542bST\u7c7b\u578b\u7684\u8bc1\u5238\"\"\" self . _only_st = True # \u9ad8\u4f18\u5148\u7ea7 self . _exclude_st = False self . _only_kcb = False self . 
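As the source shows, each `only_*` call takes precedence by clearing the matching `exclude_*` flag and the other `only_*` flags, so the last call in a chain wins. The `exclude_*` filters, by contrast, can be combined freely. A hedged sketch of a typical screen that drops ST, 科创板 and 创业板 names (delisted codes are already excluded by default):

```python
from omicron.models.security import Security


async def list_main_board_stocks():
    # calling only_cyb() after this would clear exclude_cyb and the other only_* flags
    return await (
        Security.select()
        .types(["stock"])
        .exclude_st()
        .exclude_kcb()
        .exclude_cyb()
        .eval()
    )
```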
_only_cyb = False return self","title":"only_st()"},{"location":"api/security/#omicron.models.security.Query.types","text":"\u9009\u62e9\u7c7b\u578b\u5728 types \u4e2d\u7684\u8bc1\u5238\u54c1\u79cd \u5982\u679c\u4e0d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u9ed8\u8ba4\u9009\u62e9\u6240\u6709\u80a1\u7968\u7c7b\u578b\u3002 \u5982\u679c\u8c03\u7528\u6b64\u65b9\u6cd5\u4f46\u4e0d\u4f20\u5165\u53c2\u6570\uff0c\u9ed8\u8ba4\u9009\u62e9\u6307\u6570+\u80a1\u7968 Parameters: Name Type Description Default types List[str] \u6709\u6548\u7684\u7c7b\u578b\u5305\u62ec: \u5bf9\u80a1\u7968\u6307\u6570\u800c\u8a00\u662f\uff08'index', 'stock'\uff09\uff0c\u5bf9\u57fa\u91d1\u800c\u8a00\u5219\u662f\uff08'etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof'\uff09 required Source code in omicron/models/security.py def types ( self , types : List [ str ]) -> \"Query\" : \"\"\"\u9009\u62e9\u7c7b\u578b\u5728`types`\u4e2d\u7684\u8bc1\u5238\u54c1\u79cd \u5982\u679c\u4e0d\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u9ed8\u8ba4\u9009\u62e9\u6240\u6709\u80a1\u7968\u7c7b\u578b\u3002 \u5982\u679c\u8c03\u7528\u6b64\u65b9\u6cd5\u4f46\u4e0d\u4f20\u5165\u53c2\u6570\uff0c\u9ed8\u8ba4\u9009\u62e9\u6307\u6570+\u80a1\u7968 Args: types: \u6709\u6548\u7684\u7c7b\u578b\u5305\u62ec: \u5bf9\u80a1\u7968\u6307\u6570\u800c\u8a00\u662f\uff08'index', 'stock'\uff09\uff0c\u5bf9\u57fa\u91d1\u800c\u8a00\u5219\u662f\uff08'etf', 'fjb', 'mmf', 'reits', 'fja', 'fjm', 'lof'\uff09 \"\"\" if types is None or isinstance ( types , List ) is False : return self if len ( types ) == 0 : self . _type_pattern = [ \"index\" , \"stock\" ] else : tmp = set ( types ) self . _type_pattern = list ( tmp ) return self","title":"types()"},{"location":"api/security/#omicron.models.security.Security","text":"Source code in omicron/models/security.py class Security : _securities = [] _securities_date = None _security_types = set () _stocks = [] @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316Security. \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Raises: DataNotReadyError: \u5982\u679comicron\u672a\u521d\u59cb\u5316\uff0c\u6216\u8005cache\u4e2d\u672a\u52a0\u8f7d\u6700\u65b0\u8bc1\u5238\u5217\u8868\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38\u3002 \"\"\" # read all securities from redis, 7111 records now # {'index', 'stock'} # {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if len ( cls . _securities ) > 100 : return True secs = await cls . load_securities () if secs is None or len ( secs ) == 0 : # pragma: no cover raise DataNotReadyError ( \"No securities in cache, make sure you have called omicron.init() first.\" ) print ( \"init securities done\" ) return True @classmethod async def load_securities ( cls ): \"\"\"\u52a0\u8f7d\u6240\u6709\u8bc1\u5238\u7684\u4fe1\u606f\uff0c\u5e76\u7f13\u5b58\u5230\u5185\u5b58\u4e2d \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 \"\"\" secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] _securities = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) # \u66f4\u65b0\u8bc1\u5238\u7c7b\u578b\u5217\u8868 cls . _securities = _securities cls . 
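Note the defaults around `types()`: if it is never called, `eval()` falls back to stocks only; calling it with an empty list selects stocks plus indexes. A sketch selecting fund types for a given historical date (the date is illustrative; any past date makes `eval()` read from influxdb instead of the cache):

```python
import datetime

from omicron.models.security import Security


async def list_funds():
    # ETF / LOF funds listed as of 2023-01-04
    return await (
        Security.select(datetime.date(2023, 1, 4))
        .types(["etf", "lof"])
        .eval()
    )
```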
_security_types = set ( _securities [ \"type\" ]) cls . _stocks = _securities [ ( _securities [ \"type\" ] == \"stock\" ) | ( _securities [ \"type\" ] == \"index\" ) ] logger . info ( \" %d securities loaded, types: %s \" , len ( _securities ), cls . _security_types ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache is not None : cls . _securities_date = arrow . get ( date_in_cache ) . date () else : cls . _securities_date = datetime . date . today () return _securities else : # pragma: no cover return None @classmethod async def get_security_types ( cls ): if cls . _security_types : return list ( cls . _security_types ) else : return None @classmethod def get_stock ( cls , code ) -> NDArray [ security_info_dtype ]: \"\"\"\u6839\u636e`code`\u6765\u67e5\u627e\u5bf9\u5e94\u7684\u80a1\u7968\uff08\u542b\u6307\u6570\uff09\u5bf9\u8c61\u4fe1\u606f\u3002 \u5982\u679c\u60a8\u53ea\u6709\u80a1\u7968\u4ee3\u7801\uff0c\u60f3\u77e5\u9053\u8be5\u4ee3\u7801\u5bf9\u5e94\u7684\u80a1\u7968\u540d\u79f0\u3001\u522b\u540d\uff08\u663e\u793a\u540d\uff09\u3001\u4e0a\u5e02\u65e5\u671f\u7b49\u4fe1\u606f\uff0c\u5c31\u53ef\u4ee5\u4f7f\u7528\u6b64\u65b9\u6cd5\u6765\u83b7\u53d6\u76f8\u5173\u4fe1\u606f\u3002 \u8fd4\u56de\u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002\u60a8\u53ef\u4ee5\u8c61\u5b57\u5178\u4e00\u6837\u5b58\u53d6\u5b83\uff0c\u6bd4\u5982 ```python item = Security.get_stock(\"000001.XSHE\") print(item[\"alias\"]) ``` \u663e\u793a\u4e3a\"\u5e73\u5b89\u94f6\u884c\" Args: code: \u5f85\u67e5\u8be2\u7684\u80a1\u7968/\u6307\u6570\u4ee3\u7801 Returns: \u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20 \"\"\" if len ( cls . _securities ) == 0 : return None tmp = cls . _securities [ cls . _securities [ \"code\" ] == code ] if len ( tmp ) > 0 : if tmp [ \"type\" ] in [ \"stock\" , \"index\" ]: return tmp [ 0 ] return None @classmethod def fuzzy_match_ex ( cls , query : str ) -> Dict [ str , Tuple ]: # fixme: \u6b64\u65b9\u6cd5\u4e0eStock.fuzzy_match\u91cd\u590d\uff0c\u5e76\u4e14\u8fdb\u884c\u4e86\u7c7b\u578b\u9650\u5236\uff0c\u4f7f\u5f97\u5176\u4e0d\u9002\u5408\u653e\u5728Security\u91cc\uff0c\u4ee5\u53ca\u4f5c\u4e3a\u4e00\u4e2a\u901a\u7528\u65b9\u6cd5 query = query . upper () if re . match ( r \"\\d+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _securities if sec [ \"code\" ] . find ( query ) != - 1 and sec [ \"type\" ] == \"stock\" } elif re . match ( r \"[A-Z]+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _securities if sec [ \"name\" ] . startswith ( query ) and sec [ \"type\" ] == \"stock\" } else : return { sec [ \"code\" ]: sec . tolist () for sec in cls . _securities if sec [ \"alias\" ] . find ( query ) != - 1 and sec [ \"type\" ] == \"stock\" } @classmethod async def info ( cls , code , date = None ): _obj = await cls . query_security_via_date ( code , date ) if _obj is None : return None # \"_time\", \"code\", \"type\", \"alias\", \"end\", \"ipo\", \"name\" d1 = convert_nptime_to_datetime ( _obj [ \"ipo\" ]) . date () d2 = convert_nptime_to_datetime ( _obj [ \"end\" ]) . date () return { \"type\" : _obj [ \"type\" ], \"display_name\" : _obj [ \"alias\" ], \"alias\" : _obj [ \"alias\" ], \"end\" : d2 , \"start\" : d1 , \"name\" : _obj [ \"name\" ], } @classmethod async def name ( cls , code , date = None ): _security = await cls . 
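`Security.get_stock` is synchronous and reads only the in-memory securities table, so it works after `omicron.init()` has populated the class. Expanding slightly on the docstring's own example:

```python
from omicron.models.security import Security

# requires omicron.init() to have loaded the securities list first
item = Security.get_stock("000001.XSHE")
if item is not None:
    # the record behaves like a dict keyed by the security_info_dtype fields
    print(item["alias"], item["name"], item["ipo"], item["end"], item["type"])
```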
query_security_via_date ( code , date ) if _security is None : return None return _security [ \"name\" ] @classmethod async def alias ( cls , code , date = None ): return await cls . display_name ( code , date ) @classmethod async def display_name ( cls , code , date = None ): _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return _security [ \"alias\" ] @classmethod async def start_date ( cls , code , date = None ): _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return convert_nptime_to_datetime ( _security [ \"ipo\" ]) . date () @classmethod async def end_date ( cls , code , date = None ): _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return convert_nptime_to_datetime ( _security [ \"end\" ]) . date () @classmethod async def security_type ( cls , code , date = None ) -> SecurityType : _security = await cls . query_security_via_date ( code , date ) if _security is None : return None return _security [ \"type\" ] @classmethod async def query_security_via_date ( cls , code : str , date : datetime . date = None ): if date is None : # \u4ece\u5185\u5b58\u4e2d\u67e5\u627e\uff0c\u5982\u679c\u7f13\u5b58\u4e2d\u7684\u6570\u636e\u5df2\u66f4\u65b0\uff0c\u91cd\u65b0\u52a0\u8f7d\u5230\u5185\u5b58 date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache is not None : date = arrow . get ( date_in_cache ) . date () if date > cls . _securities_date : await cls . load_securities () results = cls . _securities [ cls . _securities [ \"code\" ] == code ] else : # \u4eceinfluxdb\u67e5\u627e date = tf . day_shift ( date , 0 ) results = await cls . load_securities_from_db ( date , code ) if results is not None and len ( results ) > 0 : return results [ 0 ] else : return None @classmethod def select ( cls , date : datetime . date = None ) -> Query : if date is None : return Query ( target_date = None ) else : return Query ( target_date = date ) @classmethod async def update_secs_cache ( cls , dt : datetime . date , securities : List [ Tuple ]): \"\"\"\u66f4\u65b0\u8bc1\u5238\u5217\u8868\u5230\u7f13\u5b58\u6570\u636e\u5e93\u4e2d Args: dt: \u8bc1\u5238\u5217\u8868\u5f52\u5c5e\u7684\u65e5\u671f securities: \u8bc1\u5238\u5217\u8868, \u5143\u7d20\u4e3a\u5143\u7ec4\uff0c\u5206\u522b\u4e3a\u4ee3\u7801\u3001\u522b\u540d\u3001\u540d\u79f0\u3001IPO\u65e5\u671f\u3001\u9000\u5e02\u65e5\u548c\u8bc1\u5238\u7c7b\u578b \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} key = \"security:all\" pipeline = cache . security . pipeline () pipeline . delete ( key ) for code , alias , name , start , end , _type in securities : pipeline . rpush ( key , f \" { code } , { alias } , { name } , { start } ,\" f \" { end } , { _type } \" ) await pipeline . execute () logger . info ( \"all securities saved to cache %s , %d secs\" , key , len ( securities )) # update latest date info await cache . security . set ( \"security:latest_date\" , dt . strftime ( \"%Y-%m- %d \" )) @classmethod async def save_securities ( cls , securities : List [ str ], dt : datetime . 
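The per-code lookups (`info`, `name`, `display_name`, `start_date`, `end_date`, `security_type`) are async because, when a historical `date` is passed, they fall through `query_security_via_date` to influxdb. A sketch, assuming omicron is already initialized:

```python
import datetime

from omicron.models.security import Security


async def show_security_info(code: str = "000001.XSHE"):
    # without a date, the latest cached securities list is used
    info = await Security.info(code)
    print(info["display_name"], info["start"], info["end"], info["type"])

    # with a past date, the record for that day is loaded from influxdb
    name_then = await Security.display_name(code, datetime.date(2022, 1, 4))
    print(name_then)
```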
date ): \"\"\"\u4fdd\u5b58\u6307\u5b9a\u7684\u8bc1\u5238\u4fe1\u606f\u5230\u7f13\u5b58\u4e2d\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: securities: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if dt is None or len ( securities ) == 0 : return measurement = \"security_list\" client = get_influx_client () # code, alias, name, start, end, type security_list = np . array ( [ ( dt , x [ 0 ], f \" { x [ 0 ] } , { x [ 1 ] } , { x [ 2 ] } , { x [ 3 ] } , { x [ 4 ] } , { x [ 5 ] } \" ) for x in securities ], dtype = security_db_dtype , ) await client . save ( security_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ] ) @classmethod async def load_securities_from_db ( cls , target_date : datetime . date , code : str = None ): if target_date is None : return None client = get_influx_client () measurement = \"security_list\" flux = ( Flux () . measurement ( measurement ) . range ( target_date , target_date ) . bucket ( client . _bucket ) . fields ([ \"info\" ]) ) if code is not None and len ( code ) > 0 : flux . tags ({ \"code\" : code }) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return None ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" , \"code\" , \"info\" ], time_col = \"_time\" , engine = \"c\" , ) actual = ds ( data ) secs = actual . to_records ( index = False ) if len ( secs ) != 0 : # \"_time\", \"code\", \"code, alias, name, start, end, type\" _securities = np . array ( [ tuple ( x [ \"info\" ] . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) return _securities else : return None @classmethod async def get_datescope_from_db ( cls ): # fixme: \u51fd\u6570\u540d\u65e0\u6cd5\u53cd\u6620\u7528\u9014\uff0c\u9700\u8981\u589e\u52a0\u6587\u6863\u6ce8\u91ca\uff0c\u8bf4\u660e\u8be5\u51fd\u6570\u7684\u4f5c\u7528,\u6216\u8005\u4e0d\u5e94\u8be5\u51fa\u73b0\u5728\u6b64\u7c7b\u4e2d\uff1f client = get_influx_client () measurement = \"security_list\" date1 = arrow . get ( \"2005-01-01\" ) . date () date2 = arrow . now () . naive . date () flux = ( Flux () . measurement ( measurement ) . range ( date1 , date2 ) . bucket ( client . _bucket ) . tags ({ \"code\" : \"000001.XSHE\" }) ) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return None , None ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" ], time_col = \"_time\" , engine = \"c\" ) actual = ds ( data ) secs = actual . to_records ( index = False ) if len ( secs ) != 0 : d1 = convert_nptime_to_datetime ( secs [ 0 ][ \"_time\" ]) d2 = convert_nptime_to_datetime ( secs [ len ( secs ) - 1 ][ \"_time\" ]) return d1 . date (), d2 . date () else : return None , None @classmethod async def _notify_special_bonusnote ( cls , code , note , cancel_date ): # fixme: \u8fd9\u4e2a\u51fd\u6570\u5e94\u8be5\u51fa\u73b0\u5728omega\u4e2d\uff1f default_cancel_date = datetime . date ( 2099 , 1 , 1 ) # \u9ed8\u8ba4\u65e0\u53d6\u6d88\u516c\u544a # report this special event to notify user if cancel_date != default_cancel_date : ding ( \"security %s , bonus_cancel_pub_date %s \" % ( code , cancel_date )) if note . find ( \"\u6d41\u901a\" ) != - 1 : # \u68c0\u67e5\u662f\u5426\u6709\u201c\u6d41\u901a\u80a1\u201d\u6587\u5b57 ding ( \"security %s , special xrxd note: %s \" % ( code , note )) @classmethod async def save_xrxd_reports ( cls , reports : List [ str ], dt : datetime . 
date ): # fixme: \u6b64\u51fd\u6570\u5e94\u8be5\u5c5e\u4e8eomega? \"\"\"\u4fdd\u5b581\u5e74\u5185\u7684\u5206\u7ea2\u9001\u80a1\u4fe1\u606f\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: reports: \u5206\u7ea2\u9001\u80a1\u516c\u544a \"\"\" # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5), # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10) if len ( reports ) == 0 or dt is None : return # read reports from db and convert to dict map reports_in_db = {} dt_start = dt - datetime . timedelta ( days = 366 ) # \u5f80\u524d\u56de\u6eaf366\u5929 dt_end = dt + datetime . timedelta ( days = 366 ) # \u5f80\u540e\u5ef6\u957f366\u5929 existing_records = await cls . _load_xrxd_from_db ( None , dt_start , dt_end ) for record in existing_records : code = record [ 0 ] if code not in reports_in_db : reports_in_db [ code ] = [ record ] else : reports_in_db [ code ] . append ( record ) records = [] # \u51c6\u5907\u5199\u5165db for x in reports : code = x [ 0 ] note = x [ 2 ] cancel_date = x [ 10 ] existing_items = reports_in_db . get ( code , None ) if existing_items is None : # \u65b0\u8bb0\u5f55 record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) else : new_record = True for item in existing_items : existing_date = convert_nptime_to_datetime ( item [ 1 ]) . date () if existing_date == x [ 1 ]: # \u5982\u679cxr_date\u76f8\u540c\uff0c\u4e0d\u66f4\u65b0 new_record = False continue if new_record : record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) logger . info ( \"save_xrxd_reports, %d records to be saved\" , len ( records )) if len ( records ) == 0 : return measurement = \"security_xrxd_reports\" client = get_influx_client () # a_xr_date(_time), code(tag), info report_list = np . array ( records , dtype = security_db_dtype ) await client . save ( report_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ]) @classmethod async def _load_xrxd_from_db ( cls , code , dt_start : datetime . date , dt_end : datetime . date ): if dt_start is None or dt_end is None : return [] client = get_influx_client () measurement = \"security_xrxd_reports\" flux = ( Flux () . measurement ( measurement ) . range ( dt_start , dt_end ) . bucket ( client . _bucket ) . fields ([ \"info\" ]) ) if code is not None and len ( code ) > 0 : flux . tags ({ \"code\" : code }) data = await client . query ( flux ) if len ( data ) == 2 : # \\r\\n return [] ds = DataframeDeserializer ( sort_values = \"_time\" , usecols = [ \"_time\" , \"code\" , \"info\" ], time_col = \"_time\" , engine = \"c\" , ) actual = ds ( data ) secs = actual . to_records ( index = False ) if len ( secs ) != 0 : _reports = np . array ( [ tuple ( x [ \"info\" ] . split ( \"|\" )) for x in secs ], dtype = xrxd_info_dtype ) return _reports else : return [] @classmethod async def get_xrxd_info ( cls , dt : datetime . 
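`get_xrxd_info` reads back the ex-rights/ex-dividend reports written by `save_xrxd_reports` and returns them as readable dicts. A sketch:

```python
import datetime

from omicron.models.security import Security


async def show_xrxd(dt: datetime.date, code: str = "000001.XSHE"):
    reports = await Security.get_xrxd_info(dt, code)
    if not reports:  # None when no report exists for that day
        return
    for r in reports:
        print(r["code"], r["xr_date"], r["bonus"], r["dividend"],
              r["transfer"], r["bonusnote"])
```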
date , code : str = None ): if dt is None : return None # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5), # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10) reports = await cls . _load_xrxd_from_db ( code , dt , dt ) if len ( reports ) == 0 : return None readable_reports = [] for report in reports : xr_date = convert_nptime_to_datetime ( report [ 1 ]) . date () readable_reports . append ( { \"code\" : report [ 0 ], \"xr_date\" : xr_date , \"bonus\" : report [ 3 ], \"dividend\" : report [ 4 ], \"transfer\" : report [ 5 ], \"bonusnote\" : report [ 2 ], } ) return readable_reports","title":"Security"},{"location":"api/security/#omicron.models.security.Security.get_stock","text":"\u6839\u636e code \u6765\u67e5\u627e\u5bf9\u5e94\u7684\u80a1\u7968\uff08\u542b\u6307\u6570\uff09\u5bf9\u8c61\u4fe1\u606f\u3002 \u5982\u679c\u60a8\u53ea\u6709\u80a1\u7968\u4ee3\u7801\uff0c\u60f3\u77e5\u9053\u8be5\u4ee3\u7801\u5bf9\u5e94\u7684\u80a1\u7968\u540d\u79f0\u3001\u522b\u540d\uff08\u663e\u793a\u540d\uff09\u3001\u4e0a\u5e02\u65e5\u671f\u7b49\u4fe1\u606f\uff0c\u5c31\u53ef\u4ee5\u4f7f\u7528\u6b64\u65b9\u6cd5\u6765\u83b7\u53d6\u76f8\u5173\u4fe1\u606f\u3002 \u8fd4\u56de\u7c7b\u578b\u4e3a security_info_dtype \u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002\u60a8\u53ef\u4ee5\u8c61\u5b57\u5178\u4e00\u6837\u5b58\u53d6\u5b83\uff0c\u6bd4\u5982 1 2 item = Security . get_stock ( \"000001.XSHE\" ) print ( item [ \"alias\" ]) \u663e\u793a\u4e3a\"\u5e73\u5b89\u94f6\u884c\" Parameters: Name Type Description Default code \u5f85\u67e5\u8be2\u7684\u80a1\u7968/\u6307\u6570\u4ee3\u7801 required Returns: Type Description numpy.ndarray[Any, numpy.dtype[[('code', 'O'), ('alias', 'O'), ('name', 'O'), ('ipo', 'datetime64[s]'), ('end', 'datetime64[s]'), ('type', 'O')]]] \u7c7b\u578b\u4e3a security_info_dtype \u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20 Source code in omicron/models/security.py @classmethod def get_stock ( cls , code ) -> NDArray [ security_info_dtype ]: \"\"\"\u6839\u636e`code`\u6765\u67e5\u627e\u5bf9\u5e94\u7684\u80a1\u7968\uff08\u542b\u6307\u6570\uff09\u5bf9\u8c61\u4fe1\u606f\u3002 \u5982\u679c\u60a8\u53ea\u6709\u80a1\u7968\u4ee3\u7801\uff0c\u60f3\u77e5\u9053\u8be5\u4ee3\u7801\u5bf9\u5e94\u7684\u80a1\u7968\u540d\u79f0\u3001\u522b\u540d\uff08\u663e\u793a\u540d\uff09\u3001\u4e0a\u5e02\u65e5\u671f\u7b49\u4fe1\u606f\uff0c\u5c31\u53ef\u4ee5\u4f7f\u7528\u6b64\u65b9\u6cd5\u6765\u83b7\u53d6\u76f8\u5173\u4fe1\u606f\u3002 \u8fd4\u56de\u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002\u60a8\u53ef\u4ee5\u8c61\u5b57\u5178\u4e00\u6837\u5b58\u53d6\u5b83\uff0c\u6bd4\u5982 ```python item = Security.get_stock(\"000001.XSHE\") print(item[\"alias\"]) ``` \u663e\u793a\u4e3a\"\u5e73\u5b89\u94f6\u884c\" Args: code: \u5f85\u67e5\u8be2\u7684\u80a1\u7968/\u6307\u6570\u4ee3\u7801 Returns: \u7c7b\u578b\u4e3a`security_info_dtype`\u7684numpy\u6570\u7ec4\uff0c\u4f46\u4ec5\u5305\u542b\u4e00\u4e2a\u5143\u7d20 \"\"\" if len ( cls . _securities ) == 0 : return None tmp = cls . _securities [ cls . _securities [ \"code\" ] == code ] if len ( tmp ) > 0 : if tmp [ \"type\" ] in [ \"stock\" , \"index\" ]: return tmp [ 0 ] return None","title":"get_stock()"},{"location":"api/security/#omicron.models.security.Security.init","text":"\u521d\u59cb\u5316Security. 
\u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7 omicron.init \uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Exceptions: Type Description DataNotReadyError \u5982\u679comicron\u672a\u521d\u59cb\u5316\uff0c\u6216\u8005cache\u4e2d\u672a\u52a0\u8f7d\u6700\u65b0\u8bc1\u5238\u5217\u8868\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Source code in omicron/models/security.py @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316Security. \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Raises: DataNotReadyError: \u5982\u679comicron\u672a\u521d\u59cb\u5316\uff0c\u6216\u8005cache\u4e2d\u672a\u52a0\u8f7d\u6700\u65b0\u8bc1\u5238\u5217\u8868\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38\u3002 \"\"\" # read all securities from redis, 7111 records now # {'index', 'stock'} # {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if len ( cls . _securities ) > 100 : return True secs = await cls . load_securities () if secs is None or len ( secs ) == 0 : # pragma: no cover raise DataNotReadyError ( \"No securities in cache, make sure you have called omicron.init() first.\" ) print ( \"init securities done\" ) return True","title":"init()"},{"location":"api/security/#omicron.models.security.Security.load_securities","text":"\u52a0\u8f7d\u6240\u6709\u8bc1\u5238\u7684\u4fe1\u606f\uff0c\u5e76\u7f13\u5b58\u5230\u5185\u5b58\u4e2d \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7 omicron.init \uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 Source code in omicron/models/security.py @classmethod async def load_securities ( cls ): \"\"\"\u52a0\u8f7d\u6240\u6709\u8bc1\u5238\u7684\u4fe1\u606f\uff0c\u5e76\u7f13\u5b58\u5230\u5185\u5b58\u4e2d \u4e00\u822c\u800c\u8a00\uff0comicron\u7684\u4f7f\u7528\u8005\u65e0\u987b\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u5b83\u4f1a\u5728omicron\u521d\u59cb\u5316\uff08\u901a\u8fc7`omicron.init`\uff09\u65f6\uff0c\u88ab\u81ea\u52a8\u8c03\u7528\u3002 \"\"\" secs = await cache . security . lrange ( \"security:all\" , 0 , - 1 ) if len ( secs ) != 0 : # using np.datetime64[s] _securities = np . array ( [ tuple ( x . split ( \",\" )) for x in secs ], dtype = security_info_dtype ) # \u66f4\u65b0\u8bc1\u5238\u7c7b\u578b\u5217\u8868 cls . _securities = _securities cls . _security_types = set ( _securities [ \"type\" ]) cls . _stocks = _securities [ ( _securities [ \"type\" ] == \"stock\" ) | ( _securities [ \"type\" ] == \"index\" ) ] logger . info ( \" %d securities loaded, types: %s \" , len ( _securities ), cls . _security_types ) date_in_cache = await cache . security . get ( \"security:latest_date\" ) if date_in_cache is not None : cls . _securities_date = arrow . get ( date_in_cache ) . date () else : cls . _securities_date = datetime . date . 
today () return _securities else : # pragma: no cover return None","title":"load_securities()"},{"location":"api/security/#omicron.models.security.Security.save_securities","text":"\u4fdd\u5b58\u6307\u5b9a\u7684\u8bc1\u5238\u4fe1\u606f\u5230\u7f13\u5b58\u4e2d\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Parameters: Name Type Description Default securities List[str] \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 required Source code in omicron/models/security.py @classmethod async def save_securities ( cls , securities : List [ str ], dt : datetime . date ): \"\"\"\u4fdd\u5b58\u6307\u5b9a\u7684\u8bc1\u5238\u4fe1\u606f\u5230\u7f13\u5b58\u4e2d\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: securities: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} if dt is None or len ( securities ) == 0 : return measurement = \"security_list\" client = get_influx_client () # code, alias, name, start, end, type security_list = np . array ( [ ( dt , x [ 0 ], f \" { x [ 0 ] } , { x [ 1 ] } , { x [ 2 ] } , { x [ 3 ] } , { x [ 4 ] } , { x [ 5 ] } \" ) for x in securities ], dtype = security_db_dtype , ) await client . save ( security_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ] )","title":"save_securities()"},{"location":"api/security/#omicron.models.security.Security.save_xrxd_reports","text":"\u4fdd\u5b581\u5e74\u5185\u7684\u5206\u7ea2\u9001\u80a1\u4fe1\u606f\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Parameters: Name Type Description Default reports List[str] \u5206\u7ea2\u9001\u80a1\u516c\u544a required Source code in omicron/models/security.py @classmethod async def save_xrxd_reports ( cls , reports : List [ str ], dt : datetime . date ): # fixme: \u6b64\u51fd\u6570\u5e94\u8be5\u5c5e\u4e8eomega? \"\"\"\u4fdd\u5b581\u5e74\u5185\u7684\u5206\u7ea2\u9001\u80a1\u4fe1\u606f\uff0c\u5e76\u4e14\u5b58\u5165influxdb\uff0c\u5b9a\u65f6job\u8c03\u7528\u672c\u63a5\u53e3 Args: reports: \u5206\u7ea2\u9001\u80a1\u516c\u544a \"\"\" # code(0), a_xr_date, board_plan_bonusnote, bonus_ratio_rmb(3), dividend_ratio, transfer_ratio(5), # at_bonus_ratio_rmb(6), report_date, plan_progress, implementation_bonusnote, bonus_cancel_pub_date(10) if len ( reports ) == 0 or dt is None : return # read reports from db and convert to dict map reports_in_db = {} dt_start = dt - datetime . timedelta ( days = 366 ) # \u5f80\u524d\u56de\u6eaf366\u5929 dt_end = dt + datetime . timedelta ( days = 366 ) # \u5f80\u540e\u5ef6\u957f366\u5929 existing_records = await cls . _load_xrxd_from_db ( None , dt_start , dt_end ) for record in existing_records : code = record [ 0 ] if code not in reports_in_db : reports_in_db [ code ] = [ record ] else : reports_in_db [ code ] . append ( record ) records = [] # \u51c6\u5907\u5199\u5165db for x in reports : code = x [ 0 ] note = x [ 2 ] cancel_date = x [ 10 ] existing_items = reports_in_db . get ( code , None ) if existing_items is None : # \u65b0\u8bb0\u5f55 record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) else : new_record = True for item in existing_items : existing_date = convert_nptime_to_datetime ( item [ 1 ]) . 
date () if existing_date == x [ 1 ]: # \u5982\u679cxr_date\u76f8\u540c\uff0c\u4e0d\u66f4\u65b0 new_record = False continue if new_record : record = ( x [ 1 ], x [ 0 ], f \" { x [ 0 ] } | { x [ 1 ] } | { x [ 2 ] } | { x [ 3 ] } | { x [ 4 ] } | { x [ 5 ] } | { x [ 6 ] } | { x [ 7 ] } | { x [ 8 ] } | { x [ 9 ] } | { x [ 10 ] } \" , ) records . append ( record ) await cls . _notify_special_bonusnote ( code , note , cancel_date ) logger . info ( \"save_xrxd_reports, %d records to be saved\" , len ( records )) if len ( records ) == 0 : return measurement = \"security_xrxd_reports\" client = get_influx_client () # a_xr_date(_time), code(tag), info report_list = np . array ( records , dtype = security_db_dtype ) await client . save ( report_list , measurement , time_key = \"frame\" , tag_keys = [ \"code\" ])","title":"save_xrxd_reports()"},{"location":"api/security/#omicron.models.security.Security.update_secs_cache","text":"\u66f4\u65b0\u8bc1\u5238\u5217\u8868\u5230\u7f13\u5b58\u6570\u636e\u5e93\u4e2d Parameters: Name Type Description Default dt date \u8bc1\u5238\u5217\u8868\u5f52\u5c5e\u7684\u65e5\u671f required securities List[Tuple] \u8bc1\u5238\u5217\u8868, \u5143\u7d20\u4e3a\u5143\u7ec4\uff0c\u5206\u522b\u4e3a\u4ee3\u7801\u3001\u522b\u540d\u3001\u540d\u79f0\u3001IPO\u65e5\u671f\u3001\u9000\u5e02\u65e5\u548c\u8bc1\u5238\u7c7b\u578b required Source code in omicron/models/security.py @classmethod async def update_secs_cache ( cls , dt : datetime . date , securities : List [ Tuple ]): \"\"\"\u66f4\u65b0\u8bc1\u5238\u5217\u8868\u5230\u7f13\u5b58\u6570\u636e\u5e93\u4e2d Args: dt: \u8bc1\u5238\u5217\u8868\u5f52\u5c5e\u7684\u65e5\u671f securities: \u8bc1\u5238\u5217\u8868, \u5143\u7d20\u4e3a\u5143\u7ec4\uff0c\u5206\u522b\u4e3a\u4ee3\u7801\u3001\u522b\u540d\u3001\u540d\u79f0\u3001IPO\u65e5\u671f\u3001\u9000\u5e02\u65e5\u548c\u8bc1\u5238\u7c7b\u578b \"\"\" # stock: {'index', 'stock'} # funds: {'fjb', 'mmf', 'reits', 'fja', 'fjm'} # {'etf', 'lof'} key = \"security:all\" pipeline = cache . security . pipeline () pipeline . delete ( key ) for code , alias , name , start , end , _type in securities : pipeline . rpush ( key , f \" { code } , { alias } , { name } , { start } ,\" f \" { end } , { _type } \" ) await pipeline . execute () logger . info ( \"all securities saved to cache %s , %d secs\" , key , len ( securities )) # update latest date info await cache . security . set ( \"security:latest_date\" , dt . strftime ( \"%Y-%m- %d \" ))","title":"update_secs_cache()"},{"location":"api/stock/","text":"Stock ( Security ) \u00b6 Stock\u5bf9\u8c61\u7528\u4e8e\u5f52\u96c6\u67d0\u652f\u8bc1\u5238\uff08\u80a1\u7968\u548c\u6307\u6570\uff0c\u4e0d\u5305\u62ec\u5176\u5b83\u6295\u8d44\u54c1\u79cd\uff09\u7684\u76f8\u5173\u4fe1\u606f\uff0c\u6bd4\u5982\u884c\u60c5\u6570\u636e\uff08OHLC\u7b49\uff09\u3001\u5e02\u503c\u6570\u636e\u3001\u6240\u5c5e\u6982\u5ff5\u5206\u7c7b\u7b49\u3002 Source code in omicron/models/stock.py class Stock ( Security ): \"\"\" Stock\u5bf9\u8c61\u7528\u4e8e\u5f52\u96c6\u67d0\u652f\u8bc1\u5238\uff08\u80a1\u7968\u548c\u6307\u6570\uff0c\u4e0d\u5305\u62ec\u5176\u5b83\u6295\u8d44\u54c1\u79cd\uff09\u7684\u76f8\u5173\u4fe1\u606f\uff0c\u6bd4\u5982\u884c\u60c5\u6570\u636e\uff08OHLC\u7b49\uff09\u3001\u5e02\u503c\u6570\u636e\u3001\u6240\u5c5e\u6982\u5ff5\u5206\u7c7b\u7b49\u3002 \"\"\" _is_cache_empty = True def __init__ ( self , code : str ): self . _code = code self . _stock = self . get_stock ( code ) assert self . _stock , \"\u7cfb\u7edf\u4e2d\u4e0d\u5b58\u5728\u8be5code\" ( _ , self . 
_display_name , self . _name , ipo , end , _type ) = self . _stock self . _start_date = convert_nptime_to_datetime ( ipo ) . date () self . _end_date = convert_nptime_to_datetime ( end ) . date () self . _type = SecurityType ( _type ) @classmethod def choose_listed ( cls , dt : datetime . date , types : List [ str ] = [ \"stock\" , \"index\" ]): cond = np . array ([ False ] * len ( cls . _stocks )) dt = datetime . datetime . combine ( dt , datetime . time ()) for type_ in types : cond |= cls . _stocks [ \"type\" ] == type_ result = cls . _stocks [ cond ] result = result [ result [ \"end\" ] > dt ] result = result [ result [ \"ipo\" ] <= dt ] # result = np.array(result, dtype=cls.stock_info_dtype) return result [ \"code\" ] . tolist () @classmethod def fuzzy_match ( cls , query : str ) -> Dict [ str , Tuple ]: \"\"\"\u5bf9\u80a1\u7968/\u6307\u6570\u8fdb\u884c\u6a21\u7cca\u5339\u914d\u67e5\u627e query\u53ef\u4ee5\u662f\u80a1\u7968/\u6307\u6570\u4ee3\u7801\uff0c\u4e5f\u53ef\u4ee5\u662f\u5b57\u6bcd\uff08\u6309name\u67e5\u627e\uff09\uff0c\u4e5f\u53ef\u4ee5\u662f\u6c49\u5b57\uff08\u6309\u663e\u793a\u540d\u67e5\u627e\uff09 Args: query (str): \u67e5\u8be2\u5b57\u7b26\u4e32 Returns: Dict[str, Tuple]: \u67e5\u8be2\u7ed3\u679c\uff0c\u5176\u4e2dTuple\u4e3a(code, display_name, name, start, end, type) \"\"\" query = query . upper () if re . match ( r \"\\d+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"code\" ] . startswith ( query ) } elif re . match ( r \"[A-Z]+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"name\" ] . startswith ( query ) } else : return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"alias\" ] . find ( query ) != - 1 } def __str__ ( self ): return f \" { self . display_name } [ { self . code } ]\" @property def ipo_date ( self ) -> datetime . date : return self . _start_date @property def display_name ( self ) -> str : return self . _display_name @property def name ( self ) -> str : return self . _name @property def end_date ( self ) -> datetime . date : return self . _end_date @property def code ( self ) -> str : return self . _code @property def sim_code ( self ) -> str : return re . sub ( r \"\\.XSH[EG]\" , \"\" , self . code ) @property def security_type ( self ) -> SecurityType : \"\"\"\u8fd4\u56de\u8bc1\u5238\u7c7b\u578b Returns: SecurityType: [description] \"\"\" return self . _type @staticmethod def simplify_code ( code ) -> str : return re . 
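A `Stock` instance wraps one entry of the securities table; the constructor asserts the code exists, so this too requires `omicron.init()` first. A sketch (printed values are illustrative):

```python
from omicron.models.stock import Stock

# raises AssertionError if the code is not in the securities table
payh = Stock("000001.XSHE")
print(payh)                 # "平安银行[000001.XSHE]"
print(payh.display_name)    # display name (alias)
print(payh.name)            # security name, e.g. "PAYH"
print(payh.ipo_date, payh.end_date)
print(payh.security_type)   # SecurityType.STOCK
print(payh.sim_code)        # "000001"
```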
sub ( r \"\\.XSH[EG]\" , \"\" , code ) @staticmethod def format_code ( code ) -> str : \"\"\"\u65b0\u4e09\u677f\u548c\u5317\u4ea4\u6240\u7684\u80a1\u7968, \u6682\u4e0d\u652f\u6301, \u9ed8\u8ba4\u8fd4\u56deNone \u4e0a\u8bc1A\u80a1: 600\u3001601\u3001603\u3001605 \u6df1\u8bc1A\u80a1: 000\u3001001 \u4e2d\u5c0f\u677f: 002\u3001003 \u521b\u4e1a\u677f: 300/301 \u79d1\u521b\u677f: 688 \u65b0\u4e09\u677f: 82\u300183\u300187\u300188\u3001430\u3001420\u3001400 \u5317\u4ea4\u6240: 43\u300183\u300187\u300188 \"\"\" if not code or len ( code ) != 6 : return None prefix = code [ 0 ] if prefix in ( \"0\" , \"3\" ): return f \" { code } .XSHE\" elif prefix == \"6\" : return f \" { code } .XSHG\" else : return None def days_since_ipo ( self ) -> int : \"\"\"\u83b7\u53d6\u4e0a\u5e02\u4ee5\u6765\u7ecf\u8fc7\u4e86\u591a\u5c11\u4e2a\u4ea4\u6613\u65e5 \u7531\u4e8e\u53d7\u4ea4\u6613\u65e5\u5386\u9650\u5236\uff082005\u5e741\u67084\u65e5\u4e4b\u524d\u7684\u4ea4\u6613\u65e5\u5386\u6ca1\u6709\uff09\uff0c\u5bf9\u4e8e\u5728\u4e4b\u524d\u4e0a\u5e02\u7684\u54c1\u79cd\uff0c\u90fd\u8fd4\u56de\u4ece2005\u5e741\u67084\u65e5\u8d77\u7684\u65e5\u671f\u3002 Returns: int: [description] \"\"\" epoch_start = arrow . get ( \"2005-01-04\" ) . date () ipo_day = self . ipo_date if self . ipo_date > epoch_start else epoch_start return tf . count_day_frames ( ipo_day , arrow . now () . date ()) @staticmethod def qfq ( bars : BarsArray ) -> BarsArray : \"\"\"\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c\"\"\" # todo: \u8fd9\u91cc\u53ef\u4ee5\u4f18\u5316 if bars . size == 0 : return bars last = bars [ - 1 ][ \"factor\" ] for field in [ \"open\" , \"high\" , \"low\" , \"close\" , \"volume\" ]: bars [ field ] = bars [ field ] * ( bars [ \"factor\" ] / last ) return bars @classmethod async def batch_get_min_level_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , start : Frame , end : Frame , fq : bool = True , ) -> Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u5206\u949f\u7ea7\u522b\uff08\u53731m, 5m, 15m, 30m\u548c60m)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_min_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u80a1\u7968/\u6307\u6570\u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4\u3002\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. 
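`format_code` only recognizes Shanghai/Shenzhen A-share prefixes and returns `None` for anything else (新三板 and 北交所 codes are not supported yet), while `qfq` rescales OHLC and volume by `factor / last_factor` to produce forward-adjusted bars. A sketch:

```python
from omicron.models.stock import Stock

print(Stock.format_code("000001"))   # "000001.XSHE"
print(Stock.format_code("600000"))   # "600000.XSHG"
print(Stock.format_code("430047"))   # None: NEEQ/BSE codes are not supported

payh = Stock("000001.XSHE")
# trade days since IPO, counted from 2005-01-04 at the earliest
print(payh.days_since_ipo())

# forward-adjust a bars array fetched elsewhere (it must carry a "factor" column):
# adjusted = Stock.qfq(bars)
```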
Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" closed_end = tf . floor ( end , frame_type ) n = tf . count_frames ( start , closed_end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( 1 , max_query_size // n ) ff = tf . first_min_frame ( datetime . datetime . now (), frame_type ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] if end < ff : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) part2 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) elif start >= ff : part1 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) cached = cached [ cached [ \"frame\" ] >= start ] part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) else : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , ff ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ part1 , part2 ]) for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . qfq ( bars ) yield code , bars @classmethod async def batch_get_day_level_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , start : Frame , end : Frame , fq : bool = True , ) -> Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u65e5\u7ebf\u7ea7\u522b\uff08\u53731d, 1w, 1M)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_day_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" today = datetime . datetime . now () . 
date () # \u65e5\u7ebf\uff0cend\u4e0d\u7b49\u4e8e\u6700\u540e\u4ea4\u6613\u65e5\uff0c\u6b64\u65f6\u5df2\u65e0\u7f13\u5b58 if frame_type == FrameType . DAY and end == tf . floor ( today , frame_type ): from_cache = True elif frame_type != FrameType . DAY and start > tf . floor ( today , frame_type ): from_cache = True else : from_cache = False n = tf . count_frames ( start , end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( max_query_size // n , 1 ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] persisted = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) if from_cache : cached = await cls . _batch_get_cached_bars_n ( frame_type , 1 , end , batch_codes ) cached = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ persisted , cached ]) else : df = persisted for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . qfq ( bars ) yield code , bars @classmethod async def get_bars_in_range ( cls , code : str , frame_type : FrameType , start : Frame , end : Frame = None , fq = True , unclosed = True , ) -> BarsArray : \"\"\"\u83b7\u53d6\u6307\u5b9a\u8bc1\u5238\uff08`code`\uff09\u5728[`start`, `end`]\u671f\u95f4\u5e27\u7c7b\u578b\u4e3a`frame_type`\u7684\u884c\u60c5\u6570\u636e\u3002 Args: code : \u8bc1\u5238\u4ee3\u7801 frame_type : \u884c\u60c5\u6570\u636e\u7684\u5e27\u7c7b\u578b start : \u8d77\u59cb\u65f6\u95f4 end : \u7ed3\u675f\u65f6\u95f4,\u5982\u679c\u4e3aNone\uff0c\u5219\u8868\u660e\u53d6\u5230\u5f53\u524d\u65f6\u95f4\u3002 fq : \u662f\u5426\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c unclosed : \u662f\u5426\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e \"\"\" now = datetime . datetime . now () if frame_type in tf . day_level_frames : end = end or now . date () if unclosed and tf . day_shift ( end , 0 ) == now . date (): part2 = await cls . _get_cached_bars_n ( code , 1 , frame_type ) else : part2 = np . array ([], dtype = bars_dtype ) # get rest from persisted part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) bars = np . concatenate (( part1 , part2 )) else : end = end or now closed_end = tf . floor ( end , frame_type ) ff_min1 = tf . first_min_frame ( now , FrameType . MIN1 ) if tf . day_shift ( end , 0 ) < now . date () or end < ff_min1 : part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) part2 = np . array ([], dtype = bars_dtype ) elif start >= ff_min1 : # all in cache part1 = np . array ([], dtype = bars_dtype ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) part2 = part2 [ part2 [ \"frame\" ] >= start ] else : # in both cache and persisted ff = tf . first_min_frame ( now , frame_type ) part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , ff ) n = tf . count_frames ( ff , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) if not unclosed : part2 = part2 [ part2 [ \"frame\" ] <= closed_end ] bars = np . concatenate (( part1 , part2 )) if fq : return cls . 
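Both batch fetchers are async generators yielding `(code, bars)` pairs, with the per-request batch size derived from influxdb's max query size divided by the number of frames asked for. A sketch of the day-level variant (dates are illustrative; `FrameType` is assumed to come from omicron's `coretypes` dependency):

```python
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock


async def fetch_daily_bars(codes):
    start = datetime.date(2023, 1, 4)
    end = datetime.date(2023, 2, 3)
    async for code, bars in Stock.batch_get_day_level_bars_in_range(
        codes, FrameType.DAY, start, end, fq=True
    ):
        # bars is a numpy array of dtype coretypes.bars_dtype, forward-adjusted
        print(code, len(bars))
```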
qfq ( bars ) else : return bars @classmethod async def get_bars ( cls , code : str , n : int , frame_type : FrameType , end : Frame = None , fq = True , unclosed = True , ) -> BarsArray : \"\"\"\u83b7\u53d6\u5230`end`\u4e3a\u6b62\u7684`n`\u4e2a\u884c\u60c5\u6570\u636e\u3002 \u8fd4\u56de\u7684\u6570\u636e\u662f\u6309\u7167\u65f6\u95f4\u987a\u5e8f\u9012\u589e\u6392\u5e8f\u7684\u3002\u5728\u9047\u5230\u505c\u724c\u7684\u60c5\u51b5\u65f6\uff0c\u8be5\u65f6\u6bb5\u6570\u636e\u5c06\u88ab\u8df3\u8fc7\uff0c\u56e0\u6b64\u8fd4\u56de\u7684\u8bb0\u5f55\u53ef\u80fd\u4e0d\u662f\u4ea4\u6613\u65e5\u8fde\u7eed\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u4e0d\u8db3`n`\u4e2a\u3002 \u5982\u679c\u7cfb\u7edf\u5f53\u524d\u6ca1\u6709\u5230\u6307\u5b9a\u65f6\u95f4`end`\u7684\u6570\u636e\uff0c\u5c06\u5c3d\u6700\u5927\u52aa\u529b\u8fd4\u56de\u6570\u636e\u3002\u8c03\u7528\u8005\u53ef\u4ee5\u901a\u8fc7\u5224\u65ad\u6700\u540e\u4e00\u6761\u6570\u636e\u7684\u65f6\u95f4\u662f\u5426\u7b49\u4e8e`end`\u6765\u5224\u65ad\u662f\u5426\u83b7\u53d6\u5230\u4e86\u5168\u90e8\u6570\u636e\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 n: \u8bb0\u5f55\u6570 frame_type: \u5e27\u7c7b\u578b end: \u622a\u6b62\u65f6\u95f4,\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 fq: \u662f\u5426\u5bf9\u8fd4\u56de\u8bb0\u5f55\u8fdb\u884c\u590d\u6743\u3002\u5982\u679c\u4e3a`True`\u7684\u8bdd\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. unclosed: \u662f\u5426\u5305\u542b\u6700\u65b0\u672a\u6536\u76d8\u7684\u6570\u636e\uff1f Defaults to True. Returns: \u8fd4\u56dedtype\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 \"\"\" now = datetime . datetime . now () try : cached = np . array ([], dtype = bars_dtype ) if frame_type in tf . day_level_frames : if end is None : end = now . date () elif type ( end ) == datetime . datetime : end = end . date () n0 = n if unclosed : cached = await cls . _get_cached_bars_n ( code , 1 , frame_type ) if cached . size > 0 : # \u5982\u679c\u7f13\u5b58\u7684\u672a\u6536\u76d8\u65e5\u671f > end\uff0c\u5219\u8be5\u7f13\u5b58\u4e0d\u662f\u9700\u8981\u7684 if cached [ 0 ][ \"frame\" ] . item () . date () > end : cached = np . array ([], dtype = bars_dtype ) else : n0 = n - 1 else : end = end or now closed_frame = tf . floor ( end , frame_type ) # fetch one more bar, in case we should discard unclosed bar cached = await cls . _get_cached_bars_n ( code , n + 1 , frame_type , end ) if not unclosed : cached = cached [ cached [ \"frame\" ] <= closed_frame ] # n bars we need fetch from persisted db n0 = n - cached . size if n0 > 0 : if cached . size > 0 : end0 = cached [ 0 ][ \"frame\" ] . item () else : end0 = end bars = await cls . _get_persisted_bars_n ( code , frame_type , n0 , end0 ) merged = np . concatenate (( bars , cached )) bars = merged [ - n :] else : bars = cached [ - n :] if fq : bars = cls . qfq ( bars ) return bars except Exception as e : logger . exception ( e ) logger . 
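`get_bars` returns up to `n` bars ending at `end`; suspended sessions are simply missing, so the result may be shorter than `n`, and callers should check the last bar's frame when that matters. `get_bars_in_range` covers an explicit window instead. A sketch (import path for `FrameType` assumed as above):

```python
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock


async def last_bars(code: str = "000001.XSHE"):
    # last 30 daily bars up to now, forward-adjusted, including the unclosed bar
    bars = await Stock.get_bars(code, 30, FrameType.DAY)
    # may be shorter than 30 if the stock was suspended or recently listed
    print(len(bars), bars[-1]["frame"] if len(bars) else None)

    # 30-minute bars inside an explicit window
    start = datetime.datetime(2023, 2, 1, 10, 0)
    end = datetime.datetime(2023, 2, 3, 15, 0)
    min30 = await Stock.get_bars_in_range(code, FrameType.MIN30, start, end)
    return bars, min30
```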
warning ( \"failed to get bars for %s , %s , %s , %s \" , code , n , frame_type , end ) raise @classmethod async def _get_persisted_bars_in_range ( cls , code : str , frame_type : FrameType , start : Frame , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u4ecb\u4e8e[`start`, `end`]\u95f4\u7684\u884c\u60c5\u8bb0\u5f55 \u5982\u679c`start`\u5230`end`\u533a\u95f4\u67d0\u652f\u80a1\u7968\u505c\u724c\uff0c\u5219\u4f1a\u8fd4\u56de\u7a7a\u6570\u7ec4\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 Returns: \u8fd4\u56dedtype\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 \"\"\" end = end or datetime . datetime . now () keep_cols = [ \"_time\" ] + list ( bars_cols [ 1 :]) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( start , end ) . measurement ( measurement ) . fields ( keep_cols ) . tags ({ \"code\" : code }) ) serializer = DataframeDeserializer ( encoding = \"utf-8\" , names = [ \"_\" , \"table\" , \"result\" , \"frame\" , \"code\" , \"amount\" , \"close\" , \"factor\" , \"high\" , \"low\" , \"open\" , \"volume\" , ], engine = \"c\" , skiprows = 0 , header = 0 , usecols = bars_cols , parse_dates = [ \"frame\" ], ) client = get_influx_client () result = await client . query ( flux , serializer ) return result . to_records ( index = False ) . astype ( bars_dtype ) @classmethod async def _get_persisted_bars_n ( cls , code : str , frame_type : FrameType , n : int , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u622a\u6b62\u5230`end`\u7684`n`\u6761\u884c\u60c5\u8bb0\u5f55 \u5982\u679c`end`\u672a\u6307\u5b9a\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 \u57fa\u4e8einfluxdb\u67e5\u8be2\u7684\u7279\u6027\uff0c\u5728\u67e5\u8be2\u524d\uff0c\u5fc5\u987b\u5148\u6839\u636e`end`\u548c`n`\u8ba1\u7b97\u51fa\u8d77\u59cb\u65f6\u95f4\uff0c\u4f46\u5982\u679c\u5728\u6b64\u671f\u95f4\u67d0\u4e9b\u80a1\u7968\u6709\u505c\u724c\uff0c\u5219\u65e0\u6cd5\u8fd4\u56de\u7684\u6570\u636e\u5c06\u5c0f\u4e8e`n`\u3002\u800c\u5982\u679c\u8d77\u59cb\u65f6\u95f4\u8bbe\u7f6e\u5f97\u8db3\u591f\u65e9\uff0c\u867d\u7136\u80fd\u6ee1\u8db3\u8fd4\u56de\u6570\u636e\u6761\u6570\u7684\u8981\u6c42\uff0c\u4f46\u4f1a\u5e26\u6765\u6027\u80fd\u4e0a\u7684\u635f\u5931\u3002\u56e0\u6b64\uff0c\u6211\u4eec\u5728\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\u65f6\uff0c\u4e0d\u662f\u4f7f\u7528`n`\u6765\u8ba1\u7b97\uff0c\u800c\u662f\u4f7f\u7528\u4e86`min(n * 2, n + 20)`\u6765\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\uff0c\u8fd9\u6837\u591a\u6570\u60c5\u51b5\u4e0b\uff0c\u80fd\u591f\u4fdd\u8bc1\u8fd4\u56de\u6570\u636e\u7684\u6761\u6570\u4e3a`n`\u6761\u3002 \u8fd4\u56de\u7684\u6570\u636e\u6309`frame`\u8fdb\u884c\u5347\u5e8f\u6392\u5217\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 frame_type: \u5e27\u7c7b\u578b n: \u8fd4\u56de\u7ed3\u679c\u6570\u91cf end: \u7ed3\u675f\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 Returns: \u8fd4\u56dedtype\u4e3a`bars_dtype`\u7684numpy\u6570\u7ec4 \"\"\" # check is needed since tags accept List as well assert isinstance ( code , str ), \"`code` must be a string\" end = end or datetime . datetime . now () closed_end = tf . floor ( end , frame_type ) start = tf . 
shift ( closed_end , - min ( 2 * n , n + 20 ), frame_type ) keep_cols = [ \"_time\" ] + list ( bars_cols [ 1 :]) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( start , end ) . measurement ( measurement ) . fields ( keep_cols ) . tags ({ \"code\" : code }) . latest ( n ) ) serializer = DataframeDeserializer ( encoding = \"utf-8\" , names = [ \"_\" , \"table\" , \"result\" , \"frame\" , \"code\" , \"amount\" , \"close\" , \"factor\" , \"high\" , \"low\" , \"open\" , \"volume\" , ], engine = \"c\" , skiprows = 0 , header = 0 , usecols = bars_cols , parse_dates = [ \"frame\" ], ) client = get_influx_client () result = await client . query ( flux , serializer ) return result . to_records ( index = False ) . astype ( bars_dtype ) @classmethod async def _batch_get_persisted_bars_n ( cls , codes : List [ str ], frame_type : FrameType , n : int , end : Frame = None ) -> pd . DataFrame : \"\"\"\u4ece\u6301\u4e45\u5316\u5b58\u50a8\u4e2d\u83b7\u53d6`codes`\u6307\u5b9a\u7684\u4e00\u6279\u80a1\u7968\u622a\u6b62`end`\u65f6\u7684`n`\u6761\u8bb0\u5f55\u3002 \u8fd4\u56de\u7684\u6570\u636e\u6309`frame`\u8fdb\u884c\u5347\u5e8f\u6392\u5217\u3002\u5982\u679c\u4e0d\u5b58\u5728\u6ee1\u8db3\u6307\u5b9a\u6761\u4ef6\u7684\u67e5\u8be2\u7ed3\u679c\uff0c\u5c06\u8fd4\u56de\u7a7a\u7684DataFrame\u3002 \u57fa\u4e8einfluxdb\u67e5\u8be2\u7684\u7279\u6027\uff0c\u5728\u67e5\u8be2\u524d\uff0c\u5fc5\u987b\u5148\u6839\u636e`end`\u548c`n`\u8ba1\u7b97\u51fa\u8d77\u59cb\u65f6\u95f4\uff0c\u4f46\u5982\u679c\u5728\u6b64\u671f\u95f4\u67d0\u4e9b\u80a1\u7968\u6709\u505c\u724c\uff0c\u5219\u65e0\u6cd5\u8fd4\u56de\u7684\u6570\u636e\u5c06\u5c0f\u4e8e`n`\u3002\u5982\u679c\u8d77\u59cb\u65f6\u95f4\u8bbe\u7f6e\u7684\u8db3\u591f\u65e9\uff0c\u867d\u7136\u80fd\u6ee1\u8db3\u8fd4\u56de\u6570\u636e\u6761\u6570\u7684\u8981\u6c42\uff0c\u4f46\u4f1a\u5e26\u6765\u6027\u80fd\u4e0a\u7684\u635f\u5931\u3002\u56e0\u6b64\uff0c\u6211\u4eec\u5728\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\u65f6\uff0c\u4e0d\u662f\u4f7f\u7528`n`\u6765\u8ba1\u7b97\uff0c\u800c\u662f\u4f7f\u7528\u4e86`min(n * 2, n + 20)`\u6765\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\uff0c\u8fd9\u6837\u591a\u6570\u60c5\u51b5\u4e0b\uff0c\u80fd\u591f\u4fdd\u8bc1\u8fd4\u56de\u6570\u636e\u7684\u6761\u6570\u4e3a`n`\u6761\u3002 Args: codes: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 frame_type: \u5e27\u7c7b\u578b n: \u8fd4\u56de\u7ed3\u679c\u6570\u91cf end: \u7ed3\u675f\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u5f53\u524d\u65f6\u95f4 Returns: DataFrame, columns\u4e3a`code`, `frame`, `open`, `high`, `low`, `close`, `volume`, `amount`, `factor` \"\"\" max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) if len ( codes ) * min ( n + 20 , 2 * n ) > max_query_size : raise BadParameterError ( f \"codes\u7684\u6570\u91cf\u548cn\u7684\u4e58\u79ef\u8d85\u8fc7\u4e86influxdb\u7684\u6700\u5927\u67e5\u8be2\u6570\u91cf\u9650\u5236 { max_query_size } \" ) end = end or datetime . datetime . now () close_end = tf . floor ( end , frame_type ) begin = tf . 
shift ( close_end , - 1 * min ( n + 20 , n * 2 ), frame_type ) # influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\u7c7b\u4f3c\u4e8eCSV\uff0c\u5176\u5217\u987a\u5e8f\u4e3a_, result_alias, table_seq, _time, tags, fields,\u5176\u4e2dtags\u548cfields\u90fd\u662f\u5347\u5e8f\u6392\u5217 keep_cols = [ \"code\" ] + list ( bars_cols ) names = [ \"_\" , \"result\" , \"table\" , \"frame\" , \"code\" ] # influxdb will return fields in the order of name ascending parallel names . extend ( sorted ( bars_cols [ 1 :])) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( begin , end ) . measurement ( measurement ) . fields ( keep_cols ) . latest ( n ) ) if codes is not None : assert isinstance ( codes , list ), \"`codes` must be a list or None\" flux . tags ({ \"code\" : codes }) deserializer = DataframeDeserializer ( names = names , usecols = keep_cols , encoding = \"utf-8\" , time_col = \"frame\" , engine = \"c\" , ) client = get_influx_client () return await client . query ( flux , deserializer ) @classmethod async def _batch_get_persisted_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , begin : Frame , end : Frame = None ) -> pd . DataFrame : \"\"\"\u4ece\u6301\u4e45\u5316\u5b58\u50a8\u4e2d\u83b7\u53d6`codes`\u6307\u5b9a\u7684\u4e00\u6279\u80a1\u7968\u5728`begin`\u548c`end`\u4e4b\u95f4\u7684\u8bb0\u5f55\u3002 \u8fd4\u56de\u7684\u6570\u636e\u5c06\u6309`frame`\u8fdb\u884c\u5347\u5e8f\u6392\u5217\u3002 \u6ce8\u610f\uff0c\u8fd4\u56de\u7684\u6570\u636e\u6709\u53ef\u80fd\u4e0d\u662f\u7b49\u957f\u7684\uff0c\u56e0\u4e3a\u6709\u7684\u80a1\u7968\u53ef\u80fd\u505c\u724c\u3002 Args: codes: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 frame_type: \u5e27\u7c7b\u578b begin: \u5f00\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 Returns: DataFrame, columns\u4e3a`code`, `frame`, `open`, `high`, `low`, `close`, `volume`, `amount`, `factor` \"\"\" end = end or datetime . datetime . now () n = tf . count_frames ( begin , end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) if len ( codes ) * n > max_query_size : raise BadParameterError ( f \"asked records is { len ( codes ) * n } , which is too large than { max_query_size } \" ) # influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\u7c7b\u4f3c\u4e8eCSV\uff0c\u5176\u5217\u987a\u5e8f\u4e3a_, result_alias, table_seq, _time, tags, fields,\u5176\u4e2dtags\u548cfields\u90fd\u662f\u5347\u5e8f\u6392\u5217 keep_cols = [ \"code\" ] + list ( bars_cols ) names = [ \"_\" , \"result\" , \"table\" , \"frame\" , \"code\" ] # influxdb will return fields in the order of name ascending parallel names . extend ( sorted ( bars_cols [ 1 :])) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( begin , end ) . measurement ( measurement ) . fields ( keep_cols ) ) flux . tags ({ \"code\" : codes }) deserializer = DataframeDeserializer ( names = names , usecols = keep_cols , encoding = \"utf-8\" , time_col = \"frame\" , engine = \"c\" , ) client = get_influx_client () df = await client . 
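The batch helpers above all follow the same query pattern: build a `Flux` statement (bucket, range, measurement, fields, a `code` tag filter, optionally `latest(n)`), then hand the CSV response to a `DataframeDeserializer`. The sketch below reuses only builder methods visible in the source; import paths are omitted and the column layout mirrors `bars_cols`, so treat it as an outline under those assumptions rather than the exact implementation:

```
# Outline of the Flux query pattern shared by the _batch_get_persisted_* helpers.
# Assumes Flux, DataframeDeserializer, get_influx_client and cfg are imported from
# the omicron package as in the surrounding source (paths omitted here).
import datetime


async def query_recent_bars(codes: list, measurement: str, n: int):
    end = datetime.datetime.now()
    start = end - datetime.timedelta(days=30)  # illustrative window only

    flux = (
        Flux()
        .bucket(cfg.influxdb.bucket_name)
        .range(start, end)
        .measurement(measurement)
        .fields(["code", "frame", "open", "high", "low", "close", "volume", "amount", "factor"])
        .tags({"code": codes})  # a list filters on the `code` tag
        .latest(n)              # keep only the last n rows per series
    )

    # influxdb returns CSV whose columns are _, result, table, _time, tags, fields
    # (fields sorted by name), hence the `names` list below.
    deserializer = DataframeDeserializer(
        names=["_", "result", "table", "frame", "code",
               "amount", "close", "factor", "high", "low", "open", "volume"],
        usecols=["code", "frame", "open", "high", "low", "close", "volume", "amount", "factor"],
        encoding="utf-8",
        time_col="frame",
        engine="c",
    )
    client = get_influx_client()
    return await client.query(flux, deserializer)
```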
query ( flux , deserializer ) return df @classmethod async def batch_cache_bars ( cls , frame_type : FrameType , bars : Dict [ str , BarsArray ]): \"\"\"\u7f13\u5b58\u5df2\u6536\u76d8\u7684\u5206\u949f\u7ebf\u548c\u65e5\u7ebf \u5f53\u7f13\u5b58\u65e5\u7ebf\u65f6\uff0c\u4ec5\u9650\u4e8e\u5f53\u65e5\u6536\u76d8\u540e\u7684\u7b2c\u4e00\u6b21\u540c\u6b65\u65f6\u8c03\u7528\u3002 Args: frame_type: \u5e27\u7c7b\u578b bars: \u884c\u60c5\u6570\u636e\uff0c\u5176key\u4e3a\u80a1\u7968\u4ee3\u7801\uff0c\u5176value\u4e3adtype\u4e3a`bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 Raises: RedisError: \u5982\u679c\u5728\u6267\u884c\u8fc7\u7a0b\u4e2d\u53d1\u751f\u9519\u8bef\uff0c\u5219\u629b\u51fa\u4ee5\u6b64\u5f02\u5e38\u4e3a\u57fa\u7c7b\u7684\u5404\u79cd\u5f02\u5e38\uff0c\u5177\u4f53\u53c2\u8003aioredis\u76f8\u5173\u6587\u6863\u3002 \"\"\" if frame_type == FrameType . DAY : await cls . batch_cache_unclosed_bars ( frame_type , bars ) return pl = cache . security . pipeline () for code , bars in bars . items (): key = f \"bars: { frame_type . value } : { code } \" for bar in bars : frame = tf . time2int ( bar [ \"frame\" ] . item ()) val = [ * bar ] val [ 0 ] = frame pl . hset ( key , frame , \",\" . join ( map ( str , val ))) await pl . execute () @classmethod async def batch_cache_unclosed_bars ( cls , frame_type : FrameType , bars : Dict [ str , BarsArray ] ): # pragma: no cover \"\"\"\u7f13\u5b58\u672a\u6536\u76d8\u76845\u300115\u300130\u300160\u5206\u949f\u7ebf\u53ca\u65e5\u7ebf\u3001\u5468\u7ebf\u3001\u6708\u7ebf Args: frame_type: \u5e27\u7c7b\u578b bars: \u884c\u60c5\u6570\u636e\uff0c\u5176key\u4e3a\u80a1\u7968\u4ee3\u7801\uff0c\u5176value\u4e3adtype\u4e3a`bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002bars\u4e0d\u80fd\u4e3aNone\uff0c\u6216\u8005empty\u3002 Raise: RedisError: \u5982\u679c\u5728\u6267\u884c\u8fc7\u7a0b\u4e2d\u53d1\u751f\u9519\u8bef\uff0c\u5219\u629b\u51fa\u4ee5\u6b64\u5f02\u5e38\u4e3a\u57fa\u7c7b\u7684\u5404\u79cd\u5f02\u5e38\uff0c\u5177\u4f53\u53c2\u8003aioredis\u76f8\u5173\u6587\u6863\u3002 \"\"\" pl = cache . security . pipeline () key = f \"bars: { frame_type . value } :unclosed\" convert = tf . time2int if frame_type in tf . minute_level_frames else tf . date2int for code , bar in bars . items (): val = [ * bar [ 0 ]] val [ 0 ] = convert ( bar [ \"frame\" ][ 0 ] . item ()) # \u65f6\u95f4\u8f6c\u6362 pl . hset ( key , code , \",\" . join ( map ( str , val ))) await pl . execute () @classmethod async def reset_cache ( cls ): \"\"\"\u6e05\u9664\u7f13\u5b58\u7684\u884c\u60c5\u6570\u636e\"\"\" try : for ft in itertools . chain ( tf . minute_level_frames , tf . day_level_frames ): keys = await cache . security . keys ( f \"bars: { ft . value } :*\" ) if keys : await cache . security . delete ( * keys ) finally : cls . _is_cache_empty = True @classmethod def _deserialize_cached_bars ( cls , raw : List [ str ], ft : FrameType ) -> BarsArray : \"\"\"\u4eceredis\u4e2d\u53cd\u5e8f\u5217\u5316\u7f13\u5b58\u7684\u6570\u636e \u5982\u679c`raw`\u7a7a\u6570\u7ec4\u6216\u8005\u5143\u7d20\u4e3a`None`\uff0c\u5219\u8fd4\u56de\u7a7a\u6570\u7ec4\u3002 Args: raw: redis\u4e2d\u7684\u7f13\u5b58\u6570\u636e ft: \u5e27\u7c7b\u578b sort: \u662f\u5426\u9700\u8981\u91cd\u65b0\u6392\u5e8f\uff0c\u7f3a\u7701\u4e3aFalse Returns: BarsArray: \u884c\u60c5\u6570\u636e \"\"\" fix_date = False if ft in tf . minute_level_frames : convert = tf . int2time else : convert = tf . 
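The caching methods above write one Redis hash field per bar: closed bars go under `bars:{frame_type.value}:{code}` keyed by the frame as an integer, unclosed bars under `bars:{frame_type.value}:unclosed` keyed by the code, and the value is the bar's fields joined with commas. A self-contained sketch of one such entry (all numbers made up):

```
# Cache layout used by cache_bars / batch_cache_bars (closed bars) and
# cache_unclosed_bars / batch_cache_unclosed_bars (unclosed bars).
# The frame is converted to an int (tf.date2int / tf.time2int) before storing.
frame_type_value = "1d"   # e.g. FrameType.DAY.value
code = "000001.XSHE"
bar = [20230104, 10.2, 10.5, 10.1, 10.4, 1000000.0, 10200000.0, 1.23]  # frame, o, h, l, c, v, amount, factor

key = f"bars:{frame_type_value}:{code}"   # closed-bar hash for this security
field = bar[0]                            # the int frame is the hash field
value = ",".join(map(str, bar))           # comma-joined payload, parsed back by _deserialize_cached_bars
print(key, field, value)
# bars:1d:000001.XSHE 20230104 20230104,10.2,10.5,10.1,10.4,1000000.0,10200000.0,1.23
```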
int2date fix_date = True recs = [] # it's possible to treat raw as csv and use pandas to parse, however, the performance is 10 times worse than this method for raw_rec in raw : if raw_rec is None : continue f , o , h , l , c , v , m , fac = raw_rec . split ( \",\" ) if fix_date : f = f [: 8 ] recs . append ( ( convert ( f ), float ( o ), float ( h ), float ( l ), float ( c ), float ( v ), float ( m ), float ( fac ), ) ) return np . array ( recs , dtype = bars_dtype ) @classmethod async def _batch_get_cached_bars_n ( cls , frame_type : FrameType , n : int , end : Frame = None , codes : List [ str ] = None ) -> BarsPanel : \"\"\"\u6279\u91cf\u83b7\u53d6\u5728cache\u4e2d\u622a\u6b62`end`\u7684`n`\u4e2abars\u3002 \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: frame_type: \u65f6\u95f4\u5e27\u7c7b\u578b n: \u8fd4\u56de\u8bb0\u5f55\u6761\u6570 codes: \u8bc1\u5238\u4ee3\u7801\u5217\u8868 end: \u622a\u6b62\u65f6\u95f4, \u5982\u679c\u4e3aNone Returns: BarsPanel: \u884c\u60c5\u6570\u636e \"\"\" # \u8c03\u7528\u8005\u81ea\u5df1\u4fdd\u8bc1end\u5728\u7f13\u5b58\u4e2d cols = list ( bars_dtype_with_code . names ) if frame_type in tf . day_level_frames : key = f \"bars: { frame_type . value } :unclosed\" if codes is None : recs = await cache . security . hgetall ( key ) codes = list ( recs . keys ()) recs = recs . values () else : recs = await cache . security . hmget ( key , * codes ) barss = cls . _deserialize_cached_bars ( recs , frame_type ) if barss . size > 0 : if len ( barss ) != len ( codes ): # issue 39, \u5982\u679c\u67d0\u652f\u7968\u5f53\u5929\u505c\u724c\uff0c\u5219\u7f13\u5b58\u4e2d\u5c06\u4e0d\u4f1a\u6709\u5b83\u7684\u8bb0\u5f55\uff0c\u6b64\u65f6\u9700\u8981\u79fb\u9664\u5176\u4ee3\u7801 codes = [ codes [ i ] for i , item in enumerate ( recs ) if item is not None ] barss = numpy_append_fields ( barss , \"code\" , codes , [( \"code\" , \"O\" )]) return barss [ cols ] . astype ( bars_dtype_with_code ) else : return np . array ([], dtype = bars_dtype_with_code ) else : end = end or datetime . datetime . now () close_end = tf . floor ( end , frame_type ) all_bars = [] if codes is None : keys = await cache . security . keys ( f \"bars: { frame_type . value } :*[^unclosed]\" ) codes = [ key . split ( \":\" )[ - 1 ] for key in keys ] else : keys = [ f \"bars: { frame_type . value } : { code } \" for code in codes ] if frame_type != FrameType . MIN1 : unclosed = await cache . security . hgetall ( f \"bars: { frame_type . value } :unclosed\" ) else : unclosed = {} pl = cache . security . pipeline () frames = tf . get_frames_by_count ( close_end , n , frame_type ) for key in keys : pl . hmget ( key , * frames ) all_closed = await pl . execute () for code , raw in zip ( codes , all_closed ): raw . append ( unclosed . get ( code )) barss = cls . _deserialize_cached_bars ( raw , frame_type ) barss = numpy_append_fields ( barss , \"code\" , [ code ] * len ( barss ), [( \"code\" , \"O\" )] ) barss = barss [ cols ] . astype ( bars_dtype_with_code ) all_bars . append ( barss [ barss [ \"frame\" ] <= end ][ - n :]) try : return np . concatenate ( all_bars ) except ValueError as e : logger . exception ( e ) return np . 
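Going the other way, `_deserialize_cached_bars` splits the comma-joined payload, converts the integer frame back to a date or time, and builds a `bars_dtype` record. The sketch below uses a local stand-in for `coretypes.bars_dtype` so it runs without omicron installed; the field order follows the tuple built in the source:

```
# Stand-alone sketch of the parsing done by _deserialize_cached_bars for a
# day-level record. bars_dtype_like is only an illustration of coretypes.bars_dtype.
import datetime
import numpy as np

bars_dtype_like = np.dtype([
    ("frame", "O"), ("open", "f4"), ("high", "f4"), ("low", "f4"),
    ("close", "f4"), ("volume", "f8"), ("amount", "f8"), ("factor", "f4"),
])

raw = "20230104,10.2,10.5,10.1,10.4,1000000.0,10200000.0,1.23"
f, o, h, l, c, v, m, fac = raw.split(",")
frame = datetime.datetime.strptime(f[:8], "%Y%m%d").date()  # day-level: keep only YYYYMMDD
rec = np.array(
    [(frame, float(o), float(h), float(l), float(c), float(v), float(m), float(fac))],
    dtype=bars_dtype_like,
)
print(rec["frame"][0], rec["close"][0])  # 2023-01-04 10.4
```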
array ([], dtype = bars_dtype_with_code ) @classmethod async def _get_cached_bars_n ( cls , code : str , n : int , frame_type : FrameType , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u7f13\u5b58\u4e2d\u83b7\u53d6\u6307\u5b9a\u4ee3\u7801\u7684\u884c\u60c5\u6570\u636e \u5b58\u53d6\u903b\u8f91\u662f\uff0c\u4ece`end`\u6307\u5b9a\u7684\u65f6\u95f4\u5411\u524d\u53d6`n`\u6761\u8bb0\u5f55\u3002`end`\u4e0d\u5e94\u8be5\u5927\u4e8e\u5f53\u524d\u7cfb\u7edf\u65f6\u95f4\uff0c\u5e76\u4e14\u6839\u636e`end`\u548c`n`\u8ba1\u7b97\u51fa\u6765\u7684\u8d77\u59cb\u65f6\u95f4\u5e94\u8be5\u5728\u7f13\u5b58\u4e2d\u5b58\u5728\u3002\u5426\u5219\uff0c\u4e24\u79cd\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u8bb0\u5f55\u6570\u90fd\u5c06\u5c0f\u4e8e`n`\u3002 \u5982\u679c`end`\u4e0d\u5904\u4e8e`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u7ed3\u675f\u4f4d\u7f6e\uff0c\u4e14\u5c0f\u4e8e\u5f53\u524d\u5df2\u7f13\u5b58\u7684\u672a\u6536\u76d8bar\u65f6\u95f4\uff0c\u5219\u4f1a\u8fd4\u56de\u524d\u4e00\u4e2a\u5df2\u6536\u76d8\u7684\u6570\u636e\uff0c\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u4e2d\u8fd8\u5c06\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e\u3002 args: code: \u8bc1\u5238\u4ee3\u7801\uff0c\u6bd4\u5982000001.XSHE n: \u8fd4\u56de\u8bb0\u5f55\u6761\u6570 frame_type: \u5e27\u7c7b\u578b end: \u7ed3\u675f\u5e27\uff0c\u5982\u679c\u4e3aNone\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 returns: \u5143\u7d20\u7c7b\u578b\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002\u5982\u679c\u6ca1\u6709\u6570\u636e\uff0c\u5219\u8fd4\u56de\u7a7andarray\u3002 \"\"\" # 50 times faster than arrow.now().floor('second') end = end or datetime . datetime . now () . replace ( second = 0 , microsecond = 0 ) if frame_type in tf . minute_level_frames : cache_start = tf . first_min_frame ( end . date (), frame_type ) closed = tf . floor ( end , frame_type ) frames = ( tf . get_frames ( cache_start , closed , frame_type ))[ - n :] if len ( frames ) == 0 : recs = np . empty ( shape = ( 0 ,), dtype = bars_dtype ) else : key = f \"bars: { frame_type . value } : { code } \" recs = await cache . security . hmget ( key , * frames ) recs = cls . _deserialize_cached_bars ( recs , frame_type ) if closed < end : # for unclosed key = f \"bars: { frame_type . value } :unclosed\" unclosed = await cache . security . hget ( key , code ) unclosed = cls . _deserialize_cached_bars ([ unclosed ], frame_type ) if len ( unclosed ) == 0 : return recs [ - n :] if end < unclosed [ 0 ][ \"frame\" ] . item (): # \u5982\u679cunclosed\u4e3a9:36, \u8c03\u7528\u8005\u8981\u6c42\u53d69:29\u76845m\u6570\u636e\uff0c\u5219\u53d6\u5230\u7684unclosed\u4e0d\u5408\u8981\u6c42\uff0c\u629b\u5f03\u3002\u4f3c\u4e4e\u6ca1\u6709\u66f4\u597d\u7684\u65b9\u6cd5\u68c0\u6d4bend\u4e0eunclosed\u7684\u5173\u7cfb return recs [ - n :] else : bars = np . concatenate (( recs , unclosed )) return bars [ - n :] else : return recs [ - n :] else : # \u65e5\u7ebf\u53ca\u4ee5\u4e0a\u7ea7\u522b\uff0c\u4ec5\u5728\u7f13\u5b58\u4e2d\u5b58\u5728\u672a\u6536\u76d8\u6570\u636e key = f \"bars: { frame_type . value } :unclosed\" rec = await cache . security . hget ( key , code ) return cls . 
_deserialize_cached_bars ([ rec ], frame_type ) @classmethod async def cache_bars ( cls , code : str , frame_type : FrameType , bars : BarsArray ): \"\"\"\u5c06\u5f53\u671f\u5df2\u6536\u76d8\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58 Note: \u5f53\u524d\u53ea\u7f13\u5b581\u5206\u949f\u6570\u636e\u3002\u5176\u5b83\u5206\u949f\u6570\u636e\uff0c\u90fd\u5728\u8c03\u7528\u65f6\uff0c\u901a\u8fc7resample\u4e34\u65f6\u5408\u6210\u3002 \u884c\u60c5\u6570\u636e\u7f13\u5b58\u5728\u4ee5`bars:{frame_type.value}:{code}`\u4e3akey, {frame}\u4e3afield\u7684hashmap\u4e2d\u3002 Args: code: the full qualified code of a security or index frame_type: frame type of the bars bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype` Raises: RedisError: if redis operation failed, see documentation of aioredis \"\"\" # \u8f6c\u6362\u65f6\u95f4\u4e3aint convert = tf . time2int if frame_type in tf . minute_level_frames else tf . date2int key = f \"bars: { frame_type . value } : { code } \" pl = cache . security . pipeline () for bar in bars : val = [ * bar ] val [ 0 ] = convert ( bar [ \"frame\" ] . item ()) pl . hset ( key , val [ 0 ], \",\" . join ( map ( str , val ))) await pl . execute () @classmethod async def cache_unclosed_bars ( cls , code : str , frame_type : FrameType , bars : BarsArray ): # pragma: no cover \"\"\"\u5c06\u672a\u7ed3\u675f\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58 \u672a\u7ed3\u675f\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58\u5728\u4ee5`bars:{frame_type.value}:unclosed`\u4e3akey, {code}\u4e3afield\u7684hashmap\u4e2d\u3002 \u5c3d\u7ba1`bars`\u88ab\u58f0\u660e\u4e3aBarsArray\uff0c\u4f46\u5b9e\u9645\u4e0a\u5e94\u8be5\u53ea\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002 Args: code: the full qualified code of a security or index frame_type: frame type of the bars bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype` Raises: RedisError: if redis operation failed, see documentation of aioredis \"\"\" converter = tf . time2int if frame_type in tf . minute_level_frames else tf . date2int assert len ( bars ) == 1 , \"unclosed bars should only have one record\" key = f \"bars: { frame_type . value } :unclosed\" bar = bars [ 0 ] val = [ * bar ] val [ 0 ] = converter ( bar [ \"frame\" ] . item ()) await cache . security . hset ( key , code , \",\" . join ( map ( str , val ))) @classmethod async def persist_bars ( cls , frame_type : FrameType , bars : Union [ Dict [ str , BarsArray ], BarsArray , pd . DataFrame ], ): \"\"\"\u5c06\u884c\u60c5\u6570\u636e\u6301\u4e45\u5316 \u5982\u679c`bars`\u7c7b\u578b\u4e3aDict,\u5219key\u4e3a`code`\uff0cvalue\u4e3a`bars`\u3002\u5982\u679c\u5176\u7c7b\u578b\u4e3aBarsArray\u6216\u8005pd.DataFrame\uff0c\u5219`bars`\u5404\u5217\u5b57\u6bb5\u5e94\u8be5\u4e3a`coretypes.bars_dtype` + (\"code\", \"O\")\u6784\u6210\u3002 Args: frame_type: the frame type of the bars bars: the bars to be persisted Raises: InfluxDBWriteError: if influxdb write failed \"\"\" client = get_influx_client () measurement = cls . _measurement_name ( frame_type ) logger . info ( \"persisting bars to influxdb: %s , %d secs\" , measurement , len ( bars )) if isinstance ( bars , dict ): for code , value in bars . items (): await client . save ( value , measurement , global_tags = { \"code\" : code }, time_key = \"frame\" ) else : await client . 
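`cache_bars` and `persist_bars` both take `bars_dtype` arrays; a typical write path caches the freshly closed minute bars in Redis and persists the same data to influxdb. A hedged usage sketch — it needs an initialized omicron runtime with Redis and influxdb connections, and the `FrameType` import path is assumed:

```
# Hedged usage sketch, not runnable standalone: requires an initialized omicron
# runtime. `bars` is assumed to be a numpy structured array of dtype
# coretypes.bars_dtype holding closed 1-minute bars for one security.
from coretypes import FrameType  # assumed import path


async def store_minute_bars(code: str, bars):
    # keep the closed 1-minute bars in redis for intraday reads
    await Stock.cache_bars(code, FrameType.MIN1, bars)
    # write the same bars to influxdb for long-term queries;
    # persist_bars also accepts a {code: bars} dict covering many securities at once
    await Stock.persist_bars(FrameType.MIN1, {code: bars})
```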
save ( bars , measurement , tag_keys = [ \"code\" ], time_key = \"frame\" ) @classmethod def resample ( cls , bars : BarsArray , from_frame : FrameType , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06\u539f\u6765\u4e3a`from_frame`\u7684\u884c\u60c5\u6570\u636e\u8f6c\u6362\u4e3a`to_frame`\u7684\u884c\u60c5\u6570\u636e \u5982\u679c`to_frame`\u4e3a\u65e5\u7ebf\u6216\u8005\u5206\u949f\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u5206\u949f\u7ebf\uff1b\u5982\u679c`to_frame`\u4e3a\u5468\u4ee5\u4e0a\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u65e5\u7ebf\u3002\u5176\u5b83\u7ea7\u522b\u4e4b\u95f4\u7684\u8f6c\u6362\u4e0d\u652f\u6301\u3002 \u5982\u679c`from_frame`\u4e3a1\u5206\u949f\u7ebf\uff0c\u5219\u5fc5\u987b\u4ece9\uff1a31\u8d77\u3002 Args: bars (BarsArray): \u884c\u60c5\u6570\u636e from_frame (FrameType): \u8f6c\u6362\u524d\u7684FrameType to_frame (FrameType): \u8f6c\u6362\u540e\u7684FrameType Returns: BarsArray: \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e \"\"\" if from_frame == FrameType . MIN1 : return cls . _resample_from_min1 ( bars , to_frame ) elif from_frame == FrameType . DAY : # pragma: no cover return cls . _resample_from_day ( bars , to_frame ) else : # pragma: no cover raise TypeError ( f \"unsupported from_frame: { from_frame } \" ) @classmethod def _measurement_name ( cls , frame_type ): return f \"stock_bars_ { frame_type . value } \" @classmethod def _resample_from_min1 ( cls , bars : BarsArray , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06`bars`\u4ece1\u5206\u949f\u7ebf\u8f6c\u6362\u4e3a`to_frame`\u7684\u884c\u60c5\u6570\u636e \u91cd\u91c7\u6837\u540e\u7684\u6570\u636e\u53ea\u5305\u542bframe, open, high, low, close, volume, amount, factor\uff0c\u65e0\u8bba\u4f20\u5165\u6570\u636e\u662f\u5426\u8fd8\u6709\u522b\u7684\u5b57\u6bb5\uff0c\u5b83\u4eec\u90fd\u5c06\u88ab\u4e22\u5f03\u3002 resampling 240\u6839\u5206\u949f\u7ebf\u52305\u5206\u949f\u5927\u7ea6\u9700\u8981100\u5fae\u79d2\u3002 TODO\uff1a \u5982\u679c`bars`\u4e2d\u5305\u542bnan\u600e\u4e48\u5904\u7406\uff1f \"\"\" if bars [ 0 ][ \"frame\" ] . item () . minute != 31 : raise ValueError ( \"resampling from 1min must start from 9:31\" ) if to_frame not in ( FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , FrameType . DAY , ): raise ValueError ( f \"unsupported to_frame: { to_frame } \" ) bins_len = { FrameType . MIN5 : 5 , FrameType . MIN15 : 15 , FrameType . MIN30 : 30 , FrameType . MIN60 : 60 , FrameType . DAY : 240 , }[ to_frame ] bins = len ( bars ) // bins_len npart1 = bins * bins_len part1 = bars [: npart1 ] . reshape (( - 1 , bins_len )) part2 = bars [ npart1 :] open_pos = np . arange ( bins ) * bins_len close_pos = np . arange ( 1 , bins + 1 ) * bins_len - 1 if len ( bars ) > bins_len * bins : close_pos = np . append ( close_pos , len ( bars ) - 1 ) resampled = np . empty (( bins + 1 ,), dtype = bars_dtype ) else : resampled = np . empty (( bins ,), dtype = bars_dtype ) resampled [: bins ][ \"open\" ] = bars [ open_pos ][ \"open\" ] resampled [: bins ][ \"high\" ] = np . max ( part1 [ \"high\" ], axis = 1 ) resampled [: bins ][ \"low\" ] = np . min ( part1 [ \"low\" ], axis = 1 ) resampled [: bins ][ \"volume\" ] = np . sum ( part1 [ \"volume\" ], axis = 1 ) resampled [: bins ][ \"amount\" ] = np . sum ( part1 [ \"amount\" ], axis = 1 ) if len ( part2 ): resampled [ - 1 ][ \"open\" ] = part2 [ \"open\" ][ 0 ] resampled [ - 1 ][ \"high\" ] = np . max ( part2 [ \"high\" ]) resampled [ - 1 ][ \"low\" ] = np . 
min ( part2 [ \"low\" ]) resampled [ - 1 ][ \"volume\" ] = np . sum ( part2 [ \"volume\" ]) resampled [ - 1 ][ \"amount\" ] = np . sum ( part2 [ \"amount\" ]) cols = [ \"frame\" , \"close\" , \"factor\" ] resampled [ cols ] = bars [ close_pos ][ cols ] if to_frame == FrameType . DAY : resampled [ \"frame\" ] = bars [ - 1 ][ \"frame\" ] . item () . date () return resampled @classmethod def _resample_from_day ( cls , bars : BarsArray , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06`bars`\u4ece\u65e5\u7ebf\u8f6c\u6362\u6210`to_frame`\u7684\u884c\u60c5\u6570\u636e Args: bars (BarsArray): [description] to_frame (FrameType): [description] Returns: \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e \"\"\" rules = { \"frame\" : \"last\" , \"open\" : \"first\" , \"high\" : \"max\" , \"low\" : \"min\" , \"close\" : \"last\" , \"volume\" : \"sum\" , \"amount\" : \"sum\" , \"factor\" : \"last\" , } if to_frame == FrameType . WEEK : freq = \"W-Fri\" elif to_frame == FrameType . MONTH : freq = \"M\" elif to_frame == FrameType . QUARTER : freq = \"Q\" elif to_frame == FrameType . YEAR : freq = \"A\" else : raise ValueError ( f \"unsupported to_frame: { to_frame } \" ) df = pd . DataFrame ( bars ) df . index = pd . to_datetime ( bars [ \"frame\" ]) df = df . resample ( freq ) . agg ( rules ) bars = np . array ( df . to_records ( index = False ), dtype = bars_dtype ) # filter out data like (None, nan, ...) return bars [ np . isfinite ( bars [ \"close\" ])] @classmethod async def _get_price_limit_in_cache ( cls , code : str , begin : datetime . date , end : datetime . date ): date_str = await cache . _security_ . get ( TRADE_PRICE_LIMITS_DATE ) if date_str : date_in_cache = arrow . get ( date_str ) . date () if date_in_cache < begin or date_in_cache > end : return None else : return None dtype = [( \"frame\" , \"O\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" )] hp = await cache . _security_ . hget ( TRADE_PRICE_LIMITS , f \" { code } .high_limit\" ) lp = await cache . _security_ . hget ( TRADE_PRICE_LIMITS , f \" { code } .low_limit\" ) if hp is None or lp is None : return None else : return np . array ([( date_in_cache , hp , lp )], dtype = dtype ) @classmethod async def get_trade_price_limits ( cls , code : str , begin : Frame , end : Frame ) -> BarsArray : \"\"\"\u4eceinfluxdb\u548ccache\u4e2d\u83b7\u53d6\u4e2a\u80a1\u5728[begin, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u4ef7\u3002 \u6da8\u8dcc\u505c\u4ef7\u53ea\u6709\u65e5\u7ebf\u6570\u636e\u624d\u6709\uff0c\u56e0\u6b64\uff0cFrameType\u56fa\u5b9a\u4e3aFrameType.DAY\uff0c \u5f53\u5929\u7684\u6570\u636e\u5b58\u653e\u4e8eredis\uff0c\u5982\u679c\u67e5\u8be2\u65e5\u671f\u5305\u542b\u5f53\u5929\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u4ececache\u4e2d\u8bfb\u53d6\u5e76\u8ffd\u52a0\u5230\u7ed3\u679c\u4e2d Args: code : \u4e2a\u80a1\u4ee3\u7801 begin : \u5f00\u59cb\u65e5\u671f end : \u7ed3\u675f\u65e5\u671f Returns: dtype\u4e3a[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]\u7684numpy\u6570\u7ec4 \"\"\" cols = [ \"_time\" , \"high_limit\" , \"low_limit\" ] dtype = [( \"frame\" , \"O\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" )] if isinstance ( begin , datetime . datetime ): begin = begin . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate if isinstance ( end , datetime . datetime ): end = end . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate data_in_cache = await cls . _get_price_limit_in_cache ( code , begin , end ) client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . 
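A short usage sketch of `resample` as documented above: 1-minute bars (which must start at 09:31) can be rolled up to 5/15/30/60-minute or day bars, and day bars to week or higher. It assumes `min1_bars` and `day_bars` are existing `bars_dtype` arrays and that `FrameType` is importable as elsewhere in the code base:

```
# Hedged usage sketch of Stock.resample. 240 one-minute bars yield 48 five-minute
# bars; a trailing partial bin would be emitted as one extra (unclosed) bar.
from coretypes import FrameType  # assumed import path

bars_5m = Stock.resample(min1_bars, from_frame=FrameType.MIN1, to_frame=FrameType.MIN5)
bars_1w = Stock.resample(day_bars, from_frame=FrameType.DAY, to_frame=FrameType.WEEK)
```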
bucket ( client . _bucket ) . measurement ( measurement ) . range ( begin , end ) . tags ({ \"code\" : code }) . fields ( cols ) . sort ( \"_time\" ) ) ds = NumpyDeserializer ( dtype , use_cols = cols , converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if data_in_cache : result = np . concatenate ([ result , data_in_cache ]) return result @classmethod async def reset_price_limits_cache ( cls , cache_only : bool , dt : datetime . date = None ): if cache_only is False : date_str = await cache . _security_ . get ( TRADE_PRICE_LIMITS_DATE ) if not date_str : return # skip clear action if date not found in cache date_in_cache = arrow . get ( date_str ) . date () if dt is None or date_in_cache != dt : # \u66f4\u65b0\u7684\u65f6\u95f4\u548ccache\u7684\u65f6\u95f4\u76f8\u540c\uff0c\u5219\u6e05\u9664cache return # skip clear action await cache . _security_ . delete ( TRADE_PRICE_LIMITS ) await cache . _security_ . delete ( TRADE_PRICE_LIMITS_DATE ) @classmethod async def save_trade_price_limits ( cls , price_limits : LimitPriceOnlyBarsArray , to_cache : bool ): \"\"\"\u4fdd\u5b58\u6da8\u8dcc\u505c\u4ef7 Args: price_limits: \u8981\u4fdd\u5b58\u7684\u6da8\u8dcc\u505c\u4ef7\u683c\u6570\u636e\u3002 to_cache: \u662f\u4fdd\u5b58\u5230\u7f13\u5b58\u4e2d\uff0c\u8fd8\u662f\u4fdd\u5b58\u5230\u6301\u4e45\u5316\u5b58\u50a8\u4e2d \"\"\" if len ( price_limits ) == 0 : return if to_cache : # \u6bcf\u4e2a\u4ea4\u6613\u65e5\u4e0a\u53489\u70b9\u66f4\u65b0\u4e24\u6b21 pl = cache . _security_ . pipeline () for row in price_limits : # .item convert np.float64 to python float pl . hset ( TRADE_PRICE_LIMITS , f \" { row [ 'code' ] } .high_limit\" , row [ \"high_limit\" ] . item (), ) pl . hset ( TRADE_PRICE_LIMITS , f \" { row [ 'code' ] } .low_limit\" , row [ \"low_limit\" ] . item (), ) dt = price_limits [ - 1 ][ \"frame\" ] pl . set ( TRADE_PRICE_LIMITS_DATE , dt . strftime ( \"%Y-%m- %d \" )) await pl . execute () else : # to influxdb\uff0c \u6bcf\u4e2a\u4ea4\u6613\u65e5\u7684\u7b2c\u4e8c\u5929\u65e9\u4e0a2\u70b9\u4fdd\u5b58 client = get_influx_client () await client . save ( price_limits , cls . _measurement_name ( FrameType . DAY ), tag_keys = \"code\" , time_key = \"frame\" , ) @classmethod async def trade_price_limit_flags ( cls , code : str , start : datetime . date , end : datetime . date ) -> Tuple [ List [ bool ]]: \"\"\"\u83b7\u53d6\u4e2a\u80a1\u5728[start, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u672c\u51fd\u6570\u8fd4\u56de\u7684\u5e8f\u5217\u5728\u80a1\u7968\u6709\u505c\u724c\u7684\u60c5\u51b5\u4e0b\uff0c\u5c06\u4e0d\u80fd\u4e0e[start, end]\u4e00\u4e00\u5bf9\u5e94\u3002 Args: code: \u4e2a\u80a1\u4ee3\u7801 start: \u5f00\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u6da8\u8dcc\u505c\u6807\u5fd7\u5217\u8868(buy, sell) \"\"\" cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ] client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . bucket ( client . _bucket ) . measurement ( measurement ) . range ( start , end ) . tags ({ \"code\" : code }) . fields ( cols ) . 
sort ( \"_time\" ) ) dtype = [ ( \"frame\" , \"O\" ), ( \"close\" , \"f4\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" ), ] ds = NumpyDeserializer ( dtype , use_cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ], converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if result . size == 0 : return np . array ([], dtype = dtype ) return ( array_price_equal ( result [ \"close\" ], result [ \"high_limit\" ]), array_price_equal ( result [ \"close\" ], result [ \"low_limit\" ]), ) @classmethod async def trade_price_limit_flags_ex ( cls , code : str , start : datetime . date , end : datetime . date ) -> Dict [ datetime . date , Tuple [ bool , bool ]]: \"\"\"\u83b7\u53d6\u80a1\u7968`code`\u5728`[start, end]`\u533a\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u5982\u679cend\u4e3a\u5f53\u5929\uff0c\u6ce8\u610f\u5728\u672a\u6536\u76d8\u4e4b\u524d\uff0c\u8fd9\u4e2a\u6da8\u8dcc\u505c\u6807\u5fd7\u90fd\u662f\u4e0d\u7a33\u5b9a\u7684 Args: code: \u80a1\u7968\u4ee3\u7801 start: \u8d77\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u4ee5\u65e5\u671f\u4e3akey\uff0c\uff08\u6da8\u505c\uff0c\u8dcc\u505c\uff09\u4e3a\u503c\u7684dict \"\"\" limit_prices = await cls . get_trade_price_limits ( code , start , end ) bars = await Stock . get_bars_in_range ( code , FrameType . DAY , start = start , end = end , fq = False ) close = bars [ \"close\" ] results = {} # aligned = True for i in range ( len ( bars )): if bars [ i ][ \"frame\" ] . item () . date () != limit_prices [ i ][ \"frame\" ]: # aligned = False logger . warning ( \"\u6570\u636e\u540c\u6b65\u9519\u8bef\uff0c\u6da8\u8dcc\u505c\u4ef7\u683c\u4e0e\u6536\u76d8\u4ef7\u65f6\u95f4\u4e0d\u4e00\u81f4: %s , %s \" , code , bars [ i ][ \"frame\" ]) break results [ limit_prices [ i ][ \"frame\" ]] = ( price_equal ( limit_prices [ i ][ \"high_limit\" ], close [ i ]), price_equal ( limit_prices [ i ][ \"low_limit\" ], close [ i ]), ) # if not aligned: # bars = bars[i:] # limit_prices = limit_prices[i:] # for frame in bars[\"frame\"]: # frame = frame.item().date() # close = bars[bars[\"frame\"].item().date() == frame][\"close\"].item() # high = limit_prices[limit_prices[\"frame\"] == frame][\"high_limit\"].item() # low = limit_prices[limit_prices[\"frame\"] == frame][\"low_limit\"].item() # results[frame] = ( # price_equal(high, close), # price_equal(low, close) # ) return results @classmethod async def get_latest_price ( cls , codes : Iterable [ str ]) -> List [ str ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\u7684\u6700\u65b0\u4ef7\u683c\uff08\u4ea4\u6613\u65e5\u5f53\u5929\uff09\uff0c\u6682\u4e0d\u5305\u62ec\u6307\u6570 \u4ef7\u683c\u6570\u636e\u6bcf5\u79d2\u66f4\u65b0\u4e00\u6b21\uff0c\u63a5\u53d7\u591a\u53ea\u80a1\u7968\u67e5\u8be2\uff0c\u8fd4\u56de\u6700\u540e\u7f13\u5b58\u7684\u4ef7\u683c Args: codes: \u4ee3\u7801\u5217\u8868 Returns: \u8fd4\u56de\u4e00\u4e2aList\uff0c\u4ef7\u683c\u662f\u5b57\u7b26\u5f62\u5f0f\u7684\u6d6e\u70b9\u6570\u3002 \"\"\" if not codes : return [] _raw_code_list = [] for code_str in codes : code , _ = code_str . split ( \".\" ) _raw_code_list . append ( code ) _converted_data = [] raw_data = await cache . feature . hmget ( TRADE_LATEST_PRICE , * _raw_code_list ) for _data in raw_data : if _data is None : _converted_data . append ( _data ) else : _converted_data . 
append ( float ( _data )) return _converted_data security_type : SecurityType property readonly \u00b6 \u8fd4\u56de\u8bc1\u5238\u7c7b\u578b Returns: Type Description SecurityType [description] batch_cache_bars ( frame_type , bars ) async classmethod \u00b6 \u7f13\u5b58\u5df2\u6536\u76d8\u7684\u5206\u949f\u7ebf\u548c\u65e5\u7ebf \u5f53\u7f13\u5b58\u65e5\u7ebf\u65f6\uff0c\u4ec5\u9650\u4e8e\u5f53\u65e5\u6536\u76d8\u540e\u7684\u7b2c\u4e00\u6b21\u540c\u6b65\u65f6\u8c03\u7528\u3002 Parameters: Name Type Description Default frame_type FrameType \u5e27\u7c7b\u578b required bars Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u65e5\u7ebf\u7ea7\u522b\uff08\u53731d, 1w, 1M)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_day_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" today = datetime . datetime . now () . date () # \u65e5\u7ebf\uff0cend\u4e0d\u7b49\u4e8e\u6700\u540e\u4ea4\u6613\u65e5\uff0c\u6b64\u65f6\u5df2\u65e0\u7f13\u5b58 if frame_type == FrameType . DAY and end == tf . floor ( today , frame_type ): from_cache = True elif frame_type != FrameType . DAY and start > tf . floor ( today , frame_type ): from_cache = True else : from_cache = False n = tf . count_frames ( start , end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( max_query_size // n , 1 ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] persisted = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) if from_cache : cached = await cls . _batch_get_cached_bars_n ( frame_type , 1 , end , batch_codes ) cached = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ persisted , cached ]) else : df = persisted for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . 
qfq ( bars ) yield code , bars batch_get_min_level_bars_in_range ( codes , frame_type , start , end , fq = True ) classmethod \u00b6 \u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u5206\u949f\u7ea7\u522b\uff08\u53731m, 5m, 15m, 30m\u548c60m)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1 get_bars \u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a 1 2 async for code, bars in Stock.batch_get_min_level_bars_in_range(...): print(code, bars) \u5982\u679c end \u4e0d\u5728 frame_type \u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c end \u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230 tf.floor(end, frame_type) \u3002 Parameters: Name Type Description Default codes List[str] \u80a1\u7968/\u6307\u6570\u4ee3\u7801\u5217\u8868 required frame_type FrameType \u5e27\u7c7b\u578b required start Union[datetime.date, datetime.datetime] \u8d77\u59cb\u65f6\u95f4 required end Union[datetime.date, datetime.datetime] \u7ed3\u675f\u65f6\u95f4\u3002\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 required fq bool \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. True Returns: Type Description Generator[Dict[str, BarsArray], None, None] \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e Source code in omicron/models/stock.py @classmethod async def batch_get_min_level_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , start : Frame , end : Frame , fq : bool = True , ) -> Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u5206\u949f\u7ea7\u522b\uff08\u53731m, 5m, 15m, 30m\u548c60m)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_min_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u80a1\u7968/\u6307\u6570\u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4\u3002\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. 
Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" closed_end = tf . floor ( end , frame_type ) n = tf . count_frames ( start , closed_end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( 1 , max_query_size // n ) ff = tf . first_min_frame ( datetime . datetime . now (), frame_type ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] if end < ff : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) part2 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) elif start >= ff : part1 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) cached = cached [ cached [ \"frame\" ] >= start ] part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) else : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , ff ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ part1 , part2 ]) for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . qfq ( bars ) yield code , bars cache_bars ( code , frame_type , bars ) async classmethod \u00b6 \u5c06\u5f53\u671f\u5df2\u6536\u76d8\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58 Note \u5f53\u524d\u53ea\u7f13\u5b581\u5206\u949f\u6570\u636e\u3002\u5176\u5b83\u5206\u949f\u6570\u636e\uff0c\u90fd\u5728\u8c03\u7528\u65f6\uff0c\u901a\u8fc7resample\u4e34\u65f6\u5408\u6210\u3002 \u884c\u60c5\u6570\u636e\u7f13\u5b58\u5728\u4ee5 bars:{frame_type.value}:{code} \u4e3akey, {frame}\u4e3afield\u7684hashmap\u4e2d\u3002 Parameters: Name Type Description Default code str the full qualified code of a security or index required frame_type FrameType frame type of the bars required bars numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' int : \"\"\"\u83b7\u53d6\u4e0a\u5e02\u4ee5\u6765\u7ecf\u8fc7\u4e86\u591a\u5c11\u4e2a\u4ea4\u6613\u65e5 \u7531\u4e8e\u53d7\u4ea4\u6613\u65e5\u5386\u9650\u5236\uff082005\u5e741\u67084\u65e5\u4e4b\u524d\u7684\u4ea4\u6613\u65e5\u5386\u6ca1\u6709\uff09\uff0c\u5bf9\u4e8e\u5728\u4e4b\u524d\u4e0a\u5e02\u7684\u54c1\u79cd\uff0c\u90fd\u8fd4\u56de\u4ece2005\u5e741\u67084\u65e5\u8d77\u7684\u65e5\u671f\u3002 Returns: int: [description] \"\"\" epoch_start = arrow . get ( \"2005-01-04\" ) . date () ipo_day = self . ipo_date if self . ipo_date > epoch_start else epoch_start return tf . count_day_frames ( ipo_day , arrow . now () . 
date ()) format_code ( code ) staticmethod \u00b6 \u65b0\u4e09\u677f\u548c\u5317\u4ea4\u6240\u7684\u80a1\u7968, \u6682\u4e0d\u652f\u6301, \u9ed8\u8ba4\u8fd4\u56deNone \u4e0a\u8bc1A\u80a1: 600\u3001601\u3001603\u3001605 \u6df1\u8bc1A\u80a1: 000\u3001001 \u4e2d\u5c0f\u677f: 002\u3001003 \u521b\u4e1a\u677f: 300/301 \u79d1\u521b\u677f: 688 \u65b0\u4e09\u677f: 82\u300183\u300187\u300188\u3001430\u3001420\u3001400 \u5317\u4ea4\u6240: 43\u300183\u300187\u300188 Source code in omicron/models/stock.py @staticmethod def format_code ( code ) -> str : \"\"\"\u65b0\u4e09\u677f\u548c\u5317\u4ea4\u6240\u7684\u80a1\u7968, \u6682\u4e0d\u652f\u6301, \u9ed8\u8ba4\u8fd4\u56deNone \u4e0a\u8bc1A\u80a1: 600\u3001601\u3001603\u3001605 \u6df1\u8bc1A\u80a1: 000\u3001001 \u4e2d\u5c0f\u677f: 002\u3001003 \u521b\u4e1a\u677f: 300/301 \u79d1\u521b\u677f: 688 \u65b0\u4e09\u677f: 82\u300183\u300187\u300188\u3001430\u3001420\u3001400 \u5317\u4ea4\u6240: 43\u300183\u300187\u300188 \"\"\" if not code or len ( code ) != 6 : return None prefix = code [ 0 ] if prefix in ( \"0\" , \"3\" ): return f \" { code } .XSHE\" elif prefix == \"6\" : return f \" { code } .XSHG\" else : return None fuzzy_match ( query ) classmethod \u00b6 \u5bf9\u80a1\u7968/\u6307\u6570\u8fdb\u884c\u6a21\u7cca\u5339\u914d\u67e5\u627e query\u53ef\u4ee5\u662f\u80a1\u7968/\u6307\u6570\u4ee3\u7801\uff0c\u4e5f\u53ef\u4ee5\u662f\u5b57\u6bcd\uff08\u6309name\u67e5\u627e\uff09\uff0c\u4e5f\u53ef\u4ee5\u662f\u6c49\u5b57\uff08\u6309\u663e\u793a\u540d\u67e5\u627e\uff09 Parameters: Name Type Description Default query str \u67e5\u8be2\u5b57\u7b26\u4e32 required Returns: Type Description Dict[str, Tuple] \u67e5\u8be2\u7ed3\u679c\uff0c\u5176\u4e2dTuple\u4e3a(code, display_name, name, start, end, type) Source code in omicron/models/stock.py @classmethod def fuzzy_match ( cls , query : str ) -> Dict [ str , Tuple ]: \"\"\"\u5bf9\u80a1\u7968/\u6307\u6570\u8fdb\u884c\u6a21\u7cca\u5339\u914d\u67e5\u627e query\u53ef\u4ee5\u662f\u80a1\u7968/\u6307\u6570\u4ee3\u7801\uff0c\u4e5f\u53ef\u4ee5\u662f\u5b57\u6bcd\uff08\u6309name\u67e5\u627e\uff09\uff0c\u4e5f\u53ef\u4ee5\u662f\u6c49\u5b57\uff08\u6309\u663e\u793a\u540d\u67e5\u627e\uff09 Args: query (str): \u67e5\u8be2\u5b57\u7b26\u4e32 Returns: Dict[str, Tuple]: \u67e5\u8be2\u7ed3\u679c\uff0c\u5176\u4e2dTuple\u4e3a(code, display_name, name, start, end, type) \"\"\" query = query . upper () if re . match ( r \"\\d+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"code\" ] . startswith ( query ) } elif re . match ( r \"[A-Z]+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"name\" ] . startswith ( query ) } else : return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"alias\" ] . 
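`format_code` only relies on the 6-digit prefix table quoted in its docstring, so its behaviour is easy to illustrate; NEEQ and BSE codes are not supported yet and return None. A small usage sketch:

```
# Usage sketch of Stock.format_code: map a bare 6-digit A-share code to its
# exchange-qualified form; unsupported boards return None.
print(Stock.format_code("600000"))  # 600000.XSHG (Shanghai)
print(Stock.format_code("000001"))  # 000001.XSHE (Shenzhen)
print(Stock.format_code("830799"))  # None (NEEQ/BSE codes are not supported yet)
```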
find ( query ) != - 1 } get_bars ( code , n , frame_type , end = None , fq = True , unclosed = True ) async classmethod \u00b6 \u83b7\u53d6\u5230 end \u4e3a\u6b62\u7684 n \u4e2a\u884c\u60c5\u6570\u636e\u3002 \u8fd4\u56de\u7684\u6570\u636e\u662f\u6309\u7167\u65f6\u95f4\u987a\u5e8f\u9012\u589e\u6392\u5e8f\u7684\u3002\u5728\u9047\u5230\u505c\u724c\u7684\u60c5\u51b5\u65f6\uff0c\u8be5\u65f6\u6bb5\u6570\u636e\u5c06\u88ab\u8df3\u8fc7\uff0c\u56e0\u6b64\u8fd4\u56de\u7684\u8bb0\u5f55\u53ef\u80fd\u4e0d\u662f\u4ea4\u6613\u65e5\u8fde\u7eed\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u4e0d\u8db3 n \u4e2a\u3002 \u5982\u679c\u7cfb\u7edf\u5f53\u524d\u6ca1\u6709\u5230\u6307\u5b9a\u65f6\u95f4 end \u7684\u6570\u636e\uff0c\u5c06\u5c3d\u6700\u5927\u52aa\u529b\u8fd4\u56de\u6570\u636e\u3002\u8c03\u7528\u8005\u53ef\u4ee5\u901a\u8fc7\u5224\u65ad\u6700\u540e\u4e00\u6761\u6570\u636e\u7684\u65f6\u95f4\u662f\u5426\u7b49\u4e8e end \u6765\u5224\u65ad\u662f\u5426\u83b7\u53d6\u5230\u4e86\u5168\u90e8\u6570\u636e\u3002 Parameters: Name Type Description Default code str \u8bc1\u5238\u4ee3\u7801 required n int \u8bb0\u5f55\u6570 required frame_type FrameType \u5e27\u7c7b\u578b required end Union[datetime.date, datetime.datetime] \u622a\u6b62\u65f6\u95f4,\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 None fq \u662f\u5426\u5bf9\u8fd4\u56de\u8bb0\u5f55\u8fdb\u884c\u590d\u6743\u3002\u5982\u679c\u4e3a True \u7684\u8bdd\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. True unclosed \u662f\u5426\u5305\u542b\u6700\u65b0\u672a\u6536\u76d8\u7684\u6570\u636e\uff1f Defaults to True. True Returns: Type Description numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' BarsArray : \"\"\"\u83b7\u53d6\u5230`end`\u4e3a\u6b62\u7684`n`\u4e2a\u884c\u60c5\u6570\u636e\u3002 \u8fd4\u56de\u7684\u6570\u636e\u662f\u6309\u7167\u65f6\u95f4\u987a\u5e8f\u9012\u589e\u6392\u5e8f\u7684\u3002\u5728\u9047\u5230\u505c\u724c\u7684\u60c5\u51b5\u65f6\uff0c\u8be5\u65f6\u6bb5\u6570\u636e\u5c06\u88ab\u8df3\u8fc7\uff0c\u56e0\u6b64\u8fd4\u56de\u7684\u8bb0\u5f55\u53ef\u80fd\u4e0d\u662f\u4ea4\u6613\u65e5\u8fde\u7eed\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u4e0d\u8db3`n`\u4e2a\u3002 \u5982\u679c\u7cfb\u7edf\u5f53\u524d\u6ca1\u6709\u5230\u6307\u5b9a\u65f6\u95f4`end`\u7684\u6570\u636e\uff0c\u5c06\u5c3d\u6700\u5927\u52aa\u529b\u8fd4\u56de\u6570\u636e\u3002\u8c03\u7528\u8005\u53ef\u4ee5\u901a\u8fc7\u5224\u65ad\u6700\u540e\u4e00\u6761\u6570\u636e\u7684\u65f6\u95f4\u662f\u5426\u7b49\u4e8e`end`\u6765\u5224\u65ad\u662f\u5426\u83b7\u53d6\u5230\u4e86\u5168\u90e8\u6570\u636e\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 n: \u8bb0\u5f55\u6570 frame_type: \u5e27\u7c7b\u578b end: \u622a\u6b62\u65f6\u95f4,\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 fq: \u662f\u5426\u5bf9\u8fd4\u56de\u8bb0\u5f55\u8fdb\u884c\u590d\u6743\u3002\u5982\u679c\u4e3a`True`\u7684\u8bdd\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. unclosed: \u662f\u5426\u5305\u542b\u6700\u65b0\u672a\u6536\u76d8\u7684\u6570\u636e\uff1f Defaults to True. Returns: \u8fd4\u56dedtype\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 \"\"\" now = datetime . datetime . now () try : cached = np . array ([], dtype = bars_dtype ) if frame_type in tf . day_level_frames : if end is None : end = now . date () elif type ( end ) == datetime . datetime : end = end . date () n0 = n if unclosed : cached = await cls . _get_cached_bars_n ( code , 1 , frame_type ) if cached . 
size > 0 : # \u5982\u679c\u7f13\u5b58\u7684\u672a\u6536\u76d8\u65e5\u671f > end\uff0c\u5219\u8be5\u7f13\u5b58\u4e0d\u662f\u9700\u8981\u7684 if cached [ 0 ][ \"frame\" ] . item () . date () > end : cached = np . array ([], dtype = bars_dtype ) else : n0 = n - 1 else : end = end or now closed_frame = tf . floor ( end , frame_type ) # fetch one more bar, in case we should discard unclosed bar cached = await cls . _get_cached_bars_n ( code , n + 1 , frame_type , end ) if not unclosed : cached = cached [ cached [ \"frame\" ] <= closed_frame ] # n bars we need fetch from persisted db n0 = n - cached . size if n0 > 0 : if cached . size > 0 : end0 = cached [ 0 ][ \"frame\" ] . item () else : end0 = end bars = await cls . _get_persisted_bars_n ( code , frame_type , n0 , end0 ) merged = np . concatenate (( bars , cached )) bars = merged [ - n :] else : bars = cached [ - n :] if fq : bars = cls . qfq ( bars ) return bars except Exception as e : logger . exception ( e ) logger . warning ( \"failed to get bars for %s , %s , %s , %s \" , code , n , frame_type , end ) raise get_bars_in_range ( code , frame_type , start , end = None , fq = True , unclosed = True ) async classmethod \u00b6 \u83b7\u53d6\u6307\u5b9a\u8bc1\u5238\uff08 code \uff09\u5728[ start , end ]\u671f\u95f4\u5e27\u7c7b\u578b\u4e3a frame_type \u7684\u884c\u60c5\u6570\u636e\u3002 Parameters: Name Type Description Default code \u8bc1\u5238\u4ee3\u7801 required frame_type \u884c\u60c5\u6570\u636e\u7684\u5e27\u7c7b\u578b required start \u8d77\u59cb\u65f6\u95f4 required end \u7ed3\u675f\u65f6\u95f4,\u5982\u679c\u4e3aNone\uff0c\u5219\u8868\u660e\u53d6\u5230\u5f53\u524d\u65f6\u95f4\u3002 None fq \u662f\u5426\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c True unclosed \u662f\u5426\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e True Source code in omicron/models/stock.py @classmethod async def get_bars_in_range ( cls , code : str , frame_type : FrameType , start : Frame , end : Frame = None , fq = True , unclosed = True , ) -> BarsArray : \"\"\"\u83b7\u53d6\u6307\u5b9a\u8bc1\u5238\uff08`code`\uff09\u5728[`start`, `end`]\u671f\u95f4\u5e27\u7c7b\u578b\u4e3a`frame_type`\u7684\u884c\u60c5\u6570\u636e\u3002 Args: code : \u8bc1\u5238\u4ee3\u7801 frame_type : \u884c\u60c5\u6570\u636e\u7684\u5e27\u7c7b\u578b start : \u8d77\u59cb\u65f6\u95f4 end : \u7ed3\u675f\u65f6\u95f4,\u5982\u679c\u4e3aNone\uff0c\u5219\u8868\u660e\u53d6\u5230\u5f53\u524d\u65f6\u95f4\u3002 fq : \u662f\u5426\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c unclosed : \u662f\u5426\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e \"\"\" now = datetime . datetime . now () if frame_type in tf . day_level_frames : end = end or now . date () if unclosed and tf . day_shift ( end , 0 ) == now . date (): part2 = await cls . _get_cached_bars_n ( code , 1 , frame_type ) else : part2 = np . array ([], dtype = bars_dtype ) # get rest from persisted part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) bars = np . concatenate (( part1 , part2 )) else : end = end or now closed_end = tf . floor ( end , frame_type ) ff_min1 = tf . first_min_frame ( now , FrameType . MIN1 ) if tf . day_shift ( end , 0 ) < now . date () or end < ff_min1 : part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) part2 = np . array ([], dtype = bars_dtype ) elif start >= ff_min1 : # all in cache part1 = np . array ([], dtype = bars_dtype ) n = tf . 
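A hedged usage sketch of `get_bars` as documented above: fetch the latest 30 forward-adjusted 30-minute bars, including the still-open bar. It needs an initialized omicron runtime, and the `FrameType` import path is assumed:

```
# Hedged usage sketch of Stock.get_bars; the result may hold fewer than 30 bars
# if the security was suspended during part of the requested window.
from coretypes import FrameType  # assumed import path

bars = await Stock.get_bars("000001.XSHE", 30, FrameType.MIN30, fq=True, unclosed=True)
print(bars[-1]["frame"], bars[-1]["close"])
```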
count_frames ( start , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) part2 = part2 [ part2 [ \"frame\" ] >= start ] else : # in both cache and persisted ff = tf . first_min_frame ( now , frame_type ) part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , ff ) n = tf . count_frames ( ff , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) if not unclosed : part2 = part2 [ part2 [ \"frame\" ] <= closed_end ] bars = np . concatenate (( part1 , part2 )) if fq : return cls . qfq ( bars ) else : return bars get_latest_price ( codes ) async classmethod \u00b6 \u83b7\u53d6\u591a\u652f\u80a1\u7968\u7684\u6700\u65b0\u4ef7\u683c\uff08\u4ea4\u6613\u65e5\u5f53\u5929\uff09\uff0c\u6682\u4e0d\u5305\u62ec\u6307\u6570 \u4ef7\u683c\u6570\u636e\u6bcf5\u79d2\u66f4\u65b0\u4e00\u6b21\uff0c\u63a5\u53d7\u591a\u53ea\u80a1\u7968\u67e5\u8be2\uff0c\u8fd4\u56de\u6700\u540e\u7f13\u5b58\u7684\u4ef7\u683c Parameters: Name Type Description Default codes Iterable[str] \u4ee3\u7801\u5217\u8868 required Returns: Type Description List[str] \u8fd4\u56de\u4e00\u4e2aList\uff0c\u4ef7\u683c\u662f\u5b57\u7b26\u5f62\u5f0f\u7684\u6d6e\u70b9\u6570\u3002 Source code in omicron/models/stock.py @classmethod async def get_latest_price ( cls , codes : Iterable [ str ]) -> List [ str ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\u7684\u6700\u65b0\u4ef7\u683c\uff08\u4ea4\u6613\u65e5\u5f53\u5929\uff09\uff0c\u6682\u4e0d\u5305\u62ec\u6307\u6570 \u4ef7\u683c\u6570\u636e\u6bcf5\u79d2\u66f4\u65b0\u4e00\u6b21\uff0c\u63a5\u53d7\u591a\u53ea\u80a1\u7968\u67e5\u8be2\uff0c\u8fd4\u56de\u6700\u540e\u7f13\u5b58\u7684\u4ef7\u683c Args: codes: \u4ee3\u7801\u5217\u8868 Returns: \u8fd4\u56de\u4e00\u4e2aList\uff0c\u4ef7\u683c\u662f\u5b57\u7b26\u5f62\u5f0f\u7684\u6d6e\u70b9\u6570\u3002 \"\"\" if not codes : return [] _raw_code_list = [] for code_str in codes : code , _ = code_str . split ( \".\" ) _raw_code_list . append ( code ) _converted_data = [] raw_data = await cache . feature . hmget ( TRADE_LATEST_PRICE , * _raw_code_list ) for _data in raw_data : if _data is None : _converted_data . append ( _data ) else : _converted_data . 
append ( float ( _data )) return _converted_data get_trade_price_limits ( code , begin , end ) async classmethod \u00b6 \u4eceinfluxdb\u548ccache\u4e2d\u83b7\u53d6\u4e2a\u80a1\u5728[begin, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u4ef7\u3002 \u6da8\u8dcc\u505c\u4ef7\u53ea\u6709\u65e5\u7ebf\u6570\u636e\u624d\u6709\uff0c\u56e0\u6b64\uff0cFrameType\u56fa\u5b9a\u4e3aFrameType.DAY\uff0c \u5f53\u5929\u7684\u6570\u636e\u5b58\u653e\u4e8eredis\uff0c\u5982\u679c\u67e5\u8be2\u65e5\u671f\u5305\u542b\u5f53\u5929\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u4ececache\u4e2d\u8bfb\u53d6\u5e76\u8ffd\u52a0\u5230\u7ed3\u679c\u4e2d Parameters: Name Type Description Default code \u4e2a\u80a1\u4ee3\u7801 required begin \u5f00\u59cb\u65e5\u671f required end \u7ed3\u675f\u65e5\u671f required Returns: Type Description numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' BarsArray : \"\"\"\u4eceinfluxdb\u548ccache\u4e2d\u83b7\u53d6\u4e2a\u80a1\u5728[begin, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u4ef7\u3002 \u6da8\u8dcc\u505c\u4ef7\u53ea\u6709\u65e5\u7ebf\u6570\u636e\u624d\u6709\uff0c\u56e0\u6b64\uff0cFrameType\u56fa\u5b9a\u4e3aFrameType.DAY\uff0c \u5f53\u5929\u7684\u6570\u636e\u5b58\u653e\u4e8eredis\uff0c\u5982\u679c\u67e5\u8be2\u65e5\u671f\u5305\u542b\u5f53\u5929\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u4ececache\u4e2d\u8bfb\u53d6\u5e76\u8ffd\u52a0\u5230\u7ed3\u679c\u4e2d Args: code : \u4e2a\u80a1\u4ee3\u7801 begin : \u5f00\u59cb\u65e5\u671f end : \u7ed3\u675f\u65e5\u671f Returns: dtype\u4e3a[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]\u7684numpy\u6570\u7ec4 \"\"\" cols = [ \"_time\" , \"high_limit\" , \"low_limit\" ] dtype = [( \"frame\" , \"O\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" )] if isinstance ( begin , datetime . datetime ): begin = begin . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate if isinstance ( end , datetime . datetime ): end = end . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate data_in_cache = await cls . _get_price_limit_in_cache ( code , begin , end ) client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . bucket ( client . _bucket ) . measurement ( measurement ) . range ( begin , end ) . tags ({ \"code\" : code }) . fields ( cols ) . sort ( \"_time\" ) ) ds = NumpyDeserializer ( dtype , use_cols = cols , converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if data_in_cache : result = np . concatenate ([ result , data_in_cache ]) return result persist_bars ( frame_type , bars ) async classmethod \u00b6 \u5c06\u884c\u60c5\u6570\u636e\u6301\u4e45\u5316 \u5982\u679c bars \u7c7b\u578b\u4e3aDict,\u5219key\u4e3a code \uff0cvalue\u4e3a bars \u3002\u5982\u679c\u5176\u7c7b\u578b\u4e3aBarsArray\u6216\u8005pd.DataFrame\uff0c\u5219 bars \u5404\u5217\u5b57\u6bb5\u5e94\u8be5\u4e3a coretypes.bars_dtype + (\"code\", \"O\")\u6784\u6210\u3002 Parameters: Name Type Description Default frame_type FrameType the frame type of the bars required bars Union[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' BarsArray : \"\"\"\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c\"\"\" # todo: \u8fd9\u91cc\u53ef\u4ee5\u4f18\u5316 if bars . 
size == 0 : return bars last = bars [ - 1 ][ \"factor\" ] for field in [ \"open\" , \"high\" , \"low\" , \"close\" , \"volume\" ]: bars [ field ] = bars [ field ] * ( bars [ \"factor\" ] / last ) return bars resample ( bars , from_frame , to_frame ) classmethod \u00b6 \u5c06\u539f\u6765\u4e3a from_frame \u7684\u884c\u60c5\u6570\u636e\u8f6c\u6362\u4e3a to_frame \u7684\u884c\u60c5\u6570\u636e \u5982\u679c to_frame \u4e3a\u65e5\u7ebf\u6216\u8005\u5206\u949f\u7ea7\u522b\u7ebf\uff0c\u5219 from_frame \u5fc5\u987b\u4e3a\u5206\u949f\u7ebf\uff1b\u5982\u679c to_frame \u4e3a\u5468\u4ee5\u4e0a\u7ea7\u522b\u7ebf\uff0c\u5219 from_frame \u5fc5\u987b\u4e3a\u65e5\u7ebf\u3002\u5176\u5b83\u7ea7\u522b\u4e4b\u95f4\u7684\u8f6c\u6362\u4e0d\u652f\u6301\u3002 \u5982\u679c from_frame \u4e3a1\u5206\u949f\u7ebf\uff0c\u5219\u5fc5\u987b\u4ece9\uff1a31\u8d77\u3002 Parameters: Name Type Description Default bars BarsArray \u884c\u60c5\u6570\u636e required from_frame FrameType \u8f6c\u6362\u524d\u7684FrameType required to_frame FrameType \u8f6c\u6362\u540e\u7684FrameType required Returns: Type Description BarsArray \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e Source code in omicron/models/stock.py @classmethod def resample ( cls , bars : BarsArray , from_frame : FrameType , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06\u539f\u6765\u4e3a`from_frame`\u7684\u884c\u60c5\u6570\u636e\u8f6c\u6362\u4e3a`to_frame`\u7684\u884c\u60c5\u6570\u636e \u5982\u679c`to_frame`\u4e3a\u65e5\u7ebf\u6216\u8005\u5206\u949f\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u5206\u949f\u7ebf\uff1b\u5982\u679c`to_frame`\u4e3a\u5468\u4ee5\u4e0a\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u65e5\u7ebf\u3002\u5176\u5b83\u7ea7\u522b\u4e4b\u95f4\u7684\u8f6c\u6362\u4e0d\u652f\u6301\u3002 \u5982\u679c`from_frame`\u4e3a1\u5206\u949f\u7ebf\uff0c\u5219\u5fc5\u987b\u4ece9\uff1a31\u8d77\u3002 Args: bars (BarsArray): \u884c\u60c5\u6570\u636e from_frame (FrameType): \u8f6c\u6362\u524d\u7684FrameType to_frame (FrameType): \u8f6c\u6362\u540e\u7684FrameType Returns: BarsArray: \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e \"\"\" if from_frame == FrameType . MIN1 : return cls . _resample_from_min1 ( bars , to_frame ) elif from_frame == FrameType . DAY : # pragma: no cover return cls . _resample_from_day ( bars , to_frame ) else : # pragma: no cover raise TypeError ( f \"unsupported from_frame: { from_frame } \" ) reset_cache () async classmethod \u00b6 \u6e05\u9664\u7f13\u5b58\u7684\u884c\u60c5\u6570\u636e Source code in omicron/models/stock.py @classmethod async def reset_cache ( cls ): \"\"\"\u6e05\u9664\u7f13\u5b58\u7684\u884c\u60c5\u6570\u636e\"\"\" try : for ft in itertools . chain ( tf . minute_level_frames , tf . day_level_frames ): keys = await cache . security . keys ( f \"bars: { ft . value } :*\" ) if keys : await cache . security . delete ( * keys ) finally : cls . 
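The forward adjustment (qfq) shown above scales every price (and the volume) by `factor / last_factor`, so the most recent bar keeps its raw price while earlier bars are pulled onto the same scale. A self-contained numeric sketch with made-up values:

```
# Stand-alone sketch of the arithmetic in Stock.qfq. The dtype below is a
# minimal stand-in; real bars use coretypes.bars_dtype. Numbers are made up:
# the factor jumps from 1.0 to 2.0, e.g. after a 1-for-1 split.
import numpy as np

dtype = np.dtype([("close", "f4"), ("factor", "f4")])
bars = np.array([(10.0, 1.0), (10.5, 1.0), (5.4, 2.0)], dtype=dtype)

last = bars[-1]["factor"]
adjusted = bars["close"] * (bars["factor"] / last)
print(adjusted)  # [5.   5.25 5.4 ]: earlier closes are halved, the last close is unchanged
```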
_is_cache_empty = True save_trade_price_limits ( price_limits , to_cache ) async classmethod \u00b6 \u4fdd\u5b58\u6da8\u8dcc\u505c\u4ef7 Parameters: Name Type Description Default price_limits numpy.ndarray[Any, numpy.dtype[dtype([('frame', 'O'), ('code', 'O'), ('high_limit', ' Tuple [ List [ bool ]]: \"\"\"\u83b7\u53d6\u4e2a\u80a1\u5728[start, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u672c\u51fd\u6570\u8fd4\u56de\u7684\u5e8f\u5217\u5728\u80a1\u7968\u6709\u505c\u724c\u7684\u60c5\u51b5\u4e0b\uff0c\u5c06\u4e0d\u80fd\u4e0e[start, end]\u4e00\u4e00\u5bf9\u5e94\u3002 Args: code: \u4e2a\u80a1\u4ee3\u7801 start: \u5f00\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u6da8\u8dcc\u505c\u6807\u5fd7\u5217\u8868(buy, sell) \"\"\" cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ] client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . bucket ( client . _bucket ) . measurement ( measurement ) . range ( start , end ) . tags ({ \"code\" : code }) . fields ( cols ) . sort ( \"_time\" ) ) dtype = [ ( \"frame\" , \"O\" ), ( \"close\" , \"f4\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" ), ] ds = NumpyDeserializer ( dtype , use_cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ], converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if result . size == 0 : return np . array ([], dtype = dtype ) return ( array_price_equal ( result [ \"close\" ], result [ \"high_limit\" ]), array_price_equal ( result [ \"close\" ], result [ \"low_limit\" ]), ) trade_price_limit_flags_ex ( code , start , end ) async classmethod \u00b6 \u83b7\u53d6\u80a1\u7968 code \u5728 [start, end] \u533a\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 Note \u5982\u679cend\u4e3a\u5f53\u5929\uff0c\u6ce8\u610f\u5728\u672a\u6536\u76d8\u4e4b\u524d\uff0c\u8fd9\u4e2a\u6da8\u8dcc\u505c\u6807\u5fd7\u90fd\u662f\u4e0d\u7a33\u5b9a\u7684 Parameters: Name Type Description Default code str \u80a1\u7968\u4ee3\u7801 required start date \u8d77\u59cb\u65e5\u671f required end date \u7ed3\u675f\u65e5\u671f required Returns: Type Description Dict[datetime.date, Tuple[bool, bool]] \u4ee5\u65e5\u671f\u4e3akey\uff0c\uff08\u6da8\u505c\uff0c\u8dcc\u505c\uff09\u4e3a\u503c\u7684dict Source code in omicron/models/stock.py @classmethod async def trade_price_limit_flags_ex ( cls , code : str , start : datetime . date , end : datetime . date ) -> Dict [ datetime . date , Tuple [ bool , bool ]]: \"\"\"\u83b7\u53d6\u80a1\u7968`code`\u5728`[start, end]`\u533a\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u5982\u679cend\u4e3a\u5f53\u5929\uff0c\u6ce8\u610f\u5728\u672a\u6536\u76d8\u4e4b\u524d\uff0c\u8fd9\u4e2a\u6da8\u8dcc\u505c\u6807\u5fd7\u90fd\u662f\u4e0d\u7a33\u5b9a\u7684 Args: code: \u80a1\u7968\u4ee3\u7801 start: \u8d77\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u4ee5\u65e5\u671f\u4e3akey\uff0c\uff08\u6da8\u505c\uff0c\u8dcc\u505c\uff09\u4e3a\u503c\u7684dict \"\"\" limit_prices = await cls . get_trade_price_limits ( code , start , end ) bars = await Stock . get_bars_in_range ( code , FrameType . DAY , start = start , end = end , fq = False ) close = bars [ \"close\" ] results = {} # aligned = True for i in range ( len ( bars )): if bars [ i ][ \"frame\" ] . item () . date () != limit_prices [ i ][ \"frame\" ]: # aligned = False logger . 
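A usage sketch for `trade_price_limit_flags_ex`; the code and the date range are placeholders, and the snippet must run inside an async context. The tuple order follows the docstring: first element is the limit-up flag, second is the limit-down flag.

```
import datetime

from omicron.models.stock import Stock

flags = await Stock.trade_price_limit_flags_ex(
    "000001.XSHE", datetime.date(2023, 1, 3), datetime.date(2023, 1, 31)
)
for frame, (limit_up, limit_down) in flags.items():
    # limit_up: close equals high_limit; limit_down: close equals low_limit
    print(frame, limit_up, limit_down)
```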
warning ( \"\u6570\u636e\u540c\u6b65\u9519\u8bef\uff0c\u6da8\u8dcc\u505c\u4ef7\u683c\u4e0e\u6536\u76d8\u4ef7\u65f6\u95f4\u4e0d\u4e00\u81f4: %s , %s \" , code , bars [ i ][ \"frame\" ]) break results [ limit_prices [ i ][ \"frame\" ]] = ( price_equal ( limit_prices [ i ][ \"high_limit\" ], close [ i ]), price_equal ( limit_prices [ i ][ \"low_limit\" ], close [ i ]), ) # if not aligned: # bars = bars[i:] # limit_prices = limit_prices[i:] # for frame in bars[\"frame\"]: # frame = frame.item().date() # close = bars[bars[\"frame\"].item().date() == frame][\"close\"].item() # high = limit_prices[limit_prices[\"frame\"] == frame][\"high_limit\"].item() # low = limit_prices[limit_prices[\"frame\"] == frame][\"low_limit\"].item() # results[frame] = ( # price_equal(high, close), # price_equal(low, close) # ) return results","title":"stock"},{"location":"api/stock/#omicron.models.stock.Stock","text":"Stock\u5bf9\u8c61\u7528\u4e8e\u5f52\u96c6\u67d0\u652f\u8bc1\u5238\uff08\u80a1\u7968\u548c\u6307\u6570\uff0c\u4e0d\u5305\u62ec\u5176\u5b83\u6295\u8d44\u54c1\u79cd\uff09\u7684\u76f8\u5173\u4fe1\u606f\uff0c\u6bd4\u5982\u884c\u60c5\u6570\u636e\uff08OHLC\u7b49\uff09\u3001\u5e02\u503c\u6570\u636e\u3001\u6240\u5c5e\u6982\u5ff5\u5206\u7c7b\u7b49\u3002 Source code in omicron/models/stock.py class Stock ( Security ): \"\"\" Stock\u5bf9\u8c61\u7528\u4e8e\u5f52\u96c6\u67d0\u652f\u8bc1\u5238\uff08\u80a1\u7968\u548c\u6307\u6570\uff0c\u4e0d\u5305\u62ec\u5176\u5b83\u6295\u8d44\u54c1\u79cd\uff09\u7684\u76f8\u5173\u4fe1\u606f\uff0c\u6bd4\u5982\u884c\u60c5\u6570\u636e\uff08OHLC\u7b49\uff09\u3001\u5e02\u503c\u6570\u636e\u3001\u6240\u5c5e\u6982\u5ff5\u5206\u7c7b\u7b49\u3002 \"\"\" _is_cache_empty = True def __init__ ( self , code : str ): self . _code = code self . _stock = self . get_stock ( code ) assert self . _stock , \"\u7cfb\u7edf\u4e2d\u4e0d\u5b58\u5728\u8be5code\" ( _ , self . _display_name , self . _name , ipo , end , _type ) = self . _stock self . _start_date = convert_nptime_to_datetime ( ipo ) . date () self . _end_date = convert_nptime_to_datetime ( end ) . date () self . _type = SecurityType ( _type ) @classmethod def choose_listed ( cls , dt : datetime . date , types : List [ str ] = [ \"stock\" , \"index\" ]): cond = np . array ([ False ] * len ( cls . _stocks )) dt = datetime . datetime . combine ( dt , datetime . time ()) for type_ in types : cond |= cls . _stocks [ \"type\" ] == type_ result = cls . _stocks [ cond ] result = result [ result [ \"end\" ] > dt ] result = result [ result [ \"ipo\" ] <= dt ] # result = np.array(result, dtype=cls.stock_info_dtype) return result [ \"code\" ] . tolist () @classmethod def fuzzy_match ( cls , query : str ) -> Dict [ str , Tuple ]: \"\"\"\u5bf9\u80a1\u7968/\u6307\u6570\u8fdb\u884c\u6a21\u7cca\u5339\u914d\u67e5\u627e query\u53ef\u4ee5\u662f\u80a1\u7968/\u6307\u6570\u4ee3\u7801\uff0c\u4e5f\u53ef\u4ee5\u662f\u5b57\u6bcd\uff08\u6309name\u67e5\u627e\uff09\uff0c\u4e5f\u53ef\u4ee5\u662f\u6c49\u5b57\uff08\u6309\u663e\u793a\u540d\u67e5\u627e\uff09 Args: query (str): \u67e5\u8be2\u5b57\u7b26\u4e32 Returns: Dict[str, Tuple]: \u67e5\u8be2\u7ed3\u679c\uff0c\u5176\u4e2dTuple\u4e3a(code, display_name, name, start, end, type) \"\"\" query = query . upper () if re . match ( r \"\\d+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"code\" ] . startswith ( query ) } elif re . match ( r \"[A-Z]+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"name\" ] . 
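A sketch of constructing a `Stock` instance and reading its basic attributes. It assumes the securities list has already been loaded (e.g. via `omicron.init()`); the code used here is a placeholder.

```
from omicron.models.stock import Stock

stock = Stock("000001.XSHE")
print(stock)  # "<display_name>[000001.XSHE]"
print(stock.ipo_date, stock.end_date, stock.security_type)
print(stock.days_since_ipo())
```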
startswith ( query ) } else : return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"alias\" ] . find ( query ) != - 1 } def __str__ ( self ): return f \" { self . display_name } [ { self . code } ]\" @property def ipo_date ( self ) -> datetime . date : return self . _start_date @property def display_name ( self ) -> str : return self . _display_name @property def name ( self ) -> str : return self . _name @property def end_date ( self ) -> datetime . date : return self . _end_date @property def code ( self ) -> str : return self . _code @property def sim_code ( self ) -> str : return re . sub ( r \"\\.XSH[EG]\" , \"\" , self . code ) @property def security_type ( self ) -> SecurityType : \"\"\"\u8fd4\u56de\u8bc1\u5238\u7c7b\u578b Returns: SecurityType: [description] \"\"\" return self . _type @staticmethod def simplify_code ( code ) -> str : return re . sub ( r \"\\.XSH[EG]\" , \"\" , code ) @staticmethod def format_code ( code ) -> str : \"\"\"\u65b0\u4e09\u677f\u548c\u5317\u4ea4\u6240\u7684\u80a1\u7968, \u6682\u4e0d\u652f\u6301, \u9ed8\u8ba4\u8fd4\u56deNone \u4e0a\u8bc1A\u80a1: 600\u3001601\u3001603\u3001605 \u6df1\u8bc1A\u80a1: 000\u3001001 \u4e2d\u5c0f\u677f: 002\u3001003 \u521b\u4e1a\u677f: 300/301 \u79d1\u521b\u677f: 688 \u65b0\u4e09\u677f: 82\u300183\u300187\u300188\u3001430\u3001420\u3001400 \u5317\u4ea4\u6240: 43\u300183\u300187\u300188 \"\"\" if not code or len ( code ) != 6 : return None prefix = code [ 0 ] if prefix in ( \"0\" , \"3\" ): return f \" { code } .XSHE\" elif prefix == \"6\" : return f \" { code } .XSHG\" else : return None def days_since_ipo ( self ) -> int : \"\"\"\u83b7\u53d6\u4e0a\u5e02\u4ee5\u6765\u7ecf\u8fc7\u4e86\u591a\u5c11\u4e2a\u4ea4\u6613\u65e5 \u7531\u4e8e\u53d7\u4ea4\u6613\u65e5\u5386\u9650\u5236\uff082005\u5e741\u67084\u65e5\u4e4b\u524d\u7684\u4ea4\u6613\u65e5\u5386\u6ca1\u6709\uff09\uff0c\u5bf9\u4e8e\u5728\u4e4b\u524d\u4e0a\u5e02\u7684\u54c1\u79cd\uff0c\u90fd\u8fd4\u56de\u4ece2005\u5e741\u67084\u65e5\u8d77\u7684\u65e5\u671f\u3002 Returns: int: [description] \"\"\" epoch_start = arrow . get ( \"2005-01-04\" ) . date () ipo_day = self . ipo_date if self . ipo_date > epoch_start else epoch_start return tf . count_day_frames ( ipo_day , arrow . now () . date ()) @staticmethod def qfq ( bars : BarsArray ) -> BarsArray : \"\"\"\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c\"\"\" # todo: \u8fd9\u91cc\u53ef\u4ee5\u4f18\u5316 if bars . 
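Quick sketches of `fuzzy_match` and `format_code`, mirroring the three lookup branches and the exchange-prefix rules shown in the source; the query strings and codes are illustrative.

```
from omicron.models.stock import Stock

# fuzzy_match: by code prefix, by name prefix, or by substring of the display name
Stock.fuzzy_match("60000")   # codes starting with "60000"
Stock.fuzzy_match("PAYH")    # securities whose `name` starts with "PAYH"
Stock.fuzzy_match("银行")     # securities whose display name contains "银行"

# format_code: map a bare 6-digit code to the exchange-qualified form
Stock.format_code("000001")  # -> "000001.XSHE"
Stock.format_code("600000")  # -> "600000.XSHG"
Stock.format_code("430047")  # -> None (NEEQ/BSE codes are not supported yet)
```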
size == 0 : return bars last = bars [ - 1 ][ \"factor\" ] for field in [ \"open\" , \"high\" , \"low\" , \"close\" , \"volume\" ]: bars [ field ] = bars [ field ] * ( bars [ \"factor\" ] / last ) return bars @classmethod async def batch_get_min_level_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , start : Frame , end : Frame , fq : bool = True , ) -> Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u5206\u949f\u7ea7\u522b\uff08\u53731m, 5m, 15m, 30m\u548c60m)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_min_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u80a1\u7968/\u6307\u6570\u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4\u3002\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" closed_end = tf . floor ( end , frame_type ) n = tf . count_frames ( start , closed_end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( 1 , max_query_size // n ) ff = tf . first_min_frame ( datetime . datetime . now (), frame_type ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] if end < ff : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) part2 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) elif start >= ff : part1 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) cached = cached [ cached [ \"frame\" ] >= start ] part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) else : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , ff ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ part1 , part2 ]) for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . 
qfq ( bars ) yield code , bars @classmethod async def batch_get_day_level_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , start : Frame , end : Frame , fq : bool = True , ) -> Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u65e5\u7ebf\u7ea7\u522b\uff08\u53731d, 1w, 1M)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_day_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" today = datetime . datetime . now () . date () # \u65e5\u7ebf\uff0cend\u4e0d\u7b49\u4e8e\u6700\u540e\u4ea4\u6613\u65e5\uff0c\u6b64\u65f6\u5df2\u65e0\u7f13\u5b58 if frame_type == FrameType . DAY and end == tf . floor ( today , frame_type ): from_cache = True elif frame_type != FrameType . DAY and start > tf . floor ( today , frame_type ): from_cache = True else : from_cache = False n = tf . count_frames ( start , end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( max_query_size // n , 1 ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] persisted = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) if from_cache : cached = await cls . _batch_get_cached_bars_n ( frame_type , 1 , end , batch_codes ) cached = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ persisted , cached ]) else : df = persisted for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . 
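A combined sketch of the two batch iterators documented above. The codes, frame types and time ranges are placeholders; both calls must run inside an async context after omicron has been initialized.

```
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock

codes = ["000001.XSHE", "600000.XSHG"]

# minute-level frames (1m/5m/15m/30m/60m)
async for code, bars in Stock.batch_get_min_level_bars_in_range(
    codes, FrameType.MIN30,
    start=datetime.datetime(2023, 1, 3, 10, 0),
    end=datetime.datetime(2023, 1, 6, 15, 0),
):
    print(code, len(bars))

# day-level frames (1d/1w/1M)
async for code, bars in Stock.batch_get_day_level_bars_in_range(
    codes, FrameType.DAY,
    start=datetime.date(2023, 1, 3),
    end=datetime.date(2023, 1, 31),
):
    print(code, len(bars))
```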
qfq ( bars ) yield code , bars @classmethod async def get_bars_in_range ( cls , code : str , frame_type : FrameType , start : Frame , end : Frame = None , fq = True , unclosed = True , ) -> BarsArray : \"\"\"\u83b7\u53d6\u6307\u5b9a\u8bc1\u5238\uff08`code`\uff09\u5728[`start`, `end`]\u671f\u95f4\u5e27\u7c7b\u578b\u4e3a`frame_type`\u7684\u884c\u60c5\u6570\u636e\u3002 Args: code : \u8bc1\u5238\u4ee3\u7801 frame_type : \u884c\u60c5\u6570\u636e\u7684\u5e27\u7c7b\u578b start : \u8d77\u59cb\u65f6\u95f4 end : \u7ed3\u675f\u65f6\u95f4,\u5982\u679c\u4e3aNone\uff0c\u5219\u8868\u660e\u53d6\u5230\u5f53\u524d\u65f6\u95f4\u3002 fq : \u662f\u5426\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c unclosed : \u662f\u5426\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e \"\"\" now = datetime . datetime . now () if frame_type in tf . day_level_frames : end = end or now . date () if unclosed and tf . day_shift ( end , 0 ) == now . date (): part2 = await cls . _get_cached_bars_n ( code , 1 , frame_type ) else : part2 = np . array ([], dtype = bars_dtype ) # get rest from persisted part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) bars = np . concatenate (( part1 , part2 )) else : end = end or now closed_end = tf . floor ( end , frame_type ) ff_min1 = tf . first_min_frame ( now , FrameType . MIN1 ) if tf . day_shift ( end , 0 ) < now . date () or end < ff_min1 : part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) part2 = np . array ([], dtype = bars_dtype ) elif start >= ff_min1 : # all in cache part1 = np . array ([], dtype = bars_dtype ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) part2 = part2 [ part2 [ \"frame\" ] >= start ] else : # in both cache and persisted ff = tf . first_min_frame ( now , frame_type ) part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , ff ) n = tf . count_frames ( ff , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) if not unclosed : part2 = part2 [ part2 [ \"frame\" ] <= closed_end ] bars = np . concatenate (( part1 , part2 )) if fq : return cls . 
qfq ( bars ) else : return bars @classmethod async def get_bars ( cls , code : str , n : int , frame_type : FrameType , end : Frame = None , fq = True , unclosed = True , ) -> BarsArray : \"\"\"\u83b7\u53d6\u5230`end`\u4e3a\u6b62\u7684`n`\u4e2a\u884c\u60c5\u6570\u636e\u3002 \u8fd4\u56de\u7684\u6570\u636e\u662f\u6309\u7167\u65f6\u95f4\u987a\u5e8f\u9012\u589e\u6392\u5e8f\u7684\u3002\u5728\u9047\u5230\u505c\u724c\u7684\u60c5\u51b5\u65f6\uff0c\u8be5\u65f6\u6bb5\u6570\u636e\u5c06\u88ab\u8df3\u8fc7\uff0c\u56e0\u6b64\u8fd4\u56de\u7684\u8bb0\u5f55\u53ef\u80fd\u4e0d\u662f\u4ea4\u6613\u65e5\u8fde\u7eed\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u4e0d\u8db3`n`\u4e2a\u3002 \u5982\u679c\u7cfb\u7edf\u5f53\u524d\u6ca1\u6709\u5230\u6307\u5b9a\u65f6\u95f4`end`\u7684\u6570\u636e\uff0c\u5c06\u5c3d\u6700\u5927\u52aa\u529b\u8fd4\u56de\u6570\u636e\u3002\u8c03\u7528\u8005\u53ef\u4ee5\u901a\u8fc7\u5224\u65ad\u6700\u540e\u4e00\u6761\u6570\u636e\u7684\u65f6\u95f4\u662f\u5426\u7b49\u4e8e`end`\u6765\u5224\u65ad\u662f\u5426\u83b7\u53d6\u5230\u4e86\u5168\u90e8\u6570\u636e\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 n: \u8bb0\u5f55\u6570 frame_type: \u5e27\u7c7b\u578b end: \u622a\u6b62\u65f6\u95f4,\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 fq: \u662f\u5426\u5bf9\u8fd4\u56de\u8bb0\u5f55\u8fdb\u884c\u590d\u6743\u3002\u5982\u679c\u4e3a`True`\u7684\u8bdd\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. unclosed: \u662f\u5426\u5305\u542b\u6700\u65b0\u672a\u6536\u76d8\u7684\u6570\u636e\uff1f Defaults to True. Returns: \u8fd4\u56dedtype\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 \"\"\" now = datetime . datetime . now () try : cached = np . array ([], dtype = bars_dtype ) if frame_type in tf . day_level_frames : if end is None : end = now . date () elif type ( end ) == datetime . datetime : end = end . date () n0 = n if unclosed : cached = await cls . _get_cached_bars_n ( code , 1 , frame_type ) if cached . size > 0 : # \u5982\u679c\u7f13\u5b58\u7684\u672a\u6536\u76d8\u65e5\u671f > end\uff0c\u5219\u8be5\u7f13\u5b58\u4e0d\u662f\u9700\u8981\u7684 if cached [ 0 ][ \"frame\" ] . item () . date () > end : cached = np . array ([], dtype = bars_dtype ) else : n0 = n - 1 else : end = end or now closed_frame = tf . floor ( end , frame_type ) # fetch one more bar, in case we should discard unclosed bar cached = await cls . _get_cached_bars_n ( code , n + 1 , frame_type , end ) if not unclosed : cached = cached [ cached [ \"frame\" ] <= closed_frame ] # n bars we need fetch from persisted db n0 = n - cached . size if n0 > 0 : if cached . size > 0 : end0 = cached [ 0 ][ \"frame\" ] . item () else : end0 = end bars = await cls . _get_persisted_bars_n ( code , frame_type , n0 , end0 ) merged = np . concatenate (( bars , cached )) bars = merged [ - n :] else : bars = cached [ - n :] if fq : bars = cls . qfq ( bars ) return bars except Exception as e : logger . exception ( e ) logger . 
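Sketches for the single-security accessors `get_bars` and `get_bars_in_range`; the code, record count and dates are placeholders, and the calls must run inside an async context.

```
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock

# last 60 daily bars up to now, forward-adjusted, including the unclosed bar
bars = await Stock.get_bars("000001.XSHE", 60, FrameType.DAY)

# 30-minute bars in an explicit range, without forward adjustment
bars = await Stock.get_bars_in_range(
    "000001.XSHE",
    FrameType.MIN30,
    start=datetime.datetime(2023, 1, 3, 10, 0),
    end=datetime.datetime(2023, 1, 6, 15, 0),
    fq=False,
)
```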
warning ( \"failed to get bars for %s , %s , %s , %s \" , code , n , frame_type , end ) raise @classmethod async def _get_persisted_bars_in_range ( cls , code : str , frame_type : FrameType , start : Frame , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u4ecb\u4e8e[`start`, `end`]\u95f4\u7684\u884c\u60c5\u8bb0\u5f55 \u5982\u679c`start`\u5230`end`\u533a\u95f4\u67d0\u652f\u80a1\u7968\u505c\u724c\uff0c\u5219\u4f1a\u8fd4\u56de\u7a7a\u6570\u7ec4\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 Returns: \u8fd4\u56dedtype\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 \"\"\" end = end or datetime . datetime . now () keep_cols = [ \"_time\" ] + list ( bars_cols [ 1 :]) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( start , end ) . measurement ( measurement ) . fields ( keep_cols ) . tags ({ \"code\" : code }) ) serializer = DataframeDeserializer ( encoding = \"utf-8\" , names = [ \"_\" , \"table\" , \"result\" , \"frame\" , \"code\" , \"amount\" , \"close\" , \"factor\" , \"high\" , \"low\" , \"open\" , \"volume\" , ], engine = \"c\" , skiprows = 0 , header = 0 , usecols = bars_cols , parse_dates = [ \"frame\" ], ) client = get_influx_client () result = await client . query ( flux , serializer ) return result . to_records ( index = False ) . astype ( bars_dtype ) @classmethod async def _get_persisted_bars_n ( cls , code : str , frame_type : FrameType , n : int , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u6301\u4e45\u5316\u6570\u636e\u5e93\u4e2d\u83b7\u53d6\u622a\u6b62\u5230`end`\u7684`n`\u6761\u884c\u60c5\u8bb0\u5f55 \u5982\u679c`end`\u672a\u6307\u5b9a\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 \u57fa\u4e8einfluxdb\u67e5\u8be2\u7684\u7279\u6027\uff0c\u5728\u67e5\u8be2\u524d\uff0c\u5fc5\u987b\u5148\u6839\u636e`end`\u548c`n`\u8ba1\u7b97\u51fa\u8d77\u59cb\u65f6\u95f4\uff0c\u4f46\u5982\u679c\u5728\u6b64\u671f\u95f4\u67d0\u4e9b\u80a1\u7968\u6709\u505c\u724c\uff0c\u5219\u65e0\u6cd5\u8fd4\u56de\u7684\u6570\u636e\u5c06\u5c0f\u4e8e`n`\u3002\u800c\u5982\u679c\u8d77\u59cb\u65f6\u95f4\u8bbe\u7f6e\u5f97\u8db3\u591f\u65e9\uff0c\u867d\u7136\u80fd\u6ee1\u8db3\u8fd4\u56de\u6570\u636e\u6761\u6570\u7684\u8981\u6c42\uff0c\u4f46\u4f1a\u5e26\u6765\u6027\u80fd\u4e0a\u7684\u635f\u5931\u3002\u56e0\u6b64\uff0c\u6211\u4eec\u5728\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\u65f6\uff0c\u4e0d\u662f\u4f7f\u7528`n`\u6765\u8ba1\u7b97\uff0c\u800c\u662f\u4f7f\u7528\u4e86`min(n * 2, n + 20)`\u6765\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\uff0c\u8fd9\u6837\u591a\u6570\u60c5\u51b5\u4e0b\uff0c\u80fd\u591f\u4fdd\u8bc1\u8fd4\u56de\u6570\u636e\u7684\u6761\u6570\u4e3a`n`\u6761\u3002 \u8fd4\u56de\u7684\u6570\u636e\u6309`frame`\u8fdb\u884c\u5347\u5e8f\u6392\u5217\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 frame_type: \u5e27\u7c7b\u578b n: \u8fd4\u56de\u7ed3\u679c\u6570\u91cf end: \u7ed3\u675f\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 Returns: \u8fd4\u56dedtype\u4e3a`bars_dtype`\u7684numpy\u6570\u7ec4 \"\"\" # check is needed since tags accept List as well assert isinstance ( code , str ), \"`code` must be a string\" end = end or datetime . datetime . now () closed_end = tf . floor ( end , frame_type ) start = tf . 
shift ( closed_end , - min ( 2 * n , n + 20 ), frame_type ) keep_cols = [ \"_time\" ] + list ( bars_cols [ 1 :]) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( start , end ) . measurement ( measurement ) . fields ( keep_cols ) . tags ({ \"code\" : code }) . latest ( n ) ) serializer = DataframeDeserializer ( encoding = \"utf-8\" , names = [ \"_\" , \"table\" , \"result\" , \"frame\" , \"code\" , \"amount\" , \"close\" , \"factor\" , \"high\" , \"low\" , \"open\" , \"volume\" , ], engine = \"c\" , skiprows = 0 , header = 0 , usecols = bars_cols , parse_dates = [ \"frame\" ], ) client = get_influx_client () result = await client . query ( flux , serializer ) return result . to_records ( index = False ) . astype ( bars_dtype ) @classmethod async def _batch_get_persisted_bars_n ( cls , codes : List [ str ], frame_type : FrameType , n : int , end : Frame = None ) -> pd . DataFrame : \"\"\"\u4ece\u6301\u4e45\u5316\u5b58\u50a8\u4e2d\u83b7\u53d6`codes`\u6307\u5b9a\u7684\u4e00\u6279\u80a1\u7968\u622a\u6b62`end`\u65f6\u7684`n`\u6761\u8bb0\u5f55\u3002 \u8fd4\u56de\u7684\u6570\u636e\u6309`frame`\u8fdb\u884c\u5347\u5e8f\u6392\u5217\u3002\u5982\u679c\u4e0d\u5b58\u5728\u6ee1\u8db3\u6307\u5b9a\u6761\u4ef6\u7684\u67e5\u8be2\u7ed3\u679c\uff0c\u5c06\u8fd4\u56de\u7a7a\u7684DataFrame\u3002 \u57fa\u4e8einfluxdb\u67e5\u8be2\u7684\u7279\u6027\uff0c\u5728\u67e5\u8be2\u524d\uff0c\u5fc5\u987b\u5148\u6839\u636e`end`\u548c`n`\u8ba1\u7b97\u51fa\u8d77\u59cb\u65f6\u95f4\uff0c\u4f46\u5982\u679c\u5728\u6b64\u671f\u95f4\u67d0\u4e9b\u80a1\u7968\u6709\u505c\u724c\uff0c\u5219\u65e0\u6cd5\u8fd4\u56de\u7684\u6570\u636e\u5c06\u5c0f\u4e8e`n`\u3002\u5982\u679c\u8d77\u59cb\u65f6\u95f4\u8bbe\u7f6e\u7684\u8db3\u591f\u65e9\uff0c\u867d\u7136\u80fd\u6ee1\u8db3\u8fd4\u56de\u6570\u636e\u6761\u6570\u7684\u8981\u6c42\uff0c\u4f46\u4f1a\u5e26\u6765\u6027\u80fd\u4e0a\u7684\u635f\u5931\u3002\u56e0\u6b64\uff0c\u6211\u4eec\u5728\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\u65f6\uff0c\u4e0d\u662f\u4f7f\u7528`n`\u6765\u8ba1\u7b97\uff0c\u800c\u662f\u4f7f\u7528\u4e86`min(n * 2, n + 20)`\u6765\u8ba1\u7b97\u8d77\u59cb\u65f6\u95f4\uff0c\u8fd9\u6837\u591a\u6570\u60c5\u51b5\u4e0b\uff0c\u80fd\u591f\u4fdd\u8bc1\u8fd4\u56de\u6570\u636e\u7684\u6761\u6570\u4e3a`n`\u6761\u3002 Args: codes: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 frame_type: \u5e27\u7c7b\u578b n: \u8fd4\u56de\u7ed3\u679c\u6570\u91cf end: \u7ed3\u675f\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u5f53\u524d\u65f6\u95f4 Returns: DataFrame, columns\u4e3a`code`, `frame`, `open`, `high`, `low`, `close`, `volume`, `amount`, `factor` \"\"\" max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) if len ( codes ) * min ( n + 20 , 2 * n ) > max_query_size : raise BadParameterError ( f \"codes\u7684\u6570\u91cf\u548cn\u7684\u4e58\u79ef\u8d85\u8fc7\u4e86influxdb\u7684\u6700\u5927\u67e5\u8be2\u6570\u91cf\u9650\u5236 { max_query_size } \" ) end = end or datetime . datetime . now () close_end = tf . floor ( end , frame_type ) begin = tf . 
shift ( close_end , - 1 * min ( n + 20 , n * 2 ), frame_type ) # influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\u7c7b\u4f3c\u4e8eCSV\uff0c\u5176\u5217\u987a\u5e8f\u4e3a_, result_alias, table_seq, _time, tags, fields,\u5176\u4e2dtags\u548cfields\u90fd\u662f\u5347\u5e8f\u6392\u5217 keep_cols = [ \"code\" ] + list ( bars_cols ) names = [ \"_\" , \"result\" , \"table\" , \"frame\" , \"code\" ] # influxdb will return fields in the order of name ascending parallel names . extend ( sorted ( bars_cols [ 1 :])) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( begin , end ) . measurement ( measurement ) . fields ( keep_cols ) . latest ( n ) ) if codes is not None : assert isinstance ( codes , list ), \"`codes` must be a list or None\" flux . tags ({ \"code\" : codes }) deserializer = DataframeDeserializer ( names = names , usecols = keep_cols , encoding = \"utf-8\" , time_col = \"frame\" , engine = \"c\" , ) client = get_influx_client () return await client . query ( flux , deserializer ) @classmethod async def _batch_get_persisted_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , begin : Frame , end : Frame = None ) -> pd . DataFrame : \"\"\"\u4ece\u6301\u4e45\u5316\u5b58\u50a8\u4e2d\u83b7\u53d6`codes`\u6307\u5b9a\u7684\u4e00\u6279\u80a1\u7968\u5728`begin`\u548c`end`\u4e4b\u95f4\u7684\u8bb0\u5f55\u3002 \u8fd4\u56de\u7684\u6570\u636e\u5c06\u6309`frame`\u8fdb\u884c\u5347\u5e8f\u6392\u5217\u3002 \u6ce8\u610f\uff0c\u8fd4\u56de\u7684\u6570\u636e\u6709\u53ef\u80fd\u4e0d\u662f\u7b49\u957f\u7684\uff0c\u56e0\u4e3a\u6709\u7684\u80a1\u7968\u53ef\u80fd\u505c\u724c\u3002 Args: codes: \u8bc1\u5238\u4ee3\u7801\u5217\u8868\u3002 frame_type: \u5e27\u7c7b\u578b begin: \u5f00\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 Returns: DataFrame, columns\u4e3a`code`, `frame`, `open`, `high`, `low`, `close`, `volume`, `amount`, `factor` \"\"\" end = end or datetime . datetime . now () n = tf . count_frames ( begin , end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) if len ( codes ) * n > max_query_size : raise BadParameterError ( f \"asked records is { len ( codes ) * n } , which is too large than { max_query_size } \" ) # influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\u7c7b\u4f3c\u4e8eCSV\uff0c\u5176\u5217\u987a\u5e8f\u4e3a_, result_alias, table_seq, _time, tags, fields,\u5176\u4e2dtags\u548cfields\u90fd\u662f\u5347\u5e8f\u6392\u5217 keep_cols = [ \"code\" ] + list ( bars_cols ) names = [ \"_\" , \"result\" , \"table\" , \"frame\" , \"code\" ] # influxdb will return fields in the order of name ascending parallel names . extend ( sorted ( bars_cols [ 1 :])) measurement = cls . _measurement_name ( frame_type ) flux = ( Flux () . bucket ( cfg . influxdb . bucket_name ) . range ( begin , end ) . measurement ( measurement ) . fields ( keep_cols ) ) flux . tags ({ \"code\" : codes }) deserializer = DataframeDeserializer ( names = names , usecols = keep_cols , encoding = \"utf-8\" , time_col = \"frame\" , engine = \"c\" , ) client = get_influx_client () df = await client . 
query ( flux , deserializer ) return df @classmethod async def batch_cache_bars ( cls , frame_type : FrameType , bars : Dict [ str , BarsArray ]): \"\"\"\u7f13\u5b58\u5df2\u6536\u76d8\u7684\u5206\u949f\u7ebf\u548c\u65e5\u7ebf \u5f53\u7f13\u5b58\u65e5\u7ebf\u65f6\uff0c\u4ec5\u9650\u4e8e\u5f53\u65e5\u6536\u76d8\u540e\u7684\u7b2c\u4e00\u6b21\u540c\u6b65\u65f6\u8c03\u7528\u3002 Args: frame_type: \u5e27\u7c7b\u578b bars: \u884c\u60c5\u6570\u636e\uff0c\u5176key\u4e3a\u80a1\u7968\u4ee3\u7801\uff0c\u5176value\u4e3adtype\u4e3a`bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 Raises: RedisError: \u5982\u679c\u5728\u6267\u884c\u8fc7\u7a0b\u4e2d\u53d1\u751f\u9519\u8bef\uff0c\u5219\u629b\u51fa\u4ee5\u6b64\u5f02\u5e38\u4e3a\u57fa\u7c7b\u7684\u5404\u79cd\u5f02\u5e38\uff0c\u5177\u4f53\u53c2\u8003aioredis\u76f8\u5173\u6587\u6863\u3002 \"\"\" if frame_type == FrameType . DAY : await cls . batch_cache_unclosed_bars ( frame_type , bars ) return pl = cache . security . pipeline () for code , bars in bars . items (): key = f \"bars: { frame_type . value } : { code } \" for bar in bars : frame = tf . time2int ( bar [ \"frame\" ] . item ()) val = [ * bar ] val [ 0 ] = frame pl . hset ( key , frame , \",\" . join ( map ( str , val ))) await pl . execute () @classmethod async def batch_cache_unclosed_bars ( cls , frame_type : FrameType , bars : Dict [ str , BarsArray ] ): # pragma: no cover \"\"\"\u7f13\u5b58\u672a\u6536\u76d8\u76845\u300115\u300130\u300160\u5206\u949f\u7ebf\u53ca\u65e5\u7ebf\u3001\u5468\u7ebf\u3001\u6708\u7ebf Args: frame_type: \u5e27\u7c7b\u578b bars: \u884c\u60c5\u6570\u636e\uff0c\u5176key\u4e3a\u80a1\u7968\u4ee3\u7801\uff0c\u5176value\u4e3adtype\u4e3a`bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002bars\u4e0d\u80fd\u4e3aNone\uff0c\u6216\u8005empty\u3002 Raise: RedisError: \u5982\u679c\u5728\u6267\u884c\u8fc7\u7a0b\u4e2d\u53d1\u751f\u9519\u8bef\uff0c\u5219\u629b\u51fa\u4ee5\u6b64\u5f02\u5e38\u4e3a\u57fa\u7c7b\u7684\u5404\u79cd\u5f02\u5e38\uff0c\u5177\u4f53\u53c2\u8003aioredis\u76f8\u5173\u6587\u6863\u3002 \"\"\" pl = cache . security . pipeline () key = f \"bars: { frame_type . value } :unclosed\" convert = tf . time2int if frame_type in tf . minute_level_frames else tf . date2int for code , bar in bars . items (): val = [ * bar [ 0 ]] val [ 0 ] = convert ( bar [ \"frame\" ][ 0 ] . item ()) # \u65f6\u95f4\u8f6c\u6362 pl . hset ( key , code , \",\" . join ( map ( str , val ))) await pl . execute () @classmethod async def reset_cache ( cls ): \"\"\"\u6e05\u9664\u7f13\u5b58\u7684\u884c\u60c5\u6570\u636e\"\"\" try : for ft in itertools . chain ( tf . minute_level_frames , tf . day_level_frames ): keys = await cache . security . keys ( f \"bars: { ft . value } :*\" ) if keys : await cache . security . delete ( * keys ) finally : cls . _is_cache_empty = True @classmethod def _deserialize_cached_bars ( cls , raw : List [ str ], ft : FrameType ) -> BarsArray : \"\"\"\u4eceredis\u4e2d\u53cd\u5e8f\u5217\u5316\u7f13\u5b58\u7684\u6570\u636e \u5982\u679c`raw`\u7a7a\u6570\u7ec4\u6216\u8005\u5143\u7d20\u4e3a`None`\uff0c\u5219\u8fd4\u56de\u7a7a\u6570\u7ec4\u3002 Args: raw: redis\u4e2d\u7684\u7f13\u5b58\u6570\u636e ft: \u5e27\u7c7b\u578b sort: \u662f\u5426\u9700\u8981\u91cd\u65b0\u6392\u5e8f\uff0c\u7f3a\u7701\u4e3aFalse Returns: BarsArray: \u884c\u60c5\u6570\u636e \"\"\" fix_date = False if ft in tf . minute_level_frames : convert = tf . int2time else : convert = tf . 
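A sketch of caching a batch of closed bars and of the resulting redis layout. The `bars_by_code` variable and the concrete field/value shown are illustrative; the key scheme and the comma-joined encoding follow the code above.

```
from coretypes import FrameType
from omicron.models.stock import Stock

# bars_by_code: assumed {code: bars} dict, each value of dtype coretypes.bars_dtype
await Stock.batch_cache_bars(FrameType.MIN1, bars_by_code)

# Conceptually the cache then holds, per code, a hash such as
#   key:   "bars:1m:000001.XSHE"
#   field: the frame as an integer, e.g. 202301030931
#   value: "frame,open,high,low,close,volume,amount,factor" joined by commas
# Unclosed bars live under "bars:{frame_type.value}:unclosed", keyed by code.
```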
int2date fix_date = True recs = [] # it's possible to treat raw as csv and use pandas to parse, however, the performance is 10 times worse than this method for raw_rec in raw : if raw_rec is None : continue f , o , h , l , c , v , m , fac = raw_rec . split ( \",\" ) if fix_date : f = f [: 8 ] recs . append ( ( convert ( f ), float ( o ), float ( h ), float ( l ), float ( c ), float ( v ), float ( m ), float ( fac ), ) ) return np . array ( recs , dtype = bars_dtype ) @classmethod async def _batch_get_cached_bars_n ( cls , frame_type : FrameType , n : int , end : Frame = None , codes : List [ str ] = None ) -> BarsPanel : \"\"\"\u6279\u91cf\u83b7\u53d6\u5728cache\u4e2d\u622a\u6b62`end`\u7684`n`\u4e2abars\u3002 \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: frame_type: \u65f6\u95f4\u5e27\u7c7b\u578b n: \u8fd4\u56de\u8bb0\u5f55\u6761\u6570 codes: \u8bc1\u5238\u4ee3\u7801\u5217\u8868 end: \u622a\u6b62\u65f6\u95f4, \u5982\u679c\u4e3aNone Returns: BarsPanel: \u884c\u60c5\u6570\u636e \"\"\" # \u8c03\u7528\u8005\u81ea\u5df1\u4fdd\u8bc1end\u5728\u7f13\u5b58\u4e2d cols = list ( bars_dtype_with_code . names ) if frame_type in tf . day_level_frames : key = f \"bars: { frame_type . value } :unclosed\" if codes is None : recs = await cache . security . hgetall ( key ) codes = list ( recs . keys ()) recs = recs . values () else : recs = await cache . security . hmget ( key , * codes ) barss = cls . _deserialize_cached_bars ( recs , frame_type ) if barss . size > 0 : if len ( barss ) != len ( codes ): # issue 39, \u5982\u679c\u67d0\u652f\u7968\u5f53\u5929\u505c\u724c\uff0c\u5219\u7f13\u5b58\u4e2d\u5c06\u4e0d\u4f1a\u6709\u5b83\u7684\u8bb0\u5f55\uff0c\u6b64\u65f6\u9700\u8981\u79fb\u9664\u5176\u4ee3\u7801 codes = [ codes [ i ] for i , item in enumerate ( recs ) if item is not None ] barss = numpy_append_fields ( barss , \"code\" , codes , [( \"code\" , \"O\" )]) return barss [ cols ] . astype ( bars_dtype_with_code ) else : return np . array ([], dtype = bars_dtype_with_code ) else : end = end or datetime . datetime . now () close_end = tf . floor ( end , frame_type ) all_bars = [] if codes is None : keys = await cache . security . keys ( f \"bars: { frame_type . value } :*[^unclosed]\" ) codes = [ key . split ( \":\" )[ - 1 ] for key in keys ] else : keys = [ f \"bars: { frame_type . value } : { code } \" for code in codes ] if frame_type != FrameType . MIN1 : unclosed = await cache . security . hgetall ( f \"bars: { frame_type . value } :unclosed\" ) else : unclosed = {} pl = cache . security . pipeline () frames = tf . get_frames_by_count ( close_end , n , frame_type ) for key in keys : pl . hmget ( key , * frames ) all_closed = await pl . execute () for code , raw in zip ( codes , all_closed ): raw . append ( unclosed . get ( code )) barss = cls . _deserialize_cached_bars ( raw , frame_type ) barss = numpy_append_fields ( barss , \"code\" , [ code ] * len ( barss ), [( \"code\" , \"O\" )] ) barss = barss [ cols ] . astype ( bars_dtype_with_code ) all_bars . append ( barss [ barss [ \"frame\" ] <= end ][ - n :]) try : return np . concatenate ( all_bars ) except ValueError as e : logger . exception ( e ) return np . 
array ([], dtype = bars_dtype_with_code ) @classmethod async def _get_cached_bars_n ( cls , code : str , n : int , frame_type : FrameType , end : Frame = None ) -> BarsArray : \"\"\"\u4ece\u7f13\u5b58\u4e2d\u83b7\u53d6\u6307\u5b9a\u4ee3\u7801\u7684\u884c\u60c5\u6570\u636e \u5b58\u53d6\u903b\u8f91\u662f\uff0c\u4ece`end`\u6307\u5b9a\u7684\u65f6\u95f4\u5411\u524d\u53d6`n`\u6761\u8bb0\u5f55\u3002`end`\u4e0d\u5e94\u8be5\u5927\u4e8e\u5f53\u524d\u7cfb\u7edf\u65f6\u95f4\uff0c\u5e76\u4e14\u6839\u636e`end`\u548c`n`\u8ba1\u7b97\u51fa\u6765\u7684\u8d77\u59cb\u65f6\u95f4\u5e94\u8be5\u5728\u7f13\u5b58\u4e2d\u5b58\u5728\u3002\u5426\u5219\uff0c\u4e24\u79cd\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u8bb0\u5f55\u6570\u90fd\u5c06\u5c0f\u4e8e`n`\u3002 \u5982\u679c`end`\u4e0d\u5904\u4e8e`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u7ed3\u675f\u4f4d\u7f6e\uff0c\u4e14\u5c0f\u4e8e\u5f53\u524d\u5df2\u7f13\u5b58\u7684\u672a\u6536\u76d8bar\u65f6\u95f4\uff0c\u5219\u4f1a\u8fd4\u56de\u524d\u4e00\u4e2a\u5df2\u6536\u76d8\u7684\u6570\u636e\uff0c\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u4e2d\u8fd8\u5c06\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e\u3002 args: code: \u8bc1\u5238\u4ee3\u7801\uff0c\u6bd4\u5982000001.XSHE n: \u8fd4\u56de\u8bb0\u5f55\u6761\u6570 frame_type: \u5e27\u7c7b\u578b end: \u7ed3\u675f\u5e27\uff0c\u5982\u679c\u4e3aNone\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 returns: \u5143\u7d20\u7c7b\u578b\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002\u5982\u679c\u6ca1\u6709\u6570\u636e\uff0c\u5219\u8fd4\u56de\u7a7andarray\u3002 \"\"\" # 50 times faster than arrow.now().floor('second') end = end or datetime . datetime . now () . replace ( second = 0 , microsecond = 0 ) if frame_type in tf . minute_level_frames : cache_start = tf . first_min_frame ( end . date (), frame_type ) closed = tf . floor ( end , frame_type ) frames = ( tf . get_frames ( cache_start , closed , frame_type ))[ - n :] if len ( frames ) == 0 : recs = np . empty ( shape = ( 0 ,), dtype = bars_dtype ) else : key = f \"bars: { frame_type . value } : { code } \" recs = await cache . security . hmget ( key , * frames ) recs = cls . _deserialize_cached_bars ( recs , frame_type ) if closed < end : # for unclosed key = f \"bars: { frame_type . value } :unclosed\" unclosed = await cache . security . hget ( key , code ) unclosed = cls . _deserialize_cached_bars ([ unclosed ], frame_type ) if len ( unclosed ) == 0 : return recs [ - n :] if end < unclosed [ 0 ][ \"frame\" ] . item (): # \u5982\u679cunclosed\u4e3a9:36, \u8c03\u7528\u8005\u8981\u6c42\u53d69:29\u76845m\u6570\u636e\uff0c\u5219\u53d6\u5230\u7684unclosed\u4e0d\u5408\u8981\u6c42\uff0c\u629b\u5f03\u3002\u4f3c\u4e4e\u6ca1\u6709\u66f4\u597d\u7684\u65b9\u6cd5\u68c0\u6d4bend\u4e0eunclosed\u7684\u5173\u7cfb return recs [ - n :] else : bars = np . concatenate (( recs , unclosed )) return bars [ - n :] else : return recs [ - n :] else : # \u65e5\u7ebf\u53ca\u4ee5\u4e0a\u7ea7\u522b\uff0c\u4ec5\u5728\u7f13\u5b58\u4e2d\u5b58\u5728\u672a\u6536\u76d8\u6570\u636e key = f \"bars: { frame_type . value } :unclosed\" rec = await cache . security . hget ( key , code ) return cls . 
_deserialize_cached_bars ([ rec ], frame_type ) @classmethod async def cache_bars ( cls , code : str , frame_type : FrameType , bars : BarsArray ): \"\"\"\u5c06\u5f53\u671f\u5df2\u6536\u76d8\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58 Note: \u5f53\u524d\u53ea\u7f13\u5b581\u5206\u949f\u6570\u636e\u3002\u5176\u5b83\u5206\u949f\u6570\u636e\uff0c\u90fd\u5728\u8c03\u7528\u65f6\uff0c\u901a\u8fc7resample\u4e34\u65f6\u5408\u6210\u3002 \u884c\u60c5\u6570\u636e\u7f13\u5b58\u5728\u4ee5`bars:{frame_type.value}:{code}`\u4e3akey, {frame}\u4e3afield\u7684hashmap\u4e2d\u3002 Args: code: the full qualified code of a security or index frame_type: frame type of the bars bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype` Raises: RedisError: if redis operation failed, see documentation of aioredis \"\"\" # \u8f6c\u6362\u65f6\u95f4\u4e3aint convert = tf . time2int if frame_type in tf . minute_level_frames else tf . date2int key = f \"bars: { frame_type . value } : { code } \" pl = cache . security . pipeline () for bar in bars : val = [ * bar ] val [ 0 ] = convert ( bar [ \"frame\" ] . item ()) pl . hset ( key , val [ 0 ], \",\" . join ( map ( str , val ))) await pl . execute () @classmethod async def cache_unclosed_bars ( cls , code : str , frame_type : FrameType , bars : BarsArray ): # pragma: no cover \"\"\"\u5c06\u672a\u7ed3\u675f\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58 \u672a\u7ed3\u675f\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58\u5728\u4ee5`bars:{frame_type.value}:unclosed`\u4e3akey, {code}\u4e3afield\u7684hashmap\u4e2d\u3002 \u5c3d\u7ba1`bars`\u88ab\u58f0\u660e\u4e3aBarsArray\uff0c\u4f46\u5b9e\u9645\u4e0a\u5e94\u8be5\u53ea\u5305\u542b\u4e00\u4e2a\u5143\u7d20\u3002 Args: code: the full qualified code of a security or index frame_type: frame type of the bars bars: the bars to cache, which is a numpy array of dtype `coretypes.bars_dtype` Raises: RedisError: if redis operation failed, see documentation of aioredis \"\"\" converter = tf . time2int if frame_type in tf . minute_level_frames else tf . date2int assert len ( bars ) == 1 , \"unclosed bars should only have one record\" key = f \"bars: { frame_type . value } :unclosed\" bar = bars [ 0 ] val = [ * bar ] val [ 0 ] = converter ( bar [ \"frame\" ] . item ()) await cache . security . hset ( key , code , \",\" . join ( map ( str , val ))) @classmethod async def persist_bars ( cls , frame_type : FrameType , bars : Union [ Dict [ str , BarsArray ], BarsArray , pd . DataFrame ], ): \"\"\"\u5c06\u884c\u60c5\u6570\u636e\u6301\u4e45\u5316 \u5982\u679c`bars`\u7c7b\u578b\u4e3aDict,\u5219key\u4e3a`code`\uff0cvalue\u4e3a`bars`\u3002\u5982\u679c\u5176\u7c7b\u578b\u4e3aBarsArray\u6216\u8005pd.DataFrame\uff0c\u5219`bars`\u5404\u5217\u5b57\u6bb5\u5e94\u8be5\u4e3a`coretypes.bars_dtype` + (\"code\", \"O\")\u6784\u6210\u3002 Args: frame_type: the frame type of the bars bars: the bars to be persisted Raises: InfluxDBWriteError: if influxdb write failed \"\"\" client = get_influx_client () measurement = cls . _measurement_name ( frame_type ) logger . info ( \"persisting bars to influxdb: %s , %d secs\" , measurement , len ( bars )) if isinstance ( bars , dict ): for code , value in bars . items (): await client . save ( value , measurement , global_tags = { \"code\" : code }, time_key = \"frame\" ) else : await client . 
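A sketch of persisting and caching closed bars. The payload shape follows the docstrings above (for `persist_bars`, either a `{code: bars}` dict or an array/DataFrame carrying a "code" column); `bars_1m` is an assumed variable, and the calls must run inside an async context.

```
from coretypes import FrameType
from omicron.models.stock import Stock

# dict form: each key is written to influxdb as the `code` tag
await Stock.persist_bars(FrameType.MIN1, {"000001.XSHE": bars_1m})

# cache the same closed bars for intraday reads
await Stock.cache_bars("000001.XSHE", FrameType.MIN1, bars_1m)
```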
save ( bars , measurement , tag_keys = [ \"code\" ], time_key = \"frame\" ) @classmethod def resample ( cls , bars : BarsArray , from_frame : FrameType , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06\u539f\u6765\u4e3a`from_frame`\u7684\u884c\u60c5\u6570\u636e\u8f6c\u6362\u4e3a`to_frame`\u7684\u884c\u60c5\u6570\u636e \u5982\u679c`to_frame`\u4e3a\u65e5\u7ebf\u6216\u8005\u5206\u949f\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u5206\u949f\u7ebf\uff1b\u5982\u679c`to_frame`\u4e3a\u5468\u4ee5\u4e0a\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u65e5\u7ebf\u3002\u5176\u5b83\u7ea7\u522b\u4e4b\u95f4\u7684\u8f6c\u6362\u4e0d\u652f\u6301\u3002 \u5982\u679c`from_frame`\u4e3a1\u5206\u949f\u7ebf\uff0c\u5219\u5fc5\u987b\u4ece9\uff1a31\u8d77\u3002 Args: bars (BarsArray): \u884c\u60c5\u6570\u636e from_frame (FrameType): \u8f6c\u6362\u524d\u7684FrameType to_frame (FrameType): \u8f6c\u6362\u540e\u7684FrameType Returns: BarsArray: \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e \"\"\" if from_frame == FrameType . MIN1 : return cls . _resample_from_min1 ( bars , to_frame ) elif from_frame == FrameType . DAY : # pragma: no cover return cls . _resample_from_day ( bars , to_frame ) else : # pragma: no cover raise TypeError ( f \"unsupported from_frame: { from_frame } \" ) @classmethod def _measurement_name ( cls , frame_type ): return f \"stock_bars_ { frame_type . value } \" @classmethod def _resample_from_min1 ( cls , bars : BarsArray , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06`bars`\u4ece1\u5206\u949f\u7ebf\u8f6c\u6362\u4e3a`to_frame`\u7684\u884c\u60c5\u6570\u636e \u91cd\u91c7\u6837\u540e\u7684\u6570\u636e\u53ea\u5305\u542bframe, open, high, low, close, volume, amount, factor\uff0c\u65e0\u8bba\u4f20\u5165\u6570\u636e\u662f\u5426\u8fd8\u6709\u522b\u7684\u5b57\u6bb5\uff0c\u5b83\u4eec\u90fd\u5c06\u88ab\u4e22\u5f03\u3002 resampling 240\u6839\u5206\u949f\u7ebf\u52305\u5206\u949f\u5927\u7ea6\u9700\u8981100\u5fae\u79d2\u3002 TODO\uff1a \u5982\u679c`bars`\u4e2d\u5305\u542bnan\u600e\u4e48\u5904\u7406\uff1f \"\"\" if bars [ 0 ][ \"frame\" ] . item () . minute != 31 : raise ValueError ( \"resampling from 1min must start from 9:31\" ) if to_frame not in ( FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , FrameType . DAY , ): raise ValueError ( f \"unsupported to_frame: { to_frame } \" ) bins_len = { FrameType . MIN5 : 5 , FrameType . MIN15 : 15 , FrameType . MIN30 : 30 , FrameType . MIN60 : 60 , FrameType . DAY : 240 , }[ to_frame ] bins = len ( bars ) // bins_len npart1 = bins * bins_len part1 = bars [: npart1 ] . reshape (( - 1 , bins_len )) part2 = bars [ npart1 :] open_pos = np . arange ( bins ) * bins_len close_pos = np . arange ( 1 , bins + 1 ) * bins_len - 1 if len ( bars ) > bins_len * bins : close_pos = np . append ( close_pos , len ( bars ) - 1 ) resampled = np . empty (( bins + 1 ,), dtype = bars_dtype ) else : resampled = np . empty (( bins ,), dtype = bars_dtype ) resampled [: bins ][ \"open\" ] = bars [ open_pos ][ \"open\" ] resampled [: bins ][ \"high\" ] = np . max ( part1 [ \"high\" ], axis = 1 ) resampled [: bins ][ \"low\" ] = np . min ( part1 [ \"low\" ], axis = 1 ) resampled [: bins ][ \"volume\" ] = np . sum ( part1 [ \"volume\" ], axis = 1 ) resampled [: bins ][ \"amount\" ] = np . sum ( part1 [ \"amount\" ], axis = 1 ) if len ( part2 ): resampled [ - 1 ][ \"open\" ] = part2 [ \"open\" ][ 0 ] resampled [ - 1 ][ \"high\" ] = np . max ( part2 [ \"high\" ]) resampled [ - 1 ][ \"low\" ] = np . 
min ( part2 [ \"low\" ]) resampled [ - 1 ][ \"volume\" ] = np . sum ( part2 [ \"volume\" ]) resampled [ - 1 ][ \"amount\" ] = np . sum ( part2 [ \"amount\" ]) cols = [ \"frame\" , \"close\" , \"factor\" ] resampled [ cols ] = bars [ close_pos ][ cols ] if to_frame == FrameType . DAY : resampled [ \"frame\" ] = bars [ - 1 ][ \"frame\" ] . item () . date () return resampled @classmethod def _resample_from_day ( cls , bars : BarsArray , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06`bars`\u4ece\u65e5\u7ebf\u8f6c\u6362\u6210`to_frame`\u7684\u884c\u60c5\u6570\u636e Args: bars (BarsArray): [description] to_frame (FrameType): [description] Returns: \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e \"\"\" rules = { \"frame\" : \"last\" , \"open\" : \"first\" , \"high\" : \"max\" , \"low\" : \"min\" , \"close\" : \"last\" , \"volume\" : \"sum\" , \"amount\" : \"sum\" , \"factor\" : \"last\" , } if to_frame == FrameType . WEEK : freq = \"W-Fri\" elif to_frame == FrameType . MONTH : freq = \"M\" elif to_frame == FrameType . QUARTER : freq = \"Q\" elif to_frame == FrameType . YEAR : freq = \"A\" else : raise ValueError ( f \"unsupported to_frame: { to_frame } \" ) df = pd . DataFrame ( bars ) df . index = pd . to_datetime ( bars [ \"frame\" ]) df = df . resample ( freq ) . agg ( rules ) bars = np . array ( df . to_records ( index = False ), dtype = bars_dtype ) # filter out data like (None, nan, ...) return bars [ np . isfinite ( bars [ \"close\" ])] @classmethod async def _get_price_limit_in_cache ( cls , code : str , begin : datetime . date , end : datetime . date ): date_str = await cache . _security_ . get ( TRADE_PRICE_LIMITS_DATE ) if date_str : date_in_cache = arrow . get ( date_str ) . date () if date_in_cache < begin or date_in_cache > end : return None else : return None dtype = [( \"frame\" , \"O\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" )] hp = await cache . _security_ . hget ( TRADE_PRICE_LIMITS , f \" { code } .high_limit\" ) lp = await cache . _security_ . hget ( TRADE_PRICE_LIMITS , f \" { code } .low_limit\" ) if hp is None or lp is None : return None else : return np . array ([( date_in_cache , hp , lp )], dtype = dtype ) @classmethod async def get_trade_price_limits ( cls , code : str , begin : Frame , end : Frame ) -> BarsArray : \"\"\"\u4eceinfluxdb\u548ccache\u4e2d\u83b7\u53d6\u4e2a\u80a1\u5728[begin, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u4ef7\u3002 \u6da8\u8dcc\u505c\u4ef7\u53ea\u6709\u65e5\u7ebf\u6570\u636e\u624d\u6709\uff0c\u56e0\u6b64\uff0cFrameType\u56fa\u5b9a\u4e3aFrameType.DAY\uff0c \u5f53\u5929\u7684\u6570\u636e\u5b58\u653e\u4e8eredis\uff0c\u5982\u679c\u67e5\u8be2\u65e5\u671f\u5305\u542b\u5f53\u5929\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u4ececache\u4e2d\u8bfb\u53d6\u5e76\u8ffd\u52a0\u5230\u7ed3\u679c\u4e2d Args: code : \u4e2a\u80a1\u4ee3\u7801 begin : \u5f00\u59cb\u65e5\u671f end : \u7ed3\u675f\u65e5\u671f Returns: dtype\u4e3a[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]\u7684numpy\u6570\u7ec4 \"\"\" cols = [ \"_time\" , \"high_limit\" , \"low_limit\" ] dtype = [( \"frame\" , \"O\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" )] if isinstance ( begin , datetime . datetime ): begin = begin . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate if isinstance ( end , datetime . datetime ): end = end . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate data_in_cache = await cls . _get_price_limit_in_cache ( code , begin , end ) client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . 
bucket ( client . _bucket ) . measurement ( measurement ) . range ( begin , end ) . tags ({ \"code\" : code }) . fields ( cols ) . sort ( \"_time\" ) ) ds = NumpyDeserializer ( dtype , use_cols = cols , converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if data_in_cache : result = np . concatenate ([ result , data_in_cache ]) return result @classmethod async def reset_price_limits_cache ( cls , cache_only : bool , dt : datetime . date = None ): if cache_only is False : date_str = await cache . _security_ . get ( TRADE_PRICE_LIMITS_DATE ) if not date_str : return # skip clear action if date not found in cache date_in_cache = arrow . get ( date_str ) . date () if dt is None or date_in_cache != dt : # \u66f4\u65b0\u7684\u65f6\u95f4\u548ccache\u7684\u65f6\u95f4\u76f8\u540c\uff0c\u5219\u6e05\u9664cache return # skip clear action await cache . _security_ . delete ( TRADE_PRICE_LIMITS ) await cache . _security_ . delete ( TRADE_PRICE_LIMITS_DATE ) @classmethod async def save_trade_price_limits ( cls , price_limits : LimitPriceOnlyBarsArray , to_cache : bool ): \"\"\"\u4fdd\u5b58\u6da8\u8dcc\u505c\u4ef7 Args: price_limits: \u8981\u4fdd\u5b58\u7684\u6da8\u8dcc\u505c\u4ef7\u683c\u6570\u636e\u3002 to_cache: \u662f\u4fdd\u5b58\u5230\u7f13\u5b58\u4e2d\uff0c\u8fd8\u662f\u4fdd\u5b58\u5230\u6301\u4e45\u5316\u5b58\u50a8\u4e2d \"\"\" if len ( price_limits ) == 0 : return if to_cache : # \u6bcf\u4e2a\u4ea4\u6613\u65e5\u4e0a\u53489\u70b9\u66f4\u65b0\u4e24\u6b21 pl = cache . _security_ . pipeline () for row in price_limits : # .item convert np.float64 to python float pl . hset ( TRADE_PRICE_LIMITS , f \" { row [ 'code' ] } .high_limit\" , row [ \"high_limit\" ] . item (), ) pl . hset ( TRADE_PRICE_LIMITS , f \" { row [ 'code' ] } .low_limit\" , row [ \"low_limit\" ] . item (), ) dt = price_limits [ - 1 ][ \"frame\" ] pl . set ( TRADE_PRICE_LIMITS_DATE , dt . strftime ( \"%Y-%m- %d \" )) await pl . execute () else : # to influxdb\uff0c \u6bcf\u4e2a\u4ea4\u6613\u65e5\u7684\u7b2c\u4e8c\u5929\u65e9\u4e0a2\u70b9\u4fdd\u5b58 client = get_influx_client () await client . save ( price_limits , cls . _measurement_name ( FrameType . DAY ), tag_keys = \"code\" , time_key = \"frame\" , ) @classmethod async def trade_price_limit_flags ( cls , code : str , start : datetime . date , end : datetime . date ) -> Tuple [ List [ bool ]]: \"\"\"\u83b7\u53d6\u4e2a\u80a1\u5728[start, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u672c\u51fd\u6570\u8fd4\u56de\u7684\u5e8f\u5217\u5728\u80a1\u7968\u6709\u505c\u724c\u7684\u60c5\u51b5\u4e0b\uff0c\u5c06\u4e0d\u80fd\u4e0e[start, end]\u4e00\u4e00\u5bf9\u5e94\u3002 Args: code: \u4e2a\u80a1\u4ee3\u7801 start: \u5f00\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u6da8\u8dcc\u505c\u6807\u5fd7\u5217\u8868(buy, sell) \"\"\" cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ] client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . bucket ( client . _bucket ) . measurement ( measurement ) . range ( start , end ) . tags ({ \"code\" : code }) . fields ( cols ) . 
sort ( \"_time\" ) ) dtype = [ ( \"frame\" , \"O\" ), ( \"close\" , \"f4\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" ), ] ds = NumpyDeserializer ( dtype , use_cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ], converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if result . size == 0 : return np . array ([], dtype = dtype ) return ( array_price_equal ( result [ \"close\" ], result [ \"high_limit\" ]), array_price_equal ( result [ \"close\" ], result [ \"low_limit\" ]), ) @classmethod async def trade_price_limit_flags_ex ( cls , code : str , start : datetime . date , end : datetime . date ) -> Dict [ datetime . date , Tuple [ bool , bool ]]: \"\"\"\u83b7\u53d6\u80a1\u7968`code`\u5728`[start, end]`\u533a\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u5982\u679cend\u4e3a\u5f53\u5929\uff0c\u6ce8\u610f\u5728\u672a\u6536\u76d8\u4e4b\u524d\uff0c\u8fd9\u4e2a\u6da8\u8dcc\u505c\u6807\u5fd7\u90fd\u662f\u4e0d\u7a33\u5b9a\u7684 Args: code: \u80a1\u7968\u4ee3\u7801 start: \u8d77\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u4ee5\u65e5\u671f\u4e3akey\uff0c\uff08\u6da8\u505c\uff0c\u8dcc\u505c\uff09\u4e3a\u503c\u7684dict \"\"\" limit_prices = await cls . get_trade_price_limits ( code , start , end ) bars = await Stock . get_bars_in_range ( code , FrameType . DAY , start = start , end = end , fq = False ) close = bars [ \"close\" ] results = {} # aligned = True for i in range ( len ( bars )): if bars [ i ][ \"frame\" ] . item () . date () != limit_prices [ i ][ \"frame\" ]: # aligned = False logger . warning ( \"\u6570\u636e\u540c\u6b65\u9519\u8bef\uff0c\u6da8\u8dcc\u505c\u4ef7\u683c\u4e0e\u6536\u76d8\u4ef7\u65f6\u95f4\u4e0d\u4e00\u81f4: %s , %s \" , code , bars [ i ][ \"frame\" ]) break results [ limit_prices [ i ][ \"frame\" ]] = ( price_equal ( limit_prices [ i ][ \"high_limit\" ], close [ i ]), price_equal ( limit_prices [ i ][ \"low_limit\" ], close [ i ]), ) # if not aligned: # bars = bars[i:] # limit_prices = limit_prices[i:] # for frame in bars[\"frame\"]: # frame = frame.item().date() # close = bars[bars[\"frame\"].item().date() == frame][\"close\"].item() # high = limit_prices[limit_prices[\"frame\"] == frame][\"high_limit\"].item() # low = limit_prices[limit_prices[\"frame\"] == frame][\"low_limit\"].item() # results[frame] = ( # price_equal(high, close), # price_equal(low, close) # ) return results @classmethod async def get_latest_price ( cls , codes : Iterable [ str ]) -> List [ str ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\u7684\u6700\u65b0\u4ef7\u683c\uff08\u4ea4\u6613\u65e5\u5f53\u5929\uff09\uff0c\u6682\u4e0d\u5305\u62ec\u6307\u6570 \u4ef7\u683c\u6570\u636e\u6bcf5\u79d2\u66f4\u65b0\u4e00\u6b21\uff0c\u63a5\u53d7\u591a\u53ea\u80a1\u7968\u67e5\u8be2\uff0c\u8fd4\u56de\u6700\u540e\u7f13\u5b58\u7684\u4ef7\u683c Args: codes: \u4ee3\u7801\u5217\u8868 Returns: \u8fd4\u56de\u4e00\u4e2aList\uff0c\u4ef7\u683c\u662f\u5b57\u7b26\u5f62\u5f0f\u7684\u6d6e\u70b9\u6570\u3002 \"\"\" if not codes : return [] _raw_code_list = [] for code_str in codes : code , _ = code_str . split ( \".\" ) _raw_code_list . append ( code ) _converted_data = [] raw_data = await cache . feature . hmget ( TRADE_LATEST_PRICE , * _raw_code_list ) for _data in raw_data : if _data is None : _converted_data . append ( _data ) else : _converted_data . 
append ( float ( _data )) return _converted_data","title":"Stock"},{"location":"api/stock/#omicron.models.stock.Stock.security_type","text":"\u8fd4\u56de\u8bc1\u5238\u7c7b\u578b Returns: Type Description SecurityType [description]","title":"security_type"},{"location":"api/stock/#omicron.models.stock.Stock.batch_cache_bars","text":"\u7f13\u5b58\u5df2\u6536\u76d8\u7684\u5206\u949f\u7ebf\u548c\u65e5\u7ebf \u5f53\u7f13\u5b58\u65e5\u7ebf\u65f6\uff0c\u4ec5\u9650\u4e8e\u5f53\u65e5\u6536\u76d8\u540e\u7684\u7b2c\u4e00\u6b21\u540c\u6b65\u65f6\u8c03\u7528\u3002 Parameters: Name Type Description Default frame_type FrameType \u5e27\u7c7b\u578b required bars Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u65e5\u7ebf\u7ea7\u522b\uff08\u53731d, 1w, 1M)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_day_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" today = datetime . datetime . now () . date () # \u65e5\u7ebf\uff0cend\u4e0d\u7b49\u4e8e\u6700\u540e\u4ea4\u6613\u65e5\uff0c\u6b64\u65f6\u5df2\u65e0\u7f13\u5b58 if frame_type == FrameType . DAY and end == tf . floor ( today , frame_type ): from_cache = True elif frame_type != FrameType . DAY and start > tf . floor ( today , frame_type ): from_cache = True else : from_cache = False n = tf . count_frames ( start , end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( max_query_size // n , 1 ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] persisted = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) if from_cache : cached = await cls . _batch_get_cached_bars_n ( frame_type , 1 , end , batch_codes ) cached = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ persisted , cached ]) else : df = persisted for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . 
qfq ( bars ) yield code , bars","title":"batch_get_day_level_bars_in_range()"},{"location":"api/stock/#omicron.models.stock.Stock.batch_get_min_level_bars_in_range","text":"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u5206\u949f\u7ea7\u522b\uff08\u53731m, 5m, 15m, 30m\u548c60m)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1 get_bars \u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a 1 2 async for code, bars in Stock.batch_get_min_level_bars_in_range(...): print(code, bars) \u5982\u679c end \u4e0d\u5728 frame_type \u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c end \u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230 tf.floor(end, frame_type) \u3002 Parameters: Name Type Description Default codes List[str] \u80a1\u7968/\u6307\u6570\u4ee3\u7801\u5217\u8868 required frame_type FrameType \u5e27\u7c7b\u578b required start Union[datetime.date, datetime.datetime] \u8d77\u59cb\u65f6\u95f4 required end Union[datetime.date, datetime.datetime] \u7ed3\u675f\u65f6\u95f4\u3002\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 required fq bool \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. True Returns: Type Description Generator[Dict[str, BarsArray], None, None] \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e Source code in omicron/models/stock.py @classmethod async def batch_get_min_level_bars_in_range ( cls , codes : List [ str ], frame_type : FrameType , start : Frame , end : Frame , fq : bool = True , ) -> Generator [ Dict [ str , BarsArray ], None , None ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\uff08\u6307\u6570\uff09\u5728[start, end)\u65f6\u95f4\u6bb5\u5185\u7684\u884c\u60c5\u6570\u636e \u5982\u679c\u8981\u83b7\u53d6\u7684\u884c\u60c5\u6570\u636e\u662f\u5206\u949f\u7ea7\u522b\uff08\u53731m, 5m, 15m, 30m\u548c60m)\uff0c\u4f7f\u7528\u672c\u63a5\u53e3\u3002 \u505c\u724c\u6570\u636e\u5904\u7406\u8bf7\u89c1[get_bars][omicron.models.stock.Stock.get_bars]\u3002 \u672c\u51fd\u6570\u8fd4\u56de\u4e00\u4e2a\u8fed\u4ee3\u5668\uff0c\u4f7f\u7528\u65b9\u6cd5\u793a\u4f8b\uff1a ``` async for code, bars in Stock.batch_get_min_level_bars_in_range(...): print(code, bars) ``` \u5982\u679c`end`\u4e0d\u5728`frame_type`\u6240\u5c5e\u7684\u8fb9\u754c\u70b9\u4e0a\uff0c\u90a3\u4e48\uff0c\u5982\u679c`end`\u5927\u4e8e\u7b49\u4e8e\u5f53\u524d\u7f13\u5b58\u672a\u6536\u76d8\u6570\u636e\u65f6\u95f4\uff0c\u5219\u5c06\u5305\u542b\u672a\u6536\u76d8\u6570\u636e\uff1b\u5426\u5219\uff0c\u8fd4\u56de\u7684\u8bb0\u5f55\u5c06\u622a\u6b62\u5230`tf.floor(end, frame_type)`\u3002 Args: codes: \u80a1\u7968/\u6307\u6570\u4ee3\u7801\u5217\u8868 frame_type: \u5e27\u7c7b\u578b start: \u8d77\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4\u3002\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4\u3002 fq: \u662f\u5426\u8fdb\u884c\u590d\u6743\uff0c\u5982\u679c\u662f\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. 
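The day-level batch interface documented above is an async generator, so a short usage sketch may help. This is a minimal sketch only: it assumes omicron has already been initialized elsewhere (cache and influxdb connections set up) and that `FrameType` is importable from `coretypes` as the type annotations suggest; the security codes and dates are placeholders.

```
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock


async def fetch_daily_closes(codes):
    start = datetime.date(2023, 1, 4)
    end = datetime.date(2023, 1, 31)

    closes = {}
    # async generator: yields (code, bars) pairs, forward-adjusted since fq defaults to True
    async for code, bars in Stock.batch_get_day_level_bars_in_range(
        codes, FrameType.DAY, start, end
    ):
        closes[code] = bars["close"]
    return closes
```

Week (1w) and month (1M) frames go through this same interface; minute-level frames use the companion method shown next.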
Returns: Generator[Dict[str, BarsArray], None, None]: \u8fed\u4ee3\u5668\uff0c\u6bcf\u6b21\u8fd4\u56de\u4e00\u4e2a\u5b57\u5178\uff0c\u5176\u4e2dkey\u4e3a\u4ee3\u7801\uff0cvalue\u4e3a\u884c\u60c5\u6570\u636e \"\"\" closed_end = tf . floor ( end , frame_type ) n = tf . count_frames ( start , closed_end , frame_type ) max_query_size = min ( cfg . influxdb . max_query_size , INFLUXDB_MAX_QUERY_SIZE ) batch_size = max ( 1 , max_query_size // n ) ff = tf . first_min_frame ( datetime . datetime . now (), frame_type ) for i in range ( 0 , len ( codes ), batch_size ): batch_codes = codes [ i : i + batch_size ] if end < ff : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , end ) part2 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) elif start >= ff : part1 = pd . DataFrame ([], columns = bars_dtype_with_code . names ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) cached = cached [ cached [ \"frame\" ] >= start ] part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) else : part1 = await cls . _batch_get_persisted_bars_in_range ( batch_codes , frame_type , start , ff ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 cached = await cls . _batch_get_cached_bars_n ( frame_type , n , end , batch_codes ) part2 = pd . DataFrame ( cached , columns = bars_dtype_with_code . names ) df = pd . concat ([ part1 , part2 ]) for code in batch_codes : filtered = df [ df [ \"code\" ] == code ][ bars_cols ] bars = filtered . to_records ( index = False ) . astype ( bars_dtype ) if fq : bars = cls . qfq ( bars ) yield code , bars","title":"batch_get_min_level_bars_in_range()"},{"location":"api/stock/#omicron.models.stock.Stock.cache_bars","text":"\u5c06\u5f53\u671f\u5df2\u6536\u76d8\u7684\u884c\u60c5\u6570\u636e\u7f13\u5b58 Note \u5f53\u524d\u53ea\u7f13\u5b581\u5206\u949f\u6570\u636e\u3002\u5176\u5b83\u5206\u949f\u6570\u636e\uff0c\u90fd\u5728\u8c03\u7528\u65f6\uff0c\u901a\u8fc7resample\u4e34\u65f6\u5408\u6210\u3002 \u884c\u60c5\u6570\u636e\u7f13\u5b58\u5728\u4ee5 bars:{frame_type.value}:{code} \u4e3akey, {frame}\u4e3afield\u7684hashmap\u4e2d\u3002 Parameters: Name Type Description Default code str the full qualified code of a security or index required frame_type FrameType frame type of the bars required bars numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' int : \"\"\"\u83b7\u53d6\u4e0a\u5e02\u4ee5\u6765\u7ecf\u8fc7\u4e86\u591a\u5c11\u4e2a\u4ea4\u6613\u65e5 \u7531\u4e8e\u53d7\u4ea4\u6613\u65e5\u5386\u9650\u5236\uff082005\u5e741\u67084\u65e5\u4e4b\u524d\u7684\u4ea4\u6613\u65e5\u5386\u6ca1\u6709\uff09\uff0c\u5bf9\u4e8e\u5728\u4e4b\u524d\u4e0a\u5e02\u7684\u54c1\u79cd\uff0c\u90fd\u8fd4\u56de\u4ece2005\u5e741\u67084\u65e5\u8d77\u7684\u65e5\u671f\u3002 Returns: int: [description] \"\"\" epoch_start = arrow . get ( \"2005-01-04\" ) . date () ipo_day = self . ipo_date if self . ipo_date > epoch_start else epoch_start return tf . count_day_frames ( ipo_day , arrow . now () . 
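The minute-level counterpart follows the same pattern. A sketch under the same assumptions as the previous one (initialized omicron, placeholder codes); only the frame type and the datetime-typed boundaries differ.

```
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock


async def fetch_30m_bars(codes):
    start = datetime.datetime(2023, 1, 4, 10, 0)
    end = datetime.datetime(2023, 1, 6, 15, 0)

    result = {}
    # 1m/5m/15m/30m/60m all go through this batch interface
    async for code, bars in Stock.batch_get_min_level_bars_in_range(
        codes, FrameType.MIN30, start, end, fq=True
    ):
        result[code] = bars
    return result
```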
date ())","title":"days_since_ipo()"},{"location":"api/stock/#omicron.models.stock.Stock.format_code","text":"\u65b0\u4e09\u677f\u548c\u5317\u4ea4\u6240\u7684\u80a1\u7968, \u6682\u4e0d\u652f\u6301, \u9ed8\u8ba4\u8fd4\u56deNone \u4e0a\u8bc1A\u80a1: 600\u3001601\u3001603\u3001605 \u6df1\u8bc1A\u80a1: 000\u3001001 \u4e2d\u5c0f\u677f: 002\u3001003 \u521b\u4e1a\u677f: 300/301 \u79d1\u521b\u677f: 688 \u65b0\u4e09\u677f: 82\u300183\u300187\u300188\u3001430\u3001420\u3001400 \u5317\u4ea4\u6240: 43\u300183\u300187\u300188 Source code in omicron/models/stock.py @staticmethod def format_code ( code ) -> str : \"\"\"\u65b0\u4e09\u677f\u548c\u5317\u4ea4\u6240\u7684\u80a1\u7968, \u6682\u4e0d\u652f\u6301, \u9ed8\u8ba4\u8fd4\u56deNone \u4e0a\u8bc1A\u80a1: 600\u3001601\u3001603\u3001605 \u6df1\u8bc1A\u80a1: 000\u3001001 \u4e2d\u5c0f\u677f: 002\u3001003 \u521b\u4e1a\u677f: 300/301 \u79d1\u521b\u677f: 688 \u65b0\u4e09\u677f: 82\u300183\u300187\u300188\u3001430\u3001420\u3001400 \u5317\u4ea4\u6240: 43\u300183\u300187\u300188 \"\"\" if not code or len ( code ) != 6 : return None prefix = code [ 0 ] if prefix in ( \"0\" , \"3\" ): return f \" { code } .XSHE\" elif prefix == \"6\" : return f \" { code } .XSHG\" else : return None","title":"format_code()"},{"location":"api/stock/#omicron.models.stock.Stock.fuzzy_match","text":"\u5bf9\u80a1\u7968/\u6307\u6570\u8fdb\u884c\u6a21\u7cca\u5339\u914d\u67e5\u627e query\u53ef\u4ee5\u662f\u80a1\u7968/\u6307\u6570\u4ee3\u7801\uff0c\u4e5f\u53ef\u4ee5\u662f\u5b57\u6bcd\uff08\u6309name\u67e5\u627e\uff09\uff0c\u4e5f\u53ef\u4ee5\u662f\u6c49\u5b57\uff08\u6309\u663e\u793a\u540d\u67e5\u627e\uff09 Parameters: Name Type Description Default query str \u67e5\u8be2\u5b57\u7b26\u4e32 required Returns: Type Description Dict[str, Tuple] \u67e5\u8be2\u7ed3\u679c\uff0c\u5176\u4e2dTuple\u4e3a(code, display_name, name, start, end, type) Source code in omicron/models/stock.py @classmethod def fuzzy_match ( cls , query : str ) -> Dict [ str , Tuple ]: \"\"\"\u5bf9\u80a1\u7968/\u6307\u6570\u8fdb\u884c\u6a21\u7cca\u5339\u914d\u67e5\u627e query\u53ef\u4ee5\u662f\u80a1\u7968/\u6307\u6570\u4ee3\u7801\uff0c\u4e5f\u53ef\u4ee5\u662f\u5b57\u6bcd\uff08\u6309name\u67e5\u627e\uff09\uff0c\u4e5f\u53ef\u4ee5\u662f\u6c49\u5b57\uff08\u6309\u663e\u793a\u540d\u67e5\u627e\uff09 Args: query (str): \u67e5\u8be2\u5b57\u7b26\u4e32 Returns: Dict[str, Tuple]: \u67e5\u8be2\u7ed3\u679c\uff0c\u5176\u4e2dTuple\u4e3a(code, display_name, name, start, end, type) \"\"\" query = query . upper () if re . match ( r \"\\d+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"code\" ] . startswith ( query ) } elif re . match ( r \"[A-Z]+\" , query ): return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"name\" ] . startswith ( query ) } else : return { sec [ \"code\" ]: sec . tolist () for sec in cls . _stocks if sec [ \"alias\" ] . 
find ( query ) != - 1 }","title":"fuzzy_match()"},{"location":"api/stock/#omicron.models.stock.Stock.get_bars","text":"\u83b7\u53d6\u5230 end \u4e3a\u6b62\u7684 n \u4e2a\u884c\u60c5\u6570\u636e\u3002 \u8fd4\u56de\u7684\u6570\u636e\u662f\u6309\u7167\u65f6\u95f4\u987a\u5e8f\u9012\u589e\u6392\u5e8f\u7684\u3002\u5728\u9047\u5230\u505c\u724c\u7684\u60c5\u51b5\u65f6\uff0c\u8be5\u65f6\u6bb5\u6570\u636e\u5c06\u88ab\u8df3\u8fc7\uff0c\u56e0\u6b64\u8fd4\u56de\u7684\u8bb0\u5f55\u53ef\u80fd\u4e0d\u662f\u4ea4\u6613\u65e5\u8fde\u7eed\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u4e0d\u8db3 n \u4e2a\u3002 \u5982\u679c\u7cfb\u7edf\u5f53\u524d\u6ca1\u6709\u5230\u6307\u5b9a\u65f6\u95f4 end \u7684\u6570\u636e\uff0c\u5c06\u5c3d\u6700\u5927\u52aa\u529b\u8fd4\u56de\u6570\u636e\u3002\u8c03\u7528\u8005\u53ef\u4ee5\u901a\u8fc7\u5224\u65ad\u6700\u540e\u4e00\u6761\u6570\u636e\u7684\u65f6\u95f4\u662f\u5426\u7b49\u4e8e end \u6765\u5224\u65ad\u662f\u5426\u83b7\u53d6\u5230\u4e86\u5168\u90e8\u6570\u636e\u3002 Parameters: Name Type Description Default code str \u8bc1\u5238\u4ee3\u7801 required n int \u8bb0\u5f55\u6570 required frame_type FrameType \u5e27\u7c7b\u578b required end Union[datetime.date, datetime.datetime] \u622a\u6b62\u65f6\u95f4,\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 None fq \u662f\u5426\u5bf9\u8fd4\u56de\u8bb0\u5f55\u8fdb\u884c\u590d\u6743\u3002\u5982\u679c\u4e3a True \u7684\u8bdd\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. True unclosed \u662f\u5426\u5305\u542b\u6700\u65b0\u672a\u6536\u76d8\u7684\u6570\u636e\uff1f Defaults to True. True Returns: Type Description numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' BarsArray : \"\"\"\u83b7\u53d6\u5230`end`\u4e3a\u6b62\u7684`n`\u4e2a\u884c\u60c5\u6570\u636e\u3002 \u8fd4\u56de\u7684\u6570\u636e\u662f\u6309\u7167\u65f6\u95f4\u987a\u5e8f\u9012\u589e\u6392\u5e8f\u7684\u3002\u5728\u9047\u5230\u505c\u724c\u7684\u60c5\u51b5\u65f6\uff0c\u8be5\u65f6\u6bb5\u6570\u636e\u5c06\u88ab\u8df3\u8fc7\uff0c\u56e0\u6b64\u8fd4\u56de\u7684\u8bb0\u5f55\u53ef\u80fd\u4e0d\u662f\u4ea4\u6613\u65e5\u8fde\u7eed\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u4e0d\u8db3`n`\u4e2a\u3002 \u5982\u679c\u7cfb\u7edf\u5f53\u524d\u6ca1\u6709\u5230\u6307\u5b9a\u65f6\u95f4`end`\u7684\u6570\u636e\uff0c\u5c06\u5c3d\u6700\u5927\u52aa\u529b\u8fd4\u56de\u6570\u636e\u3002\u8c03\u7528\u8005\u53ef\u4ee5\u901a\u8fc7\u5224\u65ad\u6700\u540e\u4e00\u6761\u6570\u636e\u7684\u65f6\u95f4\u662f\u5426\u7b49\u4e8e`end`\u6765\u5224\u65ad\u662f\u5426\u83b7\u53d6\u5230\u4e86\u5168\u90e8\u6570\u636e\u3002 Args: code: \u8bc1\u5238\u4ee3\u7801 n: \u8bb0\u5f55\u6570 frame_type: \u5e27\u7c7b\u578b end: \u622a\u6b62\u65f6\u95f4,\u5982\u679c\u672a\u6307\u660e\uff0c\u5219\u53d6\u5f53\u524d\u65f6\u95f4 fq: \u662f\u5426\u5bf9\u8fd4\u56de\u8bb0\u5f55\u8fdb\u884c\u590d\u6743\u3002\u5982\u679c\u4e3a`True`\u7684\u8bdd\uff0c\u5219\u8fdb\u884c\u524d\u590d\u6743\u3002Defaults to True. unclosed: \u662f\u5426\u5305\u542b\u6700\u65b0\u672a\u6536\u76d8\u7684\u6570\u636e\uff1f Defaults to True. Returns: \u8fd4\u56dedtype\u4e3a`coretypes.bars_dtype`\u7684\u4e00\u7ef4numpy\u6570\u7ec4\u3002 \"\"\" now = datetime . datetime . now () try : cached = np . array ([], dtype = bars_dtype ) if frame_type in tf . day_level_frames : if end is None : end = now . date () elif type ( end ) == datetime . datetime : end = end . date () n0 = n if unclosed : cached = await cls . _get_cached_bars_n ( code , 1 , frame_type ) if cached . 
size > 0 : # \u5982\u679c\u7f13\u5b58\u7684\u672a\u6536\u76d8\u65e5\u671f > end\uff0c\u5219\u8be5\u7f13\u5b58\u4e0d\u662f\u9700\u8981\u7684 if cached [ 0 ][ \"frame\" ] . item () . date () > end : cached = np . array ([], dtype = bars_dtype ) else : n0 = n - 1 else : end = end or now closed_frame = tf . floor ( end , frame_type ) # fetch one more bar, in case we should discard unclosed bar cached = await cls . _get_cached_bars_n ( code , n + 1 , frame_type , end ) if not unclosed : cached = cached [ cached [ \"frame\" ] <= closed_frame ] # n bars we need fetch from persisted db n0 = n - cached . size if n0 > 0 : if cached . size > 0 : end0 = cached [ 0 ][ \"frame\" ] . item () else : end0 = end bars = await cls . _get_persisted_bars_n ( code , frame_type , n0 , end0 ) merged = np . concatenate (( bars , cached )) bars = merged [ - n :] else : bars = cached [ - n :] if fq : bars = cls . qfq ( bars ) return bars except Exception as e : logger . exception ( e ) logger . warning ( \"failed to get bars for %s , %s , %s , %s \" , code , n , frame_type , end ) raise","title":"get_bars()"},{"location":"api/stock/#omicron.models.stock.Stock.get_bars_in_range","text":"\u83b7\u53d6\u6307\u5b9a\u8bc1\u5238\uff08 code \uff09\u5728[ start , end ]\u671f\u95f4\u5e27\u7c7b\u578b\u4e3a frame_type \u7684\u884c\u60c5\u6570\u636e\u3002 Parameters: Name Type Description Default code \u8bc1\u5238\u4ee3\u7801 required frame_type \u884c\u60c5\u6570\u636e\u7684\u5e27\u7c7b\u578b required start \u8d77\u59cb\u65f6\u95f4 required end \u7ed3\u675f\u65f6\u95f4,\u5982\u679c\u4e3aNone\uff0c\u5219\u8868\u660e\u53d6\u5230\u5f53\u524d\u65f6\u95f4\u3002 None fq \u662f\u5426\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c True unclosed \u662f\u5426\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e True Source code in omicron/models/stock.py @classmethod async def get_bars_in_range ( cls , code : str , frame_type : FrameType , start : Frame , end : Frame = None , fq = True , unclosed = True , ) -> BarsArray : \"\"\"\u83b7\u53d6\u6307\u5b9a\u8bc1\u5238\uff08`code`\uff09\u5728[`start`, `end`]\u671f\u95f4\u5e27\u7c7b\u578b\u4e3a`frame_type`\u7684\u884c\u60c5\u6570\u636e\u3002 Args: code : \u8bc1\u5238\u4ee3\u7801 frame_type : \u884c\u60c5\u6570\u636e\u7684\u5e27\u7c7b\u578b start : \u8d77\u59cb\u65f6\u95f4 end : \u7ed3\u675f\u65f6\u95f4,\u5982\u679c\u4e3aNone\uff0c\u5219\u8868\u660e\u53d6\u5230\u5f53\u524d\u65f6\u95f4\u3002 fq : \u662f\u5426\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c unclosed : \u662f\u5426\u5305\u542b\u672a\u6536\u76d8\u7684\u6570\u636e \"\"\" now = datetime . datetime . now () if frame_type in tf . day_level_frames : end = end or now . date () if unclosed and tf . day_shift ( end , 0 ) == now . date (): part2 = await cls . _get_cached_bars_n ( code , 1 , frame_type ) else : part2 = np . array ([], dtype = bars_dtype ) # get rest from persisted part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) bars = np . concatenate (( part1 , part2 )) else : end = end or now closed_end = tf . floor ( end , frame_type ) ff_min1 = tf . first_min_frame ( now , FrameType . MIN1 ) if tf . day_shift ( end , 0 ) < now . date () or end < ff_min1 : part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , end ) part2 = np . array ([], dtype = bars_dtype ) elif start >= ff_min1 : # all in cache part1 = np . array ([], dtype = bars_dtype ) n = tf . count_frames ( start , closed_end , frame_type ) + 1 part2 = await cls . 
_get_cached_bars_n ( code , n , frame_type , end ) part2 = part2 [ part2 [ \"frame\" ] >= start ] else : # in both cache and persisted ff = tf . first_min_frame ( now , frame_type ) part1 = await cls . _get_persisted_bars_in_range ( code , frame_type , start , ff ) n = tf . count_frames ( ff , closed_end , frame_type ) + 1 part2 = await cls . _get_cached_bars_n ( code , n , frame_type , end ) if not unclosed : part2 = part2 [ part2 [ \"frame\" ] <= closed_end ] bars = np . concatenate (( part1 , part2 )) if fq : return cls . qfq ( bars ) else : return bars","title":"get_bars_in_range()"},{"location":"api/stock/#omicron.models.stock.Stock.get_latest_price","text":"\u83b7\u53d6\u591a\u652f\u80a1\u7968\u7684\u6700\u65b0\u4ef7\u683c\uff08\u4ea4\u6613\u65e5\u5f53\u5929\uff09\uff0c\u6682\u4e0d\u5305\u62ec\u6307\u6570 \u4ef7\u683c\u6570\u636e\u6bcf5\u79d2\u66f4\u65b0\u4e00\u6b21\uff0c\u63a5\u53d7\u591a\u53ea\u80a1\u7968\u67e5\u8be2\uff0c\u8fd4\u56de\u6700\u540e\u7f13\u5b58\u7684\u4ef7\u683c Parameters: Name Type Description Default codes Iterable[str] \u4ee3\u7801\u5217\u8868 required Returns: Type Description List[str] \u8fd4\u56de\u4e00\u4e2aList\uff0c\u4ef7\u683c\u662f\u5b57\u7b26\u5f62\u5f0f\u7684\u6d6e\u70b9\u6570\u3002 Source code in omicron/models/stock.py @classmethod async def get_latest_price ( cls , codes : Iterable [ str ]) -> List [ str ]: \"\"\"\u83b7\u53d6\u591a\u652f\u80a1\u7968\u7684\u6700\u65b0\u4ef7\u683c\uff08\u4ea4\u6613\u65e5\u5f53\u5929\uff09\uff0c\u6682\u4e0d\u5305\u62ec\u6307\u6570 \u4ef7\u683c\u6570\u636e\u6bcf5\u79d2\u66f4\u65b0\u4e00\u6b21\uff0c\u63a5\u53d7\u591a\u53ea\u80a1\u7968\u67e5\u8be2\uff0c\u8fd4\u56de\u6700\u540e\u7f13\u5b58\u7684\u4ef7\u683c Args: codes: \u4ee3\u7801\u5217\u8868 Returns: \u8fd4\u56de\u4e00\u4e2aList\uff0c\u4ef7\u683c\u662f\u5b57\u7b26\u5f62\u5f0f\u7684\u6d6e\u70b9\u6570\u3002 \"\"\" if not codes : return [] _raw_code_list = [] for code_str in codes : code , _ = code_str . split ( \".\" ) _raw_code_list . append ( code ) _converted_data = [] raw_data = await cache . feature . hmget ( TRADE_LATEST_PRICE , * _raw_code_list ) for _data in raw_data : if _data is None : _converted_data . append ( _data ) else : _converted_data . 
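The range-query variant differs only in taking an explicit `[start, end]` interval. A sketch with placeholder dates, under the same initialization assumption as above:

```
import datetime

from coretypes import FrameType
from omicron.models.stock import Stock


async def march_bars(code):
    start = datetime.date(2023, 3, 1)
    end = datetime.date(2023, 3, 31)
    # closed interval; current-day data is merged in from the cache when end is today
    bars = await Stock.get_bars_in_range(
        code, FrameType.DAY, start, end, fq=True, unclosed=False
    )
    return bars["frame"], bars["close"]
```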
append ( float ( _data )) return _converted_data","title":"get_latest_price()"},{"location":"api/stock/#omicron.models.stock.Stock.get_trade_price_limits","text":"\u4eceinfluxdb\u548ccache\u4e2d\u83b7\u53d6\u4e2a\u80a1\u5728[begin, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u4ef7\u3002 \u6da8\u8dcc\u505c\u4ef7\u53ea\u6709\u65e5\u7ebf\u6570\u636e\u624d\u6709\uff0c\u56e0\u6b64\uff0cFrameType\u56fa\u5b9a\u4e3aFrameType.DAY\uff0c \u5f53\u5929\u7684\u6570\u636e\u5b58\u653e\u4e8eredis\uff0c\u5982\u679c\u67e5\u8be2\u65e5\u671f\u5305\u542b\u5f53\u5929\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u4ececache\u4e2d\u8bfb\u53d6\u5e76\u8ffd\u52a0\u5230\u7ed3\u679c\u4e2d Parameters: Name Type Description Default code \u4e2a\u80a1\u4ee3\u7801 required begin \u5f00\u59cb\u65e5\u671f required end \u7ed3\u675f\u65e5\u671f required Returns: Type Description numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' BarsArray : \"\"\"\u4eceinfluxdb\u548ccache\u4e2d\u83b7\u53d6\u4e2a\u80a1\u5728[begin, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u4ef7\u3002 \u6da8\u8dcc\u505c\u4ef7\u53ea\u6709\u65e5\u7ebf\u6570\u636e\u624d\u6709\uff0c\u56e0\u6b64\uff0cFrameType\u56fa\u5b9a\u4e3aFrameType.DAY\uff0c \u5f53\u5929\u7684\u6570\u636e\u5b58\u653e\u4e8eredis\uff0c\u5982\u679c\u67e5\u8be2\u65e5\u671f\u5305\u542b\u5f53\u5929\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u4ececache\u4e2d\u8bfb\u53d6\u5e76\u8ffd\u52a0\u5230\u7ed3\u679c\u4e2d Args: code : \u4e2a\u80a1\u4ee3\u7801 begin : \u5f00\u59cb\u65e5\u671f end : \u7ed3\u675f\u65e5\u671f Returns: dtype\u4e3a[('frame', 'O'), ('high_limit', 'f4'), ('low_limit', 'f4')]\u7684numpy\u6570\u7ec4 \"\"\" cols = [ \"_time\" , \"high_limit\" , \"low_limit\" ] dtype = [( \"frame\" , \"O\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" )] if isinstance ( begin , datetime . datetime ): begin = begin . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate if isinstance ( end , datetime . datetime ): end = end . date () # \u5f3a\u5236\u8f6c\u6362\u4e3adate data_in_cache = await cls . _get_price_limit_in_cache ( code , begin , end ) client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . bucket ( client . _bucket ) . measurement ( measurement ) . range ( begin , end ) . tags ({ \"code\" : code }) . fields ( cols ) . sort ( \"_time\" ) ) ds = NumpyDeserializer ( dtype , use_cols = cols , converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if data_in_cache : result = np . concatenate ([ result , data_in_cache ]) return result","title":"get_trade_price_limits()"},{"location":"api/stock/#omicron.models.stock.Stock.persist_bars","text":"\u5c06\u884c\u60c5\u6570\u636e\u6301\u4e45\u5316 \u5982\u679c bars \u7c7b\u578b\u4e3aDict,\u5219key\u4e3a code \uff0cvalue\u4e3a bars \u3002\u5982\u679c\u5176\u7c7b\u578b\u4e3aBarsArray\u6216\u8005pd.DataFrame\uff0c\u5219 bars \u5404\u5217\u5b57\u6bb5\u5e94\u8be5\u4e3a coretypes.bars_dtype + (\"code\", \"O\")\u6784\u6210\u3002 Parameters: Name Type Description Default frame_type FrameType the frame type of the bars required bars Union[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' BarsArray : \"\"\"\u5bf9\u884c\u60c5\u6570\u636e\u6267\u884c\u524d\u590d\u6743\u64cd\u4f5c\"\"\" # todo: \u8fd9\u91cc\u53ef\u4ee5\u4f18\u5316 if bars . 
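The two read-side helpers documented here combine naturally in a snapshot routine. A sketch with hypothetical codes and dates; per the source shown above, `get_latest_price` yields floats parsed from the cached strings (or None for codes without a cached quote), and trade price limits exist only at day level.

```
import datetime

from omicron.models.stock import Stock


async def snapshot(codes):
    # e.g. codes = ["000001.XSHE", "600000.XSHG"]; indexes are not supported here
    prices = await Stock.get_latest_price(codes)

    # dtype: [("frame", "O"), ("high_limit", "f4"), ("low_limit", "f4")]
    begin = datetime.date(2023, 5, 8)
    end = datetime.date(2023, 5, 12)
    limits = await Stock.get_trade_price_limits(codes[0], begin, end)
    return prices, limits
```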
size == 0 : return bars last = bars [ - 1 ][ \"factor\" ] for field in [ \"open\" , \"high\" , \"low\" , \"close\" , \"volume\" ]: bars [ field ] = bars [ field ] * ( bars [ \"factor\" ] / last ) return bars","title":"qfq()"},{"location":"api/stock/#omicron.models.stock.Stock.resample","text":"\u5c06\u539f\u6765\u4e3a from_frame \u7684\u884c\u60c5\u6570\u636e\u8f6c\u6362\u4e3a to_frame \u7684\u884c\u60c5\u6570\u636e \u5982\u679c to_frame \u4e3a\u65e5\u7ebf\u6216\u8005\u5206\u949f\u7ea7\u522b\u7ebf\uff0c\u5219 from_frame \u5fc5\u987b\u4e3a\u5206\u949f\u7ebf\uff1b\u5982\u679c to_frame \u4e3a\u5468\u4ee5\u4e0a\u7ea7\u522b\u7ebf\uff0c\u5219 from_frame \u5fc5\u987b\u4e3a\u65e5\u7ebf\u3002\u5176\u5b83\u7ea7\u522b\u4e4b\u95f4\u7684\u8f6c\u6362\u4e0d\u652f\u6301\u3002 \u5982\u679c from_frame \u4e3a1\u5206\u949f\u7ebf\uff0c\u5219\u5fc5\u987b\u4ece9\uff1a31\u8d77\u3002 Parameters: Name Type Description Default bars BarsArray \u884c\u60c5\u6570\u636e required from_frame FrameType \u8f6c\u6362\u524d\u7684FrameType required to_frame FrameType \u8f6c\u6362\u540e\u7684FrameType required Returns: Type Description BarsArray \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e Source code in omicron/models/stock.py @classmethod def resample ( cls , bars : BarsArray , from_frame : FrameType , to_frame : FrameType ) -> BarsArray : \"\"\"\u5c06\u539f\u6765\u4e3a`from_frame`\u7684\u884c\u60c5\u6570\u636e\u8f6c\u6362\u4e3a`to_frame`\u7684\u884c\u60c5\u6570\u636e \u5982\u679c`to_frame`\u4e3a\u65e5\u7ebf\u6216\u8005\u5206\u949f\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u5206\u949f\u7ebf\uff1b\u5982\u679c`to_frame`\u4e3a\u5468\u4ee5\u4e0a\u7ea7\u522b\u7ebf\uff0c\u5219`from_frame`\u5fc5\u987b\u4e3a\u65e5\u7ebf\u3002\u5176\u5b83\u7ea7\u522b\u4e4b\u95f4\u7684\u8f6c\u6362\u4e0d\u652f\u6301\u3002 \u5982\u679c`from_frame`\u4e3a1\u5206\u949f\u7ebf\uff0c\u5219\u5fc5\u987b\u4ece9\uff1a31\u8d77\u3002 Args: bars (BarsArray): \u884c\u60c5\u6570\u636e from_frame (FrameType): \u8f6c\u6362\u524d\u7684FrameType to_frame (FrameType): \u8f6c\u6362\u540e\u7684FrameType Returns: BarsArray: \u8f6c\u6362\u540e\u7684\u884c\u60c5\u6570\u636e \"\"\" if from_frame == FrameType . MIN1 : return cls . _resample_from_min1 ( bars , to_frame ) elif from_frame == FrameType . DAY : # pragma: no cover return cls . _resample_from_day ( bars , to_frame ) else : # pragma: no cover raise TypeError ( f \"unsupported from_frame: { from_frame } \" )","title":"resample()"},{"location":"api/stock/#omicron.models.stock.Stock.reset_cache","text":"\u6e05\u9664\u7f13\u5b58\u7684\u884c\u60c5\u6570\u636e Source code in omicron/models/stock.py @classmethod async def reset_cache ( cls ): \"\"\"\u6e05\u9664\u7f13\u5b58\u7684\u884c\u60c5\u6570\u636e\"\"\" try : for ft in itertools . chain ( tf . minute_level_frames , tf . day_level_frames ): keys = await cache . security . keys ( f \"bars: { ft . value } :*\" ) if keys : await cache . security . delete ( * keys ) finally : cls . 
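Both transforms above are plain classmethods over `bars_dtype` arrays, so they compose easily. A sketch, assuming `BarsArray` is importable from `coretypes` as the annotations suggest:

```
from coretypes import BarsArray, FrameType
from omicron.models.stock import Stock


def to_30m(bars_1m: BarsArray) -> BarsArray:
    # only minute -> day/minute and day -> week-or-above conversions are supported,
    # and 1-minute input must start from 09:31
    return Stock.resample(bars_1m, FrameType.MIN1, FrameType.MIN30)


def forward_adjust(bars: BarsArray) -> BarsArray:
    # scales open/high/low/close/volume by factor / last_factor, i.e. what
    # get_bars(..., fq=True) applies internally
    return Stock.qfq(bars)
```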
_is_cache_empty = True","title":"reset_cache()"},{"location":"api/stock/#omicron.models.stock.Stock.save_trade_price_limits","text":"\u4fdd\u5b58\u6da8\u8dcc\u505c\u4ef7 Parameters: Name Type Description Default price_limits numpy.ndarray[Any, numpy.dtype[dtype([('frame', 'O'), ('code', 'O'), ('high_limit', ' Tuple [ List [ bool ]]: \"\"\"\u83b7\u53d6\u4e2a\u80a1\u5728[start, end]\u4e4b\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u672c\u51fd\u6570\u8fd4\u56de\u7684\u5e8f\u5217\u5728\u80a1\u7968\u6709\u505c\u724c\u7684\u60c5\u51b5\u4e0b\uff0c\u5c06\u4e0d\u80fd\u4e0e[start, end]\u4e00\u4e00\u5bf9\u5e94\u3002 Args: code: \u4e2a\u80a1\u4ee3\u7801 start: \u5f00\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u6da8\u8dcc\u505c\u6807\u5fd7\u5217\u8868(buy, sell) \"\"\" cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ] client = get_influx_client () measurement = cls . _measurement_name ( FrameType . DAY ) flux = ( Flux () . bucket ( client . _bucket ) . measurement ( measurement ) . range ( start , end ) . tags ({ \"code\" : code }) . fields ( cols ) . sort ( \"_time\" ) ) dtype = [ ( \"frame\" , \"O\" ), ( \"close\" , \"f4\" ), ( \"high_limit\" , \"f4\" ), ( \"low_limit\" , \"f4\" ), ] ds = NumpyDeserializer ( dtype , use_cols = [ \"_time\" , \"close\" , \"high_limit\" , \"low_limit\" ], converters = { \"_time\" : lambda x : ciso8601 . parse_datetime ( x ) . date ()}, # since we ask parse date in convertors, so we have to disable parse_date parse_date = None , ) result = await client . query ( flux , ds ) if result . size == 0 : return np . array ([], dtype = dtype ) return ( array_price_equal ( result [ \"close\" ], result [ \"high_limit\" ]), array_price_equal ( result [ \"close\" ], result [ \"low_limit\" ]), )","title":"trade_price_limit_flags()"},{"location":"api/stock/#omicron.models.stock.Stock.trade_price_limit_flags_ex","text":"\u83b7\u53d6\u80a1\u7968 code \u5728 [start, end] \u533a\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 Note \u5982\u679cend\u4e3a\u5f53\u5929\uff0c\u6ce8\u610f\u5728\u672a\u6536\u76d8\u4e4b\u524d\uff0c\u8fd9\u4e2a\u6da8\u8dcc\u505c\u6807\u5fd7\u90fd\u662f\u4e0d\u7a33\u5b9a\u7684 Parameters: Name Type Description Default code str \u80a1\u7968\u4ee3\u7801 required start date \u8d77\u59cb\u65e5\u671f required end date \u7ed3\u675f\u65e5\u671f required Returns: Type Description Dict[datetime.date, Tuple[bool, bool]] \u4ee5\u65e5\u671f\u4e3akey\uff0c\uff08\u6da8\u505c\uff0c\u8dcc\u505c\uff09\u4e3a\u503c\u7684dict Source code in omicron/models/stock.py @classmethod async def trade_price_limit_flags_ex ( cls , code : str , start : datetime . date , end : datetime . date ) -> Dict [ datetime . date , Tuple [ bool , bool ]]: \"\"\"\u83b7\u53d6\u80a1\u7968`code`\u5728`[start, end]`\u533a\u95f4\u7684\u6da8\u8dcc\u505c\u6807\u5fd7 !!!Note \u5982\u679cend\u4e3a\u5f53\u5929\uff0c\u6ce8\u610f\u5728\u672a\u6536\u76d8\u4e4b\u524d\uff0c\u8fd9\u4e2a\u6da8\u8dcc\u505c\u6807\u5fd7\u90fd\u662f\u4e0d\u7a33\u5b9a\u7684 Args: code: \u80a1\u7968\u4ee3\u7801 start: \u8d77\u59cb\u65e5\u671f end: \u7ed3\u675f\u65e5\u671f Returns: \u4ee5\u65e5\u671f\u4e3akey\uff0c\uff08\u6da8\u505c\uff0c\u8dcc\u505c\uff09\u4e3a\u503c\u7684dict \"\"\" limit_prices = await cls . get_trade_price_limits ( code , start , end ) bars = await Stock . get_bars_in_range ( code , FrameType . DAY , start = start , end = end , fq = False ) close = bars [ \"close\" ] results = {} # aligned = True for i in range ( len ( bars )): if bars [ i ][ \"frame\" ] . item () . 
date () != limit_prices [ i ][ \"frame\" ]: # aligned = False logger . warning ( \"\u6570\u636e\u540c\u6b65\u9519\u8bef\uff0c\u6da8\u8dcc\u505c\u4ef7\u683c\u4e0e\u6536\u76d8\u4ef7\u65f6\u95f4\u4e0d\u4e00\u81f4: %s , %s \" , code , bars [ i ][ \"frame\" ]) break results [ limit_prices [ i ][ \"frame\" ]] = ( price_equal ( limit_prices [ i ][ \"high_limit\" ], close [ i ]), price_equal ( limit_prices [ i ][ \"low_limit\" ], close [ i ]), ) # if not aligned: # bars = bars[i:] # limit_prices = limit_prices[i:] # for frame in bars[\"frame\"]: # frame = frame.item().date() # close = bars[bars[\"frame\"].item().date() == frame][\"close\"].item() # high = limit_prices[limit_prices[\"frame\"] == frame][\"high_limit\"].item() # low = limit_prices[limit_prices[\"frame\"] == frame][\"low_limit\"].item() # results[frame] = ( # price_equal(high, close), # price_equal(low, close) # ) return results","title":"trade_price_limit_flags_ex()"},{"location":"api/strategy/","text":"base \u00b6 BacktestState dataclass \u00b6 BacktestState(start: Union[datetime.date, datetime.datetime], end: Union[datetime.date, datetime.datetime], barss: Union[NoneType, Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' frame_ ) or ( intra_day and next_frame . date () > frame_ . date () ): await self . after_trade ( frame_ , day_barss ) self . broker . stop_backtest () await self . after_stop () self . bills = self . broker . bills () baseline = kwargs . get ( \"baseline\" , \"399300.XSHE\" ) self . metrics = self . broker . metrics ( baseline = baseline ) self . bs . baseline = baseline @property def cash ( self ): \"\"\"\u8fd4\u56de\u5f53\u524d\u53ef\u7528\u73b0\u91d1\"\"\" return self . broker . available_money def positions ( self , dt : Optional [ datetime . date ] = None ): \"\"\"\u8fd4\u56de\u5f53\u524d\u6301\u4ed3\"\"\" return self . broker . positions ( dt ) def available_shares ( self , sec : str , dt : Optional [ Frame ] = None ): \"\"\"\u8fd4\u56de\u7ed9\u5b9a\u80a1\u7968\u5728`dt`\u65e5\u7684\u53ef\u552e\u80a1\u6570 Args: sec: \u8bc1\u5238\u4ee3\u7801 dt: \u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u65e0\u610f\u4e49\uff0c\u53ea\u80fd\u8fd4\u56de\u6700\u65b0\u6570\u636e\uff1b\u5728\u56de\u6d4b\u65f6\uff0c\u5fc5\u987b\u6307\u5b9a\u65e5\u671f\uff0c\u4e14\u8fd4\u56de\u6307\u5b9a\u65e5\u671f\u4e0b\u7684\u6301\u4ed3\u3002 \"\"\" return self . broker . available_shares ( sec , dt ) async def buy ( self , sec : str , price : Optional [ float ] = None , vol : Optional [ int ] = None , money : Optional [ float ] = None , order_time : Optional [ datetime . datetime ] = None , ) -> Dict : \"\"\"\u4e70\u5165\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u4e70\u4ef7\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u81ea\u52a8\u8f6c\u5e02\u4ef7\u4e70\u5165\u3002 vol: \u59d4\u4e70\u80a1\u6570\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u4e3a100\u7684\u6574\u6570\u3002\u5982\u679c\u4e3aNone, \u5219money\u5fc5\u987b\u4f20\u5165\u3002 money: \u59d4\u4e70\u91d1\u989d\u3002\u5982\u679c\u540c\u65f6\u4f20\u5165\u4e86vol\uff0c\u5219\u6b64\u53c2\u6570\u81ea\u52a8\u5ffd\u7565 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: \u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\u3002 \"\"\" logger . 
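The two flag helpers documented above differ mainly in shape: one returns positional sequences, the other a date-keyed dict. A sketch with placeholder dates (omicron assumed initialized); note that before the close, flags for the current day are not yet stable.

```
import datetime

from omicron.models.stock import Stock


async def limit_hits(code):
    start = datetime.date(2023, 4, 3)
    end = datetime.date(2023, 4, 28)

    # a (hit_high_limit, hit_low_limit) pair of boolean sequences; suspended days
    # are skipped, so they need not map 1:1 onto the trading calendar
    flags = await Stock.trade_price_limit_flags(code, start, end)

    # the _ex variant keys each (high, low) pair by date, which is easier to join
    # against other per-day data
    by_date = await Stock.trade_price_limit_flags_ex(code, start, end)
    return flags, by_date
```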
debug ( \"buy order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { money : .0f } \" if money is not None else None , date = order_time , ) if vol is None : if money is None : raise ValueError ( \"parameter `mnoey` must be presented!\" ) return await self . broker . buy_by_money ( sec , money , price , order_time = order_time ) elif price is None : return self . broker . market_buy ( sec , vol , order_time = order_time ) else : return self . broker . buy ( sec , price , vol , order_time = order_time ) async def sell ( self , sec : str , price : Optional [ float ] = None , vol : Optional [ float ] = None , percent : Optional [ float ] = None , order_time : Optional [ datetime . datetime ] = None , ) -> Union [ List , Dict ]: \"\"\"\u5356\u51fa\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u5356\u4ef7\uff0c\u5982\u679c\u672a\u63d0\u4f9b\uff0c\u5219\u8f6c\u4e3a\u5e02\u4ef7\u5355 vol: \u59d4\u5356\u80a1\u6570\u3002\u5982\u679c\u4e3aNone\uff0c\u5219percent\u5fc5\u987b\u4f20\u5165 percent: \u5356\u51fa\u4e00\u5b9a\u6bd4\u4f8b\u7684\u6301\u4ed3\uff0c\u53d6\u503c\u4ecb\u4e8e0\u4e0e1\u4e4b\u95f4\u3002\u5982\u679c\u4e0evol\u540c\u65f6\u63d0\u4f9b\uff0c\u6b64\u53c2\u6570\u5c06\u88ab\u5ffd\u7565\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u6309\u6bd4\u4f8b\u6362\u7b97\u540e\u7684\u5356\u51fa\u6570\u636e\u662f\u7b26\u5408\u8981\u6c42\u7684\uff08\u6bd4\u5982\u4e0d\u4e3a100\u7684\u500d\u6570\uff0c\u4f46\u6709\u4e9b\u60c5\u51b5\u4e0b\u8fd9\u662f\u5141\u8bb8\u7684\uff0c\u6240\u4ee5\u7a0b\u5e8f\u8fd9\u91cc\u65e0\u6cd5\u5e2e\u4f60\u5224\u65ad\uff09 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: Union[List, Dict]: \u6210\u4ea4\u8fd4\u56de\uff0c\u8be6\u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\uff0ctrade server\u53ea\u8fd4\u56de\u4e00\u4e2a\u59d4\u6258\u5355\u4fe1\u606f \"\"\" logger . debug ( \"sell order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { percent : .2% } \" if percent is not None else None , date = order_time , ) if vol is None and percent is None : raise ValueError ( \"either vol or percent must be presented\" ) if vol is None : if price is None : price = await self . broker . _get_market_sell_price ( sec , order_time = order_time ) # there's no market_sell_percent API in traderclient return self . broker . sell_percent ( sec , price , percent , order_time = order_time ) # type: ignore else : if price is None : return self . broker . market_sell ( sec , vol , order_time = order_time ) else : return self . broker . sell ( sec , price , vol , order_time = order_time ) async def filter_paused_stock ( self , buylist : List [ str ], dt : datetime . date ): secs = await Security . select ( dt ) . eval () in_trading = jq . get_price ( secs , fields = [ \"paused\" ], start_date = dt , end_date = dt , skip_paused = True )[ \"code\" ] . to_numpy () return np . 
intersect1d ( buylist , in_trading ) async def before_start ( self ): \"\"\"\u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 \"\"\" if self . bs is not None : logger . info ( \"BEFORE_START: %s < %s - %s >\" , self . name , self . bs . start , self . bs . end , date = self . bs . start , ) else : logger . info ( \"BEFORE_START: %s \" , self . name ) async def before_trade ( self , date : datetime . date , barss : Optional [ Dict [ str , BarsArray ]] = None ): \"\"\"\u6bcf\u65e5\u5f00\u76d8\u524d\u7684\u51c6\u5907\u5de5\u4f5c Args: date: \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f barss: \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 \"\"\" logger . debug ( \"BEFORE_TRADE: %s \" , self . name , date = date ) async def after_trade ( self , date : Frame , barss : Optional [ Dict [ str , BarsArray ]] = None ): \"\"\"\u6bcf\u65e5\u6536\u76d8\u540e\u7684\u6536\u5c3e\u5de5\u4f5c Args: date: \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f barss: \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 \"\"\" logger . debug ( \"AFTER_TRADE: %s \" , self . name , date = date ) async def after_stop ( self ): if self . bs is not None : logger . info ( \"STOP %s < %s - %s >\" , self . name , self . bs . start , self . bs . end , date = self . bs . end , ) else : logger . info ( \"STOP %s \" , self . name ) async def predict ( self , frame : Frame , frame_type : FrameType , i : int , barss : Optional [ Dict [ str , BarsArray ]] = None , ** kwargs , ): \"\"\"\u7b56\u7565\u8bc4\u4f30\u51fd\u6570\u3002\u5728\u6b64\u51fd\u6570\u4e2d\u5b9e\u73b0\u4ea4\u6613\u4fe1\u53f7\u68c0\u6d4b\u548c\u5904\u7406\u3002 Args: frame: \u5f53\u524d\u65f6\u95f4\u5e27 frame_type: \u5904\u7406\u7684\u6570\u636e\u4e3b\u5468\u671f i: \u5f53\u524d\u65f6\u95f4\u79bb\u56de\u6d4b\u8d77\u59cb\u7684\u5355\u4f4d\u6570 barss: \u5982\u679c\u8c03\u7528`backtest`\u65f6\u4f20\u5165\u4e86`portfolio`\u53ca\u53c2\u6570\uff0c\u5219`backtest`\u5c06\u4f1a\u5728\u56de\u6d4b\u4e4b\u524d\uff0c\u9884\u53d6\u4ece[start - warmup_period * frame_type, end]\u95f4\u7684portfolio\u884c\u60c5\u6570\u636e\uff0c\u5e76\u5728\u6bcf\u6b21\u8c03\u7528`predict`\u65b9\u6cd5\u65f6\uff0c\u901a\u8fc7`barss`\u53c2\u6570\uff0c\u5c06[start - warmup_period * frame_type, start + i * frame_type]\u95f4\u7684\u6570\u636e\u4f20\u7ed9`predict`\u65b9\u6cd5\u3002\u4f20\u5165\u7684\u6570\u636e\u5df2\u8fdb\u884c\u524d\u590d\u6743\u3002 Keyword Args: \u5728`backtest`\u65b9\u6cd5\u4e2d\u7684\u4f20\u5165\u7684kwargs\u53c2\u6570\u5c06\u88ab\u900f\u4f20\u5230\u6b64\u65b9\u6cd5\u4e2d\u3002 \"\"\" raise NotImplementedError @deprecated ( \"2.0.0\" , details = \"use `make_report` instead\" ) async def plot_metrics ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): return await self . 
make_report ( indicator ) async def make_report ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): \"\"\"\u7b56\u7565\u56de\u6d4b\u62a5\u544a Args: indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u5217\u540d\u4e3a\"value\"\u7684DataFrame \"\"\" if self . bills is None or self . metrics is None : raise ValueError ( \"Please run `start_backtest` first.\" ) if isinstance ( indicator , list ): assert len ( indicator [ 0 ]) == 2 indicator = pd . DataFrame ( indicator , columns = [ \"date\" , \"value\" ]) indicator . set_index ( \"date\" , inplace = True ) mg = MetricsGraph ( self . bills , self . metrics , indicator = indicator , baseline_code = self . bs . baseline , ) await mg . plot () cash property readonly \u00b6 \u8fd4\u56de\u5f53\u524d\u53ef\u7528\u73b0\u91d1 __init__ ( self , url , account = None , token = None , name = None , ver = None , is_backtest = True , start = None , end = None , frame_type = None , warmup_period = 0 ) special \u00b6 \u6784\u9020\u51fd\u6570 Parameters: Name Type Description Default url str \u5b9e\u76d8/\u56de\u6d4b\u670d\u52a1\u5668\u7684\u5730\u5740\u3002 required start Union[datetime.date, datetime.datetime] \u56de\u6d4b\u8d77\u59cb\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 None end Union[datetime.date, datetime.datetime] \u56de\u6d4b\u7ed3\u675f\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 None account Optional[str] \u5b9e\u76d8/\u56de\u6d4b\u8d26\u53f7\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u4ee5\u7b56\u7565\u540d+\u968f\u673a\u5b57\u7b26\u6784\u5efa\u8d26\u53f7\u3002 None token Optional[str] \u5b9e\u76d8/\u56de\u6d4b\u65f6\u7528\u7684token\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u81ea\u52a8\u751f\u6210\u3002 None is_backtest bool \u662f\u5426\u4e3a\u56de\u6d4b\u6a21\u5f0f\u3002 True name Optional[str] \u7b56\u7565\u540d\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u4f7f\u7528\u7c7b\u540d\u5b57\u5c0f\u5199 None ver Optional[str] \u7b56\u7565\u7248\u672c\u53f7\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u9ed8\u8ba4\u4e3a0.1. 
None start Union[datetime.date, datetime.datetime] \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u8d77\u59cb\u65f6\u95f4 None end Union[datetime.date, datetime.datetime] \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u7ed3\u675f\u65f6\u95f4 None frame_type Optional[coretypes.types.FrameType] \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u4e3b\u5468\u671f None warmup_period int \u7b56\u7565\u6267\u884c\u65f6\u9700\u8981\u7684\u6700\u5c0fbar\u6570\uff08\u4ee5frame_type\uff09\u8ba1\u3002 0 Source code in omicron/strategy/base.py def __init__ ( self , url : str , account : Optional [ str ] = None , token : Optional [ str ] = None , name : Optional [ str ] = None , ver : Optional [ str ] = None , is_backtest : bool = True , start : Optional [ Frame ] = None , end : Optional [ Frame ] = None , frame_type : Optional [ FrameType ] = None , warmup_period : int = 0 , ): \"\"\"\u6784\u9020\u51fd\u6570 Args: url: \u5b9e\u76d8/\u56de\u6d4b\u670d\u52a1\u5668\u7684\u5730\u5740\u3002 start: \u56de\u6d4b\u8d77\u59cb\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 end: \u56de\u6d4b\u7ed3\u675f\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 account: \u5b9e\u76d8/\u56de\u6d4b\u8d26\u53f7\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u4ee5\u7b56\u7565\u540d+\u968f\u673a\u5b57\u7b26\u6784\u5efa\u8d26\u53f7\u3002 token: \u5b9e\u76d8/\u56de\u6d4b\u65f6\u7528\u7684token\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u81ea\u52a8\u751f\u6210\u3002 is_backtest: \u662f\u5426\u4e3a\u56de\u6d4b\u6a21\u5f0f\u3002 name: \u7b56\u7565\u540d\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u4f7f\u7528\u7c7b\u540d\u5b57\u5c0f\u5199 ver: \u7b56\u7565\u7248\u672c\u53f7\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u9ed8\u8ba4\u4e3a0.1. start: \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u8d77\u59cb\u65f6\u95f4 end: \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u7ed3\u675f\u65f6\u95f4 frame_type: \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u4e3b\u5468\u671f warmup_period: \u7b56\u7565\u6267\u884c\u65f6\u9700\u8981\u7684\u6700\u5c0fbar\u6570\uff08\u4ee5frame_type\uff09\u8ba1\u3002 \"\"\" self . ver = ver or \"0.1\" self . name = name or self . __class__ . __name__ . lower () + f \"_v { self . ver } \" self . token = token or uuid . uuid4 () . hex self . account = account or f \"smallcap- { self . token [ - 4 :] } \" self . url = url self . bills = None self . metrics = None # used by both live and backtest self . warmup_period = warmup_period self . is_backtest = is_backtest if is_backtest : if start is None or end is None or frame_type is None : raise ValueError ( \"start, end and frame_type must be presented.\" ) self . bs = BacktestState ( start , end , None , 0 , warmup_period ) self . _frame_type = frame_type self . broker = TraderClient ( url , self . account , self . token , is_backtest = True , start = self . bs . start , end = self . bs . 
end , ) else : if account is None or token is None : raise ValueError ( \"account and token must be presented.\" ) self . broker = TraderClient ( url , self . account , self . token , is_backtest = False ) after_trade ( self , date , barss = None ) async \u00b6 \u6bcf\u65e5\u6536\u76d8\u540e\u7684\u6536\u5c3e\u5de5\u4f5c Parameters: Name Type Description Default date Union[datetime.date, datetime.datetime] \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss Optional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' frame_ ) or ( intra_day and next_frame . date () > frame_ . date () ): await self . after_trade ( frame_ , day_barss ) self . broker . stop_backtest () await self . after_stop () self . bills = self . broker . bills () baseline = kwargs . get ( \"baseline\" , \"399300.XSHE\" ) self . metrics = self . broker . metrics ( baseline = baseline ) self . bs . baseline = baseline before_start ( self ) async \u00b6 \u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 Source code in omicron/strategy/base.py async def before_start ( self ): \"\"\"\u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 \"\"\" if self . bs is not None : logger . info ( \"BEFORE_START: %s < %s - %s >\" , self . name , self . bs . start , self . bs . end , date = self . bs . start , ) else : logger . info ( \"BEFORE_START: %s \" , self . name ) before_trade ( self , date , barss = None ) async \u00b6 \u6bcf\u65e5\u5f00\u76d8\u524d\u7684\u51c6\u5907\u5de5\u4f5c Parameters: Name Type Description Default date date \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss Optional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' Dict : \"\"\"\u4e70\u5165\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u4e70\u4ef7\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u81ea\u52a8\u8f6c\u5e02\u4ef7\u4e70\u5165\u3002 vol: \u59d4\u4e70\u80a1\u6570\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u4e3a100\u7684\u6574\u6570\u3002\u5982\u679c\u4e3aNone, \u5219money\u5fc5\u987b\u4f20\u5165\u3002 money: \u59d4\u4e70\u91d1\u989d\u3002\u5982\u679c\u540c\u65f6\u4f20\u5165\u4e86vol\uff0c\u5219\u6b64\u53c2\u6570\u81ea\u52a8\u5ffd\u7565 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: \u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\u3002 \"\"\" logger . 
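Putting the constructor parameters above together, a backtest-mode strategy can be declared roughly as follows. This is a sketch only: the server URL is hypothetical, `account`/`token` are auto-generated in backtest mode when omitted, and `start`, `end` and `frame_type` are mandatory.

```
import datetime

from coretypes import FrameType
from omicron.strategy.base import BaseStrategy


class MyStrategy(BaseStrategy):
    async def predict(self, frame, frame_type, i, barss=None, **kwargs):
        # signal detection and order placement go here; see the sketch further below
        pass


strategy = MyStrategy(
    url="http://localhost:7080/backtest/api/trade/v0.5",  # hypothetical server address
    is_backtest=True,
    start=datetime.date(2023, 1, 4),
    end=datetime.date(2023, 6, 30),
    frame_type=FrameType.DAY,
    warmup_period=20,
)
```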
debug ( \"buy order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { money : .0f } \" if money is not None else None , date = order_time , ) if vol is None : if money is None : raise ValueError ( \"parameter `mnoey` must be presented!\" ) return await self . broker . buy_by_money ( sec , money , price , order_time = order_time ) elif price is None : return self . broker . market_buy ( sec , vol , order_time = order_time ) else : return self . broker . buy ( sec , price , vol , order_time = order_time ) make_report ( self , indicator = None ) async \u00b6 \u7b56\u7565\u56de\u6d4b\u62a5\u544a Parameters: Name Type Description Default indicator Union[pandas.core.frame.DataFrame, List[Tuple]] \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u5217\u540d\u4e3a\"value\"\u7684DataFrame None Source code in omicron/strategy/base.py async def make_report ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): \"\"\"\u7b56\u7565\u56de\u6d4b\u62a5\u544a Args: indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u5217\u540d\u4e3a\"value\"\u7684DataFrame \"\"\" if self . bills is None or self . metrics is None : raise ValueError ( \"Please run `start_backtest` first.\" ) if isinstance ( indicator , list ): assert len ( indicator [ 0 ]) == 2 indicator = pd . DataFrame ( indicator , columns = [ \"date\" , \"value\" ]) indicator . set_index ( \"date\" , inplace = True ) mg = MetricsGraph ( self . bills , self . metrics , indicator = indicator , baseline_code = self . bs . baseline , ) await mg . plot () peek ( self , code , n ) async \u00b6 \u5141\u8bb8\u7b56\u7565\u5077\u770b\u672a\u6765\u6570\u636e \u53ef\u7528\u4ee5\u56e0\u5b50\u68c0\u9a8c\u573a\u666f\u3002\u8981\u6c42\u6570\u636e\u672c\u8eab\u5df2\u7f13\u5b58\u3002\u5426\u5219\u8bf7\u7528Stock.get_bars\u7b49\u65b9\u6cd5\u83b7\u53d6\u3002 Source code in omicron/strategy/base.py async def peek ( self , code : str , n : int ): \"\"\"\u5141\u8bb8\u7b56\u7565\u5077\u770b\u672a\u6765\u6570\u636e \u53ef\u7528\u4ee5\u56e0\u5b50\u68c0\u9a8c\u573a\u666f\u3002\u8981\u6c42\u6570\u636e\u672c\u8eab\u5df2\u7f13\u5b58\u3002\u5426\u5219\u8bf7\u7528Stock.get_bars\u7b49\u65b9\u6cd5\u83b7\u53d6\u3002 \"\"\" if self . bs is None or self . bs . barss is None : raise ValueError ( \"data is not cached\" ) if code in self . bs . barss : if self . bs . cursor + n + 1 < len ( self . bs . barss [ code ]): return Stock . qfq ( self . bs . barss [ code ][ self . bs . cursor : self . bs . cursor + n ] ) else : raise ValueError ( \"data is not cached\" ) plot_metrics ( self , indicator = None ) async \u00b6 .. deprecated:: 2.0.0 use make_report instead Source code in omicron/strategy/base.py @deprecated ( \"2.0.0\" , details = \"use `make_report` instead\" ) async def plot_metrics ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): return await self . make_report ( indicator ) positions ( self , dt = None ) \u00b6 \u8fd4\u56de\u5f53\u524d\u6301\u4ed3 Source code in omicron/strategy/base.py def positions ( self , dt : Optional [ datetime . 
date ] = None ): \"\"\"\u8fd4\u56de\u5f53\u524d\u6301\u4ed3\"\"\" return self . broker . positions ( dt ) predict ( self , frame , frame_type , i , barss = None , ** kwargs ) async \u00b6 \u7b56\u7565\u8bc4\u4f30\u51fd\u6570\u3002\u5728\u6b64\u51fd\u6570\u4e2d\u5b9e\u73b0\u4ea4\u6613\u4fe1\u53f7\u68c0\u6d4b\u548c\u5904\u7406\u3002 Parameters: Name Type Description Default frame Union[datetime.date, datetime.datetime] \u5f53\u524d\u65f6\u95f4\u5e27 required frame_type FrameType \u5904\u7406\u7684\u6570\u636e\u4e3b\u5468\u671f required i int \u5f53\u524d\u65f6\u95f4\u79bb\u56de\u6d4b\u8d77\u59cb\u7684\u5355\u4f4d\u6570 required barss Optional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' Union [ List , Dict ]: \"\"\"\u5356\u51fa\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u5356\u4ef7\uff0c\u5982\u679c\u672a\u63d0\u4f9b\uff0c\u5219\u8f6c\u4e3a\u5e02\u4ef7\u5355 vol: \u59d4\u5356\u80a1\u6570\u3002\u5982\u679c\u4e3aNone\uff0c\u5219percent\u5fc5\u987b\u4f20\u5165 percent: \u5356\u51fa\u4e00\u5b9a\u6bd4\u4f8b\u7684\u6301\u4ed3\uff0c\u53d6\u503c\u4ecb\u4e8e0\u4e0e1\u4e4b\u95f4\u3002\u5982\u679c\u4e0evol\u540c\u65f6\u63d0\u4f9b\uff0c\u6b64\u53c2\u6570\u5c06\u88ab\u5ffd\u7565\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u6309\u6bd4\u4f8b\u6362\u7b97\u540e\u7684\u5356\u51fa\u6570\u636e\u662f\u7b26\u5408\u8981\u6c42\u7684\uff08\u6bd4\u5982\u4e0d\u4e3a100\u7684\u500d\u6570\uff0c\u4f46\u6709\u4e9b\u60c5\u51b5\u4e0b\u8fd9\u662f\u5141\u8bb8\u7684\uff0c\u6240\u4ee5\u7a0b\u5e8f\u8fd9\u91cc\u65e0\u6cd5\u5e2e\u4f60\u5224\u65ad\uff09 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: Union[List, Dict]: \u6210\u4ea4\u8fd4\u56de\uff0c\u8be6\u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\uff0ctrade server\u53ea\u8fd4\u56de\u4e00\u4e2a\u59d4\u6258\u5355\u4fe1\u606f \"\"\" logger . debug ( \"sell order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { percent : .2% } \" if percent is not None else None , date = order_time , ) if vol is None and percent is None : raise ValueError ( \"either vol or percent must be presented\" ) if vol is None : if price is None : price = await self . broker . _get_market_sell_price ( sec , order_time = order_time ) # there's no market_sell_percent API in traderclient return self . broker . sell_percent ( sec , price , percent , order_time = order_time ) # type: ignore else : if price is None : return self . broker . market_sell ( sec , vol , order_time = order_time ) else : return self . broker . sell ( sec , price , vol , order_time = order_time ) sma \u00b6 SMAStrategy ( BaseStrategy ) \u00b6 Source code in omicron/strategy/sma.py class SMAStrategy ( BaseStrategy ): def __init__ ( self , sec : str , n_short : int = 5 , n_long : int = 10 , * args , ** kwargs ): self . _sec = sec self . _n_short = n_short self . _n_long = n_long self . indicators = [] super () . __init__ ( * args , ** kwargs ) async def before_start ( self ): date = self . bs . end if self . bs is not None else None logger . info ( \"before_start, cash is %s \" , self . cash , date = date ) async def before_trade ( self , date : datetime . date ): logger . info ( \"before_trade, cash is %s , portfolio is %s \" , self . cash , self . positions ( date ), date = date , ) async def after_trade ( self , date : datetime . date ): logger . 
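Finally, a sketch of a `predict` override in the spirit of the SMA strategy shown here, using the documented `buy`/`sell`/`cash`/`available_shares` helpers. The `from omicron import tf` import and the momentum rule itself are assumptions for illustration, not part of the library's documented surface.

```
import numpy as np

from omicron import tf  # assumption: TimeFrame helpers exported at package level
from omicron.strategy.base import BaseStrategy


class MomentumStrategy(BaseStrategy):
    def __init__(self, sec, *args, **kwargs):
        self._sec = sec
        super().__init__(*args, **kwargs)

    async def predict(self, frame, frame_type, i, barss=None, **kwargs):
        if barss is None or self._sec not in barss:
            return
        close = barss[self._sec]["close"]

        if close[-1] > np.mean(close[-5:]) and self.cash >= 100 * close[-1]:
            # buy by money; order_time must be supplied in backtest mode
            await self.buy(self._sec, money=self.cash,
                           order_time=tf.combine_time(frame, 14, 55))
        elif close[-1] < np.mean(close[-5:]) and self.available_shares(self._sec, frame) > 0:
            await self.sell(self._sec, percent=1.0,
                            order_time=tf.combine_time(frame, 14, 55))
```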
info ( \"after_trade, cash is %s , portfolio is %s \" , self . cash , self . positions ( date ), date = date , ) async def after_stop ( self ): date = self . bs . end if self . bs is not None else None logger . info ( \"after_stop, cash is %s , portfolio is %s \" , self . cash , self . positions , date = date , ) async def predict ( self , frame : Frame , frame_type : FrameType , i : int , barss , ** kwargs ): if barss is None : raise ValueError ( \"please specify `prefetch_stocks`\" ) bars : Union [ BarsArray , None ] = barss . get ( self . _sec ) if bars is None : raise ValueError ( f \" { self . _sec } not found in `prefetch_stocks`\" ) ma_short = np . mean ( bars [ \"close\" ][ - self . _n_short :]) ma_long = np . mean ( bars [ \"close\" ][ - self . _n_long :]) if ma_short > ma_long : self . indicators . append (( frame , 1 )) if self . cash >= 100 * bars [ \"close\" ][ - 1 ]: await self . buy ( self . _sec , money = self . cash , order_time = tf . combine_time ( frame , 14 , 55 ), ) elif ma_short < ma_long : self . indicators . append (( frame , - 1 )) if self . available_shares ( self . _sec , frame ) > 0 : await self . sell ( self . _sec , percent = 1.0 , order_time = tf . combine_time ( frame , 14 , 55 ) ) after_trade ( self , date ) async \u00b6 \u6bcf\u65e5\u6536\u76d8\u540e\u7684\u6536\u5c3e\u5de5\u4f5c Parameters: Name Type Description Default date date \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 required Source code in omicron/strategy/sma.py async def after_trade ( self , date : datetime . date ): logger . info ( \"after_trade, cash is %s , portfolio is %s \" , self . cash , self . positions ( date ), date = date , ) before_start ( self ) async \u00b6 \u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 Source code in omicron/strategy/sma.py async def before_start ( self ): date = self . bs . end if self . bs is not None else None logger . info ( \"before_start, cash is %s \" , self . cash , date = date ) before_trade ( self , date ) async \u00b6 \u6bcf\u65e5\u5f00\u76d8\u524d\u7684\u51c6\u5907\u5de5\u4f5c Parameters: Name Type Description Default date date \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 required Source code in omicron/strategy/sma.py async def before_trade ( self , date : datetime . date ): logger . info ( \"before_trade, cash is %s , portfolio is %s \" , self . cash , self . 
positions ( date ), date = date , ) predict ( self , frame , frame_type , i , barss , ** kwargs ) async \u00b6 \u7b56\u7565\u8bc4\u4f30\u51fd\u6570\u3002\u5728\u6b64\u51fd\u6570\u4e2d\u5b9e\u73b0\u4ea4\u6613\u4fe1\u53f7\u68c0\u6d4b\u548c\u5904\u7406\u3002 Parameters: Name Type Description Default frame Union[datetime.date, datetime.datetime] \u5f53\u524d\u65f6\u95f4\u5e27 required frame_type FrameType \u5904\u7406\u7684\u6570\u636e\u4e3b\u5468\u671f required i int \u5f53\u524d\u65f6\u95f4\u79bb\u56de\u6d4b\u8d77\u59cb\u7684\u5355\u4f4d\u6570 required barss \u5982\u679c\u8c03\u7528 backtest \u65f6\u4f20\u5165\u4e86 portfolio \u53ca\u53c2\u6570\uff0c\u5219 backtest \u5c06\u4f1a\u5728\u56de\u6d4b\u4e4b\u524d\uff0c\u9884\u53d6\u4ece[start - warmup_period * frame_type, end]\u95f4\u7684portfolio\u884c\u60c5\u6570\u636e\uff0c\u5e76\u5728\u6bcf\u6b21\u8c03\u7528 predict \u65b9\u6cd5\u65f6\uff0c\u901a\u8fc7 barss \u53c2\u6570\uff0c\u5c06[start - warmup_period * frame_type, start + i * frame_type]\u95f4\u7684\u6570\u636e\u4f20\u7ed9 predict \u65b9\u6cd5\u3002\u4f20\u5165\u7684\u6570\u636e\u5df2\u8fdb\u884c\u524d\u590d\u6743\u3002 required Keyword Args: \u5728 backtest \u65b9\u6cd5\u4e2d\u7684\u4f20\u5165\u7684kwargs\u53c2\u6570\u5c06\u88ab\u900f\u4f20\u5230\u6b64\u65b9\u6cd5\u4e2d\u3002 Source code in omicron/strategy/sma.py async def predict ( self , frame : Frame , frame_type : FrameType , i : int , barss , ** kwargs ): if barss is None : raise ValueError ( \"please specify `prefetch_stocks`\" ) bars : Union [ BarsArray , None ] = barss . get ( self . _sec ) if bars is None : raise ValueError ( f \" { self . _sec } not found in `prefetch_stocks`\" ) ma_short = np . mean ( bars [ \"close\" ][ - self . _n_short :]) ma_long = np . mean ( bars [ \"close\" ][ - self . _n_long :]) if ma_short > ma_long : self . indicators . append (( frame , 1 )) if self . cash >= 100 * bars [ \"close\" ][ - 1 ]: await self . buy ( self . _sec , money = self . cash , order_time = tf . combine_time ( frame , 14 , 55 ), ) elif ma_short < ma_long : self . indicators . append (( frame , - 1 )) if self . available_shares ( self . _sec , frame ) > 0 : await self . sell ( self . _sec , percent = 1.0 , order_time = tf . combine_time ( frame , 14 , 55 ) )","title":"\u7b56\u7565\u6846\u67b6"},{"location":"api/strategy/#omicron.strategy.base","text":"","title":"base"},{"location":"api/strategy/#omicron.strategy.base.BacktestState","text":"BacktestState(start: Union[datetime.date, datetime.datetime], end: Union[datetime.date, datetime.datetime], barss: Union[NoneType, Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' frame_ ) or ( intra_day and next_frame . date () > frame_ . date () ): await self . after_trade ( frame_ , day_barss ) self . broker . stop_backtest () await self . after_stop () self . bills = self . broker . bills () baseline = kwargs . get ( \"baseline\" , \"399300.XSHE\" ) self . metrics = self . broker . metrics ( baseline = baseline ) self . bs . baseline = baseline @property def cash ( self ): \"\"\"\u8fd4\u56de\u5f53\u524d\u53ef\u7528\u73b0\u91d1\"\"\" return self . broker . available_money def positions ( self , dt : Optional [ datetime . date ] = None ): \"\"\"\u8fd4\u56de\u5f53\u524d\u6301\u4ed3\"\"\" return self . broker . 
positions ( dt ) def available_shares ( self , sec : str , dt : Optional [ Frame ] = None ): \"\"\"\u8fd4\u56de\u7ed9\u5b9a\u80a1\u7968\u5728`dt`\u65e5\u7684\u53ef\u552e\u80a1\u6570 Args: sec: \u8bc1\u5238\u4ee3\u7801 dt: \u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u65e0\u610f\u4e49\uff0c\u53ea\u80fd\u8fd4\u56de\u6700\u65b0\u6570\u636e\uff1b\u5728\u56de\u6d4b\u65f6\uff0c\u5fc5\u987b\u6307\u5b9a\u65e5\u671f\uff0c\u4e14\u8fd4\u56de\u6307\u5b9a\u65e5\u671f\u4e0b\u7684\u6301\u4ed3\u3002 \"\"\" return self . broker . available_shares ( sec , dt ) async def buy ( self , sec : str , price : Optional [ float ] = None , vol : Optional [ int ] = None , money : Optional [ float ] = None , order_time : Optional [ datetime . datetime ] = None , ) -> Dict : \"\"\"\u4e70\u5165\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u4e70\u4ef7\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u81ea\u52a8\u8f6c\u5e02\u4ef7\u4e70\u5165\u3002 vol: \u59d4\u4e70\u80a1\u6570\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u4e3a100\u7684\u6574\u6570\u3002\u5982\u679c\u4e3aNone, \u5219money\u5fc5\u987b\u4f20\u5165\u3002 money: \u59d4\u4e70\u91d1\u989d\u3002\u5982\u679c\u540c\u65f6\u4f20\u5165\u4e86vol\uff0c\u5219\u6b64\u53c2\u6570\u81ea\u52a8\u5ffd\u7565 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: \u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\u3002 \"\"\" logger . debug ( \"buy order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { money : .0f } \" if money is not None else None , date = order_time , ) if vol is None : if money is None : raise ValueError ( \"parameter `mnoey` must be presented!\" ) return await self . broker . buy_by_money ( sec , money , price , order_time = order_time ) elif price is None : return self . broker . market_buy ( sec , vol , order_time = order_time ) else : return self . broker . buy ( sec , price , vol , order_time = order_time ) async def sell ( self , sec : str , price : Optional [ float ] = None , vol : Optional [ float ] = None , percent : Optional [ float ] = None , order_time : Optional [ datetime . datetime ] = None , ) -> Union [ List , Dict ]: \"\"\"\u5356\u51fa\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u5356\u4ef7\uff0c\u5982\u679c\u672a\u63d0\u4f9b\uff0c\u5219\u8f6c\u4e3a\u5e02\u4ef7\u5355 vol: \u59d4\u5356\u80a1\u6570\u3002\u5982\u679c\u4e3aNone\uff0c\u5219percent\u5fc5\u987b\u4f20\u5165 percent: \u5356\u51fa\u4e00\u5b9a\u6bd4\u4f8b\u7684\u6301\u4ed3\uff0c\u53d6\u503c\u4ecb\u4e8e0\u4e0e1\u4e4b\u95f4\u3002\u5982\u679c\u4e0evol\u540c\u65f6\u63d0\u4f9b\uff0c\u6b64\u53c2\u6570\u5c06\u88ab\u5ffd\u7565\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u6309\u6bd4\u4f8b\u6362\u7b97\u540e\u7684\u5356\u51fa\u6570\u636e\u662f\u7b26\u5408\u8981\u6c42\u7684\uff08\u6bd4\u5982\u4e0d\u4e3a100\u7684\u500d\u6570\uff0c\u4f46\u6709\u4e9b\u60c5\u51b5\u4e0b\u8fd9\u662f\u5141\u8bb8\u7684\uff0c\u6240\u4ee5\u7a0b\u5e8f\u8fd9\u91cc\u65e0\u6cd5\u5e2e\u4f60\u5224\u65ad\uff09 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: Union[List, Dict]: \u6210\u4ea4\u8fd4\u56de\uff0c\u8be6\u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\uff0ctrade server\u53ea\u8fd4\u56de\u4e00\u4e2a\u59d4\u6258\u5355\u4fe1\u606f \"\"\" logger . 
debug ( \"sell order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { percent : .2% } \" if percent is not None else None , date = order_time , ) if vol is None and percent is None : raise ValueError ( \"either vol or percent must be presented\" ) if vol is None : if price is None : price = await self . broker . _get_market_sell_price ( sec , order_time = order_time ) # there's no market_sell_percent API in traderclient return self . broker . sell_percent ( sec , price , percent , order_time = order_time ) # type: ignore else : if price is None : return self . broker . market_sell ( sec , vol , order_time = order_time ) else : return self . broker . sell ( sec , price , vol , order_time = order_time ) async def filter_paused_stock ( self , buylist : List [ str ], dt : datetime . date ): secs = await Security . select ( dt ) . eval () in_trading = jq . get_price ( secs , fields = [ \"paused\" ], start_date = dt , end_date = dt , skip_paused = True )[ \"code\" ] . to_numpy () return np . intersect1d ( buylist , in_trading ) async def before_start ( self ): \"\"\"\u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 \"\"\" if self . bs is not None : logger . info ( \"BEFORE_START: %s < %s - %s >\" , self . name , self . bs . start , self . bs . end , date = self . bs . start , ) else : logger . info ( \"BEFORE_START: %s \" , self . name ) async def before_trade ( self , date : datetime . date , barss : Optional [ Dict [ str , BarsArray ]] = None ): \"\"\"\u6bcf\u65e5\u5f00\u76d8\u524d\u7684\u51c6\u5907\u5de5\u4f5c Args: date: \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f barss: \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 \"\"\" logger . debug ( \"BEFORE_TRADE: %s \" , self . name , date = date ) async def after_trade ( self , date : Frame , barss : Optional [ Dict [ str , BarsArray ]] = None ): \"\"\"\u6bcf\u65e5\u6536\u76d8\u540e\u7684\u6536\u5c3e\u5de5\u4f5c Args: date: \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f barss: \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 \"\"\" logger . debug ( \"AFTER_TRADE: %s \" , self . name , date = date ) async def after_stop ( self ): if self . bs is not None : logger . info ( \"STOP %s < %s - %s >\" , self . name , self . bs . start , self . bs . end , date = self . bs . end , ) else : logger . info ( \"STOP %s \" , self . 
name ) async def predict ( self , frame : Frame , frame_type : FrameType , i : int , barss : Optional [ Dict [ str , BarsArray ]] = None , ** kwargs , ): \"\"\"\u7b56\u7565\u8bc4\u4f30\u51fd\u6570\u3002\u5728\u6b64\u51fd\u6570\u4e2d\u5b9e\u73b0\u4ea4\u6613\u4fe1\u53f7\u68c0\u6d4b\u548c\u5904\u7406\u3002 Args: frame: \u5f53\u524d\u65f6\u95f4\u5e27 frame_type: \u5904\u7406\u7684\u6570\u636e\u4e3b\u5468\u671f i: \u5f53\u524d\u65f6\u95f4\u79bb\u56de\u6d4b\u8d77\u59cb\u7684\u5355\u4f4d\u6570 barss: \u5982\u679c\u8c03\u7528`backtest`\u65f6\u4f20\u5165\u4e86`portfolio`\u53ca\u53c2\u6570\uff0c\u5219`backtest`\u5c06\u4f1a\u5728\u56de\u6d4b\u4e4b\u524d\uff0c\u9884\u53d6\u4ece[start - warmup_period * frame_type, end]\u95f4\u7684portfolio\u884c\u60c5\u6570\u636e\uff0c\u5e76\u5728\u6bcf\u6b21\u8c03\u7528`predict`\u65b9\u6cd5\u65f6\uff0c\u901a\u8fc7`barss`\u53c2\u6570\uff0c\u5c06[start - warmup_period * frame_type, start + i * frame_type]\u95f4\u7684\u6570\u636e\u4f20\u7ed9`predict`\u65b9\u6cd5\u3002\u4f20\u5165\u7684\u6570\u636e\u5df2\u8fdb\u884c\u524d\u590d\u6743\u3002 Keyword Args: \u5728`backtest`\u65b9\u6cd5\u4e2d\u7684\u4f20\u5165\u7684kwargs\u53c2\u6570\u5c06\u88ab\u900f\u4f20\u5230\u6b64\u65b9\u6cd5\u4e2d\u3002 \"\"\" raise NotImplementedError @deprecated ( \"2.0.0\" , details = \"use `make_report` instead\" ) async def plot_metrics ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): return await self . make_report ( indicator ) async def make_report ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): \"\"\"\u7b56\u7565\u56de\u6d4b\u62a5\u544a Args: indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u5217\u540d\u4e3a\"value\"\u7684DataFrame \"\"\" if self . bills is None or self . metrics is None : raise ValueError ( \"Please run `start_backtest` first.\" ) if isinstance ( indicator , list ): assert len ( indicator [ 0 ]) == 2 indicator = pd . DataFrame ( indicator , columns = [ \"date\" , \"value\" ]) indicator . set_index ( \"date\" , inplace = True ) mg = MetricsGraph ( self . bills , self . metrics , indicator = indicator , baseline_code = self . bs . baseline , ) await mg . 
plot ()","title":"BaseStrategy"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.cash","text":"\u8fd4\u56de\u5f53\u524d\u53ef\u7528\u73b0\u91d1","title":"cash"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.__init__","text":"\u6784\u9020\u51fd\u6570 Parameters: Name Type Description Default url str \u5b9e\u76d8/\u56de\u6d4b\u670d\u52a1\u5668\u7684\u5730\u5740\u3002 required start Union[datetime.date, datetime.datetime] \u56de\u6d4b\u8d77\u59cb\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 None end Union[datetime.date, datetime.datetime] \u56de\u6d4b\u7ed3\u675f\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 None account Optional[str] \u5b9e\u76d8/\u56de\u6d4b\u8d26\u53f7\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u4ee5\u7b56\u7565\u540d+\u968f\u673a\u5b57\u7b26\u6784\u5efa\u8d26\u53f7\u3002 None token Optional[str] \u5b9e\u76d8/\u56de\u6d4b\u65f6\u7528\u7684token\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u81ea\u52a8\u751f\u6210\u3002 None is_backtest bool \u662f\u5426\u4e3a\u56de\u6d4b\u6a21\u5f0f\u3002 True name Optional[str] \u7b56\u7565\u540d\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u4f7f\u7528\u7c7b\u540d\u5b57\u5c0f\u5199 None ver Optional[str] \u7b56\u7565\u7248\u672c\u53f7\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u9ed8\u8ba4\u4e3a0.1. None start Union[datetime.date, datetime.datetime] \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u8d77\u59cb\u65f6\u95f4 None end Union[datetime.date, datetime.datetime] \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u7ed3\u675f\u65f6\u95f4 None frame_type Optional[coretypes.types.FrameType] \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u4e3b\u5468\u671f None warmup_period int \u7b56\u7565\u6267\u884c\u65f6\u9700\u8981\u7684\u6700\u5c0fbar\u6570\uff08\u4ee5frame_type\uff09\u8ba1\u3002 0 Source code in omicron/strategy/base.py def __init__ ( self , url : str , account : Optional [ str ] = None , token : Optional [ str ] = None , name : Optional [ str ] = None , ver : Optional [ str ] = None , is_backtest : bool = True , start : Optional [ Frame ] = None , end : Optional [ Frame ] = None , frame_type : Optional [ FrameType ] = None , warmup_period : int = 0 , ): \"\"\"\u6784\u9020\u51fd\u6570 Args: url: \u5b9e\u76d8/\u56de\u6d4b\u670d\u52a1\u5668\u7684\u5730\u5740\u3002 start: \u56de\u6d4b\u8d77\u59cb\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 end: \u56de\u6d4b\u7ed3\u675f\u65e5\u671f\u3002\u56de\u6d4b\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002 account: \u5b9e\u76d8/\u56de\u6d4b\u8d26\u53f7\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u4ee5\u7b56\u7565\u540d+\u968f\u673a\u5b57\u7b26\u6784\u5efa\u8d26\u53f7\u3002 token: \u5b9e\u76d8/\u56de\u6d4b\u65f6\u7528\u7684token\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\u5fc5\u987b\u4f20\u5165\u3002\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\uff0c\u5982\u679c\u672a\u4f20\u5165\uff0c\u5c06\u81ea\u52a8\u751f\u6210\u3002 is_backtest: \u662f\u5426\u4e3a\u56de\u6d4b\u6a21\u5f0f\u3002 name: 
\u7b56\u7565\u540d\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u4f7f\u7528\u7c7b\u540d\u5b57\u5c0f\u5199 ver: \u7b56\u7565\u7248\u672c\u53f7\u3002\u5982\u679c\u4e0d\u4f20\u5165\uff0c\u5219\u9ed8\u8ba4\u4e3a0.1. start: \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u8d77\u59cb\u65f6\u95f4 end: \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u7ed3\u675f\u65f6\u95f4 frame_type: \u5982\u679c\u662f\u56de\u6d4b\u6a21\u5f0f\uff0c\u5219\u9700\u8981\u63d0\u4f9b\u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u4e3b\u5468\u671f warmup_period: \u7b56\u7565\u6267\u884c\u65f6\u9700\u8981\u7684\u6700\u5c0fbar\u6570\uff08\u4ee5frame_type\uff09\u8ba1\u3002 \"\"\" self . ver = ver or \"0.1\" self . name = name or self . __class__ . __name__ . lower () + f \"_v { self . ver } \" self . token = token or uuid . uuid4 () . hex self . account = account or f \"smallcap- { self . token [ - 4 :] } \" self . url = url self . bills = None self . metrics = None # used by both live and backtest self . warmup_period = warmup_period self . is_backtest = is_backtest if is_backtest : if start is None or end is None or frame_type is None : raise ValueError ( \"start, end and frame_type must be presented.\" ) self . bs = BacktestState ( start , end , None , 0 , warmup_period ) self . _frame_type = frame_type self . broker = TraderClient ( url , self . account , self . token , is_backtest = True , start = self . bs . start , end = self . bs . end , ) else : if account is None or token is None : raise ValueError ( \"account and token must be presented.\" ) self . broker = TraderClient ( url , self . account , self . token , is_backtest = False )","title":"__init__()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.after_trade","text":"\u6bcf\u65e5\u6536\u76d8\u540e\u7684\u6536\u5c3e\u5de5\u4f5c Parameters: Name Type Description Default date Union[datetime.date, datetime.datetime] \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss Optional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' frame_ ) or ( intra_day and next_frame . date () > frame_ . date () ): await self . after_trade ( frame_ , day_barss ) self . broker . stop_backtest () await self . after_stop () self . bills = self . broker . bills () baseline = kwargs . get ( \"baseline\" , \"399300.XSHE\" ) self . metrics = self . broker . metrics ( baseline = baseline ) self . bs . 
baseline = baseline","title":"backtest()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.before_start","text":"\u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 Source code in omicron/strategy/base.py async def before_start ( self ): \"\"\"\u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 \"\"\" if self . bs is not None : logger . info ( \"BEFORE_START: %s < %s - %s >\" , self . name , self . bs . start , self . bs . end , date = self . bs . start , ) else : logger . info ( \"BEFORE_START: %s \" , self . name )","title":"before_start()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.before_trade","text":"\u6bcf\u65e5\u5f00\u76d8\u524d\u7684\u51c6\u5907\u5de5\u4f5c Parameters: Name Type Description Default date date \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss Optional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' Dict : \"\"\"\u4e70\u5165\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u4e70\u4ef7\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u81ea\u52a8\u8f6c\u5e02\u4ef7\u4e70\u5165\u3002 vol: \u59d4\u4e70\u80a1\u6570\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u4e3a100\u7684\u6574\u6570\u3002\u5982\u679c\u4e3aNone, \u5219money\u5fc5\u987b\u4f20\u5165\u3002 money: \u59d4\u4e70\u91d1\u989d\u3002\u5982\u679c\u540c\u65f6\u4f20\u5165\u4e86vol\uff0c\u5219\u6b64\u53c2\u6570\u81ea\u52a8\u5ffd\u7565 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: \u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\u3002 \"\"\" logger . debug ( \"buy order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { money : .0f } \" if money is not None else None , date = order_time , ) if vol is None : if money is None : raise ValueError ( \"parameter `mnoey` must be presented!\" ) return await self . broker . buy_by_money ( sec , money , price , order_time = order_time ) elif price is None : return self . broker . market_buy ( sec , vol , order_time = order_time ) else : return self . broker . 
buy ( sec , price , vol , order_time = order_time )","title":"buy()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.make_report","text":"\u7b56\u7565\u56de\u6d4b\u62a5\u544a Parameters: Name Type Description Default indicator Union[pandas.core.frame.DataFrame, List[Tuple]] \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u5217\u540d\u4e3a\"value\"\u7684DataFrame None Source code in omicron/strategy/base.py async def make_report ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): \"\"\"\u7b56\u7565\u56de\u6d4b\u62a5\u544a Args: indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u5217\u540d\u4e3a\"value\"\u7684DataFrame \"\"\" if self . bills is None or self . metrics is None : raise ValueError ( \"Please run `start_backtest` first.\" ) if isinstance ( indicator , list ): assert len ( indicator [ 0 ]) == 2 indicator = pd . DataFrame ( indicator , columns = [ \"date\" , \"value\" ]) indicator . set_index ( \"date\" , inplace = True ) mg = MetricsGraph ( self . bills , self . metrics , indicator = indicator , baseline_code = self . bs . baseline , ) await mg . plot ()","title":"make_report()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.peek","text":"\u5141\u8bb8\u7b56\u7565\u5077\u770b\u672a\u6765\u6570\u636e \u53ef\u7528\u4ee5\u56e0\u5b50\u68c0\u9a8c\u573a\u666f\u3002\u8981\u6c42\u6570\u636e\u672c\u8eab\u5df2\u7f13\u5b58\u3002\u5426\u5219\u8bf7\u7528Stock.get_bars\u7b49\u65b9\u6cd5\u83b7\u53d6\u3002 Source code in omicron/strategy/base.py async def peek ( self , code : str , n : int ): \"\"\"\u5141\u8bb8\u7b56\u7565\u5077\u770b\u672a\u6765\u6570\u636e \u53ef\u7528\u4ee5\u56e0\u5b50\u68c0\u9a8c\u573a\u666f\u3002\u8981\u6c42\u6570\u636e\u672c\u8eab\u5df2\u7f13\u5b58\u3002\u5426\u5219\u8bf7\u7528Stock.get_bars\u7b49\u65b9\u6cd5\u83b7\u53d6\u3002 \"\"\" if self . bs is None or self . bs . barss is None : raise ValueError ( \"data is not cached\" ) if code in self . bs . barss : if self . bs . cursor + n + 1 < len ( self . bs . barss [ code ]): return Stock . qfq ( self . bs . barss [ code ][ self . bs . cursor : self . bs . cursor + n ] ) else : raise ValueError ( \"data is not cached\" )","title":"peek()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.plot_metrics","text":".. deprecated:: 2.0.0 use make_report instead Source code in omicron/strategy/base.py @deprecated ( \"2.0.0\" , details = \"use `make_report` instead\" ) async def plot_metrics ( self , indicator : Union [ pd . DataFrame , List [ Tuple ], None ] = None ): return await self . make_report ( indicator )","title":"plot_metrics()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.positions","text":"\u8fd4\u56de\u5f53\u524d\u6301\u4ed3 Source code in omicron/strategy/base.py def positions ( self , dt : Optional [ datetime . date ] = None ): \"\"\"\u8fd4\u56de\u5f53\u524d\u6301\u4ed3\"\"\" return self . broker . 
positions ( dt )","title":"positions()"},{"location":"api/strategy/#omicron.strategy.base.BaseStrategy.predict","text":"\u7b56\u7565\u8bc4\u4f30\u51fd\u6570\u3002\u5728\u6b64\u51fd\u6570\u4e2d\u5b9e\u73b0\u4ea4\u6613\u4fe1\u53f7\u68c0\u6d4b\u548c\u5904\u7406\u3002 Parameters: Name Type Description Default frame Union[datetime.date, datetime.datetime] \u5f53\u524d\u65f6\u95f4\u5e27 required frame_type FrameType \u5904\u7406\u7684\u6570\u636e\u4e3b\u5468\u671f required i int \u5f53\u524d\u65f6\u95f4\u79bb\u56de\u6d4b\u8d77\u59cb\u7684\u5355\u4f4d\u6570 required barss Optional[Dict[str, numpy.ndarray[Any, numpy.dtype[dtype([('frame', ' Union [ List , Dict ]: \"\"\"\u5356\u51fa\u80a1\u7968 Args: sec: \u8bc1\u5238\u4ee3\u7801 price: \u59d4\u5356\u4ef7\uff0c\u5982\u679c\u672a\u63d0\u4f9b\uff0c\u5219\u8f6c\u4e3a\u5e02\u4ef7\u5355 vol: \u59d4\u5356\u80a1\u6570\u3002\u5982\u679c\u4e3aNone\uff0c\u5219percent\u5fc5\u987b\u4f20\u5165 percent: \u5356\u51fa\u4e00\u5b9a\u6bd4\u4f8b\u7684\u6301\u4ed3\uff0c\u53d6\u503c\u4ecb\u4e8e0\u4e0e1\u4e4b\u95f4\u3002\u5982\u679c\u4e0evol\u540c\u65f6\u63d0\u4f9b\uff0c\u6b64\u53c2\u6570\u5c06\u88ab\u5ffd\u7565\u3002\u8bf7\u81ea\u884c\u4fdd\u8bc1\u6309\u6bd4\u4f8b\u6362\u7b97\u540e\u7684\u5356\u51fa\u6570\u636e\u662f\u7b26\u5408\u8981\u6c42\u7684\uff08\u6bd4\u5982\u4e0d\u4e3a100\u7684\u500d\u6570\uff0c\u4f46\u6709\u4e9b\u60c5\u51b5\u4e0b\u8fd9\u662f\u5141\u8bb8\u7684\uff0c\u6240\u4ee5\u7a0b\u5e8f\u8fd9\u91cc\u65e0\u6cd5\u5e2e\u4f60\u5224\u65ad\uff09 order_time: \u4ec5\u5728\u56de\u6d4b\u6a21\u5f0f\u4e0b\u9700\u8981\u63d0\u4f9b\u3002\u5b9e\u76d8\u6a21\u5f0f\u4e0b\uff0c\u6b64\u53c2\u6570\u81ea\u52a8\u88ab\u5ffd\u7565 Returns: Union[List, Dict]: \u6210\u4ea4\u8fd4\u56de\uff0c\u8be6\u89c1traderclient\u4e2d\u7684`buy`\u65b9\u6cd5\uff0ctrade server\u53ea\u8fd4\u56de\u4e00\u4e2a\u59d4\u6258\u5355\u4fe1\u606f \"\"\" logger . debug ( \"sell order: %s , %s , %s , %s \" , sec , f \" { price : .2f } \" if price is not None else None , f \" { vol : .0f } \" if vol is not None else None , f \" { percent : .2% } \" if percent is not None else None , date = order_time , ) if vol is None and percent is None : raise ValueError ( \"either vol or percent must be presented\" ) if vol is None : if price is None : price = await self . broker . _get_market_sell_price ( sec , order_time = order_time ) # there's no market_sell_percent API in traderclient return self . broker . sell_percent ( sec , price , percent , order_time = order_time ) # type: ignore else : if price is None : return self . broker . market_sell ( sec , vol , order_time = order_time ) else : return self . broker . sell ( sec , price , vol , order_time = order_time )","title":"sell()"},{"location":"api/strategy/#omicron.strategy.sma","text":"","title":"sma"},{"location":"api/strategy/#omicron.strategy.sma.SMAStrategy","text":"Source code in omicron/strategy/sma.py class SMAStrategy ( BaseStrategy ): def __init__ ( self , sec : str , n_short : int = 5 , n_long : int = 10 , * args , ** kwargs ): self . _sec = sec self . _n_short = n_short self . _n_long = n_long self . indicators = [] super () . __init__ ( * args , ** kwargs ) async def before_start ( self ): date = self . bs . end if self . bs is not None else None logger . info ( \"before_start, cash is %s \" , self . cash , date = date ) async def before_trade ( self , date : datetime . date ): logger . info ( \"before_trade, cash is %s , portfolio is %s \" , self . cash , self . positions ( date ), date = date , ) async def after_trade ( self , date : datetime . 
date ): logger . info ( \"after_trade, cash is %s , portfolio is %s \" , self . cash , self . positions ( date ), date = date , ) async def after_stop ( self ): date = self . bs . end if self . bs is not None else None logger . info ( \"after_stop, cash is %s , portfolio is %s \" , self . cash , self . positions , date = date , ) async def predict ( self , frame : Frame , frame_type : FrameType , i : int , barss , ** kwargs ): if barss is None : raise ValueError ( \"please specify `prefetch_stocks`\" ) bars : Union [ BarsArray , None ] = barss . get ( self . _sec ) if bars is None : raise ValueError ( f \" { self . _sec } not found in `prefetch_stocks`\" ) ma_short = np . mean ( bars [ \"close\" ][ - self . _n_short :]) ma_long = np . mean ( bars [ \"close\" ][ - self . _n_long :]) if ma_short > ma_long : self . indicators . append (( frame , 1 )) if self . cash >= 100 * bars [ \"close\" ][ - 1 ]: await self . buy ( self . _sec , money = self . cash , order_time = tf . combine_time ( frame , 14 , 55 ), ) elif ma_short < ma_long : self . indicators . append (( frame , - 1 )) if self . available_shares ( self . _sec , frame ) > 0 : await self . sell ( self . _sec , percent = 1.0 , order_time = tf . combine_time ( frame , 14 , 55 ) )","title":"SMAStrategy"},{"location":"api/strategy/#omicron.strategy.sma.SMAStrategy.after_trade","text":"\u6bcf\u65e5\u6536\u76d8\u540e\u7684\u6536\u5c3e\u5de5\u4f5c Parameters: Name Type Description Default date date \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 required Source code in omicron/strategy/sma.py async def after_trade ( self , date : datetime . date ): logger . info ( \"after_trade, cash is %s , portfolio is %s \" , self . cash , self . positions ( date ), date = date , )","title":"after_trade()"},{"location":"api/strategy/#omicron.strategy.sma.SMAStrategy.before_start","text":"\u7b56\u7565\u542f\u52a8\u524d\u7684\u51c6\u5907\u5de5\u4f5c\u3002 \u5728\u4e00\u6b21\u56de\u6d4b\u4e2d\uff0c\u5b83\u4f1a\u5728backtest\u4e2d\u3001\u8fdb\u5165\u5faa\u73af\u4e4b\u524d\u8c03\u7528\u3002\u5982\u679c\u7b56\u7565\u9700\u8981\u6839\u636e\u8fc7\u53bb\u7684\u6570\u636e\u6765\u8ba1\u7b97\u4e00\u4e9b\u81ea\u9002\u5e94\u53c2\u6570\uff0c\u53ef\u4ee5\u5728\u6b64\u65b9\u6cd5\u4e2d\u5b9e\u73b0\u3002 Source code in omicron/strategy/sma.py async def before_start ( self ): date = self . bs . end if self . bs is not None else None logger . info ( \"before_start, cash is %s \" , self . cash , date = date )","title":"before_start()"},{"location":"api/strategy/#omicron.strategy.sma.SMAStrategy.before_trade","text":"\u6bcf\u65e5\u5f00\u76d8\u524d\u7684\u51c6\u5907\u5de5\u4f5c Parameters: Name Type Description Default date date \u65e5\u671f\u3002\u5728\u56de\u6d4b\u4e2d\u4e3a\u56de\u6d4b\u5f53\u65e5\u65e5\u671f\uff0c\u5728\u5b9e\u76d8\u4e2d\u4e3a\u7cfb\u7edf\u65e5\u671f required barss \u5982\u679c\u4e3b\u5468\u671f\u4e3a\u65e5\u7ebf\uff0c\u4e14\u652f\u6301\u9884\u53d6\uff0c\u5219\u4f1a\u5c06\u9884\u53d6\u7684barss\u4f20\u5165 required Source code in omicron/strategy/sma.py async def before_trade ( self , date : datetime . date ): logger . info ( \"before_trade, cash is %s , portfolio is %s \" , self . cash , self . 
positions ( date ), date = date , )","title":"before_trade()"},{"location":"api/strategy/#omicron.strategy.sma.SMAStrategy.predict","text":"\u7b56\u7565\u8bc4\u4f30\u51fd\u6570\u3002\u5728\u6b64\u51fd\u6570\u4e2d\u5b9e\u73b0\u4ea4\u6613\u4fe1\u53f7\u68c0\u6d4b\u548c\u5904\u7406\u3002 Parameters: Name Type Description Default frame Union[datetime.date, datetime.datetime] \u5f53\u524d\u65f6\u95f4\u5e27 required frame_type FrameType \u5904\u7406\u7684\u6570\u636e\u4e3b\u5468\u671f required i int \u5f53\u524d\u65f6\u95f4\u79bb\u56de\u6d4b\u8d77\u59cb\u7684\u5355\u4f4d\u6570 required barss \u5982\u679c\u8c03\u7528 backtest \u65f6\u4f20\u5165\u4e86 portfolio \u53ca\u53c2\u6570\uff0c\u5219 backtest \u5c06\u4f1a\u5728\u56de\u6d4b\u4e4b\u524d\uff0c\u9884\u53d6\u4ece[start - warmup_period * frame_type, end]\u95f4\u7684portfolio\u884c\u60c5\u6570\u636e\uff0c\u5e76\u5728\u6bcf\u6b21\u8c03\u7528 predict \u65b9\u6cd5\u65f6\uff0c\u901a\u8fc7 barss \u53c2\u6570\uff0c\u5c06[start - warmup_period * frame_type, start + i * frame_type]\u95f4\u7684\u6570\u636e\u4f20\u7ed9 predict \u65b9\u6cd5\u3002\u4f20\u5165\u7684\u6570\u636e\u5df2\u8fdb\u884c\u524d\u590d\u6743\u3002 required Keyword Args: \u5728 backtest \u65b9\u6cd5\u4e2d\u7684\u4f20\u5165\u7684kwargs\u53c2\u6570\u5c06\u88ab\u900f\u4f20\u5230\u6b64\u65b9\u6cd5\u4e2d\u3002 Source code in omicron/strategy/sma.py async def predict ( self , frame : Frame , frame_type : FrameType , i : int , barss , ** kwargs ): if barss is None : raise ValueError ( \"please specify `prefetch_stocks`\" ) bars : Union [ BarsArray , None ] = barss . get ( self . _sec ) if bars is None : raise ValueError ( f \" { self . _sec } not found in `prefetch_stocks`\" ) ma_short = np . mean ( bars [ \"close\" ][ - self . _n_short :]) ma_long = np . mean ( bars [ \"close\" ][ - self . _n_long :]) if ma_short > ma_long : self . indicators . append (( frame , 1 )) if self . cash >= 100 * bars [ \"close\" ][ - 1 ]: await self . buy ( self . _sec , money = self . cash , order_time = tf . combine_time ( frame , 14 , 55 ), ) elif ma_short < ma_long : self . indicators . append (( frame , - 1 )) if self . available_shares ( self . _sec , frame ) > 0 : await self . sell ( self . _sec , percent = 1.0 , order_time = tf . combine_time ( frame , 14 , 55 ) )","title":"predict()"},{"location":"api/talib/","text":"core \u00b6 angle ( ts , threshold = 0.01 , loss_func = 're' ) \u00b6 \u6c42\u65f6\u95f4\u5e8f\u5217 ts \u62df\u5408\u76f4\u7ebf\u76f8\u5bf9\u4e8e x \u8f74\u7684\u5939\u89d2\u7684\u4f59\u5f26\u503c \u672c\u51fd\u6570\u53ef\u4ee5\u7528\u6765\u5224\u65ad\u65f6\u95f4\u5e8f\u5217\u7684\u589e\u957f\u8d8b\u52bf\u3002\u5f53 angle \u5904\u4e8e[-1, 0]\u65f6\uff0c\u8d8a\u9760\u8fd10\uff0c\u4e0b\u964d\u8d8a\u5feb\uff1b\u5f53 angle \u5904\u4e8e[0, 1]\u65f6\uff0c\u8d8a\u63a5\u8fd10\uff0c\u4e0a\u5347\u8d8a\u5feb\u3002 \u5982\u679c ts \u65e0\u6cd5\u5f88\u597d\u5730\u62df\u5408\u4e3a\u76f4\u7ebf\uff0c\u5219\u8fd4\u56de[float, None] Examples: >>> ts = np . array ([ i for i in range ( 5 )]) >>> round ( angle ( ts )[ 1 ], 3 ) # degree: 45, rad: pi/2 0.707 >>> ts = np . array ([ np . sqrt ( 3 ) / 3 * i for i in range ( 10 )]) >>> round ( angle ( ts )[ 1 ], 3 ) # degree: 30, rad: pi/6 0.866 >>> ts = np . array ([ - np . 
sqrt ( 3 ) / 3 * i for i in range ( 7 )]) >>> round ( angle ( ts )[ 1 ], 3 ) # degree: 150, rad: 5*pi/6 - 0.866 Parameters: Name Type Description Default ts required Returns: Type Description Tuple[float, float] \u8fd4\u56de (error, consine(theta))\uff0c\u5373\u62df\u5408\u8bef\u5dee\u548c\u5939\u89d2\u4f59\u5f26\u503c\u3002 Source code in omicron/talib/core.py def angle ( ts , threshold = 0.01 , loss_func = \"re\" ) -> Tuple [ float , float ]: \"\"\"\u6c42\u65f6\u95f4\u5e8f\u5217`ts`\u62df\u5408\u76f4\u7ebf\u76f8\u5bf9\u4e8e`x`\u8f74\u7684\u5939\u89d2\u7684\u4f59\u5f26\u503c \u672c\u51fd\u6570\u53ef\u4ee5\u7528\u6765\u5224\u65ad\u65f6\u95f4\u5e8f\u5217\u7684\u589e\u957f\u8d8b\u52bf\u3002\u5f53`angle`\u5904\u4e8e[-1, 0]\u65f6\uff0c\u8d8a\u9760\u8fd10\uff0c\u4e0b\u964d\u8d8a\u5feb\uff1b\u5f53`angle` \u5904\u4e8e[0, 1]\u65f6\uff0c\u8d8a\u63a5\u8fd10\uff0c\u4e0a\u5347\u8d8a\u5feb\u3002 \u5982\u679c`ts`\u65e0\u6cd5\u5f88\u597d\u5730\u62df\u5408\u4e3a\u76f4\u7ebf\uff0c\u5219\u8fd4\u56de[float, None] Examples: >>> ts = np.array([ i for i in range(5)]) >>> round(angle(ts)[1], 3) # degree: 45, rad: pi/2 0.707 >>> ts = np.array([ np.sqrt(3) / 3 * i for i in range(10)]) >>> round(angle(ts)[1],3) # degree: 30, rad: pi/6 0.866 >>> ts = np.array([ -np.sqrt(3) / 3 * i for i in range(7)]) >>> round(angle(ts)[1], 3) # degree: 150, rad: 5*pi/6 -0.866 Args: ts: Returns: \u8fd4\u56de (error, consine(theta))\uff0c\u5373\u62df\u5408\u8bef\u5dee\u548c\u5939\u89d2\u4f59\u5f26\u503c\u3002 \"\"\" err , ( a , b ) = polyfit ( ts , deg = 1 , loss_func = loss_func ) if err > threshold : return ( err , None ) v = np . array ([ 1 , a + b ]) vx = np . array ([ 1 , 0 ]) return err , copysign ( np . dot ( v , vx ) / ( norm ( v ) * norm ( vx )), a ) clustering ( numbers , n ) \u00b6 \u5c06\u6570\u7ec4 numbers \u5212\u5206\u4e3a n \u4e2a\u7c07 \u8fd4\u56de\u503c\u4e3a\u4e00\u4e2aList, \u6bcf\u4e00\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u5206\u522b\u4e3a\u7c07\u7684\u8d77\u59cb\u70b9\u548c\u957f\u5ea6\u3002 Examples: >>> numbers = np . array ([ 1 , 1 , 1 , 2 , 4 , 6 , 8 , 7 , 4 , 5 , 6 ]) >>> clustering ( numbers , 2 ) [( 0 , 4 ), ( 4 , 7 )] Returns: Type Description List[Tuple[int, int]] \u5212\u5206\u540e\u7684\u7c07\u5217\u8868\u3002 Source code in omicron/talib/core.py def clustering ( numbers : np . ndarray , n : int ) -> List [ Tuple [ int , int ]]: \"\"\"\u5c06\u6570\u7ec4`numbers`\u5212\u5206\u4e3a`n`\u4e2a\u7c07 \u8fd4\u56de\u503c\u4e3a\u4e00\u4e2aList, \u6bcf\u4e00\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u5206\u522b\u4e3a\u7c07\u7684\u8d77\u59cb\u70b9\u548c\u957f\u5ea6\u3002 Examples: >>> numbers = np.array([1,1,1,2,4,6,8,7,4,5,6]) >>> clustering(numbers, 2) [(0, 4), (4, 7)] Returns: \u5212\u5206\u540e\u7684\u7c07\u5217\u8868\u3002 \"\"\" result = ckwrap . cksegs ( numbers , n ) clusters = [] for pos , size in zip ( result . centers , result . sizes ): clusters . append (( int ( pos - size // 2 - 1 ), int ( size ))) return clusters exp_moving_average ( values , window ) \u00b6 Numpy implementation of EMA Source code in omicron/talib/core.py def exp_moving_average ( values , window ): \"\"\"Numpy implementation of EMA\"\"\" weights = np . exp ( np . linspace ( - 1.0 , 0.0 , window )) weights /= weights . sum () a = np . 
convolve ( values , weights , mode = \"full\" )[: len ( values )] a [: window ] = a [ window ] return a mean_absolute_error ( y , y_hat ) \u00b6 \u8fd4\u56de\u9884\u6d4b\u5e8f\u5217\u76f8\u5bf9\u4e8e\u771f\u503c\u5e8f\u5217\u7684\u5e73\u5747\u7edd\u5bf9\u503c\u5dee \u4e24\u4e2a\u5e8f\u5217\u5e94\u8be5\u5177\u6709\u76f8\u540c\u7684\u957f\u5ea6\u3002\u5982\u679c\u5b58\u5728nan\uff0c\u5219nan\u7684\u503c\u4e0d\u8ba1\u5165\u5e73\u5747\u503c\u3002 Examples: >>> y = np . arange ( 5 ) >>> y_hat = np . arange ( 5 ) >>> y_hat [ 4 ] = 0 >>> mean_absolute_error ( y , y ) 0.0 >>> mean_absolute_error ( y , y_hat ) 0.8 Parameters: Name Type Description Default y np.array \u771f\u503c\u5e8f\u5217 required y_hat \u6bd4\u8f83\u5e8f\u5217 required Returns: Type Description float \u5e73\u5747\u7edd\u5bf9\u503c\u5dee Source code in omicron/talib/core.py def mean_absolute_error ( y : np . array , y_hat : np . array ) -> float : \"\"\"\u8fd4\u56de\u9884\u6d4b\u5e8f\u5217\u76f8\u5bf9\u4e8e\u771f\u503c\u5e8f\u5217\u7684\u5e73\u5747\u7edd\u5bf9\u503c\u5dee \u4e24\u4e2a\u5e8f\u5217\u5e94\u8be5\u5177\u6709\u76f8\u540c\u7684\u957f\u5ea6\u3002\u5982\u679c\u5b58\u5728nan\uff0c\u5219nan\u7684\u503c\u4e0d\u8ba1\u5165\u5e73\u5747\u503c\u3002 Examples: >>> y = np.arange(5) >>> y_hat = np.arange(5) >>> y_hat[4] = 0 >>> mean_absolute_error(y, y) 0.0 >>> mean_absolute_error(y, y_hat) 0.8 Args: y (np.array): \u771f\u503c\u5e8f\u5217 y_hat: \u6bd4\u8f83\u5e8f\u5217 Returns: float: \u5e73\u5747\u7edd\u5bf9\u503c\u5dee \"\"\" return nanmean ( np . abs ( y - y_hat )) moving_average ( ts , win , padding = True ) \u00b6 \u751f\u6210ts\u5e8f\u5217\u7684\u79fb\u52a8\u5e73\u5747\u503c Examples: >>> ts = np . arange ( 7 ) >>> moving_average ( ts , 5 ) array ([ nan , nan , nan , nan , 2. , 3. , 4. ]) Parameters: Name Type Description Default ts Sequence the input array required win int the window size required padding if True, then the return will be equal length as input, padding with np.NaN at the beginning True Returns: Type Description ndarray The moving mean of the input array along the specified axis. The output has the same shape as the input. Source code in omicron/talib/core.py def moving_average ( ts : Sequence , win : int , padding = True ) -> np . ndarray : \"\"\"\u751f\u6210ts\u5e8f\u5217\u7684\u79fb\u52a8\u5e73\u5747\u503c Examples: >>> ts = np.arange(7) >>> moving_average(ts, 5) array([nan, nan, nan, nan, 2., 3., 4.]) Args: ts (Sequence): the input array win (int): the window size padding: if True, then the return will be equal length as input, padding with np.NaN at the beginning Returns: The moving mean of the input array along the specified axis. The output has the same shape as the input. \"\"\" ma = move_mean ( ts , win ) if padding : return ma else : return ma [ win - 1 :] normalize ( X , scaler = 'maxabs' ) \u00b6 \u5bf9\u6570\u636e\u8fdb\u884c\u89c4\u8303\u5316\u5904\u7406\u3002 \u5982\u679cscaler\u4e3amaxabs\uff0c\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[-1,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3aunit_vector\uff0c\u5219\u5c06X\u7684\u5404\u5143\u7d20\u538b\u7f29\u5230\u5355\u4f4d\u8303\u6570 \u5982\u679cscaler\u4e3aminmax,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[0,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3astandard,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230\u5355\u4f4d\u65b9\u5dee\u4e4b\u95f4\uff0c\u4e14\u5747\u503c\u4e3a\u96f6\u3002 \u53c2\u8003 sklearn Examples: >>> X = [[ 1. , - 1. , 2. ], ... [ 2. , 0. , 0. ], ... [ 0. , 1. , - 1. 
]] >>> expected = [[ 0.4082 , - 0.4082 , 0.8165 ], ... [ 1. , 0. , 0. ], ... [ 0. , 0.7071 , - 0.7071 ]] >>> X_hat = normalize ( X , scaler = 'unit_vector' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 4 ) >>> expected = [[ 0.5 , - 1. , 1. ], ... [ 1. , 0. , 0. ], ... [ 0. , 1. , - 0.5 ]] >>> X_hat = normalize ( X , scaler = 'maxabs' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 2 ) >>> expected = [[ 0.5 , 0. , 1. ], ... [ 1. , 0.5 , 0.33333333 ], ... [ 0. , 1. , 0. ]] >>> X_hat = normalize ( X , scaler = 'minmax' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 3 ) >>> X = [[ 0 , 0 ], ... [ 0 , 0 ], ... [ 1 , 1 ], ... [ 1 , 1 ]] >>> expected = [[ - 1. , - 1. ], ... [ - 1. , - 1. ], ... [ 1. , 1. ], ... [ 1. , 1. ]] >>> X_hat = normalize ( X , scaler = 'standard' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 3 ) Parameters: Name Type Description Default X 2D array required scaler str [description]. Defaults to 'maxabs_scale'. 'maxabs' Source code in omicron/talib/core.py def normalize ( X , scaler = \"maxabs\" ): \"\"\"\u5bf9\u6570\u636e\u8fdb\u884c\u89c4\u8303\u5316\u5904\u7406\u3002 \u5982\u679cscaler\u4e3amaxabs\uff0c\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[-1,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3aunit_vector\uff0c\u5219\u5c06X\u7684\u5404\u5143\u7d20\u538b\u7f29\u5230\u5355\u4f4d\u8303\u6570 \u5982\u679cscaler\u4e3aminmax,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[0,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3astandard,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230\u5355\u4f4d\u65b9\u5dee\u4e4b\u95f4\uff0c\u4e14\u5747\u503c\u4e3a\u96f6\u3002 \u53c2\u8003 [sklearn] [sklearn]: https://scikit-learn.org/stable/auto_examples/preprocessing/plot_all_scaling.html#results Examples: >>> X = [[ 1., -1., 2.], ... [ 2., 0., 0.], ... [ 0., 1., -1.]] >>> expected = [[ 0.4082, -0.4082, 0.8165], ... [ 1., 0., 0.], ... [ 0., 0.7071, -0.7071]] >>> X_hat = normalize(X, scaler='unit_vector') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal=4) >>> expected = [[0.5, -1., 1.], ... [1., 0., 0.], ... [0., 1., -0.5]] >>> X_hat = normalize(X, scaler='maxabs') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 2) >>> expected = [[0.5 , 0. , 1. ], ... [1. , 0.5 , 0.33333333], ... [0. , 1. , 0. ]] >>> X_hat = normalize(X, scaler='minmax') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal= 3) >>> X = [[0, 0], ... [0, 0], ... [1, 1], ... [1, 1]] >>> expected = [[-1., -1.], ... [-1., -1.], ... [ 1., 1.], ... [ 1., 1.]] >>> X_hat = normalize(X, scaler='standard') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 3) Args: X (2D array): scaler (str, optional): [description]. Defaults to 'maxabs_scale'. \"\"\" if scaler == \"maxabs\" : return MaxAbsScaler () . fit_transform ( X ) elif scaler == \"unit_vector\" : return sklearn . preprocessing . normalize ( X , norm = \"l2\" ) elif scaler == \"minmax\" : return minmax_scale ( X ) elif scaler == \"standard\" : return StandardScaler () . fit_transform ( X ) pct_error ( y , y_hat ) \u00b6 \u76f8\u5bf9\u4e8e\u5e8f\u5217\u7b97\u672f\u5747\u503c\u7684\u8bef\u5dee\u503c Examples: >>> y = np . arange ( 5 ) >>> y_hat = np . 
arange ( 5 ) >>> y_hat [ 4 ] = 0 >>> pct_error ( y , y_hat ) 0.4 Parameters: Name Type Description Default y np.array [description] required y_hat np.array [description] required Returns: Type Description float [description] Source code in omicron/talib/core.py def pct_error ( y : np . array , y_hat : np . array ) -> float : \"\"\"\u76f8\u5bf9\u4e8e\u5e8f\u5217\u7b97\u672f\u5747\u503c\u7684\u8bef\u5dee\u503c Examples: >>> y = np.arange(5) >>> y_hat = np.arange(5) >>> y_hat[4] = 0 >>> pct_error(y, y_hat) 0.4 Args: y (np.array): [description] y_hat (np.array): [description] Returns: float: [description] \"\"\" mae = mean_absolute_error ( y , y_hat ) return mae / nanmean ( np . abs ( y )) polyfit ( ts , deg = 2 , loss_func = 're' ) \u00b6 \u5bf9\u7ed9\u5b9a\u7684\u65f6\u95f4\u5e8f\u5217\u8fdb\u884c\u76f4\u7ebf/\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u3002 \u4e8c\u6b21\u66f2\u7ebf\u53ef\u4ee5\u62df\u5408\u5230\u53cd\u751f\u53cd\u8f6c\u7684\u884c\u60c5\uff0c\u5982\u5706\u5f27\u5e95\u3001\u5706\u5f27\u9876\uff1b\u4e5f\u53ef\u4ee5\u62df\u5408\u5230\u4e0a\u8ff0\u8d8b\u52bf\u4e2d\u7684\u5355\u8fb9\u8d70\u52bf\uff0c\u5373\u5176\u4e2d\u4e00\u6bb5\u66f2\u7ebf\u3002\u5bf9\u4e8e\u5982\u957f\u671f\u5747\u7ebf\uff0c\u5728\u4e00\u6bb5\u65f6\u95f4\u5185\u8d70\u52bf\u53ef\u80fd\u5448\u73b0\u4e3a\u4e00\u6761\u76f4\u7ebf\uff0c\u6545\u4e5f\u53ef\u7528\u6b64\u51fd\u6570\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\u3002 \u4e3a\u4fbf\u4e8e\u5728\u4e0d\u540c\u54c1\u79cd\u3001\u4e0d\u540c\u7684\u65f6\u95f4\u4e4b\u95f4\u5bf9\u8bef\u5dee\u3001\u7cfb\u6570\u8fdb\u884c\u6bd4\u8f83\uff0c\u8bf7\u4e8b\u5148\u5bf9ts\u8fdb\u884c\u5f52\u4e00\u5316\u3002 \u5982\u679c\u9047\u5230\u65e0\u6cd5\u62df\u5408\u7684\u60c5\u51b5\uff08\u5f02\u5e38\uff09\uff0c\u5c06\u8fd4\u56de\u4e00\u4e2a\u975e\u5e38\u5927\u7684\u8bef\u5dee\uff0c\u5e76\u5c06\u5176\u5b83\u9879\u7f6e\u4e3anp.nan Examples: >>> ts = [ i for i in range ( 5 )] >>> err , ( a , b ) = polyfit ( ts , deg = 1 ) >>> print ( round ( err , 3 ), round ( a , 1 )) 0.0 1.0 Parameters: Name Type Description Default ts Sequence \u5f85\u62df\u5408\u7684\u65f6\u95f4\u5e8f\u5217 required deg int \u5982\u679c\u8981\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\uff0c\u53d61\uff1b\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u53d62. 
Defaults to 2 2 loss_func str \u8bef\u5dee\u8ba1\u7b97\u65b9\u6cd5\uff0c\u53d6\u503c\u4e3a mae , rmse , mse \u6216 re \u3002Defaults to re (relative_error) 're' Returns: Type Description [Tuple] \u5982\u679c\u4e3a\u76f4\u7ebf\u62df\u5408\uff0c\u8fd4\u56de\u8bef\u5dee\uff0c(a,b)(\u4e00\u6b21\u9879\u7cfb\u6570\u548c\u5e38\u6570)\u3002\u5982\u679c\u4e3a\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\uff0c\u8fd4\u56de \u8bef\u5dee, (a,b,c)(\u4e8c\u6b21\u9879\u3001\u4e00\u6b21\u9879\u548c\u5e38\u91cf\uff09, (vert_x, vert_y)(\u9876\u70b9\u5904\u7684index\uff0c\u9876\u70b9\u503c) Source code in omicron/talib/core.py def polyfit ( ts : Sequence , deg : int = 2 , loss_func = \"re\" ) -> Tuple : \"\"\"\u5bf9\u7ed9\u5b9a\u7684\u65f6\u95f4\u5e8f\u5217\u8fdb\u884c\u76f4\u7ebf/\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u3002 \u4e8c\u6b21\u66f2\u7ebf\u53ef\u4ee5\u62df\u5408\u5230\u53cd\u751f\u53cd\u8f6c\u7684\u884c\u60c5\uff0c\u5982\u5706\u5f27\u5e95\u3001\u5706\u5f27\u9876\uff1b\u4e5f\u53ef\u4ee5\u62df\u5408\u5230\u4e0a\u8ff0\u8d8b\u52bf\u4e2d\u7684\u5355\u8fb9\u8d70\u52bf\uff0c\u5373\u5176\u4e2d\u4e00\u6bb5\u66f2\u7ebf\u3002\u5bf9\u4e8e\u5982\u957f\u671f\u5747\u7ebf\uff0c\u5728\u4e00\u6bb5\u65f6\u95f4\u5185\u8d70\u52bf\u53ef\u80fd\u5448\u73b0\u4e3a\u4e00\u6761\u76f4\u7ebf\uff0c\u6545\u4e5f\u53ef\u7528\u6b64\u51fd\u6570\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\u3002 \u4e3a\u4fbf\u4e8e\u5728\u4e0d\u540c\u54c1\u79cd\u3001\u4e0d\u540c\u7684\u65f6\u95f4\u4e4b\u95f4\u5bf9\u8bef\u5dee\u3001\u7cfb\u6570\u8fdb\u884c\u6bd4\u8f83\uff0c\u8bf7\u4e8b\u5148\u5bf9ts\u8fdb\u884c\u5f52\u4e00\u5316\u3002 \u5982\u679c\u9047\u5230\u65e0\u6cd5\u62df\u5408\u7684\u60c5\u51b5\uff08\u5f02\u5e38\uff09\uff0c\u5c06\u8fd4\u56de\u4e00\u4e2a\u975e\u5e38\u5927\u7684\u8bef\u5dee\uff0c\u5e76\u5c06\u5176\u5b83\u9879\u7f6e\u4e3anp.nan Examples: >>> ts = [i for i in range(5)] >>> err, (a, b) = polyfit(ts, deg=1) >>> print(round(err, 3), round(a, 1)) 0.0 1.0 Args: ts (Sequence): \u5f85\u62df\u5408\u7684\u65f6\u95f4\u5e8f\u5217 deg (int): \u5982\u679c\u8981\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\uff0c\u53d61\uff1b\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u53d62. Defaults to 2 loss_func (str): \u8bef\u5dee\u8ba1\u7b97\u65b9\u6cd5\uff0c\u53d6\u503c\u4e3a`mae`, `rmse`,`mse` \u6216`re`\u3002Defaults to `re` (relative_error) Returns: [Tuple]: \u5982\u679c\u4e3a\u76f4\u7ebf\u62df\u5408\uff0c\u8fd4\u56de\u8bef\u5dee\uff0c(a,b)(\u4e00\u6b21\u9879\u7cfb\u6570\u548c\u5e38\u6570)\u3002\u5982\u679c\u4e3a\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\uff0c\u8fd4\u56de \u8bef\u5dee, (a,b,c)(\u4e8c\u6b21\u9879\u3001\u4e00\u6b21\u9879\u548c\u5e38\u91cf\uff09, (vert_x, vert_y)(\u9876\u70b9\u5904\u7684index\uff0c\u9876\u70b9\u503c) \"\"\" if deg not in ( 1 , 2 ): raise ValueError ( \"deg must be 1 or 2\" ) try : if any ( np . isnan ( ts )): raise ValueError ( \"ts contains nan\" ) x = np . array ( list ( range ( len ( ts )))) z = np . polyfit ( x , ts , deg = deg ) p = np . poly1d ( z ) ts_hat = np . array ([ p ( xi ) for xi in x ]) if loss_func == \"mse\" : error = np . mean ( np . square ( ts - ts_hat )) elif loss_func == \"rmse\" : error = np . sqrt ( np . mean ( np . 
square ( ts - ts_hat ))) elif loss_func == \"mae\" : error = mean_absolute_error ( ts , ts_hat ) else : # defaults to relative error error = pct_error ( ts , ts_hat ) if deg == 2 : a , b , c = z [ 0 ], z [ 1 ], z [ 2 ] axis_x = - b / ( 2 * a ) if a != 0 : axis_y = ( 4 * a * c - b * b ) / ( 4 * a ) else : axis_y = None return error , z , ( axis_x , axis_y ) elif deg == 1 : return error , z except Exception : error = 1e9 if deg == 1 : return error , ( np . nan , np . nan ) else : return error , ( np . nan , np . nan , np . nan ), ( np . nan , np . nan ) slope ( ts , loss_func = 're' ) \u00b6 \u6c42ts\u8868\u793a\u7684\u76f4\u7ebf\uff08\u5982\u679c\u80fd\u62df\u5408\u6210\u76f4\u7ebf\u7684\u8bdd\uff09\u7684\u659c\u7387 Parameters: Name Type Description Default ts np.array [description] required loss_func str [description]. Defaults to 're'. 're' Source code in omicron/talib/core.py def slope ( ts : np . array , loss_func = \"re\" ): \"\"\"\u6c42ts\u8868\u793a\u7684\u76f4\u7ebf\uff08\u5982\u679c\u80fd\u62df\u5408\u6210\u76f4\u7ebf\u7684\u8bdd\uff09\u7684\u659c\u7387 Args: ts (np.array): [description] loss_func (str, optional): [description]. Defaults to 're'. \"\"\" err , ( a , b ) = polyfit ( ts , deg = 1 , loss_func = loss_func ) return err , a smooth ( ts , win , poly_order = 1 , mode = 'interp' ) \u00b6 \u5e73\u6ed1\u5e8f\u5217ts\uff0c\u4f7f\u7528\u7a97\u53e3\u5927\u5c0f\u4e3awin\u7684\u5e73\u6ed1\u6a21\u578b\uff0c\u9ed8\u8ba4\u4f7f\u7528\u7ebf\u6027\u6a21\u578b \u63d0\u4f9b\u672c\u51fd\u6570\u4e3b\u8981\u57fa\u4e8e\u8fd9\u6837\u7684\u8003\u8651\uff1a omicron\u7684\u4f7f\u7528\u8005\u53ef\u80fd\u5e76\u4e0d\u719f\u6089\u4fe1\u53f7\u5904\u7406\u7684\u6982\u5ff5\uff0c\u8fd9\u91cc\u76f8\u5f53\u4e8e\u63d0\u4f9b\u4e86\u76f8\u5173\u529f\u80fd\u7684\u4e00\u4e2a\u5165\u53e3\u3002 Parameters: Name Type Description Default ts np.array [description] required win int [description] required poly_order int [description]. Defaults to 1. 1 Source code in omicron/talib/core.py def smooth ( ts : np . array , win : int , poly_order = 1 , mode = \"interp\" ): \"\"\"\u5e73\u6ed1\u5e8f\u5217ts\uff0c\u4f7f\u7528\u7a97\u53e3\u5927\u5c0f\u4e3awin\u7684\u5e73\u6ed1\u6a21\u578b\uff0c\u9ed8\u8ba4\u4f7f\u7528\u7ebf\u6027\u6a21\u578b \u63d0\u4f9b\u672c\u51fd\u6570\u4e3b\u8981\u57fa\u4e8e\u8fd9\u6837\u7684\u8003\u8651\uff1a omicron\u7684\u4f7f\u7528\u8005\u53ef\u80fd\u5e76\u4e0d\u719f\u6089\u4fe1\u53f7\u5904\u7406\u7684\u6982\u5ff5\uff0c\u8fd9\u91cc\u76f8\u5f53\u4e8e\u63d0\u4f9b\u4e86\u76f8\u5173\u529f\u80fd\u7684\u4e00\u4e2a\u5165\u53e3\u3002 Args: ts (np.array): [description] win (int): [description] poly_order (int, optional): [description]. Defaults to 1. \"\"\" return savgol_filter ( ts , win , poly_order , mode = mode ) weighted_moving_average ( ts , win ) \u00b6 \u8ba1\u7b97\u52a0\u6743\u79fb\u52a8\u5e73\u5747 Parameters: Name Type Description Default ts np.array [description] required win int [description] required Returns: Type Description np.array [description] Source code in omicron/talib/core.py def weighted_moving_average ( ts : np . array , win : int ) -> np . array : \"\"\"\u8ba1\u7b97\u52a0\u6743\u79fb\u52a8\u5e73\u5747 Args: ts (np.array): [description] win (int): [description] Returns: np.array: [description] \"\"\" w = [ 2 * ( i + 1 ) / ( win * ( win + 1 )) for i in range ( win )] return np . convolve ( ts , w , \"valid\" ) morph \u00b6 \u5f62\u6001\u68c0\u6d4b\u76f8\u5173\u65b9\u6cd5 BreakoutFlag ( IntEnum ) \u00b6 An enumeration. 
morph \u00b6 Helpers for detecting chart patterns (morphology). BreakoutFlag ( IntEnum ) \u00b6 An enumeration. Source code in omicron/talib/morph.py class BreakoutFlag ( IntEnum ): UP = 1 DOWN = - 1 NONE = 0 CrossFlag ( IntEnum ) \u00b6 An enumeration. Source code in omicron/talib/morph.py class CrossFlag ( IntEnum ): UPCROSS = 1 DOWNCROSS = - 1 NONE = 0 breakout ( ts , upthres = 0.01 , downthres =- 0.01 , confirm = 1 ) \u00b6 Detects whether the time series has broken through its resistance (consolidation) line. Parameters: Name Type Description Default ts np.ndarray the time series required upthres float see peaks_and_valleys 0.01 downthres float see peaks_and_valleys -0.01 confirm int number of bars after which the breakout is confirmed. Defaults to 1 1 Returns: Type Description BreakoutFlag returns 1 on an upward breakout through the resistance line, -1 on a downward breakout through the support line, otherwise 0 Source code in omicron/talib/morph.py def breakout ( ts : np . ndarray , upthres : float = 0.01 , downthres : float = - 0.01 , confirm : int = 1 ) -> BreakoutFlag : \"\"\"Detects whether the time series has broken through its resistance (consolidation) line. Args: ts (np.ndarray): the time series upthres (float, optional): see [peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] downthres (float, optional): see [peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] confirm (int, optional): number of bars after which the breakout is confirmed. Defaults to 1 Returns: returns 1 on an upward breakout through the resistance line, -1 on a downward breakout through the support line, otherwise 0 \"\"\" support , resist , _ = support_resist_lines ( ts [: - confirm ], upthres , downthres ) x0 = len ( ts ) - confirm - 1 x = list ( range ( len ( ts ) - confirm , len ( ts ))) if resist is not None : if np . all ( ts [ x ] > resist ( x )) and ts [ x0 ] <= resist ( x0 ): return BreakoutFlag . UP if support is not None : if np . all ( ts [ x ] < support ( x )) and ts [ x0 ] >= support ( x0 ): return BreakoutFlag . DOWN return BreakoutFlag . NONE cross ( f , g ) \u00b6 Determines whether series f crosses series g. If the two series have exactly one intersection, returns 1 to indicate that f crosses above g, or -1 to indicate that f crosses below g. This method can be used, for example, to decide whether two moving averages cross. Returns: Type Description CrossFlag (flag, index), where flag is: 0 no crossing -1 f crosses below g 1 f crosses above g Source code in omicron/talib/morph.py def cross ( f : np . ndarray , g : np . ndarray ) -> CrossFlag : \"\"\"Determines whether series f crosses series g. If the two series have exactly one intersection, returns 1 to indicate that f crosses above g, or -1 to indicate that f crosses below g. This method can be used, for example, to decide whether two moving averages cross. returns: (flag, index), where flag is: 0 no crossing -1 f crosses below g 1 f crosses above g \"\"\" indices = np . argwhere ( np . diff ( np . sign ( f - g ))) . flatten () if len ( indices ) == 0 : return CrossFlag . NONE , 0 # if there are one or more crossings, take the last one idx = indices [ - 1 ] if f [ idx ] < g [ idx ]: return CrossFlag . UPCROSS , idx elif f [ idx ] > g [ idx ]: return CrossFlag . DOWNCROSS , idx else : return CrossFlag ( np . sign ( g [ idx - 1 ] - f [ idx - 1 ])), idx
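The crossing helpers all operate on two equal-length sequences, typically two moving averages. The following sketch is illustrative only and uses the same assumed omicron.talib import path as above (here inferred from omicron/talib/morph.py); the sample data is made up.

```python
import numpy as np

# Assumed import path (inferred from omicron/talib/morph.py); adjust as needed.
from omicron.talib import CrossFlag, cross, moving_average

rng = np.random.default_rng(7)
close = np.cumprod(1 + rng.normal(0.001, 0.01, 80))

# Two moving averages of different windows; compare only the tail where
# both are free of the NaN padding added by moving_average.
ma5 = moving_average(close, 5)[-30:]
ma10 = moving_average(close, 10)[-30:]

flag, idx = cross(ma5, ma10)
if flag == CrossFlag.UPCROSS:
    print(f"ma5 crossed above ma10 at bar {idx}")
elif flag == CrossFlag.DOWNCROSS:
    print(f"ma5 crossed below ma10 at bar {idx}")
else:
    print("no crossing in the window")
```

vcross and inverse_vcross follow the same pattern but return (flag, (idx0, idx1)) and look specifically for V-shaped and ^-shaped double crossings.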
energy_hump ( bars , thresh = 2 ) \u00b6 Checks whether bars contains two or more waves of sharply increasing volume (energy humps), and returns the distance from the last hump to the present together with the length of the interval covered by the humps. Note that if the last energy hump is too far in the past (for example more than 10 bars), it may mean that capital has already left and the energy is exhausted. Parameters: Name Type Description Default bars np.ndarray bar (quote) data, a structured array with at least frame and volume fields required thresh int the last volume wave must exceed this multiple of the 20-day average volume 2 Returns: Type Description Optional[Tuple[int, int]] None if no energy-hump pattern exists; otherwise the distance from the last hump to the present and the length of the hump interval Source code in omicron/talib/morph.py def energy_hump ( bars , thresh = 2 ) -> Optional [ Tuple [ int , int ]]: \"\"\"Checks whether `bars` contains two or more waves of sharply increasing volume (energy humps), and returns the distance from the last hump to the present together with the length of the interval covered by the humps. Note that if the last energy hump is too far in the past (for example more than 10 bars), it may mean that capital has already left and the energy is exhausted. Args: bars: bar (quote) data thresh: the last volume wave must exceed this multiple of the 20-day average volume. Returns: None if no energy-hump pattern exists; otherwise the distance from the last hump to the present and the length of the hump interval. \"\"\" vol = bars [ \"volume\" ] std = np . std ( vol [ 1 :] / vol [: - 1 ]) pvs = peak_valley_pivots ( vol , std , 0 ) frames = bars [ \"frame\" ] pvs [ 0 ] = 0 pvs [ - 1 ] = - 1 peaks = np . argwhere ( pvs == 1 ) mn = np . mean ( vol [ peaks ]) # a peak must not shrink below the mean volume of the spikes real_peaks = np . intersect1d ( np . argwhere ( vol > mn ), peaks ) if len ( real_peaks ) < 2 : return None logger . debug ( \"found %s peaks at %s \" , len ( real_peaks ), frames [ real_peaks ]) lp = real_peaks [ - 1 ] ma = moving_average ( vol , 20 )[ lp ] if vol [ lp ] < ma * thresh : logger . debug ( \"vol of last peak[ %s ] is less than mean_vol(20) * thresh[ %s ]\" , vol [ lp ], ma * thresh , ) return None return len ( bars ) - real_peaks [ - 1 ], real_peaks [ - 1 ] - real_peaks [ 0 ] inverse_vcross ( f , g ) \u00b6 Determines whether series f and series g form a ^-shaped (inverted-V) crossing: there are exactly two intersections, the first crossing upward and the second crossing downward. Useful for detecting topping patterns and similar situations. Parameters: Name Type Description Default f np.array the first sequence required g np.array the second sequence required Returns: Type Description Tuple (flag, (idx0, idx1)), where flag is True when a ^-shaped crossing exists and idx0/idx1 are the crossing indices Source code in omicron/talib/morph.py def inverse_vcross ( f : np . array , g : np .
array ) -> Tuple : \"\"\"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0e\u5e8f\u5217g\u5b58\u5728^\u578b\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b \u76f8\u4ea4\u3002\u53ef\u7528\u4e8e\u5224\u65ad\u89c1\u9876\u7279\u5f81\u7b49\u573a\u5408\u3002 Args: f (np.array): [description] g (np.array): [description] Returns: Tuple: [description] \"\"\" indices = np . argwhere ( np . diff ( np . sign ( f - g ))) . flatten () if len ( indices ) == 2 : idx0 , idx1 = indices if f [ idx0 ] < g [ idx0 ] and f [ idx1 ] > g [ idx1 ]: return True , ( idx0 , idx1 ) return False , ( None , None ) peaks_and_valleys ( ts , up_thresh = None , down_thresh = None ) \u00b6 \u5bfb\u627ets\u4e2d\u7684\u6ce2\u5cf0\u548c\u6ce2\u8c37\uff0c\u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002\u5982\u679c\u4e3a1\uff0c\u5219\u4e3a\u6ce2\u5cf0\uff1b\u5982\u679c\u4e3a-1\uff0c\u5219\u4e3a\u6ce2\u8c37\u3002 \u672c\u51fd\u6570\u76f4\u63a5\u4f7f\u7528\u4e86zigzag\u4e2d\u7684peak_valley_pivots. \u6709\u5f88\u591a\u65b9\u6cd5\u53ef\u4ee5\u5b9e\u73b0\u672c\u529f\u80fd\uff0c\u6bd4\u5982scipy.signals.find_peaks_cwt, peak_valley_pivots\u7b49\u3002\u672c\u51fd\u6570\u66f4\u9002\u5408\u91d1\u878d\u65f6\u95f4\u5e8f\u5217\uff0c\u5e76\u4e14\u4f7f\u7528\u4e86cython\u52a0\u901f\u3002 Parameters: Name Type Description Default ts np.ndarray \u65f6\u95f4\u5e8f\u5217 required up_thresh float \u6ce2\u5cf0\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee None down_thresh float \u6ce2\u8c37\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee\u4e58\u4ee5-1 None Returns: Type Description np.ndarray \u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002 Source code in omicron/talib/morph.py def peaks_and_valleys ( ts : np . ndarray , up_thresh : Optional [ float ] = None , down_thresh : Optional [ float ] = None , ) -> np . ndarray : \"\"\"\u5bfb\u627ets\u4e2d\u7684\u6ce2\u5cf0\u548c\u6ce2\u8c37\uff0c\u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002\u5982\u679c\u4e3a1\uff0c\u5219\u4e3a\u6ce2\u5cf0\uff1b\u5982\u679c\u4e3a-1\uff0c\u5219\u4e3a\u6ce2\u8c37\u3002 \u672c\u51fd\u6570\u76f4\u63a5\u4f7f\u7528\u4e86zigzag\u4e2d\u7684peak_valley_pivots. \u6709\u5f88\u591a\u65b9\u6cd5\u53ef\u4ee5\u5b9e\u73b0\u672c\u529f\u80fd\uff0c\u6bd4\u5982scipy.signals.find_peaks_cwt, peak_valley_pivots\u7b49\u3002\u672c\u51fd\u6570\u66f4\u9002\u5408\u91d1\u878d\u65f6\u95f4\u5e8f\u5217\uff0c\u5e76\u4e14\u4f7f\u7528\u4e86cython\u52a0\u901f\u3002 Args: ts (np.ndarray): \u65f6\u95f4\u5e8f\u5217 up_thresh (float): \u6ce2\u5cf0\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee down_thresh (float): \u6ce2\u8c37\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee\u4e58\u4ee5-1 Returns: np.ndarray: \u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002 \"\"\" if ts . dtype != np . float64 : ts = ts . astype ( np . 
float64 ) if any ([ up_thresh is None , down_thresh is None ]): change_rate = ts [ 1 :] / ts [: - 1 ] - 1 std = np . std ( change_rate ) up_thresh = up_thresh or 2 * std down_thresh = down_thresh or - 2 * std return peak_valley_pivots ( ts , up_thresh , down_thresh ) plateaus ( numbers , min_size , fall_in_range_ratio = 0.97 ) \u00b6 \u7edf\u8ba1\u6570\u7ec4 numbers \u4e2d\u7684\u53ef\u80fd\u5b58\u5728\u7684\u5e73\u53f0\u6574\u7406\u3002 \u5982\u679c\u4e00\u4e2a\u6570\u7ec4\u4e2d\u5b58\u5728\u7740\u5b50\u6570\u7ec4\uff0c\u4f7f\u5f97\u5176\u5143\u7d20\u4e0e\u5747\u503c\u7684\u8ddd\u79bb\u843d\u5728\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u6bd4\u4f8b\u8d85\u8fc7 fall_in_range_ratio \u7684\uff0c\u5219\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6ee1\u8db3\u5e73\u53f0\u6574\u7406\u3002 Parameters: Name Type Description Default numbers ndarray \u8f93\u5165\u6570\u7ec4 required min_size int \u5e73\u53f0\u7684\u6700\u5c0f\u957f\u5ea6 required fall_in_range_ratio float \u8d85\u8fc7 fall_in_range_ratio \u6bd4\u4f8b\u7684\u5143\u7d20\u843d\u5728\u5747\u503c\u7684\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\uff0c\u5c31\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6784\u6210\u4e00\u4e2a\u5e73\u53f0 0.97 Returns: Type Description List[Tuple] \u5e73\u53f0\u7684\u8d77\u59cb\u4f4d\u7f6e\u548c\u957f\u5ea6\u7684\u6570\u7ec4 Source code in omicron/talib/morph.py def plateaus ( numbers : np . ndarray , min_size : int , fall_in_range_ratio : float = 0.97 ) -> List [ Tuple ]: \"\"\"\u7edf\u8ba1\u6570\u7ec4`numbers`\u4e2d\u7684\u53ef\u80fd\u5b58\u5728\u7684\u5e73\u53f0\u6574\u7406\u3002 \u5982\u679c\u4e00\u4e2a\u6570\u7ec4\u4e2d\u5b58\u5728\u7740\u5b50\u6570\u7ec4\uff0c\u4f7f\u5f97\u5176\u5143\u7d20\u4e0e\u5747\u503c\u7684\u8ddd\u79bb\u843d\u5728\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u6bd4\u4f8b\u8d85\u8fc7`fall_in_range_ratio`\u7684\uff0c\u5219\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6ee1\u8db3\u5e73\u53f0\u6574\u7406\u3002 Args: numbers: \u8f93\u5165\u6570\u7ec4 min_size: \u5e73\u53f0\u7684\u6700\u5c0f\u957f\u5ea6 fall_in_range_ratio: \u8d85\u8fc7`fall_in_range_ratio`\u6bd4\u4f8b\u7684\u5143\u7d20\u843d\u5728\u5747\u503c\u7684\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\uff0c\u5c31\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6784\u6210\u4e00\u4e2a\u5e73\u53f0 Returns: \u5e73\u53f0\u7684\u8d77\u59cb\u4f4d\u7f6e\u548c\u957f\u5ea6\u7684\u6570\u7ec4 \"\"\" if numbers . size <= min_size : n = 1 else : n = numbers . size // min_size clusters = clustering ( numbers , n ) plats = [] for ( start , length ) in clusters : if length < min_size : continue y = numbers [ start : start + length ] mean = np . mean ( y ) std = np . std ( y ) inrange = len ( y [ np . abs ( y - mean ) < 3 * std ]) ratio = inrange / length if ratio >= fall_in_range_ratio : plats . 
append (( start , length )) return plats rsi_bottom_distance ( close , thresh = None ) \u00b6 \u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 None Returns: Type Description int \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_bottom_distance ( close : np . array , thresh : Tuple [ float , float ] = None ) -> int : \"\"\"\u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Args: close (np.array): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 Returns: \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002\"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) rsi = ta . 
RSI ( close , 6 ) watermarks = rsi_watermarks ( close , thresh ) if watermarks is not None : low_watermark , _ , _ = watermarks pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 # \u8c37\u503cRSI<30 valley_rsi_index = np . where (( rsi < 30 ) & ( pivots == - 1 ))[ 0 ] # RSI\u4f4e\u6c34\u5e73\u7684\u6700\u5927\u503c\uff1a\u4f4e\u6c34\u5e73*1.01 low_rsi_index = np . where ( rsi <= low_watermark * 1.01 )[ 0 ] if len ( valley_rsi_index ) > 0 : distance = len ( rsi ) - 1 - valley_rsi_index [ - 1 ] if len ( low_rsi_index ) > 0 : if low_rsi_index [ - 1 ] >= valley_rsi_index [ - 1 ]: distance = len ( rsi ) - 1 - low_rsi_index [ - 1 ] return distance rsi_bottom_divergent ( close , thresh = None , rsi_limit = 30 ) \u00b6 \u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u5e95\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 required thresh Tuple[float, float] \u8bf7\u53c2\u8003 peaks_and_valleys None rsi_limit float RSI\u53d1\u751f\u5e95\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c30\uff0820\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6<30\u7684\u5c40\u90e8\u6700\u4f4e\u6536\u76d8\u4ef7\u3002 30 Returns: Type Description int \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_bottom_divergent ( close : np . array , thresh : Tuple [ float , float ] = None , rsi_limit : float = 30 ) -> int : \"\"\"\u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u5e95\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Args: close (np.array): \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 thresh (Tuple[float, float]): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] rsi_limit (float, optional): RSI\u53d1\u751f\u5e95\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c30\uff0820\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6<30\u7684\u5c40\u90e8\u6700\u4f4e\u6536\u76d8\u4ef7\u3002 Returns: \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) rsi = ta . RSI ( close , 6 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 length = len ( close ) valley_index = np . 
where (( pivots == - 1 ) & ( rsi <= rsi_limit ))[ 0 ] if len ( valley_index ) >= 2 : if ( close [ valley_index [ - 1 ]] < close [ valley_index [ - 2 ]]) and ( rsi [ valley_index [ - 1 ]] > rsi [ valley_index [ - 2 ]] ): bottom_dev_distance = length - 1 - valley_index [ - 1 ] return bottom_dev_distance rsi_predict_price ( close , thresh = None ) \u00b6 \u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\uff0c\u6839\u636e\u6700\u8fd1\u7684\u4e24\u4e2aRSI\u7684\u6781\u5c0f\u503c\u548c\u6781\u5927\u503c\u9884\u6d4b\u4e0b\u4e00\u4e2a\u5468\u671f\u53ef\u80fd\u8fbe\u5230\u7684\u6700\u4f4e\u4ef7\u683c\u548c\u6700\u9ad8\u4ef7\u683c\u3002 \u5176\u539f\u7406\u662f\uff0c\u4ee5\u9884\u6d4b\u6700\u8fd1\u7684\u4e24\u4e2a\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\uff0c\u6c42\u51fa\u5176\u76f8\u5bf9\u5e94\u7684RSI\u503c\uff0c\u6c42\u51fa\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7RSI\u7684\u5747\u503c\uff0c \u82e5\u53ea\u6709\u4e00\u4e2a\u5219\u53d6\u6700\u8fd1\u7684\u4e00\u4e2a\u3002\u518d\u7531RSI\u516c\u5f0f\uff0c\u53cd\u63a8\u4ef7\u683c\u3002\u6b64\u65f6\u8fd4\u56de\u503c\u4e3a(None, float)\uff0c\u5373\u53ea\u6709\u6700\u9ad8\u4ef7\uff0c\u6ca1\u6709\u6700\u4f4e\u4ef7\u3002\u53cd\u4e4b\u4ea6\u7136\u3002 Parameters: Name Type Description Default close np.ndarray \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) \u8bf7\u53c2\u8003 peaks_and_valleys None Returns: Type Description Tuple[float, float] \u8fd4\u56de\u6570\u7ec4[predicted_low_price, predicted_high_price], \u6570\u7ec4\u7b2c\u4e00\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u4f4e\u4ef7\u3002 \u7b2c\u4e8c\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u9ad8\u4ef7\u3002 Source code in omicron/talib/morph.py def rsi_predict_price ( close : np . ndarray , thresh : Tuple [ float , float ] = None ) -> Tuple [ float , float ]: \"\"\"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\uff0c\u6839\u636e\u6700\u8fd1\u7684\u4e24\u4e2aRSI\u7684\u6781\u5c0f\u503c\u548c\u6781\u5927\u503c\u9884\u6d4b\u4e0b\u4e00\u4e2a\u5468\u671f\u53ef\u80fd\u8fbe\u5230\u7684\u6700\u4f4e\u4ef7\u683c\u548c\u6700\u9ad8\u4ef7\u683c\u3002 \u5176\u539f\u7406\u662f\uff0c\u4ee5\u9884\u6d4b\u6700\u8fd1\u7684\u4e24\u4e2a\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\uff0c\u6c42\u51fa\u5176\u76f8\u5bf9\u5e94\u7684RSI\u503c\uff0c\u6c42\u51fa\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7RSI\u7684\u5747\u503c\uff0c \u82e5\u53ea\u6709\u4e00\u4e2a\u5219\u53d6\u6700\u8fd1\u7684\u4e00\u4e2a\u3002\u518d\u7531RSI\u516c\u5f0f\uff0c\u53cd\u63a8\u4ef7\u683c\u3002\u6b64\u65f6\u8fd4\u56de\u503c\u4e3a(None, float)\uff0c\u5373\u53ea\u6709\u6700\u9ad8\u4ef7\uff0c\u6ca1\u6709\u6700\u4f4e\u4ef7\u3002\u53cd\u4e4b\u4ea6\u7136\u3002 Args: close (np.ndarray): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] Returns: \u8fd4\u56de\u6570\u7ec4[predicted_low_price, predicted_high_price], \u6570\u7ec4\u7b2c\u4e00\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u4f4e\u4ef7\u3002 \u7b2c\u4e8c\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u9ad8\u4ef7\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if thresh is None : std = np . 
std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) if close . dtype != np . float64 : close = close . astype ( np . float64 ) valley_rsi , peak_rsi , _ = rsi_watermarks ( close , thresh = thresh ) pivot = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivot [ 0 ], pivot [ - 1 ] = 0 , 0 # \u6390\u5934\u53bb\u5c3e price_change = pd . Series ( close ) . diff ( 1 ) . values ave_price_change = ( abs ( price_change )[ - 6 :] . mean ()) * 5 ave_price_raise = ( np . maximum ( price_change , 0 )[ - 6 :] . mean ()) * 5 if valley_rsi is not None : predicted_low_change = ( ave_price_change ) - ave_price_raise / ( 0.01 * valley_rsi ) if predicted_low_change > 0 : predicted_low_change = 0 predicted_low_price = close [ - 1 ] + predicted_low_change else : predicted_low_price = None if peak_rsi is not None : predicted_high_change = ( ave_price_raise - ave_price_change ) / ( 0.01 * peak_rsi - 1 ) - ave_price_change if predicted_high_change < 0 : predicted_high_change = 0 predicted_high_price = close [ - 1 ] + predicted_high_change else : predicted_high_price = None return predicted_low_price , predicted_high_price rsi_top_distance ( close , thresh = None ) \u00b6 \u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 None Returns: Type Description int \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_top_distance ( close : np . 
array , thresh : Tuple [ float , float ] = None ) -> int : \"\"\"\u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Args: close (np.array): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 Returns: \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002\"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) rsi = ta . RSI ( close , 6 ) watermarks = rsi_watermarks ( close , thresh ) if watermarks is not None : _ , high_watermark , _ = watermarks pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 # \u5cf0\u503cRSI>70 peak_rsi_index = np . where (( rsi > 70 ) & ( pivots == 1 ))[ 0 ] # RSI\u9ad8\u6c34\u5e73\u7684\u6700\u5c0f\u503c\uff1a\u9ad8\u6c34\u5e73*0.99 high_rsi_index = np . 
where ( rsi >= high_watermark * 0.99 )[ 0 ] if len ( peak_rsi_index ) > 0 : distance = len ( rsi ) - 1 - peak_rsi_index [ - 1 ] if len ( high_rsi_index ) > 0 : if high_rsi_index [ - 1 ] >= peak_rsi_index [ - 1 ]: distance = len ( rsi ) - 1 - high_rsi_index [ - 1 ] return distance rsi_top_divergent ( close , thresh = None , rsi_limit = 70 ) \u00b6 \u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u9876\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 required thresh Tuple[float, float] \u8bf7\u53c2\u8003 peaks_and_valleys None rsi_limit float RSI\u53d1\u751f\u9876\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c70\uff0880\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6>70\u7684\u5c40\u90e8\u6700\u9ad8\u6536\u76d8\u4ef7\u3002 70 Returns: Type Description Tuple[int, int] \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_top_divergent ( close : np . array , thresh : Tuple [ float , float ] = None , rsi_limit : float = 70 ) -> Tuple [ int , int ]: \"\"\"\u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u9876\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Args: close (np.array): \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 thresh (Tuple[float, float]): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] rsi_limit (float, optional): RSI\u53d1\u751f\u9876\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c70\uff0880\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6>70\u7684\u5c40\u90e8\u6700\u9ad8\u6536\u76d8\u4ef7\u3002 Returns: \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) rsi = ta . RSI ( close , 6 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 length = len ( close ) peak_index = np . 
where (( pivots == 1 ) & ( rsi >= rsi_limit ))[ 0 ] if len ( peak_index ) >= 2 : if ( close [ peak_index [ - 1 ]] > close [ peak_index [ - 2 ]]) and ( rsi [ peak_index [ - 1 ]] < rsi [ peak_index [ - 2 ]] ): top_dev_distance = length - 1 - peak_index [ - 1 ] return top_dev_distance rsi_watermarks ( close , thresh = None ) \u00b6 \u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u9876\u548c\u5e95\u7684\u9608\u503c\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u8c37\u548c\u5cf0\u5904RSI\u5747\u503c\uff0c\u6700\u540e\u4e00\u4e2aRSI6\u503c\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u8fd4\u56de\u503c\u4e2d\uff0c\u4e00\u4e2a\u4e3alow_wartermark\uff08\u8c37\u5e95\u5904RSI\u503c\uff09\uff0c \u4e00\u4e2a\u4e3ahigh_wartermark\uff08\u9ad8\u5cf0\u5904RSI\u503c)\uff0c\u4e00\u4e2a\u4e3aRSI6\u7684\u6700\u540e\u4e00\u4e2a\u503c\uff0c\u7528\u4ee5\u5bf9\u6bd4\u524d\u4e24\u4e2a\u8b66\u6212\u503c\u3002 Parameters: Name Type Description Default close np.array \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 None Returns: Type Description Tuple[float, float, float] \u8fd4\u56de\u6570\u7ec4[low_watermark, high_watermark\uff0c rsi[-1]], \u7b2c\u4e00\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\uff0c \u7b2c\u4e8c\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\u3002 \u82e5\u4f20\u5165\u6536\u76d8\u4ef7\u53ea\u6709\u4e00\u4e2a\u6700\u503c\uff0c\u53ea\u8fd4\u56de\u4e00\u4e2a\u3002\u6ca1\u6709\u6700\u503c\uff0c\u5219\u8fd4\u56deNone, \u7b2c\u4e09\u4e2a\u4e3a\u5b9e\u9645\u7684\u6700\u540eRSI6\u7684\u503c\u3002 Source code in omicron/talib/morph.py def rsi_watermarks ( close : np . 
array , thresh : Tuple [ float , float ] = None ) -> Tuple [ float , float , float ]: \"\"\"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u9876\u548c\u5e95\u7684\u9608\u503c\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u8c37\u548c\u5cf0\u5904RSI\u5747\u503c\uff0c\u6700\u540e\u4e00\u4e2aRSI6\u503c\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u8fd4\u56de\u503c\u4e2d\uff0c\u4e00\u4e2a\u4e3alow_wartermark\uff08\u8c37\u5e95\u5904RSI\u503c\uff09\uff0c \u4e00\u4e2a\u4e3ahigh_wartermark\uff08\u9ad8\u5cf0\u5904RSI\u503c)\uff0c\u4e00\u4e2a\u4e3aRSI6\u7684\u6700\u540e\u4e00\u4e2a\u503c\uff0c\u7528\u4ee5\u5bf9\u6bd4\u524d\u4e24\u4e2a\u8b66\u6212\u503c\u3002 Args: close (np.array): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 Returns: \u8fd4\u56de\u6570\u7ec4[low_watermark, high_watermark\uff0c rsi[-1]], \u7b2c\u4e00\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\uff0c \u7b2c\u4e8c\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\u3002 \u82e5\u4f20\u5165\u6536\u76d8\u4ef7\u53ea\u6709\u4e00\u4e2a\u6700\u503c\uff0c\u53ea\u8fd4\u56de\u4e00\u4e2a\u3002\u6ca1\u6709\u6700\u503c\uff0c\u5219\u8fd4\u56deNone, \u7b2c\u4e09\u4e2a\u4e3a\u5b9e\u9645\u7684\u6700\u540eRSI6\u7684\u503c\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) if close . dtype != np . float64 : close = close . astype ( np . float64 ) rsi = ta . RSI ( close , 6 ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 # \u6390\u5934\u53bb\u5c3e # \u5cf0\u503cRSI>70; \u8c37\u5904\u7684RSI<30; peaks_rsi_index = np . where (( rsi > 70 ) & ( pivots == 1 ))[ 0 ] valleys_rsi_index = np . where (( rsi < 30 ) & ( pivots == - 1 ))[ 0 ] if len ( peaks_rsi_index ) == 0 : high_watermark = None elif len ( peaks_rsi_index ) == 1 : high_watermark = rsi [ peaks_rsi_index [ 0 ]] else : # \u6709\u4e24\u4e2a\u4ee5\u4e0a\u7684\u5cf0\uff0c\u901a\u8fc7\u6700\u8fd1\u7684\u4e24\u4e2a\u5cf0\u5747\u503c\u6765\u786e\u5b9a\u8d70\u52bf high_watermark = np . nanmean ( rsi [ peaks_rsi_index [ - 2 :]]) if len ( valleys_rsi_index ) == 0 : low_watermark = None elif len ( valleys_rsi_index ) == 1 : low_watermark = rsi [ valleys_rsi_index [ 0 ]] else : # \u6709\u4e24\u4e2a\u4ee5\u4e0a\u7684\u5cf0\uff0c\u901a\u8fc7\u6700\u8fd1\u7684\u4e24\u4e2a\u5cf0\u6765\u786e\u5b9a\u8d70\u52bf low_watermark = np . nanmean ( rsi [ valleys_rsi_index [ - 2 :]]) return low_watermark , high_watermark , rsi [ - 1 ] support_resist_lines ( ts , upthres = None , downthres = None ) \u00b6 \u8ba1\u7b97\u65f6\u95f4\u5e8f\u5217\u7684\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf \u4f7f\u7528\u6700\u8fd1\u7684\u4e24\u4e2a\u9ad8\u70b9\u8fde\u63a5\u6210\u9634\u529b\u7ebf\uff0c\u4e24\u4e2a\u4f4e\u70b9\u8fde\u63a5\u6210\u652f\u6491\u7ebf\u3002 Examples: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 def show_support_resist_lines ( ts ): import plotly.graph_objects as go fig = go . Figure () support , resist , x_start = support_resist_lines ( ts , 0.03 , - 0.03 ) fig . add_trace ( go . Scatter ( x = np . arange ( len ( ts )), y = ts )) x = np . 
arange ( len ( ts ))[ x_start :] fig . add_trace ( go . Line ( x = x , y = support ( x ))) fig . add_trace ( go . Line ( x = x , y = resist ( x ))) fig . show () np . random . seed ( 1978 ) X = np . cumprod ( 1 + np . random . randn ( 100 ) * 0.01 ) show_support_resist_lines ( X ) the above code will show this Parameters: Name Type Description Default ts np.ndarray \u65f6\u95f4\u5e8f\u5217 required upthres float \u8bf7\u53c2\u8003 peaks_and_valleys None downthres float \u8bf7\u53c2\u8003 peaks_and_valleys None Returns: Type Description Tuple[Callable, Callable, numpy.ndarray] \u8fd4\u56de\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf\u7684\u8ba1\u7b97\u51fd\u6570\u53ca\u8d77\u59cb\u70b9\u5750\u6807\uff0c\u5982\u679c\u6ca1\u6709\u652f\u6491\u7ebf\u6216\u963b\u529b\u7ebf\uff0c\u5219\u8fd4\u56deNone Source code in omicron/talib/morph.py def support_resist_lines ( ts : np . ndarray , upthres : float = None , downthres : float = None ) -> Tuple [ Callable , Callable , np . ndarray ]: \"\"\"\u8ba1\u7b97\u65f6\u95f4\u5e8f\u5217\u7684\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf \u4f7f\u7528\u6700\u8fd1\u7684\u4e24\u4e2a\u9ad8\u70b9\u8fde\u63a5\u6210\u9634\u529b\u7ebf\uff0c\u4e24\u4e2a\u4f4e\u70b9\u8fde\u63a5\u6210\u652f\u6491\u7ebf\u3002 Examples: ```python def show_support_resist_lines(ts): import plotly.graph_objects as go fig = go.Figure() support, resist, x_start = support_resist_lines(ts, 0.03, -0.03) fig.add_trace(go.Scatter(x=np.arange(len(ts)), y=ts)) x = np.arange(len(ts))[x_start:] fig.add_trace(go.Line(x=x, y = support(x))) fig.add_trace(go.Line(x=x, y = resist(x))) fig.show() np.random.seed(1978) X = np.cumprod(1 + np.random.randn(100) * 0.01) show_support_resist_lines(X) ``` the above code will show this ![](https://images.jieyu.ai/images/202204/support_resist.png) Args: ts (np.ndarray): \u65f6\u95f4\u5e8f\u5217 upthres (float, optional): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] downthres (float, optional): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] Returns: \u8fd4\u56de\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf\u7684\u8ba1\u7b97\u51fd\u6570\u53ca\u8d77\u59cb\u70b9\u5750\u6807\uff0c\u5982\u679c\u6ca1\u6709\u652f\u6491\u7ebf\u6216\u963b\u529b\u7ebf\uff0c\u5219\u8fd4\u56deNone \"\"\" if ts . dtype != np . float64 : ts = ts . astype ( np . float64 ) pivots = peaks_and_valleys ( ts , upthres , downthres ) pivots [ 0 ] = 0 pivots [ - 1 ] = 0 arg_max = np . argwhere ( pivots == 1 ) . flatten () arg_min = np . argwhere ( pivots == - 1 ) . flatten () resist = None support = None if len ( arg_max ) >= 2 : arg_max = arg_max [ - 2 :] y = ts [ arg_max ] coeff = np . polyfit ( arg_max , y , deg = 1 ) resist = np . poly1d ( coeff ) if len ( arg_min ) >= 2 : arg_min = arg_min [ - 2 :] y = ts [ arg_min ] coeff = np . polyfit ( arg_min , y , deg = 1 ) support = np . poly1d ( coeff ) return support , resist , np . 
min ([ * arg_min , * arg_max ]) valley_detect ( close , thresh = ( 0.05 , - 0.02 )) \u00b6 \u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u8fd1\u671f\u5df2\u53d1\u751f\u53cd\u8f6c\u7684\u6700\u4f4e\u70b9\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 \u5176\u4e2dbars\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u6b64\u51fd\u6570\u91c7\u7528\u4e86zigzag\u4e2d\u7684\u8c37\u5cf0\u68c0\u6d4b\u65b9\u6cd5\uff0c\u5176\u4e2d\u53c2\u6570\u9ed8\u8ba4(0.05,-0.02), \u6b64\u53c2\u6570\u5bf9\u6240\u6709\u80a1\u7968\u6570\u636e\u90fd\u9002\u7528\u3002\u82e5\u6ee1\u8db3\u53c2\u6570\uff0c\u8fd4\u56de\u503c\u4e2d\uff0c\u8ddd\u79bb\u4e3a\u5927\u4e8e0\u7684\u6574\u6570\uff0c\u6536\u76ca\u7387\u662f0~1\u7684\u5c0f\u6570\u3002 Parameters: Name Type Description Default close np.ndarray \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) \u8bf7\u53c2\u8003 peaks_and_valleys (0.05, -0.02) Returns: Type Description int \u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 Source code in omicron/talib/morph.py def valley_detect ( close : np . ndarray , thresh : Tuple [ float , float ] = ( 0.05 , - 0.02 ) ) -> int : \"\"\"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u8fd1\u671f\u5df2\u53d1\u751f\u53cd\u8f6c\u7684\u6700\u4f4e\u70b9\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 \u5176\u4e2dbars\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u6b64\u51fd\u6570\u91c7\u7528\u4e86zigzag\u4e2d\u7684\u8c37\u5cf0\u68c0\u6d4b\u65b9\u6cd5\uff0c\u5176\u4e2d\u53c2\u6570\u9ed8\u8ba4(0.05,-0.02), \u6b64\u53c2\u6570\u5bf9\u6240\u6709\u80a1\u7968\u6570\u636e\u90fd\u9002\u7528\u3002\u82e5\u6ee1\u8db3\u53c2\u6570\uff0c\u8fd4\u56de\u503c\u4e2d\uff0c\u8ddd\u79bb\u4e3a\u5927\u4e8e0\u7684\u6574\u6570\uff0c\u6536\u76ca\u7387\u662f0~1\u7684\u5c0f\u6570\u3002 Args: close (np.ndarray): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] Returns: \u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . 
float64 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) flags = pivots [ pivots != 0 ] increased = None lowest_distance = None if ( flags [ - 2 ] == - 1 ) and ( flags [ - 1 ] == 1 ): length = len ( pivots ) valley_index = np . where ( pivots == - 1 )[ 0 ] increased = ( close [ - 1 ] - close [ valley_index [ - 1 ]]) / close [ valley_index [ - 1 ]] lowest_distance = int ( length - 1 - valley_index [ - 1 ]) return lowest_distance , increased vcross ( f , g ) \u00b6 \u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0eg\u5b58\u5728\u7c7b\u578bv\u578b\u7684\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a \u76f8\u4ea4\u3002\u4e00\u822c\u53cd\u6620\u4e3a\u6d17\u76d8\u62c9\u5347\u7684\u7279\u5f81\u3002 Examples: >>> f = np . array ([ 3 * i ** 2 - 20 * i + 2 for i in range ( 10 )]) >>> g = np . array ([ i - 5 for i in range ( 10 )]) >>> flag , indices = vcross ( f , g ) >>> assert flag is True >>> assert indices [ 0 ] == 0 >>> assert indices [ 1 ] == 6 Parameters: Name Type Description Default f first sequence required g the second sequence required Returns: Type Description Tuple (flag, indices), \u5176\u4e2dflag\u53d6\u503c\u4e3aTrue\u65f6\uff0c\u5b58\u5728vcross\uff0cindices\u4e3a\u4ea4\u70b9\u7684\u7d22\u5f15\u3002 Source code in omicron/talib/morph.py def vcross ( f : np . array , g : np . array ) -> Tuple : \"\"\"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0eg\u5b58\u5728\u7c7b\u578bv\u578b\u7684\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a \u76f8\u4ea4\u3002\u4e00\u822c\u53cd\u6620\u4e3a\u6d17\u76d8\u62c9\u5347\u7684\u7279\u5f81\u3002 Examples: >>> f = np.array([ 3 * i ** 2 - 20 * i + 2 for i in range(10)]) >>> g = np.array([ i - 5 for i in range(10)]) >>> flag, indices = vcross(f, g) >>> assert flag is True >>> assert indices[0] == 0 >>> assert indices[1] == 6 Args: f: first sequence g: the second sequence Returns: (flag, indices), \u5176\u4e2dflag\u53d6\u503c\u4e3aTrue\u65f6\uff0c\u5b58\u5728vcross\uff0cindices\u4e3a\u4ea4\u70b9\u7684\u7d22\u5f15\u3002 \"\"\" indices = np . argwhere ( np . diff ( np . sign ( f - g ))) . flatten () if len ( indices ) == 2 : idx0 , idx1 = indices if f [ idx0 ] > g [ idx0 ] and f [ idx1 ] < g [ idx1 ]: return True , ( idx0 , idx1 ) return False , ( None , None )","title":"talib"},{"location":"api/talib/#omicron.talib.core","text":"","title":"core"},{"location":"api/talib/#omicron.talib.core.angle","text":"\u6c42\u65f6\u95f4\u5e8f\u5217 ts \u62df\u5408\u76f4\u7ebf\u76f8\u5bf9\u4e8e x \u8f74\u7684\u5939\u89d2\u7684\u4f59\u5f26\u503c \u672c\u51fd\u6570\u53ef\u4ee5\u7528\u6765\u5224\u65ad\u65f6\u95f4\u5e8f\u5217\u7684\u589e\u957f\u8d8b\u52bf\u3002\u5f53 angle \u5904\u4e8e[-1, 0]\u65f6\uff0c\u8d8a\u9760\u8fd10\uff0c\u4e0b\u964d\u8d8a\u5feb\uff1b\u5f53 angle \u5904\u4e8e[0, 1]\u65f6\uff0c\u8d8a\u63a5\u8fd10\uff0c\u4e0a\u5347\u8d8a\u5feb\u3002 \u5982\u679c ts \u65e0\u6cd5\u5f88\u597d\u5730\u62df\u5408\u4e3a\u76f4\u7ebf\uff0c\u5219\u8fd4\u56de[float, None] Examples: >>> ts = np . array ([ i for i in range ( 5 )]) >>> round ( angle ( ts )[ 1 ], 3 ) # degree: 45, rad: pi/2 0.707 >>> ts = np . array ([ np . 
sqrt ( 3 ) / 3 * i for i in range ( 10 )]) >>> round ( angle ( ts )[ 1 ], 3 ) # degree: 30, rad: pi/6 0.866 >>> ts = np . array ([ - np . sqrt ( 3 ) / 3 * i for i in range ( 7 )]) >>> round ( angle ( ts )[ 1 ], 3 ) # degree: 150, rad: 5*pi/6 - 0.866 Parameters: Name Type Description Default ts required Returns: Type Description Tuple[float, float] \u8fd4\u56de (error, consine(theta))\uff0c\u5373\u62df\u5408\u8bef\u5dee\u548c\u5939\u89d2\u4f59\u5f26\u503c\u3002 Source code in omicron/talib/core.py def angle ( ts , threshold = 0.01 , loss_func = \"re\" ) -> Tuple [ float , float ]: \"\"\"\u6c42\u65f6\u95f4\u5e8f\u5217`ts`\u62df\u5408\u76f4\u7ebf\u76f8\u5bf9\u4e8e`x`\u8f74\u7684\u5939\u89d2\u7684\u4f59\u5f26\u503c \u672c\u51fd\u6570\u53ef\u4ee5\u7528\u6765\u5224\u65ad\u65f6\u95f4\u5e8f\u5217\u7684\u589e\u957f\u8d8b\u52bf\u3002\u5f53`angle`\u5904\u4e8e[-1, 0]\u65f6\uff0c\u8d8a\u9760\u8fd10\uff0c\u4e0b\u964d\u8d8a\u5feb\uff1b\u5f53`angle` \u5904\u4e8e[0, 1]\u65f6\uff0c\u8d8a\u63a5\u8fd10\uff0c\u4e0a\u5347\u8d8a\u5feb\u3002 \u5982\u679c`ts`\u65e0\u6cd5\u5f88\u597d\u5730\u62df\u5408\u4e3a\u76f4\u7ebf\uff0c\u5219\u8fd4\u56de[float, None] Examples: >>> ts = np.array([ i for i in range(5)]) >>> round(angle(ts)[1], 3) # degree: 45, rad: pi/2 0.707 >>> ts = np.array([ np.sqrt(3) / 3 * i for i in range(10)]) >>> round(angle(ts)[1],3) # degree: 30, rad: pi/6 0.866 >>> ts = np.array([ -np.sqrt(3) / 3 * i for i in range(7)]) >>> round(angle(ts)[1], 3) # degree: 150, rad: 5*pi/6 -0.866 Args: ts: Returns: \u8fd4\u56de (error, consine(theta))\uff0c\u5373\u62df\u5408\u8bef\u5dee\u548c\u5939\u89d2\u4f59\u5f26\u503c\u3002 \"\"\" err , ( a , b ) = polyfit ( ts , deg = 1 , loss_func = loss_func ) if err > threshold : return ( err , None ) v = np . array ([ 1 , a + b ]) vx = np . array ([ 1 , 0 ]) return err , copysign ( np . dot ( v , vx ) / ( norm ( v ) * norm ( vx )), a )","title":"angle()"},{"location":"api/talib/#omicron.talib.core.clustering","text":"\u5c06\u6570\u7ec4 numbers \u5212\u5206\u4e3a n \u4e2a\u7c07 \u8fd4\u56de\u503c\u4e3a\u4e00\u4e2aList, \u6bcf\u4e00\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u5206\u522b\u4e3a\u7c07\u7684\u8d77\u59cb\u70b9\u548c\u957f\u5ea6\u3002 Examples: >>> numbers = np . array ([ 1 , 1 , 1 , 2 , 4 , 6 , 8 , 7 , 4 , 5 , 6 ]) >>> clustering ( numbers , 2 ) [( 0 , 4 ), ( 4 , 7 )] Returns: Type Description List[Tuple[int, int]] \u5212\u5206\u540e\u7684\u7c07\u5217\u8868\u3002 Source code in omicron/talib/core.py def clustering ( numbers : np . ndarray , n : int ) -> List [ Tuple [ int , int ]]: \"\"\"\u5c06\u6570\u7ec4`numbers`\u5212\u5206\u4e3a`n`\u4e2a\u7c07 \u8fd4\u56de\u503c\u4e3a\u4e00\u4e2aList, \u6bcf\u4e00\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a\u5217\u8868\uff0c\u5206\u522b\u4e3a\u7c07\u7684\u8d77\u59cb\u70b9\u548c\u957f\u5ea6\u3002 Examples: >>> numbers = np.array([1,1,1,2,4,6,8,7,4,5,6]) >>> clustering(numbers, 2) [(0, 4), (4, 7)] Returns: \u5212\u5206\u540e\u7684\u7c07\u5217\u8868\u3002 \"\"\" result = ckwrap . cksegs ( numbers , n ) clusters = [] for pos , size in zip ( result . centers , result . sizes ): clusters . append (( int ( pos - size // 2 - 1 ), int ( size ))) return clusters","title":"clustering()"},{"location":"api/talib/#omicron.talib.core.exp_moving_average","text":"Numpy implementation of EMA Source code in omicron/talib/core.py def exp_moving_average ( values , window ): \"\"\"Numpy implementation of EMA\"\"\" weights = np . exp ( np . linspace ( - 1.0 , 0.0 , window )) weights /= weights . sum () a = np . 
convolve ( values , weights , mode = \"full\" )[: len ( values )] a [: window ] = a [ window ] return a","title":"exp_moving_average()"},{"location":"api/talib/#omicron.talib.core.mean_absolute_error","text":"\u8fd4\u56de\u9884\u6d4b\u5e8f\u5217\u76f8\u5bf9\u4e8e\u771f\u503c\u5e8f\u5217\u7684\u5e73\u5747\u7edd\u5bf9\u503c\u5dee \u4e24\u4e2a\u5e8f\u5217\u5e94\u8be5\u5177\u6709\u76f8\u540c\u7684\u957f\u5ea6\u3002\u5982\u679c\u5b58\u5728nan\uff0c\u5219nan\u7684\u503c\u4e0d\u8ba1\u5165\u5e73\u5747\u503c\u3002 Examples: >>> y = np . arange ( 5 ) >>> y_hat = np . arange ( 5 ) >>> y_hat [ 4 ] = 0 >>> mean_absolute_error ( y , y ) 0.0 >>> mean_absolute_error ( y , y_hat ) 0.8 Parameters: Name Type Description Default y np.array \u771f\u503c\u5e8f\u5217 required y_hat \u6bd4\u8f83\u5e8f\u5217 required Returns: Type Description float \u5e73\u5747\u7edd\u5bf9\u503c\u5dee Source code in omicron/talib/core.py def mean_absolute_error ( y : np . array , y_hat : np . array ) -> float : \"\"\"\u8fd4\u56de\u9884\u6d4b\u5e8f\u5217\u76f8\u5bf9\u4e8e\u771f\u503c\u5e8f\u5217\u7684\u5e73\u5747\u7edd\u5bf9\u503c\u5dee \u4e24\u4e2a\u5e8f\u5217\u5e94\u8be5\u5177\u6709\u76f8\u540c\u7684\u957f\u5ea6\u3002\u5982\u679c\u5b58\u5728nan\uff0c\u5219nan\u7684\u503c\u4e0d\u8ba1\u5165\u5e73\u5747\u503c\u3002 Examples: >>> y = np.arange(5) >>> y_hat = np.arange(5) >>> y_hat[4] = 0 >>> mean_absolute_error(y, y) 0.0 >>> mean_absolute_error(y, y_hat) 0.8 Args: y (np.array): \u771f\u503c\u5e8f\u5217 y_hat: \u6bd4\u8f83\u5e8f\u5217 Returns: float: \u5e73\u5747\u7edd\u5bf9\u503c\u5dee \"\"\" return nanmean ( np . abs ( y - y_hat ))","title":"mean_absolute_error()"},{"location":"api/talib/#omicron.talib.core.moving_average","text":"\u751f\u6210ts\u5e8f\u5217\u7684\u79fb\u52a8\u5e73\u5747\u503c Examples: >>> ts = np . arange ( 7 ) >>> moving_average ( ts , 5 ) array ([ nan , nan , nan , nan , 2. , 3. , 4. ]) Parameters: Name Type Description Default ts Sequence the input array required win int the window size required padding if True, then the return will be equal length as input, padding with np.NaN at the beginning True Returns: Type Description ndarray The moving mean of the input array along the specified axis. The output has the same shape as the input. Source code in omicron/talib/core.py def moving_average ( ts : Sequence , win : int , padding = True ) -> np . ndarray : \"\"\"\u751f\u6210ts\u5e8f\u5217\u7684\u79fb\u52a8\u5e73\u5747\u503c Examples: >>> ts = np.arange(7) >>> moving_average(ts, 5) array([nan, nan, nan, nan, 2., 3., 4.]) Args: ts (Sequence): the input array win (int): the window size padding: if True, then the return will be equal length as input, padding with np.NaN at the beginning Returns: The moving mean of the input array along the specified axis. The output has the same shape as the input. 
\"\"\" ma = move_mean ( ts , win ) if padding : return ma else : return ma [ win - 1 :]","title":"moving_average()"},{"location":"api/talib/#omicron.talib.core.normalize","text":"\u5bf9\u6570\u636e\u8fdb\u884c\u89c4\u8303\u5316\u5904\u7406\u3002 \u5982\u679cscaler\u4e3amaxabs\uff0c\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[-1,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3aunit_vector\uff0c\u5219\u5c06X\u7684\u5404\u5143\u7d20\u538b\u7f29\u5230\u5355\u4f4d\u8303\u6570 \u5982\u679cscaler\u4e3aminmax,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[0,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3astandard,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230\u5355\u4f4d\u65b9\u5dee\u4e4b\u95f4\uff0c\u4e14\u5747\u503c\u4e3a\u96f6\u3002 \u53c2\u8003 sklearn Examples: >>> X = [[ 1. , - 1. , 2. ], ... [ 2. , 0. , 0. ], ... [ 0. , 1. , - 1. ]] >>> expected = [[ 0.4082 , - 0.4082 , 0.8165 ], ... [ 1. , 0. , 0. ], ... [ 0. , 0.7071 , - 0.7071 ]] >>> X_hat = normalize ( X , scaler = 'unit_vector' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 4 ) >>> expected = [[ 0.5 , - 1. , 1. ], ... [ 1. , 0. , 0. ], ... [ 0. , 1. , - 0.5 ]] >>> X_hat = normalize ( X , scaler = 'maxabs' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 2 ) >>> expected = [[ 0.5 , 0. , 1. ], ... [ 1. , 0.5 , 0.33333333 ], ... [ 0. , 1. , 0. ]] >>> X_hat = normalize ( X , scaler = 'minmax' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 3 ) >>> X = [[ 0 , 0 ], ... [ 0 , 0 ], ... [ 1 , 1 ], ... [ 1 , 1 ]] >>> expected = [[ - 1. , - 1. ], ... [ - 1. , - 1. ], ... [ 1. , 1. ], ... [ 1. , 1. ]] >>> X_hat = normalize ( X , scaler = 'standard' ) >>> np . testing . assert_array_almost_equal ( expected , X_hat , decimal = 3 ) Parameters: Name Type Description Default X 2D array required scaler str [description]. Defaults to 'maxabs_scale'. 'maxabs' Source code in omicron/talib/core.py def normalize ( X , scaler = \"maxabs\" ): \"\"\"\u5bf9\u6570\u636e\u8fdb\u884c\u89c4\u8303\u5316\u5904\u7406\u3002 \u5982\u679cscaler\u4e3amaxabs\uff0c\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[-1,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3aunit_vector\uff0c\u5219\u5c06X\u7684\u5404\u5143\u7d20\u538b\u7f29\u5230\u5355\u4f4d\u8303\u6570 \u5982\u679cscaler\u4e3aminmax,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230[0,1]\u4e4b\u95f4 \u5982\u679cscaler\u4e3astandard,\u5219X\u7684\u5404\u5143\u7d20\u88ab\u538b\u7f29\u5230\u5355\u4f4d\u65b9\u5dee\u4e4b\u95f4\uff0c\u4e14\u5747\u503c\u4e3a\u96f6\u3002 \u53c2\u8003 [sklearn] [sklearn]: https://scikit-learn.org/stable/auto_examples/preprocessing/plot_all_scaling.html#results Examples: >>> X = [[ 1., -1., 2.], ... [ 2., 0., 0.], ... [ 0., 1., -1.]] >>> expected = [[ 0.4082, -0.4082, 0.8165], ... [ 1., 0., 0.], ... [ 0., 0.7071, -0.7071]] >>> X_hat = normalize(X, scaler='unit_vector') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal=4) >>> expected = [[0.5, -1., 1.], ... [1., 0., 0.], ... [0., 1., -0.5]] >>> X_hat = normalize(X, scaler='maxabs') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 2) >>> expected = [[0.5 , 0. , 1. ], ... [1. , 0.5 , 0.33333333], ... [0. , 1. , 0. ]] >>> X_hat = normalize(X, scaler='minmax') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal= 3) >>> X = [[0, 0], ... [0, 0], ... [1, 1], ... [1, 1]] >>> expected = [[-1., -1.], ... [-1., -1.], ... [ 1., 1.], ... 
[ 1., 1.]] >>> X_hat = normalize(X, scaler='standard') >>> np.testing.assert_array_almost_equal(expected, X_hat, decimal = 3) Args: X (2D array): scaler (str, optional): [description]. Defaults to 'maxabs_scale'. \"\"\" if scaler == \"maxabs\" : return MaxAbsScaler () . fit_transform ( X ) elif scaler == \"unit_vector\" : return sklearn . preprocessing . normalize ( X , norm = \"l2\" ) elif scaler == \"minmax\" : return minmax_scale ( X ) elif scaler == \"standard\" : return StandardScaler () . fit_transform ( X )","title":"normalize()"},{"location":"api/talib/#omicron.talib.core.pct_error","text":"\u76f8\u5bf9\u4e8e\u5e8f\u5217\u7b97\u672f\u5747\u503c\u7684\u8bef\u5dee\u503c Examples: >>> y = np . arange ( 5 ) >>> y_hat = np . arange ( 5 ) >>> y_hat [ 4 ] = 0 >>> pct_error ( y , y_hat ) 0.4 Parameters: Name Type Description Default y np.array [description] required y_hat np.array [description] required Returns: Type Description float [description] Source code in omicron/talib/core.py def pct_error ( y : np . array , y_hat : np . array ) -> float : \"\"\"\u76f8\u5bf9\u4e8e\u5e8f\u5217\u7b97\u672f\u5747\u503c\u7684\u8bef\u5dee\u503c Examples: >>> y = np.arange(5) >>> y_hat = np.arange(5) >>> y_hat[4] = 0 >>> pct_error(y, y_hat) 0.4 Args: y (np.array): [description] y_hat (np.array): [description] Returns: float: [description] \"\"\" mae = mean_absolute_error ( y , y_hat ) return mae / nanmean ( np . abs ( y ))","title":"pct_error()"},{"location":"api/talib/#omicron.talib.core.polyfit","text":"\u5bf9\u7ed9\u5b9a\u7684\u65f6\u95f4\u5e8f\u5217\u8fdb\u884c\u76f4\u7ebf/\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u3002 \u4e8c\u6b21\u66f2\u7ebf\u53ef\u4ee5\u62df\u5408\u5230\u53cd\u751f\u53cd\u8f6c\u7684\u884c\u60c5\uff0c\u5982\u5706\u5f27\u5e95\u3001\u5706\u5f27\u9876\uff1b\u4e5f\u53ef\u4ee5\u62df\u5408\u5230\u4e0a\u8ff0\u8d8b\u52bf\u4e2d\u7684\u5355\u8fb9\u8d70\u52bf\uff0c\u5373\u5176\u4e2d\u4e00\u6bb5\u66f2\u7ebf\u3002\u5bf9\u4e8e\u5982\u957f\u671f\u5747\u7ebf\uff0c\u5728\u4e00\u6bb5\u65f6\u95f4\u5185\u8d70\u52bf\u53ef\u80fd\u5448\u73b0\u4e3a\u4e00\u6761\u76f4\u7ebf\uff0c\u6545\u4e5f\u53ef\u7528\u6b64\u51fd\u6570\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\u3002 \u4e3a\u4fbf\u4e8e\u5728\u4e0d\u540c\u54c1\u79cd\u3001\u4e0d\u540c\u7684\u65f6\u95f4\u4e4b\u95f4\u5bf9\u8bef\u5dee\u3001\u7cfb\u6570\u8fdb\u884c\u6bd4\u8f83\uff0c\u8bf7\u4e8b\u5148\u5bf9ts\u8fdb\u884c\u5f52\u4e00\u5316\u3002 \u5982\u679c\u9047\u5230\u65e0\u6cd5\u62df\u5408\u7684\u60c5\u51b5\uff08\u5f02\u5e38\uff09\uff0c\u5c06\u8fd4\u56de\u4e00\u4e2a\u975e\u5e38\u5927\u7684\u8bef\u5dee\uff0c\u5e76\u5c06\u5176\u5b83\u9879\u7f6e\u4e3anp.nan Examples: >>> ts = [ i for i in range ( 5 )] >>> err , ( a , b ) = polyfit ( ts , deg = 1 ) >>> print ( round ( err , 3 ), round ( a , 1 )) 0.0 1.0 Parameters: Name Type Description Default ts Sequence \u5f85\u62df\u5408\u7684\u65f6\u95f4\u5e8f\u5217 required deg int \u5982\u679c\u8981\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\uff0c\u53d61\uff1b\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u53d62. 
Defaults to 2 2 loss_func str \u8bef\u5dee\u8ba1\u7b97\u65b9\u6cd5\uff0c\u53d6\u503c\u4e3a mae , rmse , mse \u6216 re \u3002Defaults to re (relative_error) 're' Returns: Type Description [Tuple] \u5982\u679c\u4e3a\u76f4\u7ebf\u62df\u5408\uff0c\u8fd4\u56de\u8bef\u5dee\uff0c(a,b)(\u4e00\u6b21\u9879\u7cfb\u6570\u548c\u5e38\u6570)\u3002\u5982\u679c\u4e3a\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\uff0c\u8fd4\u56de \u8bef\u5dee, (a,b,c)(\u4e8c\u6b21\u9879\u3001\u4e00\u6b21\u9879\u548c\u5e38\u91cf\uff09, (vert_x, vert_y)(\u9876\u70b9\u5904\u7684index\uff0c\u9876\u70b9\u503c) Source code in omicron/talib/core.py def polyfit ( ts : Sequence , deg : int = 2 , loss_func = \"re\" ) -> Tuple : \"\"\"\u5bf9\u7ed9\u5b9a\u7684\u65f6\u95f4\u5e8f\u5217\u8fdb\u884c\u76f4\u7ebf/\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u3002 \u4e8c\u6b21\u66f2\u7ebf\u53ef\u4ee5\u62df\u5408\u5230\u53cd\u751f\u53cd\u8f6c\u7684\u884c\u60c5\uff0c\u5982\u5706\u5f27\u5e95\u3001\u5706\u5f27\u9876\uff1b\u4e5f\u53ef\u4ee5\u62df\u5408\u5230\u4e0a\u8ff0\u8d8b\u52bf\u4e2d\u7684\u5355\u8fb9\u8d70\u52bf\uff0c\u5373\u5176\u4e2d\u4e00\u6bb5\u66f2\u7ebf\u3002\u5bf9\u4e8e\u5982\u957f\u671f\u5747\u7ebf\uff0c\u5728\u4e00\u6bb5\u65f6\u95f4\u5185\u8d70\u52bf\u53ef\u80fd\u5448\u73b0\u4e3a\u4e00\u6761\u76f4\u7ebf\uff0c\u6545\u4e5f\u53ef\u7528\u6b64\u51fd\u6570\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\u3002 \u4e3a\u4fbf\u4e8e\u5728\u4e0d\u540c\u54c1\u79cd\u3001\u4e0d\u540c\u7684\u65f6\u95f4\u4e4b\u95f4\u5bf9\u8bef\u5dee\u3001\u7cfb\u6570\u8fdb\u884c\u6bd4\u8f83\uff0c\u8bf7\u4e8b\u5148\u5bf9ts\u8fdb\u884c\u5f52\u4e00\u5316\u3002 \u5982\u679c\u9047\u5230\u65e0\u6cd5\u62df\u5408\u7684\u60c5\u51b5\uff08\u5f02\u5e38\uff09\uff0c\u5c06\u8fd4\u56de\u4e00\u4e2a\u975e\u5e38\u5927\u7684\u8bef\u5dee\uff0c\u5e76\u5c06\u5176\u5b83\u9879\u7f6e\u4e3anp.nan Examples: >>> ts = [i for i in range(5)] >>> err, (a, b) = polyfit(ts, deg=1) >>> print(round(err, 3), round(a, 1)) 0.0 1.0 Args: ts (Sequence): \u5f85\u62df\u5408\u7684\u65f6\u95f4\u5e8f\u5217 deg (int): \u5982\u679c\u8981\u8fdb\u884c\u76f4\u7ebf\u62df\u5408\uff0c\u53d61\uff1b\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\u53d62. Defaults to 2 loss_func (str): \u8bef\u5dee\u8ba1\u7b97\u65b9\u6cd5\uff0c\u53d6\u503c\u4e3a`mae`, `rmse`,`mse` \u6216`re`\u3002Defaults to `re` (relative_error) Returns: [Tuple]: \u5982\u679c\u4e3a\u76f4\u7ebf\u62df\u5408\uff0c\u8fd4\u56de\u8bef\u5dee\uff0c(a,b)(\u4e00\u6b21\u9879\u7cfb\u6570\u548c\u5e38\u6570)\u3002\u5982\u679c\u4e3a\u4e8c\u6b21\u66f2\u7ebf\u62df\u5408\uff0c\u8fd4\u56de \u8bef\u5dee, (a,b,c)(\u4e8c\u6b21\u9879\u3001\u4e00\u6b21\u9879\u548c\u5e38\u91cf\uff09, (vert_x, vert_y)(\u9876\u70b9\u5904\u7684index\uff0c\u9876\u70b9\u503c) \"\"\" if deg not in ( 1 , 2 ): raise ValueError ( \"deg must be 1 or 2\" ) try : if any ( np . isnan ( ts )): raise ValueError ( \"ts contains nan\" ) x = np . array ( list ( range ( len ( ts )))) z = np . polyfit ( x , ts , deg = deg ) p = np . poly1d ( z ) ts_hat = np . array ([ p ( xi ) for xi in x ]) if loss_func == \"mse\" : error = np . mean ( np . square ( ts - ts_hat )) elif loss_func == \"rmse\" : error = np . sqrt ( np . mean ( np . 
square ( ts - ts_hat ))) elif loss_func == \"mae\" : error = mean_absolute_error ( ts , ts_hat ) else : # defaults to relative error error = pct_error ( ts , ts_hat ) if deg == 2 : a , b , c = z [ 0 ], z [ 1 ], z [ 2 ] axis_x = - b / ( 2 * a ) if a != 0 : axis_y = ( 4 * a * c - b * b ) / ( 4 * a ) else : axis_y = None return error , z , ( axis_x , axis_y ) elif deg == 1 : return error , z except Exception : error = 1e9 if deg == 1 : return error , ( np . nan , np . nan ) else : return error , ( np . nan , np . nan , np . nan ), ( np . nan , np . nan )","title":"polyfit()"},{"location":"api/talib/#omicron.talib.core.slope","text":"\u6c42ts\u8868\u793a\u7684\u76f4\u7ebf\uff08\u5982\u679c\u80fd\u62df\u5408\u6210\u76f4\u7ebf\u7684\u8bdd\uff09\u7684\u659c\u7387 Parameters: Name Type Description Default ts np.array [description] required loss_func str [description]. Defaults to 're'. 're' Source code in omicron/talib/core.py def slope ( ts : np . array , loss_func = \"re\" ): \"\"\"\u6c42ts\u8868\u793a\u7684\u76f4\u7ebf\uff08\u5982\u679c\u80fd\u62df\u5408\u6210\u76f4\u7ebf\u7684\u8bdd\uff09\u7684\u659c\u7387 Args: ts (np.array): [description] loss_func (str, optional): [description]. Defaults to 're'. \"\"\" err , ( a , b ) = polyfit ( ts , deg = 1 , loss_func = loss_func ) return err , a","title":"slope()"},{"location":"api/talib/#omicron.talib.core.smooth","text":"\u5e73\u6ed1\u5e8f\u5217ts\uff0c\u4f7f\u7528\u7a97\u53e3\u5927\u5c0f\u4e3awin\u7684\u5e73\u6ed1\u6a21\u578b\uff0c\u9ed8\u8ba4\u4f7f\u7528\u7ebf\u6027\u6a21\u578b \u63d0\u4f9b\u672c\u51fd\u6570\u4e3b\u8981\u57fa\u4e8e\u8fd9\u6837\u7684\u8003\u8651\uff1a omicron\u7684\u4f7f\u7528\u8005\u53ef\u80fd\u5e76\u4e0d\u719f\u6089\u4fe1\u53f7\u5904\u7406\u7684\u6982\u5ff5\uff0c\u8fd9\u91cc\u76f8\u5f53\u4e8e\u63d0\u4f9b\u4e86\u76f8\u5173\u529f\u80fd\u7684\u4e00\u4e2a\u5165\u53e3\u3002 Parameters: Name Type Description Default ts np.array [description] required win int [description] required poly_order int [description]. Defaults to 1. 1 Source code in omicron/talib/core.py def smooth ( ts : np . array , win : int , poly_order = 1 , mode = \"interp\" ): \"\"\"\u5e73\u6ed1\u5e8f\u5217ts\uff0c\u4f7f\u7528\u7a97\u53e3\u5927\u5c0f\u4e3awin\u7684\u5e73\u6ed1\u6a21\u578b\uff0c\u9ed8\u8ba4\u4f7f\u7528\u7ebf\u6027\u6a21\u578b \u63d0\u4f9b\u672c\u51fd\u6570\u4e3b\u8981\u57fa\u4e8e\u8fd9\u6837\u7684\u8003\u8651\uff1a omicron\u7684\u4f7f\u7528\u8005\u53ef\u80fd\u5e76\u4e0d\u719f\u6089\u4fe1\u53f7\u5904\u7406\u7684\u6982\u5ff5\uff0c\u8fd9\u91cc\u76f8\u5f53\u4e8e\u63d0\u4f9b\u4e86\u76f8\u5173\u529f\u80fd\u7684\u4e00\u4e2a\u5165\u53e3\u3002 Args: ts (np.array): [description] win (int): [description] poly_order (int, optional): [description]. Defaults to 1. \"\"\" return savgol_filter ( ts , win , poly_order , mode = mode )","title":"smooth()"},{"location":"api/talib/#omicron.talib.core.weighted_moving_average","text":"\u8ba1\u7b97\u52a0\u6743\u79fb\u52a8\u5e73\u5747 Parameters: Name Type Description Default ts np.array [description] required win int [description] required Returns: Type Description np.array [description] Source code in omicron/talib/core.py def weighted_moving_average ( ts : np . array , win : int ) -> np . array : \"\"\"\u8ba1\u7b97\u52a0\u6743\u79fb\u52a8\u5e73\u5747 Args: ts (np.array): [description] win (int): [description] Returns: np.array: [description] \"\"\" w = [ 2 * ( i + 1 ) / ( win * ( win + 1 )) for i in range ( win )] return np . 
convolve ( ts , w , \"valid\" )","title":"weighted_moving_average()"},{"location":"api/talib/#omicron.talib.morph","text":"\u5f62\u6001\u68c0\u6d4b\u76f8\u5173\u65b9\u6cd5","title":"morph"},{"location":"api/talib/#omicron.talib.morph.BreakoutFlag","text":"An enumeration. Source code in omicron/talib/morph.py class BreakoutFlag ( IntEnum ): UP = 1 DOWN = - 1 NONE = 0","title":"BreakoutFlag"},{"location":"api/talib/#omicron.talib.morph.CrossFlag","text":"An enumeration. Source code in omicron/talib/morph.py class CrossFlag ( IntEnum ): UPCROSS = 1 DOWNCROSS = - 1 NONE = 0","title":"CrossFlag"},{"location":"api/talib/#omicron.talib.morph.breakout","text":"\u68c0\u6d4b\u65f6\u95f4\u5e8f\u5217\u662f\u5426\u7a81\u7834\u4e86\u538b\u529b\u7ebf\uff08\u6574\u7406\u7ebf\uff09 Parameters: Name Type Description Default ts np.ndarray \u65f6\u95f4\u5e8f\u5217 required upthres float \u8bf7\u53c2\u8003 peaks_and_valleys 0.01 downthres float \u8bf7\u53c2\u8003 peaks_and_valleys -0.01 confirm int \u7ecf\u8fc7\u591a\u5c11\u4e2abars\u540e\uff0c\u624d\u786e\u8ba4\u7a81\u7834\u3002\u9ed8\u8ba4\u4e3a1 1 Returns: Type Description BreakoutFlag \u5982\u679c\u4e0a\u5411\u7a81\u7834\u538b\u529b\u7ebf\uff0c\u8fd4\u56de1\uff0c\u5982\u679c\u5411\u4e0b\u7a81\u7834\u538b\u529b\u7ebf\uff0c\u8fd4\u56de-1\uff0c\u5426\u5219\u8fd4\u56de0 Source code in omicron/talib/morph.py def breakout ( ts : np . ndarray , upthres : float = 0.01 , downthres : float = - 0.01 , confirm : int = 1 ) -> BreakoutFlag : \"\"\"\u68c0\u6d4b\u65f6\u95f4\u5e8f\u5217\u662f\u5426\u7a81\u7834\u4e86\u538b\u529b\u7ebf\uff08\u6574\u7406\u7ebf\uff09 Args: ts (np.ndarray): \u65f6\u95f4\u5e8f\u5217 upthres (float, optional): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] downthres (float, optional): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] confirm (int, optional): \u7ecf\u8fc7\u591a\u5c11\u4e2abars\u540e\uff0c\u624d\u786e\u8ba4\u7a81\u7834\u3002\u9ed8\u8ba4\u4e3a1 Returns: \u5982\u679c\u4e0a\u5411\u7a81\u7834\u538b\u529b\u7ebf\uff0c\u8fd4\u56de1\uff0c\u5982\u679c\u5411\u4e0b\u7a81\u7834\u538b\u529b\u7ebf\uff0c\u8fd4\u56de-1\uff0c\u5426\u5219\u8fd4\u56de0 \"\"\" support , resist , _ = support_resist_lines ( ts [: - confirm ], upthres , downthres ) x0 = len ( ts ) - confirm - 1 x = list ( range ( len ( ts ) - confirm , len ( ts ))) if resist is not None : if np . all ( ts [ x ] > resist ( x )) and ts [ x0 ] <= resist ( x0 ): return BreakoutFlag . UP if support is not None : if np . all ( ts [ x ] < support ( x )) and ts [ x0 ] >= support ( x0 ): return BreakoutFlag . DOWN return BreakoutFlag . NONE","title":"breakout()"},{"location":"api/talib/#omicron.talib.morph.cross","text":"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0eg\u76f8\u4ea4\u3002\u5982\u679c\u4e24\u4e2a\u5e8f\u5217\u6709\u4e14\u4ec5\u6709\u4e00\u4e2a\u4ea4\u70b9\uff0c\u5219\u8fd4\u56de1\u8868\u660ef\u4e0a\u4ea4g\uff1b-1\u8868\u660ef\u4e0b\u4ea4g \u672c\u65b9\u6cd5\u53ef\u7528\u4ee5\u5224\u65ad\u4e24\u6761\u5747\u7ebf\u662f\u5426\u76f8\u4ea4\u3002 Returns: Type Description CrossFlag (flag, index), \u5176\u4e2dflag\u53d6\u503c\u4e3a\uff1a 0 \u65e0\u6548 -1 f\u5411\u4e0b\u4ea4\u53c9g 1 f\u5411\u4e0a\u4ea4\u53c9g Source code in omicron/talib/morph.py def cross ( f : np . ndarray , g : np . 
ndarray ) -> CrossFlag : \"\"\"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0eg\u76f8\u4ea4\u3002\u5982\u679c\u4e24\u4e2a\u5e8f\u5217\u6709\u4e14\u4ec5\u6709\u4e00\u4e2a\u4ea4\u70b9\uff0c\u5219\u8fd4\u56de1\u8868\u660ef\u4e0a\u4ea4g\uff1b-1\u8868\u660ef\u4e0b\u4ea4g \u672c\u65b9\u6cd5\u53ef\u7528\u4ee5\u5224\u65ad\u4e24\u6761\u5747\u7ebf\u662f\u5426\u76f8\u4ea4\u3002 returns: (flag, index), \u5176\u4e2dflag\u53d6\u503c\u4e3a\uff1a 0 \u65e0\u6548 -1 f\u5411\u4e0b\u4ea4\u53c9g 1 f\u5411\u4e0a\u4ea4\u53c9g \"\"\" indices = np . argwhere ( np . diff ( np . sign ( f - g ))) . flatten () if len ( indices ) == 0 : return CrossFlag . NONE , 0 # \u5982\u679c\u5b58\u5728\u4e00\u4e2a\u6216\u8005\u591a\u4e2a\u4ea4\u70b9\uff0c\u53d6\u6700\u540e\u4e00\u4e2a idx = indices [ - 1 ] if f [ idx ] < g [ idx ]: return CrossFlag . UPCROSS , idx elif f [ idx ] > g [ idx ]: return CrossFlag . DOWNCROSS , idx else : return CrossFlag ( np . sign ( g [ idx - 1 ] - f [ idx - 1 ])), idx","title":"cross()"},{"location":"api/talib/#omicron.talib.morph.energy_hump","text":"\u68c0\u6d4b bars \u4e2d\u662f\u5426\u5b58\u5728\u4e24\u6ce2\u4ee5\u4e0a\u91cf\u80fd\u5267\u70c8\u589e\u52a0\u7684\u60c5\u5f62\uff08\u80fd\u91cf\u9a7c\u5cf0\uff09\uff0c\u8fd4\u56de\u6700\u540e\u4e00\u6ce2\u8ddd\u73b0\u5728\u7684\u4f4d\u7f6e\u53ca\u533a\u95f4\u957f\u5ea6\u3002 \u6ce8\u610f\u5982\u679c\u6700\u540e\u4e00\u4e2a\u80fd\u91cf\u9a7c\u5cf0\u8ddd\u73b0\u5728\u8fc7\u8fdc\uff08\u6bd4\u5982\u8d85\u8fc710\u4e2abar),\u53ef\u80fd\u610f\u5473\u7740\u8d44\u91d1\u5df2\u7ecf\u9003\u79bb\uff0c\u80fd\u91cf\u5df2\u7ecf\u8017\u5c3d\u3002 Parameters: Name Type Description Default bars [('frame', ' Optional [ Tuple [ int , int ]]: \"\"\"\u68c0\u6d4b`bars`\u4e2d\u662f\u5426\u5b58\u5728\u4e24\u6ce2\u4ee5\u4e0a\u91cf\u80fd\u5267\u70c8\u589e\u52a0\u7684\u60c5\u5f62\uff08\u80fd\u91cf\u9a7c\u5cf0\uff09\uff0c\u8fd4\u56de\u6700\u540e\u4e00\u6ce2\u8ddd\u73b0\u5728\u7684\u4f4d\u7f6e\u53ca\u533a\u95f4\u957f\u5ea6\u3002 \u6ce8\u610f\u5982\u679c\u6700\u540e\u4e00\u4e2a\u80fd\u91cf\u9a7c\u5cf0\u8ddd\u73b0\u5728\u8fc7\u8fdc\uff08\u6bd4\u5982\u8d85\u8fc710\u4e2abar),\u53ef\u80fd\u610f\u5473\u7740\u8d44\u91d1\u5df2\u7ecf\u9003\u79bb\uff0c\u80fd\u91cf\u5df2\u7ecf\u8017\u5c3d\u3002 Args: bars: \u884c\u60c5\u6570\u636e thresh: \u6700\u540e\u4e00\u6ce2\u91cf\u5fc5\u987b\u5927\u4e8e20\u5929\u5747\u91cf\u7684\u500d\u6570\u3002 Returns: \u5982\u679c\u4e0d\u5b58\u5728\u80fd\u91cf\u9a7c\u5cf0\u7684\u60c5\u5f62\uff0c\u5219\u8fd4\u56deNone\uff0c\u5426\u5219\u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u9a7c\u5cf0\u79bb\u73b0\u5728\u7684\u8ddd\u79bb\u53ca\u533a\u95f4\u957f\u5ea6\u3002 \"\"\" vol = bars [ \"volume\" ] std = np . std ( vol [ 1 :] / vol [: - 1 ]) pvs = peak_valley_pivots ( vol , std , 0 ) frames = bars [ \"frame\" ] pvs [ 0 ] = 0 pvs [ - 1 ] = - 1 peaks = np . argwhere ( pvs == 1 ) mn = np . mean ( vol [ peaks ]) # \u9876\u70b9\u4e0d\u80fd\u7f29\u91cf\u5230\u5c16\u5cf0\u5747\u503c\u4ee5\u4e0b real_peaks = np . intersect1d ( np . argwhere ( vol > mn ), peaks ) if len ( real_peaks ) < 2 : return None logger . debug ( \"found %s peaks at %s \" , len ( real_peaks ), frames [ real_peaks ]) lp = real_peaks [ - 1 ] ma = moving_average ( vol , 20 )[ lp ] if vol [ lp ] < ma * thresh : logger . 
debug ( \"vol of last peak[ %s ] is less than mean_vol(20) * thresh[ %s ]\" , vol [ lp ], ma * thresh , ) return None return len ( bars ) - real_peaks [ - 1 ], real_peaks [ - 1 ] - real_peaks [ 0 ]","title":"energy_hump()"},{"location":"api/talib/#omicron.talib.morph.inverse_vcross","text":"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0e\u5e8f\u5217g\u5b58\u5728^\u578b\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b \u76f8\u4ea4\u3002\u53ef\u7528\u4e8e\u5224\u65ad\u89c1\u9876\u7279\u5f81\u7b49\u573a\u5408\u3002 Parameters: Name Type Description Default f np.array [description] required g np.array [description] required Returns: Type Description Tuple [description] Source code in omicron/talib/morph.py def inverse_vcross ( f : np . array , g : np . array ) -> Tuple : \"\"\"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0e\u5e8f\u5217g\u5b58\u5728^\u578b\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b \u76f8\u4ea4\u3002\u53ef\u7528\u4e8e\u5224\u65ad\u89c1\u9876\u7279\u5f81\u7b49\u573a\u5408\u3002 Args: f (np.array): [description] g (np.array): [description] Returns: Tuple: [description] \"\"\" indices = np . argwhere ( np . diff ( np . sign ( f - g ))) . flatten () if len ( indices ) == 2 : idx0 , idx1 = indices if f [ idx0 ] < g [ idx0 ] and f [ idx1 ] > g [ idx1 ]: return True , ( idx0 , idx1 ) return False , ( None , None )","title":"inverse_vcross()"},{"location":"api/talib/#omicron.talib.morph.peaks_and_valleys","text":"\u5bfb\u627ets\u4e2d\u7684\u6ce2\u5cf0\u548c\u6ce2\u8c37\uff0c\u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002\u5982\u679c\u4e3a1\uff0c\u5219\u4e3a\u6ce2\u5cf0\uff1b\u5982\u679c\u4e3a-1\uff0c\u5219\u4e3a\u6ce2\u8c37\u3002 \u672c\u51fd\u6570\u76f4\u63a5\u4f7f\u7528\u4e86zigzag\u4e2d\u7684peak_valley_pivots. \u6709\u5f88\u591a\u65b9\u6cd5\u53ef\u4ee5\u5b9e\u73b0\u672c\u529f\u80fd\uff0c\u6bd4\u5982scipy.signals.find_peaks_cwt, peak_valley_pivots\u7b49\u3002\u672c\u51fd\u6570\u66f4\u9002\u5408\u91d1\u878d\u65f6\u95f4\u5e8f\u5217\uff0c\u5e76\u4e14\u4f7f\u7528\u4e86cython\u52a0\u901f\u3002 Parameters: Name Type Description Default ts np.ndarray \u65f6\u95f4\u5e8f\u5217 required up_thresh float \u6ce2\u5cf0\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee None down_thresh float \u6ce2\u8c37\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee\u4e58\u4ee5-1 None Returns: Type Description np.ndarray \u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002 Source code in omicron/talib/morph.py def peaks_and_valleys ( ts : np . ndarray , up_thresh : Optional [ float ] = None , down_thresh : Optional [ float ] = None , ) -> np . ndarray : \"\"\"\u5bfb\u627ets\u4e2d\u7684\u6ce2\u5cf0\u548c\u6ce2\u8c37\uff0c\u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002\u5982\u679c\u4e3a1\uff0c\u5219\u4e3a\u6ce2\u5cf0\uff1b\u5982\u679c\u4e3a-1\uff0c\u5219\u4e3a\u6ce2\u8c37\u3002 \u672c\u51fd\u6570\u76f4\u63a5\u4f7f\u7528\u4e86zigzag\u4e2d\u7684peak_valley_pivots. 
\u6709\u5f88\u591a\u65b9\u6cd5\u53ef\u4ee5\u5b9e\u73b0\u672c\u529f\u80fd\uff0c\u6bd4\u5982scipy.signals.find_peaks_cwt, peak_valley_pivots\u7b49\u3002\u672c\u51fd\u6570\u66f4\u9002\u5408\u91d1\u878d\u65f6\u95f4\u5e8f\u5217\uff0c\u5e76\u4e14\u4f7f\u7528\u4e86cython\u52a0\u901f\u3002 Args: ts (np.ndarray): \u65f6\u95f4\u5e8f\u5217 up_thresh (float): \u6ce2\u5cf0\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee down_thresh (float): \u6ce2\u8c37\u7684\u9608\u503c\uff0c\u5982\u679c\u4e3aNone,\u5219\u4f7f\u7528ts\u53d8\u5316\u7387\u7684\u4e8c\u500d\u6807\u51c6\u5dee\u4e58\u4ee5-1 Returns: np.ndarray: \u8fd4\u56de\u6570\u7ec4\u6307\u793a\u5728\u8be5\u4f4d\u7f6e\u4e0a\u662f\u5426\u4e3a\u6ce2\u5cf0\u6216\u6ce2\u8c37\u3002 \"\"\" if ts . dtype != np . float64 : ts = ts . astype ( np . float64 ) if any ([ up_thresh is None , down_thresh is None ]): change_rate = ts [ 1 :] / ts [: - 1 ] - 1 std = np . std ( change_rate ) up_thresh = up_thresh or 2 * std down_thresh = down_thresh or - 2 * std return peak_valley_pivots ( ts , up_thresh , down_thresh )","title":"peaks_and_valleys()"},{"location":"api/talib/#omicron.talib.morph.plateaus","text":"\u7edf\u8ba1\u6570\u7ec4 numbers \u4e2d\u7684\u53ef\u80fd\u5b58\u5728\u7684\u5e73\u53f0\u6574\u7406\u3002 \u5982\u679c\u4e00\u4e2a\u6570\u7ec4\u4e2d\u5b58\u5728\u7740\u5b50\u6570\u7ec4\uff0c\u4f7f\u5f97\u5176\u5143\u7d20\u4e0e\u5747\u503c\u7684\u8ddd\u79bb\u843d\u5728\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u6bd4\u4f8b\u8d85\u8fc7 fall_in_range_ratio \u7684\uff0c\u5219\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6ee1\u8db3\u5e73\u53f0\u6574\u7406\u3002 Parameters: Name Type Description Default numbers ndarray \u8f93\u5165\u6570\u7ec4 required min_size int \u5e73\u53f0\u7684\u6700\u5c0f\u957f\u5ea6 required fall_in_range_ratio float \u8d85\u8fc7 fall_in_range_ratio \u6bd4\u4f8b\u7684\u5143\u7d20\u843d\u5728\u5747\u503c\u7684\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\uff0c\u5c31\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6784\u6210\u4e00\u4e2a\u5e73\u53f0 0.97 Returns: Type Description List[Tuple] \u5e73\u53f0\u7684\u8d77\u59cb\u4f4d\u7f6e\u548c\u957f\u5ea6\u7684\u6570\u7ec4 Source code in omicron/talib/morph.py def plateaus ( numbers : np . ndarray , min_size : int , fall_in_range_ratio : float = 0.97 ) -> List [ Tuple ]: \"\"\"\u7edf\u8ba1\u6570\u7ec4`numbers`\u4e2d\u7684\u53ef\u80fd\u5b58\u5728\u7684\u5e73\u53f0\u6574\u7406\u3002 \u5982\u679c\u4e00\u4e2a\u6570\u7ec4\u4e2d\u5b58\u5728\u7740\u5b50\u6570\u7ec4\uff0c\u4f7f\u5f97\u5176\u5143\u7d20\u4e0e\u5747\u503c\u7684\u8ddd\u79bb\u843d\u5728\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u6bd4\u4f8b\u8d85\u8fc7`fall_in_range_ratio`\u7684\uff0c\u5219\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6ee1\u8db3\u5e73\u53f0\u6574\u7406\u3002 Args: numbers: \u8f93\u5165\u6570\u7ec4 min_size: \u5e73\u53f0\u7684\u6700\u5c0f\u957f\u5ea6 fall_in_range_ratio: \u8d85\u8fc7`fall_in_range_ratio`\u6bd4\u4f8b\u7684\u5143\u7d20\u843d\u5728\u5747\u503c\u7684\u4e09\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\uff0c\u5c31\u8ba4\u4e3a\u8be5\u5b50\u6570\u7ec4\u6784\u6210\u4e00\u4e2a\u5e73\u53f0 Returns: \u5e73\u53f0\u7684\u8d77\u59cb\u4f4d\u7f6e\u548c\u957f\u5ea6\u7684\u6570\u7ec4 \"\"\" if numbers . size <= min_size : n = 1 else : n = numbers . size // min_size clusters = clustering ( numbers , n ) plats = [] for ( start , length ) in clusters : if length < min_size : continue y = numbers [ start : start + length ] mean = np . mean ( y ) std = np . std ( y ) inrange = len ( y [ np . 
abs ( y - mean ) < 3 * std ]) ratio = inrange / length if ratio >= fall_in_range_ratio : plats . append (( start , length )) return plats","title":"plateaus()"},{"location":"api/talib/#omicron.talib.morph.rsi_bottom_distance","text":"\u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 None Returns: Type Description int \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_bottom_distance ( close : np . array , thresh : Tuple [ float , float ] = None ) -> int : \"\"\"\u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Args: close (np.array): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 Returns: \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u4f4e\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u4f4e\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u4f4e\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u4f4e\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002\"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) rsi = ta . 
RSI ( close , 6 ) watermarks = rsi_watermarks ( close , thresh ) if watermarks is not None : low_watermark , _ , _ = watermarks pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 # \u8c37\u503cRSI<30 valley_rsi_index = np . where (( rsi < 30 ) & ( pivots == - 1 ))[ 0 ] # RSI\u4f4e\u6c34\u5e73\u7684\u6700\u5927\u503c\uff1a\u4f4e\u6c34\u5e73*1.01 low_rsi_index = np . where ( rsi <= low_watermark * 1.01 )[ 0 ] if len ( valley_rsi_index ) > 0 : distance = len ( rsi ) - 1 - valley_rsi_index [ - 1 ] if len ( low_rsi_index ) > 0 : if low_rsi_index [ - 1 ] >= valley_rsi_index [ - 1 ]: distance = len ( rsi ) - 1 - low_rsi_index [ - 1 ] return distance","title":"rsi_bottom_distance()"},{"location":"api/talib/#omicron.talib.morph.rsi_bottom_divergent","text":"\u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u5e95\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 required thresh Tuple[float, float] \u8bf7\u53c2\u8003 peaks_and_valleys None rsi_limit float RSI\u53d1\u751f\u5e95\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c30\uff0820\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6<30\u7684\u5c40\u90e8\u6700\u4f4e\u6536\u76d8\u4ef7\u3002 30 Returns: Type Description int \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_bottom_divergent ( close : np . array , thresh : Tuple [ float , float ] = None , rsi_limit : float = 30 ) -> int : \"\"\"\u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u5e95\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Args: close (np.array): \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 thresh (Tuple[float, float]): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] rsi_limit (float, optional): RSI\u53d1\u751f\u5e95\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c30\uff0820\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6<30\u7684\u5c40\u90e8\u6700\u4f4e\u6536\u76d8\u4ef7\u3002 Returns: \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u5e95\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u5e95\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) rsi = ta . RSI ( close , 6 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 length = len ( close ) valley_index = np . 
where (( pivots == - 1 ) & ( rsi <= rsi_limit ))[ 0 ] if len ( valley_index ) >= 2 : if ( close [ valley_index [ - 1 ]] < close [ valley_index [ - 2 ]]) and ( rsi [ valley_index [ - 1 ]] > rsi [ valley_index [ - 2 ]] ): bottom_dev_distance = length - 1 - valley_index [ - 1 ] return bottom_dev_distance","title":"rsi_bottom_divergent()"},{"location":"api/talib/#omicron.talib.morph.rsi_predict_price","text":"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\uff0c\u6839\u636e\u6700\u8fd1\u7684\u4e24\u4e2aRSI\u7684\u6781\u5c0f\u503c\u548c\u6781\u5927\u503c\u9884\u6d4b\u4e0b\u4e00\u4e2a\u5468\u671f\u53ef\u80fd\u8fbe\u5230\u7684\u6700\u4f4e\u4ef7\u683c\u548c\u6700\u9ad8\u4ef7\u683c\u3002 \u5176\u539f\u7406\u662f\uff0c\u4ee5\u9884\u6d4b\u6700\u8fd1\u7684\u4e24\u4e2a\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\uff0c\u6c42\u51fa\u5176\u76f8\u5bf9\u5e94\u7684RSI\u503c\uff0c\u6c42\u51fa\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7RSI\u7684\u5747\u503c\uff0c \u82e5\u53ea\u6709\u4e00\u4e2a\u5219\u53d6\u6700\u8fd1\u7684\u4e00\u4e2a\u3002\u518d\u7531RSI\u516c\u5f0f\uff0c\u53cd\u63a8\u4ef7\u683c\u3002\u6b64\u65f6\u8fd4\u56de\u503c\u4e3a(None, float)\uff0c\u5373\u53ea\u6709\u6700\u9ad8\u4ef7\uff0c\u6ca1\u6709\u6700\u4f4e\u4ef7\u3002\u53cd\u4e4b\u4ea6\u7136\u3002 Parameters: Name Type Description Default close np.ndarray \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) \u8bf7\u53c2\u8003 peaks_and_valleys None Returns: Type Description Tuple[float, float] \u8fd4\u56de\u6570\u7ec4[predicted_low_price, predicted_high_price], \u6570\u7ec4\u7b2c\u4e00\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u4f4e\u4ef7\u3002 \u7b2c\u4e8c\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u9ad8\u4ef7\u3002 Source code in omicron/talib/morph.py def rsi_predict_price ( close : np . 
ndarray , thresh : Tuple [ float , float ] = None ) -> Tuple [ float , float ]: \"\"\"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\uff0c\u6839\u636e\u6700\u8fd1\u7684\u4e24\u4e2aRSI\u7684\u6781\u5c0f\u503c\u548c\u6781\u5927\u503c\u9884\u6d4b\u4e0b\u4e00\u4e2a\u5468\u671f\u53ef\u80fd\u8fbe\u5230\u7684\u6700\u4f4e\u4ef7\u683c\u548c\u6700\u9ad8\u4ef7\u683c\u3002 \u5176\u539f\u7406\u662f\uff0c\u4ee5\u9884\u6d4b\u6700\u8fd1\u7684\u4e24\u4e2a\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\uff0c\u6c42\u51fa\u5176\u76f8\u5bf9\u5e94\u7684RSI\u503c\uff0c\u6c42\u51fa\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7RSI\u7684\u5747\u503c\uff0c \u82e5\u53ea\u6709\u4e00\u4e2a\u5219\u53d6\u6700\u8fd1\u7684\u4e00\u4e2a\u3002\u518d\u7531RSI\u516c\u5f0f\uff0c\u53cd\u63a8\u4ef7\u683c\u3002\u6b64\u65f6\u8fd4\u56de\u503c\u4e3a(None, float)\uff0c\u5373\u53ea\u6709\u6700\u9ad8\u4ef7\uff0c\u6ca1\u6709\u6700\u4f4e\u4ef7\u3002\u53cd\u4e4b\u4ea6\u7136\u3002 Args: close (np.ndarray): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] Returns: \u8fd4\u56de\u6570\u7ec4[predicted_low_price, predicted_high_price], \u6570\u7ec4\u7b2c\u4e00\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u4f4e\u4ef7\u3002 \u7b2c\u4e8c\u4e2a\u503c\u4e3a\u5229\u7528\u8fbe\u5230\u4e4b\u524d\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u9884\u6d4b\u7684\u6700\u9ad8\u4ef7\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) if close . dtype != np . float64 : close = close . astype ( np . float64 ) valley_rsi , peak_rsi , _ = rsi_watermarks ( close , thresh = thresh ) pivot = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivot [ 0 ], pivot [ - 1 ] = 0 , 0 # \u6390\u5934\u53bb\u5c3e price_change = pd . Series ( close ) . diff ( 1 ) . values ave_price_change = ( abs ( price_change )[ - 6 :] . mean ()) * 5 ave_price_raise = ( np . maximum ( price_change , 0 )[ - 6 :] . 
mean ()) * 5 if valley_rsi is not None : predicted_low_change = ( ave_price_change ) - ave_price_raise / ( 0.01 * valley_rsi ) if predicted_low_change > 0 : predicted_low_change = 0 predicted_low_price = close [ - 1 ] + predicted_low_change else : predicted_low_price = None if peak_rsi is not None : predicted_high_change = ( ave_price_raise - ave_price_change ) / ( 0.01 * peak_rsi - 1 ) - ave_price_change if predicted_high_change < 0 : predicted_high_change = 0 predicted_high_price = close [ - 1 ] + predicted_high_change else : predicted_high_price = None return predicted_low_price , predicted_high_price","title":"rsi_predict_price()"},{"location":"api/talib/#omicron.talib.morph.rsi_top_distance","text":"\u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 None Returns: Type Description int \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_top_distance ( close : np . 
array , thresh : Tuple [ float , float ] = None ) -> int : \"\"\"\u6839\u636e\u7ed9\u5b9a\u7684\u6536\u76d8\u4ef7\uff0c\u8ba1\u7b97\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\uff0c \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\u3002 \u8fd4\u56de\u503c\u4e3a\u8ddd\u79bb\u6574\u6570\uff0c\u4e0d\u6ee1\u8db3\u6761\u4ef6\u5219\u8fd4\u56deNone\u3002 Args: close (np.array): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 Returns: \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51farsi\u9ad8\u6c34\u5e73\u7684\u8ddd\u79bb\u3002 \u5982\u679c\u4ece\u4e0a\u4e00\u4e2a\u6700\u9ad8\u70b9rsi\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5e76\u672a\u53d1\u51fa\u9ad8\u6c34\u5e73\u4fe1\u53f7\uff0c \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u4e0a\u4e00\u4e2a\u53d1\u51fa\u6700\u9ad8\u70b9rsi\u7684\u8ddd\u79bb\u3002 \u9664\u6b64\u4e4b\u5916\uff0c\u8fd4\u56deNone\u3002\"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) rsi = ta . RSI ( close , 6 ) watermarks = rsi_watermarks ( close , thresh ) if watermarks is not None : _ , high_watermark , _ = watermarks pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 # \u5cf0\u503cRSI>70 peak_rsi_index = np . where (( rsi > 70 ) & ( pivots == 1 ))[ 0 ] # RSI\u9ad8\u6c34\u5e73\u7684\u6700\u5c0f\u503c\uff1a\u9ad8\u6c34\u5e73*0.99 high_rsi_index = np . 
where ( rsi >= high_watermark * 0.99 )[ 0 ] if len ( peak_rsi_index ) > 0 : distance = len ( rsi ) - 1 - peak_rsi_index [ - 1 ] if len ( high_rsi_index ) > 0 : if high_rsi_index [ - 1 ] >= peak_rsi_index [ - 1 ]: distance = len ( rsi ) - 1 - high_rsi_index [ - 1 ] return distance","title":"rsi_top_distance()"},{"location":"api/talib/#omicron.talib.morph.rsi_top_divergent","text":"\u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u9876\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Parameters: Name Type Description Default close np.array \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 required thresh Tuple[float, float] \u8bf7\u53c2\u8003 peaks_and_valleys None rsi_limit float RSI\u53d1\u751f\u9876\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c70\uff0880\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6>70\u7684\u5c40\u90e8\u6700\u9ad8\u6536\u76d8\u4ef7\u3002 70 Returns: Type Description Tuple[int, int] \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Source code in omicron/talib/morph.py def rsi_top_divergent ( close : np . array , thresh : Tuple [ float , float ] = None , rsi_limit : float = 70 ) -> Tuple [ int , int ]: \"\"\"\u5bfb\u627e\u6700\u8fd1\u6ee1\u8db3\u6761\u4ef6\u7684rsi\u9876\u80cc\u79bb\u3002 \u8fd4\u56de\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 Args: close (np.array): \u65f6\u95f4\u5e8f\u5217\u6536\u76d8\u4ef7 thresh (Tuple[float, float]): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] rsi_limit (float, optional): RSI\u53d1\u751f\u9876\u80cc\u79bb\u65f6\u7684\u9608\u503c, \u9ed8\u8ba4\u503c70\uff0880\u6548\u679c\u66f4\u4f73\uff0c\u4f46\u662f\u68c0\u6d4b\u51fa\u6765\u6570\u91cf\u592a\u5c11\uff09\uff0c\u5373\u53ea\u8fc7\u6ee4RSI6>70\u7684\u5c40\u90e8\u6700\u9ad8\u6536\u76d8\u4ef7\u3002 Returns: \u8fd4\u56deint\u7c7b\u578b\u7684\u6574\u6570\uff0c\u8868\u793a\u6700\u540e\u4e00\u4e2a\u6570\u636e\u5230\u6700\u8fd1\u9876\u80cc\u79bb\u53d1\u751f\u70b9\u7684\u8ddd\u79bb\uff1b\u6ca1\u6709\u6ee1\u8db3\u6761\u4ef6\u7684\u9876\u80cc\u79bb\uff0c\u8fd4\u56deNone\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . astype ( np . float64 ) rsi = ta . RSI ( close , 6 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 length = len ( close ) peak_index = np . 
where (( pivots == 1 ) & ( rsi >= rsi_limit ))[ 0 ] if len ( peak_index ) >= 2 : if ( close [ peak_index [ - 1 ]] > close [ peak_index [ - 2 ]]) and ( rsi [ peak_index [ - 1 ]] < rsi [ peak_index [ - 2 ]] ): top_dev_distance = length - 1 - peak_index [ - 1 ] return top_dev_distance","title":"rsi_top_divergent()"},{"location":"api/talib/#omicron.talib.morph.rsi_watermarks","text":"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u9876\u548c\u5e95\u7684\u9608\u503c\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u8c37\u548c\u5cf0\u5904RSI\u5747\u503c\uff0c\u6700\u540e\u4e00\u4e2aRSI6\u503c\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u8fd4\u56de\u503c\u4e2d\uff0c\u4e00\u4e2a\u4e3alow_wartermark\uff08\u8c37\u5e95\u5904RSI\u503c\uff09\uff0c \u4e00\u4e2a\u4e3ahigh_wartermark\uff08\u9ad8\u5cf0\u5904RSI\u503c)\uff0c\u4e00\u4e2a\u4e3aRSI6\u7684\u6700\u540e\u4e00\u4e2a\u503c\uff0c\u7528\u4ee5\u5bf9\u6bd4\u524d\u4e24\u4e2a\u8b66\u6212\u503c\u3002 Parameters: Name Type Description Default close np.array \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 None Returns: Type Description Tuple[float, float, float] \u8fd4\u56de\u6570\u7ec4[low_watermark, high_watermark\uff0c rsi[-1]], \u7b2c\u4e00\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\uff0c \u7b2c\u4e8c\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\u3002 \u82e5\u4f20\u5165\u6536\u76d8\u4ef7\u53ea\u6709\u4e00\u4e2a\u6700\u503c\uff0c\u53ea\u8fd4\u56de\u4e00\u4e2a\u3002\u6ca1\u6709\u6700\u503c\uff0c\u5219\u8fd4\u56deNone, \u7b2c\u4e09\u4e2a\u4e3a\u5b9e\u9645\u7684\u6700\u540eRSI6\u7684\u503c\u3002 Source code in omicron/talib/morph.py def rsi_watermarks ( close : np . 
array , thresh : Tuple [ float , float ] = None ) -> Tuple [ float , float , float ]: \"\"\"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u9876\u548c\u5e95\u7684\u9608\u503c\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u8c37\u548c\u5cf0\u5904RSI\u5747\u503c\uff0c\u6700\u540e\u4e00\u4e2aRSI6\u503c\u3002 \u5176\u4e2dclose\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u8fd4\u56de\u503c\u4e2d\uff0c\u4e00\u4e2a\u4e3alow_wartermark\uff08\u8c37\u5e95\u5904RSI\u503c\uff09\uff0c \u4e00\u4e2a\u4e3ahigh_wartermark\uff08\u9ad8\u5cf0\u5904RSI\u503c)\uff0c\u4e00\u4e2a\u4e3aRSI6\u7684\u6700\u540e\u4e00\u4e2a\u503c\uff0c\u7528\u4ee5\u5bf9\u6bd4\u524d\u4e24\u4e2a\u8b66\u6212\u503c\u3002 Args: close (np.array): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : None\u9002\u7528\u6240\u6709\u80a1\u7968\uff0c\u4e0d\u5fc5\u66f4\u6539\uff0c\u4e5f\u53ef\u81ea\u884c\u8bbe\u7f6e\u3002 Returns: \u8fd4\u56de\u6570\u7ec4[low_watermark, high_watermark\uff0c rsi[-1]], \u7b2c\u4e00\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u4f4e\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\uff0c \u7b2c\u4e8c\u4e2a\u4e3a\u6700\u8fd1\u4e24\u4e2a\u6700\u9ad8\u6536\u76d8\u4ef7\u7684RSI\u5747\u503c\u3002 \u82e5\u4f20\u5165\u6536\u76d8\u4ef7\u53ea\u6709\u4e00\u4e2a\u6700\u503c\uff0c\u53ea\u8fd4\u56de\u4e00\u4e2a\u3002\u6ca1\u6709\u6700\u503c\uff0c\u5219\u8fd4\u56deNone, \u7b2c\u4e09\u4e2a\u4e3a\u5b9e\u9645\u7684\u6700\u540eRSI6\u7684\u503c\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) if close . dtype != np . float64 : close = close . astype ( np . float64 ) rsi = ta . RSI ( close , 6 ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) pivots [ 0 ], pivots [ - 1 ] = 0 , 0 # \u6390\u5934\u53bb\u5c3e # \u5cf0\u503cRSI>70; \u8c37\u5904\u7684RSI<30; peaks_rsi_index = np . where (( rsi > 70 ) & ( pivots == 1 ))[ 0 ] valleys_rsi_index = np . where (( rsi < 30 ) & ( pivots == - 1 ))[ 0 ] if len ( peaks_rsi_index ) == 0 : high_watermark = None elif len ( peaks_rsi_index ) == 1 : high_watermark = rsi [ peaks_rsi_index [ 0 ]] else : # \u6709\u4e24\u4e2a\u4ee5\u4e0a\u7684\u5cf0\uff0c\u901a\u8fc7\u6700\u8fd1\u7684\u4e24\u4e2a\u5cf0\u5747\u503c\u6765\u786e\u5b9a\u8d70\u52bf high_watermark = np . nanmean ( rsi [ peaks_rsi_index [ - 2 :]]) if len ( valleys_rsi_index ) == 0 : low_watermark = None elif len ( valleys_rsi_index ) == 1 : low_watermark = rsi [ valleys_rsi_index [ 0 ]] else : # \u6709\u4e24\u4e2a\u4ee5\u4e0a\u7684\u5cf0\uff0c\u901a\u8fc7\u6700\u8fd1\u7684\u4e24\u4e2a\u5cf0\u6765\u786e\u5b9a\u8d70\u52bf low_watermark = np . nanmean ( rsi [ valleys_rsi_index [ - 2 :]]) return low_watermark , high_watermark , rsi [ - 1 ]","title":"rsi_watermarks()"},{"location":"api/talib/#omicron.talib.morph.support_resist_lines","text":"\u8ba1\u7b97\u65f6\u95f4\u5e8f\u5217\u7684\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf \u4f7f\u7528\u6700\u8fd1\u7684\u4e24\u4e2a\u9ad8\u70b9\u8fde\u63a5\u6210\u9634\u529b\u7ebf\uff0c\u4e24\u4e2a\u4f4e\u70b9\u8fde\u63a5\u6210\u652f\u6491\u7ebf\u3002 Examples: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 def show_support_resist_lines ( ts ): import plotly.graph_objects as go fig = go . Figure () support , resist , x_start = support_resist_lines ( ts , 0.03 , - 0.03 ) fig . add_trace ( go . Scatter ( x = np . arange ( len ( ts )), y = ts )) x = np . 
arange ( len ( ts ))[ x_start :] fig . add_trace ( go . Line ( x = x , y = support ( x ))) fig . add_trace ( go . Line ( x = x , y = resist ( x ))) fig . show () np . random . seed ( 1978 ) X = np . cumprod ( 1 + np . random . randn ( 100 ) * 0.01 ) show_support_resist_lines ( X ) the above code will show this Parameters: Name Type Description Default ts np.ndarray \u65f6\u95f4\u5e8f\u5217 required upthres float \u8bf7\u53c2\u8003 peaks_and_valleys None downthres float \u8bf7\u53c2\u8003 peaks_and_valleys None Returns: Type Description Tuple[Callable, Callable, numpy.ndarray] \u8fd4\u56de\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf\u7684\u8ba1\u7b97\u51fd\u6570\u53ca\u8d77\u59cb\u70b9\u5750\u6807\uff0c\u5982\u679c\u6ca1\u6709\u652f\u6491\u7ebf\u6216\u963b\u529b\u7ebf\uff0c\u5219\u8fd4\u56deNone Source code in omicron/talib/morph.py def support_resist_lines ( ts : np . ndarray , upthres : float = None , downthres : float = None ) -> Tuple [ Callable , Callable , np . ndarray ]: \"\"\"\u8ba1\u7b97\u65f6\u95f4\u5e8f\u5217\u7684\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf \u4f7f\u7528\u6700\u8fd1\u7684\u4e24\u4e2a\u9ad8\u70b9\u8fde\u63a5\u6210\u9634\u529b\u7ebf\uff0c\u4e24\u4e2a\u4f4e\u70b9\u8fde\u63a5\u6210\u652f\u6491\u7ebf\u3002 Examples: ```python def show_support_resist_lines(ts): import plotly.graph_objects as go fig = go.Figure() support, resist, x_start = support_resist_lines(ts, 0.03, -0.03) fig.add_trace(go.Scatter(x=np.arange(len(ts)), y=ts)) x = np.arange(len(ts))[x_start:] fig.add_trace(go.Line(x=x, y = support(x))) fig.add_trace(go.Line(x=x, y = resist(x))) fig.show() np.random.seed(1978) X = np.cumprod(1 + np.random.randn(100) * 0.01) show_support_resist_lines(X) ``` the above code will show this ![](https://images.jieyu.ai/images/202204/support_resist.png) Args: ts (np.ndarray): \u65f6\u95f4\u5e8f\u5217 upthres (float, optional): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] downthres (float, optional): \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] Returns: \u8fd4\u56de\u652f\u6491\u7ebf\u548c\u963b\u529b\u7ebf\u7684\u8ba1\u7b97\u51fd\u6570\u53ca\u8d77\u59cb\u70b9\u5750\u6807\uff0c\u5982\u679c\u6ca1\u6709\u652f\u6491\u7ebf\u6216\u963b\u529b\u7ebf\uff0c\u5219\u8fd4\u56deNone \"\"\" if ts . dtype != np . float64 : ts = ts . astype ( np . float64 ) pivots = peaks_and_valleys ( ts , upthres , downthres ) pivots [ 0 ] = 0 pivots [ - 1 ] = 0 arg_max = np . argwhere ( pivots == 1 ) . flatten () arg_min = np . argwhere ( pivots == - 1 ) . flatten () resist = None support = None if len ( arg_max ) >= 2 : arg_max = arg_max [ - 2 :] y = ts [ arg_max ] coeff = np . polyfit ( arg_max , y , deg = 1 ) resist = np . poly1d ( coeff ) if len ( arg_min ) >= 2 : arg_min = arg_min [ - 2 :] y = ts [ arg_min ] coeff = np . polyfit ( arg_min , y , deg = 1 ) support = np . poly1d ( coeff ) return support , resist , np . 
min ([ * arg_min , * arg_max ])","title":"support_resist_lines()"},{"location":"api/talib/#omicron.talib.morph.valley_detect","text":"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u8fd1\u671f\u5df2\u53d1\u751f\u53cd\u8f6c\u7684\u6700\u4f4e\u70b9\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 \u5176\u4e2dbars\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u6b64\u51fd\u6570\u91c7\u7528\u4e86zigzag\u4e2d\u7684\u8c37\u5cf0\u68c0\u6d4b\u65b9\u6cd5\uff0c\u5176\u4e2d\u53c2\u6570\u9ed8\u8ba4(0.05,-0.02), \u6b64\u53c2\u6570\u5bf9\u6240\u6709\u80a1\u7968\u6570\u636e\u90fd\u9002\u7528\u3002\u82e5\u6ee1\u8db3\u53c2\u6570\uff0c\u8fd4\u56de\u503c\u4e2d\uff0c\u8ddd\u79bb\u4e3a\u5927\u4e8e0\u7684\u6574\u6570\uff0c\u6536\u76ca\u7387\u662f0~1\u7684\u5c0f\u6570\u3002 Parameters: Name Type Description Default close np.ndarray \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 required thresh Tuple[float, float]) \u8bf7\u53c2\u8003 peaks_and_valleys (0.05, -0.02) Returns: Type Description int \u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 Source code in omicron/talib/morph.py def valley_detect ( close : np . ndarray , thresh : Tuple [ float , float ] = ( 0.05 , - 0.02 ) ) -> int : \"\"\"\u7ed9\u5b9a\u4e00\u6bb5\u884c\u60c5\u6570\u636e\u548c\u7528\u4ee5\u68c0\u6d4b\u8fd1\u671f\u5df2\u53d1\u751f\u53cd\u8f6c\u7684\u6700\u4f4e\u70b9\uff0c\u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 \u5176\u4e2dbars\u7684\u957f\u5ea6\u4e00\u822c\u4e0d\u5c0f\u4e8e60\uff0c\u4e0d\u5927\u4e8e120\u3002\u6b64\u51fd\u6570\u91c7\u7528\u4e86zigzag\u4e2d\u7684\u8c37\u5cf0\u68c0\u6d4b\u65b9\u6cd5\uff0c\u5176\u4e2d\u53c2\u6570\u9ed8\u8ba4(0.05,-0.02), \u6b64\u53c2\u6570\u5bf9\u6240\u6709\u80a1\u7968\u6570\u636e\u90fd\u9002\u7528\u3002\u82e5\u6ee1\u8db3\u53c2\u6570\uff0c\u8fd4\u56de\u503c\u4e2d\uff0c\u8ddd\u79bb\u4e3a\u5927\u4e8e0\u7684\u6574\u6570\uff0c\u6536\u76ca\u7387\u662f0~1\u7684\u5c0f\u6570\u3002 Args: close (np.ndarray): \u5177\u6709\u65f6\u95f4\u5e8f\u5217\u7684\u6536\u76d8\u4ef7 thresh (Tuple[float, float]) : \u8bf7\u53c2\u8003[peaks_and_valleys][omicron.talib.morph.peaks_and_valleys] Returns: \u8fd4\u56de\u8be5\u6bb5\u884c\u60c5\u4e2d\uff0c\u6700\u4f4e\u70b9\u5230\u6700\u540e\u4e00\u4e2a\u6570\u636e\u7684\u8ddd\u79bb\u548c\u6536\u76ca\u7387\u6570\u7ec4\uff0c \u5982\u679c\u7ed9\u5b9a\u884c\u60c5\u4e2d\u672a\u627e\u5230\u6ee1\u8db3\u53c2\u6570\u7684\u6700\u4f4e\u70b9\uff0c\u5219\u8fd4\u56de\u4e24\u4e2a\u7a7a\u503c\u6570\u7ec4\u3002 \"\"\" assert len ( close ) >= 60 , \"must provide an array with at least 60 length!\" if close . dtype != np . float64 : close = close . 
astype ( np . float64 ) if thresh is None : std = np . std ( close [ - 59 :] / close [ - 60 : - 1 ] - 1 ) thresh = ( 2 * std , - 2 * std ) pivots = peak_valley_pivots ( close , thresh [ 0 ], thresh [ 1 ]) flags = pivots [ pivots != 0 ] increased = None lowest_distance = None if ( flags [ - 2 ] == - 1 ) and ( flags [ - 1 ] == 1 ): length = len ( pivots ) valley_index = np . where ( pivots == - 1 )[ 0 ] increased = ( close [ - 1 ] - close [ valley_index [ - 1 ]]) / close [ valley_index [ - 1 ]] lowest_distance = int ( length - 1 - valley_index [ - 1 ]) return lowest_distance , increased","title":"valley_detect()"},{"location":"api/talib/#omicron.talib.morph.vcross","text":"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0eg\u5b58\u5728\u7c7b\u578bv\u578b\u7684\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a \u76f8\u4ea4\u3002\u4e00\u822c\u53cd\u6620\u4e3a\u6d17\u76d8\u62c9\u5347\u7684\u7279\u5f81\u3002 Examples: >>> f = np . array ([ 3 * i ** 2 - 20 * i + 2 for i in range ( 10 )]) >>> g = np . array ([ i - 5 for i in range ( 10 )]) >>> flag , indices = vcross ( f , g ) >>> assert flag is True >>> assert indices [ 0 ] == 0 >>> assert indices [ 1 ] == 6 Parameters: Name Type Description Default f first sequence required g the second sequence required Returns: Type Description Tuple (flag, indices), \u5176\u4e2dflag\u53d6\u503c\u4e3aTrue\u65f6\uff0c\u5b58\u5728vcross\uff0cindices\u4e3a\u4ea4\u70b9\u7684\u7d22\u5f15\u3002 Source code in omicron/talib/morph.py def vcross ( f : np . array , g : np . array ) -> Tuple : \"\"\"\u5224\u65ad\u5e8f\u5217f\u662f\u5426\u4e0eg\u5b58\u5728\u7c7b\u578bv\u578b\u7684\u76f8\u4ea4\u3002\u5373\u5b58\u5728\u4e24\u4e2a\u4ea4\u70b9\uff0c\u7b2c\u4e00\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0b\u76f8\u4ea4\uff0c\u7b2c\u4e8c\u4e2a\u4ea4\u70b9\u4e3a\u5411\u4e0a \u76f8\u4ea4\u3002\u4e00\u822c\u53cd\u6620\u4e3a\u6d17\u76d8\u62c9\u5347\u7684\u7279\u5f81\u3002 Examples: >>> f = np.array([ 3 * i ** 2 - 20 * i + 2 for i in range(10)]) >>> g = np.array([ i - 5 for i in range(10)]) >>> flag, indices = vcross(f, g) >>> assert flag is True >>> assert indices[0] == 0 >>> assert indices[1] == 6 Args: f: first sequence g: the second sequence Returns: (flag, indices), \u5176\u4e2dflag\u53d6\u503c\u4e3aTrue\u65f6\uff0c\u5b58\u5728vcross\uff0cindices\u4e3a\u4ea4\u70b9\u7684\u7d22\u5f15\u3002 \"\"\" indices = np . argwhere ( np . diff ( np . sign ( f - g ))) . flatten () if len ( indices ) == 2 : idx0 , idx1 = indices if f [ idx0 ] > g [ idx0 ] and f [ idx1 ] < g [ idx1 ]: return True , ( idx0 , idx1 ) return False , ( None , None )","title":"vcross()"},{"location":"api/timeframe/","text":"TimeFrame \u00b6 Source code in omicron/models/timeframe.py class TimeFrame : minute_level_frames = [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ] day_level_frames = [ FrameType . DAY , FrameType . WEEK , FrameType . MONTH , FrameType . QUARTER , FrameType . YEAR , ] ticks = { FrameType . MIN1 : [ i for i in itertools . chain ( range ( 571 , 691 ), range ( 781 , 901 ))], FrameType . MIN5 : [ i for i in itertools . chain ( range ( 575 , 695 , 5 ), range ( 785 , 905 , 5 )) ], FrameType . MIN15 : [ i for i in itertools . chain ( range ( 585 , 705 , 15 ), range ( 795 , 915 , 15 )) ], FrameType . 
MIN30 : [ int ( s [: 2 ]) * 60 + int ( s [ 2 :]) for s in [ \"1000\" , \"1030\" , \"1100\" , \"1130\" , \"1330\" , \"1400\" , \"1430\" , \"1500\" ] ], FrameType . MIN60 : [ int ( s [: 2 ]) * 60 + int ( s [ 2 :]) for s in [ \"1030\" , \"1130\" , \"1400\" , \"1500\" ] ], } day_frames = None week_frames = None month_frames = None quarter_frames = None year_frames = None @classmethod def service_degrade ( cls ): \"\"\"\u5f53cache\u4e2d\u4e0d\u5b58\u5728\u65e5\u5386\u65f6\uff0c\u542f\u7528\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u3002 \u6ce8\u610f\uff1a\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u5f88\u53ef\u80fd\u4e0d\u662f\u6700\u65b0\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u5305\u542b\u9519\u8bef\u3002\u6bd4\u5982\uff0c\u5b58\u5728\u8fd9\u6837\u7684\u60c5\u51b5\uff0c\u5728\u672c\u7248\u672c\u7684omicron\u53d1\u884c\u65f6\uff0c\u65e5\u5386\u66f4\u65b0\u5230\u4e862021\u5e7412\u670831\u65e5\uff0c\u5728\u8fd9\u4e4b\u524d\u7684\u65e5\u5386\u90fd\u662f\u51c6\u786e\u7684\uff0c\u4f46\u5728\u6b64\u4e4b\u540e\u7684\u65e5\u5386\uff0c\u5219\u6709\u53ef\u80fd\u51fa\u73b0\u9519\u8bef\u3002\u56e0\u6b64\uff0c\u53ea\u5e94\u8be5\u5728\u7279\u6b8a\u7684\u60c5\u51b5\u4e0b\uff08\u6bd4\u5982\u6d4b\u8bd5\uff09\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u4ee5\u83b7\u5f97\u4e00\u4e2a\u964d\u7ea7\u7684\u670d\u52a1\u3002 \"\"\" _dir = os . path . dirname ( __file__ ) file = os . path . join ( _dir , \"..\" , \"config\" , \"calendar.json\" ) with open ( file , \"r\" ) as f : data = json . load ( f ) for k , v in data . items (): setattr ( cls , k , np . array ( v )) @classmethod async def _load_calendar ( cls ): \"\"\"\u4ece\u6570\u636e\u7f13\u5b58\u4e2d\u52a0\u8f7d\u66f4\u65b0\u65e5\u5386\"\"\" from omicron import cache names = [ \"day_frames\" , \"week_frames\" , \"month_frames\" , \"quarter_frames\" , \"year_frames\" , ] for name , frame_type in zip ( names , cls . day_level_frames ): key = f \"calendar: { frame_type . value } \" result = await cache . security . lrange ( key , 0 , - 1 ) if result is not None and len ( result ): frames = [ int ( x ) for x in result ] setattr ( cls , name , np . array ( frames )) else : # pragma: no cover raise DataNotReadyError ( f \"calendar data is not ready: { name } missed\" ) @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316\u65e5\u5386\"\"\" await cls . _load_calendar () @classmethod def int2time ( cls , tm : int ) -> datetime . datetime : \"\"\"\u5c06\u6574\u6570\u8868\u793a\u7684\u65f6\u95f4\u8f6c\u6362\u4e3a`datetime`\u7c7b\u578b\u8868\u793a examples: >>> TimeFrame.int2time(202005011500) datetime.datetime(2020, 5, 1, 15, 0) Args: tm: time in YYYYMMDDHHmm format Returns: \u8f6c\u6362\u540e\u7684\u65f6\u95f4 \"\"\" s = str ( tm ) # its 8 times faster than arrow.get() return datetime . datetime ( int ( s [: 4 ]), int ( s [ 4 : 6 ]), int ( s [ 6 : 8 ]), int ( s [ 8 : 10 ]), int ( s [ 10 : 12 ]) ) @classmethod def time2int ( cls , tm : Union [ datetime . datetime , Arrow ]) -> int : \"\"\"\u5c06\u65f6\u95f4\u7c7b\u578b\u8f6c\u6362\u4e3a\u6574\u6570\u7c7b\u578b tm\u53ef\u4ee5\u662fArrow\u7c7b\u578b\uff0c\u4e5f\u53ef\u4ee5\u662fdatetime.datetime\u6216\u8005\u4efb\u4f55\u5176\u5b83\u7c7b\u578b\uff0c\u53ea\u8981\u5b83\u6709year,month...\u7b49 \u5c5e\u6027 Examples: >>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15)) 202005011500 Args: tm: Returns: \u8f6c\u6362\u540e\u7684\u6574\u6570\uff0c\u6bd4\u59822020050115 \"\"\" return int ( f \" { tm . year : 04 }{ tm . month : 02 }{ tm . 
day : 02 }{ tm . hour : 02 }{ tm . minute : 02 } \" ) @classmethod def date2int ( cls , d : Union [ datetime . datetime , datetime . date , Arrow ]) -> int : \"\"\"\u5c06\u65e5\u671f\u8f6c\u6362\u4e3a\u6574\u6570\u8868\u793a \u5728zillionare\u4e2d\uff0c\u5982\u679c\u8981\u5bf9\u65f6\u95f4\u548c\u65e5\u671f\u8fdb\u884c\u6301\u4e45\u5316\u64cd\u4f5c\uff0c\u6211\u4eec\u4e00\u822c\u5c06\u5176\u8f6c\u6362\u4e3aint\u7c7b\u578b Examples: >>> TimeFrame.date2int(datetime.date(2020,5,1)) 20200501 Args: d: date Returns: \u65e5\u671f\u7684\u6574\u6570\u8868\u793a\uff0c\u6bd4\u598220220211 \"\"\" return int ( f \" { d . year : 04 }{ d . month : 02 }{ d . day : 02 } \" ) @classmethod def int2date ( cls , d : Union [ int , str ]) -> datetime . date : \"\"\"\u5c06\u6570\u5b57\u8868\u793a\u7684\u65e5\u671f\u8f6c\u6362\u6210\u4e3a\u65e5\u671f\u683c\u5f0f Examples: >>> TimeFrame.int2date(20200501) datetime.date(2020, 5, 1) Args: d: YYYYMMDD\u8868\u793a\u7684\u65e5\u671f Returns: \u8f6c\u6362\u540e\u7684\u65e5\u671f \"\"\" s = str ( d ) # it's 8 times faster than arrow.get return datetime . date ( int ( s [: 4 ]), int ( s [ 4 : 6 ]), int ( s [ 6 :])) @classmethod def day_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u5982\u679c n == 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff08\u5982\u679c\u662f\u975e\u4ea4\u6613\u65e5\uff0c\u5219\u8fd4\u56de\u521a\u7ed3\u675f\u7684\u4e00\u4e2a\u4ea4\u6613\u65e5\uff09 \u5982\u679c n > 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u540e\u7b2c n \u4e2a\u4ea4\u6613\u65e5 \u5982\u679c n < 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u524d\u7b2c n \u4e2a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219] >>> TimeFrame.day_shift(datetime.date(2019,12,13), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1) datetime.date(2019, 12, 16) >>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1) datetime.date(2019, 12, 16) Args: start: the origin day offset: days to shift, can be negative Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" # accelerated from 0.12 to 0.07, per 10000 loop, type conversion time included start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . day_frames , start , offset )) @classmethod def week_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u6309\u5468\u7ebf\u5e27\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u53c2\u8003 [omicron.models.timeframe.TimeFrame.day_shift][] Examples: >>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123,20200207, 20200214]) >>> moment = arrow.get('2020-1-21').date() >>> TimeFrame.week_shift(moment, 1) datetime.date(2020, 1, 23) >>> TimeFrame.week_shift(moment, 0) datetime.date(2020, 1, 17) >>> TimeFrame.week_shift(moment, -1) datetime.date(2020, 1, 10) Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . week_frames , start , offset )) @classmethod def month_shift ( cls , start : datetime . date , offset : int ) -> datetime . 
date : \"\"\"\u6c42`start`\u6240\u5728\u7684\u6708\u79fb\u4f4d\u540e\u7684frame \u672c\u51fd\u6570\u9996\u5148\u5c06`start`\u5bf9\u9f50\uff0c\u7136\u540e\u8fdb\u884c\u79fb\u4f4d\u3002 Examples: >>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430]) >>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0) datetime.date(2015, 1, 30) >>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0) datetime.date(2015, 2, 27) >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0) datetime.date(2015, 2, 27) >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1) datetime.date(2015, 3, 31) Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . month_frames , start , offset )) @classmethod def get_ticks ( cls , frame_type : FrameType ) -> Union [ List , np . array ]: \"\"\"\u53d6\u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \u5bf9\u5206\u949f\u7ebf\uff0c\u8fd4\u56de\u503c\u4ec5\u5305\u542b\u65f6\u95f4\uff0c\u4e0d\u5305\u542b\u65e5\u671f\uff08\u5747\u4e3a\u6574\u6570\u8868\u793a\uff09 Examples: >>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331]) >>> TimeFrame.get_ticks(FrameType.MONTH)[:3] array([20050131, 20050228, 20050331]) Args: frame_type : [description] Raises: ValueError: [description] Returns: \u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \"\"\" if frame_type in cls . minute_level_frames : return cls . ticks [ frame_type ] if frame_type == FrameType . DAY : return cls . day_frames elif frame_type == FrameType . WEEK : return cls . week_frames elif frame_type == FrameType . MONTH : return cls . month_frames else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported!\" ) @classmethod def shift ( cls , moment : Union [ Arrow , datetime . date , datetime . datetime ], n : int , frame_type : FrameType , ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u5c06\u6307\u5b9a\u7684moment\u79fb\u52a8N\u4e2a`frame_type`\u4f4d\u7f6e\u3002 \u5f53N\u4e3a\u8d1f\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u524d\u79fb\u52a8\uff1b\u5f53N\u4e3a\u6b63\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u540e\u79fb\u52a8\u3002\u5982\u679cn\u4e3a\u96f6\uff0c\u610f\u5473\u7740\u79fb\u52a8\u5230\u6700\u63a5\u8fd1 \u7684\u4e00\u4e2a\u5df2\u7ed3\u675f\u7684frame\u3002 \u5982\u679cmoment\u6ca1\u6709\u5bf9\u9f50\u5230frame_type\u5bf9\u5e94\u7684\u65f6\u95f4\uff0c\u5c06\u9996\u5148\u8fdb\u884c\u5bf9\u9f50\u3002 See also: - [day_shift][omicron.models.timeframe.TimeFrame.day_shift] - [week_shift][omicron.models.timeframe.TimeFrame.week_shift] - [month_shift][omicron.models.timeframe.TimeFrame.month_shift] Examples: >>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY) datetime.date(2020, 1, 6) >>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30) datetime.datetime(2020, 1, 6, 11, 30) Args: moment: n: frame_type: Returns: \u79fb\u4f4d\u540e\u7684Frame \"\"\" if frame_type == FrameType . DAY : return cls . day_shift ( moment , n ) elif frame_type == FrameType . WEEK : return cls . week_shift ( moment , n ) elif frame_type == FrameType . MONTH : return cls . month_shift ( moment , n ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm = moment . hour * 60 + moment . minute new_tick_pos = cls . ticks [ frame_type ] . index ( tm ) + n days = new_tick_pos // len ( cls . 
ticks [ frame_type ]) min_part = new_tick_pos % len ( cls . ticks [ frame_type ]) date_part = cls . day_shift ( moment . date (), days ) minutes = cls . ticks [ frame_type ][ min_part ] h , m = minutes // 60 , minutes % 60 return datetime . datetime ( date_part . year , date_part . month , date_part . day , h , m , tzinfo = moment . tzinfo , ) else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported.\" ) @classmethod def count_day_frames ( cls , start : Union [ datetime . date , Arrow ], end : Union [ datetime . date , Arrow ] ) -> int : \"\"\"calc trade days between start and end in close-to-close way. if start == end, this will returns 1. Both start/end will be aligned to open trade day before calculation. Examples: >>> start = datetime.date(2019, 12, 21) >>> end = datetime.date(2019, 12, 21) >>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225] >>> TimeFrame.count_day_frames(start, end) 1 >>> # non-trade days are removed >>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205] >>> start = datetime.date(2020, 1, 23) >>> end = datetime.date(2020, 2, 4) >>> TimeFrame.count_day_frames(start, end) 3 args: start: end: returns: count of days \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . day_frames , start , end )) @classmethod def count_week_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\" calc trade weeks between start and end in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1 for examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] args: start: end: returns: count of weeks \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . week_frames , start , end )) @classmethod def count_month_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade months between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start: end: Returns: months between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . month_frames , start , end )) @classmethod def count_quarter_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade quarters between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: quarters between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . quarter_frames , start , end )) @classmethod def count_year_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade years between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. 
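The close-to-close counting convention documented above is easiest to see with a stubbed calendar. A minimal sketch (an editorial addition, not part of this diff; it assumes omicron and numpy are installed) that reproduces the `count_day_frames` doctest:

```python
# Minimal sketch: close-to-close trade-day counting with a stubbed calendar,
# mirroring the count_day_frames doctest quoted above. Assumes omicron is
# installed; no cache/init() is needed because day_frames is set directly.
import datetime
import numpy as np
from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array(
    [20200121, 20200122, 20200123, 20200203, 20200204, 20200205]
)
start, end = datetime.date(2020, 1, 23), datetime.date(2020, 2, 4)
# non-trade days (the holiday break) are skipped: 0123, 0203 and 0204 count
assert TimeFrame.count_day_frames(start, end) == 3
```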
For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: years between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . year_frames , start , end )) @classmethod def count_frames ( cls , start : Union [ datetime . date , datetime . datetime , Arrow ], end : Union [ datetime . date , datetime . datetime , Arrow ], frame_type , ) -> int : \"\"\"\u8ba1\u7b97start\u4e0eend\u4e4b\u95f4\u6709\u591a\u5c11\u4e2a\u5468\u671f\u4e3aframe_type\u7684frames See also: - [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] - [count_week_frames][omicron.models.timeframe.TimeFrame.count_week_frames] - [count_month_frames][omicron.models.timeframe.TimeFrame.count_month_frames] Args: start : start frame end : end frame frame_type : the type of frame Raises: ValueError: \u5982\u679cframe_type\u4e0d\u652f\u6301\uff0c\u5219\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Returns: \u4ecestart\u5230end\u7684\u5e27\u6570 \"\"\" if frame_type == FrameType . DAY : return cls . count_day_frames ( start , end ) elif frame_type == FrameType . WEEK : return cls . count_week_frames ( start , end ) elif frame_type == FrameType . MONTH : return cls . count_month_frames ( start , end ) elif frame_type == FrameType . QUARTER : return cls . count_quarter_frames ( start , end ) elif frame_type == FrameType . YEAR : return cls . count_year_frames ( start , end ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm_start = start . hour * 60 + start . minute tm_end = end . hour * 60 + end . minute days = cls . count_day_frames ( start . date (), end . date ()) - 1 tm_start_pos = cls . ticks [ frame_type ] . index ( tm_start ) tm_end_pos = cls . ticks [ frame_type ] . index ( tm_end ) min_bars = tm_end_pos - tm_start_pos + 1 return days * len ( cls . ticks [ frame_type ]) + min_bars else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported yet\" ) @classmethod def is_trade_day ( cls , dt : Union [ datetime . date , datetime . datetime , Arrow ]) -> bool : \"\"\"\u5224\u65ad`dt`\u662f\u5426\u4e3a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.is_trade_day(arrow.get('2020-1-1')) False Args: dt : Returns: bool \"\"\" return cls . date2int ( dt ) in cls . day_frames @classmethod def is_open_time ( cls , tm : Union [ datetime . datetime , Arrow ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u5904\u5728\u4ea4\u6613\u65f6\u95f4\u6bb5\u3002 \u4ea4\u6613\u65f6\u95f4\u6bb5\u662f\u6307\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u6bb5\u4e4b\u5916\u7684\u5f00\u76d8\u65f6\u95f4 Examples: >>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108]) >>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive) False >>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive) True Args: tm : [description]. Defaults to None. Returns: bool \"\"\" tm = tm or arrow . now () if not cls . is_trade_day ( tm ): return False tick = tm . hour * 60 + tm . minute return tick in cls . ticks [ FrameType . MIN1 ] @classmethod def is_opening_call_auction_time ( cls , tm : Union [ Arrow , datetime . datetime ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u5f00\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Args: tm : [description]. Defaults to None. 
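A similarly small sketch (editorial, assuming omicron and arrow are installed) exercising `is_trade_day` and `is_open_time` against a stubbed day calendar, following the doctests above:

```python
# Sketch of the trading-session checks documented above. The day calendar is
# stubbed, so TimeFrame.init() / Redis are not required for this illustration.
import arrow
import numpy as np
from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])

assert not TimeFrame.is_trade_day(arrow.get("2020-1-1"))           # New Year's Day: not a trade day
assert TimeFrame.is_open_time(arrow.get("2020-1-3 14:59").naive)   # trade day, inside the session
```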
Returns: bool \"\"\" if tm is None : tm = cls . now () if not cls . is_trade_day ( tm ): return False minutes = tm . hour * 60 + tm . minute return 9 * 60 + 15 < minutes <= 9 * 60 + 25 @classmethod def is_closing_call_auction_time ( cls , tm : Union [ datetime . datetime , Arrow ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Fixme: \u6b64\u5904\u5b9e\u73b0\u6709\u8bef\uff0c\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u5e94\u8be5\u8fd8\u5305\u542b\u4e0a\u5348\u6536\u76d8\u65f6\u95f4 Args: tm : [description]. Defaults to None. Returns: bool \"\"\" tm = tm or cls . now () if not cls . is_trade_day ( tm ): return False minutes = tm . hour * 60 + tm . minute return 15 * 60 - 3 <= minutes < 15 * 60 @classmethod def floor ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \u6bd4\u5982\uff0c\u5982\u679c`moment`\u4e3a10:37\uff0c\u5219\u5f53`frame_type`\u4e3a30\u5206\u949f\u65f6\uff0c\u5bf9\u5e94\u7684\u4e0a\u754c\u4e3a10:00 Examples: >>> # \u5982\u679cmoment\u4e3a\u65e5\u671f\uff0c\u5219\u5f53\u6210\u5df2\u6536\u76d8\u5904\u7406 >>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111]) >>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> # moment\u6307\u5b9a\u7684\u65f6\u95f4\u8fd8\u672a\u6536\u76d8\uff0cfloor\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 >>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY) datetime.date(2005, 1, 6) >>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 14, 30) >>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment: frame_type: Returns: `moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \"\"\" if frame_type in cls . minute_level_frames : tm , day_offset = cls . minute_frames_floor ( cls . ticks [ frame_type ], moment . hour * 60 + moment . minute ) h , m = tm // 60 , tm % 60 if cls . day_shift ( moment , 0 ) < moment . date () or day_offset == - 1 : h = 15 m = 0 new_day = cls . day_shift ( moment , day_offset ) else : new_day = moment . date () return datetime . datetime ( new_day . year , new_day . month , new_day . day , h , m ) if type ( moment ) == datetime . date : moment = datetime . datetime ( moment . year , moment . month , moment . day , 15 ) # \u5982\u679c\u662f\u4ea4\u6613\u65e5\uff0c\u4f46\u8fd8\u672a\u6536\u76d8 if ( cls . date2int ( moment ) in cls . day_frames and moment . hour * 60 + moment . minute < 900 ): moment = cls . day_shift ( moment , - 1 ) day = cls . date2int ( moment ) if frame_type == FrameType . DAY : arr = cls . day_frames elif frame_type == FrameType . WEEK : arr = cls . week_frames elif frame_type == FrameType . MONTH : arr = cls . month_frames else : # pragma: no cover raise ValueError ( f \"frame type { frame_type } not supported.\" ) floored = ext . floor ( arr , day ) return cls . int2date ( floored ) @classmethod def last_min_frame ( cls , day : Union [ str , Arrow , datetime . 
date ], frame_type : FrameType ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u83b7\u53d6`day`\u65e5\u5468\u671f\u4e3a`frame_type`\u7684\u7ed3\u675fframe\u3002 Example: >>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30) datetime.datetime(2020, 1, 3, 15, 0) Args: day: frame_type: Returns: `day`\u65e5\u5468\u671f\u4e3a`frame_type`\u7684\u7ed3\u675fframe \"\"\" if isinstance ( day , str ): day = cls . date2int ( arrow . get ( day ) . date ()) elif isinstance ( day , arrow . Arrow ) or isinstance ( day , datetime . datetime ): day = cls . date2int ( day . date ()) elif isinstance ( day , datetime . date ): day = cls . date2int ( day ) else : raise TypeError ( f \" { type ( day ) } is not supported.\" ) if frame_type in cls . minute_level_frames : last_close_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( last_close_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 15 , minute = 0 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" ) @classmethod def frame_len ( cls , frame_type : FrameType ) -> int : \"\"\"\u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \u5bf9\u65e5\u7ebf\u4ee5\u4e0a\u7ea7\u522b\u6ca1\u6709\u610f\u4e49\uff0c\u4f46\u4f1a\u8fd4\u56de240 Examples: >>> TimeFrame.frame_len(FrameType.MIN5) 5 Args: frame_type: Returns: \u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \"\"\" if frame_type == FrameType . MIN1 : return 1 elif frame_type == FrameType . MIN5 : return 5 elif frame_type == FrameType . MIN15 : return 15 elif frame_type == FrameType . MIN30 : return 30 elif frame_type == FrameType . MIN60 : return 60 else : return 240 @classmethod def first_min_frame ( cls , day : Union [ str , Arrow , Frame ], frame_type : FrameType ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u83b7\u53d6\u6307\u5b9a\u65e5\u671f\u7c7b\u578b\u4e3a`frame_type`\u7684`frame`\u3002 Examples: >>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103]) >>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1) datetime.datetime(2019, 12, 31, 9, 31) Args: day: which day? frame_type: which frame_type? Returns: `day`\u5f53\u65e5\u7684\u7b2c\u4e00\u5e27 \"\"\" day = cls . date2int ( arrow . get ( day ) . date ()) if frame_type == FrameType . MIN1 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 31 ) elif frame_type == FrameType . MIN5 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 35 ) elif frame_type == FrameType . MIN15 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 45 ) elif frame_type == FrameType . MIN30 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 10 ) elif frame_type == FrameType . MIN60 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . 
day , hour = 10 , minute = 30 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" ) @classmethod def get_frames ( cls , start : Frame , end : Frame , frame_type : FrameType ) -> List [ int ]: \"\"\"\u53d6[start, end]\u95f4\u6240\u6709\u7c7b\u578b\u4e3aframe_type\u7684frames \u8c03\u7528\u672c\u51fd\u6570\u524d\uff0c\u8bf7\u5148\u901a\u8fc7`floor`\u6216\u8005`ceiling`\u5c06\u65f6\u95f4\u5e27\u5bf9\u9f50\u5230`frame_type`\u7684\u8fb9\u754c\u503c Example: >>> start = arrow.get('2020-1-13 10:00').naive >>> end = arrow.get('2020-1-13 13:30').naive >>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113,20200114, 20200115, 20200116]) >>> TimeFrame.get_frames(start, end, FrameType.MIN30) [202001131000, 202001131030, 202001131100, 202001131130, 202001131330] Args: start: end: frame_type: Returns: frame list \"\"\" n = cls . count_frames ( start , end , frame_type ) return cls . get_frames_by_count ( end , n , frame_type ) @classmethod def get_frames_by_count ( cls , end : Arrow , n : int , frame_type : FrameType ) -> List [ int ]: \"\"\"\u53d6\u4ee5end\u4e3a\u7ed3\u675f\u70b9,\u5468\u671f\u4e3aframe_type\u7684n\u4e2aframe \u8c03\u7528\u524d\u8bf7\u5c06`end`\u5bf9\u9f50\u5230`frame_type`\u7684\u8fb9\u754c Examples: >>> end = arrow.get('2020-1-6 14:30').naive >>> TimeFrame.day_frames = np.array([20200102, 20200103,20200106, 20200107, 20200108, 20200109]) >>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30) [202001061400, 202001061430] Args: end: n: frame_type: Returns: frame list \"\"\" if frame_type == FrameType . DAY : end = cls . date2int ( end ) pos = np . searchsorted ( cls . day_frames , end , side = \"right\" ) return cls . day_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type == FrameType . WEEK : end = cls . date2int ( end ) pos = np . searchsorted ( cls . week_frames , end , side = \"right\" ) return cls . week_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type == FrameType . MONTH : end = cls . date2int ( end ) pos = np . searchsorted ( cls . month_frames , end , side = \"right\" ) return cls . month_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type in { FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , }: n_days = n // len ( cls . ticks [ frame_type ]) + 2 ticks = cls . ticks [ frame_type ] * n_days days = cls . get_frames_by_count ( end , n_days , FrameType . DAY ) days = np . repeat ( days , len ( cls . ticks [ frame_type ])) ticks = [ day . item () * 10000 + int ( tm / 60 ) * 100 + tm % 60 for day , tm in zip ( days , ticks ) ] # list index is much faster than ext.index_sorted when the arr is small pos = ticks . index ( cls . 
time2int ( end )) + 1 return ticks [ max ( 0 , pos - n ) : pos ] else : # pragma: no cover raise ValueError ( f \" { frame_type } not support yet\" ) @classmethod def ceiling ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \u6bd4\u5982`moment`\u4e3a14:59\u5206\uff0c\u5982\u679c`frame_type`\u4e3a30\u5206\u949f\uff0c\u5219\u5b83\u7684\u4e0a\u754c\u5e94\u8be5\u4e3a15:00 Example: >>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107] >>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128] >>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.month_frames = [20050131, 20050228] >>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 15, 0) >>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment (datetime.datetime): [description] frame_type (FrameType): [description] Returns: `moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \"\"\" if frame_type in cls . day_level_frames and type ( moment ) == datetime . datetime : moment = moment . date () floor = cls . floor ( moment , frame_type ) if floor == moment : return moment elif floor > moment : return floor else : return cls . shift ( floor , 1 , frame_type ) @classmethod def combine_time ( cls , date : datetime . date , hour : int , minute : int = 0 , second : int = 0 , microsecond : int = 0 , ) -> datetime . datetime : \"\"\"\u7528`date`\u6307\u5b9a\u7684\u65e5\u671f\u4e0e`hour`, `minute`, `second`\u7b49\u53c2\u6570\u4e00\u8d77\u5408\u6210\u65b0\u7684\u65f6\u95f4 Examples: >>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30) datetime.datetime(2020, 1, 1, 14, 30) Args: date : [description] hour : [description] minute : [description]. Defaults to 0. second : [description]. Defaults to 0. microsecond : [description]. Defaults to 0. Returns: \u5408\u6210\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( date . year , date . month , date . day , hour , minute , second , microsecond ) @classmethod def replace_date ( cls , dtm : datetime . datetime , dt : datetime . date ) -> datetime . datetime : \"\"\"\u5c06`dtm`\u53d8\u91cf\u7684\u65e5\u671f\u66f4\u6362\u4e3a`dt`\u6307\u5b9a\u7684\u65e5\u671f Example: >>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1,1)) datetime.datetime(2019, 1, 1, 13, 49) Args: dtm (datetime.datetime): [description] dt (datetime.date): [description] Returns: \u53d8\u6362\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( dt . year , dt . month , dt . day , dtm . hour , dtm . minute , dtm . second , dtm . microsecond ) @classmethod def resample_frames ( cls , trade_days : Iterable [ datetime . 
date ], frame_type : FrameType ) -> List [ int ]: \"\"\"\u5c06\u4ece\u884c\u60c5\u670d\u52a1\u5668\u83b7\u53d6\u7684\u4ea4\u6613\u65e5\u5386\u91cd\u91c7\u6837\uff0c\u751f\u6210\u5468\u5e27\u548c\u6708\u7ebf\u5e27 Args: trade_days (Iterable): [description] frame_type (FrameType): [description] Returns: List[int]: \u91cd\u91c7\u6837\u540e\u7684\u65e5\u671f\u5217\u8868\uff0c\u65e5\u671f\u7528\u6574\u6570\u8868\u793a \"\"\" if frame_type == FrameType . WEEK : weeks = [] last = trade_days [ 0 ] for cur in trade_days : if cur . weekday () < last . weekday () or ( cur - last ) . days >= 7 : weeks . append ( last ) last = cur if weeks [ - 1 ] < last : weeks . append ( last ) return weeks elif frame_type == FrameType . MONTH : months = [] last = trade_days [ 0 ] for cur in trade_days : if cur . day < last . day : months . append ( last ) last = cur months . append ( last ) return months elif frame_type == FrameType . QUARTER : quarters = [] last = trade_days [ 0 ] for cur in trade_days : if last . month % 3 == 0 : if cur . month > last . month or cur . year > last . year : quarters . append ( last ) last = cur quarters . append ( last ) return quarters elif frame_type == FrameType . YEAR : years = [] last = trade_days [ 0 ] for cur in trade_days : if cur . year > last . year : years . append ( last ) last = cur years . append ( last ) return years else : # pragma: no cover raise ValueError ( f \"Unsupported FrameType: { frame_type } \" ) @classmethod def minute_frames_floor ( cls , ticks , moment ) -> Tuple [ int , int ]: \"\"\" \u5bf9\u4e8e\u5206\u949f\u7ea7\u7684frame,\u8fd4\u56de\u5b83\u4eec\u4e0eframe\u523b\u5ea6\u5411\u4e0b\u5bf9\u9f50\u540e\u7684frame\u53ca\u65e5\u671f\u8fdb\u4f4d\u3002\u5982\u679c\u9700\u8981\u5bf9\u9f50\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613 \u65e5\uff0c\u5219\u8fdb\u4f4d\u4e3a-1\uff0c\u5426\u5219\u4e3a0. Examples: >>> ticks = [600, 630, 660, 690, 810, 840, 870, 900] >>> TimeFrame.minute_frames_floor(ticks, 545) (900, -1) >>> TimeFrame.minute_frames_floor(ticks, 600) (600, 0) >>> TimeFrame.minute_frames_floor(ticks, 605) (600, 0) >>> TimeFrame.minute_frames_floor(ticks, 899) (870, 0) >>> TimeFrame.minute_frames_floor(ticks, 900) (900, 0) >>> TimeFrame.minute_frames_floor(ticks, 905) (900, 0) Args: ticks (np.array or list): frames\u523b\u5ea6 moment (int): \u6574\u6570\u8868\u793a\u7684\u5206\u949f\u6570\uff0c\u6bd4\u5982900\u8868\u793a15\uff1a00 Returns: tuple, the first is the new moment, the second is carry-on \"\"\" if moment < ticks [ 0 ]: return ticks [ - 1 ], - 1 # \u2019right' \u76f8\u5f53\u4e8e ticks <= m index = np . searchsorted ( ticks , moment , side = \"right\" ) return ticks [ index - 1 ], 0 @classmethod async def save_calendar ( cls , trade_days ): # avoid circular import from omicron import cache for ft in [ FrameType . WEEK , FrameType . MONTH , FrameType . QUARTER , FrameType . YEAR ]: days = cls . resample_frames ( trade_days , ft ) frames = [ cls . date2int ( x ) for x in days ] key = f \"calendar: { ft . value } \" pl = cache . security . pipeline () pl . delete ( key ) pl . rpush ( key , * frames ) await pl . execute () frames = [ cls . date2int ( x ) for x in trade_days ] key = f \"calendar: { FrameType . DAY . value } \" pl = cache . security . pipeline () pl . delete ( key ) pl . rpush ( key , * frames ) await pl . execute () @classmethod async def remove_calendar ( cls ): # avoid circular import from omicron import cache for ft in cls . day_level_frames : key = f \"calendar: { ft . value } \" await cache . security . 
delete ( key ) @classmethod def is_bar_closed ( cls , frame : Frame , ft : FrameType ) -> bool : \"\"\"\u5224\u65ad`frame`\u6240\u4ee3\u8868\u7684bar\u662f\u5426\u5df2\u7ecf\u6536\u76d8\uff08\u7ed3\u675f\uff09 \u5982\u679c\u662f\u65e5\u7ebf\uff0cframe\u4e0d\u4e3a\u5f53\u5929\uff0c\u5219\u8ba4\u4e3a\u5df2\u6536\u76d8\uff1b\u6216\u8005\u5f53\u524d\u65f6\u95f4\u5728\u6536\u76d8\u65f6\u95f4\u4e4b\u540e\uff0c\u4e5f\u8ba4\u4e3a\u5df2\u6536\u76d8\u3002 \u5982\u679c\u662f\u5176\u5b83\u5468\u671f\uff0c\u5219\u53ea\u6709\u5f53frame\u6b63\u597d\u5728\u8fb9\u754c\u4e0a\uff0c\u624d\u8ba4\u4e3a\u662f\u5df2\u6536\u76d8\u3002\u8fd9\u91cc\u6709\u4e00\u4e2a\u5047\u8bbe\uff1a\u6211\u4eec\u4e0d\u4f1a\u5728\u5176\u5b83\u5468\u671f\u4e0a\uff0c\u5224\u65ad\u672a\u6765\u7684\u67d0\u4e2aframe\u662f\u5426\u5df2\u7ecf\u6536\u76d8\u3002 Args: frame : bar\u6240\u5904\u7684\u65f6\u95f4\uff0c\u5fc5\u987b\u5c0f\u4e8e\u5f53\u524d\u65f6\u95f4 ft: bar\u6240\u4ee3\u8868\u7684\u5e27\u7c7b\u578b Returns: bool: \u662f\u5426\u5df2\u7ecf\u6536\u76d8 \"\"\" floor = cls . floor ( frame , ft ) now = arrow . now () if ft == FrameType . DAY : return floor < now . date () or now . hour >= 15 else : return floor == frame @classmethod def get_frame_scope ( cls , frame : Frame , ft : FrameType ) -> Tuple [ Frame , Frame ]: # todo: \u51fd\u6570\u7684\u901a\u7528\u6027\u4e0d\u8db3\uff0c\u4f3c\u4e4e\u5e94\u8be5\u653e\u5728\u5177\u4f53\u7684\u4e1a\u52a1\u7c7b\u4e2d\u3002\u5982\u679c\u662f\u901a\u7528\u578b\u7684\u51fd\u6570\uff0c\u53c2\u6570\u4e0d\u5e94\u8be5\u5c40\u9650\u4e8e\u5468\u548c\u6708\u3002 \"\"\"\u5bf9\u4e8e\u7ed9\u5b9a\u7684\u65f6\u95f4\uff0c\u53d6\u6240\u5728\u5468\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929\uff0c\u6240\u5728\u6708\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929 Args: frame : \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 ft: \u5e27\u7c7b\u578b\uff0c\u652f\u6301WEEK\u548cMONTH Returns: Tuple[Frame, Frame]: \u5468\u6216\u8005\u6708\u7684\u9996\u672b\u65e5\u671f\uff08date\u5bf9\u8c61\uff09 \"\"\" if frame is None : raise ValueError ( \"frame cannot be None\" ) if ft not in ( FrameType . WEEK , FrameType . MONTH ): raise ValueError ( f \"FrameType only supports WEEK and MONTH: { ft } \" ) if isinstance ( frame , datetime . datetime ): frame = frame . date () if frame < CALENDAR_START : raise ValueError ( f \"cannot be earlier than { CALENDAR_START } : { frame } \" ) # datetime.date(2021, 10, 8)\uff0c\u8fd9\u662f\u4e2a\u7279\u6b8a\u7684\u65e5\u671f if ft == FrameType . WEEK : if frame < datetime . date ( 2005 , 1 , 10 ): return datetime . date ( 2005 , 1 , 4 ), datetime . date ( 2005 , 1 , 7 ) if not cls . is_trade_day ( frame ): # \u975e\u4ea4\u6613\u65e5\u7684\u60c5\u51b5\uff0c\u76f4\u63a5\u56de\u9000\u4e00\u5929 week_day = cls . day_shift ( frame , 0 ) else : week_day = frame w1 = TimeFrame . floor ( week_day , FrameType . WEEK ) if w1 == week_day : # \u672c\u5468\u7684\u6700\u540e\u4e00\u4e2a\u4ea4\u6613\u65e5 week_end = w1 else : week_end = TimeFrame . week_shift ( week_day , 1 ) w0 = TimeFrame . week_shift ( week_end , - 1 ) week_start = TimeFrame . day_shift ( w0 , 1 ) return week_start , week_end if ft == FrameType . MONTH : if frame <= datetime . date ( 2005 , 1 , 31 ): return datetime . date ( 2005 , 1 , 4 ), datetime . date ( 2005 , 1 , 31 ) month_start = frame . replace ( day = 1 ) if not cls . is_trade_day ( month_start ): # \u975e\u4ea4\u6613\u65e5\u7684\u60c5\u51b5\uff0c\u76f4\u63a5\u52a01 month_start = cls . day_shift ( month_start , 1 ) month_end = TimeFrame . 
month_shift ( month_start , 1 ) return month_start , month_end @classmethod def get_previous_trade_day ( cls , now : datetime . date ): \"\"\"\u83b7\u53d6\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \u5982\u679c\u5f53\u5929\u662f\u5468\u516d\u6216\u8005\u5468\u65e5\uff0c\u8fd4\u56de\u5468\u4e94\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e00\uff0c\u8fd4\u56de\u5468\u4e94\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e94\uff0c\u8fd4\u56de\u5468\u56db Args: now : \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 Returns: datetime.date: \u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \"\"\" if now == datetime . date ( 2005 , 1 , 4 ): return now if TimeFrame . is_trade_day ( now ): pre_trade_day = TimeFrame . day_shift ( now , - 1 ) else : pre_trade_day = TimeFrame . day_shift ( now , 0 ) return pre_trade_day ceiling ( moment , frame_type ) classmethod \u00b6 \u6c42 moment \u6240\u5728\u7c7b\u578b\u4e3a frame_type \u5468\u671f\u7684\u4e0a\u754c \u6bd4\u5982 moment \u4e3a14:59\u5206\uff0c\u5982\u679c frame_type \u4e3a30\u5206\u949f\uff0c\u5219\u5b83\u7684\u4e0a\u754c\u5e94\u8be5\u4e3a15:00 Examples: >>> TimeFrame . day_frames = [ 20050104 , 20050105 , 20050106 , 20050107 ] >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 7 ), FrameType . DAY ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . week_frames = [ 20050107 , 20050114 , 20050121 , 20050128 ] >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 4 ), FrameType . WEEK ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 7 ), FrameType . WEEK ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . month_frames = [ 20050131 , 20050228 ] >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 1 ), FrameType . MONTH ) datetime . date ( 2005 , 1 , 31 ) >>> TimeFrame . ceiling ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN30 ) datetime . datetime ( 2005 , 1 , 5 , 15 , 0 ) >>> TimeFrame . ceiling ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN1 ) datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ) >>> TimeFrame . ceiling ( arrow . get ( '2005-1-5 14:59' ) . naive , FrameType . MIN1 ) datetime . 
datetime ( 2005 , 1 , 5 , 14 , 59 ) Parameters: Name Type Description Default moment datetime.datetime [description] required frame_type FrameType [description] required Returns: Type Description Frame moment \u6240\u5728\u7c7b\u578b\u4e3a frame_type \u5468\u671f\u7684\u4e0a\u754c Source code in omicron/models/timeframe.py @classmethod def ceiling ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \u6bd4\u5982`moment`\u4e3a14:59\u5206\uff0c\u5982\u679c`frame_type`\u4e3a30\u5206\u949f\uff0c\u5219\u5b83\u7684\u4e0a\u754c\u5e94\u8be5\u4e3a15:00 Example: >>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107] >>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128] >>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.month_frames = [20050131, 20050228] >>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 15, 0) >>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment (datetime.datetime): [description] frame_type (FrameType): [description] Returns: `moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \"\"\" if frame_type in cls . day_level_frames and type ( moment ) == datetime . datetime : moment = moment . date () floor = cls . floor ( moment , frame_type ) if floor == moment : return moment elif floor > moment : return floor else : return cls . shift ( floor , 1 , frame_type ) combine_time ( date , hour , minute = 0 , second = 0 , microsecond = 0 ) classmethod \u00b6 \u7528 date \u6307\u5b9a\u7684\u65e5\u671f\u4e0e hour , minute , second \u7b49\u53c2\u6570\u4e00\u8d77\u5408\u6210\u65b0\u7684\u65f6\u95f4 Examples: >>> TimeFrame . combine_time ( datetime . date ( 2020 , 1 , 1 ), 14 , 30 ) datetime . datetime ( 2020 , 1 , 1 , 14 , 30 ) Parameters: Name Type Description Default date [description] required hour [description] required minute [description]. Defaults to 0. 0 second [description]. Defaults to 0. 0 microsecond [description]. Defaults to 0. 0 Returns: Type Description datetime.datetime \u5408\u6210\u540e\u7684\u65f6\u95f4 Source code in omicron/models/timeframe.py @classmethod def combine_time ( cls , date : datetime . date , hour : int , minute : int = 0 , second : int = 0 , microsecond : int = 0 , ) -> datetime . datetime : \"\"\"\u7528`date`\u6307\u5b9a\u7684\u65e5\u671f\u4e0e`hour`, `minute`, `second`\u7b49\u53c2\u6570\u4e00\u8d77\u5408\u6210\u65b0\u7684\u65f6\u95f4 Examples: >>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30) datetime.datetime(2020, 1, 1, 14, 30) Args: date : [description] hour : [description] minute : [description]. Defaults to 0. second : [description]. Defaults to 0. microsecond : [description]. Defaults to 0. Returns: \u5408\u6210\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( date . year , date . month , date . 
day , hour , minute , second , microsecond ) count_day_frames ( start , end ) classmethod \u00b6 calc trade days between start and end in close-to-close way. if start == end, this will returns 1. Both start/end will be aligned to open trade day before calculation. Examples: >>> start = datetime . date ( 2019 , 12 , 21 ) >>> end = datetime . date ( 2019 , 12 , 21 ) >>> TimeFrame . day_frames = [ 20191219 , 20191220 , 20191223 , 20191224 , 20191225 ] >>> TimeFrame . count_day_frames ( start , end ) 1 >>> # non-trade days are removed >>> TimeFrame . day_frames = [ 20200121 , 20200122 , 20200123 , 20200203 , 20200204 , 20200205 ] >>> start = datetime . date ( 2020 , 1 , 23 ) >>> end = datetime . date ( 2020 , 2 , 4 ) >>> TimeFrame . count_day_frames ( start , end ) 3 Parameters: Name Type Description Default start Union[datetime.date, Arrow] required end Union[datetime.date, Arrow] required Returns: Type Description int count of days Source code in omicron/models/timeframe.py @classmethod def count_day_frames ( cls , start : Union [ datetime . date , Arrow ], end : Union [ datetime . date , Arrow ] ) -> int : \"\"\"calc trade days between start and end in close-to-close way. if start == end, this will returns 1. Both start/end will be aligned to open trade day before calculation. Examples: >>> start = datetime.date(2019, 12, 21) >>> end = datetime.date(2019, 12, 21) >>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225] >>> TimeFrame.count_day_frames(start, end) 1 >>> # non-trade days are removed >>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205] >>> start = datetime.date(2020, 1, 23) >>> end = datetime.date(2020, 2, 4) >>> TimeFrame.count_day_frames(start, end) 3 args: start: end: returns: count of days \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . day_frames , start , end )) count_frames ( start , end , frame_type ) classmethod \u00b6 \u8ba1\u7b97start\u4e0eend\u4e4b\u95f4\u6709\u591a\u5c11\u4e2a\u5468\u671f\u4e3aframe_type\u7684frames See also: count_day_frames count_week_frames count_month_frames Parameters: Name Type Description Default start start frame required end end frame required frame_type the type of frame required Exceptions: Type Description ValueError \u5982\u679cframe_type\u4e0d\u652f\u6301\uff0c\u5219\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Returns: Type Description int \u4ecestart\u5230end\u7684\u5e27\u6570 Source code in omicron/models/timeframe.py @classmethod def count_frames ( cls , start : Union [ datetime . date , datetime . datetime , Arrow ], end : Union [ datetime . date , datetime . datetime , Arrow ], frame_type , ) -> int : \"\"\"\u8ba1\u7b97start\u4e0eend\u4e4b\u95f4\u6709\u591a\u5c11\u4e2a\u5468\u671f\u4e3aframe_type\u7684frames See also: - [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] - [count_week_frames][omicron.models.timeframe.TimeFrame.count_week_frames] - [count_month_frames][omicron.models.timeframe.TimeFrame.count_month_frames] Args: start : start frame end : end frame frame_type : the type of frame Raises: ValueError: \u5982\u679cframe_type\u4e0d\u652f\u6301\uff0c\u5219\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Returns: \u4ecestart\u5230end\u7684\u5e27\u6570 \"\"\" if frame_type == FrameType . DAY : return cls . count_day_frames ( start , end ) elif frame_type == FrameType . WEEK : return cls . count_week_frames ( start , end ) elif frame_type == FrameType . MONTH : return cls . 
count_month_frames ( start , end ) elif frame_type == FrameType . QUARTER : return cls . count_quarter_frames ( start , end ) elif frame_type == FrameType . YEAR : return cls . count_year_frames ( start , end ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm_start = start . hour * 60 + start . minute tm_end = end . hour * 60 + end . minute days = cls . count_day_frames ( start . date (), end . date ()) - 1 tm_start_pos = cls . ticks [ frame_type ] . index ( tm_start ) tm_end_pos = cls . ticks [ frame_type ] . index ( tm_end ) min_bars = tm_end_pos - tm_start_pos + 1 return days * len ( cls . ticks [ frame_type ]) + min_bars else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported yet\" ) count_month_frames ( start , end ) classmethod \u00b6 calc trade months between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date required end datetime.date required Returns: Type Description int months between start and end Source code in omicron/models/timeframe.py @classmethod def count_month_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade months between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start: end: Returns: months between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . month_frames , start , end )) count_quarter_frames ( start , end ) classmethod \u00b6 calc trade quarters between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date [description] required end datetime.date [description] required Returns: Type Description int quarters between start and end Source code in omicron/models/timeframe.py @classmethod def count_quarter_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade quarters between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: quarters between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . quarter_frames , start , end )) count_week_frames ( start , end ) classmethod \u00b6 calc trade weeks between start and end in close-to-close way. Both start and end will be aligned to open trade day before calculation. 
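For minute-level frame types, `count_frames` combines whole trade days with the intraday tick positions shown in the source above. A rough sketch (editorial; it assumes omicron is installed and that `FrameType` is importable from the `coretypes` package, which is not shown in this diff):

```python
# Sketch of the minute-level branch of count_frames: each full trade day
# contributes len(ticks[frame_type]) bars, plus the partial first/last day.
# Assumption: FrameType comes from the coretypes package (not part of this diff).
import datetime
import numpy as np
from coretypes import FrameType
from omicron.models.timeframe import TimeFrame

TimeFrame.day_frames = np.array([20200103, 20200106, 20200107, 20200108])

start = datetime.datetime(2020, 1, 6, 11, 0)
end = datetime.datetime(2020, 1, 6, 14, 0)
# same trade day, MIN30 bars at 11:00, 11:30, 13:30 and 14:00 -> 4
assert TimeFrame.count_frames(start, end, FrameType.MIN30) == 4
```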
After that, if start == end, this will returns 1 for examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date required end datetime.date required Returns: Type Description int count of weeks Source code in omicron/models/timeframe.py @classmethod def count_week_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\" calc trade weeks between start and end in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1 for examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] args: start: end: returns: count of weeks \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . week_frames , start , end )) count_year_frames ( start , end ) classmethod \u00b6 calc trade years between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date [description] required end datetime.date [description] required Returns: Type Description int years between start and end Source code in omicron/models/timeframe.py @classmethod def count_year_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade years between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: years between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . year_frames , start , end )) date2int ( d ) classmethod \u00b6 \u5c06\u65e5\u671f\u8f6c\u6362\u4e3a\u6574\u6570\u8868\u793a \u5728zillionare\u4e2d\uff0c\u5982\u679c\u8981\u5bf9\u65f6\u95f4\u548c\u65e5\u671f\u8fdb\u884c\u6301\u4e45\u5316\u64cd\u4f5c\uff0c\u6211\u4eec\u4e00\u822c\u5c06\u5176\u8f6c\u6362\u4e3aint\u7c7b\u578b Examples: >>> TimeFrame . date2int ( datetime . date ( 2020 , 5 , 1 )) 20200501 Parameters: Name Type Description Default d Union[datetime.datetime, datetime.date, Arrow] date required Returns: Type Description int \u65e5\u671f\u7684\u6574\u6570\u8868\u793a\uff0c\u6bd4\u598220220211 Source code in omicron/models/timeframe.py @classmethod def date2int ( cls , d : Union [ datetime . datetime , datetime . date , Arrow ]) -> int : \"\"\"\u5c06\u65e5\u671f\u8f6c\u6362\u4e3a\u6574\u6570\u8868\u793a \u5728zillionare\u4e2d\uff0c\u5982\u679c\u8981\u5bf9\u65f6\u95f4\u548c\u65e5\u671f\u8fdb\u884c\u6301\u4e45\u5316\u64cd\u4f5c\uff0c\u6211\u4eec\u4e00\u822c\u5c06\u5176\u8f6c\u6362\u4e3aint\u7c7b\u578b Examples: >>> TimeFrame.date2int(datetime.date(2020,5,1)) 20200501 Args: d: date Returns: \u65e5\u671f\u7684\u6574\u6570\u8868\u793a\uff0c\u6bd4\u598220220211 \"\"\" return int ( f \" { d . year : 04 }{ d . month : 02 }{ d . 
day : 02 } \" ) day_shift ( start , offset ) classmethod \u00b6 \u5bf9\u6307\u5b9a\u65e5\u671f\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u5982\u679c n == 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff08\u5982\u679c\u662f\u975e\u4ea4\u6613\u65e5\uff0c\u5219\u8fd4\u56de\u521a\u7ed3\u675f\u7684\u4e00\u4e2a\u4ea4\u6613\u65e5\uff09 \u5982\u679c n > 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u540e\u7b2c n \u4e2a\u4ea4\u6613\u65e5 \u5982\u679c n < 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u524d\u7b2c n \u4e2a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame . day_frames = [ 20191212 , 20191213 , 20191216 , 20191217 , 20191218 , 20191219 ] >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 13 ), 0 ) datetime . date ( 2019 , 12 , 13 ) >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 15 ), 0 ) datetime . date ( 2019 , 12 , 13 ) >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 15 ), 1 ) datetime . date ( 2019 , 12 , 16 ) >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 13 ), 1 ) datetime . date ( 2019 , 12 , 16 ) Parameters: Name Type Description Default start datetime.date the origin day required offset int days to shift, can be negative required Returns: Type Description datetime.date \u79fb\u4f4d\u540e\u7684\u65e5\u671f Source code in omicron/models/timeframe.py @classmethod def day_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u5982\u679c n == 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff08\u5982\u679c\u662f\u975e\u4ea4\u6613\u65e5\uff0c\u5219\u8fd4\u56de\u521a\u7ed3\u675f\u7684\u4e00\u4e2a\u4ea4\u6613\u65e5\uff09 \u5982\u679c n > 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u540e\u7b2c n \u4e2a\u4ea4\u6613\u65e5 \u5982\u679c n < 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u524d\u7b2c n \u4e2a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219] >>> TimeFrame.day_shift(datetime.date(2019,12,13), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1) datetime.date(2019, 12, 16) >>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1) datetime.date(2019, 12, 16) Args: start: the origin day offset: days to shift, can be negative Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" # accelerated from 0.12 to 0.07, per 10000 loop, type conversion time included start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . day_frames , start , offset )) first_min_frame ( day , frame_type ) classmethod \u00b6 \u83b7\u53d6\u6307\u5b9a\u65e5\u671f\u7c7b\u578b\u4e3a frame_type \u7684 frame \u3002 Examples: >>> TimeFrame . day_frames = np . array ([ 20191227 , 20191230 , 20191231 , 20200102 , 20200103 ]) >>> TimeFrame . first_min_frame ( '2019-12-31' , FrameType . MIN1 ) datetime . datetime ( 2019 , 12 , 31 , 9 , 31 ) Parameters: Name Type Description Default day Union[str, Arrow, Frame] which day? required frame_type FrameType which frame_type? required Returns: Type Description Union[datetime.date, datetime.datetime] day \u5f53\u65e5\u7684\u7b2c\u4e00\u5e27 Source code in omicron/models/timeframe.py @classmethod def first_min_frame ( cls , day : Union [ str , Arrow , Frame ], frame_type : FrameType ) -> Union [ datetime . date , datetime . 
datetime ]: \"\"\"\u83b7\u53d6\u6307\u5b9a\u65e5\u671f\u7c7b\u578b\u4e3a`frame_type`\u7684`frame`\u3002 Examples: >>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103]) >>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1) datetime.datetime(2019, 12, 31, 9, 31) Args: day: which day? frame_type: which frame_type? Returns: `day`\u5f53\u65e5\u7684\u7b2c\u4e00\u5e27 \"\"\" day = cls . date2int ( arrow . get ( day ) . date ()) if frame_type == FrameType . MIN1 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 31 ) elif frame_type == FrameType . MIN5 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 35 ) elif frame_type == FrameType . MIN15 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 45 ) elif frame_type == FrameType . MIN30 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 10 ) elif frame_type == FrameType . MIN60 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 10 , minute = 30 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" ) floor ( moment , frame_type ) classmethod \u00b6 \u6c42 moment \u5728\u6307\u5b9a\u7684 frame_type \u4e2d\u7684\u4e0b\u754c \u6bd4\u5982\uff0c\u5982\u679c moment \u4e3a10:37\uff0c\u5219\u5f53 frame_type \u4e3a30\u5206\u949f\u65f6\uff0c\u5bf9\u5e94\u7684\u4e0a\u754c\u4e3a10:00 Examples: >>> # \u5982\u679cmoment\u4e3a\u65e5\u671f\uff0c\u5219\u5f53\u6210\u5df2\u6536\u76d8\u5904\u7406 >>> TimeFrame . day_frames = np . array ([ 20050104 , 20050105 , 20050106 , 20050107 , 20050110 , 20050111 ]) >>> TimeFrame . floor ( datetime . date ( 2005 , 1 , 7 ), FrameType . DAY ) datetime . date ( 2005 , 1 , 7 ) >>> # moment\u6307\u5b9a\u7684\u65f6\u95f4\u8fd8\u672a\u6536\u76d8\uff0cfloor\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 >>> TimeFrame . floor ( datetime . datetime ( 2005 , 1 , 7 , 14 , 59 ), FrameType . DAY ) datetime . date ( 2005 , 1 , 6 ) >>> TimeFrame . floor ( datetime . date ( 2005 , 1 , 13 ), FrameType . WEEK ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . floor ( datetime . date ( 2005 , 2 , 27 ), FrameType . MONTH ) datetime . date ( 2005 , 1 , 31 ) >>> TimeFrame . floor ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN30 ) datetime . datetime ( 2005 , 1 , 5 , 14 , 30 ) >>> TimeFrame . floor ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN1 ) datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ) >>> TimeFrame . floor ( arrow . get ( '2005-1-5 14:59' ) . naive , FrameType . MIN1 ) datetime . 
datetime ( 2005 , 1 , 5 , 14 , 59 ) Parameters: Name Type Description Default moment Frame required frame_type FrameType required Returns: Type Description Frame moment \u5728\u6307\u5b9a\u7684 frame_type \u4e2d\u7684\u4e0b\u754c Source code in omicron/models/timeframe.py @classmethod def floor ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \u6bd4\u5982\uff0c\u5982\u679c`moment`\u4e3a10:37\uff0c\u5219\u5f53`frame_type`\u4e3a30\u5206\u949f\u65f6\uff0c\u5bf9\u5e94\u7684\u4e0a\u754c\u4e3a10:00 Examples: >>> # \u5982\u679cmoment\u4e3a\u65e5\u671f\uff0c\u5219\u5f53\u6210\u5df2\u6536\u76d8\u5904\u7406 >>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111]) >>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> # moment\u6307\u5b9a\u7684\u65f6\u95f4\u8fd8\u672a\u6536\u76d8\uff0cfloor\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 >>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY) datetime.date(2005, 1, 6) >>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 14, 30) >>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment: frame_type: Returns: `moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \"\"\" if frame_type in cls . minute_level_frames : tm , day_offset = cls . minute_frames_floor ( cls . ticks [ frame_type ], moment . hour * 60 + moment . minute ) h , m = tm // 60 , tm % 60 if cls . day_shift ( moment , 0 ) < moment . date () or day_offset == - 1 : h = 15 m = 0 new_day = cls . day_shift ( moment , day_offset ) else : new_day = moment . date () return datetime . datetime ( new_day . year , new_day . month , new_day . day , h , m ) if type ( moment ) == datetime . date : moment = datetime . datetime ( moment . year , moment . month , moment . day , 15 ) # \u5982\u679c\u662f\u4ea4\u6613\u65e5\uff0c\u4f46\u8fd8\u672a\u6536\u76d8 if ( cls . date2int ( moment ) in cls . day_frames and moment . hour * 60 + moment . minute < 900 ): moment = cls . day_shift ( moment , - 1 ) day = cls . date2int ( moment ) if frame_type == FrameType . DAY : arr = cls . day_frames elif frame_type == FrameType . WEEK : arr = cls . week_frames elif frame_type == FrameType . MONTH : arr = cls . month_frames else : # pragma: no cover raise ValueError ( f \"frame type { frame_type } not supported.\" ) floored = ext . floor ( arr , day ) return cls . int2date ( floored ) frame_len ( frame_type ) classmethod \u00b6 \u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \u5bf9\u65e5\u7ebf\u4ee5\u4e0a\u7ea7\u522b\u6ca1\u6709\u610f\u4e49\uff0c\u4f46\u4f1a\u8fd4\u56de240 Examples: >>> TimeFrame . frame_len ( FrameType . 
get_frame_scope(frame, ft) classmethod

For a given date, return the first and last trade day of the week it falls in (FrameType.WEEK), or of the month it falls in (FrameType.MONTH).

Parameters: frame (required), the date in question, a date object; ft (FrameType, required), the frame type, only WEEK and MONTH are supported.
Returns: Tuple[Frame, Frame], the first and last trade day (date objects) of the week or month.

Source code in omicron/models/timeframe.py

    @classmethod
    def get_frame_scope(cls, frame: Frame, ft: FrameType) -> Tuple[Frame, Frame]:
        # todo: not generic enough; probably belongs in a business-specific class.
        # A generic helper should not be limited to weeks and months.
        if frame is None:
            raise ValueError("frame cannot be None")
        if ft not in (FrameType.WEEK, FrameType.MONTH):
            raise ValueError(f"FrameType only supports WEEK and MONTH: {ft}")

        if isinstance(frame, datetime.datetime):
            frame = frame.date()
        if frame < CALENDAR_START:
            raise ValueError(f"cannot be earlier than {CALENDAR_START}: {frame}")

        # datetime.date(2021, 10, 8) is a special date
        if ft == FrameType.WEEK:
            if frame < datetime.date(2005, 1, 10):
                return datetime.date(2005, 1, 4), datetime.date(2005, 1, 7)
            if not cls.is_trade_day(frame):  # not a trade day: fall back one day
                week_day = cls.day_shift(frame, 0)
            else:
                week_day = frame
            w1 = TimeFrame.floor(week_day, FrameType.WEEK)
            if w1 == week_day:  # week_day is the last trade day of its week
                week_end = w1
            else:
                week_end = TimeFrame.week_shift(week_day, 1)
            w0 = TimeFrame.week_shift(week_end, -1)
            week_start = TimeFrame.day_shift(w0, 1)
            return week_start, week_end

        if ft == FrameType.MONTH:
            if frame <= datetime.date(2005, 1, 31):
                return datetime.date(2005, 1, 4), datetime.date(2005, 1, 31)
            month_start = frame.replace(day=1)
            if not cls.is_trade_day(month_start):  # not a trade day: advance one day
                month_start = cls.day_shift(month_start, 1)
            month_end = TimeFrame.month_shift(month_start, 1)
            return month_start, month_end
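An illustrative sketch for the WEEK case of get_frame_scope, not taken from the reference itself. It assumes the `FrameType` import path shown and hand-fills both `day_frames` and `week_frames`, which the method relies on internally.

    import datetime
    import numpy as np
    from coretypes import FrameType  # assumed import path
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array(
        [20200102, 20200103, 20200106, 20200107, 20200108, 20200109, 20200110]
    )
    TimeFrame.week_frames = np.array([20200103, 20200110, 20200117])

    # the trading week containing Wednesday 2020-01-08 runs Monday 01-06 to Friday 01-10
    start, end = TimeFrame.get_frame_scope(datetime.date(2020, 1, 8), FrameType.WEEK)
    print(start, end)  # 2020-01-06 2020-01-10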
get_frames(start, end, frame_type) classmethod

Return all frames of type `frame_type` within [start, end]. Before calling, align both endpoints to `frame_type` boundaries with `floor` or `ceiling`.

Examples:

>>> start = arrow.get('2020-1-13 10:00').naive
>>> end = arrow.get('2020-1-13 13:30').naive
>>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113, 20200114, 20200115, 20200116])
>>> TimeFrame.get_frames(start, end, FrameType.MIN30)
[202001131000, 202001131030, 202001131100, 202001131130, 202001131330]

Parameters: start (Frame, required); end (Frame, required); frame_type (FrameType, required).
Returns: List[int], the frame list.

Source code in omicron/models/timeframe.py

    @classmethod
    def get_frames(cls, start: Frame, end: Frame, frame_type: FrameType) -> List[int]:
        # count the frames between start and end, then take them backwards from end
        n = cls.count_frames(start, end, frame_type)
        return cls.get_frames_by_count(end, n, frame_type)

get_frames_by_count(end, n, frame_type) classmethod

Return the n frames of type `frame_type` that end at `end`. Align `end` to a `frame_type` boundary before calling.

Examples:

>>> end = arrow.get('2020-1-6 14:30').naive
>>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108, 20200109])
>>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30)
[202001061400, 202001061430]

Parameters: end (Arrow, required); n (int, required); frame_type (FrameType, required).
Returns: List[int], the frame list.

Source code in omicron/models/timeframe.py

    @classmethod
    def get_frames_by_count(cls, end: Arrow, n: int, frame_type: FrameType) -> List[int]:
        # take the n frames of `frame_type` ending at `end`
        if frame_type == FrameType.DAY:
            end = cls.date2int(end)
            pos = np.searchsorted(cls.day_frames, end, side="right")
            return cls.day_frames[max(0, pos - n) : pos].tolist()
        elif frame_type == FrameType.WEEK:
            end = cls.date2int(end)
            pos = np.searchsorted(cls.week_frames, end, side="right")
            return cls.week_frames[max(0, pos - n) : pos].tolist()
        elif frame_type == FrameType.MONTH:
            end = cls.date2int(end)
            pos = np.searchsorted(cls.month_frames, end, side="right")
            return cls.month_frames[max(0, pos - n) : pos].tolist()
        elif frame_type in {
            FrameType.MIN1,
            FrameType.MIN5,
            FrameType.MIN15,
            FrameType.MIN30,
            FrameType.MIN60,
        }:
            n_days = n // len(cls.ticks[frame_type]) + 2
            ticks = cls.ticks[frame_type] * n_days
            days = cls.get_frames_by_count(end, n_days, FrameType.DAY)
            days = np.repeat(days, len(cls.ticks[frame_type]))
            ticks = [
                day.item() * 10000 + int(tm / 60) * 100 + tm % 60
                for day, tm in zip(days, ticks)
            ]
            # list.index is much faster than ext.index_sorted when the array is small
            pos = ticks.index(cls.time2int(end)) + 1
            return ticks[max(0, pos - n) : pos]
        else:  # pragma: no cover
            raise ValueError(f"{frame_type} not support yet")
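A small sketch combining the two methods above; it is illustrative only, reuses the calendar values from the doctests, and assumes the `FrameType` import path shown.

    import numpy as np
    import arrow
    from coretypes import FrameType  # assumed import path
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array([20200109, 20200110, 20200113, 20200114, 20200115, 20200116])

    start = arrow.get("2020-1-13 10:00").naive   # already on a MIN30 boundary
    end = arrow.get("2020-1-13 13:30").naive
    print(TimeFrame.get_frames(start, end, FrameType.MIN30))
    # [202001131000, 202001131030, 202001131100, 202001131130, 202001131330]

    # the last two 30-minute frames ending at 2020-01-13 13:30
    print(TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30))
    # [202001131130, 202001131330]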
get_previous_trade_day(now) classmethod

Return the previous trade day. If `now` is a Saturday or Sunday, Friday (a trade day) is returned; if it is a Monday, Friday is returned; if it is a Friday, Thursday is returned.

Parameters: now (required), the date in question, a date object.
Returns: datetime.date, the previous trade day.

Source code in omicron/models/timeframe.py

    @classmethod
    def get_previous_trade_day(cls, now: datetime.date):
        # previous trade day relative to `now`
        if now == datetime.date(2005, 1, 4):
            return now
        if TimeFrame.is_trade_day(now):
            pre_trade_day = TimeFrame.day_shift(now, -1)
        else:
            pre_trade_day = TimeFrame.day_shift(now, 0)
        return pre_trade_day

get_ticks(frame_type) classmethod

Return the frames for month, week, day and each minute-level frame type. For minute-level frame types the returned values contain only the time of day, not the date (all values are integers).

Examples:

>>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331])
>>> TimeFrame.get_ticks(FrameType.MONTH)[:3]
array([20050131, 20050228, 20050331])

Parameters: frame_type (required).
Raises: ValueError, if `frame_type` is not supported.
Returns: Union[List, np.array], the frames for the given frame type.

Source code in omicron/models/timeframe.py

    @classmethod
    def get_ticks(cls, frame_type: FrameType) -> Union[List, np.array]:
        # frames (or minute ticks) for the given frame type
        if frame_type in cls.minute_level_frames:
            return cls.ticks[frame_type]

        if frame_type == FrameType.DAY:
            return cls.day_frames
        elif frame_type == FrameType.WEEK:
            return cls.week_frames
        elif frame_type == FrameType.MONTH:
            return cls.month_frames
        else:  # pragma: no cover
            raise ValueError(f"{frame_type} not supported!")
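An illustrative sketch for the two calendar helpers above (not part of the reference); the calendar arrays are filled by hand and the `FrameType` import path is assumed.

    import datetime
    import numpy as np
    from coretypes import FrameType  # assumed import path
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])

    # Monday 2020-01-06 is a trade day, so the previous trade day is Friday 01-03
    print(TimeFrame.get_previous_trade_day(datetime.date(2020, 1, 6)))  # 2020-01-03
    # Sunday 2020-01-05 is not a trade day, so it resolves to Friday 01-03 as well
    print(TimeFrame.get_previous_trade_day(datetime.date(2020, 1, 5)))  # 2020-01-03

    # minute-level ticks are minutes-of-day integers, e.g. 600 means 10:00
    print(TimeFrame.get_ticks(FrameType.MIN30))
    # [600, 630, 660, 690, 810, 840, 870, 900]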
init() async classmethod

Initialize the trading calendar.

Source code in omicron/models/timeframe.py

    @classmethod
    async def init(cls):
        # load the calendar from the cache
        await cls._load_calendar()

int2date(d) classmethod

Convert an integer-encoded date into a datetime.date.

Examples:

>>> TimeFrame.int2date(20200501)
datetime.date(2020, 5, 1)

Parameters: d (Union[int, str], required), a date in YYYYMMDD form.
Returns: datetime.date, the converted date.

Source code in omicron/models/timeframe.py

    @classmethod
    def int2date(cls, d: Union[int, str]) -> datetime.date:
        s = str(d)
        # about 8 times faster than arrow.get
        return datetime.date(int(s[:4]), int(s[4:6]), int(s[6:]))

int2time(tm) classmethod

Convert an integer-encoded time into a datetime.datetime.

Examples:

>>> TimeFrame.int2time(202005011500)
datetime.datetime(2020, 5, 1, 15, 0)

Parameters: tm (int, required), a time in YYYYMMDDHHmm format.
Returns: datetime.datetime, the converted time.

Source code in omicron/models/timeframe.py

    @classmethod
    def int2time(cls, tm: int) -> datetime.datetime:
        s = str(tm)
        # about 8 times faster than arrow.get()
        return datetime.datetime(
            int(s[:4]), int(s[4:6]), int(s[6:8]), int(s[8:10]), int(s[10:12])
        )
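A minimal round-trip sketch for the integer encodings above (illustrative only; zillionare persists frames as integers, so these conversions appear throughout the API).

    import datetime
    from omicron.models.timeframe import TimeFrame

    print(TimeFrame.int2date(20200501))                            # 2020-05-01
    print(TimeFrame.int2time(202005011500))                        # 2020-05-01 15:00:00
    print(TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15)))   # 202005011500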
is_bar_closed(frame, ft) classmethod

Determine whether the bar represented by `frame` has closed. For day bars, the bar is considered closed if `frame` is not today, or if the current time is past the market close. For any other frame type, the bar is considered closed only when `frame` falls exactly on a frame boundary; the assumption is that we never ask whether a future bar of a non-day frame type has closed.

Parameters: frame (required), the time of the bar, which must be earlier than the current time; ft (FrameType, required), the frame type of the bar.
Returns: bool, whether the bar has closed.

Source code in omicron/models/timeframe.py

    @classmethod
    def is_bar_closed(cls, frame: Frame, ft: FrameType) -> bool:
        floor = cls.floor(frame, ft)
        now = arrow.now()
        if ft == FrameType.DAY:
            return floor < now.date() or now.hour >= 15
        else:
            return floor == frame

is_closing_call_auction_time(tm=None) classmethod

Check whether the time given by `tm` falls within the closing call-auction window.

Fixme: the current implementation is incomplete; the closing call auction should also cover the morning close.

Parameters: tm, defaults to None (the current time is used).
Returns: bool.

Source code in omicron/models/timeframe.py

    @classmethod
    def is_closing_call_auction_time(
        cls, tm: Union[datetime.datetime, Arrow] = None
    ) -> bool:
        # Fixme: should also cover the morning close
        tm = tm or cls.now()
        if not cls.is_trade_day(tm):
            return False
        minutes = tm.hour * 60 + tm.minute
        return 15 * 60 - 3 <= minutes < 15 * 60
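A short, deterministic sketch for the closing call-auction check (illustrative only; `is_bar_closed` depends on the wall clock, so it is not demonstrated here). The calendar is filled by hand as in the doctests.

    import datetime
    import numpy as np
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array([20200102, 20200103, 20200106])

    # 14:58 on a trade day falls inside the closing call-auction window (14:57 to 15:00)
    print(TimeFrame.is_closing_call_auction_time(datetime.datetime(2020, 1, 3, 14, 58)))  # True
    # 14:30 does not
    print(TimeFrame.is_closing_call_auction_time(datetime.datetime(2020, 1, 3, 14, 30)))  # False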
is_open_time(tm=None) classmethod

Check whether the time given by `tm` falls within trading hours, that is, the open session excluding the call-auction windows.

Examples:

>>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])
>>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive)
False
>>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive)
True

Parameters: tm, defaults to None (the current time is used).
Returns: bool.

Source code in omicron/models/timeframe.py

    @classmethod
    def is_open_time(cls, tm: Union[datetime.datetime, Arrow] = None) -> bool:
        tm = tm or arrow.now()
        if not cls.is_trade_day(tm):
            return False
        tick = tm.hour * 60 + tm.minute
        return tick in cls.ticks[FrameType.MIN1]

is_opening_call_auction_time(tm=None) classmethod

Check whether the time given by `tm` falls within the opening call-auction window.

Parameters: tm, defaults to None (the current time is used).
Returns: bool.

Source code in omicron/models/timeframe.py

    @classmethod
    def is_opening_call_auction_time(
        cls, tm: Union[Arrow, datetime.datetime] = None
    ) -> bool:
        if tm is None:
            tm = cls.now()
        if not cls.is_trade_day(tm):
            return False
        minutes = tm.hour * 60 + tm.minute
        return 9 * 60 + 15 < minutes <= 9 * 60 + 25

is_trade_day(dt) classmethod

Check whether `dt` is a trade day.

Examples:

>>> TimeFrame.is_trade_day(arrow.get('2020-1-1'))
False

Parameters: dt (required).
Returns: bool.

Source code in omicron/models/timeframe.py

    @classmethod
    def is_trade_day(cls, dt: Union[datetime.date, datetime.datetime, Arrow]) -> bool:
        return cls.date2int(dt) in cls.day_frames
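An illustrative sketch of the session predicates above, reusing the doctest calendar; it passes explicit timestamps so the results do not depend on the wall clock.

    import arrow
    import numpy as np
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108])

    print(TimeFrame.is_trade_day(arrow.get("2020-1-1")))                              # False, New Year's Day
    print(TimeFrame.is_open_time(arrow.get("2020-1-3 14:59").naive))                  # True, inside the session
    print(TimeFrame.is_opening_call_auction_time(arrow.get("2020-1-3 9:20").naive))   # True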
last_min_frame(day, frame_type) classmethod

Return the closing (last) frame of type `frame_type` on `day`.

Examples:

>>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30)
datetime.datetime(2020, 1, 3, 15, 0)

Parameters: day (Union[str, Arrow, datetime.date], required); frame_type (FrameType, required).
Returns: Union[datetime.date, datetime.datetime], the closing frame of type `frame_type` on `day`.

Source code in omicron/models/timeframe.py

    @classmethod
    def last_min_frame(
        cls, day: Union[str, Arrow, datetime.date], frame_type: FrameType
    ) -> Union[datetime.date, datetime.datetime]:
        if isinstance(day, str):
            day = cls.date2int(arrow.get(day).date())
        elif isinstance(day, arrow.Arrow) or isinstance(day, datetime.datetime):
            day = cls.date2int(day.date())
        elif isinstance(day, datetime.date):
            day = cls.date2int(day)
        else:
            raise TypeError(f"{type(day)} is not supported.")

        if frame_type in cls.minute_level_frames:
            last_close_day = cls.day_frames[cls.day_frames <= day][-1]
            day = cls.int2date(last_close_day)
            return datetime.datetime(day.year, day.month, day.day, hour=15, minute=0)
        else:  # pragma: no cover
            raise ValueError(f"{frame_type} not supported")

minute_frames_floor(ticks, moment) classmethod

For minute-level frames, return the frame aligned downward to the given ticks, together with a day carry: the carry is -1 when the alignment has to fall back to the previous trade day, and 0 otherwise.

Examples:

>>> ticks = [600, 630, 660, 690, 810, 840, 870, 900]
>>> TimeFrame.minute_frames_floor(ticks, 545)
(900, -1)
>>> TimeFrame.minute_frames_floor(ticks, 600)
(600, 0)
>>> TimeFrame.minute_frames_floor(ticks, 605)
(600, 0)
>>> TimeFrame.minute_frames_floor(ticks, 899)
(870, 0)
>>> TimeFrame.minute_frames_floor(ticks, 900)
(900, 0)
>>> TimeFrame.minute_frames_floor(ticks, 905)
(900, 0)

Parameters: ticks (np.array or list, required), the frame ticks; moment (int, required), minutes of the day as an integer, e.g. 900 means 15:00.
Returns: Tuple[int, int], the floored moment and the day carry.

Source code in omicron/models/timeframe.py

    @classmethod
    def minute_frames_floor(cls, ticks, moment) -> Tuple[int, int]:
        if moment < ticks[0]:
            return ticks[-1], -1
        # 'right' is equivalent to ticks <= moment
        index = np.searchsorted(ticks, moment, side="right")
        return ticks[index - 1], 0
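A short illustrative sketch for the two helpers above; it assumes the `FrameType` import path shown and hand-fills `day_frames`, which `last_min_frame` needs.

    import datetime
    import numpy as np
    from coretypes import FrameType  # assumed import path
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array([20191231, 20200102, 20200103])

    # 2020-01-05 is a Sunday, so the last 30-minute bar belongs to Friday 01-03
    print(TimeFrame.last_min_frame(datetime.date(2020, 1, 5), FrameType.MIN30))
    # 2020-01-03 15:00:00

    # pure tick arithmetic: 9:05 (545 minutes) is before the first tick, so carry back one day
    ticks = TimeFrame.ticks[FrameType.MIN30]
    print(TimeFrame.minute_frames_floor(ticks, 545))   # (900, -1)
    print(TimeFrame.minute_frames_floor(ticks, 605))   # (600, 0)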
month_shift(start, offset) classmethod

Shift by month frames from the month containing `start`. The function first aligns `start` to a month frame, then shifts.

Examples:

>>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430])
>>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0)
datetime.date(2015, 1, 30)
>>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0)
datetime.date(2015, 2, 27)
>>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0)
datetime.date(2015, 2, 27)
>>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1)
datetime.date(2015, 3, 31)

Returns: datetime.date, the shifted date.

Source code in omicron/models/timeframe.py

    @classmethod
    def month_shift(cls, start: datetime.date, offset: int) -> datetime.date:
        # align `start` to a month frame, then shift by `offset` months
        start = cls.date2int(start)
        return cls.int2date(ext.shift(cls.month_frames, start, offset))

replace_date(dtm, dt) classmethod

Replace the date part of `dtm` with the date given by `dt`, keeping the time of day.

Examples:

>>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1, 1))
datetime.datetime(2019, 1, 1, 13, 49)

Parameters: dtm (datetime.datetime, required); dt (datetime.date, required).
Returns: datetime.datetime, the resulting time.

Source code in omicron/models/timeframe.py

    @classmethod
    def replace_date(cls, dtm: datetime.datetime, dt: datetime.date) -> datetime.datetime:
        return datetime.datetime(
            dt.year, dt.month, dt.day, dtm.hour, dtm.minute, dtm.second, dtm.microsecond
        )
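A brief illustrative sketch for the two methods above, reusing the doctest month frames; it is not part of the generated reference.

    import datetime
    import numpy as np
    import arrow
    from omicron.models.timeframe import TimeFrame

    TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430])

    # align-and-shift on month frames
    print(TimeFrame.month_shift(arrow.get("2015-3-1").date(), 0))   # 2015-02-27
    print(TimeFrame.month_shift(arrow.get("2015-3-1").date(), 1))   # 2015-03-31

    # swap only the date part of a datetime
    print(TimeFrame.replace_date(datetime.datetime(2020, 1, 1, 13, 49), datetime.date(2019, 1, 1)))
    # 2019-01-01 13:49:00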
resample_frames(trade_days, frame_type) classmethod

Resample the day-level trading calendar obtained from the quotes server to produce week, month, quarter and year frames.

Parameters: trade_days (Iterable, required), the day-level trading calendar; frame_type (FrameType, required), the target frame type.
Returns: List[int] per the type hint; the implementation returns the closing trade day of each week, month, quarter or year.

Source code in omicron/models/timeframe.py

    @classmethod
    def resample_frames(
        cls, trade_days: Iterable[datetime.date], frame_type: FrameType
    ) -> List[int]:
        # reduce the day calendar to the last trade day of each week/month/quarter/year
        if frame_type == FrameType.WEEK:
            weeks = []
            last = trade_days[0]
            for cur in trade_days:
                if cur.weekday() < last.weekday() or (cur - last).days >= 7:
                    weeks.append(last)
                last = cur
            if weeks[-1] < last:
                weeks.append(last)
            return weeks
        elif frame_type == FrameType.MONTH:
            months = []
            last = trade_days[0]
            for cur in trade_days:
                if cur.day < last.day:
                    months.append(last)
                last = cur
            months.append(last)
            return months
        elif frame_type == FrameType.QUARTER:
            quarters = []
            last = trade_days[0]
            for cur in trade_days:
                if last.month % 3 == 0:
                    if cur.month > last.month or cur.year > last.year:
                        quarters.append(last)
                last = cur
            quarters.append(last)
            return quarters
        elif frame_type == FrameType.YEAR:
            years = []
            last = trade_days[0]
            for cur in trade_days:
                if cur.year > last.year:
                    years.append(last)
                last = cur
            years.append(last)
            return years
        else:  # pragma: no cover
            raise ValueError(f"Unsupported FrameType: {frame_type}")
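An illustrative sketch of resampling a small day calendar into week frames (not from the reference); the trade-day list is hand-written and the `FrameType` import path is an assumption.

    import datetime
    from coretypes import FrameType  # assumed import path
    from omicron.models.timeframe import TimeFrame

    # two calendar weeks of trade days, as they would come from the quotes server
    trade_days = [
        datetime.date(2020, 1, 6), datetime.date(2020, 1, 7), datetime.date(2020, 1, 8),
        datetime.date(2020, 1, 9), datetime.date(2020, 1, 10),   # first week, ends Friday
        datetime.date(2020, 1, 13), datetime.date(2020, 1, 14),  # partial second week
    ]
    print(TimeFrame.resample_frames(trade_days, FrameType.WEEK))
    # [datetime.date(2020, 1, 10), datetime.date(2020, 1, 14)]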
service_degrade() classmethod

When no calendar exists in the cache, fall back to the calendar bundled with this release of omicron.

Note: the bundled calendar is very likely not up to date and may contain errors. For example, if the calendar shipped with this release was last updated on 2021-12-31, dates up to that point are accurate, but later dates may be wrong. This method should therefore only be called in special situations (such as testing) to obtain a degraded service.

Source code in omicron/models/timeframe.py

    @classmethod
    def service_degrade(cls):
        # load the calendar bundled with the omicron package
        _dir = os.path.dirname(__file__)
        file = os.path.join(_dir, "..", "config", "calendar.json")
        with open(file, "r") as f:
            data = json.load(f)
        for k, v in data.items():
            setattr(cls, k, np.array(v))

shift(moment, n, frame_type) classmethod

Shift `moment` by n positions of the given `frame_type`. A negative n shifts backward (earlier), a positive n shifts forward (later), and n == 0 moves to the nearest already-closed frame. If `moment` is not aligned to a `frame_type` boundary, it is aligned first.

See also: day_shift, week_shift, month_shift.

Examples:

>>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY)
datetime.date(2020, 1, 6)
>>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30)
datetime.datetime(2020, 1, 6, 11, 30)

Parameters: moment (Union[Arrow, datetime.date, datetime.datetime], required); n (int, required); frame_type (FrameType, required).
Returns: Union[datetime.date, datetime.datetime], the shifted frame.

Source code in omicron/models/timeframe.py

    @classmethod
    def shift(
        cls,
        moment: Union[Arrow, datetime.date, datetime.datetime],
        n: int,
        frame_type: FrameType,
    ) -> Union[datetime.date, datetime.datetime]:
        # dispatch to the frame-type specific shift
        if frame_type == FrameType.DAY:
            return cls.day_shift(moment, n)
        elif frame_type == FrameType.WEEK:
            return cls.week_shift(moment, n)
        elif frame_type == FrameType.MONTH:
            return cls.month_shift(moment, n)
        elif frame_type in [
            FrameType.MIN1,
            FrameType.MIN5,
            FrameType.MIN15,
            FrameType.MIN30,
            FrameType.MIN60,
        ]:
            tm = moment.hour * 60 + moment.minute
            new_tick_pos = cls.ticks[frame_type].index(tm) + n
            days = new_tick_pos // len(cls.ticks[frame_type])
            min_part = new_tick_pos % len(cls.ticks[frame_type])
            date_part = cls.day_shift(moment.date(), days)
            minutes = cls.ticks[frame_type][min_part]
            h, m = minutes // 60, minutes % 60
            return datetime.datetime(
                date_part.year, date_part.month, date_part.day, h, m, tzinfo=moment.tzinfo
            )
        else:  # pragma: no cover
            raise ValueError(f"{frame_type} is not supported.")
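A small illustrative sketch for shift (not part of the reference). The calendar arrays are filled by hand as in the doctests; for offline use such as tests, TimeFrame.service_degrade() can load the bundled calendar instead. The `FrameType` import path is an assumption.

    import datetime
    import numpy as np
    from coretypes import FrameType  # assumed import path
    from omicron.models.timeframe import TimeFrame

    TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107])

    # day-level shift: the next trade day after Friday 2020-01-03 is Monday 01-06
    print(TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY))        # 2020-01-06
    # minute-level shift: one 30-minute frame after 11:00 is 11:30 on the same day
    print(TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30))
    # 2020-01-06 11:30:00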
time2int(tm) classmethod

Convert a time to its integer representation. `tm` can be an Arrow, a datetime.datetime, or any other type that exposes year, month and similar attributes.

Examples:

>>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15))
202005011500

Parameters: tm (Union[datetime.datetime, Arrow], required).
Returns: int, the integer representation, e.g. 202005011500.

Source code in omicron/models/timeframe.py

    @classmethod
    def time2int(cls, tm: Union[datetime.datetime, Arrow]) -> int:
        return int(f"{tm.year:04}{tm.month:02}{tm.day:02}{tm.hour:02}{tm.minute:02}")
week_shift(start, offset) classmethod

Shift the given date forward or backward by week frames. See day_shift for the shift semantics.

Examples:

>>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123, 20200207, 20200214])
>>> moment = arrow.get('2020-1-21').date()
>>> TimeFrame.week_shift(moment, 1)
datetime.date(2020, 1, 23)
>>> TimeFrame.week_shift(moment, 0)
datetime.date(2020, 1, 17)
>>> TimeFrame.week_shift(moment, -1)
datetime.date(2020, 1, 10)

Returns: datetime.date, the shifted date.

Source code in omicron/models/timeframe.py

    @classmethod
    def week_shift(cls, start: datetime.date, offset: int) -> datetime.date:
        # align `start` to a week frame, then shift by `offset` weeks
        start = cls.date2int(start)
        return cls.int2date(ext.shift(cls.week_frames, start, offset))

omicron.models.timeframe.TimeFrame

Source code in omicron/models/timeframe.py

class TimeFrame:
    minute_level_frames = [
        FrameType.MIN1,
        FrameType.MIN5,
        FrameType.MIN15,
        FrameType.MIN30,
        FrameType.MIN60,
    ]
    day_level_frames = [
        FrameType.DAY,
        FrameType.WEEK,
        FrameType.MONTH,
        FrameType.QUARTER,
        FrameType.YEAR,
    ]

    ticks = {
        FrameType.MIN1: [i for i in itertools.chain(range(571, 691), range(781, 901))],
        FrameType.MIN5: [
            i for i in itertools.chain(range(575, 695, 5), range(785, 905, 5))
        ],
        FrameType.MIN15: [
            i for i in itertools.chain(range(585, 705, 15), range(795, 915, 15))
        ],
        FrameType.MIN30: [
            int(s[:2]) * 60 + int(s[2:])
            for s in ["1000", "1030", "1100", "1130", "1330", "1400", "1430", "1500"]
        ],
MIN60 : [ int ( s [: 2 ]) * 60 + int ( s [ 2 :]) for s in [ \"1030\" , \"1130\" , \"1400\" , \"1500\" ] ], } day_frames = None week_frames = None month_frames = None quarter_frames = None year_frames = None @classmethod def service_degrade ( cls ): \"\"\"\u5f53cache\u4e2d\u4e0d\u5b58\u5728\u65e5\u5386\u65f6\uff0c\u542f\u7528\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u3002 \u6ce8\u610f\uff1a\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u5f88\u53ef\u80fd\u4e0d\u662f\u6700\u65b0\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u5305\u542b\u9519\u8bef\u3002\u6bd4\u5982\uff0c\u5b58\u5728\u8fd9\u6837\u7684\u60c5\u51b5\uff0c\u5728\u672c\u7248\u672c\u7684omicron\u53d1\u884c\u65f6\uff0c\u65e5\u5386\u66f4\u65b0\u5230\u4e862021\u5e7412\u670831\u65e5\uff0c\u5728\u8fd9\u4e4b\u524d\u7684\u65e5\u5386\u90fd\u662f\u51c6\u786e\u7684\uff0c\u4f46\u5728\u6b64\u4e4b\u540e\u7684\u65e5\u5386\uff0c\u5219\u6709\u53ef\u80fd\u51fa\u73b0\u9519\u8bef\u3002\u56e0\u6b64\uff0c\u53ea\u5e94\u8be5\u5728\u7279\u6b8a\u7684\u60c5\u51b5\u4e0b\uff08\u6bd4\u5982\u6d4b\u8bd5\uff09\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u4ee5\u83b7\u5f97\u4e00\u4e2a\u964d\u7ea7\u7684\u670d\u52a1\u3002 \"\"\" _dir = os . path . dirname ( __file__ ) file = os . path . join ( _dir , \"..\" , \"config\" , \"calendar.json\" ) with open ( file , \"r\" ) as f : data = json . load ( f ) for k , v in data . items (): setattr ( cls , k , np . array ( v )) @classmethod async def _load_calendar ( cls ): \"\"\"\u4ece\u6570\u636e\u7f13\u5b58\u4e2d\u52a0\u8f7d\u66f4\u65b0\u65e5\u5386\"\"\" from omicron import cache names = [ \"day_frames\" , \"week_frames\" , \"month_frames\" , \"quarter_frames\" , \"year_frames\" , ] for name , frame_type in zip ( names , cls . day_level_frames ): key = f \"calendar: { frame_type . value } \" result = await cache . security . lrange ( key , 0 , - 1 ) if result is not None and len ( result ): frames = [ int ( x ) for x in result ] setattr ( cls , name , np . array ( frames )) else : # pragma: no cover raise DataNotReadyError ( f \"calendar data is not ready: { name } missed\" ) @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316\u65e5\u5386\"\"\" await cls . _load_calendar () @classmethod def int2time ( cls , tm : int ) -> datetime . datetime : \"\"\"\u5c06\u6574\u6570\u8868\u793a\u7684\u65f6\u95f4\u8f6c\u6362\u4e3a`datetime`\u7c7b\u578b\u8868\u793a examples: >>> TimeFrame.int2time(202005011500) datetime.datetime(2020, 5, 1, 15, 0) Args: tm: time in YYYYMMDDHHmm format Returns: \u8f6c\u6362\u540e\u7684\u65f6\u95f4 \"\"\" s = str ( tm ) # its 8 times faster than arrow.get() return datetime . datetime ( int ( s [: 4 ]), int ( s [ 4 : 6 ]), int ( s [ 6 : 8 ]), int ( s [ 8 : 10 ]), int ( s [ 10 : 12 ]) ) @classmethod def time2int ( cls , tm : Union [ datetime . datetime , Arrow ]) -> int : \"\"\"\u5c06\u65f6\u95f4\u7c7b\u578b\u8f6c\u6362\u4e3a\u6574\u6570\u7c7b\u578b tm\u53ef\u4ee5\u662fArrow\u7c7b\u578b\uff0c\u4e5f\u53ef\u4ee5\u662fdatetime.datetime\u6216\u8005\u4efb\u4f55\u5176\u5b83\u7c7b\u578b\uff0c\u53ea\u8981\u5b83\u6709year,month...\u7b49 \u5c5e\u6027 Examples: >>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15)) 202005011500 Args: tm: Returns: \u8f6c\u6362\u540e\u7684\u6574\u6570\uff0c\u6bd4\u59822020050115 \"\"\" return int ( f \" { tm . year : 04 }{ tm . month : 02 }{ tm . day : 02 }{ tm . hour : 02 }{ tm . minute : 02 } \" ) @classmethod def date2int ( cls , d : Union [ datetime . datetime , datetime . 
date , Arrow ]) -> int : \"\"\"\u5c06\u65e5\u671f\u8f6c\u6362\u4e3a\u6574\u6570\u8868\u793a \u5728zillionare\u4e2d\uff0c\u5982\u679c\u8981\u5bf9\u65f6\u95f4\u548c\u65e5\u671f\u8fdb\u884c\u6301\u4e45\u5316\u64cd\u4f5c\uff0c\u6211\u4eec\u4e00\u822c\u5c06\u5176\u8f6c\u6362\u4e3aint\u7c7b\u578b Examples: >>> TimeFrame.date2int(datetime.date(2020,5,1)) 20200501 Args: d: date Returns: \u65e5\u671f\u7684\u6574\u6570\u8868\u793a\uff0c\u6bd4\u598220220211 \"\"\" return int ( f \" { d . year : 04 }{ d . month : 02 }{ d . day : 02 } \" ) @classmethod def int2date ( cls , d : Union [ int , str ]) -> datetime . date : \"\"\"\u5c06\u6570\u5b57\u8868\u793a\u7684\u65e5\u671f\u8f6c\u6362\u6210\u4e3a\u65e5\u671f\u683c\u5f0f Examples: >>> TimeFrame.int2date(20200501) datetime.date(2020, 5, 1) Args: d: YYYYMMDD\u8868\u793a\u7684\u65e5\u671f Returns: \u8f6c\u6362\u540e\u7684\u65e5\u671f \"\"\" s = str ( d ) # it's 8 times faster than arrow.get return datetime . date ( int ( s [: 4 ]), int ( s [ 4 : 6 ]), int ( s [ 6 :])) @classmethod def day_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u5982\u679c n == 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff08\u5982\u679c\u662f\u975e\u4ea4\u6613\u65e5\uff0c\u5219\u8fd4\u56de\u521a\u7ed3\u675f\u7684\u4e00\u4e2a\u4ea4\u6613\u65e5\uff09 \u5982\u679c n > 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u540e\u7b2c n \u4e2a\u4ea4\u6613\u65e5 \u5982\u679c n < 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u524d\u7b2c n \u4e2a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219] >>> TimeFrame.day_shift(datetime.date(2019,12,13), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1) datetime.date(2019, 12, 16) >>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1) datetime.date(2019, 12, 16) Args: start: the origin day offset: days to shift, can be negative Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" # accelerated from 0.12 to 0.07, per 10000 loop, type conversion time included start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . day_frames , start , offset )) @classmethod def week_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u6309\u5468\u7ebf\u5e27\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u53c2\u8003 [omicron.models.timeframe.TimeFrame.day_shift][] Examples: >>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123,20200207, 20200214]) >>> moment = arrow.get('2020-1-21').date() >>> TimeFrame.week_shift(moment, 1) datetime.date(2020, 1, 23) >>> TimeFrame.week_shift(moment, 0) datetime.date(2020, 1, 17) >>> TimeFrame.week_shift(moment, -1) datetime.date(2020, 1, 10) Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . week_frames , start , offset )) @classmethod def month_shift ( cls , start : datetime . date , offset : int ) -> datetime . 
date : \"\"\"\u6c42`start`\u6240\u5728\u7684\u6708\u79fb\u4f4d\u540e\u7684frame \u672c\u51fd\u6570\u9996\u5148\u5c06`start`\u5bf9\u9f50\uff0c\u7136\u540e\u8fdb\u884c\u79fb\u4f4d\u3002 Examples: >>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430]) >>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0) datetime.date(2015, 1, 30) >>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0) datetime.date(2015, 2, 27) >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0) datetime.date(2015, 2, 27) >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1) datetime.date(2015, 3, 31) Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . month_frames , start , offset )) @classmethod def get_ticks ( cls , frame_type : FrameType ) -> Union [ List , np . array ]: \"\"\"\u53d6\u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \u5bf9\u5206\u949f\u7ebf\uff0c\u8fd4\u56de\u503c\u4ec5\u5305\u542b\u65f6\u95f4\uff0c\u4e0d\u5305\u542b\u65e5\u671f\uff08\u5747\u4e3a\u6574\u6570\u8868\u793a\uff09 Examples: >>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331]) >>> TimeFrame.get_ticks(FrameType.MONTH)[:3] array([20050131, 20050228, 20050331]) Args: frame_type : [description] Raises: ValueError: [description] Returns: \u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \"\"\" if frame_type in cls . minute_level_frames : return cls . ticks [ frame_type ] if frame_type == FrameType . DAY : return cls . day_frames elif frame_type == FrameType . WEEK : return cls . week_frames elif frame_type == FrameType . MONTH : return cls . month_frames else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported!\" ) @classmethod def shift ( cls , moment : Union [ Arrow , datetime . date , datetime . datetime ], n : int , frame_type : FrameType , ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u5c06\u6307\u5b9a\u7684moment\u79fb\u52a8N\u4e2a`frame_type`\u4f4d\u7f6e\u3002 \u5f53N\u4e3a\u8d1f\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u524d\u79fb\u52a8\uff1b\u5f53N\u4e3a\u6b63\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u540e\u79fb\u52a8\u3002\u5982\u679cn\u4e3a\u96f6\uff0c\u610f\u5473\u7740\u79fb\u52a8\u5230\u6700\u63a5\u8fd1 \u7684\u4e00\u4e2a\u5df2\u7ed3\u675f\u7684frame\u3002 \u5982\u679cmoment\u6ca1\u6709\u5bf9\u9f50\u5230frame_type\u5bf9\u5e94\u7684\u65f6\u95f4\uff0c\u5c06\u9996\u5148\u8fdb\u884c\u5bf9\u9f50\u3002 See also: - [day_shift][omicron.models.timeframe.TimeFrame.day_shift] - [week_shift][omicron.models.timeframe.TimeFrame.week_shift] - [month_shift][omicron.models.timeframe.TimeFrame.month_shift] Examples: >>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY) datetime.date(2020, 1, 6) >>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30) datetime.datetime(2020, 1, 6, 11, 30) Args: moment: n: frame_type: Returns: \u79fb\u4f4d\u540e\u7684Frame \"\"\" if frame_type == FrameType . DAY : return cls . day_shift ( moment , n ) elif frame_type == FrameType . WEEK : return cls . week_shift ( moment , n ) elif frame_type == FrameType . MONTH : return cls . month_shift ( moment , n ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm = moment . hour * 60 + moment . minute new_tick_pos = cls . ticks [ frame_type ] . index ( tm ) + n days = new_tick_pos // len ( cls . 
ticks [ frame_type ]) min_part = new_tick_pos % len ( cls . ticks [ frame_type ]) date_part = cls . day_shift ( moment . date (), days ) minutes = cls . ticks [ frame_type ][ min_part ] h , m = minutes // 60 , minutes % 60 return datetime . datetime ( date_part . year , date_part . month , date_part . day , h , m , tzinfo = moment . tzinfo , ) else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported.\" ) @classmethod def count_day_frames ( cls , start : Union [ datetime . date , Arrow ], end : Union [ datetime . date , Arrow ] ) -> int : \"\"\"calc trade days between start and end in close-to-close way. if start == end, this will returns 1. Both start/end will be aligned to open trade day before calculation. Examples: >>> start = datetime.date(2019, 12, 21) >>> end = datetime.date(2019, 12, 21) >>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225] >>> TimeFrame.count_day_frames(start, end) 1 >>> # non-trade days are removed >>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205] >>> start = datetime.date(2020, 1, 23) >>> end = datetime.date(2020, 2, 4) >>> TimeFrame.count_day_frames(start, end) 3 args: start: end: returns: count of days \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . day_frames , start , end )) @classmethod def count_week_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\" calc trade weeks between start and end in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1 for examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] args: start: end: returns: count of weeks \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . week_frames , start , end )) @classmethod def count_month_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade months between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start: end: Returns: months between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . month_frames , start , end )) @classmethod def count_quarter_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade quarters between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: quarters between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . quarter_frames , start , end )) @classmethod def count_year_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade years between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. 
For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: years between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . year_frames , start , end )) @classmethod def count_frames ( cls , start : Union [ datetime . date , datetime . datetime , Arrow ], end : Union [ datetime . date , datetime . datetime , Arrow ], frame_type , ) -> int : \"\"\"\u8ba1\u7b97start\u4e0eend\u4e4b\u95f4\u6709\u591a\u5c11\u4e2a\u5468\u671f\u4e3aframe_type\u7684frames See also: - [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] - [count_week_frames][omicron.models.timeframe.TimeFrame.count_week_frames] - [count_month_frames][omicron.models.timeframe.TimeFrame.count_month_frames] Args: start : start frame end : end frame frame_type : the type of frame Raises: ValueError: \u5982\u679cframe_type\u4e0d\u652f\u6301\uff0c\u5219\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Returns: \u4ecestart\u5230end\u7684\u5e27\u6570 \"\"\" if frame_type == FrameType . DAY : return cls . count_day_frames ( start , end ) elif frame_type == FrameType . WEEK : return cls . count_week_frames ( start , end ) elif frame_type == FrameType . MONTH : return cls . count_month_frames ( start , end ) elif frame_type == FrameType . QUARTER : return cls . count_quarter_frames ( start , end ) elif frame_type == FrameType . YEAR : return cls . count_year_frames ( start , end ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm_start = start . hour * 60 + start . minute tm_end = end . hour * 60 + end . minute days = cls . count_day_frames ( start . date (), end . date ()) - 1 tm_start_pos = cls . ticks [ frame_type ] . index ( tm_start ) tm_end_pos = cls . ticks [ frame_type ] . index ( tm_end ) min_bars = tm_end_pos - tm_start_pos + 1 return days * len ( cls . ticks [ frame_type ]) + min_bars else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported yet\" ) @classmethod def is_trade_day ( cls , dt : Union [ datetime . date , datetime . datetime , Arrow ]) -> bool : \"\"\"\u5224\u65ad`dt`\u662f\u5426\u4e3a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.is_trade_day(arrow.get('2020-1-1')) False Args: dt : Returns: bool \"\"\" return cls . date2int ( dt ) in cls . day_frames @classmethod def is_open_time ( cls , tm : Union [ datetime . datetime , Arrow ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u5904\u5728\u4ea4\u6613\u65f6\u95f4\u6bb5\u3002 \u4ea4\u6613\u65f6\u95f4\u6bb5\u662f\u6307\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u6bb5\u4e4b\u5916\u7684\u5f00\u76d8\u65f6\u95f4 Examples: >>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108]) >>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive) False >>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive) True Args: tm : [description]. Defaults to None. Returns: bool \"\"\" tm = tm or arrow . now () if not cls . is_trade_day ( tm ): return False tick = tm . hour * 60 + tm . minute return tick in cls . ticks [ FrameType . MIN1 ] @classmethod def is_opening_call_auction_time ( cls , tm : Union [ Arrow , datetime . datetime ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u5f00\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Args: tm : [description]. Defaults to None. 
Returns: bool \"\"\" if tm is None : tm = cls . now () if not cls . is_trade_day ( tm ): return False minutes = tm . hour * 60 + tm . minute return 9 * 60 + 15 < minutes <= 9 * 60 + 25 @classmethod def is_closing_call_auction_time ( cls , tm : Union [ datetime . datetime , Arrow ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Fixme: \u6b64\u5904\u5b9e\u73b0\u6709\u8bef\uff0c\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u5e94\u8be5\u8fd8\u5305\u542b\u4e0a\u5348\u6536\u76d8\u65f6\u95f4 Args: tm : [description]. Defaults to None. Returns: bool \"\"\" tm = tm or cls . now () if not cls . is_trade_day ( tm ): return False minutes = tm . hour * 60 + tm . minute return 15 * 60 - 3 <= minutes < 15 * 60 @classmethod def floor ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \u6bd4\u5982\uff0c\u5982\u679c`moment`\u4e3a10:37\uff0c\u5219\u5f53`frame_type`\u4e3a30\u5206\u949f\u65f6\uff0c\u5bf9\u5e94\u7684\u4e0a\u754c\u4e3a10:00 Examples: >>> # \u5982\u679cmoment\u4e3a\u65e5\u671f\uff0c\u5219\u5f53\u6210\u5df2\u6536\u76d8\u5904\u7406 >>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111]) >>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> # moment\u6307\u5b9a\u7684\u65f6\u95f4\u8fd8\u672a\u6536\u76d8\uff0cfloor\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 >>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY) datetime.date(2005, 1, 6) >>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 14, 30) >>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment: frame_type: Returns: `moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \"\"\" if frame_type in cls . minute_level_frames : tm , day_offset = cls . minute_frames_floor ( cls . ticks [ frame_type ], moment . hour * 60 + moment . minute ) h , m = tm // 60 , tm % 60 if cls . day_shift ( moment , 0 ) < moment . date () or day_offset == - 1 : h = 15 m = 0 new_day = cls . day_shift ( moment , day_offset ) else : new_day = moment . date () return datetime . datetime ( new_day . year , new_day . month , new_day . day , h , m ) if type ( moment ) == datetime . date : moment = datetime . datetime ( moment . year , moment . month , moment . day , 15 ) # \u5982\u679c\u662f\u4ea4\u6613\u65e5\uff0c\u4f46\u8fd8\u672a\u6536\u76d8 if ( cls . date2int ( moment ) in cls . day_frames and moment . hour * 60 + moment . minute < 900 ): moment = cls . day_shift ( moment , - 1 ) day = cls . date2int ( moment ) if frame_type == FrameType . DAY : arr = cls . day_frames elif frame_type == FrameType . WEEK : arr = cls . week_frames elif frame_type == FrameType . MONTH : arr = cls . month_frames else : # pragma: no cover raise ValueError ( f \"frame type { frame_type } not supported.\" ) floored = ext . floor ( arr , day ) return cls . int2date ( floored ) @classmethod def last_min_frame ( cls , day : Union [ str , Arrow , datetime . 
date ], frame_type : FrameType ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u83b7\u53d6`day`\u65e5\u5468\u671f\u4e3a`frame_type`\u7684\u7ed3\u675fframe\u3002 Example: >>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30) datetime.datetime(2020, 1, 3, 15, 0) Args: day: frame_type: Returns: `day`\u65e5\u5468\u671f\u4e3a`frame_type`\u7684\u7ed3\u675fframe \"\"\" if isinstance ( day , str ): day = cls . date2int ( arrow . get ( day ) . date ()) elif isinstance ( day , arrow . Arrow ) or isinstance ( day , datetime . datetime ): day = cls . date2int ( day . date ()) elif isinstance ( day , datetime . date ): day = cls . date2int ( day ) else : raise TypeError ( f \" { type ( day ) } is not supported.\" ) if frame_type in cls . minute_level_frames : last_close_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( last_close_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 15 , minute = 0 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" ) @classmethod def frame_len ( cls , frame_type : FrameType ) -> int : \"\"\"\u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \u5bf9\u65e5\u7ebf\u4ee5\u4e0a\u7ea7\u522b\u6ca1\u6709\u610f\u4e49\uff0c\u4f46\u4f1a\u8fd4\u56de240 Examples: >>> TimeFrame.frame_len(FrameType.MIN5) 5 Args: frame_type: Returns: \u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \"\"\" if frame_type == FrameType . MIN1 : return 1 elif frame_type == FrameType . MIN5 : return 5 elif frame_type == FrameType . MIN15 : return 15 elif frame_type == FrameType . MIN30 : return 30 elif frame_type == FrameType . MIN60 : return 60 else : return 240 @classmethod def first_min_frame ( cls , day : Union [ str , Arrow , Frame ], frame_type : FrameType ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u83b7\u53d6\u6307\u5b9a\u65e5\u671f\u7c7b\u578b\u4e3a`frame_type`\u7684`frame`\u3002 Examples: >>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103]) >>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1) datetime.datetime(2019, 12, 31, 9, 31) Args: day: which day? frame_type: which frame_type? Returns: `day`\u5f53\u65e5\u7684\u7b2c\u4e00\u5e27 \"\"\" day = cls . date2int ( arrow . get ( day ) . date ()) if frame_type == FrameType . MIN1 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 31 ) elif frame_type == FrameType . MIN5 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 35 ) elif frame_type == FrameType . MIN15 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 45 ) elif frame_type == FrameType . MIN30 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 10 ) elif frame_type == FrameType . MIN60 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . 
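`first_min_frame` and `last_min_frame` label a trading day's minute bars with fixed opening and closing times: the first bar depends on the bar length (09:31, 09:35, 09:45, 10:00 or 10:30), while the last minute-level bar is always 15:00. A minimal sketch of that mapping follows; it keys on the bar length in minutes instead of the library's `FrameType` enum, and it omits the calendar alignment the real methods apply to `day` first.

```python
import datetime

# first-bar label per bar length, as documented above; hypothetical plain-dict form
FIRST_BAR = {1: (9, 31), 5: (9, 35), 15: (9, 45), 30: (10, 0), 60: (10, 30)}

def first_min_frame(day: datetime.date, minutes: int) -> datetime.datetime:
    h, m = FIRST_BAR[minutes]
    return datetime.datetime(day.year, day.month, day.day, h, m)

def last_min_frame(day: datetime.date) -> datetime.datetime:
    return datetime.datetime(day.year, day.month, day.day, 15, 0)

print(first_min_frame(datetime.date(2019, 12, 31), 5))   # 2019-12-31 09:35:00
print(last_min_frame(datetime.date(2020, 1, 3)))          # 2020-01-03 15:00:00
```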
day , hour = 10 , minute = 30 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" ) @classmethod def get_frames ( cls , start : Frame , end : Frame , frame_type : FrameType ) -> List [ int ]: \"\"\"\u53d6[start, end]\u95f4\u6240\u6709\u7c7b\u578b\u4e3aframe_type\u7684frames \u8c03\u7528\u672c\u51fd\u6570\u524d\uff0c\u8bf7\u5148\u901a\u8fc7`floor`\u6216\u8005`ceiling`\u5c06\u65f6\u95f4\u5e27\u5bf9\u9f50\u5230`frame_type`\u7684\u8fb9\u754c\u503c Example: >>> start = arrow.get('2020-1-13 10:00').naive >>> end = arrow.get('2020-1-13 13:30').naive >>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113,20200114, 20200115, 20200116]) >>> TimeFrame.get_frames(start, end, FrameType.MIN30) [202001131000, 202001131030, 202001131100, 202001131130, 202001131330] Args: start: end: frame_type: Returns: frame list \"\"\" n = cls . count_frames ( start , end , frame_type ) return cls . get_frames_by_count ( end , n , frame_type ) @classmethod def get_frames_by_count ( cls , end : Arrow , n : int , frame_type : FrameType ) -> List [ int ]: \"\"\"\u53d6\u4ee5end\u4e3a\u7ed3\u675f\u70b9,\u5468\u671f\u4e3aframe_type\u7684n\u4e2aframe \u8c03\u7528\u524d\u8bf7\u5c06`end`\u5bf9\u9f50\u5230`frame_type`\u7684\u8fb9\u754c Examples: >>> end = arrow.get('2020-1-6 14:30').naive >>> TimeFrame.day_frames = np.array([20200102, 20200103,20200106, 20200107, 20200108, 20200109]) >>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30) [202001061400, 202001061430] Args: end: n: frame_type: Returns: frame list \"\"\" if frame_type == FrameType . DAY : end = cls . date2int ( end ) pos = np . searchsorted ( cls . day_frames , end , side = \"right\" ) return cls . day_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type == FrameType . WEEK : end = cls . date2int ( end ) pos = np . searchsorted ( cls . week_frames , end , side = \"right\" ) return cls . week_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type == FrameType . MONTH : end = cls . date2int ( end ) pos = np . searchsorted ( cls . month_frames , end , side = \"right\" ) return cls . month_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type in { FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , }: n_days = n // len ( cls . ticks [ frame_type ]) + 2 ticks = cls . ticks [ frame_type ] * n_days days = cls . get_frames_by_count ( end , n_days , FrameType . DAY ) days = np . repeat ( days , len ( cls . ticks [ frame_type ])) ticks = [ day . item () * 10000 + int ( tm / 60 ) * 100 + tm % 60 for day , tm in zip ( days , ticks ) ] # list index is much faster than ext.index_sorted when the arr is small pos = ticks . index ( cls . 
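In the minute-level branch of `get_frames_by_count`, each candidate frame is packed into a single `YYYYMMDDHHmm` integer with `day * 10000 + (tm // 60) * 100 + tm % 60`, the same representation that `time2int`/`int2time` work with. A tiny standalone check of that packing; `pack_frame` and `unpack_frame` are hypothetical helper names used only for this illustration.

```python
def pack_frame(day: int, tm: int) -> int:
    """Combine a YYYYMMDD day and minutes-since-midnight into a YYYYMMDDHHmm integer."""
    return day * 10000 + (tm // 60) * 100 + tm % 60

def unpack_frame(frame: int):
    """Split a YYYYMMDDHHmm integer back into (YYYYMMDD, minutes-since-midnight)."""
    s = str(frame)
    return int(s[:8]), int(s[8:10]) * 60 + int(s[10:12])

frame = pack_frame(20200113, 10 * 60 + 30)
print(frame)                 # 202001131030
print(unpack_frame(frame))   # (20200113, 630)
```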
time2int ( end )) + 1 return ticks [ max ( 0 , pos - n ) : pos ] else : # pragma: no cover raise ValueError ( f \" { frame_type } not support yet\" ) @classmethod def ceiling ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \u6bd4\u5982`moment`\u4e3a14:59\u5206\uff0c\u5982\u679c`frame_type`\u4e3a30\u5206\u949f\uff0c\u5219\u5b83\u7684\u4e0a\u754c\u5e94\u8be5\u4e3a15:00 Example: >>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107] >>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128] >>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.month_frames = [20050131, 20050228] >>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 15, 0) >>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment (datetime.datetime): [description] frame_type (FrameType): [description] Returns: `moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \"\"\" if frame_type in cls . day_level_frames and type ( moment ) == datetime . datetime : moment = moment . date () floor = cls . floor ( moment , frame_type ) if floor == moment : return moment elif floor > moment : return floor else : return cls . shift ( floor , 1 , frame_type ) @classmethod def combine_time ( cls , date : datetime . date , hour : int , minute : int = 0 , second : int = 0 , microsecond : int = 0 , ) -> datetime . datetime : \"\"\"\u7528`date`\u6307\u5b9a\u7684\u65e5\u671f\u4e0e`hour`, `minute`, `second`\u7b49\u53c2\u6570\u4e00\u8d77\u5408\u6210\u65b0\u7684\u65f6\u95f4 Examples: >>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30) datetime.datetime(2020, 1, 1, 14, 30) Args: date : [description] hour : [description] minute : [description]. Defaults to 0. second : [description]. Defaults to 0. microsecond : [description]. Defaults to 0. Returns: \u5408\u6210\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( date . year , date . month , date . day , hour , minute , second , microsecond ) @classmethod def replace_date ( cls , dtm : datetime . datetime , dt : datetime . date ) -> datetime . datetime : \"\"\"\u5c06`dtm`\u53d8\u91cf\u7684\u65e5\u671f\u66f4\u6362\u4e3a`dt`\u6307\u5b9a\u7684\u65e5\u671f Example: >>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1,1)) datetime.datetime(2019, 1, 1, 13, 49) Args: dtm (datetime.datetime): [description] dt (datetime.date): [description] Returns: \u53d8\u6362\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( dt . year , dt . month , dt . day , dtm . hour , dtm . minute , dtm . second , dtm . microsecond ) @classmethod def resample_frames ( cls , trade_days : Iterable [ datetime . 
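`ceiling` is built on `floor`: a moment already on a frame boundary is returned unchanged, a moment below the first calendar entry returns that entry, and anything else returns the frame after its floor. The sketch below restates that rule on the weekly calendar from the docstring examples; `shift_week` is a local stand-in for `ext.shift`, which is assumed here to move n positions along the sorted calendar.

```python
import numpy as np

# weekly calendar from the docstring examples (each entry is the last trade day of its week)
week_frames = np.array([20050107, 20050114, 20050121, 20050128])

def floor_week(day: int) -> int:
    pos = np.searchsorted(week_frames, day, side="right")
    return int(week_frames[max(pos - 1, 0)])

def shift_week(frame: int, n: int) -> int:
    pos = int(np.searchsorted(week_frames, frame))   # assumed ext.shift behaviour
    return int(week_frames[pos + n])

def ceiling_week(day: int) -> int:
    f = floor_week(day)
    if f == day:
        return day
    return f if f > day else shift_week(f, 1)

print(ceiling_week(20050104))   # 20050107, clamped to the first week frame
print(ceiling_week(20050110))   # 20050114, next boundary after the floor
print(ceiling_week(20050107))   # 20050107, already a boundary
```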
date ], frame_type : FrameType ) -> List [ int ]: \"\"\"\u5c06\u4ece\u884c\u60c5\u670d\u52a1\u5668\u83b7\u53d6\u7684\u4ea4\u6613\u65e5\u5386\u91cd\u91c7\u6837\uff0c\u751f\u6210\u5468\u5e27\u548c\u6708\u7ebf\u5e27 Args: trade_days (Iterable): [description] frame_type (FrameType): [description] Returns: List[int]: \u91cd\u91c7\u6837\u540e\u7684\u65e5\u671f\u5217\u8868\uff0c\u65e5\u671f\u7528\u6574\u6570\u8868\u793a \"\"\" if frame_type == FrameType . WEEK : weeks = [] last = trade_days [ 0 ] for cur in trade_days : if cur . weekday () < last . weekday () or ( cur - last ) . days >= 7 : weeks . append ( last ) last = cur if weeks [ - 1 ] < last : weeks . append ( last ) return weeks elif frame_type == FrameType . MONTH : months = [] last = trade_days [ 0 ] for cur in trade_days : if cur . day < last . day : months . append ( last ) last = cur months . append ( last ) return months elif frame_type == FrameType . QUARTER : quarters = [] last = trade_days [ 0 ] for cur in trade_days : if last . month % 3 == 0 : if cur . month > last . month or cur . year > last . year : quarters . append ( last ) last = cur quarters . append ( last ) return quarters elif frame_type == FrameType . YEAR : years = [] last = trade_days [ 0 ] for cur in trade_days : if cur . year > last . year : years . append ( last ) last = cur years . append ( last ) return years else : # pragma: no cover raise ValueError ( f \"Unsupported FrameType: { frame_type } \" ) @classmethod def minute_frames_floor ( cls , ticks , moment ) -> Tuple [ int , int ]: \"\"\" \u5bf9\u4e8e\u5206\u949f\u7ea7\u7684frame,\u8fd4\u56de\u5b83\u4eec\u4e0eframe\u523b\u5ea6\u5411\u4e0b\u5bf9\u9f50\u540e\u7684frame\u53ca\u65e5\u671f\u8fdb\u4f4d\u3002\u5982\u679c\u9700\u8981\u5bf9\u9f50\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613 \u65e5\uff0c\u5219\u8fdb\u4f4d\u4e3a-1\uff0c\u5426\u5219\u4e3a0. Examples: >>> ticks = [600, 630, 660, 690, 810, 840, 870, 900] >>> TimeFrame.minute_frames_floor(ticks, 545) (900, -1) >>> TimeFrame.minute_frames_floor(ticks, 600) (600, 0) >>> TimeFrame.minute_frames_floor(ticks, 605) (600, 0) >>> TimeFrame.minute_frames_floor(ticks, 899) (870, 0) >>> TimeFrame.minute_frames_floor(ticks, 900) (900, 0) >>> TimeFrame.minute_frames_floor(ticks, 905) (900, 0) Args: ticks (np.array or list): frames\u523b\u5ea6 moment (int): \u6574\u6570\u8868\u793a\u7684\u5206\u949f\u6570\uff0c\u6bd4\u5982900\u8868\u793a15\uff1a00 Returns: tuple, the first is the new moment, the second is carry-on \"\"\" if moment < ticks [ 0 ]: return ticks [ - 1 ], - 1 # \u2019right' \u76f8\u5f53\u4e8e ticks <= m index = np . searchsorted ( ticks , moment , side = \"right\" ) return ticks [ index - 1 ], 0 @classmethod async def save_calendar ( cls , trade_days ): # avoid circular import from omicron import cache for ft in [ FrameType . WEEK , FrameType . MONTH , FrameType . QUARTER , FrameType . YEAR ]: days = cls . resample_frames ( trade_days , ft ) frames = [ cls . date2int ( x ) for x in days ] key = f \"calendar: { ft . value } \" pl = cache . security . pipeline () pl . delete ( key ) pl . rpush ( key , * frames ) await pl . execute () frames = [ cls . date2int ( x ) for x in trade_days ] key = f \"calendar: { FrameType . DAY . value } \" pl = cache . security . pipeline () pl . delete ( key ) pl . rpush ( key , * frames ) await pl . execute () @classmethod async def remove_calendar ( cls ): # avoid circular import from omicron import cache for ft in cls . day_level_frames : key = f \"calendar: { ft . value } \" await cache . security . 
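`resample_frames` derives the week, month, quarter and year calendars from the daily one. For weeks, the rule shown above emits a boundary whenever the weekday number drops or at least seven calendar days have passed, so every week frame is the last trade day of its week. Below is a standalone copy of the weekly rule, runnable without the library, applied to a short hypothetical day list.

```python
import datetime

def resample_weeks(trade_days):
    """Last trade day of each week, following the rule shown above."""
    weeks = []
    last = trade_days[0]
    for cur in trade_days:
        # weekday dropped (a new week started) or a full week has elapsed
        if cur.weekday() < last.weekday() or (cur - last).days >= 7:
            weeks.append(last)
        last = cur
    if weeks[-1] < last:
        weeks.append(last)
    return weeks

days = [datetime.date(2020, 1, d) for d in (2, 3, 6, 7, 8, 9, 10, 13)]
print(resample_weeks(days))
# [datetime.date(2020, 1, 3), datetime.date(2020, 1, 10), datetime.date(2020, 1, 13)]
```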
delete ( key ) @classmethod def is_bar_closed ( cls , frame : Frame , ft : FrameType ) -> bool : \"\"\"\u5224\u65ad`frame`\u6240\u4ee3\u8868\u7684bar\u662f\u5426\u5df2\u7ecf\u6536\u76d8\uff08\u7ed3\u675f\uff09 \u5982\u679c\u662f\u65e5\u7ebf\uff0cframe\u4e0d\u4e3a\u5f53\u5929\uff0c\u5219\u8ba4\u4e3a\u5df2\u6536\u76d8\uff1b\u6216\u8005\u5f53\u524d\u65f6\u95f4\u5728\u6536\u76d8\u65f6\u95f4\u4e4b\u540e\uff0c\u4e5f\u8ba4\u4e3a\u5df2\u6536\u76d8\u3002 \u5982\u679c\u662f\u5176\u5b83\u5468\u671f\uff0c\u5219\u53ea\u6709\u5f53frame\u6b63\u597d\u5728\u8fb9\u754c\u4e0a\uff0c\u624d\u8ba4\u4e3a\u662f\u5df2\u6536\u76d8\u3002\u8fd9\u91cc\u6709\u4e00\u4e2a\u5047\u8bbe\uff1a\u6211\u4eec\u4e0d\u4f1a\u5728\u5176\u5b83\u5468\u671f\u4e0a\uff0c\u5224\u65ad\u672a\u6765\u7684\u67d0\u4e2aframe\u662f\u5426\u5df2\u7ecf\u6536\u76d8\u3002 Args: frame : bar\u6240\u5904\u7684\u65f6\u95f4\uff0c\u5fc5\u987b\u5c0f\u4e8e\u5f53\u524d\u65f6\u95f4 ft: bar\u6240\u4ee3\u8868\u7684\u5e27\u7c7b\u578b Returns: bool: \u662f\u5426\u5df2\u7ecf\u6536\u76d8 \"\"\" floor = cls . floor ( frame , ft ) now = arrow . now () if ft == FrameType . DAY : return floor < now . date () or now . hour >= 15 else : return floor == frame @classmethod def get_frame_scope ( cls , frame : Frame , ft : FrameType ) -> Tuple [ Frame , Frame ]: # todo: \u51fd\u6570\u7684\u901a\u7528\u6027\u4e0d\u8db3\uff0c\u4f3c\u4e4e\u5e94\u8be5\u653e\u5728\u5177\u4f53\u7684\u4e1a\u52a1\u7c7b\u4e2d\u3002\u5982\u679c\u662f\u901a\u7528\u578b\u7684\u51fd\u6570\uff0c\u53c2\u6570\u4e0d\u5e94\u8be5\u5c40\u9650\u4e8e\u5468\u548c\u6708\u3002 \"\"\"\u5bf9\u4e8e\u7ed9\u5b9a\u7684\u65f6\u95f4\uff0c\u53d6\u6240\u5728\u5468\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929\uff0c\u6240\u5728\u6708\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929 Args: frame : \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 ft: \u5e27\u7c7b\u578b\uff0c\u652f\u6301WEEK\u548cMONTH Returns: Tuple[Frame, Frame]: \u5468\u6216\u8005\u6708\u7684\u9996\u672b\u65e5\u671f\uff08date\u5bf9\u8c61\uff09 \"\"\" if frame is None : raise ValueError ( \"frame cannot be None\" ) if ft not in ( FrameType . WEEK , FrameType . MONTH ): raise ValueError ( f \"FrameType only supports WEEK and MONTH: { ft } \" ) if isinstance ( frame , datetime . datetime ): frame = frame . date () if frame < CALENDAR_START : raise ValueError ( f \"cannot be earlier than { CALENDAR_START } : { frame } \" ) # datetime.date(2021, 10, 8)\uff0c\u8fd9\u662f\u4e2a\u7279\u6b8a\u7684\u65e5\u671f if ft == FrameType . WEEK : if frame < datetime . date ( 2005 , 1 , 10 ): return datetime . date ( 2005 , 1 , 4 ), datetime . date ( 2005 , 1 , 7 ) if not cls . is_trade_day ( frame ): # \u975e\u4ea4\u6613\u65e5\u7684\u60c5\u51b5\uff0c\u76f4\u63a5\u56de\u9000\u4e00\u5929 week_day = cls . day_shift ( frame , 0 ) else : week_day = frame w1 = TimeFrame . floor ( week_day , FrameType . WEEK ) if w1 == week_day : # \u672c\u5468\u7684\u6700\u540e\u4e00\u4e2a\u4ea4\u6613\u65e5 week_end = w1 else : week_end = TimeFrame . week_shift ( week_day , 1 ) w0 = TimeFrame . week_shift ( week_end , - 1 ) week_start = TimeFrame . day_shift ( w0 , 1 ) return week_start , week_end if ft == FrameType . MONTH : if frame <= datetime . date ( 2005 , 1 , 31 ): return datetime . date ( 2005 , 1 , 4 ), datetime . date ( 2005 , 1 , 31 ) month_start = frame . replace ( day = 1 ) if not cls . is_trade_day ( month_start ): # \u975e\u4ea4\u6613\u65e5\u7684\u60c5\u51b5\uff0c\u76f4\u63a5\u52a01 month_start = cls . day_shift ( month_start , 1 ) month_end = TimeFrame . 
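For daily bars, `is_bar_closed` considers a bar finished when it belongs to an earlier trading day or when the wall clock has reached the 15:00 close; for any other frame type it only checks that the frame sits exactly on a boundary. A minimal sketch of the daily rule, with the current date and hour passed in explicitly instead of read from `arrow.now()`, and with the calendar floor applied by the real method omitted:

```python
import datetime

def daily_bar_closed(bar_day: datetime.date, today: datetime.date, hour: int) -> bool:
    """True once the daily bar labelled `bar_day` can no longer change."""
    return bar_day < today or hour >= 15

print(daily_bar_closed(datetime.date(2023, 3, 1), datetime.date(2023, 3, 2), 10))   # True
print(daily_bar_closed(datetime.date(2023, 3, 2), datetime.date(2023, 3, 2), 10))   # False
print(daily_bar_closed(datetime.date(2023, 3, 2), datetime.date(2023, 3, 2), 15))   # True
```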
month_shift ( month_start , 1 ) return month_start , month_end @classmethod def get_previous_trade_day ( cls , now : datetime . date ): \"\"\"\u83b7\u53d6\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \u5982\u679c\u5f53\u5929\u662f\u5468\u516d\u6216\u8005\u5468\u65e5\uff0c\u8fd4\u56de\u5468\u4e94\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e00\uff0c\u8fd4\u56de\u5468\u4e94\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e94\uff0c\u8fd4\u56de\u5468\u56db Args: now : \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 Returns: datetime.date: \u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \"\"\" if now == datetime . date ( 2005 , 1 , 4 ): return now if TimeFrame . is_trade_day ( now ): pre_trade_day = TimeFrame . day_shift ( now , - 1 ) else : pre_trade_day = TimeFrame . day_shift ( now , 0 ) return pre_trade_day","title":"TimeFrame"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.ceiling","text":"\u6c42 moment \u6240\u5728\u7c7b\u578b\u4e3a frame_type \u5468\u671f\u7684\u4e0a\u754c \u6bd4\u5982 moment \u4e3a14:59\u5206\uff0c\u5982\u679c frame_type \u4e3a30\u5206\u949f\uff0c\u5219\u5b83\u7684\u4e0a\u754c\u5e94\u8be5\u4e3a15:00 Examples: >>> TimeFrame . day_frames = [ 20050104 , 20050105 , 20050106 , 20050107 ] >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 7 ), FrameType . DAY ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . week_frames = [ 20050107 , 20050114 , 20050121 , 20050128 ] >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 4 ), FrameType . WEEK ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 7 ), FrameType . WEEK ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . month_frames = [ 20050131 , 20050228 ] >>> TimeFrame . ceiling ( datetime . date ( 2005 , 1 , 1 ), FrameType . MONTH ) datetime . date ( 2005 , 1 , 31 ) >>> TimeFrame . ceiling ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN30 ) datetime . datetime ( 2005 , 1 , 5 , 15 , 0 ) >>> TimeFrame . ceiling ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN1 ) datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ) >>> TimeFrame . ceiling ( arrow . get ( '2005-1-5 14:59' ) . naive , FrameType . MIN1 ) datetime . 
datetime ( 2005 , 1 , 5 , 14 , 59 ) Parameters: Name Type Description Default moment datetime.datetime [description] required frame_type FrameType [description] required Returns: Type Description Frame moment \u6240\u5728\u7c7b\u578b\u4e3a frame_type \u5468\u671f\u7684\u4e0a\u754c Source code in omicron/models/timeframe.py @classmethod def ceiling ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \u6bd4\u5982`moment`\u4e3a14:59\u5206\uff0c\u5982\u679c`frame_type`\u4e3a30\u5206\u949f\uff0c\u5219\u5b83\u7684\u4e0a\u754c\u5e94\u8be5\u4e3a15:00 Example: >>> TimeFrame.day_frames = [20050104, 20050105, 20050106, 20050107] >>> TimeFrame.ceiling(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> TimeFrame.week_frames = [20050107, 20050114, 20050121, 20050128] >>> TimeFrame.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.ceiling(datetime.date(2005,1,7), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.month_frames = [20050131, 20050228] >>> TimeFrame.ceiling(datetime.date(2005,1 ,1), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.ceiling(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 15, 0) >>> TimeFrame.ceiling(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.ceiling(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment (datetime.datetime): [description] frame_type (FrameType): [description] Returns: `moment`\u6240\u5728\u7c7b\u578b\u4e3a`frame_type`\u5468\u671f\u7684\u4e0a\u754c \"\"\" if frame_type in cls . day_level_frames and type ( moment ) == datetime . datetime : moment = moment . date () floor = cls . floor ( moment , frame_type ) if floor == moment : return moment elif floor > moment : return floor else : return cls . shift ( floor , 1 , frame_type )","title":"ceiling()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.combine_time","text":"\u7528 date \u6307\u5b9a\u7684\u65e5\u671f\u4e0e hour , minute , second \u7b49\u53c2\u6570\u4e00\u8d77\u5408\u6210\u65b0\u7684\u65f6\u95f4 Examples: >>> TimeFrame . combine_time ( datetime . date ( 2020 , 1 , 1 ), 14 , 30 ) datetime . datetime ( 2020 , 1 , 1 , 14 , 30 ) Parameters: Name Type Description Default date [description] required hour [description] required minute [description]. Defaults to 0. 0 second [description]. Defaults to 0. 0 microsecond [description]. Defaults to 0. 0 Returns: Type Description datetime.datetime \u5408\u6210\u540e\u7684\u65f6\u95f4 Source code in omicron/models/timeframe.py @classmethod def combine_time ( cls , date : datetime . date , hour : int , minute : int = 0 , second : int = 0 , microsecond : int = 0 , ) -> datetime . datetime : \"\"\"\u7528`date`\u6307\u5b9a\u7684\u65e5\u671f\u4e0e`hour`, `minute`, `second`\u7b49\u53c2\u6570\u4e00\u8d77\u5408\u6210\u65b0\u7684\u65f6\u95f4 Examples: >>> TimeFrame.combine_time(datetime.date(2020, 1, 1), 14, 30) datetime.datetime(2020, 1, 1, 14, 30) Args: date : [description] hour : [description] minute : [description]. Defaults to 0. second : [description]. Defaults to 0. microsecond : [description]. Defaults to 0. Returns: \u5408\u6210\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( date . year , date . month , date . 
day , hour , minute , second , microsecond )","title":"combine_time()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.count_day_frames","text":"calc trade days between start and end in close-to-close way. if start == end, this will returns 1. Both start/end will be aligned to open trade day before calculation. Examples: >>> start = datetime . date ( 2019 , 12 , 21 ) >>> end = datetime . date ( 2019 , 12 , 21 ) >>> TimeFrame . day_frames = [ 20191219 , 20191220 , 20191223 , 20191224 , 20191225 ] >>> TimeFrame . count_day_frames ( start , end ) 1 >>> # non-trade days are removed >>> TimeFrame . day_frames = [ 20200121 , 20200122 , 20200123 , 20200203 , 20200204 , 20200205 ] >>> start = datetime . date ( 2020 , 1 , 23 ) >>> end = datetime . date ( 2020 , 2 , 4 ) >>> TimeFrame . count_day_frames ( start , end ) 3 Parameters: Name Type Description Default start Union[datetime.date, Arrow] required end Union[datetime.date, Arrow] required Returns: Type Description int count of days Source code in omicron/models/timeframe.py @classmethod def count_day_frames ( cls , start : Union [ datetime . date , Arrow ], end : Union [ datetime . date , Arrow ] ) -> int : \"\"\"calc trade days between start and end in close-to-close way. if start == end, this will returns 1. Both start/end will be aligned to open trade day before calculation. Examples: >>> start = datetime.date(2019, 12, 21) >>> end = datetime.date(2019, 12, 21) >>> TimeFrame.day_frames = [20191219, 20191220, 20191223, 20191224, 20191225] >>> TimeFrame.count_day_frames(start, end) 1 >>> # non-trade days are removed >>> TimeFrame.day_frames = [20200121, 20200122, 20200123, 20200203, 20200204, 20200205] >>> start = datetime.date(2020, 1, 23) >>> end = datetime.date(2020, 2, 4) >>> TimeFrame.count_day_frames(start, end) 3 args: start: end: returns: count of days \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . day_frames , start , end ))","title":"count_day_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.count_frames","text":"\u8ba1\u7b97start\u4e0eend\u4e4b\u95f4\u6709\u591a\u5c11\u4e2a\u5468\u671f\u4e3aframe_type\u7684frames See also: count_day_frames count_week_frames count_month_frames Parameters: Name Type Description Default start start frame required end end frame required frame_type the type of frame required Exceptions: Type Description ValueError \u5982\u679cframe_type\u4e0d\u652f\u6301\uff0c\u5219\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Returns: Type Description int \u4ecestart\u5230end\u7684\u5e27\u6570 Source code in omicron/models/timeframe.py @classmethod def count_frames ( cls , start : Union [ datetime . date , datetime . datetime , Arrow ], end : Union [ datetime . date , datetime . datetime , Arrow ], frame_type , ) -> int : \"\"\"\u8ba1\u7b97start\u4e0eend\u4e4b\u95f4\u6709\u591a\u5c11\u4e2a\u5468\u671f\u4e3aframe_type\u7684frames See also: - [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] - [count_week_frames][omicron.models.timeframe.TimeFrame.count_week_frames] - [count_month_frames][omicron.models.timeframe.TimeFrame.count_month_frames] Args: start : start frame end : end frame frame_type : the type of frame Raises: ValueError: \u5982\u679cframe_type\u4e0d\u652f\u6301\uff0c\u5219\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38\u3002 Returns: \u4ecestart\u5230end\u7684\u5e27\u6570 \"\"\" if frame_type == FrameType . DAY : return cls . count_day_frames ( start , end ) elif frame_type == FrameType . 
WEEK : return cls . count_week_frames ( start , end ) elif frame_type == FrameType . MONTH : return cls . count_month_frames ( start , end ) elif frame_type == FrameType . QUARTER : return cls . count_quarter_frames ( start , end ) elif frame_type == FrameType . YEAR : return cls . count_year_frames ( start , end ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm_start = start . hour * 60 + start . minute tm_end = end . hour * 60 + end . minute days = cls . count_day_frames ( start . date (), end . date ()) - 1 tm_start_pos = cls . ticks [ frame_type ] . index ( tm_start ) tm_end_pos = cls . ticks [ frame_type ] . index ( tm_end ) min_bars = tm_end_pos - tm_start_pos + 1 return days * len ( cls . ticks [ frame_type ]) + min_bars else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported yet\" )","title":"count_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.count_month_frames","text":"calc trade months between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date required end datetime.date required Returns: Type Description int months between start and end Source code in omicron/models/timeframe.py @classmethod def count_month_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade months between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start: end: Returns: months between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . month_frames , start , end ))","title":"count_month_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.count_quarter_frames","text":"calc trade quarters between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date [description] required end datetime.date [description] required Returns: Type Description int quarters between start and end Source code in omicron/models/timeframe.py @classmethod def count_quarter_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade quarters between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: quarters between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . quarter_frames , start , end ))","title":"count_quarter_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.count_week_frames","text":"calc trade weeks between start and end in close-to-close way. 
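All of the `count_*_frames` helpers delegate to `ext.count_between` on the corresponding calendar. That helper is not reproduced in this section; the sketch below assumes it counts calendar entries in the closed interval after aligning both endpoints down to the calendar, which reproduces the documented examples, including the start == end case returning 1.

```python
import numpy as np

# day calendar from the count_day_frames docstring example
day_frames = np.array([20200121, 20200122, 20200123, 20200203, 20200204, 20200205])

def count_between(frames: np.ndarray, start: int, end: int) -> int:
    """Assumed ext.count_between behaviour: entries in [floor(start), floor(end)], inclusive."""
    lo = np.searchsorted(frames, start, side="right") - 1   # index of floor(start)
    hi = np.searchsorted(frames, end, side="right") - 1     # index of floor(end)
    return int(hi - lo + 1)

print(count_between(day_frames, 20200123, 20200204))   # 3, as in the docstring example
print(count_between(day_frames, 20200123, 20200123))   # 1, start == end
```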
Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1 for examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date required end datetime.date required Returns: Type Description int count of weeks Source code in omicron/models/timeframe.py @classmethod def count_week_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\" calc trade weeks between start and end in close-to-close way. Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1 for examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] args: start: end: returns: count of weeks \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . week_frames , start , end ))","title":"count_week_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.count_year_frames","text":"calc trade years between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to count_day_frames Parameters: Name Type Description Default start datetime.date [description] required end datetime.date [description] required Returns: Type Description int years between start and end Source code in omicron/models/timeframe.py @classmethod def count_year_frames ( cls , start : datetime . date , end : datetime . date ) -> int : \"\"\"calc trade years between start and end date in close-to-close way Both start and end will be aligned to open trade day before calculation. After that, if start == end, this will returns 1. For examples, please refer to [count_day_frames][omicron.models.timeframe.TimeFrame.count_day_frames] Args: start (datetime.date): [description] end (datetime.date): [description] Returns: years between start and end \"\"\" start = cls . date2int ( start ) end = cls . date2int ( end ) return int ( ext . count_between ( cls . year_frames , start , end ))","title":"count_year_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.date2int","text":"\u5c06\u65e5\u671f\u8f6c\u6362\u4e3a\u6574\u6570\u8868\u793a \u5728zillionare\u4e2d\uff0c\u5982\u679c\u8981\u5bf9\u65f6\u95f4\u548c\u65e5\u671f\u8fdb\u884c\u6301\u4e45\u5316\u64cd\u4f5c\uff0c\u6211\u4eec\u4e00\u822c\u5c06\u5176\u8f6c\u6362\u4e3aint\u7c7b\u578b Examples: >>> TimeFrame . date2int ( datetime . date ( 2020 , 5 , 1 )) 20200501 Parameters: Name Type Description Default d Union[datetime.datetime, datetime.date, Arrow] date required Returns: Type Description int \u65e5\u671f\u7684\u6574\u6570\u8868\u793a\uff0c\u6bd4\u598220220211 Source code in omicron/models/timeframe.py @classmethod def date2int ( cls , d : Union [ datetime . datetime , datetime . date , Arrow ]) -> int : \"\"\"\u5c06\u65e5\u671f\u8f6c\u6362\u4e3a\u6574\u6570\u8868\u793a \u5728zillionare\u4e2d\uff0c\u5982\u679c\u8981\u5bf9\u65f6\u95f4\u548c\u65e5\u671f\u8fdb\u884c\u6301\u4e45\u5316\u64cd\u4f5c\uff0c\u6211\u4eec\u4e00\u822c\u5c06\u5176\u8f6c\u6362\u4e3aint\u7c7b\u578b Examples: >>> TimeFrame.date2int(datetime.date(2020,5,1)) 20200501 Args: d: date Returns: \u65e5\u671f\u7684\u6574\u6570\u8868\u793a\uff0c\u6bd4\u598220220211 \"\"\" return int ( f \" { d . year : 04 }{ d . month : 02 }{ d . 
day : 02 } \" )","title":"date2int()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.day_shift","text":"\u5bf9\u6307\u5b9a\u65e5\u671f\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u5982\u679c n == 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff08\u5982\u679c\u662f\u975e\u4ea4\u6613\u65e5\uff0c\u5219\u8fd4\u56de\u521a\u7ed3\u675f\u7684\u4e00\u4e2a\u4ea4\u6613\u65e5\uff09 \u5982\u679c n > 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u540e\u7b2c n \u4e2a\u4ea4\u6613\u65e5 \u5982\u679c n < 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u524d\u7b2c n \u4e2a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame . day_frames = [ 20191212 , 20191213 , 20191216 , 20191217 , 20191218 , 20191219 ] >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 13 ), 0 ) datetime . date ( 2019 , 12 , 13 ) >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 15 ), 0 ) datetime . date ( 2019 , 12 , 13 ) >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 15 ), 1 ) datetime . date ( 2019 , 12 , 16 ) >>> TimeFrame . day_shift ( datetime . date ( 2019 , 12 , 13 ), 1 ) datetime . date ( 2019 , 12 , 16 ) Parameters: Name Type Description Default start datetime.date the origin day required offset int days to shift, can be negative required Returns: Type Description datetime.date \u79fb\u4f4d\u540e\u7684\u65e5\u671f Source code in omicron/models/timeframe.py @classmethod def day_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u5982\u679c n == 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\uff08\u5982\u679c\u662f\u975e\u4ea4\u6613\u65e5\uff0c\u5219\u8fd4\u56de\u521a\u7ed3\u675f\u7684\u4e00\u4e2a\u4ea4\u6613\u65e5\uff09 \u5982\u679c n > 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u540e\u7b2c n \u4e2a\u4ea4\u6613\u65e5 \u5982\u679c n < 0\uff0c\u5219\u8fd4\u56ded\u5bf9\u5e94\u7684\u4ea4\u6613\u65e5\u524d\u7b2c n \u4e2a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.day_frames = [20191212, 20191213, 20191216, 20191217,20191218, 20191219] >>> TimeFrame.day_shift(datetime.date(2019,12,13), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 0) datetime.date(2019, 12, 13) >>> TimeFrame.day_shift(datetime.date(2019, 12, 15), 1) datetime.date(2019, 12, 16) >>> TimeFrame.day_shift(datetime.date(2019, 12, 13), 1) datetime.date(2019, 12, 16) Args: start: the origin day offset: days to shift, can be negative Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" # accelerated from 0.12 to 0.07, per 10000 loop, type conversion time included start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . day_frames , start , offset ))","title":"day_shift()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.first_min_frame","text":"\u83b7\u53d6\u6307\u5b9a\u65e5\u671f\u7c7b\u578b\u4e3a frame_type \u7684 frame \u3002 Examples: >>> TimeFrame . day_frames = np . array ([ 20191227 , 20191230 , 20191231 , 20200102 , 20200103 ]) >>> TimeFrame . first_min_frame ( '2019-12-31' , FrameType . MIN1 ) datetime . datetime ( 2019 , 12 , 31 , 9 , 31 ) Parameters: Name Type Description Default day Union[str, Arrow, Frame] which day? required frame_type FrameType which frame_type? 
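`day_shift` first aligns the input to the calendar (offset 0 returns the given day if it is a trade day, otherwise the most recently closed one) and then moves `offset` positions along it. `ext.shift` is not shown in this section; the sketch below assumes it indexes the sorted integer calendar as follows, which reproduces the docstring examples.

```python
import numpy as np

day_frames = np.array([20191212, 20191213, 20191216, 20191217, 20191218, 20191219])

def day_shift(day: int, offset: int) -> int:
    """Assumed ext.shift behaviour on a sorted YYYYMMDD calendar."""
    pos = int(np.searchsorted(day_frames, day, side="right")) - 1   # floor position
    return int(day_frames[pos + offset])

print(day_shift(20191215, 0))   # 20191213, the Sunday falls back to Friday
print(day_shift(20191215, 1))   # 20191216
print(day_shift(20191213, 1))   # 20191216
```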
required Returns: Type Description Union[datetime.date, datetime.datetime] day \u5f53\u65e5\u7684\u7b2c\u4e00\u5e27 Source code in omicron/models/timeframe.py @classmethod def first_min_frame ( cls , day : Union [ str , Arrow , Frame ], frame_type : FrameType ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u83b7\u53d6\u6307\u5b9a\u65e5\u671f\u7c7b\u578b\u4e3a`frame_type`\u7684`frame`\u3002 Examples: >>> TimeFrame.day_frames = np.array([20191227, 20191230, 20191231, 20200102, 20200103]) >>> TimeFrame.first_min_frame('2019-12-31', FrameType.MIN1) datetime.datetime(2019, 12, 31, 9, 31) Args: day: which day? frame_type: which frame_type? Returns: `day`\u5f53\u65e5\u7684\u7b2c\u4e00\u5e27 \"\"\" day = cls . date2int ( arrow . get ( day ) . date ()) if frame_type == FrameType . MIN1 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 31 ) elif frame_type == FrameType . MIN5 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 35 ) elif frame_type == FrameType . MIN15 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 9 , minute = 45 ) elif frame_type == FrameType . MIN30 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 10 ) elif frame_type == FrameType . MIN60 : floor_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( floor_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 10 , minute = 30 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" )","title":"first_min_frame()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.floor","text":"\u6c42 moment \u5728\u6307\u5b9a\u7684 frame_type \u4e2d\u7684\u4e0b\u754c \u6bd4\u5982\uff0c\u5982\u679c moment \u4e3a10:37\uff0c\u5219\u5f53 frame_type \u4e3a30\u5206\u949f\u65f6\uff0c\u5bf9\u5e94\u7684\u4e0a\u754c\u4e3a10:00 Examples: >>> # \u5982\u679cmoment\u4e3a\u65e5\u671f\uff0c\u5219\u5f53\u6210\u5df2\u6536\u76d8\u5904\u7406 >>> TimeFrame . day_frames = np . array ([ 20050104 , 20050105 , 20050106 , 20050107 , 20050110 , 20050111 ]) >>> TimeFrame . floor ( datetime . date ( 2005 , 1 , 7 ), FrameType . DAY ) datetime . date ( 2005 , 1 , 7 ) >>> # moment\u6307\u5b9a\u7684\u65f6\u95f4\u8fd8\u672a\u6536\u76d8\uff0cfloor\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 >>> TimeFrame . floor ( datetime . datetime ( 2005 , 1 , 7 , 14 , 59 ), FrameType . DAY ) datetime . date ( 2005 , 1 , 6 ) >>> TimeFrame . floor ( datetime . date ( 2005 , 1 , 13 ), FrameType . WEEK ) datetime . date ( 2005 , 1 , 7 ) >>> TimeFrame . floor ( datetime . date ( 2005 , 2 , 27 ), FrameType . MONTH ) datetime . date ( 2005 , 1 , 31 ) >>> TimeFrame . floor ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN30 ) datetime . datetime ( 2005 , 1 , 5 , 14 , 30 ) >>> TimeFrame . floor ( datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ), FrameType . MIN1 ) datetime . datetime ( 2005 , 1 , 5 , 14 , 59 ) >>> TimeFrame . floor ( arrow . get ( '2005-1-5 14:59' ) . naive , FrameType . MIN1 ) datetime . 
datetime ( 2005 , 1 , 5 , 14 , 59 ) Parameters: Name Type Description Default moment Frame required frame_type FrameType required Returns: Type Description Frame moment \u5728\u6307\u5b9a\u7684 frame_type \u4e2d\u7684\u4e0b\u754c Source code in omicron/models/timeframe.py @classmethod def floor ( cls , moment : Frame , frame_type : FrameType ) -> Frame : \"\"\"\u6c42`moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \u6bd4\u5982\uff0c\u5982\u679c`moment`\u4e3a10:37\uff0c\u5219\u5f53`frame_type`\u4e3a30\u5206\u949f\u65f6\uff0c\u5bf9\u5e94\u7684\u4e0a\u754c\u4e3a10:00 Examples: >>> # \u5982\u679cmoment\u4e3a\u65e5\u671f\uff0c\u5219\u5f53\u6210\u5df2\u6536\u76d8\u5904\u7406 >>> TimeFrame.day_frames = np.array([20050104, 20050105, 20050106, 20050107, 20050110, 20050111]) >>> TimeFrame.floor(datetime.date(2005, 1, 7), FrameType.DAY) datetime.date(2005, 1, 7) >>> # moment\u6307\u5b9a\u7684\u65f6\u95f4\u8fd8\u672a\u6536\u76d8\uff0cfloor\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 >>> TimeFrame.floor(datetime.datetime(2005, 1, 7, 14, 59), FrameType.DAY) datetime.date(2005, 1, 6) >>> TimeFrame.floor(datetime.date(2005, 1, 13), FrameType.WEEK) datetime.date(2005, 1, 7) >>> TimeFrame.floor(datetime.date(2005,2, 27), FrameType.MONTH) datetime.date(2005, 1, 31) >>> TimeFrame.floor(datetime.datetime(2005,1,5,14,59), FrameType.MIN30) datetime.datetime(2005, 1, 5, 14, 30) >>> TimeFrame.floor(datetime.datetime(2005, 1, 5, 14, 59), FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) >>> TimeFrame.floor(arrow.get('2005-1-5 14:59').naive, FrameType.MIN1) datetime.datetime(2005, 1, 5, 14, 59) Args: moment: frame_type: Returns: `moment`\u5728\u6307\u5b9a\u7684`frame_type`\u4e2d\u7684\u4e0b\u754c \"\"\" if frame_type in cls . minute_level_frames : tm , day_offset = cls . minute_frames_floor ( cls . ticks [ frame_type ], moment . hour * 60 + moment . minute ) h , m = tm // 60 , tm % 60 if cls . day_shift ( moment , 0 ) < moment . date () or day_offset == - 1 : h = 15 m = 0 new_day = cls . day_shift ( moment , day_offset ) else : new_day = moment . date () return datetime . datetime ( new_day . year , new_day . month , new_day . day , h , m ) if type ( moment ) == datetime . date : moment = datetime . datetime ( moment . year , moment . month , moment . day , 15 ) # \u5982\u679c\u662f\u4ea4\u6613\u65e5\uff0c\u4f46\u8fd8\u672a\u6536\u76d8 if ( cls . date2int ( moment ) in cls . day_frames and moment . hour * 60 + moment . minute < 900 ): moment = cls . day_shift ( moment , - 1 ) day = cls . date2int ( moment ) if frame_type == FrameType . DAY : arr = cls . day_frames elif frame_type == FrameType . WEEK : arr = cls . week_frames elif frame_type == FrameType . MONTH : arr = cls . month_frames else : # pragma: no cover raise ValueError ( f \"frame type { frame_type } not supported.\" ) floored = ext . floor ( arr , day ) return cls . int2date ( floored )","title":"floor()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.frame_len","text":"\u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \u5bf9\u65e5\u7ebf\u4ee5\u4e0a\u7ea7\u522b\u6ca1\u6709\u610f\u4e49\uff0c\u4f46\u4f1a\u8fd4\u56de240 Examples: >>> TimeFrame . frame_len ( FrameType . 
MIN5 ) 5 Parameters: Name Type Description Default frame_type FrameType required Returns: Type Description int \u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 Source code in omicron/models/timeframe.py @classmethod def frame_len ( cls , frame_type : FrameType ) -> int : \"\"\"\u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \u5bf9\u65e5\u7ebf\u4ee5\u4e0a\u7ea7\u522b\u6ca1\u6709\u610f\u4e49\uff0c\u4f46\u4f1a\u8fd4\u56de240 Examples: >>> TimeFrame.frame_len(FrameType.MIN5) 5 Args: frame_type: Returns: \u8fd4\u56de\u4ee5\u5206\u949f\u4e3a\u5355\u4f4d\u7684frame\u957f\u5ea6\u3002 \"\"\" if frame_type == FrameType . MIN1 : return 1 elif frame_type == FrameType . MIN5 : return 5 elif frame_type == FrameType . MIN15 : return 15 elif frame_type == FrameType . MIN30 : return 30 elif frame_type == FrameType . MIN60 : return 60 else : return 240","title":"frame_len()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.get_frame_scope","text":"\u5bf9\u4e8e\u7ed9\u5b9a\u7684\u65f6\u95f4\uff0c\u53d6\u6240\u5728\u5468\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929\uff0c\u6240\u5728\u6708\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929 Parameters: Name Type Description Default frame \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 required ft FrameType \u5e27\u7c7b\u578b\uff0c\u652f\u6301WEEK\u548cMONTH required Returns: Type Description Tuple[Frame, Frame] \u5468\u6216\u8005\u6708\u7684\u9996\u672b\u65e5\u671f\uff08date\u5bf9\u8c61\uff09 Source code in omicron/models/timeframe.py @classmethod def get_frame_scope ( cls , frame : Frame , ft : FrameType ) -> Tuple [ Frame , Frame ]: # todo: \u51fd\u6570\u7684\u901a\u7528\u6027\u4e0d\u8db3\uff0c\u4f3c\u4e4e\u5e94\u8be5\u653e\u5728\u5177\u4f53\u7684\u4e1a\u52a1\u7c7b\u4e2d\u3002\u5982\u679c\u662f\u901a\u7528\u578b\u7684\u51fd\u6570\uff0c\u53c2\u6570\u4e0d\u5e94\u8be5\u5c40\u9650\u4e8e\u5468\u548c\u6708\u3002 \"\"\"\u5bf9\u4e8e\u7ed9\u5b9a\u7684\u65f6\u95f4\uff0c\u53d6\u6240\u5728\u5468\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929\uff0c\u6240\u5728\u6708\u7684\u7b2c\u4e00\u5929\u548c\u6700\u540e\u4e00\u5929 Args: frame : \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 ft: \u5e27\u7c7b\u578b\uff0c\u652f\u6301WEEK\u548cMONTH Returns: Tuple[Frame, Frame]: \u5468\u6216\u8005\u6708\u7684\u9996\u672b\u65e5\u671f\uff08date\u5bf9\u8c61\uff09 \"\"\" if frame is None : raise ValueError ( \"frame cannot be None\" ) if ft not in ( FrameType . WEEK , FrameType . MONTH ): raise ValueError ( f \"FrameType only supports WEEK and MONTH: { ft } \" ) if isinstance ( frame , datetime . datetime ): frame = frame . date () if frame < CALENDAR_START : raise ValueError ( f \"cannot be earlier than { CALENDAR_START } : { frame } \" ) # datetime.date(2021, 10, 8)\uff0c\u8fd9\u662f\u4e2a\u7279\u6b8a\u7684\u65e5\u671f if ft == FrameType . WEEK : if frame < datetime . date ( 2005 , 1 , 10 ): return datetime . date ( 2005 , 1 , 4 ), datetime . date ( 2005 , 1 , 7 ) if not cls . is_trade_day ( frame ): # \u975e\u4ea4\u6613\u65e5\u7684\u60c5\u51b5\uff0c\u76f4\u63a5\u56de\u9000\u4e00\u5929 week_day = cls . day_shift ( frame , 0 ) else : week_day = frame w1 = TimeFrame . floor ( week_day , FrameType . WEEK ) if w1 == week_day : # \u672c\u5468\u7684\u6700\u540e\u4e00\u4e2a\u4ea4\u6613\u65e5 week_end = w1 else : week_end = TimeFrame . week_shift ( week_day , 1 ) w0 = TimeFrame . week_shift ( week_end , - 1 ) week_start = TimeFrame . 
day_shift ( w0 , 1 ) return week_start , week_end if ft == FrameType . MONTH : if frame <= datetime . date ( 2005 , 1 , 31 ): return datetime . date ( 2005 , 1 , 4 ), datetime . date ( 2005 , 1 , 31 ) month_start = frame . replace ( day = 1 ) if not cls . is_trade_day ( month_start ): # \u975e\u4ea4\u6613\u65e5\u7684\u60c5\u51b5\uff0c\u76f4\u63a5\u52a01 month_start = cls . day_shift ( month_start , 1 ) month_end = TimeFrame . month_shift ( month_start , 1 ) return month_start , month_end","title":"get_frame_scope()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.get_frames","text":"\u53d6[start, end]\u95f4\u6240\u6709\u7c7b\u578b\u4e3aframe_type\u7684frames \u8c03\u7528\u672c\u51fd\u6570\u524d\uff0c\u8bf7\u5148\u901a\u8fc7 floor \u6216\u8005 ceiling \u5c06\u65f6\u95f4\u5e27\u5bf9\u9f50\u5230 frame_type \u7684\u8fb9\u754c\u503c Examples: >>> start = arrow . get ( '2020-1-13 10:00' ) . naive >>> end = arrow . get ( '2020-1-13 13:30' ) . naive >>> TimeFrame . day_frames = np . array ([ 20200109 , 20200110 , 20200113 , 20200114 , 20200115 , 20200116 ]) >>> TimeFrame . get_frames ( start , end , FrameType . MIN30 ) [ 202001131000 , 202001131030 , 202001131100 , 202001131130 , 202001131330 ] Parameters: Name Type Description Default start Frame required end Frame required frame_type FrameType required Returns: Type Description List[int] frame list Source code in omicron/models/timeframe.py @classmethod def get_frames ( cls , start : Frame , end : Frame , frame_type : FrameType ) -> List [ int ]: \"\"\"\u53d6[start, end]\u95f4\u6240\u6709\u7c7b\u578b\u4e3aframe_type\u7684frames \u8c03\u7528\u672c\u51fd\u6570\u524d\uff0c\u8bf7\u5148\u901a\u8fc7`floor`\u6216\u8005`ceiling`\u5c06\u65f6\u95f4\u5e27\u5bf9\u9f50\u5230`frame_type`\u7684\u8fb9\u754c\u503c Example: >>> start = arrow.get('2020-1-13 10:00').naive >>> end = arrow.get('2020-1-13 13:30').naive >>> TimeFrame.day_frames = np.array([20200109, 20200110, 20200113,20200114, 20200115, 20200116]) >>> TimeFrame.get_frames(start, end, FrameType.MIN30) [202001131000, 202001131030, 202001131100, 202001131130, 202001131330] Args: start: end: frame_type: Returns: frame list \"\"\" n = cls . count_frames ( start , end , frame_type ) return cls . get_frames_by_count ( end , n , frame_type )","title":"get_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.get_frames_by_count","text":"\u53d6\u4ee5end\u4e3a\u7ed3\u675f\u70b9,\u5468\u671f\u4e3aframe_type\u7684n\u4e2aframe \u8c03\u7528\u524d\u8bf7\u5c06 end \u5bf9\u9f50\u5230 frame_type \u7684\u8fb9\u754c Examples: >>> end = arrow . get ( '2020-1-6 14:30' ) . naive >>> TimeFrame . day_frames = np . array ([ 20200102 , 20200103 , 20200106 , 20200107 , 20200108 , 20200109 ]) >>> TimeFrame . get_frames_by_count ( end , 2 , FrameType . 
MIN30 ) [ 202001061400 , 202001061430 ] Parameters: Name Type Description Default end Arrow required n int required frame_type FrameType required Returns: Type Description List[int] frame list Source code in omicron/models/timeframe.py @classmethod def get_frames_by_count ( cls , end : Arrow , n : int , frame_type : FrameType ) -> List [ int ]: \"\"\"\u53d6\u4ee5end\u4e3a\u7ed3\u675f\u70b9,\u5468\u671f\u4e3aframe_type\u7684n\u4e2aframe \u8c03\u7528\u524d\u8bf7\u5c06`end`\u5bf9\u9f50\u5230`frame_type`\u7684\u8fb9\u754c Examples: >>> end = arrow.get('2020-1-6 14:30').naive >>> TimeFrame.day_frames = np.array([20200102, 20200103,20200106, 20200107, 20200108, 20200109]) >>> TimeFrame.get_frames_by_count(end, 2, FrameType.MIN30) [202001061400, 202001061430] Args: end: n: frame_type: Returns: frame list \"\"\" if frame_type == FrameType . DAY : end = cls . date2int ( end ) pos = np . searchsorted ( cls . day_frames , end , side = \"right\" ) return cls . day_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type == FrameType . WEEK : end = cls . date2int ( end ) pos = np . searchsorted ( cls . week_frames , end , side = \"right\" ) return cls . week_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type == FrameType . MONTH : end = cls . date2int ( end ) pos = np . searchsorted ( cls . month_frames , end , side = \"right\" ) return cls . month_frames [ max ( 0 , pos - n ) : pos ] . tolist () elif frame_type in { FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , }: n_days = n // len ( cls . ticks [ frame_type ]) + 2 ticks = cls . ticks [ frame_type ] * n_days days = cls . get_frames_by_count ( end , n_days , FrameType . DAY ) days = np . repeat ( days , len ( cls . ticks [ frame_type ])) ticks = [ day . item () * 10000 + int ( tm / 60 ) * 100 + tm % 60 for day , tm in zip ( days , ticks ) ] # list index is much faster than ext.index_sorted when the arr is small pos = ticks . index ( cls . time2int ( end )) + 1 return ticks [ max ( 0 , pos - n ) : pos ] else : # pragma: no cover raise ValueError ( f \" { frame_type } not support yet\" )","title":"get_frames_by_count()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.get_previous_trade_day","text":"\u83b7\u53d6\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \u5982\u679c\u5f53\u5929\u662f\u5468\u516d\u6216\u8005\u5468\u65e5\uff0c\u8fd4\u56de\u5468\u4e94\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e00\uff0c\u8fd4\u56de\u5468\u4e94\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e94\uff0c\u8fd4\u56de\u5468\u56db Parameters: Name Type Description Default now \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 required Returns: Type Description datetime.date \u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 Source code in omicron/models/timeframe.py @classmethod def get_previous_trade_day ( cls , now : datetime . date ): \"\"\"\u83b7\u53d6\u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \u5982\u679c\u5f53\u5929\u662f\u5468\u516d\u6216\u8005\u5468\u65e5\uff0c\u8fd4\u56de\u5468\u4e94\uff08\u4ea4\u6613\u65e5\uff09\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e00\uff0c\u8fd4\u56de\u5468\u4e94\uff0c\u5982\u679c\u5f53\u5929\u662f\u5468\u4e94\uff0c\u8fd4\u56de\u5468\u56db Args: now : \u6307\u5b9a\u7684\u65e5\u671f\uff0cdate\u5bf9\u8c61 Returns: datetime.date: \u4e0a\u4e00\u4e2a\u4ea4\u6613\u65e5 \"\"\" if now == datetime . date ( 2005 , 1 , 4 ): return now if TimeFrame . is_trade_day ( now ): pre_trade_day = TimeFrame . day_shift ( now , - 1 ) else : pre_trade_day = TimeFrame . 
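For day, week and month frames, `get_frames_by_count` is a single `searchsorted` plus a slice: the insertion point of `end` with `side="right"` marks the exclusive upper index, and the last `n` entries before it are returned, clipped at the start of the calendar. A minimal standalone check against the calendar used in the docstring example; `last_n_day_frames` is a local stand-in name.

```python
import numpy as np

day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108, 20200109])

def last_n_day_frames(end: int, n: int):
    pos = int(np.searchsorted(day_frames, end, side="right"))
    return day_frames[max(0, pos - n): pos].tolist()

print(last_n_day_frames(20200106, 2))    # [20200103, 20200106]
print(last_n_day_frames(20200106, 10))   # [20200102, 20200103, 20200106] (clipped)
```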
day_shift ( now , 0 ) return pre_trade_day","title":"get_previous_trade_day()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.get_ticks","text":"\u53d6\u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \u5bf9\u5206\u949f\u7ebf\uff0c\u8fd4\u56de\u503c\u4ec5\u5305\u542b\u65f6\u95f4\uff0c\u4e0d\u5305\u542b\u65e5\u671f\uff08\u5747\u4e3a\u6574\u6570\u8868\u793a\uff09 Examples: >>> TimeFrame . month_frames = np . array ([ 20050131 , 20050228 , 20050331 ]) >>> TimeFrame . get_ticks ( FrameType . MONTH )[: 3 ] array ([ 20050131 , 20050228 , 20050331 ]) Parameters: Name Type Description Default frame_type [description] required Exceptions: Type Description ValueError [description] Returns: Type Description Union[List, np.array] \u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame Source code in omicron/models/timeframe.py @classmethod def get_ticks ( cls , frame_type : FrameType ) -> Union [ List , np . array ]: \"\"\"\u53d6\u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \u5bf9\u5206\u949f\u7ebf\uff0c\u8fd4\u56de\u503c\u4ec5\u5305\u542b\u65f6\u95f4\uff0c\u4e0d\u5305\u542b\u65e5\u671f\uff08\u5747\u4e3a\u6574\u6570\u8868\u793a\uff09 Examples: >>> TimeFrame.month_frames = np.array([20050131, 20050228, 20050331]) >>> TimeFrame.get_ticks(FrameType.MONTH)[:3] array([20050131, 20050228, 20050331]) Args: frame_type : [description] Raises: ValueError: [description] Returns: \u6708\u7ebf\u3001\u5468\u7ebf\u3001\u65e5\u7ebf\u53ca\u5404\u5206\u949f\u7ebf\u5bf9\u5e94\u7684frame \"\"\" if frame_type in cls . minute_level_frames : return cls . ticks [ frame_type ] if frame_type == FrameType . DAY : return cls . day_frames elif frame_type == FrameType . WEEK : return cls . week_frames elif frame_type == FrameType . MONTH : return cls . month_frames else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported!\" )","title":"get_ticks()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.init","text":"\u521d\u59cb\u5316\u65e5\u5386 Source code in omicron/models/timeframe.py @classmethod async def init ( cls ): \"\"\"\u521d\u59cb\u5316\u65e5\u5386\"\"\" await cls . _load_calendar ()","title":"init()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.int2date","text":"\u5c06\u6570\u5b57\u8868\u793a\u7684\u65e5\u671f\u8f6c\u6362\u6210\u4e3a\u65e5\u671f\u683c\u5f0f Examples: >>> TimeFrame . int2date ( 20200501 ) datetime . date ( 2020 , 5 , 1 ) Parameters: Name Type Description Default d Union[int, str] YYYYMMDD\u8868\u793a\u7684\u65e5\u671f required Returns: Type Description datetime.date \u8f6c\u6362\u540e\u7684\u65e5\u671f Source code in omicron/models/timeframe.py @classmethod def int2date ( cls , d : Union [ int , str ]) -> datetime . date : \"\"\"\u5c06\u6570\u5b57\u8868\u793a\u7684\u65e5\u671f\u8f6c\u6362\u6210\u4e3a\u65e5\u671f\u683c\u5f0f Examples: >>> TimeFrame.int2date(20200501) datetime.date(2020, 5, 1) Args: d: YYYYMMDD\u8868\u793a\u7684\u65e5\u671f Returns: \u8f6c\u6362\u540e\u7684\u65e5\u671f \"\"\" s = str ( d ) # it's 8 times faster than arrow.get return datetime . date ( int ( s [: 4 ]), int ( s [ 4 : 6 ]), int ( s [ 6 :]))","title":"int2date()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.int2time","text":"\u5c06\u6574\u6570\u8868\u793a\u7684\u65f6\u95f4\u8f6c\u6362\u4e3a datetime \u7c7b\u578b\u8868\u793a Examples: >>> TimeFrame . int2time ( 202005011500 ) datetime . 
datetime ( 2020 , 5 , 1 , 15 , 0 ) Parameters: Name Type Description Default tm int time in YYYYMMDDHHmm format required Returns: Type Description datetime.datetime \u8f6c\u6362\u540e\u7684\u65f6\u95f4 Source code in omicron/models/timeframe.py @classmethod def int2time ( cls , tm : int ) -> datetime . datetime : \"\"\"\u5c06\u6574\u6570\u8868\u793a\u7684\u65f6\u95f4\u8f6c\u6362\u4e3a`datetime`\u7c7b\u578b\u8868\u793a examples: >>> TimeFrame.int2time(202005011500) datetime.datetime(2020, 5, 1, 15, 0) Args: tm: time in YYYYMMDDHHmm format Returns: \u8f6c\u6362\u540e\u7684\u65f6\u95f4 \"\"\" s = str ( tm ) # its 8 times faster than arrow.get() return datetime . datetime ( int ( s [: 4 ]), int ( s [ 4 : 6 ]), int ( s [ 6 : 8 ]), int ( s [ 8 : 10 ]), int ( s [ 10 : 12 ]) )","title":"int2time()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.is_bar_closed","text":"\u5224\u65ad frame \u6240\u4ee3\u8868\u7684bar\u662f\u5426\u5df2\u7ecf\u6536\u76d8\uff08\u7ed3\u675f\uff09 \u5982\u679c\u662f\u65e5\u7ebf\uff0cframe\u4e0d\u4e3a\u5f53\u5929\uff0c\u5219\u8ba4\u4e3a\u5df2\u6536\u76d8\uff1b\u6216\u8005\u5f53\u524d\u65f6\u95f4\u5728\u6536\u76d8\u65f6\u95f4\u4e4b\u540e\uff0c\u4e5f\u8ba4\u4e3a\u5df2\u6536\u76d8\u3002 \u5982\u679c\u662f\u5176\u5b83\u5468\u671f\uff0c\u5219\u53ea\u6709\u5f53frame\u6b63\u597d\u5728\u8fb9\u754c\u4e0a\uff0c\u624d\u8ba4\u4e3a\u662f\u5df2\u6536\u76d8\u3002\u8fd9\u91cc\u6709\u4e00\u4e2a\u5047\u8bbe\uff1a\u6211\u4eec\u4e0d\u4f1a\u5728\u5176\u5b83\u5468\u671f\u4e0a\uff0c\u5224\u65ad\u672a\u6765\u7684\u67d0\u4e2aframe\u662f\u5426\u5df2\u7ecf\u6536\u76d8\u3002 Parameters: Name Type Description Default frame bar\u6240\u5904\u7684\u65f6\u95f4\uff0c\u5fc5\u987b\u5c0f\u4e8e\u5f53\u524d\u65f6\u95f4 required ft FrameType bar\u6240\u4ee3\u8868\u7684\u5e27\u7c7b\u578b required Returns: Type Description bool \u662f\u5426\u5df2\u7ecf\u6536\u76d8 Source code in omicron/models/timeframe.py @classmethod def is_bar_closed ( cls , frame : Frame , ft : FrameType ) -> bool : \"\"\"\u5224\u65ad`frame`\u6240\u4ee3\u8868\u7684bar\u662f\u5426\u5df2\u7ecf\u6536\u76d8\uff08\u7ed3\u675f\uff09 \u5982\u679c\u662f\u65e5\u7ebf\uff0cframe\u4e0d\u4e3a\u5f53\u5929\uff0c\u5219\u8ba4\u4e3a\u5df2\u6536\u76d8\uff1b\u6216\u8005\u5f53\u524d\u65f6\u95f4\u5728\u6536\u76d8\u65f6\u95f4\u4e4b\u540e\uff0c\u4e5f\u8ba4\u4e3a\u5df2\u6536\u76d8\u3002 \u5982\u679c\u662f\u5176\u5b83\u5468\u671f\uff0c\u5219\u53ea\u6709\u5f53frame\u6b63\u597d\u5728\u8fb9\u754c\u4e0a\uff0c\u624d\u8ba4\u4e3a\u662f\u5df2\u6536\u76d8\u3002\u8fd9\u91cc\u6709\u4e00\u4e2a\u5047\u8bbe\uff1a\u6211\u4eec\u4e0d\u4f1a\u5728\u5176\u5b83\u5468\u671f\u4e0a\uff0c\u5224\u65ad\u672a\u6765\u7684\u67d0\u4e2aframe\u662f\u5426\u5df2\u7ecf\u6536\u76d8\u3002 Args: frame : bar\u6240\u5904\u7684\u65f6\u95f4\uff0c\u5fc5\u987b\u5c0f\u4e8e\u5f53\u524d\u65f6\u95f4 ft: bar\u6240\u4ee3\u8868\u7684\u5e27\u7c7b\u578b Returns: bool: \u662f\u5426\u5df2\u7ecf\u6536\u76d8 \"\"\" floor = cls . floor ( frame , ft ) now = arrow . now () if ft == FrameType . DAY : return floor < now . date () or now . 
hour >= 15 else : return floor == frame","title":"is_bar_closed()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.is_closing_call_auction_time","text":"\u5224\u65ad tm \u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Fixme \u6b64\u5904\u5b9e\u73b0\u6709\u8bef\uff0c\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u5e94\u8be5\u8fd8\u5305\u542b\u4e0a\u5348\u6536\u76d8\u65f6\u95f4 Parameters: Name Type Description Default tm [description]. Defaults to None. None Returns: Type Description bool bool Source code in omicron/models/timeframe.py @classmethod def is_closing_call_auction_time ( cls , tm : Union [ datetime . datetime , Arrow ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Fixme: \u6b64\u5904\u5b9e\u73b0\u6709\u8bef\uff0c\u6536\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u5e94\u8be5\u8fd8\u5305\u542b\u4e0a\u5348\u6536\u76d8\u65f6\u95f4 Args: tm : [description]. Defaults to None. Returns: bool \"\"\" tm = tm or cls . now () if not cls . is_trade_day ( tm ): return False minutes = tm . hour * 60 + tm . minute return 15 * 60 - 3 <= minutes < 15 * 60","title":"is_closing_call_auction_time()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.is_open_time","text":"\u5224\u65ad tm \u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u5904\u5728\u4ea4\u6613\u65f6\u95f4\u6bb5\u3002 \u4ea4\u6613\u65f6\u95f4\u6bb5\u662f\u6307\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u6bb5\u4e4b\u5916\u7684\u5f00\u76d8\u65f6\u95f4 Examples: >>> TimeFrame . day_frames = np . array ([ 20200102 , 20200103 , 20200106 , 20200107 , 20200108 ]) >>> TimeFrame . is_open_time ( arrow . get ( '2020-1-1 14:59' ) . naive ) False >>> TimeFrame . is_open_time ( arrow . get ( '2020-1-3 14:59' ) . naive ) True Parameters: Name Type Description Default tm [description]. Defaults to None. None Returns: Type Description bool bool Source code in omicron/models/timeframe.py @classmethod def is_open_time ( cls , tm : Union [ datetime . datetime , Arrow ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u5904\u5728\u4ea4\u6613\u65f6\u95f4\u6bb5\u3002 \u4ea4\u6613\u65f6\u95f4\u6bb5\u662f\u6307\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4\u6bb5\u4e4b\u5916\u7684\u5f00\u76d8\u65f6\u95f4 Examples: >>> TimeFrame.day_frames = np.array([20200102, 20200103, 20200106, 20200107, 20200108]) >>> TimeFrame.is_open_time(arrow.get('2020-1-1 14:59').naive) False >>> TimeFrame.is_open_time(arrow.get('2020-1-3 14:59').naive) True Args: tm : [description]. Defaults to None. Returns: bool \"\"\" tm = tm or arrow . now () if not cls . is_trade_day ( tm ): return False tick = tm . hour * 60 + tm . minute return tick in cls . ticks [ FrameType . MIN1 ]","title":"is_open_time()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.is_opening_call_auction_time","text":"\u5224\u65ad tm \u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u5f00\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Parameters: Name Type Description Default tm [description]. Defaults to None. None Returns: Type Description bool bool Source code in omicron/models/timeframe.py @classmethod def is_opening_call_auction_time ( cls , tm : Union [ Arrow , datetime . datetime ] = None ) -> bool : \"\"\"\u5224\u65ad`tm`\u6307\u5b9a\u7684\u65f6\u95f4\u662f\u5426\u4e3a\u5f00\u76d8\u96c6\u5408\u7ade\u4ef7\u65f6\u95f4 Args: tm : [description]. Defaults to None. Returns: bool \"\"\" if tm is None : tm = cls . 
now () if not cls . is_trade_day ( tm ): return False minutes = tm . hour * 60 + tm . minute return 9 * 60 + 15 < minutes <= 9 * 60 + 25","title":"is_opening_call_auction_time()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.is_trade_day","text":"\u5224\u65ad dt \u662f\u5426\u4e3a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame . is_trade_day ( arrow . get ( '2020-1-1' )) False Parameters: Name Type Description Default dt required Returns: Type Description bool bool Source code in omicron/models/timeframe.py @classmethod def is_trade_day ( cls , dt : Union [ datetime . date , datetime . datetime , Arrow ]) -> bool : \"\"\"\u5224\u65ad`dt`\u662f\u5426\u4e3a\u4ea4\u6613\u65e5 Examples: >>> TimeFrame.is_trade_day(arrow.get('2020-1-1')) False Args: dt : Returns: bool \"\"\" return cls . date2int ( dt ) in cls . day_frames","title":"is_trade_day()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.last_min_frame","text":"\u83b7\u53d6 day \u65e5\u5468\u671f\u4e3a frame_type \u7684\u7ed3\u675fframe\u3002 Examples: >>> TimeFrame . last_min_frame ( arrow . get ( '2020-1-5' ) . date (), FrameType . MIN30 ) datetime . datetime ( 2020 , 1 , 3 , 15 , 0 ) Parameters: Name Type Description Default day Union[str, Arrow, datetime.date] required frame_type FrameType required Returns: Type Description Union[datetime.date, datetime.datetime] day \u65e5\u5468\u671f\u4e3a frame_type \u7684\u7ed3\u675fframe Source code in omicron/models/timeframe.py @classmethod def last_min_frame ( cls , day : Union [ str , Arrow , datetime . date ], frame_type : FrameType ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u83b7\u53d6`day`\u65e5\u5468\u671f\u4e3a`frame_type`\u7684\u7ed3\u675fframe\u3002 Example: >>> TimeFrame.last_min_frame(arrow.get('2020-1-5').date(), FrameType.MIN30) datetime.datetime(2020, 1, 3, 15, 0) Args: day: frame_type: Returns: `day`\u65e5\u5468\u671f\u4e3a`frame_type`\u7684\u7ed3\u675fframe \"\"\" if isinstance ( day , str ): day = cls . date2int ( arrow . get ( day ) . date ()) elif isinstance ( day , arrow . Arrow ) or isinstance ( day , datetime . datetime ): day = cls . date2int ( day . date ()) elif isinstance ( day , datetime . date ): day = cls . date2int ( day ) else : raise TypeError ( f \" { type ( day ) } is not supported.\" ) if frame_type in cls . minute_level_frames : last_close_day = cls . day_frames [ cls . day_frames <= day ][ - 1 ] day = cls . int2date ( last_close_day ) return datetime . datetime ( day . year , day . month , day . day , hour = 15 , minute = 0 ) else : # pragma: no cover raise ValueError ( f \" { frame_type } not supported\" )","title":"last_min_frame()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.minute_frames_floor","text":"\u5bf9\u4e8e\u5206\u949f\u7ea7\u7684frame,\u8fd4\u56de\u5b83\u4eec\u4e0eframe\u523b\u5ea6\u5411\u4e0b\u5bf9\u9f50\u540e\u7684frame\u53ca\u65e5\u671f\u8fdb\u4f4d\u3002\u5982\u679c\u9700\u8981\u5bf9\u9f50\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613 \u65e5\uff0c\u5219\u8fdb\u4f4d\u4e3a-1\uff0c\u5426\u5219\u4e3a0. Examples: >>> ticks = [ 600 , 630 , 660 , 690 , 810 , 840 , 870 , 900 ] >>> TimeFrame . minute_frames_floor ( ticks , 545 ) ( 900 , - 1 ) >>> TimeFrame . minute_frames_floor ( ticks , 600 ) ( 600 , 0 ) >>> TimeFrame . minute_frames_floor ( ticks , 605 ) ( 600 , 0 ) >>> TimeFrame . minute_frames_floor ( ticks , 899 ) ( 870 , 0 ) >>> TimeFrame . minute_frames_floor ( ticks , 900 ) ( 900 , 0 ) >>> TimeFrame . 
minute_frames_floor ( ticks , 905 ) ( 900 , 0 ) Parameters: Name Type Description Default ticks np.array or list frames\u523b\u5ea6 required moment int \u6574\u6570\u8868\u793a\u7684\u5206\u949f\u6570\uff0c\u6bd4\u5982900\u8868\u793a15\uff1a00 required Returns: Type Description Tuple[int, int] tuple, the first is the new moment, the second is carry-on Source code in omicron/models/timeframe.py @classmethod def minute_frames_floor ( cls , ticks , moment ) -> Tuple [ int , int ]: \"\"\" \u5bf9\u4e8e\u5206\u949f\u7ea7\u7684frame,\u8fd4\u56de\u5b83\u4eec\u4e0eframe\u523b\u5ea6\u5411\u4e0b\u5bf9\u9f50\u540e\u7684frame\u53ca\u65e5\u671f\u8fdb\u4f4d\u3002\u5982\u679c\u9700\u8981\u5bf9\u9f50\u5230\u4e0a\u4e00\u4e2a\u4ea4\u6613 \u65e5\uff0c\u5219\u8fdb\u4f4d\u4e3a-1\uff0c\u5426\u5219\u4e3a0. Examples: >>> ticks = [600, 630, 660, 690, 810, 840, 870, 900] >>> TimeFrame.minute_frames_floor(ticks, 545) (900, -1) >>> TimeFrame.minute_frames_floor(ticks, 600) (600, 0) >>> TimeFrame.minute_frames_floor(ticks, 605) (600, 0) >>> TimeFrame.minute_frames_floor(ticks, 899) (870, 0) >>> TimeFrame.minute_frames_floor(ticks, 900) (900, 0) >>> TimeFrame.minute_frames_floor(ticks, 905) (900, 0) Args: ticks (np.array or list): frames\u523b\u5ea6 moment (int): \u6574\u6570\u8868\u793a\u7684\u5206\u949f\u6570\uff0c\u6bd4\u5982900\u8868\u793a15\uff1a00 Returns: tuple, the first is the new moment, the second is carry-on \"\"\" if moment < ticks [ 0 ]: return ticks [ - 1 ], - 1 # \u2019right' \u76f8\u5f53\u4e8e ticks <= m index = np . searchsorted ( ticks , moment , side = \"right\" ) return ticks [ index - 1 ], 0","title":"minute_frames_floor()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.month_shift","text":"\u6c42 start \u6240\u5728\u7684\u6708\u79fb\u4f4d\u540e\u7684frame \u672c\u51fd\u6570\u9996\u5148\u5c06 start \u5bf9\u9f50\uff0c\u7136\u540e\u8fdb\u884c\u79fb\u4f4d\u3002 Examples: >>> TimeFrame . month_frames = np . array ([ 20150130 , 20150227 , 20150331 , 20150430 ]) >>> TimeFrame . month_shift ( arrow . get ( '2015-2-26' ) . date (), 0 ) datetime . date ( 2015 , 1 , 30 ) >>> TimeFrame . month_shift ( arrow . get ( '2015-2-27' ) . date (), 0 ) datetime . date ( 2015 , 2 , 27 ) >>> TimeFrame . month_shift ( arrow . get ( '2015-3-1' ) . date (), 0 ) datetime . date ( 2015 , 2 , 27 ) >>> TimeFrame . month_shift ( arrow . get ( '2015-3-1' ) . date (), 1 ) datetime . date ( 2015 , 3 , 31 ) Returns: Type Description datetime.date \u79fb\u4f4d\u540e\u7684\u65e5\u671f Source code in omicron/models/timeframe.py @classmethod def month_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u6c42`start`\u6240\u5728\u7684\u6708\u79fb\u4f4d\u540e\u7684frame \u672c\u51fd\u6570\u9996\u5148\u5c06`start`\u5bf9\u9f50\uff0c\u7136\u540e\u8fdb\u884c\u79fb\u4f4d\u3002 Examples: >>> TimeFrame.month_frames = np.array([20150130, 20150227, 20150331, 20150430]) >>> TimeFrame.month_shift(arrow.get('2015-2-26').date(), 0) datetime.date(2015, 1, 30) >>> TimeFrame.month_shift(arrow.get('2015-2-27').date(), 0) datetime.date(2015, 2, 27) >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 0) datetime.date(2015, 2, 27) >>> TimeFrame.month_shift(arrow.get('2015-3-1').date(), 1) datetime.date(2015, 3, 31) Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . 
month_frames , start , offset ))","title":"month_shift()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.replace_date","text":"\u5c06 dtm \u53d8\u91cf\u7684\u65e5\u671f\u66f4\u6362\u4e3a dt \u6307\u5b9a\u7684\u65e5\u671f Examples: >>> TimeFrame . replace_date ( arrow . get ( '2020-1-1 13:49' ) . datetime , datetime . date ( 2019 , 1 , 1 )) datetime . datetime ( 2019 , 1 , 1 , 13 , 49 ) Parameters: Name Type Description Default dtm datetime.datetime [description] required dt datetime.date [description] required Returns: Type Description datetime.datetime \u53d8\u6362\u540e\u7684\u65f6\u95f4 Source code in omicron/models/timeframe.py @classmethod def replace_date ( cls , dtm : datetime . datetime , dt : datetime . date ) -> datetime . datetime : \"\"\"\u5c06`dtm`\u53d8\u91cf\u7684\u65e5\u671f\u66f4\u6362\u4e3a`dt`\u6307\u5b9a\u7684\u65e5\u671f Example: >>> TimeFrame.replace_date(arrow.get('2020-1-1 13:49').datetime, datetime.date(2019, 1,1)) datetime.datetime(2019, 1, 1, 13, 49) Args: dtm (datetime.datetime): [description] dt (datetime.date): [description] Returns: \u53d8\u6362\u540e\u7684\u65f6\u95f4 \"\"\" return datetime . datetime ( dt . year , dt . month , dt . day , dtm . hour , dtm . minute , dtm . second , dtm . microsecond )","title":"replace_date()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.resample_frames","text":"\u5c06\u4ece\u884c\u60c5\u670d\u52a1\u5668\u83b7\u53d6\u7684\u4ea4\u6613\u65e5\u5386\u91cd\u91c7\u6837\uff0c\u751f\u6210\u5468\u5e27\u548c\u6708\u7ebf\u5e27 Parameters: Name Type Description Default trade_days Iterable [description] required frame_type FrameType [description] required Returns: Type Description List[int] \u91cd\u91c7\u6837\u540e\u7684\u65e5\u671f\u5217\u8868\uff0c\u65e5\u671f\u7528\u6574\u6570\u8868\u793a Source code in omicron/models/timeframe.py @classmethod def resample_frames ( cls , trade_days : Iterable [ datetime . date ], frame_type : FrameType ) -> List [ int ]: \"\"\"\u5c06\u4ece\u884c\u60c5\u670d\u52a1\u5668\u83b7\u53d6\u7684\u4ea4\u6613\u65e5\u5386\u91cd\u91c7\u6837\uff0c\u751f\u6210\u5468\u5e27\u548c\u6708\u7ebf\u5e27 Args: trade_days (Iterable): [description] frame_type (FrameType): [description] Returns: List[int]: \u91cd\u91c7\u6837\u540e\u7684\u65e5\u671f\u5217\u8868\uff0c\u65e5\u671f\u7528\u6574\u6570\u8868\u793a \"\"\" if frame_type == FrameType . WEEK : weeks = [] last = trade_days [ 0 ] for cur in trade_days : if cur . weekday () < last . weekday () or ( cur - last ) . days >= 7 : weeks . append ( last ) last = cur if weeks [ - 1 ] < last : weeks . append ( last ) return weeks elif frame_type == FrameType . MONTH : months = [] last = trade_days [ 0 ] for cur in trade_days : if cur . day < last . day : months . append ( last ) last = cur months . append ( last ) return months elif frame_type == FrameType . QUARTER : quarters = [] last = trade_days [ 0 ] for cur in trade_days : if last . month % 3 == 0 : if cur . month > last . month or cur . year > last . year : quarters . append ( last ) last = cur quarters . append ( last ) return quarters elif frame_type == FrameType . YEAR : years = [] last = trade_days [ 0 ] for cur in trade_days : if cur . year > last . year : years . append ( last ) last = cur years . 
append ( last ) return years else : # pragma: no cover raise ValueError ( f \"Unsupported FrameType: { frame_type } \" )","title":"resample_frames()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.service_degrade","text":"\u5f53cache\u4e2d\u4e0d\u5b58\u5728\u65e5\u5386\u65f6\uff0c\u542f\u7528\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u3002 \u6ce8\u610f\uff1a\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u5f88\u53ef\u80fd\u4e0d\u662f\u6700\u65b0\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u5305\u542b\u9519\u8bef\u3002\u6bd4\u5982\uff0c\u5b58\u5728\u8fd9\u6837\u7684\u60c5\u51b5\uff0c\u5728\u672c\u7248\u672c\u7684omicron\u53d1\u884c\u65f6\uff0c\u65e5\u5386\u66f4\u65b0\u5230\u4e862021\u5e7412\u670831\u65e5\uff0c\u5728\u8fd9\u4e4b\u524d\u7684\u65e5\u5386\u90fd\u662f\u51c6\u786e\u7684\uff0c\u4f46\u5728\u6b64\u4e4b\u540e\u7684\u65e5\u5386\uff0c\u5219\u6709\u53ef\u80fd\u51fa\u73b0\u9519\u8bef\u3002\u56e0\u6b64\uff0c\u53ea\u5e94\u8be5\u5728\u7279\u6b8a\u7684\u60c5\u51b5\u4e0b\uff08\u6bd4\u5982\u6d4b\u8bd5\uff09\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u4ee5\u83b7\u5f97\u4e00\u4e2a\u964d\u7ea7\u7684\u670d\u52a1\u3002 Source code in omicron/models/timeframe.py @classmethod def service_degrade ( cls ): \"\"\"\u5f53cache\u4e2d\u4e0d\u5b58\u5728\u65e5\u5386\u65f6\uff0c\u542f\u7528\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u3002 \u6ce8\u610f\uff1a\u968fomicron\u7248\u672c\u4e00\u8d77\u53d1\u884c\u65f6\u81ea\u5e26\u7684\u65e5\u5386\u5f88\u53ef\u80fd\u4e0d\u662f\u6700\u65b0\u7684\uff0c\u5e76\u4e14\u53ef\u80fd\u5305\u542b\u9519\u8bef\u3002\u6bd4\u5982\uff0c\u5b58\u5728\u8fd9\u6837\u7684\u60c5\u51b5\uff0c\u5728\u672c\u7248\u672c\u7684omicron\u53d1\u884c\u65f6\uff0c\u65e5\u5386\u66f4\u65b0\u5230\u4e862021\u5e7412\u670831\u65e5\uff0c\u5728\u8fd9\u4e4b\u524d\u7684\u65e5\u5386\u90fd\u662f\u51c6\u786e\u7684\uff0c\u4f46\u5728\u6b64\u4e4b\u540e\u7684\u65e5\u5386\uff0c\u5219\u6709\u53ef\u80fd\u51fa\u73b0\u9519\u8bef\u3002\u56e0\u6b64\uff0c\u53ea\u5e94\u8be5\u5728\u7279\u6b8a\u7684\u60c5\u51b5\u4e0b\uff08\u6bd4\u5982\u6d4b\u8bd5\uff09\u8c03\u7528\u6b64\u65b9\u6cd5\uff0c\u4ee5\u83b7\u5f97\u4e00\u4e2a\u964d\u7ea7\u7684\u670d\u52a1\u3002 \"\"\" _dir = os . path . dirname ( __file__ ) file = os . path . join ( _dir , \"..\" , \"config\" , \"calendar.json\" ) with open ( file , \"r\" ) as f : data = json . load ( f ) for k , v in data . items (): setattr ( cls , k , np . array ( v ))","title":"service_degrade()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.shift","text":"\u5c06\u6307\u5b9a\u7684moment\u79fb\u52a8N\u4e2a frame_type \u4f4d\u7f6e\u3002 \u5f53N\u4e3a\u8d1f\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u524d\u79fb\u52a8\uff1b\u5f53N\u4e3a\u6b63\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u540e\u79fb\u52a8\u3002\u5982\u679cn\u4e3a\u96f6\uff0c\u610f\u5473\u7740\u79fb\u52a8\u5230\u6700\u63a5\u8fd1 \u7684\u4e00\u4e2a\u5df2\u7ed3\u675f\u7684frame\u3002 \u5982\u679cmoment\u6ca1\u6709\u5bf9\u9f50\u5230frame_type\u5bf9\u5e94\u7684\u65f6\u95f4\uff0c\u5c06\u9996\u5148\u8fdb\u884c\u5bf9\u9f50\u3002 See also: day_shift week_shift month_shift Examples: >>> TimeFrame . shift ( datetime . date ( 2020 , 1 , 3 ), 1 , FrameType . DAY ) datetime . date ( 2020 , 1 , 6 ) >>> TimeFrame . shift ( datetime . datetime ( 2020 , 1 , 6 , 11 ), 1 , FrameType . MIN30 ) datetime . 
datetime ( 2020 , 1 , 6 , 11 , 30 ) Parameters: Name Type Description Default moment Union[Arrow, datetime.date, datetime.datetime] required n int required frame_type FrameType required Returns: Type Description Union[datetime.date, datetime.datetime] \u79fb\u4f4d\u540e\u7684Frame Source code in omicron/models/timeframe.py @classmethod def shift ( cls , moment : Union [ Arrow , datetime . date , datetime . datetime ], n : int , frame_type : FrameType , ) -> Union [ datetime . date , datetime . datetime ]: \"\"\"\u5c06\u6307\u5b9a\u7684moment\u79fb\u52a8N\u4e2a`frame_type`\u4f4d\u7f6e\u3002 \u5f53N\u4e3a\u8d1f\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u524d\u79fb\u52a8\uff1b\u5f53N\u4e3a\u6b63\u6570\u65f6\uff0c\u610f\u5473\u7740\u5411\u540e\u79fb\u52a8\u3002\u5982\u679cn\u4e3a\u96f6\uff0c\u610f\u5473\u7740\u79fb\u52a8\u5230\u6700\u63a5\u8fd1 \u7684\u4e00\u4e2a\u5df2\u7ed3\u675f\u7684frame\u3002 \u5982\u679cmoment\u6ca1\u6709\u5bf9\u9f50\u5230frame_type\u5bf9\u5e94\u7684\u65f6\u95f4\uff0c\u5c06\u9996\u5148\u8fdb\u884c\u5bf9\u9f50\u3002 See also: - [day_shift][omicron.models.timeframe.TimeFrame.day_shift] - [week_shift][omicron.models.timeframe.TimeFrame.week_shift] - [month_shift][omicron.models.timeframe.TimeFrame.month_shift] Examples: >>> TimeFrame.shift(datetime.date(2020, 1, 3), 1, FrameType.DAY) datetime.date(2020, 1, 6) >>> TimeFrame.shift(datetime.datetime(2020, 1, 6, 11), 1, FrameType.MIN30) datetime.datetime(2020, 1, 6, 11, 30) Args: moment: n: frame_type: Returns: \u79fb\u4f4d\u540e\u7684Frame \"\"\" if frame_type == FrameType . DAY : return cls . day_shift ( moment , n ) elif frame_type == FrameType . WEEK : return cls . week_shift ( moment , n ) elif frame_type == FrameType . MONTH : return cls . month_shift ( moment , n ) elif frame_type in [ FrameType . MIN1 , FrameType . MIN5 , FrameType . MIN15 , FrameType . MIN30 , FrameType . MIN60 , ]: tm = moment . hour * 60 + moment . minute new_tick_pos = cls . ticks [ frame_type ] . index ( tm ) + n days = new_tick_pos // len ( cls . ticks [ frame_type ]) min_part = new_tick_pos % len ( cls . ticks [ frame_type ]) date_part = cls . day_shift ( moment . date (), days ) minutes = cls . ticks [ frame_type ][ min_part ] h , m = minutes // 60 , minutes % 60 return datetime . datetime ( date_part . year , date_part . month , date_part . day , h , m , tzinfo = moment . tzinfo , ) else : # pragma: no cover raise ValueError ( f \" { frame_type } is not supported.\" )","title":"shift()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.time2int","text":"\u5c06\u65f6\u95f4\u7c7b\u578b\u8f6c\u6362\u4e3a\u6574\u6570\u7c7b\u578b tm\u53ef\u4ee5\u662fArrow\u7c7b\u578b\uff0c\u4e5f\u53ef\u4ee5\u662fdatetime.datetime\u6216\u8005\u4efb\u4f55\u5176\u5b83\u7c7b\u578b\uff0c\u53ea\u8981\u5b83\u6709year,month...\u7b49 \u5c5e\u6027 Examples: >>> TimeFrame . time2int ( datetime . datetime ( 2020 , 5 , 1 , 15 )) 202005011500 Parameters: Name Type Description Default tm Union[datetime.datetime, Arrow] required Returns: Type Description int \u8f6c\u6362\u540e\u7684\u6574\u6570\uff0c\u6bd4\u59822020050115 Source code in omicron/models/timeframe.py @classmethod def time2int ( cls , tm : Union [ datetime . 
datetime , Arrow ]) -> int : \"\"\"\u5c06\u65f6\u95f4\u7c7b\u578b\u8f6c\u6362\u4e3a\u6574\u6570\u7c7b\u578b tm\u53ef\u4ee5\u662fArrow\u7c7b\u578b\uff0c\u4e5f\u53ef\u4ee5\u662fdatetime.datetime\u6216\u8005\u4efb\u4f55\u5176\u5b83\u7c7b\u578b\uff0c\u53ea\u8981\u5b83\u6709year,month...\u7b49 \u5c5e\u6027 Examples: >>> TimeFrame.time2int(datetime.datetime(2020, 5, 1, 15)) 202005011500 Args: tm: Returns: \u8f6c\u6362\u540e\u7684\u6574\u6570\uff0c\u6bd4\u59822020050115 \"\"\" return int ( f \" { tm . year : 04 }{ tm . month : 02 }{ tm . day : 02 }{ tm . hour : 02 }{ tm . minute : 02 } \" )","title":"time2int()"},{"location":"api/timeframe/#omicron.models.timeframe.TimeFrame.week_shift","text":"\u5bf9\u6307\u5b9a\u65e5\u671f\u6309\u5468\u7ebf\u5e27\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u53c2\u8003 omicron.models.timeframe.TimeFrame.day_shift Examples: >>> TimeFrame . week_frames = np . array ([ 20200103 , 20200110 , 20200117 , 20200123 , 20200207 , 20200214 ]) >>> moment = arrow . get ( '2020-1-21' ) . date () >>> TimeFrame . week_shift ( moment , 1 ) datetime . date ( 2020 , 1 , 23 ) >>> TimeFrame . week_shift ( moment , 0 ) datetime . date ( 2020 , 1 , 17 ) >>> TimeFrame . week_shift ( moment , - 1 ) datetime . date ( 2020 , 1 , 10 ) Returns: Type Description datetime.date \u79fb\u4f4d\u540e\u7684\u65e5\u671f Source code in omicron/models/timeframe.py @classmethod def week_shift ( cls , start : datetime . date , offset : int ) -> datetime . date : \"\"\"\u5bf9\u6307\u5b9a\u65e5\u671f\u6309\u5468\u7ebf\u5e27\u8fdb\u884c\u524d\u540e\u79fb\u4f4d\u64cd\u4f5c \u53c2\u8003 [omicron.models.timeframe.TimeFrame.day_shift][] Examples: >>> TimeFrame.week_frames = np.array([20200103, 20200110, 20200117, 20200123,20200207, 20200214]) >>> moment = arrow.get('2020-1-21').date() >>> TimeFrame.week_shift(moment, 1) datetime.date(2020, 1, 23) >>> TimeFrame.week_shift(moment, 0) datetime.date(2020, 1, 17) >>> TimeFrame.week_shift(moment, -1) datetime.date(2020, 1, 10) Returns: \u79fb\u4f4d\u540e\u7684\u65e5\u671f \"\"\" start = cls . date2int ( start ) return cls . int2date ( ext . shift ( cls . week_frames , start , offset )) rendering: heading_level: 1","title":"week_shift()"},{"location":"api/triggers/","text":"\u5728apscheduler.triggers\u7684\u57fa\u7840\u4e0a\u63d0\u4f9b\u4e86FrameTrigger\u548cIntervalTrigger\uff0c\u4f7f\u5f97\u5b83\u4eec\u53ea\u5728\u4ea4\u6613\u65e5\uff08\u6216\u8005 \u57fa\u4e8e\u4ea4\u6613\u65e5+\u5ef6\u65f6\uff09\u65f6\u6fc0\u53d1\u3002 FrameTrigger ( BaseTrigger ) \u00b6 A cron like trigger fires on each valid Frame Source code in omicron/core/triggers.py class FrameTrigger ( BaseTrigger ): \"\"\" A cron like trigger fires on each valid Frame \"\"\" def __init__ ( self , frame_type : Union [ str , FrameType ], jitter : str = None ): \"\"\"\u6784\u9020\u51fd\u6570 jitter\u7684\u683c\u5f0f\u7528\u6b63\u5219\u5f0f\u8868\u8fbe\u4e3a`r\"([-]?)(\\\\d+)([mshd])\"`\uff0c\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u7b26\u53f7\uff0c'-'\u8868\u793a\u63d0\u524d\uff1b \u7b2c\u4e8c\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e09\u7ec4\u4e3a\u5355\u4f4d\uff0c\u53ef\u4ee5\u4e3a`m`(\u5206\u949f), `s`(\u79d2), `h`\uff08\u5c0f\u65f6\uff09,`d`(\u5929)\u3002 \u4e0b\u9762\u7684\u793a\u4f8b\u6784\u9020\u4e86\u4e00\u4e2a\u53ea\u5728\u4ea4\u6613\u65e5\uff0c\u6bcf30\u5206\u949f\u89e6\u53d1\u4e00\u6b21\uff0c\u6bcf\u6b21\u63d0\u524d15\u79d2\u89e6\u7684trigger\u3002\u5373\u5b83\u7684\u89e6\u53d1\u65f6 \u95f4\u662f\u6bcf\u4e2a\u4ea4\u6613\u65e5\u768409:29:45, 09:59:45, ... 
Examples: >>> FrameTrigger(FrameType.MIN30, '-15s') # doctest: +ELLIPSIS Args: frame_type: jitter: \u5355\u4f4d\u79d2\u3002\u5176\u4e2doffset\u5fc5\u987b\u5728\u4e00\u4e2aFrameType\u7684\u957f\u5ea6\u4ee5\u5185 \"\"\" self . frame_type = FrameType ( frame_type ) if jitter is None : _jitter = 0 else : matched = re . match ( r \"([-]?)(\\d+)([mshd])\" , jitter ) if matched is None : # pragma: no cover raise ValueError ( \"malformed. jitter should be [-](number)(unit), \" \"for example, -30m, or 30s\" ) sign , num , unit = matched . groups () num = int ( num ) if unit . lower () == \"m\" : _jitter = 60 * num elif unit . lower () == \"s\" : _jitter = num elif unit . lower () == \"h\" : _jitter = 3600 * num elif unit . lower () == \"d\" : _jitter = 3600 * 24 * num else : # pragma: no cover raise ValueError ( \"bad time unit. only s,h,m,d is acceptable\" ) if sign == \"-\" : _jitter = - _jitter self . jitter = datetime . timedelta ( seconds = _jitter ) if ( frame_type == FrameType . MIN1 and abs ( _jitter ) >= 60 or frame_type == FrameType . MIN5 and abs ( _jitter ) >= 300 or frame_type == FrameType . MIN15 and abs ( _jitter ) >= 900 or frame_type == FrameType . MIN30 and abs ( _jitter ) >= 1800 or frame_type == FrameType . MIN60 and abs ( _jitter ) >= 3600 or frame_type == FrameType . DAY and abs ( _jitter ) >= 24 * 3600 # it's still not allowed if offset > week, month, etc. Would anybody # really specify an offset longer than that? ): raise ValueError ( \"offset must be less than frame length\" ) def __str__ ( self ): return f \" { self . __class__ . __name__ } : { self . frame_type . value } : { self . jitter } \" def get_next_fire_time ( self , previous_fire_time : Union [ datetime . date , datetime . datetime ], now : Union [ datetime . date , datetime . datetime ], ): \"\"\"\"\"\" ft = self . frame_type # `now` is timezone aware, while ceiling isn't now = now . replace ( tzinfo = None ) next_tick = now next_frame = TimeFrame . ceiling ( now , ft ) while next_tick <= now : if ft in TimeFrame . day_level_frames : next_tick = TimeFrame . combine_time ( next_frame , 15 ) + self . jitter else : next_tick = next_frame + self . jitter if next_tick > now : tz = tzlocal . get_localzone () return next_tick . astimezone ( tz ) else : next_frame = TimeFrame . shift ( next_frame , 1 , ft ) __init__ ( self , frame_type , jitter = None ) special \u00b6 \u6784\u9020\u51fd\u6570 jitter\u7684\u683c\u5f0f\u7528\u6b63\u5219\u5f0f\u8868\u8fbe\u4e3a r\"([-]?)(\\d+)([mshd])\" \uff0c\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u7b26\u53f7\uff0c'-'\u8868\u793a\u63d0\u524d\uff1b \u7b2c\u4e8c\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e09\u7ec4\u4e3a\u5355\u4f4d\uff0c\u53ef\u4ee5\u4e3a m (\u5206\u949f), s (\u79d2), h \uff08\u5c0f\u65f6\uff09, d (\u5929)\u3002 \u4e0b\u9762\u7684\u793a\u4f8b\u6784\u9020\u4e86\u4e00\u4e2a\u53ea\u5728\u4ea4\u6613\u65e5\uff0c\u6bcf30\u5206\u949f\u89e6\u53d1\u4e00\u6b21\uff0c\u6bcf\u6b21\u63d0\u524d15\u79d2\u89e6\u7684trigger\u3002\u5373\u5b83\u7684\u89e6\u53d1\u65f6 \u95f4\u662f\u6bcf\u4e2a\u4ea4\u6613\u65e5\u768409:29:45, 09:59:45, ... Examples: >>> FrameTrigger ( FrameType . MIN30 , '-15s' ) < omicron . core . triggers . 
FrameTrigger object at 0 x ...> Parameters: Name Type Description Default frame_type Union[str, coretypes.types.FrameType] required jitter str \u5355\u4f4d\u79d2\u3002\u5176\u4e2doffset\u5fc5\u987b\u5728\u4e00\u4e2aFrameType\u7684\u957f\u5ea6\u4ee5\u5185 None Source code in omicron/core/triggers.py def __init__ ( self , frame_type : Union [ str , FrameType ], jitter : str = None ): \"\"\"\u6784\u9020\u51fd\u6570 jitter\u7684\u683c\u5f0f\u7528\u6b63\u5219\u5f0f\u8868\u8fbe\u4e3a`r\"([-]?)(\\\\d+)([mshd])\"`\uff0c\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u7b26\u53f7\uff0c'-'\u8868\u793a\u63d0\u524d\uff1b \u7b2c\u4e8c\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e09\u7ec4\u4e3a\u5355\u4f4d\uff0c\u53ef\u4ee5\u4e3a`m`(\u5206\u949f), `s`(\u79d2), `h`\uff08\u5c0f\u65f6\uff09,`d`(\u5929)\u3002 \u4e0b\u9762\u7684\u793a\u4f8b\u6784\u9020\u4e86\u4e00\u4e2a\u53ea\u5728\u4ea4\u6613\u65e5\uff0c\u6bcf30\u5206\u949f\u89e6\u53d1\u4e00\u6b21\uff0c\u6bcf\u6b21\u63d0\u524d15\u79d2\u89e6\u7684trigger\u3002\u5373\u5b83\u7684\u89e6\u53d1\u65f6 \u95f4\u662f\u6bcf\u4e2a\u4ea4\u6613\u65e5\u768409:29:45, 09:59:45, ... Examples: >>> FrameTrigger(FrameType.MIN30, '-15s') # doctest: +ELLIPSIS Args: frame_type: jitter: \u5355\u4f4d\u79d2\u3002\u5176\u4e2doffset\u5fc5\u987b\u5728\u4e00\u4e2aFrameType\u7684\u957f\u5ea6\u4ee5\u5185 \"\"\" self . frame_type = FrameType ( frame_type ) if jitter is None : _jitter = 0 else : matched = re . match ( r \"([-]?)(\\d+)([mshd])\" , jitter ) if matched is None : # pragma: no cover raise ValueError ( \"malformed. jitter should be [-](number)(unit), \" \"for example, -30m, or 30s\" ) sign , num , unit = matched . groups () num = int ( num ) if unit . lower () == \"m\" : _jitter = 60 * num elif unit . lower () == \"s\" : _jitter = num elif unit . lower () == \"h\" : _jitter = 3600 * num elif unit . lower () == \"d\" : _jitter = 3600 * 24 * num else : # pragma: no cover raise ValueError ( \"bad time unit. only s,h,m,d is acceptable\" ) if sign == \"-\" : _jitter = - _jitter self . jitter = datetime . timedelta ( seconds = _jitter ) if ( frame_type == FrameType . MIN1 and abs ( _jitter ) >= 60 or frame_type == FrameType . MIN5 and abs ( _jitter ) >= 300 or frame_type == FrameType . MIN15 and abs ( _jitter ) >= 900 or frame_type == FrameType . MIN30 and abs ( _jitter ) >= 1800 or frame_type == FrameType . MIN60 and abs ( _jitter ) >= 3600 or frame_type == FrameType . DAY and abs ( _jitter ) >= 24 * 3600 # it's still not allowed if offset > week, month, etc. Would anybody # really specify an offset longer than that? ): raise ValueError ( \"offset must be less than frame length\" ) TradeTimeIntervalTrigger ( BaseTrigger ) \u00b6 \u53ea\u5728\u4ea4\u6613\u65f6\u95f4\u89e6\u53d1\u7684\u56fa\u5b9a\u95f4\u9694\u7684trigger Source code in omicron/core/triggers.py class TradeTimeIntervalTrigger ( BaseTrigger ): \"\"\"\u53ea\u5728\u4ea4\u6613\u65f6\u95f4\u89e6\u53d1\u7684\u56fa\u5b9a\u95f4\u9694\u7684trigger\"\"\" def __init__ ( self , interval : str ): \"\"\"\u6784\u9020\u51fd\u6570 interval\u7684\u683c\u5f0f\u7528\u6b63\u5219\u8868\u8fbe\u5f0f\u8868\u793a\u4e3a `r\"(\\\\d+)([mshd])\"` \u3002\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e8c\u7ec4\u4e3a\u5355\u4f4d\u3002\u6709\u6548\u7684 `interval`\u5982 1 \uff0c\u8868\u793a\u6bcf1\u5c0f\u65f6\u89e6\u53d1\u4e00\u6b21\uff0c\u5219\u8be5\u89e6\u53d1\u5668\u5c06\u5728\u4ea4\u6613\u65e5\u768410:30, 11:30, 14:00\u548c 15\uff1a00\u5404\u89e6\u53d1\u4e00\u6b21 Args: interval : [description] Raises: ValueError: [description] \"\"\" matched = re . 
match ( r \"(\\d+)([mshd])\" , interval ) if matched is None : raise ValueError ( f \"malform interval { interval } \" ) interval , unit = matched . groups () interval = int ( interval ) unit = unit . lower () if unit == \"s\" : self . interval = datetime . timedelta ( seconds = interval ) elif unit == \"m\" : self . interval = datetime . timedelta ( minutes = interval ) elif unit == \"h\" : self . interval = datetime . timedelta ( hours = interval ) elif unit == \"d\" : self . interval = datetime . timedelta ( days = interval ) else : self . interval = datetime . timedelta ( seconds = interval ) def __str__ ( self ): return f \" { self . __class__ . __name__ } : { self . interval . seconds } \" def get_next_fire_time ( self , previous_fire_time : Optional [ datetime . datetime ], now : Optional [ datetime . datetime ], ): \"\"\"\"\"\" if previous_fire_time is not None : fire_time = previous_fire_time + self . interval else : fire_time = now if TimeFrame . date2int ( fire_time . date ()) not in TimeFrame . day_frames : ft = TimeFrame . day_shift ( now , 1 ) fire_time = datetime . datetime ( ft . year , ft . month , ft . day , 9 , 30 , tzinfo = fire_time . tzinfo ) return fire_time minutes = fire_time . hour * 60 + fire_time . minute if minutes < 570 : fire_time = fire_time . replace ( hour = 9 , minute = 30 , second = 0 , microsecond = 0 ) elif 690 < minutes < 780 : fire_time = fire_time . replace ( hour = 13 , minute = 0 , second = 0 , microsecond = 0 ) elif minutes > 900 : ft = TimeFrame . day_shift ( fire_time , 1 ) fire_time = datetime . datetime ( ft . year , ft . month , ft . day , 9 , 30 , tzinfo = fire_time . tzinfo ) return fire_time __init__ ( self , interval ) special \u00b6 \u6784\u9020\u51fd\u6570 interval\u7684\u683c\u5f0f\u7528\u6b63\u5219\u8868\u8fbe\u5f0f\u8868\u793a\u4e3a r\"(\\d+)([mshd])\" \u3002\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e8c\u7ec4\u4e3a\u5355\u4f4d\u3002\u6709\u6548\u7684 interval \u5982 1 \uff0c\u8868\u793a\u6bcf1\u5c0f\u65f6\u89e6\u53d1\u4e00\u6b21\uff0c\u5219\u8be5\u89e6\u53d1\u5668\u5c06\u5728\u4ea4\u6613\u65e5\u768410:30, 11:30, 14:00\u548c 15\uff1a00\u5404\u89e6\u53d1\u4e00\u6b21 Parameters: Name Type Description Default interval [description] required Exceptions: Type Description ValueError [description] Source code in omicron/core/triggers.py def __init__ ( self , interval : str ): \"\"\"\u6784\u9020\u51fd\u6570 interval\u7684\u683c\u5f0f\u7528\u6b63\u5219\u8868\u8fbe\u5f0f\u8868\u793a\u4e3a `r\"(\\\\d+)([mshd])\"` \u3002\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e8c\u7ec4\u4e3a\u5355\u4f4d\u3002\u6709\u6548\u7684 `interval`\u5982 1 \uff0c\u8868\u793a\u6bcf1\u5c0f\u65f6\u89e6\u53d1\u4e00\u6b21\uff0c\u5219\u8be5\u89e6\u53d1\u5668\u5c06\u5728\u4ea4\u6613\u65e5\u768410:30, 11:30, 14:00\u548c 15\uff1a00\u5404\u89e6\u53d1\u4e00\u6b21 Args: interval : [description] Raises: ValueError: [description] \"\"\" matched = re . match ( r \"(\\d+)([mshd])\" , interval ) if matched is None : raise ValueError ( f \"malform interval { interval } \" ) interval , unit = matched . groups () interval = int ( interval ) unit = unit . lower () if unit == \"s\" : self . interval = datetime . timedelta ( seconds = interval ) elif unit == \"m\" : self . interval = datetime . timedelta ( minutes = interval ) elif unit == \"h\" : self . interval = datetime . timedelta ( hours = interval ) elif unit == \"d\" : self . interval = datetime . timedelta ( days = interval ) else : self . interval = datetime . 
timedelta ( seconds = interval )","title":"Triggers"},{"location":"api/triggers/#omicron.core.triggers.FrameTrigger","text":"A cron like trigger fires on each valid Frame Source code in omicron/core/triggers.py class FrameTrigger ( BaseTrigger ): \"\"\" A cron like trigger fires on each valid Frame \"\"\" def __init__ ( self , frame_type : Union [ str , FrameType ], jitter : str = None ): \"\"\"\u6784\u9020\u51fd\u6570 jitter\u7684\u683c\u5f0f\u7528\u6b63\u5219\u5f0f\u8868\u8fbe\u4e3a`r\"([-]?)(\\\\d+)([mshd])\"`\uff0c\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u7b26\u53f7\uff0c'-'\u8868\u793a\u63d0\u524d\uff1b \u7b2c\u4e8c\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e09\u7ec4\u4e3a\u5355\u4f4d\uff0c\u53ef\u4ee5\u4e3a`m`(\u5206\u949f), `s`(\u79d2), `h`\uff08\u5c0f\u65f6\uff09,`d`(\u5929)\u3002 \u4e0b\u9762\u7684\u793a\u4f8b\u6784\u9020\u4e86\u4e00\u4e2a\u53ea\u5728\u4ea4\u6613\u65e5\uff0c\u6bcf30\u5206\u949f\u89e6\u53d1\u4e00\u6b21\uff0c\u6bcf\u6b21\u63d0\u524d15\u79d2\u89e6\u7684trigger\u3002\u5373\u5b83\u7684\u89e6\u53d1\u65f6 \u95f4\u662f\u6bcf\u4e2a\u4ea4\u6613\u65e5\u768409:29:45, 09:59:45, ... Examples: >>> FrameTrigger(FrameType.MIN30, '-15s') # doctest: +ELLIPSIS Args: frame_type: jitter: \u5355\u4f4d\u79d2\u3002\u5176\u4e2doffset\u5fc5\u987b\u5728\u4e00\u4e2aFrameType\u7684\u957f\u5ea6\u4ee5\u5185 \"\"\" self . frame_type = FrameType ( frame_type ) if jitter is None : _jitter = 0 else : matched = re . match ( r \"([-]?)(\\d+)([mshd])\" , jitter ) if matched is None : # pragma: no cover raise ValueError ( \"malformed. jitter should be [-](number)(unit), \" \"for example, -30m, or 30s\" ) sign , num , unit = matched . groups () num = int ( num ) if unit . lower () == \"m\" : _jitter = 60 * num elif unit . lower () == \"s\" : _jitter = num elif unit . lower () == \"h\" : _jitter = 3600 * num elif unit . lower () == \"d\" : _jitter = 3600 * 24 * num else : # pragma: no cover raise ValueError ( \"bad time unit. only s,h,m,d is acceptable\" ) if sign == \"-\" : _jitter = - _jitter self . jitter = datetime . timedelta ( seconds = _jitter ) if ( frame_type == FrameType . MIN1 and abs ( _jitter ) >= 60 or frame_type == FrameType . MIN5 and abs ( _jitter ) >= 300 or frame_type == FrameType . MIN15 and abs ( _jitter ) >= 900 or frame_type == FrameType . MIN30 and abs ( _jitter ) >= 1800 or frame_type == FrameType . MIN60 and abs ( _jitter ) >= 3600 or frame_type == FrameType . DAY and abs ( _jitter ) >= 24 * 3600 # it's still not allowed if offset > week, month, etc. Would anybody # really specify an offset longer than that? ): raise ValueError ( \"offset must be less than frame length\" ) def __str__ ( self ): return f \" { self . __class__ . __name__ } : { self . frame_type . value } : { self . jitter } \" def get_next_fire_time ( self , previous_fire_time : Union [ datetime . date , datetime . datetime ], now : Union [ datetime . date , datetime . datetime ], ): \"\"\"\"\"\" ft = self . frame_type # `now` is timezone aware, while ceiling isn't now = now . replace ( tzinfo = None ) next_tick = now next_frame = TimeFrame . ceiling ( now , ft ) while next_tick <= now : if ft in TimeFrame . day_level_frames : next_tick = TimeFrame . combine_time ( next_frame , 15 ) + self . jitter else : next_tick = next_frame + self . jitter if next_tick > now : tz = tzlocal . get_localzone () return next_tick . astimezone ( tz ) else : next_frame = TimeFrame . 
shift ( next_frame , 1 , ft )","title":"FrameTrigger"},{"location":"api/triggers/#omicron.core.triggers.FrameTrigger.__init__","text":"\u6784\u9020\u51fd\u6570 jitter\u7684\u683c\u5f0f\u7528\u6b63\u5219\u5f0f\u8868\u8fbe\u4e3a r\"([-]?)(\\d+)([mshd])\" \uff0c\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u7b26\u53f7\uff0c'-'\u8868\u793a\u63d0\u524d\uff1b \u7b2c\u4e8c\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e09\u7ec4\u4e3a\u5355\u4f4d\uff0c\u53ef\u4ee5\u4e3a m (\u5206\u949f), s (\u79d2), h \uff08\u5c0f\u65f6\uff09, d (\u5929)\u3002 \u4e0b\u9762\u7684\u793a\u4f8b\u6784\u9020\u4e86\u4e00\u4e2a\u53ea\u5728\u4ea4\u6613\u65e5\uff0c\u6bcf30\u5206\u949f\u89e6\u53d1\u4e00\u6b21\uff0c\u6bcf\u6b21\u63d0\u524d15\u79d2\u89e6\u7684trigger\u3002\u5373\u5b83\u7684\u89e6\u53d1\u65f6 \u95f4\u662f\u6bcf\u4e2a\u4ea4\u6613\u65e5\u768409:29:45, 09:59:45, ... Examples: >>> FrameTrigger ( FrameType . MIN30 , '-15s' ) < omicron . core . triggers . FrameTrigger object at 0 x ...> Parameters: Name Type Description Default frame_type Union[str, coretypes.types.FrameType] required jitter str \u5355\u4f4d\u79d2\u3002\u5176\u4e2doffset\u5fc5\u987b\u5728\u4e00\u4e2aFrameType\u7684\u957f\u5ea6\u4ee5\u5185 None Source code in omicron/core/triggers.py def __init__ ( self , frame_type : Union [ str , FrameType ], jitter : str = None ): \"\"\"\u6784\u9020\u51fd\u6570 jitter\u7684\u683c\u5f0f\u7528\u6b63\u5219\u5f0f\u8868\u8fbe\u4e3a`r\"([-]?)(\\\\d+)([mshd])\"`\uff0c\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u7b26\u53f7\uff0c'-'\u8868\u793a\u63d0\u524d\uff1b \u7b2c\u4e8c\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e09\u7ec4\u4e3a\u5355\u4f4d\uff0c\u53ef\u4ee5\u4e3a`m`(\u5206\u949f), `s`(\u79d2), `h`\uff08\u5c0f\u65f6\uff09,`d`(\u5929)\u3002 \u4e0b\u9762\u7684\u793a\u4f8b\u6784\u9020\u4e86\u4e00\u4e2a\u53ea\u5728\u4ea4\u6613\u65e5\uff0c\u6bcf30\u5206\u949f\u89e6\u53d1\u4e00\u6b21\uff0c\u6bcf\u6b21\u63d0\u524d15\u79d2\u89e6\u7684trigger\u3002\u5373\u5b83\u7684\u89e6\u53d1\u65f6 \u95f4\u662f\u6bcf\u4e2a\u4ea4\u6613\u65e5\u768409:29:45, 09:59:45, ... Examples: >>> FrameTrigger(FrameType.MIN30, '-15s') # doctest: +ELLIPSIS Args: frame_type: jitter: \u5355\u4f4d\u79d2\u3002\u5176\u4e2doffset\u5fc5\u987b\u5728\u4e00\u4e2aFrameType\u7684\u957f\u5ea6\u4ee5\u5185 \"\"\" self . frame_type = FrameType ( frame_type ) if jitter is None : _jitter = 0 else : matched = re . match ( r \"([-]?)(\\d+)([mshd])\" , jitter ) if matched is None : # pragma: no cover raise ValueError ( \"malformed. jitter should be [-](number)(unit), \" \"for example, -30m, or 30s\" ) sign , num , unit = matched . groups () num = int ( num ) if unit . lower () == \"m\" : _jitter = 60 * num elif unit . lower () == \"s\" : _jitter = num elif unit . lower () == \"h\" : _jitter = 3600 * num elif unit . lower () == \"d\" : _jitter = 3600 * 24 * num else : # pragma: no cover raise ValueError ( \"bad time unit. only s,h,m,d is acceptable\" ) if sign == \"-\" : _jitter = - _jitter self . jitter = datetime . timedelta ( seconds = _jitter ) if ( frame_type == FrameType . MIN1 and abs ( _jitter ) >= 60 or frame_type == FrameType . MIN5 and abs ( _jitter ) >= 300 or frame_type == FrameType . MIN15 and abs ( _jitter ) >= 900 or frame_type == FrameType . MIN30 and abs ( _jitter ) >= 1800 or frame_type == FrameType . MIN60 and abs ( _jitter ) >= 3600 or frame_type == FrameType . DAY and abs ( _jitter ) >= 24 * 3600 # it's still not allowed if offset > week, month, etc. Would anybody # really specify an offset longer than that? 
): raise ValueError ( \"offset must be less than frame length\" )","title":"__init__()"},{"location":"api/triggers/#omicron.core.triggers.TradeTimeIntervalTrigger","text":"\u53ea\u5728\u4ea4\u6613\u65f6\u95f4\u89e6\u53d1\u7684\u56fa\u5b9a\u95f4\u9694\u7684trigger Source code in omicron/core/triggers.py class TradeTimeIntervalTrigger ( BaseTrigger ): \"\"\"\u53ea\u5728\u4ea4\u6613\u65f6\u95f4\u89e6\u53d1\u7684\u56fa\u5b9a\u95f4\u9694\u7684trigger\"\"\" def __init__ ( self , interval : str ): \"\"\"\u6784\u9020\u51fd\u6570 interval\u7684\u683c\u5f0f\u7528\u6b63\u5219\u8868\u8fbe\u5f0f\u8868\u793a\u4e3a `r\"(\\\\d+)([mshd])\"` \u3002\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e8c\u7ec4\u4e3a\u5355\u4f4d\u3002\u6709\u6548\u7684 `interval`\u5982 1 \uff0c\u8868\u793a\u6bcf1\u5c0f\u65f6\u89e6\u53d1\u4e00\u6b21\uff0c\u5219\u8be5\u89e6\u53d1\u5668\u5c06\u5728\u4ea4\u6613\u65e5\u768410:30, 11:30, 14:00\u548c 15\uff1a00\u5404\u89e6\u53d1\u4e00\u6b21 Args: interval : [description] Raises: ValueError: [description] \"\"\" matched = re . match ( r \"(\\d+)([mshd])\" , interval ) if matched is None : raise ValueError ( f \"malform interval { interval } \" ) interval , unit = matched . groups () interval = int ( interval ) unit = unit . lower () if unit == \"s\" : self . interval = datetime . timedelta ( seconds = interval ) elif unit == \"m\" : self . interval = datetime . timedelta ( minutes = interval ) elif unit == \"h\" : self . interval = datetime . timedelta ( hours = interval ) elif unit == \"d\" : self . interval = datetime . timedelta ( days = interval ) else : self . interval = datetime . timedelta ( seconds = interval ) def __str__ ( self ): return f \" { self . __class__ . __name__ } : { self . interval . seconds } \" def get_next_fire_time ( self , previous_fire_time : Optional [ datetime . datetime ], now : Optional [ datetime . datetime ], ): \"\"\"\"\"\" if previous_fire_time is not None : fire_time = previous_fire_time + self . interval else : fire_time = now if TimeFrame . date2int ( fire_time . date ()) not in TimeFrame . day_frames : ft = TimeFrame . day_shift ( now , 1 ) fire_time = datetime . datetime ( ft . year , ft . month , ft . day , 9 , 30 , tzinfo = fire_time . tzinfo ) return fire_time minutes = fire_time . hour * 60 + fire_time . minute if minutes < 570 : fire_time = fire_time . replace ( hour = 9 , minute = 30 , second = 0 , microsecond = 0 ) elif 690 < minutes < 780 : fire_time = fire_time . replace ( hour = 13 , minute = 0 , second = 0 , microsecond = 0 ) elif minutes > 900 : ft = TimeFrame . day_shift ( fire_time , 1 ) fire_time = datetime . datetime ( ft . year , ft . month , ft . day , 9 , 30 , tzinfo = fire_time . 
tzinfo ) return fire_time","title":"TradeTimeIntervalTrigger"},{"location":"api/triggers/#omicron.core.triggers.TradeTimeIntervalTrigger.__init__","text":"\u6784\u9020\u51fd\u6570 interval\u7684\u683c\u5f0f\u7528\u6b63\u5219\u8868\u8fbe\u5f0f\u8868\u793a\u4e3a r\"(\\d+)([mshd])\" \u3002\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e8c\u7ec4\u4e3a\u5355\u4f4d\u3002\u6709\u6548\u7684 interval \u5982 1 \uff0c\u8868\u793a\u6bcf1\u5c0f\u65f6\u89e6\u53d1\u4e00\u6b21\uff0c\u5219\u8be5\u89e6\u53d1\u5668\u5c06\u5728\u4ea4\u6613\u65e5\u768410:30, 11:30, 14:00\u548c 15\uff1a00\u5404\u89e6\u53d1\u4e00\u6b21 Parameters: Name Type Description Default interval [description] required Exceptions: Type Description ValueError [description] Source code in omicron/core/triggers.py def __init__ ( self , interval : str ): \"\"\"\u6784\u9020\u51fd\u6570 interval\u7684\u683c\u5f0f\u7528\u6b63\u5219\u8868\u8fbe\u5f0f\u8868\u793a\u4e3a `r\"(\\\\d+)([mshd])\"` \u3002\u5176\u4e2d\u7b2c\u4e00\u7ec4\u4e3a\u6570\u5b57\uff0c\u7b2c\u4e8c\u7ec4\u4e3a\u5355\u4f4d\u3002\u6709\u6548\u7684 `interval`\u5982 1 \uff0c\u8868\u793a\u6bcf1\u5c0f\u65f6\u89e6\u53d1\u4e00\u6b21\uff0c\u5219\u8be5\u89e6\u53d1\u5668\u5c06\u5728\u4ea4\u6613\u65e5\u768410:30, 11:30, 14:00\u548c 15\uff1a00\u5404\u89e6\u53d1\u4e00\u6b21 Args: interval : [description] Raises: ValueError: [description] \"\"\" matched = re . match ( r \"(\\d+)([mshd])\" , interval ) if matched is None : raise ValueError ( f \"malform interval { interval } \" ) interval , unit = matched . groups () interval = int ( interval ) unit = unit . lower () if unit == \"s\" : self . interval = datetime . timedelta ( seconds = interval ) elif unit == \"m\" : self . interval = datetime . timedelta ( minutes = interval ) elif unit == \"h\" : self . interval = datetime . timedelta ( hours = interval ) elif unit == \"d\" : self . interval = datetime . timedelta ( days = interval ) else : self . interval = datetime . timedelta ( seconds = interval )","title":"__init__()"},{"location":"api/dal/flux/","text":"Flux - the query language builder for influxdb \u00b6 Helper functions for building flux query expression Source code in omicron/dal/influx/flux.py class Flux ( object ): \"\"\"Helper functions for building flux query expression\"\"\" EPOCH_START = datetime . datetime ( 1970 , 1 , 1 , 0 , 0 , 0 ) def __init__ ( self , auto_pivot = True , no_sys_cols = True ): \"\"\"\u521d\u59cb\u5316Flux\u5bf9\u8c61 Args: auto_pivot : \u662f\u5426\u81ea\u52a8\u5c06\u67e5\u8be2\u5217\u5b57\u6bb5\u7ec4\u88c5\u6210\u884c. Defaults to True. no_sys_cols: \u662f\u5426\u81ea\u52a8\u5c06\u7cfb\u7edf\u5b57\u6bb5\u5220\u9664. Defaults to True.\u8bf7\u53c2\u8003[drop_sys_cols][omicron.dal.influx.flux.Flux.drop_sys_cols] \"\"\" self . _cols = None self . expressions = defaultdict ( list ) self . _auto_pivot = auto_pivot self . _last_n = None self . no_sys_cols = no_sys_cols def __str__ ( self ): return self . _compose () def __repr__ ( self ) -> str : return f \"< { self . __class__ . __name__ } >: \\n { self . _compose () } \" def _compose ( self ): \"\"\"\u5c06\u6240\u6709\u8868\u8fbe\u5f0f\u5408\u5e76\u4e3a\u4e00\u4e2a\u8868\u8fbe\u5f0f\"\"\" if not all ( [ \"bucket\" in self . expressions , \"measurement\" in self . expressions , \"range\" in self . expressions , ] ): raise AssertionError ( \"bucket, measurement and range must be set\" ) expr = [ self . expressions [ k ] for k in ( \"bucket\" , \"range\" , \"measurement\" )] if self . expressions . get ( \"tags\" ): expr . append ( self . 
expressions [ \"tags\" ]) if self . expressions . get ( \"fields\" ): expr . append ( self . expressions [ \"fields\" ]) if \"drop\" not in self . expressions and self . no_sys_cols : self . drop_sys_cols () if self . expressions . get ( \"drop\" ): expr . append ( self . expressions [ \"drop\" ]) if self . _auto_pivot and \"pivot\" not in self . expressions : self . pivot () if self . expressions . get ( \"pivot\" ): expr . append ( self . expressions [ \"pivot\" ]) if self . expressions . get ( \"group\" ): expr . append ( self . expressions [ \"group\" ]) if self . expressions . get ( \"sort\" ): expr . append ( self . expressions [ \"sort\" ]) if self . expressions . get ( \"limit\" ): expr . append ( self . expressions [ \"limit\" ]) # influxdb\u9ed8\u8ba4\u6309\u5347\u5e8f\u6392\u5217\uff0c\u4f46last_n\u67e5\u8be2\u7684\u7ed3\u679c\u5219\u5fc5\u7136\u662f\u964d\u5e8f\u7684\uff0c\u6240\u4ee5\u8fd8\u9700\u8981\u518d\u6b21\u6392\u5e8f if self . _last_n : expr . append ( \" \\n \" . join ( [ f ' |> top(n: { self . _last_n } , columns: [\"_time\"])' , ' |> sort(columns: [\"_time\"], desc: false)' , ] ) ) return \" \\n \" . join ( expr ) def bucket ( self , bucket : str ) -> \"Flux\" : \"\"\"add bucket to query expression Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u4e2asource\uff0c\u5982\u679c\u8868\u8fbe\u5f0f\u4e2d\u5df2\u7ecf\u6307\u5b9a\u4e86bucket\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Returns: Flux\u5bf9\u8c61 \"\"\" if \"bucket\" in self . expressions : raise DuplicateOperationError ( \"bucket has been set\" ) self . expressions [ \"bucket\" ] = f 'from(bucket: \" { bucket } \")' return self def measurement ( self , measurement : str ) -> \"Flux\" : \"\"\"add measurement filter to query Raises: DuplicateOperationError: \u4e00\u6b21\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u4e2ameasurement, \u5982\u679c\u8868\u8fbe\u5f0f\u4e2d\u5df2\u7ecf\u5b58\u5728measurement, \u5219\u629b\u51fa\u5f02\u5e38 Returns: Flux\u5bf9\u8c61\u81ea\u8eab\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"measurement\" in self . expressions : raise DuplicateOperationError ( \"measurement has been set\" ) self . 
expressions [ \"measurement\" ] = f ' |> filter(fn: (r) => r[\"_measurement\"] == \" { measurement } \")' return self def range ( self , start : Frame , end : Frame , right_close = True , precision = \"s\" ) -> \"Flux\" : \"\"\"\u6dfb\u52a0\u65f6\u95f4\u8303\u56f4\u8fc7\u6ee4 \u5fc5\u987b\u6307\u5b9a\u7684\u67e5\u8be2\u6761\u4ef6\uff0c\u5426\u5219influxdb\u4f1a\u62a5unbound\u67e5\u8be2\u9519\uff0c\u56e0\u4e3a\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u7684\u6570\u636e\u91cf\u5c06\u975e\u5e38\u5927\u3002 \u5728\u683c\u5f0f\u5316\u65f6\u95f4\u65f6\uff0c\u9700\u8981\u6839\u636e`precision`\u751f\u6210\u65f6\u95f4\u5b57\u7b26\u4e32\u3002\u5728\u5411Influxdb\u53d1\u9001\u8bf7\u6c42\u65f6\uff0c\u5e94\u8be5\u6ce8\u610f\u67e5\u8be2\u53c2\u6570\u4e2d\u6307\u5b9a\u7684\u65f6\u95f4\u7cbe\u5ea6\u4e0e\u8fd9\u91cc\u4f7f\u7528\u7684\u4fdd\u6301\u4e00\u81f4\u3002 Influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u9ed8\u8ba4\u4e0d\u5305\u542b\u7ed3\u675f\u65f6\u95f4\uff0c\u5f53`right_close`\u6307\u5b9a\u4e3aTrue\u65f6\uff0c\u6211\u4eec\u5c06\u6839\u636e\u6307\u5b9a\u7684\u7cbe\u5ea6\u4fee\u6539`end`\u65f6\u95f4\uff0c\u4f7f\u4e4b\u4ec5\u6bd4`end`\u591a\u4e00\u4e2a\u65f6\u95f4\u5355\u4f4d\uff0c\u4ece\u800c\u4fdd\u8bc1\u67e5\u8be2\u7ed3\u679c\u4f1a\u5305\u542b`end`\u3002 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21\u65f6\u95f4\u8303\u56f4\uff0c\u5982\u679crange\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: start: \u5f00\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 right_close: \u67e5\u8be2\u7ed3\u679c\u662f\u5426\u5305\u542b\u7ed3\u675f\u65f6\u95f4\u3002 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u652f\u6301\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"range\" in self . expressions : raise DuplicateOperationError ( \"range has been set\" ) if precision not in [ \"s\" , \"ms\" , \"us\" ]: raise AssertionError ( \"precision must be 's', 'ms' or 'us'\" ) end = self . format_time ( end , precision , right_close ) start = self . format_time ( start , precision ) self . expressions [ \"range\" ] = f \" |> range(start: { start } , stop: { end } )\" return self def limit ( self , limit : int ) -> \"Flux\" : \"\"\"\u6dfb\u52a0\u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21limit\uff0c\u5982\u679climit\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: limit: \u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"limit\" in self . expressions : raise DuplicateOperationError ( \"limit has been set\" ) self . 
expressions["limit"] = " |> limit(n: %d)" % limit
        return self

    @classmethod
    def to_timestamp(cls, tm: Frame, precision: str = "s") -> int:
        """Convert `tm` into a unix timestamp at the given precision.

        When writing to influxdb, line-protocol expects unix timestamps that match the
        write precision. Influxdb always works in UTC, so `tm` must already be UTC.
        """
        if precision not in ["s", "ms", "us"]:
            raise AssertionError("precision must be 's', 'ms' or 'us'")

        # get int repr of tm, in seconds unit
        if isinstance(tm, np.datetime64):
            tm = tm.astype("datetime64[s]").astype("int")
        elif isinstance(tm, datetime.datetime):
            tm = tm.timestamp()
        else:
            tm = arrow.get(tm).timestamp()

        return int(tm * 10 ** ({"s": 0, "ms": 3, "us": 6}[precision]))

    @classmethod
    def format_time(cls, tm: Frame, precision: str = "s", shift_forward=False) -> str:
        """Convert `tm` to the given precision and return an RFC3339 timestamp string
        (the format influxdb expects).

        Influx query ranges are left-closed and right-open; when this time is used as
        the end of a range and a closed interval is wanted, shift it forward by one
        precision unit by passing `shift_forward=True`.

        Examples:
            >>> # by default, the precision is seconds, and convert a date
            >>> Flux.format_time(datetime.date(2019, 1, 1))
            '2019-01-01T00:00:00Z'

            >>> # set precision to ms, convert a time
            >>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision="ms")
            '1978-07-08T12:34:56.123Z'

            >>> # convert and forward shift
            >>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward=True)
            '1978-07-08T00:00:01Z'
        """
        timespec = {"s": "seconds", "ms": "milliseconds", "us": "microseconds"}.get(precision)
        if timespec is None:
            raise ValueError(f"precision must be one of 's', 'ms', 'us', but got {precision}")

        tm = arrow.get(tm).naive
        if shift_forward:
            tm = tm + datetime.timedelta(**{timespec: 1})

        return tm.isoformat(sep="T", timespec=timespec) + "Z"

    def tags(self, tags: DefaultDict[str, List[str]]) -> "Flux":
        """Add tag filters to the query (optional; without them all records are returned).

        Multiple tag values are combined with `or`. The `contains` syntax would also
        work, but it was measured to retrieve all matching records first and filter
        afterwards, which is far slower, especially when there are only a few tags.
        """
        if "tags" in self.expressions:
            raise DuplicateOperationError("tags has been set")

        filters = []
        for tag, values in tags.items():
            assert isinstance(values, str) or len(values) > 0, f"tag {tag} should not be empty or None"
            if isinstance(values, str):
                values = [values]
            for v in values:
                filters.append(f'r["{tag}"] == "{v}"')

        op_expression = " or ".join(filters)
        self.expressions["tags"] = f" |> filter(fn: (r) => {op_expression})"
        return self

    def fields(self, fields: List, reserve_time_stamp: bool = True) -> "Flux":
        """Add field filters, choosing which fields appear in the result.

        A record belongs to exactly one `_field`, so multiple fields are joined with
        `or`. The `_time` column is kept unless `reserve_time_stamp` is False.
        """
        if "fields" in self.expressions:
            raise DuplicateOperationError("fields has been set")

        self._cols = fields.copy()
        if reserve_time_stamp and "_time" not in self._cols:
            self._cols.append("_time")
        self._cols = sorted(self._cols)

        filters = [f'r["_field"] == "{name}"' for name in self._cols]
        self.expressions["fields"] = f" |> filter(fn: (r) => {' or '.join(filters)})"
        return self

    def pivot(
        self,
        row_keys: List[str] = ["_time"],
        column_keys=["_field"],
        value_column: str = "_value",
    ) -> "Flux":
        """Convert the normally column-oriented Flux result into row-oriented data.

        The conversion happens within a measurement; see
        https://docs.influxdata.com/flux/v0.x/stdlib/universe/pivot/#align-fields-within-each-measurement-that-have-the-same-timestamp
        """
        if "pivot" in self.expressions:
            raise DuplicateOperationError("pivot has been set")

        columns = ",".join([f'"{name}"' for name in column_keys])
        rowkeys = ",".join([f'"{name}"' for name in row_keys])
        self.expressions["pivot"] = (
            f' |> pivot(columnKey: [{columns}], rowKey: [{rowkeys}], valueColumn: "{value_column}")'
        )
        return self

    def sort(self, by: List[str] = None, desc: bool = False) -> "Flux":
        """Sort the result by the given columns.

        Query results are already time-ordered by default, so this is only needed to
        sort by other fields.
        """
        if "sort" in self.expressions:
            raise DuplicateOperationError("sort has been set")

        if by is None:
            by = ["_value"]
        if isinstance(by, str):
            by = [by]

        columns_ = ",".join([f'"{name}"' for name in by])
        desc = "true" if desc else "false"
        self.expressions["sort"] = f" |> sort(columns: [{columns_}], desc: {desc})"
        return self

    def group(self, by: Tuple[str]) -> "Flux":
        """Group the result by the given columns."""
        if "group" in self.expressions:
            raise DuplicateOperationError("group has been set")

        if isinstance(by, str):
            by = [by]
        cols = ",".join([f'"{col}"' for col in by])
        self.expressions["group"] = f" |> group(columns: [{cols}])"
        return self

    def latest(self, n: int) -> "Flux":
        """Return the last `n` records, in ascending time order (top + sort + limit)."""
        assert "top" not in self.expressions, "top and last_n can not be used together"
        assert "sort" not in self.expressions, "sort and last_n can not be used together"
        assert "limit" not in self.expressions, "limit and last_n can not be used together"

        self._last_n = n
        return self

    @property
    def cols(self) -> List[str]:
        """The columns of the returned records.

        The implementation is buggy: Influx doesn't tell us in which order these
        columns are.
        """
        # fixme: if keep in expression, then return group key + tag key + value key
        # if keep not in expression, then stream, table, _time, ...
        return sorted(self._cols)

    def delete(
        self,
        measurement: str,
        stop: datetime.datetime,
        tags: dict = {},
        start: datetime.datetime = None,
        precision: str = "s",
    ) -> dict:
        """Build a delete command.

        Per the delete-predicate syntax, delete only supports AND logic and the "="
        operator, on any field or tag except _time and _value. In practice, deleting
        by time range and tags works best; string-valued columns may also be matched
        through `tags`, but non-string columns give unpredictable results.
        """
        timespec = {"s": "seconds", "ms": "milliseconds", "us": "microseconds"}.get(precision)

        if start is None:
            start = self.EPOCH_START.isoformat(timespec=timespec) + "Z"

        predicate = [f'_measurement="{measurement}"']
        for key, value in tags.items():
            if isinstance(value, list):
                predicate.extend([f'{key} = "{v}"' for v in value])
            else:
                predicate.append(f'{key} = "{value}"')

        command = {
            "start": start,
            "stop": f"{stop.isoformat(timespec=timespec)}Z",
            "predicate": " AND ".join(predicate),
        }
        return command

    def drop(self, cols: List[str]) -> "Flux":
        """Drop the given columns before the result is returned."""
        if "drop" in self.expressions:
            raise DuplicateOperationError("drop operation has been set already")

        # add surrounding quotes
        _cols = [f'"{c}"' for c in cols]
        self.expressions["drop"] = f" |> drop(columns: [{','.join(_cols)}])"
        return self

    def drop_sys_cols(self, cols: List[str] = None) -> "Flux":
        """Drop ["_start", "_stop", "_measurement"], plus any columns given in `cols`.

        "_time" and "table" remain afterwards; "_time" should usually be kept and
        "table" cannot be removed. Add "_time" to `cols` if you do not want it.
        """
        _cols = ["_start", "_stop", "_measurement"]
        if cols is not None:
            _cols.extend(cols)

        return self.drop(_cols)
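With the full listing above in view, a typical query is built by chaining the setters, each of which returns the builder itself. The sketch below only illustrates the pipe-style API; the bucket name, measurement name and security codes are hypothetical and not part of the library.

```python
import datetime
from omicron.dal.influx.flux import Flux

flux = (
    Flux()
    .bucket("zillionare")                      # hypothetical bucket name
    .measurement("stock_bars_1d")              # hypothetical measurement
    .range(datetime.date(2022, 1, 1),          # right_close=True by default,
           datetime.date(2022, 1, 7))          # so Jan 7 is still included
    .tags({"code": ["000001.XSHE", "600000.XSHG"]})
    .fields(["open", "close"])                 # "_time" is kept automatically
    .latest(2)                                 # last two records, ascending by _time
)

# str(flux) runs _compose() and yields the Flux script to send to influxdb
print(str(flux))
```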
cols: List[str] property readonly ¶

The columns of the returned records. The implementation is buggy: Influx doesn't tell us in which order these columns are.

Returns: List[str]: the column names of the returned records.

__init__(self, auto_pivot=True, no_sys_cols=True) special ¶

Initialize the Flux object.

Parameters:
    auto_pivot: whether to automatically pivot the queried columns into rows. Defaults to True.
    no_sys_cols: whether to automatically drop the system columns. Defaults to True. See drop_sys_cols.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

bucket(self, bucket) ¶

Add a bucket to the query expression.

Raises: DuplicateOperationError: only one source may be specified per query; raised if a bucket has already been set.

Returns: Flux: the Flux object.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

delete(self, measurement, stop, tags={}, start=None, precision='s') ¶

Build a delete command.

According to delete-predicate (https://docs.influxdata.com/influxdb/v2.1/reference/syntax/delete-predicate/), delete only supports AND logic and the "=" operator (not "!="); any field or tag may be used, except the _time and _value fields.

The influxdb documentation here is not very clear. Based on experiments, deleting by time range and tags works best. If a column holds string values it can also be passed through the `tags` parameter and matched for deletion, but passing a non-string column gives unpredictable results.

Parameters:
    measurement: the measurement to delete from. required
    stop: the end of the deletion time range. required
    tags: delete records matching these tags; keys are tag names, values are str or List[str]. Defaults to {}.
    start: the start of the range; EPOCH_START is used if omitted. Defaults to None.
    precision: time precision, one of "s", "ms", "us". Defaults to 's'.

Returns: dict: the delete command.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).
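A small sketch of what `delete` returns, derived from the source above; the measurement and tag values are hypothetical. The resulting dict is typically sent to influxdb's delete API (for 2.x, the `/api/v2/delete` endpoint).

```python
import datetime
from omicron.dal.influx.flux import Flux

# hypothetical measurement and tag values, for illustration only
cmd = Flux().delete(
    "stock_bars_1d",
    stop=datetime.datetime(2022, 3, 1),
    tags={"code": ["000001.XSHE", "000002.XSHE"]},
)

# cmd is a plain dict:
# {
#     "start": "1970-01-01T00:00:00Z",
#     "stop": "2022-03-01T00:00:00Z",
#     "predicate": '_measurement="stock_bars_1d" AND code = "000001.XSHE" AND code = "000002.XSHE"',
# }
```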
drop(self, cols) ¶

Drop the given columns before the result is returned.

Parameters:
    cols: the names of the columns to be dropped. required

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

drop_sys_cols(self, cols=None) ¶

Drop ["_start", "_stop", "_measurement"], plus the columns given in `cols`, before the query result is returned. Note that two sys columns remain afterwards, "_time" and "table": "_time" should usually be kept, and "table" cannot be removed. If you do not want "_time" in the result, add it to `cols`.

Parameters:
    cols: the extra columns to be dropped. Defaults to None.

Returns: Flux: the Flux query object.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).
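As a quick illustration of the two drop helpers, the expression below is what the source shown earlier would generate; it is a sketch, not additional behaviour.

```python
from omicron.dal.influx.flux import Flux

flux = Flux()
# drop_sys_cols always removes the three bookkeeping columns, plus any extras given
flux.drop_sys_cols(["_time"])

print(flux.expressions["drop"])
#  |> drop(columns: ["_start","_stop","_measurement","_time"])
```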
fields(self, fields, reserve_time_stamp=True) ¶

Add field filters to the query, specifying which fields appear in the result. This filter is optional; without it all fields are returned. Since a record belongs to exactly one _field, multiple fields are combined with `or`.

Raises: DuplicateOperationError: only one field filter may be set per query.

Parameters:
    fields (List): the fields to query. required
    reserve_time_stamp (bool): whether to keep the `_time` column. Defaults to True.

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

format_time(tm, precision='s', shift_forward=False) classmethod ¶

Convert a time to the requested precision and return it as an RFC3339 timestamp string (the format influxdb expects). If the time is used as the end of a query range, note that influx ranges are left-closed and right-open; to query a closed interval, shift `end` forward by one precision unit by passing `shift_forward=True`.

Examples:

>>> # by default, the precision is seconds, and convert a date
>>> Flux.format_time(datetime.date(2019, 1, 1))
'2019-01-01T00:00:00Z'

>>> # set precision to ms, convert a time
>>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision="ms")
'1978-07-08T12:34:56.123Z'

>>> # convert and forward shift
>>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward=True)
'1978-07-08T00:00:01Z'

Parameters:
    tm: the time to be formatted. required
    precision (str): one of 's', 'ms', 'us'. Defaults to 's'.
    shift_forward: if True, shift the end time forward by one precision unit. Defaults to False.

Returns: str: the adjusted time string, conforming to influx conventions.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

group(self, by) ¶

Group the result by the given columns.

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

latest(self, n) ¶

Return the last n records, in ascending time order. An enhancement over plain Flux queries, equivalent to top + sort + limit; it cannot be combined with top, sort or limit.

Parameters:
    n (int): the number of trailing records to return. required

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

limit(self, limit) ¶

Add a limit on the number of returned records.

Raises: DuplicateOperationError: only one limit may be set per query.

Parameters:
    limit (int): the maximum number of records to return. required

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).
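`format_time` and `to_timestamp` are the two time helpers of the builder. The sketch below walks through the conversions they perform, using only deterministic inputs (a date and an `np.datetime64` carry no timezone, so the results do not depend on the local clock).

```python
import datetime
import numpy as np
from omicron.dal.influx.flux import Flux

# RFC3339 strings for range(): a closed interval needs a one-unit forward shift
assert Flux.format_time(datetime.date(2022, 1, 7)) == "2022-01-07T00:00:00Z"
assert Flux.format_time(datetime.date(2022, 1, 7), shift_forward=True) == "2022-01-07T00:00:01Z"

# unix timestamps for line-protocol writes; precision picks the multiplier
one_day = np.datetime64("1970-01-02")
assert Flux.to_timestamp(one_day) == 86_400
assert Flux.to_timestamp(one_day, precision="ms") == 86_400_000
```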
measurement(self, measurement) ¶

Add a measurement filter to the query.

Raises: DuplicateOperationError: only one measurement may be specified per query; raised if a measurement has already been set.

Returns: Flux: the Flux object itself, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

pivot(self, row_keys=['_time'], column_keys=['_field'], value_column='_value') ¶

Pivot converts column-oriented data into row-oriented data. Flux query results are normally column-oriented; with this step added, the result is converted into rows before being returned. The conversion happens within a measurement; see pivot (https://docs.influxdata.com/flux/v0.x/stdlib/universe/pivot/#align-fields-within-each-measurement-that-have-the-same-timestamp).

Parameters:
    row_keys (List[str]): the columns that uniquely determine one row of output. Defaults to ['_time'].
    column_keys: the list of column names. Defaults to ['_field'].
    value_column (str): the value column. Defaults to '_value'.

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

range(self, start, end, right_close=True, precision='s') ¶

Add a time-range filter. This condition is mandatory; without it influxdb reports an unbounded-query error, because the amount of returned data would be very large.

The time strings are generated according to `precision`; when sending the request to influxdb, make sure the precision given in the query parameters matches the one used here.

Influxdb query results do not include the end time by default. When `right_close` is True, `end` is adjusted to be one precision unit later, so that the result does include `end`.

Raises: DuplicateOperationError: only one time range may be set per query; raised if a range has already been set.

Parameters:
    start (Union[datetime.date, datetime.datetime]): the start time. required
    end (Union[datetime.date, datetime.datetime]): the end time. required
    right_close: whether the result should include the end time. Defaults to True.
    precision: time precision. Defaults to 's'.

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

sort(self, by=None, desc=False) ¶

Sort the result by the given columns. According to the influxdb doc (https://docs.influxdata.com/influxdb/v2.0/query-data/flux/first-last/), query results are sorted by time by default, so this API is unnecessary if you only need time ordering; it exists to provide sorting by other fields. In a test with more than 5000 tags returning 1M records, the results were verified to come back in ascending _time order.

Parameters:
    by (List[str]): the columns to sort by. Defaults to None.

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).
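The interplay of `range`, `right_close` and `precision` is easiest to see on the generated expression itself. A minimal sketch follows; the bucket and measurement names are hypothetical, and the printed expression is what the source shown above would produce.

```python
import datetime
from omicron.dal.influx.flux import Flux

flux = Flux().bucket("zillionare").measurement("stock_bars_1d")
flux.range(datetime.date(2022, 1, 1), datetime.date(2022, 1, 7))

# right_close defaults to True, so the stop time is shifted forward by one
# second (the default precision), turning influx's half-open range into a
# closed interval that still includes 2022-01-07:
print(flux.expressions["range"])
#  |> range(start: 2022-01-01T00:00:00Z, stop: 2022-01-07T00:00:01Z)
```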
tags(self, tags) ¶

Add tag filters to the query. This filter is optional; if no tags are specified, all records are returned.

Either `contains` syntax or `or` syntax could be used here (since a record belongs to exactly one tag value, multiple tags must be combined with `or`). Testing shows that `contains` always retrieves every matching record first and filters afterwards; this is inefficient and, especially when there are only a few tags, far slower than using `or`.

Raises: DuplicateOperationError: only one tag filter may be set per query.

Parameters:
    tags: a {tagname: Union[str, [tag_values]]} mapping. required

Examples:

>>> flux = Flux()
>>> flux.tags({"code": ["000001", "000002"], "name": ["浦发银行"]}).expressions["tags"]
' |> filter(fn: (r) => r["code"] == "000001" or r["code"] == "000002" or r["name"] == "浦发银行")'

Returns: Flux: the Flux object, to support pipe operations.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

to_timestamp(tm, precision='s') classmethod ¶

Convert a time into a unix timestamp at the given precision. When writing to influxdb, line-protocol expects unix timestamps that match the write precision. Influxdb always works in UTC, so `tm` must already be a UTC time.

Parameters:
    tm (Union[datetime.date, datetime.datetime]): the time to convert. required
    precision (str): time precision. Defaults to 's'.

Returns: int: the unix timestamp.

Source code in omicron/dal/influx/flux.py (shown in the class listing above).

Flux - the query language builder for influxdb ¶

Helper functions for building flux query expression.

Source code in omicron/dal/influx/flux.py

class Flux(object):
    """Helper functions for building flux query expression"""

    EPOCH_START = datetime.datetime(1970, 1, 1, 0, 0, 0)

    def __init__(self, auto_pivot=True, no_sys_cols=True):
        """Initialize the Flux object.

        Args:
            auto_pivot: whether to automatically pivot the queried columns into rows. Defaults to True.
            no_sys_cols: whether to automatically drop the system columns. Defaults to True. See drop_sys_cols.
        """
        self._cols = None
        self.expressions = defaultdict(list)
        self._auto_pivot = auto_pivot
        self._last_n = None
        self.no_sys_cols = no_sys_cols

    def __str__(self):
        return self._compose()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}>:\n{self._compose()}"
    def _compose(self):
        """Merge all expressions into a single Flux script."""
        if not all(
            [
                "bucket" in self.expressions,
                "measurement" in self.expressions,
                "range" in self.expressions,
            ]
        ):
            raise AssertionError("bucket, measurement and range must be set")

        expr = [self.expressions[k] for k in ("bucket", "range", "measurement")]

        if self.expressions.get("tags"):
            expr.append(self.expressions["tags"])

        if self.expressions.get("fields"):
            expr.append(self.expressions["fields"])

        if "drop" not in self.expressions and self.no_sys_cols:
            self.drop_sys_cols()

        if self.expressions.get("drop"):
            expr.append(self.expressions["drop"])

        if self._auto_pivot and "pivot" not in self.expressions:
            self.pivot()

        if self.expressions.get("pivot"):
            expr.append(self.expressions["pivot"])

        if self.expressions.get("group"):
            expr.append(self.expressions["group"])

        if self.expressions.get("sort"):
            expr.append(self.expressions["sort"])

        if self.expressions.get("limit"):
            expr.append(self.expressions["limit"])

        # influxdb sorts ascending by default, but a last_n query necessarily comes
        # back in descending order, so it has to be re-sorted here
        if self._last_n:
            expr.append(
                "\n".join(
                    [
                        f' |> top(n: {self._last_n}, columns: ["_time"])',
                        ' |> sort(columns: ["_time"], desc: false)',
                    ]
                )
            )

        return "\n".join(expr)

    def bucket(self, bucket: str) -> "Flux":
        """Add a bucket to the query expression.

        Raises:
            DuplicateOperationError: only one source may be specified per query.
        """
        if "bucket" in self.expressions:
            raise DuplicateOperationError("bucket has been set")

        self.expressions["bucket"] = f'from(bucket: "{bucket}")'
        return self

    def measurement(self, measurement: str) -> "Flux":
        """Add a measurement filter to the query.

        Raises:
            DuplicateOperationError: only one measurement may be specified per query.
        """
        if "measurement" in self.expressions:
            raise DuplicateOperationError("measurement has been set")

        self.expressions["measurement"] = f' |> filter(fn: (r) => r["_measurement"] == "{measurement}")'
        return self
expressions [ \"measurement\" ] = f ' |> filter(fn: (r) => r[\"_measurement\"] == \" { measurement } \")' return self def range ( self , start : Frame , end : Frame , right_close = True , precision = \"s\" ) -> \"Flux\" : \"\"\"\u6dfb\u52a0\u65f6\u95f4\u8303\u56f4\u8fc7\u6ee4 \u5fc5\u987b\u6307\u5b9a\u7684\u67e5\u8be2\u6761\u4ef6\uff0c\u5426\u5219influxdb\u4f1a\u62a5unbound\u67e5\u8be2\u9519\uff0c\u56e0\u4e3a\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u7684\u6570\u636e\u91cf\u5c06\u975e\u5e38\u5927\u3002 \u5728\u683c\u5f0f\u5316\u65f6\u95f4\u65f6\uff0c\u9700\u8981\u6839\u636e`precision`\u751f\u6210\u65f6\u95f4\u5b57\u7b26\u4e32\u3002\u5728\u5411Influxdb\u53d1\u9001\u8bf7\u6c42\u65f6\uff0c\u5e94\u8be5\u6ce8\u610f\u67e5\u8be2\u53c2\u6570\u4e2d\u6307\u5b9a\u7684\u65f6\u95f4\u7cbe\u5ea6\u4e0e\u8fd9\u91cc\u4f7f\u7528\u7684\u4fdd\u6301\u4e00\u81f4\u3002 Influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u9ed8\u8ba4\u4e0d\u5305\u542b\u7ed3\u675f\u65f6\u95f4\uff0c\u5f53`right_close`\u6307\u5b9a\u4e3aTrue\u65f6\uff0c\u6211\u4eec\u5c06\u6839\u636e\u6307\u5b9a\u7684\u7cbe\u5ea6\u4fee\u6539`end`\u65f6\u95f4\uff0c\u4f7f\u4e4b\u4ec5\u6bd4`end`\u591a\u4e00\u4e2a\u65f6\u95f4\u5355\u4f4d\uff0c\u4ece\u800c\u4fdd\u8bc1\u67e5\u8be2\u7ed3\u679c\u4f1a\u5305\u542b`end`\u3002 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21\u65f6\u95f4\u8303\u56f4\uff0c\u5982\u679crange\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: start: \u5f00\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 right_close: \u67e5\u8be2\u7ed3\u679c\u662f\u5426\u5305\u542b\u7ed3\u675f\u65f6\u95f4\u3002 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u652f\u6301\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"range\" in self . expressions : raise DuplicateOperationError ( \"range has been set\" ) if precision not in [ \"s\" , \"ms\" , \"us\" ]: raise AssertionError ( \"precision must be 's', 'ms' or 'us'\" ) end = self . format_time ( end , precision , right_close ) start = self . format_time ( start , precision ) self . expressions [ \"range\" ] = f \" |> range(start: { start } , stop: { end } )\" return self def limit ( self , limit : int ) -> \"Flux\" : \"\"\"\u6dfb\u52a0\u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21limit\uff0c\u5982\u679climit\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: limit: \u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"limit\" in self . expressions : raise DuplicateOperationError ( \"limit has been set\" ) self . 
expressions [ \"limit\" ] = \" |> limit(n: %d )\" % limit return self @classmethod def to_timestamp ( cls , tm : Frame , precision : str = \"s\" ) -> int : \"\"\"\u5c06\u65f6\u95f4\u6839\u636e\u7cbe\u5ea6\u8f6c\u6362\u4e3aunix\u65f6\u95f4\u6233 \u5728\u5f80influxdb\u5199\u5165\u6570\u636e\u65f6\uff0cline-protocol\u8981\u6c42\u7684\u65f6\u95f4\u6233\u4e3aunix timestamp\uff0c\u5e76\u4e14\u4e0e\u5176\u7cbe\u5ea6\u5bf9\u5e94\u3002 influxdb\u59cb\u7ec8\u4f7f\u7528UTC\u65f6\u95f4\uff0c\u56e0\u6b64\uff0c`tm`\u4e5f\u5fc5\u987b\u5df2\u7ecf\u8f6c\u6362\u6210UTC\u65f6\u95f4\u3002 Args: tm: \u65f6\u95f4 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 Returns: \u65f6\u95f4\u6233 \"\"\" if precision not in [ \"s\" , \"ms\" , \"us\" ]: raise AssertionError ( \"precision must be 's', 'ms' or 'us'\" ) # get int repr of tm, in seconds unit if isinstance ( tm , np . datetime64 ): tm = tm . astype ( \"datetime64[s]\" ) . astype ( \"int\" ) elif isinstance ( tm , datetime . datetime ): tm = tm . timestamp () else : tm = arrow . get ( tm ) . timestamp () return int ( tm * 10 ** ({ \"s\" : 0 , \"ms\" : 3 , \"us\" : 6 }[ precision ])) @classmethod def format_time ( cls , tm : Frame , precision : str = \"s\" , shift_forward = False ) -> str : \"\"\"\u5c06\u65f6\u95f4\u8f6c\u6362\u6210\u5ba2\u6237\u7aef\u5bf9\u5e94\u7684\u7cbe\u5ea6\uff0c\u5e76\u4ee5 RFC3339 timestamps\u683c\u5f0f\u4e32\uff08\u5373influxdb\u8981\u6c42\u7684\u683c\u5f0f\uff09\u8fd4\u56de\u3002 \u5982\u679c\u8fd9\u4e2a\u65f6\u95f4\u662f\u4f5c\u4e3a\u67e5\u8be2\u7684range\u4e2d\u7684\u7ed3\u675f\u65f6\u95f4\u4f7f\u7528\u65f6\uff0c\u7531\u4e8einflux\u67e5\u8be2\u7684\u65f6\u95f4\u8303\u56f4\u662f\u5de6\u95ed\u53f3\u5f00\u7684\uff0c\u56e0\u6b64\u5982\u679c\u4f60\u9700\u8981\u67e5\u8be2\u7684\u662f\u4e00\u4e2a\u95ed\u533a\u95f4\uff0c\u5219\u9700\u8981\u5c06`end`\u7684\u65f6\u95f4\u5411\u524d\u504f\u79fb\u4e00\u4e2a\u7cbe\u5ea6\u3002\u901a\u8fc7\u4f20\u5165`shift_forward = True`\u53ef\u4ee5\u5b8c\u6210\u8fd9\u79cd\u8f6c\u6362\u3002 Examples: >>> # by default, the precision is seconds, and convert a date >>> Flux.format_time(datetime.date(2019, 1, 1)) '2019-01-01T00:00:00Z' >>> # set precision to ms, convert a time >>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision=\"ms\") '1978-07-08T12:34:56.123Z' >>> # convert and forward shift >>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward = True) '1978-07-08T00:00:01Z' Args: tm : \u5f85\u683c\u5f0f\u5316\u7684\u65f6\u95f4 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u53ef\u9009\u503c\u4e3a\uff1a's', 'ms', 'us' shift_forward: \u5982\u679c\u4e3aTrue\uff0c\u5219\u5c06end\u5411\u524d\u504f\u79fb\u4e00\u4e2a\u7cbe\u5ea6 Returns: \u8c03\u6574\u540e\u7b26\u5408influx\u65f6\u95f4\u89c4\u8303\u7684\u65f6\u95f4\uff08\u5b57\u7b26\u4e32\u8868\u793a\uff09 \"\"\" timespec = { \"s\" : \"seconds\" , \"ms\" : \"milliseconds\" , \"us\" : \"microseconds\" } . get ( precision ) if timespec is None : raise ValueError ( f \"precision must be one of 's', 'ms', 'us', but got { precision } \" ) tm = arrow . get ( tm ) . naive if shift_forward : tm = tm + datetime . timedelta ( ** { timespec : 1 }) return tm . 
isoformat ( sep = \"T\" , timespec = timespec ) + \"Z\" def tags ( self , tags : DefaultDict [ str , List [ str ]]) -> \"Flux\" : \"\"\"\u7ed9\u67e5\u8be2\u6dfb\u52a0tags\u8fc7\u6ee4\u6761\u4ef6 \u6b64\u67e5\u8be2\u6761\u4ef6\u4e3a\u8fc7\u6ee4\u6761\u4ef6\uff0c\u5e76\u975e\u5fc5\u987b\u3002\u5982\u679c\u67e5\u8be2\u4e2d\u6ca1\u6709\u6307\u5b9atags\uff0c\u5219\u4f1a\u8fd4\u56de\u6240\u6709\u8bb0\u5f55\u3002 \u5728\u5b9e\u73b0\u4e0a\uff0c\u65e2\u53ef\u4ee5\u4f7f\u7528`contains`\u8bed\u6cd5\uff0c\u4e5f\u53ef\u4ee5\u4f7f\u7528`or`\u8bed\u6cd5(\u7531\u4e8e\u4e00\u6761\u8bb0\u5f55\u53ea\u80fd\u5c5e\u4e8e\u4e00\u4e2atag\uff0c\u6240\u4ee5\uff0c\u5f53\u6307\u5b9a\u591a\u4e2atag\u8fdb\u884c\u67e5\u8be2\u65f6\uff0c\u5b83\u4eec\u4e4b\u95f4\u7684\u5173\u7cfb\u5e94\u8be5\u4e3a`or`)\u3002\u7ecf\u9a8c\u8bc1\uff0ccontains\u8bed\u6cd5\u4f1a\u59cb\u7ec8\u5148\u5c06\u6240\u6709\u7b26\u5408\u6761\u4ef6\u7684\u8bb0\u5f55\u68c0\u7d22\u51fa\u6765\uff0c\u518d\u8fdb\u884c\u8fc7\u6ee4\u3002\u8fd9\u6837\u7684\u6548\u7387\u6bd4\u8f83\u4f4e\uff0c\u7279\u522b\u662f\u5f53tags\u7684\u6570\u91cf\u8f83\u5c11\u65f6\uff0c\u4f1a\u8fdc\u8fdc\u6bd4\u4f7f\u7528or\u8bed\u6cd5\u6162\u3002 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6267\u884c\u4e00\u6b21\uff0c\u5982\u679ctag filter\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: tags : tags\u662f\u4e00\u4e2a{tagname: Union[str,[tag_values]]}\u5bf9\u8c61\u3002 Examples: >>> flux = Flux() >>> flux.tags({\"code\": [\"000001\", \"000002\"], \"name\": [\"\u6d66\u53d1\u94f6\u884c\"]}).expressions[\"tags\"] ' |> filter(fn: (r) => r[\"code\"] == \"000001\" or r[\"code\"] == \"000002\" or r[\"name\"] == \"\u6d66\u53d1\u94f6\u884c\")' Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"tags\" in self . expressions : raise DuplicateOperationError ( \"tags has been set\" ) filters = [] for tag , values in tags . items (): assert ( isinstance ( values , str ) or len ( values ) > 0 ), f \"tag { tag } should not be empty or None\" if isinstance ( values , str ): values = [ values ] for v in values : filters . append ( f 'r[\" { tag } \"] == \" { v } \"' ) op_expression = \" or \" . join ( filters ) self . expressions [ \"tags\" ] = f \" |> filter(fn: (r) => { op_expression } )\" return self def fields ( self , fields : List , reserve_time_stamp : bool = True ) -> \"Flux\" : \"\"\"\u7ed9\u67e5\u8be2\u6dfb\u52a0field\u8fc7\u6ee4\u6761\u4ef6 \u6b64\u67e5\u8be2\u6761\u4ef6\u4e3a\u8fc7\u6ee4\u6761\u4ef6\uff0c\u7528\u4ee5\u6307\u5b9a\u54ea\u4e9bfield\u4f1a\u51fa\u73b0\u5728\u67e5\u8be2\u7ed3\u679c\u4e2d\uff0c\u5e76\u975e\u5fc5\u987b\u3002\u5982\u679c\u67e5\u8be2\u4e2d\u6ca1\u6709\u6307\u5b9atags\uff0c\u5219\u4f1a\u8fd4\u56de\u6240\u6709\u8bb0\u5f55\u3002 \u7531\u4e8e\u4e00\u6761\u8bb0\u5f55\u53ea\u80fd\u5c5e\u4e8e\u4e00\u4e2a_field\uff0c\u6240\u4ee5\uff0c\u5f53\u6307\u5b9a\u591a\u4e2a_field\u8fdb\u884c\u67e5\u8be2\u65f6\uff0c\u5b83\u4eec\u4e4b\u95f4\u7684\u5173\u7cfb\u5e94\u8be5\u4e3a`or`\u3002 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6267\u884c\u4e00\u6b21\uff0c\u5982\u679cfiled filter\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: fields: \u5f85\u67e5\u8be2\u7684field\u5217\u8868 reserve_time_stamp: \u662f\u5426\u4fdd\u7559\u65f6\u95f4\u6233`_time`\uff0c\u9ed8\u8ba4\u4e3aTrue Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"fields\" in self . 
expressions : raise DuplicateOperationError ( \"fields has been set\" ) self . _cols = fields . copy () if reserve_time_stamp and \"_time\" not in self . _cols : self . _cols . append ( \"_time\" ) self . _cols = sorted ( self . _cols ) filters = [ f 'r[\"_field\"] == \" { name } \"' for name in self . _cols ] self . expressions [ \"fields\" ] = f \" |> filter(fn: (r) => { ' or ' . join ( filters ) } )\" return self def pivot ( self , row_keys : List [ str ] = [ \"_time\" ], column_keys = [ \"_field\" ], value_column : str = \"_value\" , ) -> \"Flux\" : \"\"\"pivot\u7528\u6765\u5c06\u4ee5\u5217\u4e3a\u5355\u4f4d\u7684\u6570\u636e\u8f6c\u6362\u4e3a\u4ee5\u884c\u4e3a\u5355\u4f4d\u7684\u6570\u636e Flux\u67e5\u8be2\u8fd4\u56de\u7684\u7ed3\u679c\u901a\u5e38\u90fd\u662f\u4ee5\u5217\u4e3a\u5355\u4f4d\u7684\u6570\u636e\uff0c\u589e\u52a0\u672cpivot\u6761\u4ef6\u540e\uff0c\u7ed3\u679c\u5c06\u88ab\u8f6c\u6362\u6210\u4e3a\u4ee5\u884c\u4e3a\u5355\u4f4d\u7684\u6570\u636e\u518d\u8fd4\u56de\u3002 \u8fd9\u91cc\u5b9e\u73b0\u7684\u662fmeasurement\u5185\u7684\u8f6c\u6362\uff0c\u8bf7\u53c2\u8003 [pivot](https://docs.influxdata.com/flux/v0.x/stdlib/universe/pivot/#align-fields-within-each-measurement-that-have-the-same-timestamp) Args: row_keys: \u60df\u4e00\u786e\u5b9a\u8f93\u51fa\u4e2d\u4e00\u884c\u6570\u636e\u7684\u5217\u540d\u5b57, \u9ed8\u8ba4\u4e3a[\"_time\"] column_keys: \u5217\u540d\u79f0\u5217\u8868\uff0c\u9ed8\u8ba4\u4e3a[\"_field\"] value_column: \u503c\u5217\u540d\uff0c\u9ed8\u8ba4\u4e3a\"_value\" Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"pivot\" in self . expressions : raise DuplicateOperationError ( \"pivot has been set\" ) columns = \",\" . join ([ f '\" { name } \"' for name in column_keys ]) rowkeys = \",\" . join ([ f '\" { name } \"' for name in row_keys ]) self . expressions [ \"pivot\" ] = f ' |> pivot(columnKey: [ { columns } ], rowKey: [ { rowkeys } ], valueColumn: \" { value_column } \")' return self def sort ( self , by : List [ str ] = None , desc : bool = False ) -> \"Flux\" : \"\"\"\u6309\u7167\u6307\u5b9a\u7684\u5217\u8fdb\u884c\u6392\u5e8f \u6839\u636e[influxdb doc](https://docs.influxdata.com/influxdb/v2.0/query-data/flux/first-last/), \u67e5\u8be2\u8fd4\u56de\u503c\u9ed8\u8ba4\u5730\u6309\u65f6\u95f4\u6392\u5e8f\u3002\u56e0\u6b64\uff0c\u5982\u679c\u4ec5\u4ec5\u662f\u8981\u6c42\u67e5\u8be2\u7ed3\u679c\u6309\u65f6\u95f4\u6392\u5e8f\uff0c\u65e0\u987b\u8c03\u7528\u6b64API\uff0c\u4f46\u662f\uff0c\u6b64API\u63d0\u4f9b\u4e86\u6309\u5176\u5b83\u5b57\u6bb5\u6392\u5e8f\u7684\u80fd\u529b\u3002 \u53e6\u5916\uff0c\u5728\u4e00\u4e2a\u67095000\u591a\u4e2atag\uff0c\u5171\u8fd4\u56de1M\u6761\u8bb0\u5f55\u7684\u6d4b\u8bd5\u4e2d\uff0c\u6d4b\u8bd5\u9a8c\u8bc1\u8fd4\u56de\u8bb0\u5f55\u786e\u5b9e\u6309_time\u5347\u5e8f\u6392\u5217\u3002 Args: by: \u6307\u5b9a\u6392\u5e8f\u7684\u5217\u540d\u79f0\u5217\u8868 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"sort\" in self . expressions : raise DuplicateOperationError ( \"sort has been set\" ) if by is None : by = [ \"_value\" ] if isinstance ( by , str ): by = [ by ] columns_ = \",\" . join ([ f '\" { name } \"' for name in by ]) desc = \"true\" if desc else \"false\" self . expressions [ \"sort\" ] = f \" |> sort(columns: [ { columns_ } ], desc: { desc } )\" return self def group ( self , by : Tuple [ str ]) -> \"Flux\" : \"\"\"[summary] Returns: [description] \"\"\" if \"group\" in self . 
expressions : raise DuplicateOperationError ( \"group has been set\" ) if isinstance ( by , str ): by = [ by ] cols = \",\" . join ([ f '\" { col } \"' for col in by ]) self . expressions [ \"group\" ] = f \" |> group(columns: [ { cols } ])\" return self def latest ( self , n : int ) -> \"Flux\" : \"\"\"\u83b7\u53d6\u6700\u540en\u6761\u6570\u636e\uff0c\u6309\u65f6\u95f4\u589e\u5e8f\u8fd4\u56de Flux\u67e5\u8be2\u7684\u589e\u5f3a\u529f\u80fd\uff0c\u76f8\u5f53\u4e8etop + sort + limit Args: n: \u6700\u540en\u6761\u6570\u636e Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" assert \"top\" not in self . expressions , \"top and last_n can not be used together\" assert ( \"sort\" not in self . expressions ), \"sort and last_n can not be used together\" assert ( \"limit\" not in self . expressions ), \"limit and last_n can not be used together\" self . _last_n = n return self @property def cols ( self ) -> List [ str ]: \"\"\"the columns or the return records the implementation is buggy. Influx doesn't tell us in which order these columns are. Returns: the columns name of the return records \"\"\" # fixme: if keep in expression, then return group key + tag key + value key # if keep not in expression, then stream, table, _time, ... return sorted ( self . _cols ) def delete ( self , measurement : str , stop : datetime . datetime , tags : dict = {}, start : datetime . datetime = None , precision : str = \"s\" , ) -> dict : \"\"\"\u6784\u5efa\u5220\u9664\u8bed\u53e5\u3002 according to [delete-predicate](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/delete-predicate/), delete\u53ea\u652f\u6301AND\u903b\u8f91\u64cd\u4f5c\uff0c\u53ea\u652f\u6301\u201c=\u201d\u64cd\u4f5c\uff0c\u4e0d\u652f\u6301\u201c\uff01=\u201d\u64cd\u4f5c\uff0c\u53ef\u4ee5\u4f7f\u7528\u4efb\u4f55\u5b57\u6bb5\u6216\u8005tag\uff0c\u4f46\u4e0d\u5305\u62ec_time\u548c_value\u5b57\u6bb5\u3002 \u7531\u4e8einfluxdb\u8fd9\u4e00\u6bb5\u6587\u6863\u4e0d\u662f\u5f88\u6e05\u695a\uff0c\u6839\u636e\u8bd5\u9a8c\u7ed3\u679c\uff0c\u76ee\u524d\u4ec5\u652f\u6301\u6309\u65f6\u95f4\u8303\u56f4\u548ctags\u8fdb\u884c\u5220\u9664\u8f83\u597d\u3002\u5982\u679c\u67d0\u4e2acolumn\u7684\u503c\u7c7b\u578b\u662f\u5b57\u7b26\u4e32\uff0c\u5219\u4e5f\u53ef\u4ee5\u901a\u8fc7`tags`\u53c2\u6570\u4f20\u5165\uff0c\u5339\u914d\u540e\u5220\u9664\u3002\u4f46\u5982\u679c\u4f20\u5165\u4e86\u975e\u5b57\u7b26\u4e32\u7c7b\u578b\u7684column\uff0c\u5219\u5c06\u5f97\u5230\u65e0\u6cd5\u9884\u6599\u7684\u7ed3\u679c\u3002 Args: measurement : [description] stop : [description] tags : \u6309tags\u548c\u5339\u914d\u7684\u503c\u8fdb\u884c\u5220\u9664\u3002\u4f20\u5165\u7684tags\u4e2d\uff0ckey\u4e3atag\u540d\u79f0\uff0cvalue\u4e3atag\u8981\u5339\u914d\u7684\u53d6\u503c\uff0c\u53ef\u4ee5\u4e3astr\u6216\u8005List[str]\u3002 start : \u8d77\u59cb\u65f6\u95f4\u3002\u5982\u679c\u7701\u7565\uff0c\u5219\u4f7f\u7528EPOCH_START. precision : \u65f6\u95f4\u7cbe\u5ea6\u3002\u53ef\u4ee5\u4e3a\u201cs\u201d\uff0c\u201cms\u201d\uff0c\u201cus\u201d Returns: \u5220\u9664\u8bed\u53e5 \"\"\" timespec = { \"s\" : \"seconds\" , \"ms\" : \"milliseconds\" , \"us\" : \"microseconds\" } . get ( precision ) if start is None : start = self . EPOCH_START . isoformat ( timespec = timespec ) + \"Z\" predicate = [ f '_measurement=\" { measurement } \"' ] for key , value in tags . items (): if isinstance ( value , list ): predicate . extend ([ f ' { key } = \" { v } \"' for v in value ]) else : predicate . 
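`latest(n)` only records the request (in `_last_n`) and, per the assertions above, cannot be combined with `top`, `sort` or `limit` in the same query. A small sketch with placeholder bucket and measurement names:

```python
# Sketch only: nothing is executed here, the builder state is just assembled.
from omicron.dal.influx.flux import Flux

flux = (
    Flux()
    .bucket("my-bucket")              # placeholder bucket
    .measurement("stock_bars_1d")     # placeholder measurement
    .tags({"code": "000001.XSHE"})
    .latest(5)                        # roughly sort + limit, but results come back in ascending time order
)
```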
append ( f ' { key } = \" { value } \"' ) command = { \"start\" : start , \"stop\" : f \" { stop . isoformat ( timespec = timespec ) } Z\" , \"predicate\" : \" AND \" . join ( predicate ), } return command def drop ( self , cols : List [ str ]) -> \"Flux\" : \"\"\"use this to drop columns before return result Args: cols : the name of columns to be dropped Returns: Flux object, to support pipe operation \"\"\" if \"drop\" in self . expressions : raise DuplicateOperationError ( \"drop operation has been set already\" ) # add surrounding quotes _cols = [ f '\" { c } \"' for c in cols ] self . expressions [ \"drop\" ] = f \" |> drop(columns: [ { ',' . join ( _cols ) } ])\" return self def drop_sys_cols ( self , cols : List [ str ] = None ) -> \"Flux\" : \"\"\"use this to drop [\"_start\", \"_stop\", \"_measurement\"], plus columns specified in `cols`, before return query result please be noticed, after drop sys columns, there's still two sys columns left, which is \"_time\" and \"table\", and \"_time\" should usually be kept, \"table\" is one we're not able to removed. If you don't like _time in return result, you can specify it in `cols` parameter. Args: cols : the extra columns to be dropped Returns: Flux query object \"\"\" _cols = [ \"_start\" , \"_stop\" , \"_measurement\" ] if cols is not None : _cols . extend ( cols ) return self . drop ( _cols )","title":"Flux - the query language builder for influxdb"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.cols","text":"the columns or the return records the implementation is buggy. Influx doesn't tell us in which order these columns are. Returns: Type Description List[str] the columns name of the return records","title":"cols"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.__init__","text":"\u521d\u59cb\u5316Flux\u5bf9\u8c61 Parameters: Name Type Description Default auto_pivot \u662f\u5426\u81ea\u52a8\u5c06\u67e5\u8be2\u5217\u5b57\u6bb5\u7ec4\u88c5\u6210\u884c. Defaults to True. True no_sys_cols \u662f\u5426\u81ea\u52a8\u5c06\u7cfb\u7edf\u5b57\u6bb5\u5220\u9664. Defaults to True.\u8bf7\u53c2\u8003 drop_sys_cols True Source code in omicron/dal/influx/flux.py def __init__ ( self , auto_pivot = True , no_sys_cols = True ): \"\"\"\u521d\u59cb\u5316Flux\u5bf9\u8c61 Args: auto_pivot : \u662f\u5426\u81ea\u52a8\u5c06\u67e5\u8be2\u5217\u5b57\u6bb5\u7ec4\u88c5\u6210\u884c. Defaults to True. no_sys_cols: \u662f\u5426\u81ea\u52a8\u5c06\u7cfb\u7edf\u5b57\u6bb5\u5220\u9664. Defaults to True.\u8bf7\u53c2\u8003[drop_sys_cols][omicron.dal.influx.flux.Flux.drop_sys_cols] \"\"\" self . _cols = None self . expressions = defaultdict ( list ) self . _auto_pivot = auto_pivot self . _last_n = None self . no_sys_cols = no_sys_cols","title":"__init__()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.bucket","text":"add bucket to query expression Exceptions: Type Description DuplicateOperationError \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u4e2asource\uff0c\u5982\u679c\u8868\u8fbe\u5f0f\u4e2d\u5df2\u7ecf\u6307\u5b9a\u4e86bucket\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Returns: Type Description Flux Flux\u5bf9\u8c61 Source code in omicron/dal/influx/flux.py def bucket ( self , bucket : str ) -> \"Flux\" : \"\"\"add bucket to query expression Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u4e2asource\uff0c\u5982\u679c\u8868\u8fbe\u5f0f\u4e2d\u5df2\u7ecf\u6307\u5b9a\u4e86bucket\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Returns: Flux\u5bf9\u8c61 \"\"\" if \"bucket\" in self . 
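`Flux.delete()` does not touch the server; it only assembles the JSON body that InfluxDB's `/api/v2/delete` endpoint expects (see `InfluxClient.delete` further below). A hedged sketch with a placeholder measurement name:

```python
import datetime

from omicron.dal.influx.flux import Flux

cmd = Flux().delete(
    "stock_bars_1d",                      # placeholder measurement
    stop=datetime.datetime(2022, 1, 1),
    tags={"code": "000001.XSHE"},
    precision="s",
)
# cmd is a dict with "start" (defaults to EPOCH_START), "stop" and a "predicate"
# roughly of the form: _measurement="stock_bars_1d" AND code = "000001.XSHE"
print(cmd["predicate"])
```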
expressions : raise DuplicateOperationError ( \"bucket has been set\" ) self . expressions [ \"bucket\" ] = f 'from(bucket: \" { bucket } \")' return self","title":"bucket()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.delete","text":"\u6784\u5efa\u5220\u9664\u8bed\u53e5\u3002 according to delete-predicate , delete\u53ea\u652f\u6301AND\u903b\u8f91\u64cd\u4f5c\uff0c\u53ea\u652f\u6301\u201c=\u201d\u64cd\u4f5c\uff0c\u4e0d\u652f\u6301\u201c\uff01=\u201d\u64cd\u4f5c\uff0c\u53ef\u4ee5\u4f7f\u7528\u4efb\u4f55\u5b57\u6bb5\u6216\u8005tag\uff0c\u4f46\u4e0d\u5305\u62ec_time\u548c_value\u5b57\u6bb5\u3002 \u7531\u4e8einfluxdb\u8fd9\u4e00\u6bb5\u6587\u6863\u4e0d\u662f\u5f88\u6e05\u695a\uff0c\u6839\u636e\u8bd5\u9a8c\u7ed3\u679c\uff0c\u76ee\u524d\u4ec5\u652f\u6301\u6309\u65f6\u95f4\u8303\u56f4\u548ctags\u8fdb\u884c\u5220\u9664\u8f83\u597d\u3002\u5982\u679c\u67d0\u4e2acolumn\u7684\u503c\u7c7b\u578b\u662f\u5b57\u7b26\u4e32\uff0c\u5219\u4e5f\u53ef\u4ee5\u901a\u8fc7 tags \u53c2\u6570\u4f20\u5165\uff0c\u5339\u914d\u540e\u5220\u9664\u3002\u4f46\u5982\u679c\u4f20\u5165\u4e86\u975e\u5b57\u7b26\u4e32\u7c7b\u578b\u7684column\uff0c\u5219\u5c06\u5f97\u5230\u65e0\u6cd5\u9884\u6599\u7684\u7ed3\u679c\u3002 Parameters: Name Type Description Default measurement [description] required stop [description] required tags \u6309tags\u548c\u5339\u914d\u7684\u503c\u8fdb\u884c\u5220\u9664\u3002\u4f20\u5165\u7684tags\u4e2d\uff0ckey\u4e3atag\u540d\u79f0\uff0cvalue\u4e3atag\u8981\u5339\u914d\u7684\u53d6\u503c\uff0c\u53ef\u4ee5\u4e3astr\u6216\u8005List[str]\u3002 {} start \u8d77\u59cb\u65f6\u95f4\u3002\u5982\u679c\u7701\u7565\uff0c\u5219\u4f7f\u7528EPOCH_START. None precision \u65f6\u95f4\u7cbe\u5ea6\u3002\u53ef\u4ee5\u4e3a\u201cs\u201d\uff0c\u201cms\u201d\uff0c\u201cus\u201d 's' Returns: Type Description dict \u5220\u9664\u8bed\u53e5 Source code in omicron/dal/influx/flux.py def delete ( self , measurement : str , stop : datetime . datetime , tags : dict = {}, start : datetime . datetime = None , precision : str = \"s\" , ) -> dict : \"\"\"\u6784\u5efa\u5220\u9664\u8bed\u53e5\u3002 according to [delete-predicate](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/delete-predicate/), delete\u53ea\u652f\u6301AND\u903b\u8f91\u64cd\u4f5c\uff0c\u53ea\u652f\u6301\u201c=\u201d\u64cd\u4f5c\uff0c\u4e0d\u652f\u6301\u201c\uff01=\u201d\u64cd\u4f5c\uff0c\u53ef\u4ee5\u4f7f\u7528\u4efb\u4f55\u5b57\u6bb5\u6216\u8005tag\uff0c\u4f46\u4e0d\u5305\u62ec_time\u548c_value\u5b57\u6bb5\u3002 \u7531\u4e8einfluxdb\u8fd9\u4e00\u6bb5\u6587\u6863\u4e0d\u662f\u5f88\u6e05\u695a\uff0c\u6839\u636e\u8bd5\u9a8c\u7ed3\u679c\uff0c\u76ee\u524d\u4ec5\u652f\u6301\u6309\u65f6\u95f4\u8303\u56f4\u548ctags\u8fdb\u884c\u5220\u9664\u8f83\u597d\u3002\u5982\u679c\u67d0\u4e2acolumn\u7684\u503c\u7c7b\u578b\u662f\u5b57\u7b26\u4e32\uff0c\u5219\u4e5f\u53ef\u4ee5\u901a\u8fc7`tags`\u53c2\u6570\u4f20\u5165\uff0c\u5339\u914d\u540e\u5220\u9664\u3002\u4f46\u5982\u679c\u4f20\u5165\u4e86\u975e\u5b57\u7b26\u4e32\u7c7b\u578b\u7684column\uff0c\u5219\u5c06\u5f97\u5230\u65e0\u6cd5\u9884\u6599\u7684\u7ed3\u679c\u3002 Args: measurement : [description] stop : [description] tags : \u6309tags\u548c\u5339\u914d\u7684\u503c\u8fdb\u884c\u5220\u9664\u3002\u4f20\u5165\u7684tags\u4e2d\uff0ckey\u4e3atag\u540d\u79f0\uff0cvalue\u4e3atag\u8981\u5339\u914d\u7684\u53d6\u503c\uff0c\u53ef\u4ee5\u4e3astr\u6216\u8005List[str]\u3002 start : \u8d77\u59cb\u65f6\u95f4\u3002\u5982\u679c\u7701\u7565\uff0c\u5219\u4f7f\u7528EPOCH_START. 
precision : \u65f6\u95f4\u7cbe\u5ea6\u3002\u53ef\u4ee5\u4e3a\u201cs\u201d\uff0c\u201cms\u201d\uff0c\u201cus\u201d Returns: \u5220\u9664\u8bed\u53e5 \"\"\" timespec = { \"s\" : \"seconds\" , \"ms\" : \"milliseconds\" , \"us\" : \"microseconds\" } . get ( precision ) if start is None : start = self . EPOCH_START . isoformat ( timespec = timespec ) + \"Z\" predicate = [ f '_measurement=\" { measurement } \"' ] for key , value in tags . items (): if isinstance ( value , list ): predicate . extend ([ f ' { key } = \" { v } \"' for v in value ]) else : predicate . append ( f ' { key } = \" { value } \"' ) command = { \"start\" : start , \"stop\" : f \" { stop . isoformat ( timespec = timespec ) } Z\" , \"predicate\" : \" AND \" . join ( predicate ), } return command","title":"delete()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.drop","text":"use this to drop columns before return result Parameters: Name Type Description Default cols the name of columns to be dropped required Returns: Type Description Flux Flux object, to support pipe operation Source code in omicron/dal/influx/flux.py def drop ( self , cols : List [ str ]) -> \"Flux\" : \"\"\"use this to drop columns before return result Args: cols : the name of columns to be dropped Returns: Flux object, to support pipe operation \"\"\" if \"drop\" in self . expressions : raise DuplicateOperationError ( \"drop operation has been set already\" ) # add surrounding quotes _cols = [ f '\" { c } \"' for c in cols ] self . expressions [ \"drop\" ] = f \" |> drop(columns: [ { ',' . join ( _cols ) } ])\" return self","title":"drop()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.drop_sys_cols","text":"use this to drop [\"_start\", \"_stop\", \"_measurement\"], plus columns specified in cols , before return query result please be noticed, after drop sys columns, there's still two sys columns left, which is \"_time\" and \"table\", and \"_time\" should usually be kept, \"table\" is one we're not able to removed. If you don't like _time in return result, you can specify it in cols parameter. Parameters: Name Type Description Default cols the extra columns to be dropped None Returns: Type Description Flux Flux query object Source code in omicron/dal/influx/flux.py def drop_sys_cols ( self , cols : List [ str ] = None ) -> \"Flux\" : \"\"\"use this to drop [\"_start\", \"_stop\", \"_measurement\"], plus columns specified in `cols`, before return query result please be noticed, after drop sys columns, there's still two sys columns left, which is \"_time\" and \"table\", and \"_time\" should usually be kept, \"table\" is one we're not able to removed. If you don't like _time in return result, you can specify it in `cols` parameter. Args: cols : the extra columns to be dropped Returns: Flux query object \"\"\" _cols = [ \"_start\" , \"_stop\" , \"_measurement\" ] if cols is not None : _cols . extend ( cols ) return self . 
drop ( _cols )","title":"drop_sys_cols()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.fields","text":"\u7ed9\u67e5\u8be2\u6dfb\u52a0field\u8fc7\u6ee4\u6761\u4ef6 \u6b64\u67e5\u8be2\u6761\u4ef6\u4e3a\u8fc7\u6ee4\u6761\u4ef6\uff0c\u7528\u4ee5\u6307\u5b9a\u54ea\u4e9bfield\u4f1a\u51fa\u73b0\u5728\u67e5\u8be2\u7ed3\u679c\u4e2d\uff0c\u5e76\u975e\u5fc5\u987b\u3002\u5982\u679c\u67e5\u8be2\u4e2d\u6ca1\u6709\u6307\u5b9atags\uff0c\u5219\u4f1a\u8fd4\u56de\u6240\u6709\u8bb0\u5f55\u3002 \u7531\u4e8e\u4e00\u6761\u8bb0\u5f55\u53ea\u80fd\u5c5e\u4e8e\u4e00\u4e2a_field\uff0c\u6240\u4ee5\uff0c\u5f53\u6307\u5b9a\u591a\u4e2a_field\u8fdb\u884c\u67e5\u8be2\u65f6\uff0c\u5b83\u4eec\u4e4b\u95f4\u7684\u5173\u7cfb\u5e94\u8be5\u4e3a or \u3002 Exceptions: Type Description DuplicateOperationError \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6267\u884c\u4e00\u6b21\uff0c\u5982\u679cfiled filter\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Parameters: Name Type Description Default fields List \u5f85\u67e5\u8be2\u7684field\u5217\u8868 required reserve_time_stamp bool \u662f\u5426\u4fdd\u7559\u65f6\u95f4\u6233 _time \uff0c\u9ed8\u8ba4\u4e3aTrue True Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def fields ( self , fields : List , reserve_time_stamp : bool = True ) -> \"Flux\" : \"\"\"\u7ed9\u67e5\u8be2\u6dfb\u52a0field\u8fc7\u6ee4\u6761\u4ef6 \u6b64\u67e5\u8be2\u6761\u4ef6\u4e3a\u8fc7\u6ee4\u6761\u4ef6\uff0c\u7528\u4ee5\u6307\u5b9a\u54ea\u4e9bfield\u4f1a\u51fa\u73b0\u5728\u67e5\u8be2\u7ed3\u679c\u4e2d\uff0c\u5e76\u975e\u5fc5\u987b\u3002\u5982\u679c\u67e5\u8be2\u4e2d\u6ca1\u6709\u6307\u5b9atags\uff0c\u5219\u4f1a\u8fd4\u56de\u6240\u6709\u8bb0\u5f55\u3002 \u7531\u4e8e\u4e00\u6761\u8bb0\u5f55\u53ea\u80fd\u5c5e\u4e8e\u4e00\u4e2a_field\uff0c\u6240\u4ee5\uff0c\u5f53\u6307\u5b9a\u591a\u4e2a_field\u8fdb\u884c\u67e5\u8be2\u65f6\uff0c\u5b83\u4eec\u4e4b\u95f4\u7684\u5173\u7cfb\u5e94\u8be5\u4e3a`or`\u3002 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6267\u884c\u4e00\u6b21\uff0c\u5982\u679cfiled filter\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: fields: \u5f85\u67e5\u8be2\u7684field\u5217\u8868 reserve_time_stamp: \u662f\u5426\u4fdd\u7559\u65f6\u95f4\u6233`_time`\uff0c\u9ed8\u8ba4\u4e3aTrue Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"fields\" in self . expressions : raise DuplicateOperationError ( \"fields has been set\" ) self . _cols = fields . copy () if reserve_time_stamp and \"_time\" not in self . _cols : self . _cols . append ( \"_time\" ) self . _cols = sorted ( self . _cols ) filters = [ f 'r[\"_field\"] == \" { name } \"' for name in self . _cols ] self . expressions [ \"fields\" ] = f \" |> filter(fn: (r) => { ' or ' . 
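`drop_sys_cols()` is a convenience wrapper around `drop()`. A tiny sketch of the fragment it produces:

```python
from omicron.dal.influx.flux import Flux

flux = Flux().drop_sys_cols(["_time"])   # drop "_time" too if you don't want it returned
# roughly: |> drop(columns: ["_start","_stop","_measurement","_time"])
print(flux.expressions["drop"])
```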
join ( filters ) } )\" return self","title":"fields()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.format_time","text":"\u5c06\u65f6\u95f4\u8f6c\u6362\u6210\u5ba2\u6237\u7aef\u5bf9\u5e94\u7684\u7cbe\u5ea6\uff0c\u5e76\u4ee5 RFC3339 timestamps\u683c\u5f0f\u4e32\uff08\u5373influxdb\u8981\u6c42\u7684\u683c\u5f0f\uff09\u8fd4\u56de\u3002 \u5982\u679c\u8fd9\u4e2a\u65f6\u95f4\u662f\u4f5c\u4e3a\u67e5\u8be2\u7684range\u4e2d\u7684\u7ed3\u675f\u65f6\u95f4\u4f7f\u7528\u65f6\uff0c\u7531\u4e8einflux\u67e5\u8be2\u7684\u65f6\u95f4\u8303\u56f4\u662f\u5de6\u95ed\u53f3\u5f00\u7684\uff0c\u56e0\u6b64\u5982\u679c\u4f60\u9700\u8981\u67e5\u8be2\u7684\u662f\u4e00\u4e2a\u95ed\u533a\u95f4\uff0c\u5219\u9700\u8981\u5c06 end \u7684\u65f6\u95f4\u5411\u524d\u504f\u79fb\u4e00\u4e2a\u7cbe\u5ea6\u3002\u901a\u8fc7\u4f20\u5165 shift_forward = True \u53ef\u4ee5\u5b8c\u6210\u8fd9\u79cd\u8f6c\u6362\u3002 Examples: >>> # by default, the precision is seconds, and convert a date >>> Flux . format_time ( datetime . date ( 2019 , 1 , 1 )) '2019-01-01T00:00:00Z' >>> # set precision to ms, convert a time >>> Flux . format_time ( datetime . datetime ( 1978 , 7 , 8 , 12 , 34 , 56 , 123456 ), precision = \"ms\" ) '1978-07-08T12:34:56.123Z' >>> # convert and forward shift >>> Flux . format_time ( datetime . date ( 1978 , 7 , 8 ), shift_forward = True ) '1978-07-08T00:00:01Z' Parameters: Name Type Description Default tm \u5f85\u683c\u5f0f\u5316\u7684\u65f6\u95f4 required precision str \u65f6\u95f4\u7cbe\u5ea6\uff0c\u53ef\u9009\u503c\u4e3a\uff1a's', 'ms', 'us' 's' shift_forward \u5982\u679c\u4e3aTrue\uff0c\u5219\u5c06end\u5411\u524d\u504f\u79fb\u4e00\u4e2a\u7cbe\u5ea6 False Returns: Type Description str \u8c03\u6574\u540e\u7b26\u5408influx\u65f6\u95f4\u89c4\u8303\u7684\u65f6\u95f4\uff08\u5b57\u7b26\u4e32\u8868\u793a\uff09 Source code in omicron/dal/influx/flux.py @classmethod def format_time ( cls , tm : Frame , precision : str = \"s\" , shift_forward = False ) -> str : \"\"\"\u5c06\u65f6\u95f4\u8f6c\u6362\u6210\u5ba2\u6237\u7aef\u5bf9\u5e94\u7684\u7cbe\u5ea6\uff0c\u5e76\u4ee5 RFC3339 timestamps\u683c\u5f0f\u4e32\uff08\u5373influxdb\u8981\u6c42\u7684\u683c\u5f0f\uff09\u8fd4\u56de\u3002 \u5982\u679c\u8fd9\u4e2a\u65f6\u95f4\u662f\u4f5c\u4e3a\u67e5\u8be2\u7684range\u4e2d\u7684\u7ed3\u675f\u65f6\u95f4\u4f7f\u7528\u65f6\uff0c\u7531\u4e8einflux\u67e5\u8be2\u7684\u65f6\u95f4\u8303\u56f4\u662f\u5de6\u95ed\u53f3\u5f00\u7684\uff0c\u56e0\u6b64\u5982\u679c\u4f60\u9700\u8981\u67e5\u8be2\u7684\u662f\u4e00\u4e2a\u95ed\u533a\u95f4\uff0c\u5219\u9700\u8981\u5c06`end`\u7684\u65f6\u95f4\u5411\u524d\u504f\u79fb\u4e00\u4e2a\u7cbe\u5ea6\u3002\u901a\u8fc7\u4f20\u5165`shift_forward = True`\u53ef\u4ee5\u5b8c\u6210\u8fd9\u79cd\u8f6c\u6362\u3002 Examples: >>> # by default, the precision is seconds, and convert a date >>> Flux.format_time(datetime.date(2019, 1, 1)) '2019-01-01T00:00:00Z' >>> # set precision to ms, convert a time >>> Flux.format_time(datetime.datetime(1978, 7, 8, 12, 34, 56, 123456), precision=\"ms\") '1978-07-08T12:34:56.123Z' >>> # convert and forward shift >>> Flux.format_time(datetime.date(1978, 7, 8), shift_forward = True) '1978-07-08T00:00:01Z' Args: tm : \u5f85\u683c\u5f0f\u5316\u7684\u65f6\u95f4 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u53ef\u9009\u503c\u4e3a\uff1a's', 'ms', 'us' shift_forward: \u5982\u679c\u4e3aTrue\uff0c\u5219\u5c06end\u5411\u524d\u504f\u79fb\u4e00\u4e2a\u7cbe\u5ea6 Returns: \u8c03\u6574\u540e\u7b26\u5408influx\u65f6\u95f4\u89c4\u8303\u7684\u65f6\u95f4\uff08\u5b57\u7b26\u4e32\u8868\u793a\uff09 \"\"\" timespec = { 
\"s\" : \"seconds\" , \"ms\" : \"milliseconds\" , \"us\" : \"microseconds\" } . get ( precision ) if timespec is None : raise ValueError ( f \"precision must be one of 's', 'ms', 'us', but got { precision } \" ) tm = arrow . get ( tm ) . naive if shift_forward : tm = tm + datetime . timedelta ( ** { timespec : 1 }) return tm . isoformat ( sep = \"T\" , timespec = timespec ) + \"Z\"","title":"format_time()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.group","text":"[summary] Returns: Type Description Flux [description] Source code in omicron/dal/influx/flux.py def group ( self , by : Tuple [ str ]) -> \"Flux\" : \"\"\"[summary] Returns: [description] \"\"\" if \"group\" in self . expressions : raise DuplicateOperationError ( \"group has been set\" ) if isinstance ( by , str ): by = [ by ] cols = \",\" . join ([ f '\" { col } \"' for col in by ]) self . expressions [ \"group\" ] = f \" |> group(columns: [ { cols } ])\" return self","title":"group()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.latest","text":"\u83b7\u53d6\u6700\u540en\u6761\u6570\u636e\uff0c\u6309\u65f6\u95f4\u589e\u5e8f\u8fd4\u56de Flux\u67e5\u8be2\u7684\u589e\u5f3a\u529f\u80fd\uff0c\u76f8\u5f53\u4e8etop + sort + limit Parameters: Name Type Description Default n int \u6700\u540en\u6761\u6570\u636e required Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def latest ( self , n : int ) -> \"Flux\" : \"\"\"\u83b7\u53d6\u6700\u540en\u6761\u6570\u636e\uff0c\u6309\u65f6\u95f4\u589e\u5e8f\u8fd4\u56de Flux\u67e5\u8be2\u7684\u589e\u5f3a\u529f\u80fd\uff0c\u76f8\u5f53\u4e8etop + sort + limit Args: n: \u6700\u540en\u6761\u6570\u636e Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" assert \"top\" not in self . expressions , \"top and last_n can not be used together\" assert ( \"sort\" not in self . expressions ), \"sort and last_n can not be used together\" assert ( \"limit\" not in self . expressions ), \"limit and last_n can not be used together\" self . _last_n = n return self","title":"latest()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.limit","text":"\u6dfb\u52a0\u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Exceptions: Type Description DuplicateOperationError \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21limit\uff0c\u5982\u679climit\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Parameters: Name Type Description Default limit int \u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 required Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def limit ( self , limit : int ) -> \"Flux\" : \"\"\"\u6dfb\u52a0\u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21limit\uff0c\u5982\u679climit\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: limit: \u8fd4\u56de\u8bb0\u5f55\u6570\u9650\u5236 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"limit\" in self . expressions : raise DuplicateOperationError ( \"limit has been set\" ) self . 
expressions [ \"limit\" ] = \" |> limit(n: %d )\" % limit return self","title":"limit()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.measurement","text":"add measurement filter to query Exceptions: Type Description DuplicateOperationError \u4e00\u6b21\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u4e2ameasurement, \u5982\u679c\u8868\u8fbe\u5f0f\u4e2d\u5df2\u7ecf\u5b58\u5728measurement, \u5219\u629b\u51fa\u5f02\u5e38 Returns: Type Description Flux Flux\u5bf9\u8c61\u81ea\u8eab\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def measurement ( self , measurement : str ) -> \"Flux\" : \"\"\"add measurement filter to query Raises: DuplicateOperationError: \u4e00\u6b21\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u4e2ameasurement, \u5982\u679c\u8868\u8fbe\u5f0f\u4e2d\u5df2\u7ecf\u5b58\u5728measurement, \u5219\u629b\u51fa\u5f02\u5e38 Returns: Flux\u5bf9\u8c61\u81ea\u8eab\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"measurement\" in self . expressions : raise DuplicateOperationError ( \"measurement has been set\" ) self . expressions [ \"measurement\" ] = f ' |> filter(fn: (r) => r[\"_measurement\"] == \" { measurement } \")' return self","title":"measurement()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.pivot","text":"pivot\u7528\u6765\u5c06\u4ee5\u5217\u4e3a\u5355\u4f4d\u7684\u6570\u636e\u8f6c\u6362\u4e3a\u4ee5\u884c\u4e3a\u5355\u4f4d\u7684\u6570\u636e Flux\u67e5\u8be2\u8fd4\u56de\u7684\u7ed3\u679c\u901a\u5e38\u90fd\u662f\u4ee5\u5217\u4e3a\u5355\u4f4d\u7684\u6570\u636e\uff0c\u589e\u52a0\u672cpivot\u6761\u4ef6\u540e\uff0c\u7ed3\u679c\u5c06\u88ab\u8f6c\u6362\u6210\u4e3a\u4ee5\u884c\u4e3a\u5355\u4f4d\u7684\u6570\u636e\u518d\u8fd4\u56de\u3002 \u8fd9\u91cc\u5b9e\u73b0\u7684\u662fmeasurement\u5185\u7684\u8f6c\u6362\uff0c\u8bf7\u53c2\u8003 pivot Parameters: Name Type Description Default row_keys List[str] \u60df\u4e00\u786e\u5b9a\u8f93\u51fa\u4e2d\u4e00\u884c\u6570\u636e\u7684\u5217\u540d\u5b57, \u9ed8\u8ba4\u4e3a[\"_time\"] ['_time'] column_keys \u5217\u540d\u79f0\u5217\u8868\uff0c\u9ed8\u8ba4\u4e3a[\"_field\"] ['_field'] value_column str \u503c\u5217\u540d\uff0c\u9ed8\u8ba4\u4e3a\"_value\" '_value' Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def pivot ( self , row_keys : List [ str ] = [ \"_time\" ], column_keys = [ \"_field\" ], value_column : str = \"_value\" , ) -> \"Flux\" : \"\"\"pivot\u7528\u6765\u5c06\u4ee5\u5217\u4e3a\u5355\u4f4d\u7684\u6570\u636e\u8f6c\u6362\u4e3a\u4ee5\u884c\u4e3a\u5355\u4f4d\u7684\u6570\u636e Flux\u67e5\u8be2\u8fd4\u56de\u7684\u7ed3\u679c\u901a\u5e38\u90fd\u662f\u4ee5\u5217\u4e3a\u5355\u4f4d\u7684\u6570\u636e\uff0c\u589e\u52a0\u672cpivot\u6761\u4ef6\u540e\uff0c\u7ed3\u679c\u5c06\u88ab\u8f6c\u6362\u6210\u4e3a\u4ee5\u884c\u4e3a\u5355\u4f4d\u7684\u6570\u636e\u518d\u8fd4\u56de\u3002 \u8fd9\u91cc\u5b9e\u73b0\u7684\u662fmeasurement\u5185\u7684\u8f6c\u6362\uff0c\u8bf7\u53c2\u8003 [pivot](https://docs.influxdata.com/flux/v0.x/stdlib/universe/pivot/#align-fields-within-each-measurement-that-have-the-same-timestamp) Args: row_keys: \u60df\u4e00\u786e\u5b9a\u8f93\u51fa\u4e2d\u4e00\u884c\u6570\u636e\u7684\u5217\u540d\u5b57, \u9ed8\u8ba4\u4e3a[\"_time\"] column_keys: \u5217\u540d\u79f0\u5217\u8868\uff0c\u9ed8\u8ba4\u4e3a[\"_field\"] value_column: \u503c\u5217\u540d\uff0c\u9ed8\u8ba4\u4e3a\"_value\" Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c 
\"\"\" if \"pivot\" in self . expressions : raise DuplicateOperationError ( \"pivot has been set\" ) columns = \",\" . join ([ f '\" { name } \"' for name in column_keys ]) rowkeys = \",\" . join ([ f '\" { name } \"' for name in row_keys ]) self . expressions [ \"pivot\" ] = f ' |> pivot(columnKey: [ { columns } ], rowKey: [ { rowkeys } ], valueColumn: \" { value_column } \")' return self","title":"pivot()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.range","text":"\u6dfb\u52a0\u65f6\u95f4\u8303\u56f4\u8fc7\u6ee4 \u5fc5\u987b\u6307\u5b9a\u7684\u67e5\u8be2\u6761\u4ef6\uff0c\u5426\u5219influxdb\u4f1a\u62a5unbound\u67e5\u8be2\u9519\uff0c\u56e0\u4e3a\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u7684\u6570\u636e\u91cf\u5c06\u975e\u5e38\u5927\u3002 \u5728\u683c\u5f0f\u5316\u65f6\u95f4\u65f6\uff0c\u9700\u8981\u6839\u636e precision \u751f\u6210\u65f6\u95f4\u5b57\u7b26\u4e32\u3002\u5728\u5411Influxdb\u53d1\u9001\u8bf7\u6c42\u65f6\uff0c\u5e94\u8be5\u6ce8\u610f\u67e5\u8be2\u53c2\u6570\u4e2d\u6307\u5b9a\u7684\u65f6\u95f4\u7cbe\u5ea6\u4e0e\u8fd9\u91cc\u4f7f\u7528\u7684\u4fdd\u6301\u4e00\u81f4\u3002 Influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u9ed8\u8ba4\u4e0d\u5305\u542b\u7ed3\u675f\u65f6\u95f4\uff0c\u5f53 right_close \u6307\u5b9a\u4e3aTrue\u65f6\uff0c\u6211\u4eec\u5c06\u6839\u636e\u6307\u5b9a\u7684\u7cbe\u5ea6\u4fee\u6539 end \u65f6\u95f4\uff0c\u4f7f\u4e4b\u4ec5\u6bd4 end \u591a\u4e00\u4e2a\u65f6\u95f4\u5355\u4f4d\uff0c\u4ece\u800c\u4fdd\u8bc1\u67e5\u8be2\u7ed3\u679c\u4f1a\u5305\u542b end \u3002 Exceptions: Type Description DuplicateOperationError \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21\u65f6\u95f4\u8303\u56f4\uff0c\u5982\u679crange\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Parameters: Name Type Description Default start Union[datetime.date, datetime.datetime] \u5f00\u59cb\u65f6\u95f4 required end Union[datetime.date, datetime.datetime] \u7ed3\u675f\u65f6\u95f4 required right_close \u67e5\u8be2\u7ed3\u679c\u662f\u5426\u5305\u542b\u7ed3\u675f\u65f6\u95f4\u3002 True precision \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 's' Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u652f\u6301\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def range ( self , start : Frame , end : Frame , right_close = True , precision = \"s\" ) -> \"Flux\" : \"\"\"\u6dfb\u52a0\u65f6\u95f4\u8303\u56f4\u8fc7\u6ee4 \u5fc5\u987b\u6307\u5b9a\u7684\u67e5\u8be2\u6761\u4ef6\uff0c\u5426\u5219influxdb\u4f1a\u62a5unbound\u67e5\u8be2\u9519\uff0c\u56e0\u4e3a\u8fd9\u79cd\u60c5\u51b5\u4e0b\uff0c\u8fd4\u56de\u7684\u6570\u636e\u91cf\u5c06\u975e\u5e38\u5927\u3002 \u5728\u683c\u5f0f\u5316\u65f6\u95f4\u65f6\uff0c\u9700\u8981\u6839\u636e`precision`\u751f\u6210\u65f6\u95f4\u5b57\u7b26\u4e32\u3002\u5728\u5411Influxdb\u53d1\u9001\u8bf7\u6c42\u65f6\uff0c\u5e94\u8be5\u6ce8\u610f\u67e5\u8be2\u53c2\u6570\u4e2d\u6307\u5b9a\u7684\u65f6\u95f4\u7cbe\u5ea6\u4e0e\u8fd9\u91cc\u4f7f\u7528\u7684\u4fdd\u6301\u4e00\u81f4\u3002 Influxdb\u7684\u67e5\u8be2\u7ed3\u679c\u9ed8\u8ba4\u4e0d\u5305\u542b\u7ed3\u675f\u65f6\u95f4\uff0c\u5f53`right_close`\u6307\u5b9a\u4e3aTrue\u65f6\uff0c\u6211\u4eec\u5c06\u6839\u636e\u6307\u5b9a\u7684\u7cbe\u5ea6\u4fee\u6539`end`\u65f6\u95f4\uff0c\u4f7f\u4e4b\u4ec5\u6bd4`end`\u591a\u4e00\u4e2a\u65f6\u95f4\u5355\u4f4d\uff0c\u4ece\u800c\u4fdd\u8bc1\u67e5\u8be2\u7ed3\u679c\u4f1a\u5305\u542b`end`\u3002 Raises: DuplicateOperationError: 
\u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6307\u5b9a\u4e00\u6b21\u65f6\u95f4\u8303\u56f4\uff0c\u5982\u679crange\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: start: \u5f00\u59cb\u65f6\u95f4 end: \u7ed3\u675f\u65f6\u95f4 right_close: \u67e5\u8be2\u7ed3\u679c\u662f\u5426\u5305\u542b\u7ed3\u675f\u65f6\u95f4\u3002 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u652f\u6301\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"range\" in self . expressions : raise DuplicateOperationError ( \"range has been set\" ) if precision not in [ \"s\" , \"ms\" , \"us\" ]: raise AssertionError ( \"precision must be 's', 'ms' or 'us'\" ) end = self . format_time ( end , precision , right_close ) start = self . format_time ( start , precision ) self . expressions [ \"range\" ] = f \" |> range(start: { start } , stop: { end } )\" return self","title":"range()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.sort","text":"\u6309\u7167\u6307\u5b9a\u7684\u5217\u8fdb\u884c\u6392\u5e8f \u6839\u636e influxdb doc , \u67e5\u8be2\u8fd4\u56de\u503c\u9ed8\u8ba4\u5730\u6309\u65f6\u95f4\u6392\u5e8f\u3002\u56e0\u6b64\uff0c\u5982\u679c\u4ec5\u4ec5\u662f\u8981\u6c42\u67e5\u8be2\u7ed3\u679c\u6309\u65f6\u95f4\u6392\u5e8f\uff0c\u65e0\u987b\u8c03\u7528\u6b64API\uff0c\u4f46\u662f\uff0c\u6b64API\u63d0\u4f9b\u4e86\u6309\u5176\u5b83\u5b57\u6bb5\u6392\u5e8f\u7684\u80fd\u529b\u3002 \u53e6\u5916\uff0c\u5728\u4e00\u4e2a\u67095000\u591a\u4e2atag\uff0c\u5171\u8fd4\u56de1M\u6761\u8bb0\u5f55\u7684\u6d4b\u8bd5\u4e2d\uff0c\u6d4b\u8bd5\u9a8c\u8bc1\u8fd4\u56de\u8bb0\u5f55\u786e\u5b9e\u6309_time\u5347\u5e8f\u6392\u5217\u3002 Parameters: Name Type Description Default by List[str] \u6307\u5b9a\u6392\u5e8f\u7684\u5217\u540d\u79f0\u5217\u8868 None Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def sort ( self , by : List [ str ] = None , desc : bool = False ) -> \"Flux\" : \"\"\"\u6309\u7167\u6307\u5b9a\u7684\u5217\u8fdb\u884c\u6392\u5e8f \u6839\u636e[influxdb doc](https://docs.influxdata.com/influxdb/v2.0/query-data/flux/first-last/), \u67e5\u8be2\u8fd4\u56de\u503c\u9ed8\u8ba4\u5730\u6309\u65f6\u95f4\u6392\u5e8f\u3002\u56e0\u6b64\uff0c\u5982\u679c\u4ec5\u4ec5\u662f\u8981\u6c42\u67e5\u8be2\u7ed3\u679c\u6309\u65f6\u95f4\u6392\u5e8f\uff0c\u65e0\u987b\u8c03\u7528\u6b64API\uff0c\u4f46\u662f\uff0c\u6b64API\u63d0\u4f9b\u4e86\u6309\u5176\u5b83\u5b57\u6bb5\u6392\u5e8f\u7684\u80fd\u529b\u3002 \u53e6\u5916\uff0c\u5728\u4e00\u4e2a\u67095000\u591a\u4e2atag\uff0c\u5171\u8fd4\u56de1M\u6761\u8bb0\u5f55\u7684\u6d4b\u8bd5\u4e2d\uff0c\u6d4b\u8bd5\u9a8c\u8bc1\u8fd4\u56de\u8bb0\u5f55\u786e\u5b9e\u6309_time\u5347\u5e8f\u6392\u5217\u3002 Args: by: \u6307\u5b9a\u6392\u5e8f\u7684\u5217\u540d\u79f0\u5217\u8868 Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"sort\" in self . expressions : raise DuplicateOperationError ( \"sort has been set\" ) if by is None : by = [ \"_value\" ] if isinstance ( by , str ): by = [ by ] columns_ = \",\" . join ([ f '\" { name } \"' for name in by ]) desc = \"true\" if desc else \"false\" self . 
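Putting the pieces documented above together, a complete query is typically assembled as one chained pipeline and then handed to `InfluxClient.query()`, which converts the `Flux` object with `str()`. The bucket, measurement and code names below are placeholders:

```python
import datetime

from omicron.dal.influx.flux import Flux

flux = (
    Flux()
    .bucket("my-bucket")                    # placeholder bucket
    .measurement("stock_bars_1d")           # placeholder measurement
    .range(datetime.date(2022, 1, 4), datetime.date(2022, 1, 28))  # right-closed by default
    .tags({"code": ["000001.XSHE"]})
    .fields(["open", "close", "volume"])
)
# await client.query(flux)   # inside an async function, with an InfluxClient instance
```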
expressions [ \"sort\" ] = f \" |> sort(columns: [ { columns_ } ], desc: { desc } )\" return self","title":"sort()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.tags","text":"\u7ed9\u67e5\u8be2\u6dfb\u52a0tags\u8fc7\u6ee4\u6761\u4ef6 \u6b64\u67e5\u8be2\u6761\u4ef6\u4e3a\u8fc7\u6ee4\u6761\u4ef6\uff0c\u5e76\u975e\u5fc5\u987b\u3002\u5982\u679c\u67e5\u8be2\u4e2d\u6ca1\u6709\u6307\u5b9atags\uff0c\u5219\u4f1a\u8fd4\u56de\u6240\u6709\u8bb0\u5f55\u3002 \u5728\u5b9e\u73b0\u4e0a\uff0c\u65e2\u53ef\u4ee5\u4f7f\u7528 contains \u8bed\u6cd5\uff0c\u4e5f\u53ef\u4ee5\u4f7f\u7528 or \u8bed\u6cd5(\u7531\u4e8e\u4e00\u6761\u8bb0\u5f55\u53ea\u80fd\u5c5e\u4e8e\u4e00\u4e2atag\uff0c\u6240\u4ee5\uff0c\u5f53\u6307\u5b9a\u591a\u4e2atag\u8fdb\u884c\u67e5\u8be2\u65f6\uff0c\u5b83\u4eec\u4e4b\u95f4\u7684\u5173\u7cfb\u5e94\u8be5\u4e3a or )\u3002\u7ecf\u9a8c\u8bc1\uff0ccontains\u8bed\u6cd5\u4f1a\u59cb\u7ec8\u5148\u5c06\u6240\u6709\u7b26\u5408\u6761\u4ef6\u7684\u8bb0\u5f55\u68c0\u7d22\u51fa\u6765\uff0c\u518d\u8fdb\u884c\u8fc7\u6ee4\u3002\u8fd9\u6837\u7684\u6548\u7387\u6bd4\u8f83\u4f4e\uff0c\u7279\u522b\u662f\u5f53tags\u7684\u6570\u91cf\u8f83\u5c11\u65f6\uff0c\u4f1a\u8fdc\u8fdc\u6bd4\u4f7f\u7528or\u8bed\u6cd5\u6162\u3002 Exceptions: Type Description DuplicateOperationError \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6267\u884c\u4e00\u6b21\uff0c\u5982\u679ctag filter\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Parameters: Name Type Description Default tags tags\u662f\u4e00\u4e2a{tagname: Union[str,[tag_values]]}\u5bf9\u8c61\u3002 required Examples: >>> flux = Flux () >>> flux . tags ({ \"code\" : [ \"000001\" , \"000002\" ], \"name\" : [ \"\u6d66\u53d1\u94f6\u884c\" ]}) . expressions [ \"tags\" ] ' |> filter(fn: (r) => r[\"code\"] == \"000001\" or r[\"code\"] == \"000002\" or r[\"name\"] == \"\u6d66\u53d1\u94f6\u884c\")' Returns: Type Description Flux Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c Source code in omicron/dal/influx/flux.py def tags ( self , tags : DefaultDict [ str , List [ str ]]) -> \"Flux\" : \"\"\"\u7ed9\u67e5\u8be2\u6dfb\u52a0tags\u8fc7\u6ee4\u6761\u4ef6 \u6b64\u67e5\u8be2\u6761\u4ef6\u4e3a\u8fc7\u6ee4\u6761\u4ef6\uff0c\u5e76\u975e\u5fc5\u987b\u3002\u5982\u679c\u67e5\u8be2\u4e2d\u6ca1\u6709\u6307\u5b9atags\uff0c\u5219\u4f1a\u8fd4\u56de\u6240\u6709\u8bb0\u5f55\u3002 \u5728\u5b9e\u73b0\u4e0a\uff0c\u65e2\u53ef\u4ee5\u4f7f\u7528`contains`\u8bed\u6cd5\uff0c\u4e5f\u53ef\u4ee5\u4f7f\u7528`or`\u8bed\u6cd5(\u7531\u4e8e\u4e00\u6761\u8bb0\u5f55\u53ea\u80fd\u5c5e\u4e8e\u4e00\u4e2atag\uff0c\u6240\u4ee5\uff0c\u5f53\u6307\u5b9a\u591a\u4e2atag\u8fdb\u884c\u67e5\u8be2\u65f6\uff0c\u5b83\u4eec\u4e4b\u95f4\u7684\u5173\u7cfb\u5e94\u8be5\u4e3a`or`)\u3002\u7ecf\u9a8c\u8bc1\uff0ccontains\u8bed\u6cd5\u4f1a\u59cb\u7ec8\u5148\u5c06\u6240\u6709\u7b26\u5408\u6761\u4ef6\u7684\u8bb0\u5f55\u68c0\u7d22\u51fa\u6765\uff0c\u518d\u8fdb\u884c\u8fc7\u6ee4\u3002\u8fd9\u6837\u7684\u6548\u7387\u6bd4\u8f83\u4f4e\uff0c\u7279\u522b\u662f\u5f53tags\u7684\u6570\u91cf\u8f83\u5c11\u65f6\uff0c\u4f1a\u8fdc\u8fdc\u6bd4\u4f7f\u7528or\u8bed\u6cd5\u6162\u3002 Raises: DuplicateOperationError: \u4e00\u4e2a\u67e5\u8be2\u4e2d\u53ea\u5141\u8bb8\u6267\u884c\u4e00\u6b21\uff0c\u5982\u679ctag filter\u8868\u8fbe\u5f0f\u5df2\u7ecf\u5b58\u5728\uff0c\u5219\u629b\u51fa\u5f02\u5e38 Args: tags : tags\u662f\u4e00\u4e2a{tagname: Union[str,[tag_values]]}\u5bf9\u8c61\u3002 Examples: >>> flux = Flux() >>> flux.tags({\"code\": [\"000001\", \"000002\"], \"name\": 
[\"\u6d66\u53d1\u94f6\u884c\"]}).expressions[\"tags\"] ' |> filter(fn: (r) => r[\"code\"] == \"000001\" or r[\"code\"] == \"000002\" or r[\"name\"] == \"\u6d66\u53d1\u94f6\u884c\")' Returns: Flux\u5bf9\u8c61\uff0c\u4ee5\u4fbf\u8fdb\u884c\u7ba1\u9053\u64cd\u4f5c \"\"\" if \"tags\" in self . expressions : raise DuplicateOperationError ( \"tags has been set\" ) filters = [] for tag , values in tags . items (): assert ( isinstance ( values , str ) or len ( values ) > 0 ), f \"tag { tag } should not be empty or None\" if isinstance ( values , str ): values = [ values ] for v in values : filters . append ( f 'r[\" { tag } \"] == \" { v } \"' ) op_expression = \" or \" . join ( filters ) self . expressions [ \"tags\" ] = f \" |> filter(fn: (r) => { op_expression } )\" return self","title":"tags()"},{"location":"api/dal/flux/#omicron.dal.influx.flux.Flux.to_timestamp","text":"\u5c06\u65f6\u95f4\u6839\u636e\u7cbe\u5ea6\u8f6c\u6362\u4e3aunix\u65f6\u95f4\u6233 \u5728\u5f80influxdb\u5199\u5165\u6570\u636e\u65f6\uff0cline-protocol\u8981\u6c42\u7684\u65f6\u95f4\u6233\u4e3aunix timestamp\uff0c\u5e76\u4e14\u4e0e\u5176\u7cbe\u5ea6\u5bf9\u5e94\u3002 influxdb\u59cb\u7ec8\u4f7f\u7528UTC\u65f6\u95f4\uff0c\u56e0\u6b64\uff0c tm \u4e5f\u5fc5\u987b\u5df2\u7ecf\u8f6c\u6362\u6210UTC\u65f6\u95f4\u3002 Parameters: Name Type Description Default tm Union[datetime.date, datetime.datetime] \u65f6\u95f4 required precision str \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 's' Returns: Type Description int \u65f6\u95f4\u6233 Source code in omicron/dal/influx/flux.py @classmethod def to_timestamp ( cls , tm : Frame , precision : str = \"s\" ) -> int : \"\"\"\u5c06\u65f6\u95f4\u6839\u636e\u7cbe\u5ea6\u8f6c\u6362\u4e3aunix\u65f6\u95f4\u6233 \u5728\u5f80influxdb\u5199\u5165\u6570\u636e\u65f6\uff0cline-protocol\u8981\u6c42\u7684\u65f6\u95f4\u6233\u4e3aunix timestamp\uff0c\u5e76\u4e14\u4e0e\u5176\u7cbe\u5ea6\u5bf9\u5e94\u3002 influxdb\u59cb\u7ec8\u4f7f\u7528UTC\u65f6\u95f4\uff0c\u56e0\u6b64\uff0c`tm`\u4e5f\u5fc5\u987b\u5df2\u7ecf\u8f6c\u6362\u6210UTC\u65f6\u95f4\u3002 Args: tm: \u65f6\u95f4 precision: \u65f6\u95f4\u7cbe\u5ea6\uff0c\u9ed8\u8ba4\u4e3a\u79d2\u3002 Returns: \u65f6\u95f4\u6233 \"\"\" if precision not in [ \"s\" , \"ms\" , \"us\" ]: raise AssertionError ( \"precision must be 's', 'ms' or 'us'\" ) # get int repr of tm, in seconds unit if isinstance ( tm , np . datetime64 ): tm = tm . astype ( \"datetime64[s]\" ) . astype ( \"int\" ) elif isinstance ( tm , datetime . datetime ): tm = tm . timestamp () else : tm = arrow . get ( tm ) . timestamp () return int ( tm * 10 ** ({ \"s\" : 0 , \"ms\" : 3 , \"us\" : 6 }[ precision ]))","title":"to_timestamp()"},{"location":"api/dal/influxclient/","text":"InfluxClient - the performanct async client for influxdb \u00b6 Source code in omicron/dal/influx/influxclient.py class InfluxClient : def __init__ ( self , url : str , token : str , bucket : str , org : str = None , enable_compress = False , chunk_size : int = 5000 , precision : str = \"s\" , ): \"\"\"[summary] Args: url ([type]): [description] token ([type]): [description] bucket ([type]): [description] org ([type], optional): [description]. Defaults to None. enable_compress ([type], optional): [description]. Defaults to False. chunk_size: number of lines to be saved in one request precision: \u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6 \"\"\" self . _url = url self . _bucket = bucket self . _enable_compress = enable_compress self . _org = org self . 
_org_id = None # \u9700\u8981\u65f6\u901a\u8fc7\u67e5\u8be2\u83b7\u53d6\uff0c\u6b64\u540e\u4e0d\u518d\u66f4\u65b0 self . _token = token # influxdb 2.0\u8d77\u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6\u6709\uff1ans, us, ms, s\u3002\u672c\u5ba2\u6237\u7aef\u53ea\u652f\u6301s, ms\u548cus self . _precision = precision . lower () if self . _precision not in [ \"s\" , \"ms\" , \"us\" ]: # pragma: no cover raise ValueError ( \"precision must be one of ['s', 'ms', 'us']\" ) self . _chunk_size = chunk_size # write self . _write_url = f \" { self . _url } /api/v2/write?org= { self . _org } &bucket= { self . _bucket } &precision= { self . _precision } \" self . _write_headers = { \"Content-Type\" : \"text/plain; charset=utf-8\" , \"Authorization\" : f \"Token { token } \" , \"Accept\" : \"application/json\" , } if self . _enable_compress : self . _write_headers [ \"Content-Encoding\" ] = \"gzip\" self . _query_url = f \" { self . _url } /api/v2/query?org= { self . _org } \" self . _query_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/vnd.flux\" , # influx\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\uff0c\u65e0\u8bba\u5982\u4f55\u6307\u5b9a\uff08\u6216\u8005\u4e0d\u6307\u5b9a\uff09\uff0c\u57282.1\u4e2d\u59cb\u7ec8\u662fcsv\u683c\u5f0f \"Accept\" : \"text/csv\" , } if self . _enable_compress : self . _query_headers [ \"Accept-Encoding\" ] = \"gzip\" self . _delete_url = ( f \" { self . _url } /api/v2/delete?org= { self . _org } &bucket= { self . _bucket } \" ) self . _delete_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/json\" , } async def save ( self , data : Union [ np . ndarray , DataFrame ], measurement : str = None , tag_keys : List [ str ] = [], time_key : str = None , global_tags : Dict = {}, chunk_size : int = None , ) -> None : \"\"\"save `data` into influxdb if `data` is a pandas.DataFrame or numy structured array, it will be converted to line protocol and saved. If `data` is str, use `write` method instead. Args: data: data to be saved measurement: the name of measurement tag_keys: which columns name will be used as tags chunk_size: number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to `self._chunk_size` Raises: InfluxDBWriteError: if write failed \"\"\" # todo: add more errors raise if isinstance ( data , DataFrame ): assert ( measurement is not None ), \"measurement must be specified when data is a DataFrame\" if tag_keys : assert set ( tag_keys ) in set ( data . columns . tolist () ), \"tag_keys must be in data.columns\" serializer = DataframeSerializer ( data , measurement , time_key , tag_keys , global_tags , precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) elif isinstance ( data , np . ndarray ): assert ( measurement is not None ), \"measurement must be specified when data is a numpy array\" assert ( time_key is not None ), \"time_key must be specified when data is a numpy array\" serializer = NumpySerializer ( data , measurement , time_key , tag_keys , global_tags , time_precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . 
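`save()` serializes a `pandas.DataFrame` or a numpy structured array to line protocol in chunks and writes each chunk through `write()`; `tag_keys` selects which columns are written as tags. A hedged sketch with made-up column names (`frame`, `code`, `close`), where `time_key` names the timestamp column:

```python
import datetime

import pandas as pd

df = pd.DataFrame(
    {
        "frame": [datetime.datetime(2022, 1, 4), datetime.datetime(2022, 1, 5)],  # timestamp column
        "code": ["000001.XSHE", "000001.XSHE"],
        "close": [17.1, 17.3],
    }
)

async def save_bars(client):
    # measurement name and columns are illustrative only
    await client.save(df, measurement="stock_bars_1d", time_key="frame")
```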
write ( lines ) else : raise TypeError ( f \"data must be pandas.DataFrame, numpy array, got { type ( data ) } \" ) async def write ( self , line_protocol : str ): \"\"\"\u5c06line-protocol\u6570\u7ec4\u5199\u5165influxdb Args: line_protocol: \u5f85\u5199\u5165\u7684\u6570\u636e\uff0c\u4ee5line-protocol\u6570\u7ec4\u5f62\u5f0f\u5b58\u5728 \"\"\" # todo: add raise error declaration if self . _enable_compress : line_protocol_ = gzip . compress ( line_protocol . encode ( \"utf-8\" )) else : line_protocol_ = line_protocol async with ClientSession () as session : async with session . post ( self . _write_url , data = line_protocol_ , headers = self . _write_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb write error when processing: %s , err code: %s , message: %s \" , { line_protocol [: 100 ]}, err [ \"code\" ], err [ \"message\" ], ) logger . debug ( \"data caused error: %s \" , line_protocol ) raise InfluxDBWriteError ( f \"influxdb write failed, err: { err [ 'message' ] } \" ) async def query ( self , flux : Union [ Flux , str ], deserializer : Callable = None ) -> Any : \"\"\"flux\u67e5\u8be2 flux\u67e5\u8be2\u7ed3\u679c\u662f\u4e00\u4e2a\u4ee5annotated csv\u683c\u5f0f\u5b58\u50a8\u7684\u6570\u636e\uff0c\u4f8b\u5982\uff1a ``` ,result,table,_time,code,amount,close,factor,high,low,open,volume ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000 ``` \u4e0a\u8ff0`result`\u4e2d\uff0c\u4e8b\u5148\u901a\u8fc7Flux.keep()\u9650\u5236\u4e86\u8fd4\u56de\u7684\u5b57\u6bb5\u4e3a_time,code,amount,close,factor,high,low,open,volume\u3002influxdb\u67e5\u8be2\u8fd4\u56de\u7ed3\u679c\u65f6\uff0c\u603b\u662f\u6309\u7167\u5b57\u6bb5\u540d\u79f0\u5347\u5e8f\u6392\u5217\u3002\u6b64\u5916\uff0c\u603b\u662f\u4f1a\u989d\u5916\u5730\u8fd4\u56de_result, table\u4e24\u4e2a\u5b57\u6bb5\u3002 \u5982\u679c\u4f20\u5165\u4e86deserializer\uff0c\u5219\u4f1a\u8c03\u7528deserializer\u5c06\u5176\u89e3\u6790\u6210\u4e3apython\u5bf9\u8c61\u3002\u5426\u5219\uff0c\u8fd4\u56debytes\u6570\u636e\u3002 Args: flux: flux\u67e5\u8be2\u8bed\u53e5 deserializer: \u53cd\u5e8f\u5217\u5316\u51fd\u6570 Returns: \u5982\u679c\u672a\u63d0\u4f9b\u53cd\u5e8f\u5217\u5316\u51fd\u6570\uff0c\u5219\u8fd4\u56de\u7ed3\u679c\u4e3abytes array(\u5982\u679c\u6307\u5b9a\u4e86compress=True\uff0c\u8fd4\u56de\u7ed3\u679c\u4e3agzip\u89e3\u538b\u7f29\u540e\u7684bytes array)\uff0c\u5426\u5219\u8fd4\u56de\u53cd\u5e8f\u5217\u5316\u540e\u7684python\u5bf9\u8c61 \"\"\" if isinstance ( flux , Flux ): flux = str ( flux ) async with ClientSession () as session : async with session . post ( self . _query_url , data = flux , headers = self . _query_headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( f \"influxdb query error: { err } when processing { flux [: 500 ] } \" ) logger . debug ( \"data caused error: %s \" , flux ) raise InfluxDBQueryError ( f \"influxdb query failed, status code: { err [ 'message' ] } \" ) else : # auto-unzip body = await resp . read () if deserializer : try : return deserializer ( body ) except Exception as e : logger . exception ( e ) logger . 
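`query()` returns the annotated-CSV response as raw bytes unless a `deserializer` callable is supplied. The pandas-based helper below is purely illustrative (omicron ships its own serde helpers); it only assumes the default CSV layout shown in the docstring:

```python
import io

import pandas as pd

def to_dataframe(body: bytes) -> pd.DataFrame:
    # parse the annotated CSV returned by /api/v2/query; the "result" and
    # "table" columns documented above are kept as ordinary columns here
    return pd.read_csv(io.BytesIO(body))

async def run(client, flux):
    raw = await client.query(flux)                        # bytes
    df = await client.query(flux, deserializer=to_dataframe)
    return raw, df
```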
warning ( \"failed to deserialize data: %s , the query is: %s \" , body , flux [: 500 ], ) raise else : return body async def drop_measurement ( self , measurement : str ): \"\"\"\u4eceinfluxdb\u4e2d\u5220\u9664\u4e00\u4e2ameasurement \u8c03\u7528\u6b64\u65b9\u6cd5\u540e\uff0c\u5b9e\u9645\u4e0a\u8be5measurement\u4ecd\u7136\u5b58\u5728\uff0c\u53ea\u662f\u6ca1\u6709\u6570\u636e\u3002 \"\"\" # todo: add raise error declaration await self . delete ( measurement , arrow . now () . naive ) async def delete ( self , measurement : str , stop : datetime . datetime , tags : Optional [ Dict [ str , str ]] = {}, start : datetime . datetime = None , precision : str = \"s\" , ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9a\u65f6\u95f4\u6bb5\u5185\u7684\u6570\u636e \u5173\u4e8e\u53c2\u6570\uff0c\u8bf7\u53c2\u89c1[Flux.delete][omicron.dal.influx.flux.Flux.delete]\u3002 Args: measurement: \u6307\u5b9ameasurement\u540d\u5b57 stop: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u7ed3\u675f\u65f6\u95f4 start: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u5f00\u59cb\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528EPOCH_START tags: \u6309tag\u8fdb\u884c\u8fc7\u6ee4\u7684\u6761\u4ef6 precision: \u7528\u4ee5\u683c\u5f0f\u5316\u8d77\u59cb\u548c\u7ed3\u675f\u65f6\u95f4\u3002 Raises: InfluxDeleteError: \u5982\u679c\u5220\u9664\u5931\u8d25\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38 \"\"\" # todo: add raise error declaration command = Flux () . delete ( measurement , stop , tags , start = start , precision = precision ) async with ClientSession () as session : async with session . post ( self . _delete_url , data = json . dumps ( command ), headers = self . _delete_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb delete error: %s when processin command %s \" , err [ \"message\" ], command , ) raise InfluxDeleteError ( f \"influxdb delete failed, status code: { err [ 'message' ] } \" ) async def list_buckets ( self ) -> List [ Dict ]: \"\"\"\u5217\u51fainfluxdb\u4e2d\u5bf9\u5e94token\u80fd\u770b\u5230\u7684\u6240\u6709\u7684bucket Returns: list of buckets, each bucket is a dict with keys: ``` id orgID, a 16 bytes hex string type, system or user description name retentionRules createdAt updatedAt links labels ``` \"\"\" url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () raise InfluxSchemaError ( f \"influxdb list bucket failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . json ())[ \"buckets\" ] async def delete_bucket ( self , bucket_id : str = None ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9abucket Args: bucket_id: \u6307\u5b9abucket\u7684id\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u4f1a\u5220\u9664\u672cclient\u5bf9\u5e94\u7684bucket\u3002 \"\"\" if bucket_id is None : buckets = await self . list_buckets () for bucket in buckets : if bucket [ \"type\" ] == \"user\" and bucket [ \"name\" ] == self . _bucket : bucket_id = bucket [ \"id\" ] break else : raise BadParameterError ( \"bucket_id is None, and we can't find bucket with name: %s \" % self . _bucket ) url = f \" { self . _url } /api/v2/buckets/ { bucket_id } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . delete ( url , headers = headers ) as resp : if resp . 
status != 204 : err = await resp . json () logger . warning ( \"influxdb delete bucket error: %s when processin command %s \" , err [ \"message\" ], bucket_id , ) raise InfluxSchemaError ( f \"influxdb delete bucket failed, status code: { err [ 'message' ] } \" ) async def create_bucket ( self , description = None , retention_rules : List [ Dict ] = None , org_id : str = None ) -> str : \"\"\"\u521b\u5efainfluxdb\u4e2d\u6307\u5b9abucket Args: description: \u6307\u5b9abucket\u7684\u63cf\u8ff0 org_id: \u6307\u5b9abucket\u6240\u5c5e\u7684\u7ec4\u7ec7id\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u672cclient\u5bf9\u5e94\u7684\u7ec4\u7ec7id\u3002 Raises: InfluxSchemaError: \u5f53influxdb\u8fd4\u56de\u9519\u8bef\u65f6\uff0c\u6bd4\u5982\u91cd\u590d\u521b\u5efabucket\u7b49\uff0c\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38 Returns: \u65b0\u521b\u5efa\u7684bucket\u7684id \"\"\" if org_id is None : org_id = await self . query_org_id () url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } data = { \"name\" : self . _bucket , \"orgID\" : org_id , \"description\" : description , \"retentionRules\" : retention_rules , } async with ClientSession () as session : async with session . post ( url , data = json . dumps ( data ), headers = headers ) as resp : if resp . status != 201 : err = await resp . json () logger . warning ( \"influxdb create bucket error: %s when processin command %s \" , err [ \"message\" ], data , ) raise InfluxSchemaError ( f \"influxdb create bucket failed, status code: { err [ 'message' ] } \" ) else : result = await resp . json () return result [ \"id\" ] async def list_organizations ( self , offset : int = 0 , limit : int = 100 ) -> List [ Dict ]: \"\"\"\u5217\u51fa\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u6240\u7ec4\u7ec7 Args: offset : \u5206\u9875\u8d77\u70b9 limit : \u6bcf\u9875size Raises: InfluxSchemaError: influxdb\u8fd4\u56de\u7684\u9519\u8bef Returns: list of organizations, each organization is a dict with keys: ``` id : the id of the org links name : the name of the org description createdAt updatedAt ``` \"\"\" url = f \" { self . _url } /api/v2/orgs?offset= { offset } &limit= { limit } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( \"influxdb query orgs err: %s \" , err [ \"message\" ]) raise InfluxSchemaError ( f \"influxdb query orgs failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . json ())[ \"orgs\" ] async def query_org_id ( self , name : str = None ) -> str : \"\"\"\u901a\u8fc7\u7ec4\u7ec7\u540d\u67e5\u627e\u7ec4\u7ec7id \u53ea\u80fd\u67e5\u7684\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u7ec4\u7ec7\u3002\u5982\u679cname\u672a\u63d0\u4f9b\uff0c\u5219\u4f7f\u7528\u672c\u5ba2\u6237\u7aef\u521b\u5efa\u65f6\u4f20\u5165\u7684\u7ec4\u7ec7\u540d\u3002 Args: name: \u6307\u5b9a\u7ec4\u7ec7\u540d Returns: \u7ec4\u7ec7id \"\"\" if name is None : name = self . _org orgs = await self . 
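The schema helpers (`query_org_id`, `create_bucket`, `list_buckets`, `delete_bucket`) are all coroutines and raise `InfluxSchemaError` or `BadParameterError` on failure. A hedged sketch of a typical round trip:

```python
async def manage_buckets(client):
    org_id = await client.query_org_id()        # resolves the org name given at construction
    bucket_id = await client.create_bucket(description="demo bucket", org_id=org_id)
    buckets = await client.list_buckets()       # each bucket is a dict with id/name/type/...
    await client.delete_bucket(bucket_id)       # omit the id to delete the client's own bucket
    return buckets
```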
list_organizations () for org in orgs : if org [ \"name\" ] == name : return org [ \"id\" ] raise BadParameterError ( f \"can't find org with name: { name } \" ) __init__ ( self , url , token , bucket , org = None , enable_compress = False , chunk_size = 5000 , precision = 's' ) special \u00b6 [summary] Parameters: Name Type Description Default url [type] [description] required token [type] [description] required bucket [type] [description] required org [type] [description]. Defaults to None. None enable_compress [type] [description]. Defaults to False. False chunk_size int number of lines to be saved in one request 5000 precision str \u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6 's' Source code in omicron/dal/influx/influxclient.py def __init__ ( self , url : str , token : str , bucket : str , org : str = None , enable_compress = False , chunk_size : int = 5000 , precision : str = \"s\" , ): \"\"\"[summary] Args: url ([type]): [description] token ([type]): [description] bucket ([type]): [description] org ([type], optional): [description]. Defaults to None. enable_compress ([type], optional): [description]. Defaults to False. chunk_size: number of lines to be saved in one request precision: \u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6 \"\"\" self . _url = url self . _bucket = bucket self . _enable_compress = enable_compress self . _org = org self . _org_id = None # \u9700\u8981\u65f6\u901a\u8fc7\u67e5\u8be2\u83b7\u53d6\uff0c\u6b64\u540e\u4e0d\u518d\u66f4\u65b0 self . _token = token # influxdb 2.0\u8d77\u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6\u6709\uff1ans, us, ms, s\u3002\u672c\u5ba2\u6237\u7aef\u53ea\u652f\u6301s, ms\u548cus self . _precision = precision . lower () if self . _precision not in [ \"s\" , \"ms\" , \"us\" ]: # pragma: no cover raise ValueError ( \"precision must be one of ['s', 'ms', 'us']\" ) self . _chunk_size = chunk_size # write self . _write_url = f \" { self . _url } /api/v2/write?org= { self . _org } &bucket= { self . _bucket } &precision= { self . _precision } \" self . _write_headers = { \"Content-Type\" : \"text/plain; charset=utf-8\" , \"Authorization\" : f \"Token { token } \" , \"Accept\" : \"application/json\" , } if self . _enable_compress : self . _write_headers [ \"Content-Encoding\" ] = \"gzip\" self . _query_url = f \" { self . _url } /api/v2/query?org= { self . _org } \" self . _query_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/vnd.flux\" , # influx\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\uff0c\u65e0\u8bba\u5982\u4f55\u6307\u5b9a\uff08\u6216\u8005\u4e0d\u6307\u5b9a\uff09\uff0c\u57282.1\u4e2d\u59cb\u7ec8\u662fcsv\u683c\u5f0f \"Accept\" : \"text/csv\" , } if self . _enable_compress : self . _query_headers [ \"Accept-Encoding\" ] = \"gzip\" self . _delete_url = ( f \" { self . _url } /api/v2/delete?org= { self . _org } &bucket= { self . _bucket } \" ) self . 
_delete_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/json\" , } create_bucket ( self , description = None , retention_rules = None , org_id = None ) async \u00b6 \u521b\u5efainfluxdb\u4e2d\u6307\u5b9abucket Parameters: Name Type Description Default description \u6307\u5b9abucket\u7684\u63cf\u8ff0 None org_id str \u6307\u5b9abucket\u6240\u5c5e\u7684\u7ec4\u7ec7id\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u672cclient\u5bf9\u5e94\u7684\u7ec4\u7ec7id\u3002 None Exceptions: Type Description InfluxSchemaError \u5f53influxdb\u8fd4\u56de\u9519\u8bef\u65f6\uff0c\u6bd4\u5982\u91cd\u590d\u521b\u5efabucket\u7b49\uff0c\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38 Returns: Type Description str \u65b0\u521b\u5efa\u7684bucket\u7684id Source code in omicron/dal/influx/influxclient.py async def create_bucket ( self , description = None , retention_rules : List [ Dict ] = None , org_id : str = None ) -> str : \"\"\"\u521b\u5efainfluxdb\u4e2d\u6307\u5b9abucket Args: description: \u6307\u5b9abucket\u7684\u63cf\u8ff0 org_id: \u6307\u5b9abucket\u6240\u5c5e\u7684\u7ec4\u7ec7id\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u672cclient\u5bf9\u5e94\u7684\u7ec4\u7ec7id\u3002 Raises: InfluxSchemaError: \u5f53influxdb\u8fd4\u56de\u9519\u8bef\u65f6\uff0c\u6bd4\u5982\u91cd\u590d\u521b\u5efabucket\u7b49\uff0c\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38 Returns: \u65b0\u521b\u5efa\u7684bucket\u7684id \"\"\" if org_id is None : org_id = await self . query_org_id () url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } data = { \"name\" : self . _bucket , \"orgID\" : org_id , \"description\" : description , \"retentionRules\" : retention_rules , } async with ClientSession () as session : async with session . post ( url , data = json . dumps ( data ), headers = headers ) as resp : if resp . status != 201 : err = await resp . json () logger . warning ( \"influxdb create bucket error: %s when processin command %s \" , err [ \"message\" ], data , ) raise InfluxSchemaError ( f \"influxdb create bucket failed, status code: { err [ 'message' ] } \" ) else : result = await resp . json () return result [ \"id\" ] delete ( self , measurement , stop , tags = {}, start = None , precision = 's' ) async \u00b6 \u5220\u9664influxdb\u4e2d\u6307\u5b9a\u65f6\u95f4\u6bb5\u5185\u7684\u6570\u636e \u5173\u4e8e\u53c2\u6570\uff0c\u8bf7\u53c2\u89c1 Flux.delete \u3002 Parameters: Name Type Description Default measurement str \u6307\u5b9ameasurement\u540d\u5b57 required stop datetime \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u7ed3\u675f\u65f6\u95f4 required start datetime \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u5f00\u59cb\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528EPOCH_START None tags Optional[Dict[str, str]] \u6309tag\u8fdb\u884c\u8fc7\u6ee4\u7684\u6761\u4ef6 {} precision str \u7528\u4ee5\u683c\u5f0f\u5316\u8d77\u59cb\u548c\u7ed3\u675f\u65f6\u95f4\u3002 's' Exceptions: Type Description InfluxDeleteError \u5982\u679c\u5220\u9664\u5931\u8d25\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38 Source code in omicron/dal/influx/influxclient.py async def delete ( self , measurement : str , stop : datetime . datetime , tags : Optional [ Dict [ str , str ]] = {}, start : datetime . 
datetime = None , precision : str = \"s\" , ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9a\u65f6\u95f4\u6bb5\u5185\u7684\u6570\u636e \u5173\u4e8e\u53c2\u6570\uff0c\u8bf7\u53c2\u89c1[Flux.delete][omicron.dal.influx.flux.Flux.delete]\u3002 Args: measurement: \u6307\u5b9ameasurement\u540d\u5b57 stop: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u7ed3\u675f\u65f6\u95f4 start: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u5f00\u59cb\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528EPOCH_START tags: \u6309tag\u8fdb\u884c\u8fc7\u6ee4\u7684\u6761\u4ef6 precision: \u7528\u4ee5\u683c\u5f0f\u5316\u8d77\u59cb\u548c\u7ed3\u675f\u65f6\u95f4\u3002 Raises: InfluxDeleteError: \u5982\u679c\u5220\u9664\u5931\u8d25\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38 \"\"\" # todo: add raise error declaration command = Flux () . delete ( measurement , stop , tags , start = start , precision = precision ) async with ClientSession () as session : async with session . post ( self . _delete_url , data = json . dumps ( command ), headers = self . _delete_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb delete error: %s when processin command %s \" , err [ \"message\" ], command , ) raise InfluxDeleteError ( f \"influxdb delete failed, status code: { err [ 'message' ] } \" ) delete_bucket ( self , bucket_id = None ) async \u00b6 \u5220\u9664influxdb\u4e2d\u6307\u5b9abucket Parameters: Name Type Description Default bucket_id str \u6307\u5b9abucket\u7684id\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u4f1a\u5220\u9664\u672cclient\u5bf9\u5e94\u7684bucket\u3002 None Source code in omicron/dal/influx/influxclient.py async def delete_bucket ( self , bucket_id : str = None ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9abucket Args: bucket_id: \u6307\u5b9abucket\u7684id\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u4f1a\u5220\u9664\u672cclient\u5bf9\u5e94\u7684bucket\u3002 \"\"\" if bucket_id is None : buckets = await self . list_buckets () for bucket in buckets : if bucket [ \"type\" ] == \"user\" and bucket [ \"name\" ] == self . _bucket : bucket_id = bucket [ \"id\" ] break else : raise BadParameterError ( \"bucket_id is None, and we can't find bucket with name: %s \" % self . _bucket ) url = f \" { self . _url } /api/v2/buckets/ { bucket_id } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . delete ( url , headers = headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb delete bucket error: %s when processin command %s \" , err [ \"message\" ], bucket_id , ) raise InfluxSchemaError ( f \"influxdb delete bucket failed, status code: { err [ 'message' ] } \" ) drop_measurement ( self , measurement ) async \u00b6 \u4eceinfluxdb\u4e2d\u5220\u9664\u4e00\u4e2ameasurement \u8c03\u7528\u6b64\u65b9\u6cd5\u540e\uff0c\u5b9e\u9645\u4e0a\u8be5measurement\u4ecd\u7136\u5b58\u5728\uff0c\u53ea\u662f\u6ca1\u6709\u6570\u636e\u3002 Source code in omicron/dal/influx/influxclient.py async def drop_measurement ( self , measurement : str ): \"\"\"\u4eceinfluxdb\u4e2d\u5220\u9664\u4e00\u4e2ameasurement \u8c03\u7528\u6b64\u65b9\u6cd5\u540e\uff0c\u5b9e\u9645\u4e0a\u8be5measurement\u4ecd\u7136\u5b58\u5728\uff0c\u53ea\u662f\u6ca1\u6709\u6570\u636e\u3002 \"\"\" # todo: add raise error declaration await self . delete ( measurement , arrow . now () . 
naive ) list_buckets ( self ) async \u00b6 \u5217\u51fainfluxdb\u4e2d\u5bf9\u5e94token\u80fd\u770b\u5230\u7684\u6240\u6709\u7684bucket Returns: Type Description list of buckets, each bucket is a dict with keys ``` id orgID, a 16 bytes hex string type, system or user description name retentionRules createdAt updatedAt links labels ``` Source code in omicron/dal/influx/influxclient.py async def list_buckets ( self ) -> List [ Dict ]: \"\"\"\u5217\u51fainfluxdb\u4e2d\u5bf9\u5e94token\u80fd\u770b\u5230\u7684\u6240\u6709\u7684bucket Returns: list of buckets, each bucket is a dict with keys: ``` id orgID, a 16 bytes hex string type, system or user description name retentionRules createdAt updatedAt links labels ``` \"\"\" url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () raise InfluxSchemaError ( f \"influxdb list bucket failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . json ())[ \"buckets\" ] list_organizations ( self , offset = 0 , limit = 100 ) async \u00b6 \u5217\u51fa\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u6240\u7ec4\u7ec7 Parameters: Name Type Description Default offset \u5206\u9875\u8d77\u70b9 0 limit \u6bcf\u9875size 100 Exceptions: Type Description InfluxSchemaError influxdb\u8fd4\u56de\u7684\u9519\u8bef Returns: Type Description list of organizations, each organization is a dict with keys 1 2 3 4 5 6 id : the id of the org links name : the name of the org description createdAt updatedAt Source code in omicron/dal/influx/influxclient.py async def list_organizations ( self , offset : int = 0 , limit : int = 100 ) -> List [ Dict ]: \"\"\"\u5217\u51fa\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u6240\u7ec4\u7ec7 Args: offset : \u5206\u9875\u8d77\u70b9 limit : \u6bcf\u9875size Raises: InfluxSchemaError: influxdb\u8fd4\u56de\u7684\u9519\u8bef Returns: list of organizations, each organization is a dict with keys: ``` id : the id of the org links name : the name of the org description createdAt updatedAt ``` \"\"\" url = f \" { self . _url } /api/v2/orgs?offset= { offset } &limit= { limit } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( \"influxdb query orgs err: %s \" , err [ \"message\" ]) raise InfluxSchemaError ( f \"influxdb query orgs failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . 
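A cleanup sketch using the `delete` and `drop_measurement` signatures documented above; the measurement name and tag filter are illustrative, and `client` is assumed to be an initialized `InfluxClient`.
```
import datetime

# inside an async function
# remove one security's records up to now, filtered by the "code" tag
await client.delete(
    "stock_bars_1d",
    stop=datetime.datetime.now(),
    tags={"code": "000001.XSHE"},
)

# or clear the whole measurement -- as noted above, the measurement still exists
# afterwards, it simply holds no data
await client.drop_measurement("stock_bars_1d")
```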
json ())[ \"orgs\" ] query ( self , flux , deserializer = None ) async \u00b6 flux\u67e5\u8be2 flux\u67e5\u8be2\u7ed3\u679c\u662f\u4e00\u4e2a\u4ee5annotated csv\u683c\u5f0f\u5b58\u50a8\u7684\u6570\u636e\uff0c\u4f8b\u5982\uff1a 1 2 ,result,table,_time,code,amount,close,factor,high,low,open,volume ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000 \u4e0a\u8ff0 result \u4e2d\uff0c\u4e8b\u5148\u901a\u8fc7Flux.keep()\u9650\u5236\u4e86\u8fd4\u56de\u7684\u5b57\u6bb5\u4e3a_time,code,amount,close,factor,high,low,open,volume\u3002influxdb\u67e5\u8be2\u8fd4\u56de\u7ed3\u679c\u65f6\uff0c\u603b\u662f\u6309\u7167\u5b57\u6bb5\u540d\u79f0\u5347\u5e8f\u6392\u5217\u3002\u6b64\u5916\uff0c\u603b\u662f\u4f1a\u989d\u5916\u5730\u8fd4\u56de_result, table\u4e24\u4e2a\u5b57\u6bb5\u3002 \u5982\u679c\u4f20\u5165\u4e86deserializer\uff0c\u5219\u4f1a\u8c03\u7528deserializer\u5c06\u5176\u89e3\u6790\u6210\u4e3apython\u5bf9\u8c61\u3002\u5426\u5219\uff0c\u8fd4\u56debytes\u6570\u636e\u3002 Parameters: Name Type Description Default flux Union[omicron.dal.influx.flux.Flux, str] flux\u67e5\u8be2\u8bed\u53e5 required deserializer Callable \u53cd\u5e8f\u5217\u5316\u51fd\u6570 None Returns: Type Description Any \u5982\u679c\u672a\u63d0\u4f9b\u53cd\u5e8f\u5217\u5316\u51fd\u6570\uff0c\u5219\u8fd4\u56de\u7ed3\u679c\u4e3abytes array(\u5982\u679c\u6307\u5b9a\u4e86compress=True\uff0c\u8fd4\u56de\u7ed3\u679c\u4e3agzip\u89e3\u538b\u7f29\u540e\u7684bytes array)\uff0c\u5426\u5219\u8fd4\u56de\u53cd\u5e8f\u5217\u5316\u540e\u7684python\u5bf9\u8c61 Source code in omicron/dal/influx/influxclient.py async def query ( self , flux : Union [ Flux , str ], deserializer : Callable = None ) -> Any : \"\"\"flux\u67e5\u8be2 flux\u67e5\u8be2\u7ed3\u679c\u662f\u4e00\u4e2a\u4ee5annotated csv\u683c\u5f0f\u5b58\u50a8\u7684\u6570\u636e\uff0c\u4f8b\u5982\uff1a ``` ,result,table,_time,code,amount,close,factor,high,low,open,volume ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000 ``` \u4e0a\u8ff0`result`\u4e2d\uff0c\u4e8b\u5148\u901a\u8fc7Flux.keep()\u9650\u5236\u4e86\u8fd4\u56de\u7684\u5b57\u6bb5\u4e3a_time,code,amount,close,factor,high,low,open,volume\u3002influxdb\u67e5\u8be2\u8fd4\u56de\u7ed3\u679c\u65f6\uff0c\u603b\u662f\u6309\u7167\u5b57\u6bb5\u540d\u79f0\u5347\u5e8f\u6392\u5217\u3002\u6b64\u5916\uff0c\u603b\u662f\u4f1a\u989d\u5916\u5730\u8fd4\u56de_result, table\u4e24\u4e2a\u5b57\u6bb5\u3002 \u5982\u679c\u4f20\u5165\u4e86deserializer\uff0c\u5219\u4f1a\u8c03\u7528deserializer\u5c06\u5176\u89e3\u6790\u6210\u4e3apython\u5bf9\u8c61\u3002\u5426\u5219\uff0c\u8fd4\u56debytes\u6570\u636e\u3002 Args: flux: flux\u67e5\u8be2\u8bed\u53e5 deserializer: \u53cd\u5e8f\u5217\u5316\u51fd\u6570 Returns: \u5982\u679c\u672a\u63d0\u4f9b\u53cd\u5e8f\u5217\u5316\u51fd\u6570\uff0c\u5219\u8fd4\u56de\u7ed3\u679c\u4e3abytes array(\u5982\u679c\u6307\u5b9a\u4e86compress=True\uff0c\u8fd4\u56de\u7ed3\u679c\u4e3agzip\u89e3\u538b\u7f29\u540e\u7684bytes array)\uff0c\u5426\u5219\u8fd4\u56de\u53cd\u5e8f\u5217\u5316\u540e\u7684python\u5bf9\u8c61 \"\"\" if isinstance ( flux , Flux ): flux = str ( flux ) async with ClientSession () as session : async with session . post ( self . _query_url , data = flux , headers = self . _query_headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( f \"influxdb query error: { err } when processing { flux [: 500 ] } \" ) logger . 
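A query sketch based on the `query` signature above. The Flux script is hand-written here for illustration (it can also be produced by the `Flux` helper class), and without a `deserializer` the raw annotated-CSV bytes are returned.
```
# an illustrative Flux script; bucket and measurement names are placeholders
flux = '''
from(bucket: "my-bucket")
    |> range(start: -7d)
    |> filter(fn: (r) => r._measurement == "stock_bars_1d")
'''

# inside an async function: returns the annotated CSV as bytes
raw_csv = await client.query(flux)
```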
debug ( \"data caused error: %s \" , flux ) raise InfluxDBQueryError ( f \"influxdb query failed, status code: { err [ 'message' ] } \" ) else : # auto-unzip body = await resp . read () if deserializer : try : return deserializer ( body ) except Exception as e : logger . exception ( e ) logger . warning ( \"failed to deserialize data: %s , the query is: %s \" , body , flux [: 500 ], ) raise else : return body query_org_id ( self , name = None ) async \u00b6 \u901a\u8fc7\u7ec4\u7ec7\u540d\u67e5\u627e\u7ec4\u7ec7id \u53ea\u80fd\u67e5\u7684\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u7ec4\u7ec7\u3002\u5982\u679cname\u672a\u63d0\u4f9b\uff0c\u5219\u4f7f\u7528\u672c\u5ba2\u6237\u7aef\u521b\u5efa\u65f6\u4f20\u5165\u7684\u7ec4\u7ec7\u540d\u3002 Parameters: Name Type Description Default name str \u6307\u5b9a\u7ec4\u7ec7\u540d None Returns: Type Description str \u7ec4\u7ec7id Source code in omicron/dal/influx/influxclient.py async def query_org_id ( self , name : str = None ) -> str : \"\"\"\u901a\u8fc7\u7ec4\u7ec7\u540d\u67e5\u627e\u7ec4\u7ec7id \u53ea\u80fd\u67e5\u7684\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u7ec4\u7ec7\u3002\u5982\u679cname\u672a\u63d0\u4f9b\uff0c\u5219\u4f7f\u7528\u672c\u5ba2\u6237\u7aef\u521b\u5efa\u65f6\u4f20\u5165\u7684\u7ec4\u7ec7\u540d\u3002 Args: name: \u6307\u5b9a\u7ec4\u7ec7\u540d Returns: \u7ec4\u7ec7id \"\"\" if name is None : name = self . _org orgs = await self . list_organizations () for org in orgs : if org [ \"name\" ] == name : return org [ \"id\" ] raise BadParameterError ( f \"can't find org with name: { name } \" ) save ( self , data , measurement = None , tag_keys = [], time_key = None , global_tags = {}, chunk_size = None ) async \u00b6 save data into influxdb if data is a pandas.DataFrame or numy structured array, it will be converted to line protocol and saved. If data is str, use write method instead. Parameters: Name Type Description Default data Union[numpy.ndarray, pandas.core.frame.DataFrame] data to be saved required measurement str the name of measurement None tag_keys List[str] which columns name will be used as tags [] chunk_size int number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to self._chunk_size None Exceptions: Type Description InfluxDBWriteError if write failed Source code in omicron/dal/influx/influxclient.py async def save ( self , data : Union [ np . ndarray , DataFrame ], measurement : str = None , tag_keys : List [ str ] = [], time_key : str = None , global_tags : Dict = {}, chunk_size : int = None , ) -> None : \"\"\"save `data` into influxdb if `data` is a pandas.DataFrame or numy structured array, it will be converted to line protocol and saved. If `data` is str, use `write` method instead. Args: data: data to be saved measurement: the name of measurement tag_keys: which columns name will be used as tags chunk_size: number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to `self._chunk_size` Raises: InfluxDBWriteError: if write failed \"\"\" # todo: add more errors raise if isinstance ( data , DataFrame ): assert ( measurement is not None ), \"measurement must be specified when data is a DataFrame\" if tag_keys : assert set ( tag_keys ) in set ( data . columns . tolist () ), \"tag_keys must be in data.columns\" serializer = DataframeSerializer ( data , measurement , time_key , tag_keys , global_tags , precision = self . 
_precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) elif isinstance ( data , np . ndarray ): assert ( measurement is not None ), \"measurement must be specified when data is a numpy array\" assert ( time_key is not None ), \"time_key must be specified when data is a numpy array\" serializer = NumpySerializer ( data , measurement , time_key , tag_keys , global_tags , time_precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) else : raise TypeError ( f \"data must be pandas.DataFrame, numpy array, got { type ( data ) } \" ) write ( self , line_protocol ) async \u00b6 \u5c06line-protocol\u6570\u7ec4\u5199\u5165influxdb Parameters: Name Type Description Default line_protocol str \u5f85\u5199\u5165\u7684\u6570\u636e\uff0c\u4ee5line-protocol\u6570\u7ec4\u5f62\u5f0f\u5b58\u5728 required Source code in omicron/dal/influx/influxclient.py async def write ( self , line_protocol : str ): \"\"\"\u5c06line-protocol\u6570\u7ec4\u5199\u5165influxdb Args: line_protocol: \u5f85\u5199\u5165\u7684\u6570\u636e\uff0c\u4ee5line-protocol\u6570\u7ec4\u5f62\u5f0f\u5b58\u5728 \"\"\" # todo: add raise error declaration if self . _enable_compress : line_protocol_ = gzip . compress ( line_protocol . encode ( \"utf-8\" )) else : line_protocol_ = line_protocol async with ClientSession () as session : async with session . post ( self . _write_url , data = line_protocol_ , headers = self . _write_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb write error when processing: %s , err code: %s , message: %s \" , { line_protocol [: 100 ]}, err [ \"code\" ], err [ \"message\" ], ) logger . debug ( \"data caused error: %s \" , line_protocol ) raise InfluxDBWriteError ( f \"influxdb write failed, err: { err [ 'message' ] } \" )","title":"InfluxClient"},{"location":"api/dal/influxclient/#influxclient---the-performanct-async-client-for-influxdb","text":"Source code in omicron/dal/influx/influxclient.py class InfluxClient : def __init__ ( self , url : str , token : str , bucket : str , org : str = None , enable_compress = False , chunk_size : int = 5000 , precision : str = \"s\" , ): \"\"\"[summary] Args: url ([type]): [description] token ([type]): [description] bucket ([type]): [description] org ([type], optional): [description]. Defaults to None. enable_compress ([type], optional): [description]. Defaults to False. chunk_size: number of lines to be saved in one request precision: \u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6 \"\"\" self . _url = url self . _bucket = bucket self . _enable_compress = enable_compress self . _org = org self . _org_id = None # \u9700\u8981\u65f6\u901a\u8fc7\u67e5\u8be2\u83b7\u53d6\uff0c\u6b64\u540e\u4e0d\u518d\u66f4\u65b0 self . _token = token # influxdb 2.0\u8d77\u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6\u6709\uff1ans, us, ms, s\u3002\u672c\u5ba2\u6237\u7aef\u53ea\u652f\u6301s, ms\u548cus self . _precision = precision . lower () if self . _precision not in [ \"s\" , \"ms\" , \"us\" ]: # pragma: no cover raise ValueError ( \"precision must be one of ['s', 'ms', 'us']\" ) self . _chunk_size = chunk_size # write self . _write_url = f \" { self . _url } /api/v2/write?org= { self . _org } &bucket= { self . _bucket } &precision= { self . _precision } \" self . 
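A write-path sketch following the `save` and `write` signatures documented above, assuming `client` was created with `precision="s"`; the measurement, columns and values are illustrative.
```
import numpy as np

# a numpy structured array: for this input type `save` requires both
# `measurement` and `time_key`
bars = np.array(
    [
        (np.datetime64("2022-01-04"), "000001.XSHE", 17.2),
        (np.datetime64("2022-01-05"), "000001.XSHE", 17.5),
    ],
    dtype=[("frame", "datetime64[s]"), ("code", "U12"), ("close", "f4")],
)

# inside an async function
await client.save(bars, measurement="stock_bars_1d", time_key="frame", tag_keys=["code"])

# `write` takes pre-built line protocol directly; timestamps are in seconds here
# to match precision="s"
await client.write(
    "stock_bars_1d,code=000001.XSHE close=17.2 1641254400\n"
    "stock_bars_1d,code=000001.XSHE close=17.5 1641340800"
)
```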
_write_headers = { \"Content-Type\" : \"text/plain; charset=utf-8\" , \"Authorization\" : f \"Token { token } \" , \"Accept\" : \"application/json\" , } if self . _enable_compress : self . _write_headers [ \"Content-Encoding\" ] = \"gzip\" self . _query_url = f \" { self . _url } /api/v2/query?org= { self . _org } \" self . _query_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/vnd.flux\" , # influx\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\uff0c\u65e0\u8bba\u5982\u4f55\u6307\u5b9a\uff08\u6216\u8005\u4e0d\u6307\u5b9a\uff09\uff0c\u57282.1\u4e2d\u59cb\u7ec8\u662fcsv\u683c\u5f0f \"Accept\" : \"text/csv\" , } if self . _enable_compress : self . _query_headers [ \"Accept-Encoding\" ] = \"gzip\" self . _delete_url = ( f \" { self . _url } /api/v2/delete?org= { self . _org } &bucket= { self . _bucket } \" ) self . _delete_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/json\" , } async def save ( self , data : Union [ np . ndarray , DataFrame ], measurement : str = None , tag_keys : List [ str ] = [], time_key : str = None , global_tags : Dict = {}, chunk_size : int = None , ) -> None : \"\"\"save `data` into influxdb if `data` is a pandas.DataFrame or numy structured array, it will be converted to line protocol and saved. If `data` is str, use `write` method instead. Args: data: data to be saved measurement: the name of measurement tag_keys: which columns name will be used as tags chunk_size: number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to `self._chunk_size` Raises: InfluxDBWriteError: if write failed \"\"\" # todo: add more errors raise if isinstance ( data , DataFrame ): assert ( measurement is not None ), \"measurement must be specified when data is a DataFrame\" if tag_keys : assert set ( tag_keys ) in set ( data . columns . tolist () ), \"tag_keys must be in data.columns\" serializer = DataframeSerializer ( data , measurement , time_key , tag_keys , global_tags , precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) elif isinstance ( data , np . ndarray ): assert ( measurement is not None ), \"measurement must be specified when data is a numpy array\" assert ( time_key is not None ), \"time_key must be specified when data is a numpy array\" serializer = NumpySerializer ( data , measurement , time_key , tag_keys , global_tags , time_precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) else : raise TypeError ( f \"data must be pandas.DataFrame, numpy array, got { type ( data ) } \" ) async def write ( self , line_protocol : str ): \"\"\"\u5c06line-protocol\u6570\u7ec4\u5199\u5165influxdb Args: line_protocol: \u5f85\u5199\u5165\u7684\u6570\u636e\uff0c\u4ee5line-protocol\u6570\u7ec4\u5f62\u5f0f\u5b58\u5728 \"\"\" # todo: add raise error declaration if self . _enable_compress : line_protocol_ = gzip . compress ( line_protocol . encode ( \"utf-8\" )) else : line_protocol_ = line_protocol async with ClientSession () as session : async with session . post ( self . _write_url , data = line_protocol_ , headers = self . _write_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . 
warning ( \"influxdb write error when processing: %s , err code: %s , message: %s \" , { line_protocol [: 100 ]}, err [ \"code\" ], err [ \"message\" ], ) logger . debug ( \"data caused error: %s \" , line_protocol ) raise InfluxDBWriteError ( f \"influxdb write failed, err: { err [ 'message' ] } \" ) async def query ( self , flux : Union [ Flux , str ], deserializer : Callable = None ) -> Any : \"\"\"flux\u67e5\u8be2 flux\u67e5\u8be2\u7ed3\u679c\u662f\u4e00\u4e2a\u4ee5annotated csv\u683c\u5f0f\u5b58\u50a8\u7684\u6570\u636e\uff0c\u4f8b\u5982\uff1a ``` ,result,table,_time,code,amount,close,factor,high,low,open,volume ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000 ``` \u4e0a\u8ff0`result`\u4e2d\uff0c\u4e8b\u5148\u901a\u8fc7Flux.keep()\u9650\u5236\u4e86\u8fd4\u56de\u7684\u5b57\u6bb5\u4e3a_time,code,amount,close,factor,high,low,open,volume\u3002influxdb\u67e5\u8be2\u8fd4\u56de\u7ed3\u679c\u65f6\uff0c\u603b\u662f\u6309\u7167\u5b57\u6bb5\u540d\u79f0\u5347\u5e8f\u6392\u5217\u3002\u6b64\u5916\uff0c\u603b\u662f\u4f1a\u989d\u5916\u5730\u8fd4\u56de_result, table\u4e24\u4e2a\u5b57\u6bb5\u3002 \u5982\u679c\u4f20\u5165\u4e86deserializer\uff0c\u5219\u4f1a\u8c03\u7528deserializer\u5c06\u5176\u89e3\u6790\u6210\u4e3apython\u5bf9\u8c61\u3002\u5426\u5219\uff0c\u8fd4\u56debytes\u6570\u636e\u3002 Args: flux: flux\u67e5\u8be2\u8bed\u53e5 deserializer: \u53cd\u5e8f\u5217\u5316\u51fd\u6570 Returns: \u5982\u679c\u672a\u63d0\u4f9b\u53cd\u5e8f\u5217\u5316\u51fd\u6570\uff0c\u5219\u8fd4\u56de\u7ed3\u679c\u4e3abytes array(\u5982\u679c\u6307\u5b9a\u4e86compress=True\uff0c\u8fd4\u56de\u7ed3\u679c\u4e3agzip\u89e3\u538b\u7f29\u540e\u7684bytes array)\uff0c\u5426\u5219\u8fd4\u56de\u53cd\u5e8f\u5217\u5316\u540e\u7684python\u5bf9\u8c61 \"\"\" if isinstance ( flux , Flux ): flux = str ( flux ) async with ClientSession () as session : async with session . post ( self . _query_url , data = flux , headers = self . _query_headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( f \"influxdb query error: { err } when processing { flux [: 500 ] } \" ) logger . debug ( \"data caused error: %s \" , flux ) raise InfluxDBQueryError ( f \"influxdb query failed, status code: { err [ 'message' ] } \" ) else : # auto-unzip body = await resp . read () if deserializer : try : return deserializer ( body ) except Exception as e : logger . exception ( e ) logger . warning ( \"failed to deserialize data: %s , the query is: %s \" , body , flux [: 500 ], ) raise else : return body async def drop_measurement ( self , measurement : str ): \"\"\"\u4eceinfluxdb\u4e2d\u5220\u9664\u4e00\u4e2ameasurement \u8c03\u7528\u6b64\u65b9\u6cd5\u540e\uff0c\u5b9e\u9645\u4e0a\u8be5measurement\u4ecd\u7136\u5b58\u5728\uff0c\u53ea\u662f\u6ca1\u6709\u6570\u636e\u3002 \"\"\" # todo: add raise error declaration await self . delete ( measurement , arrow . now () . naive ) async def delete ( self , measurement : str , stop : datetime . datetime , tags : Optional [ Dict [ str , str ]] = {}, start : datetime . 
datetime = None , precision : str = \"s\" , ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9a\u65f6\u95f4\u6bb5\u5185\u7684\u6570\u636e \u5173\u4e8e\u53c2\u6570\uff0c\u8bf7\u53c2\u89c1[Flux.delete][omicron.dal.influx.flux.Flux.delete]\u3002 Args: measurement: \u6307\u5b9ameasurement\u540d\u5b57 stop: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u7ed3\u675f\u65f6\u95f4 start: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u5f00\u59cb\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528EPOCH_START tags: \u6309tag\u8fdb\u884c\u8fc7\u6ee4\u7684\u6761\u4ef6 precision: \u7528\u4ee5\u683c\u5f0f\u5316\u8d77\u59cb\u548c\u7ed3\u675f\u65f6\u95f4\u3002 Raises: InfluxDeleteError: \u5982\u679c\u5220\u9664\u5931\u8d25\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38 \"\"\" # todo: add raise error declaration command = Flux () . delete ( measurement , stop , tags , start = start , precision = precision ) async with ClientSession () as session : async with session . post ( self . _delete_url , data = json . dumps ( command ), headers = self . _delete_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb delete error: %s when processin command %s \" , err [ \"message\" ], command , ) raise InfluxDeleteError ( f \"influxdb delete failed, status code: { err [ 'message' ] } \" ) async def list_buckets ( self ) -> List [ Dict ]: \"\"\"\u5217\u51fainfluxdb\u4e2d\u5bf9\u5e94token\u80fd\u770b\u5230\u7684\u6240\u6709\u7684bucket Returns: list of buckets, each bucket is a dict with keys: ``` id orgID, a 16 bytes hex string type, system or user description name retentionRules createdAt updatedAt links labels ``` \"\"\" url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () raise InfluxSchemaError ( f \"influxdb list bucket failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . json ())[ \"buckets\" ] async def delete_bucket ( self , bucket_id : str = None ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9abucket Args: bucket_id: \u6307\u5b9abucket\u7684id\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u4f1a\u5220\u9664\u672cclient\u5bf9\u5e94\u7684bucket\u3002 \"\"\" if bucket_id is None : buckets = await self . list_buckets () for bucket in buckets : if bucket [ \"type\" ] == \"user\" and bucket [ \"name\" ] == self . _bucket : bucket_id = bucket [ \"id\" ] break else : raise BadParameterError ( \"bucket_id is None, and we can't find bucket with name: %s \" % self . _bucket ) url = f \" { self . _url } /api/v2/buckets/ { bucket_id } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . delete ( url , headers = headers ) as resp : if resp . status != 204 : err = await resp . json () logger . 
warning ( \"influxdb delete bucket error: %s when processin command %s \" , err [ \"message\" ], bucket_id , ) raise InfluxSchemaError ( f \"influxdb delete bucket failed, status code: { err [ 'message' ] } \" ) async def create_bucket ( self , description = None , retention_rules : List [ Dict ] = None , org_id : str = None ) -> str : \"\"\"\u521b\u5efainfluxdb\u4e2d\u6307\u5b9abucket Args: description: \u6307\u5b9abucket\u7684\u63cf\u8ff0 org_id: \u6307\u5b9abucket\u6240\u5c5e\u7684\u7ec4\u7ec7id\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u672cclient\u5bf9\u5e94\u7684\u7ec4\u7ec7id\u3002 Raises: InfluxSchemaError: \u5f53influxdb\u8fd4\u56de\u9519\u8bef\u65f6\uff0c\u6bd4\u5982\u91cd\u590d\u521b\u5efabucket\u7b49\uff0c\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38 Returns: \u65b0\u521b\u5efa\u7684bucket\u7684id \"\"\" if org_id is None : org_id = await self . query_org_id () url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } data = { \"name\" : self . _bucket , \"orgID\" : org_id , \"description\" : description , \"retentionRules\" : retention_rules , } async with ClientSession () as session : async with session . post ( url , data = json . dumps ( data ), headers = headers ) as resp : if resp . status != 201 : err = await resp . json () logger . warning ( \"influxdb create bucket error: %s when processin command %s \" , err [ \"message\" ], data , ) raise InfluxSchemaError ( f \"influxdb create bucket failed, status code: { err [ 'message' ] } \" ) else : result = await resp . json () return result [ \"id\" ] async def list_organizations ( self , offset : int = 0 , limit : int = 100 ) -> List [ Dict ]: \"\"\"\u5217\u51fa\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u6240\u7ec4\u7ec7 Args: offset : \u5206\u9875\u8d77\u70b9 limit : \u6bcf\u9875size Raises: InfluxSchemaError: influxdb\u8fd4\u56de\u7684\u9519\u8bef Returns: list of organizations, each organization is a dict with keys: ``` id : the id of the org links name : the name of the org description createdAt updatedAt ``` \"\"\" url = f \" { self . _url } /api/v2/orgs?offset= { offset } &limit= { limit } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( \"influxdb query orgs err: %s \" , err [ \"message\" ]) raise InfluxSchemaError ( f \"influxdb query orgs failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . json ())[ \"orgs\" ] async def query_org_id ( self , name : str = None ) -> str : \"\"\"\u901a\u8fc7\u7ec4\u7ec7\u540d\u67e5\u627e\u7ec4\u7ec7id \u53ea\u80fd\u67e5\u7684\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u7ec4\u7ec7\u3002\u5982\u679cname\u672a\u63d0\u4f9b\uff0c\u5219\u4f7f\u7528\u672c\u5ba2\u6237\u7aef\u521b\u5efa\u65f6\u4f20\u5165\u7684\u7ec4\u7ec7\u540d\u3002 Args: name: \u6307\u5b9a\u7ec4\u7ec7\u540d Returns: \u7ec4\u7ec7id \"\"\" if name is None : name = self . _org orgs = await self . 
list_organizations () for org in orgs : if org [ \"name\" ] == name : return org [ \"id\" ] raise BadParameterError ( f \"can't find org with name: { name } \" )","title":"InfluxClient - the performanct async client for influxdb"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.__init__","text":"[summary] Parameters: Name Type Description Default url [type] [description] required token [type] [description] required bucket [type] [description] required org [type] [description]. Defaults to None. None enable_compress [type] [description]. Defaults to False. False chunk_size int number of lines to be saved in one request 5000 precision str \u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6 's' Source code in omicron/dal/influx/influxclient.py def __init__ ( self , url : str , token : str , bucket : str , org : str = None , enable_compress = False , chunk_size : int = 5000 , precision : str = \"s\" , ): \"\"\"[summary] Args: url ([type]): [description] token ([type]): [description] bucket ([type]): [description] org ([type], optional): [description]. Defaults to None. enable_compress ([type], optional): [description]. Defaults to False. chunk_size: number of lines to be saved in one request precision: \u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6 \"\"\" self . _url = url self . _bucket = bucket self . _enable_compress = enable_compress self . _org = org self . _org_id = None # \u9700\u8981\u65f6\u901a\u8fc7\u67e5\u8be2\u83b7\u53d6\uff0c\u6b64\u540e\u4e0d\u518d\u66f4\u65b0 self . _token = token # influxdb 2.0\u8d77\u652f\u6301\u7684\u65f6\u95f4\u7cbe\u5ea6\u6709\uff1ans, us, ms, s\u3002\u672c\u5ba2\u6237\u7aef\u53ea\u652f\u6301s, ms\u548cus self . _precision = precision . lower () if self . _precision not in [ \"s\" , \"ms\" , \"us\" ]: # pragma: no cover raise ValueError ( \"precision must be one of ['s', 'ms', 'us']\" ) self . _chunk_size = chunk_size # write self . _write_url = f \" { self . _url } /api/v2/write?org= { self . _org } &bucket= { self . _bucket } &precision= { self . _precision } \" self . _write_headers = { \"Content-Type\" : \"text/plain; charset=utf-8\" , \"Authorization\" : f \"Token { token } \" , \"Accept\" : \"application/json\" , } if self . _enable_compress : self . _write_headers [ \"Content-Encoding\" ] = \"gzip\" self . _query_url = f \" { self . _url } /api/v2/query?org= { self . _org } \" self . _query_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/vnd.flux\" , # influx\u67e5\u8be2\u7ed3\u679c\u683c\u5f0f\uff0c\u65e0\u8bba\u5982\u4f55\u6307\u5b9a\uff08\u6216\u8005\u4e0d\u6307\u5b9a\uff09\uff0c\u57282.1\u4e2d\u59cb\u7ec8\u662fcsv\u683c\u5f0f \"Accept\" : \"text/csv\" , } if self . _enable_compress : self . _query_headers [ \"Accept-Encoding\" ] = \"gzip\" self . _delete_url = ( f \" { self . _url } /api/v2/delete?org= { self . _org } &bucket= { self . _bucket } \" ) self . 
_delete_headers = { \"Authorization\" : f \"Token { token } \" , \"Content-Type\" : \"application/json\" , }","title":"__init__()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.create_bucket","text":"\u521b\u5efainfluxdb\u4e2d\u6307\u5b9abucket Parameters: Name Type Description Default description \u6307\u5b9abucket\u7684\u63cf\u8ff0 None org_id str \u6307\u5b9abucket\u6240\u5c5e\u7684\u7ec4\u7ec7id\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u672cclient\u5bf9\u5e94\u7684\u7ec4\u7ec7id\u3002 None Exceptions: Type Description InfluxSchemaError \u5f53influxdb\u8fd4\u56de\u9519\u8bef\u65f6\uff0c\u6bd4\u5982\u91cd\u590d\u521b\u5efabucket\u7b49\uff0c\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38 Returns: Type Description str \u65b0\u521b\u5efa\u7684bucket\u7684id Source code in omicron/dal/influx/influxclient.py async def create_bucket ( self , description = None , retention_rules : List [ Dict ] = None , org_id : str = None ) -> str : \"\"\"\u521b\u5efainfluxdb\u4e2d\u6307\u5b9abucket Args: description: \u6307\u5b9abucket\u7684\u63cf\u8ff0 org_id: \u6307\u5b9abucket\u6240\u5c5e\u7684\u7ec4\u7ec7id\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528\u672cclient\u5bf9\u5e94\u7684\u7ec4\u7ec7id\u3002 Raises: InfluxSchemaError: \u5f53influxdb\u8fd4\u56de\u9519\u8bef\u65f6\uff0c\u6bd4\u5982\u91cd\u590d\u521b\u5efabucket\u7b49\uff0c\u4f1a\u629b\u51fa\u6b64\u5f02\u5e38 Returns: \u65b0\u521b\u5efa\u7684bucket\u7684id \"\"\" if org_id is None : org_id = await self . query_org_id () url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } data = { \"name\" : self . _bucket , \"orgID\" : org_id , \"description\" : description , \"retentionRules\" : retention_rules , } async with ClientSession () as session : async with session . post ( url , data = json . dumps ( data ), headers = headers ) as resp : if resp . status != 201 : err = await resp . json () logger . warning ( \"influxdb create bucket error: %s when processin command %s \" , err [ \"message\" ], data , ) raise InfluxSchemaError ( f \"influxdb create bucket failed, status code: { err [ 'message' ] } \" ) else : result = await resp . json () return result [ \"id\" ]","title":"create_bucket()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.delete","text":"\u5220\u9664influxdb\u4e2d\u6307\u5b9a\u65f6\u95f4\u6bb5\u5185\u7684\u6570\u636e \u5173\u4e8e\u53c2\u6570\uff0c\u8bf7\u53c2\u89c1 Flux.delete \u3002 Parameters: Name Type Description Default measurement str \u6307\u5b9ameasurement\u540d\u5b57 required stop datetime \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u7ed3\u675f\u65f6\u95f4 required start datetime \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u5f00\u59cb\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528EPOCH_START None tags Optional[Dict[str, str]] \u6309tag\u8fdb\u884c\u8fc7\u6ee4\u7684\u6761\u4ef6 {} precision str \u7528\u4ee5\u683c\u5f0f\u5316\u8d77\u59cb\u548c\u7ed3\u675f\u65f6\u95f4\u3002 's' Exceptions: Type Description InfluxDeleteError \u5982\u679c\u5220\u9664\u5931\u8d25\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38 Source code in omicron/dal/influx/influxclient.py async def delete ( self , measurement : str , stop : datetime . datetime , tags : Optional [ Dict [ str , str ]] = {}, start : datetime . 
datetime = None , precision : str = \"s\" , ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9a\u65f6\u95f4\u6bb5\u5185\u7684\u6570\u636e \u5173\u4e8e\u53c2\u6570\uff0c\u8bf7\u53c2\u89c1[Flux.delete][omicron.dal.influx.flux.Flux.delete]\u3002 Args: measurement: \u6307\u5b9ameasurement\u540d\u5b57 stop: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u7ed3\u675f\u65f6\u95f4 start: \u5f85\u5220\u9664\u8bb0\u5f55\u7684\u5f00\u59cb\u65f6\u95f4\uff0c\u5982\u679c\u672a\u6307\u5b9a\uff0c\u5219\u4f7f\u7528EPOCH_START tags: \u6309tag\u8fdb\u884c\u8fc7\u6ee4\u7684\u6761\u4ef6 precision: \u7528\u4ee5\u683c\u5f0f\u5316\u8d77\u59cb\u548c\u7ed3\u675f\u65f6\u95f4\u3002 Raises: InfluxDeleteError: \u5982\u679c\u5220\u9664\u5931\u8d25\uff0c\u5219\u629b\u51fa\u6b64\u5f02\u5e38 \"\"\" # todo: add raise error declaration command = Flux () . delete ( measurement , stop , tags , start = start , precision = precision ) async with ClientSession () as session : async with session . post ( self . _delete_url , data = json . dumps ( command ), headers = self . _delete_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb delete error: %s when processin command %s \" , err [ \"message\" ], command , ) raise InfluxDeleteError ( f \"influxdb delete failed, status code: { err [ 'message' ] } \" )","title":"delete()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.delete_bucket","text":"\u5220\u9664influxdb\u4e2d\u6307\u5b9abucket Parameters: Name Type Description Default bucket_id str \u6307\u5b9abucket\u7684id\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u4f1a\u5220\u9664\u672cclient\u5bf9\u5e94\u7684bucket\u3002 None Source code in omicron/dal/influx/influxclient.py async def delete_bucket ( self , bucket_id : str = None ): \"\"\"\u5220\u9664influxdb\u4e2d\u6307\u5b9abucket Args: bucket_id: \u6307\u5b9abucket\u7684id\u3002\u5982\u679c\u4e3aNone\uff0c\u5219\u4f1a\u5220\u9664\u672cclient\u5bf9\u5e94\u7684bucket\u3002 \"\"\" if bucket_id is None : buckets = await self . list_buckets () for bucket in buckets : if bucket [ \"type\" ] == \"user\" and bucket [ \"name\" ] == self . _bucket : bucket_id = bucket [ \"id\" ] break else : raise BadParameterError ( \"bucket_id is None, and we can't find bucket with name: %s \" % self . _bucket ) url = f \" { self . _url } /api/v2/buckets/ { bucket_id } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . delete ( url , headers = headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb delete bucket error: %s when processin command %s \" , err [ \"message\" ], bucket_id , ) raise InfluxSchemaError ( f \"influxdb delete bucket failed, status code: { err [ 'message' ] } \" )","title":"delete_bucket()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.drop_measurement","text":"\u4eceinfluxdb\u4e2d\u5220\u9664\u4e00\u4e2ameasurement \u8c03\u7528\u6b64\u65b9\u6cd5\u540e\uff0c\u5b9e\u9645\u4e0a\u8be5measurement\u4ecd\u7136\u5b58\u5728\uff0c\u53ea\u662f\u6ca1\u6709\u6570\u636e\u3002 Source code in omicron/dal/influx/influxclient.py async def drop_measurement ( self , measurement : str ): \"\"\"\u4eceinfluxdb\u4e2d\u5220\u9664\u4e00\u4e2ameasurement \u8c03\u7528\u6b64\u65b9\u6cd5\u540e\uff0c\u5b9e\u9645\u4e0a\u8be5measurement\u4ecd\u7136\u5b58\u5728\uff0c\u53ea\u662f\u6ca1\u6709\u6570\u636e\u3002 \"\"\" # todo: add raise error declaration await self . 
delete ( measurement , arrow . now () . naive )","title":"drop_measurement()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.list_buckets","text":"\u5217\u51fainfluxdb\u4e2d\u5bf9\u5e94token\u80fd\u770b\u5230\u7684\u6240\u6709\u7684bucket Returns: Type Description list of buckets, each bucket is a dict with keys ``` id orgID, a 16 bytes hex string type, system or user description name retentionRules createdAt updatedAt links labels ``` Source code in omicron/dal/influx/influxclient.py async def list_buckets ( self ) -> List [ Dict ]: \"\"\"\u5217\u51fainfluxdb\u4e2d\u5bf9\u5e94token\u80fd\u770b\u5230\u7684\u6240\u6709\u7684bucket Returns: list of buckets, each bucket is a dict with keys: ``` id orgID, a 16 bytes hex string type, system or user description name retentionRules createdAt updatedAt links labels ``` \"\"\" url = f \" { self . _url } /api/v2/buckets\" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () raise InfluxSchemaError ( f \"influxdb list bucket failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . json ())[ \"buckets\" ]","title":"list_buckets()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.list_organizations","text":"\u5217\u51fa\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u6240\u7ec4\u7ec7 Parameters: Name Type Description Default offset \u5206\u9875\u8d77\u70b9 0 limit \u6bcf\u9875size 100 Exceptions: Type Description InfluxSchemaError influxdb\u8fd4\u56de\u7684\u9519\u8bef Returns: Type Description list of organizations, each organization is a dict with keys 1 2 3 4 5 6 id : the id of the org links name : the name of the org description createdAt updatedAt Source code in omicron/dal/influx/influxclient.py async def list_organizations ( self , offset : int = 0 , limit : int = 100 ) -> List [ Dict ]: \"\"\"\u5217\u51fa\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u6240\u7ec4\u7ec7 Args: offset : \u5206\u9875\u8d77\u70b9 limit : \u6bcf\u9875size Raises: InfluxSchemaError: influxdb\u8fd4\u56de\u7684\u9519\u8bef Returns: list of organizations, each organization is a dict with keys: ``` id : the id of the org links name : the name of the org description createdAt updatedAt ``` \"\"\" url = f \" { self . _url } /api/v2/orgs?offset= { offset } &limit= { limit } \" headers = { \"Authorization\" : f \"Token { self . _token } \" } async with ClientSession () as session : async with session . get ( url , headers = headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( \"influxdb query orgs err: %s \" , err [ \"message\" ]) raise InfluxSchemaError ( f \"influxdb query orgs failed, status code: { err [ 'message' ] } \" ) else : return ( await resp . 
json ())[ \"orgs\" ]","title":"list_organizations()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.query","text":"flux\u67e5\u8be2 flux\u67e5\u8be2\u7ed3\u679c\u662f\u4e00\u4e2a\u4ee5annotated csv\u683c\u5f0f\u5b58\u50a8\u7684\u6570\u636e\uff0c\u4f8b\u5982\uff1a 1 2 ,result,table,_time,code,amount,close,factor,high,low,open,volume ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000 \u4e0a\u8ff0 result \u4e2d\uff0c\u4e8b\u5148\u901a\u8fc7Flux.keep()\u9650\u5236\u4e86\u8fd4\u56de\u7684\u5b57\u6bb5\u4e3a_time,code,amount,close,factor,high,low,open,volume\u3002influxdb\u67e5\u8be2\u8fd4\u56de\u7ed3\u679c\u65f6\uff0c\u603b\u662f\u6309\u7167\u5b57\u6bb5\u540d\u79f0\u5347\u5e8f\u6392\u5217\u3002\u6b64\u5916\uff0c\u603b\u662f\u4f1a\u989d\u5916\u5730\u8fd4\u56de_result, table\u4e24\u4e2a\u5b57\u6bb5\u3002 \u5982\u679c\u4f20\u5165\u4e86deserializer\uff0c\u5219\u4f1a\u8c03\u7528deserializer\u5c06\u5176\u89e3\u6790\u6210\u4e3apython\u5bf9\u8c61\u3002\u5426\u5219\uff0c\u8fd4\u56debytes\u6570\u636e\u3002 Parameters: Name Type Description Default flux Union[omicron.dal.influx.flux.Flux, str] flux\u67e5\u8be2\u8bed\u53e5 required deserializer Callable \u53cd\u5e8f\u5217\u5316\u51fd\u6570 None Returns: Type Description Any \u5982\u679c\u672a\u63d0\u4f9b\u53cd\u5e8f\u5217\u5316\u51fd\u6570\uff0c\u5219\u8fd4\u56de\u7ed3\u679c\u4e3abytes array(\u5982\u679c\u6307\u5b9a\u4e86compress=True\uff0c\u8fd4\u56de\u7ed3\u679c\u4e3agzip\u89e3\u538b\u7f29\u540e\u7684bytes array)\uff0c\u5426\u5219\u8fd4\u56de\u53cd\u5e8f\u5217\u5316\u540e\u7684python\u5bf9\u8c61 Source code in omicron/dal/influx/influxclient.py async def query ( self , flux : Union [ Flux , str ], deserializer : Callable = None ) -> Any : \"\"\"flux\u67e5\u8be2 flux\u67e5\u8be2\u7ed3\u679c\u662f\u4e00\u4e2a\u4ee5annotated csv\u683c\u5f0f\u5b58\u50a8\u7684\u6570\u636e\uff0c\u4f8b\u5982\uff1a ``` ,result,table,_time,code,amount,close,factor,high,low,open,volume ,_result,0,2019-01-01T00:00:00Z,000001.XSHE,100000000,5.15,1.23,5.2,5,5.1,1000000 ``` \u4e0a\u8ff0`result`\u4e2d\uff0c\u4e8b\u5148\u901a\u8fc7Flux.keep()\u9650\u5236\u4e86\u8fd4\u56de\u7684\u5b57\u6bb5\u4e3a_time,code,amount,close,factor,high,low,open,volume\u3002influxdb\u67e5\u8be2\u8fd4\u56de\u7ed3\u679c\u65f6\uff0c\u603b\u662f\u6309\u7167\u5b57\u6bb5\u540d\u79f0\u5347\u5e8f\u6392\u5217\u3002\u6b64\u5916\uff0c\u603b\u662f\u4f1a\u989d\u5916\u5730\u8fd4\u56de_result, table\u4e24\u4e2a\u5b57\u6bb5\u3002 \u5982\u679c\u4f20\u5165\u4e86deserializer\uff0c\u5219\u4f1a\u8c03\u7528deserializer\u5c06\u5176\u89e3\u6790\u6210\u4e3apython\u5bf9\u8c61\u3002\u5426\u5219\uff0c\u8fd4\u56debytes\u6570\u636e\u3002 Args: flux: flux\u67e5\u8be2\u8bed\u53e5 deserializer: \u53cd\u5e8f\u5217\u5316\u51fd\u6570 Returns: \u5982\u679c\u672a\u63d0\u4f9b\u53cd\u5e8f\u5217\u5316\u51fd\u6570\uff0c\u5219\u8fd4\u56de\u7ed3\u679c\u4e3abytes array(\u5982\u679c\u6307\u5b9a\u4e86compress=True\uff0c\u8fd4\u56de\u7ed3\u679c\u4e3agzip\u89e3\u538b\u7f29\u540e\u7684bytes array)\uff0c\u5426\u5219\u8fd4\u56de\u53cd\u5e8f\u5217\u5316\u540e\u7684python\u5bf9\u8c61 \"\"\" if isinstance ( flux , Flux ): flux = str ( flux ) async with ClientSession () as session : async with session . post ( self . _query_url , data = flux , headers = self . _query_headers ) as resp : if resp . status != 200 : err = await resp . json () logger . warning ( f \"influxdb query error: { err } when processing { flux [: 500 ] } \" ) logger . 
debug ( \"data caused error: %s \" , flux ) raise InfluxDBQueryError ( f \"influxdb query failed, status code: { err [ 'message' ] } \" ) else : # auto-unzip body = await resp . read () if deserializer : try : return deserializer ( body ) except Exception as e : logger . exception ( e ) logger . warning ( \"failed to deserialize data: %s , the query is: %s \" , body , flux [: 500 ], ) raise else : return body","title":"query()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.query_org_id","text":"\u901a\u8fc7\u7ec4\u7ec7\u540d\u67e5\u627e\u7ec4\u7ec7id \u53ea\u80fd\u67e5\u7684\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u7ec4\u7ec7\u3002\u5982\u679cname\u672a\u63d0\u4f9b\uff0c\u5219\u4f7f\u7528\u672c\u5ba2\u6237\u7aef\u521b\u5efa\u65f6\u4f20\u5165\u7684\u7ec4\u7ec7\u540d\u3002 Parameters: Name Type Description Default name str \u6307\u5b9a\u7ec4\u7ec7\u540d None Returns: Type Description str \u7ec4\u7ec7id Source code in omicron/dal/influx/influxclient.py async def query_org_id ( self , name : str = None ) -> str : \"\"\"\u901a\u8fc7\u7ec4\u7ec7\u540d\u67e5\u627e\u7ec4\u7ec7id \u53ea\u80fd\u67e5\u7684\u672c\u5ba2\u6237\u7aef\u5141\u8bb8\u67e5\u8be2\u7684\u7ec4\u7ec7\u3002\u5982\u679cname\u672a\u63d0\u4f9b\uff0c\u5219\u4f7f\u7528\u672c\u5ba2\u6237\u7aef\u521b\u5efa\u65f6\u4f20\u5165\u7684\u7ec4\u7ec7\u540d\u3002 Args: name: \u6307\u5b9a\u7ec4\u7ec7\u540d Returns: \u7ec4\u7ec7id \"\"\" if name is None : name = self . _org orgs = await self . list_organizations () for org in orgs : if org [ \"name\" ] == name : return org [ \"id\" ] raise BadParameterError ( f \"can't find org with name: { name } \" )","title":"query_org_id()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.save","text":"save data into influxdb if data is a pandas.DataFrame or numy structured array, it will be converted to line protocol and saved. If data is str, use write method instead. Parameters: Name Type Description Default data Union[numpy.ndarray, pandas.core.frame.DataFrame] data to be saved required measurement str the name of measurement None tag_keys List[str] which columns name will be used as tags [] chunk_size int number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to self._chunk_size None Exceptions: Type Description InfluxDBWriteError if write failed Source code in omicron/dal/influx/influxclient.py async def save ( self , data : Union [ np . ndarray , DataFrame ], measurement : str = None , tag_keys : List [ str ] = [], time_key : str = None , global_tags : Dict = {}, chunk_size : int = None , ) -> None : \"\"\"save `data` into influxdb if `data` is a pandas.DataFrame or numy structured array, it will be converted to line protocol and saved. If `data` is str, use `write` method instead. Args: data: data to be saved measurement: the name of measurement tag_keys: which columns name will be used as tags chunk_size: number of lines to be saved in one request. if it's -1, then all data will be written in one request. If it's None, then it will be set to `self._chunk_size` Raises: InfluxDBWriteError: if write failed \"\"\" # todo: add more errors raise if isinstance ( data , DataFrame ): assert ( measurement is not None ), \"measurement must be specified when data is a DataFrame\" if tag_keys : assert set ( tag_keys ) in set ( data . columns . 
tolist () ), \"tag_keys must be in data.columns\" serializer = DataframeSerializer ( data , measurement , time_key , tag_keys , global_tags , precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) elif isinstance ( data , np . ndarray ): assert ( measurement is not None ), \"measurement must be specified when data is a numpy array\" assert ( time_key is not None ), \"time_key must be specified when data is a numpy array\" serializer = NumpySerializer ( data , measurement , time_key , tag_keys , global_tags , time_precision = self . _precision , ) if chunk_size == - 1 : chunk_size = len ( data ) for lines in serializer . serialize ( chunk_size or self . _chunk_size ): await self . write ( lines ) else : raise TypeError ( f \"data must be pandas.DataFrame, numpy array, got { type ( data ) } \" )","title":"save()"},{"location":"api/dal/influxclient/#omicron.dal.influx.influxclient.InfluxClient.write","text":"\u5c06line-protocol\u6570\u7ec4\u5199\u5165influxdb Parameters: Name Type Description Default line_protocol str \u5f85\u5199\u5165\u7684\u6570\u636e\uff0c\u4ee5line-protocol\u6570\u7ec4\u5f62\u5f0f\u5b58\u5728 required Source code in omicron/dal/influx/influxclient.py async def write ( self , line_protocol : str ): \"\"\"\u5c06line-protocol\u6570\u7ec4\u5199\u5165influxdb Args: line_protocol: \u5f85\u5199\u5165\u7684\u6570\u636e\uff0c\u4ee5line-protocol\u6570\u7ec4\u5f62\u5f0f\u5b58\u5728 \"\"\" # todo: add raise error declaration if self . _enable_compress : line_protocol_ = gzip . compress ( line_protocol . encode ( \"utf-8\" )) else : line_protocol_ = line_protocol async with ClientSession () as session : async with session . post ( self . _write_url , data = line_protocol_ , headers = self . _write_headers ) as resp : if resp . status != 204 : err = await resp . json () logger . warning ( \"influxdb write error when processing: %s , err code: %s , message: %s \" , { line_protocol [: 100 ]}, err [ \"code\" ], err [ \"message\" ], ) logger . debug ( \"data caused error: %s \" , line_protocol ) raise InfluxDBWriteError ( f \"influxdb write failed, err: { err [ 'message' ] } \" )","title":"write()"},{"location":"api/dal/serialize/","text":"Serializer and Deserializer \u00b6 DataFrameDeserializer \u00b6 Source code in omicron/dal/influx/serialize.py class DataframeDeserializer ( Serializer ): def __init__ ( self , sort_values : Union [ str , List [ str ]] = None , encoding : str = \"utf-8\" , names : List [ str ] = None , usecols : Union [ List [ int ], List [ str ]] = None , dtype : dict = None , time_col : Union [ int , str ] = None , sep : str = \",\" , header : Union [ int , List [ int ], str ] = \"infer\" , engine : str = None , infer_datetime_format = True , lineterminator : str = None , converters : dict = None , skipfooter = 0 , index_col : Union [ int , str , List [ int ], List [ str ], bool ] = None , skiprows : Union [ int , List [ int ], Callable ] = None , ** kwargs , ): \"\"\"constructor a deserializer which convert a csv-like bytes array to pandas.DataFrame the args are the same as pandas.read_csv. for details, please refer to the official doc: [pandas.read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html) for performance consideration, please specify the following args: - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. 
Only use it when you have thoroughly tested. - specify dtype when possible use `usecols` to specify the columns to read, and `names` to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line. when `names` is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in `names` to access the data (instead of which in `usecols`). Examples: >>> data = \",result,table,_time,code,name\\\\r\\\\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,\u56fd\u8054\u8bc1\u5238\" >>> des = DataframeDeserializer(names=[\"_\", \"result\", \"table\", \"frame\", \"code\", \"name\"], usecols=[\"frame\", \"code\", \"name\"]) >>> des(data) frame code name 0 2019-01-01T09:31:00Z 000002.XSHE \u56fd\u8054\u8bc1\u5238 Args: sort_values: sort the dataframe by the specified columns encoding: if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array sep: the separator/delimiter of each fields header: the row number of the header, default is 'infer' names: the column names of the dataframe index_col: the column number or name of the index column usecols: the column name of the columns to use dtype: the dtype of the columns engine: the engine of the csv file, default is None converters: specify converter for columns. skiprows: the row number to skip skipfooter: the row number to skip at the end of the file time_col: the columns to parse as dates infer_datetime_format: whether to infer the datetime format lineterminator: the line terminator of the csv file, only valid when engine is 'c' kwargs: other arguments \"\"\" self . sort_values = sort_values self . encoding = encoding self . sep = sep self . header = header self . names = names self . index_col = index_col self . usecols = usecols self . dtype = dtype self . engine = engine self . converters = converters or {} self . skiprows = skiprows self . skipfooter = skipfooter self . infer_datetime_format = infer_datetime_format self . lineterminator = lineterminator self . kwargs = kwargs if names is not None : self . header = 0 if time_col is not None : self . converters [ time_col ] = lambda x : ciso8601 . parse_datetime_as_naive ( x ) def __call__ ( self , data : Union [ str , bytes ]) -> pd . DataFrame : if isinstance ( data , str ): # treat data as string stream = io . StringIO ( data ) else : stream = io . StringIO ( data . decode ( self . encoding )) df = pd . read_csv ( stream , sep = self . sep , header = self . header , names = self . names , index_col = self . index_col , usecols = self . usecols , dtype = self . dtype , engine = self . engine , converters = self . converters , skiprows = self . skiprows , skipfooter = self . skipfooter , infer_datetime_format = self . infer_datetime_format , lineterminator = self . lineterminator , ** self . kwargs , ) if self . usecols : df = df [ list ( self . usecols )] if self . sort_values is not None : return df . sort_values ( self . sort_values ) else : return df __init__ ( self , sort_values = None , encoding = 'utf-8' , names = None , usecols = None , dtype = None , time_col = None , sep = ',' , header = 'infer' , engine = None , infer_datetime_format = True , lineterminator = None , converters = None , skipfooter = 0 , index_col = None , skiprows = None , ** kwargs ) special \u00b6 constructor a deserializer which convert a csv-like bytes array to pandas.DataFrame the args are the same as pandas.read_csv. 
for details, please refer to the official doc: pandas.read_csv for performance consideration, please specify the following args: - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested. 1 - specify dtype when possible use usecols to specify the columns to read, and names to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line. when names is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in names to access the data (instead of which in usecols ). Examples: >>> data = \",result,table,_time,code,name \\r\\n ,_result,0,2019-01-01T09:31:00Z,000002.XSHE,\u56fd\u8054\u8bc1\u5238\" >>> des = DataframeDeserializer ( names = [ \"_\" , \"result\" , \"table\" , \"frame\" , \"code\" , \"name\" ], usecols = [ \"frame\" , \"code\" , \"name\" ]) >>> des ( data ) frame code name 0 2019 - 01 - 01 T09 : 31 : 00 Z 000002. XSHE \u56fd\u8054\u8bc1\u5238 Parameters: Name Type Description Default sort_values Union[str, List[str]] sort the dataframe by the specified columns None encoding str if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array 'utf-8' sep str the separator/delimiter of each fields ',' header Union[int, List[int], str] the row number of the header, default is 'infer' 'infer' names List[str] the column names of the dataframe None index_col Union[int, str, List[int], List[str], bool] the column number or name of the index column None usecols Union[List[int], List[str]] the column name of the columns to use None dtype dict the dtype of the columns None engine str the engine of the csv file, default is None None converters dict specify converter for columns. None skiprows Union[int, List[int], Callable] the row number to skip None skipfooter the row number to skip at the end of the file 0 time_col Union[int, str] the columns to parse as dates None infer_datetime_format whether to infer the datetime format True lineterminator str the line terminator of the csv file, only valid when engine is 'c' None kwargs other arguments {} Source code in omicron/dal/influx/serialize.py def __init__ ( self , sort_values : Union [ str , List [ str ]] = None , encoding : str = \"utf-8\" , names : List [ str ] = None , usecols : Union [ List [ int ], List [ str ]] = None , dtype : dict = None , time_col : Union [ int , str ] = None , sep : str = \",\" , header : Union [ int , List [ int ], str ] = \"infer\" , engine : str = None , infer_datetime_format = True , lineterminator : str = None , converters : dict = None , skipfooter = 0 , index_col : Union [ int , str , List [ int ], List [ str ], bool ] = None , skiprows : Union [ int , List [ int ], Callable ] = None , ** kwargs , ): \"\"\"constructor a deserializer which convert a csv-like bytes array to pandas.DataFrame the args are the same as pandas.read_csv. for details, please refer to the official doc: [pandas.read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html) for performance consideration, please specify the following args: - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested. 
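A sketch of `DataframeDeserializer` mirroring the doctest above: rename the columns via `names`, keep only three of them via `usecols`, and optionally pass the same object to `InfluxClient.query` so responses are parsed straight into a DataFrame. The sample data is the same illustrative row used in the doctest.
```
from omicron.dal.influx.serialize import DataframeDeserializer

data = ",result,table,_time,code,name\r\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,国联证券"

des = DataframeDeserializer(
    names=["_", "result", "table", "frame", "code", "name"],
    usecols=["frame", "code", "name"],
)
df = des(data)   # a DataFrame with the columns frame, code, name

# the same object can be passed as `deserializer=des` to InfluxClient.query(...)
# so the annotated-CSV response is deserialized directly
```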
- specify dtype when possible use `usecols` to specify the columns to read, and `names` to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line. when `names` is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in `names` to access the data (instead of which in `usecols`). Examples: >>> data = \",result,table,_time,code,name\\\\r\\\\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,\u56fd\u8054\u8bc1\u5238\" >>> des = DataframeDeserializer(names=[\"_\", \"result\", \"table\", \"frame\", \"code\", \"name\"], usecols=[\"frame\", \"code\", \"name\"]) >>> des(data) frame code name 0 2019-01-01T09:31:00Z 000002.XSHE \u56fd\u8054\u8bc1\u5238 Args: sort_values: sort the dataframe by the specified columns encoding: if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array sep: the separator/delimiter of each fields header: the row number of the header, default is 'infer' names: the column names of the dataframe index_col: the column number or name of the index column usecols: the column name of the columns to use dtype: the dtype of the columns engine: the engine of the csv file, default is None converters: specify converter for columns. skiprows: the row number to skip skipfooter: the row number to skip at the end of the file time_col: the columns to parse as dates infer_datetime_format: whether to infer the datetime format lineterminator: the line terminator of the csv file, only valid when engine is 'c' kwargs: other arguments \"\"\" self . sort_values = sort_values self . encoding = encoding self . sep = sep self . header = header self . names = names self . index_col = index_col self . usecols = usecols self . dtype = dtype self . engine = engine self . converters = converters or {} self . skiprows = skiprows self . skipfooter = skipfooter self . infer_datetime_format = infer_datetime_format self . lineterminator = lineterminator self . kwargs = kwargs if names is not None : self . header = 0 if time_col is not None : self . converters [ time_col ] = lambda x : ciso8601 . parse_datetime_as_naive ( x ) NumpyDeserializer \u00b6 Source code in omicron/dal/influx/serialize.py class NumpyDeserializer ( Serializer ): def __init__ ( self , dtype : List [ tuple ] = \"float\" , sort_values : Union [ str , List [ str ]] = None , use_cols : Union [ List [ str ], List [ int ]] = None , parse_date : Union [ int , str ] = \"_time\" , sep : str = \",\" , encoding : str = \"utf-8\" , skip_rows : Union [ int , List [ int ]] = 1 , header_line : int = 1 , comments : str = \"#\" , converters : Mapping [ int , Callable ] = None , ): \"\"\"construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype. by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g. ``` dtype = [('col_1', 'datetime64[s]'), ('col_2', ' 1 : assert all ( [ isinstance ( x , int ) for x in self . converters . keys ()] ), \"converters must be a dict of column index to converter function, if there's no header\" self . 
_parsed_headers = None def _parse_header_once ( self , stream ): \"\"\"parse header and convert use_cols, if columns is specified in string. And if parse_date is required, add it into converters Args: stream : [description] Raises: SerializationError: [description] \"\"\" if self . header_line is None or self . _parsed_headers is not None : return try : line = stream . readlines ( self . header_line )[ - 1 ] cols = line . strip () . split ( self . sep ) self . _parsed_headers = cols use_cols = self . use_cols if use_cols is not None and isinstance ( use_cols [ 0 ], str ): self . use_cols = [ cols . index ( col ) for col in self . use_cols ] # convert keys of converters to int converters = { cols . index ( k ): v for k , v in self . converters . items ()} self . converters = converters if isinstance ( self . parse_date , str ): parse_date = cols . index ( self . parse_date ) if parse_date in self . converters . keys (): logger . debug ( \"specify duplicated converter in both parse_date and converters for col %s , use converters.\" , self . parse_date , ) else : # \u589e\u52a0parse_date\u5230converters self . converters [ parse_date ] = lambda x : ciso8601 . parse_datetime_as_naive ( x ) stream . seek ( 0 ) except ( IndexError , ValueError ): if line . strip () == \"\" : content = \"\" . join ( stream . readlines ()) . strip () if len ( content ) > 0 : raise SerializationError ( f \"specified heder line { self . header_line } is empty\" ) else : raise EmptyResult () else : raise SerializationError ( f \"bad header[ { self . header_line } ]: { line } \" ) def __call__ ( self , data : bytes ) -> np . ndarray : if self . encoding and isinstance ( data , bytes ): stream = io . StringIO ( data . decode ( self . encoding )) else : stream = io . StringIO ( data ) try : self . _parse_header_once ( stream ) except EmptyResult : return np . empty (( 0 ,), dtype = self . dtype ) arr = np . loadtxt ( stream . readlines (), delimiter = self . sep , skiprows = self . skip_rows , dtype = self . dtype , usecols = self . use_cols , converters = self . converters , encoding = self . encoding , ) # \u5982\u679c\u8fd4\u56de\u4ec5\u4e00\u6761\u8bb0\u5f55\uff0c\u6709\u65f6\u4f1a\u51fa\u73b0 shape == () if arr . shape == tuple (): arr = arr . reshape (( - 1 ,)) if self . sort_values is not None and arr . size > 1 : return np . sort ( arr , order = self . sort_values ) else : return arr __init__ ( self , dtype = 'float' , sort_values = None , use_cols = None , parse_date = '_time' , sep = ',' , encoding = 'utf-8' , skip_rows = 1 , header_line = 1 , comments = '#' , converters = None ) special \u00b6 construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype. by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g. 1 dtype = [('col_1', 'datetime64[s]'), ('col_2', ' 1 : assert all ( [ isinstance ( x , int ) for x in self . converters . keys ()] ), \"converters must be a dict of column index to converter function, if there's no header\" self . 
_parsed_headers = None","title":"Serialize"},{"location":"api/dal/serialize/#serializer-and-deserializer","text":"","title":"Serializer and Deserializer"},{"location":"api/dal/serialize/#dataframedeserializer","text":"Source code in omicron/dal/influx/serialize.py class DataframeDeserializer ( Serializer ): def __init__ ( self , sort_values : Union [ str , List [ str ]] = None , encoding : str = \"utf-8\" , names : List [ str ] = None , usecols : Union [ List [ int ], List [ str ]] = None , dtype : dict = None , time_col : Union [ int , str ] = None , sep : str = \",\" , header : Union [ int , List [ int ], str ] = \"infer\" , engine : str = None , infer_datetime_format = True , lineterminator : str = None , converters : dict = None , skipfooter = 0 , index_col : Union [ int , str , List [ int ], List [ str ], bool ] = None , skiprows : Union [ int , List [ int ], Callable ] = None , ** kwargs , ): \"\"\"constructor a deserializer which convert a csv-like bytes array to pandas.DataFrame the args are the same as pandas.read_csv. for details, please refer to the official doc: [pandas.read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html) for performance consideration, please specify the following args: - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested. - specify dtype when possible use `usecols` to specify the columns to read, and `names` to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line. when `names` is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in `names` to access the data (instead of which in `usecols`). Examples: >>> data = \",result,table,_time,code,name\\\\r\\\\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,\u56fd\u8054\u8bc1\u5238\" >>> des = DataframeDeserializer(names=[\"_\", \"result\", \"table\", \"frame\", \"code\", \"name\"], usecols=[\"frame\", \"code\", \"name\"]) >>> des(data) frame code name 0 2019-01-01T09:31:00Z 000002.XSHE \u56fd\u8054\u8bc1\u5238 Args: sort_values: sort the dataframe by the specified columns encoding: if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array sep: the separator/delimiter of each fields header: the row number of the header, default is 'infer' names: the column names of the dataframe index_col: the column number or name of the index column usecols: the column name of the columns to use dtype: the dtype of the columns engine: the engine of the csv file, default is None converters: specify converter for columns. skiprows: the row number to skip skipfooter: the row number to skip at the end of the file time_col: the columns to parse as dates infer_datetime_format: whether to infer the datetime format lineterminator: the line terminator of the csv file, only valid when engine is 'c' kwargs: other arguments \"\"\" self . sort_values = sort_values self . encoding = encoding self . sep = sep self . header = header self . names = names self . index_col = index_col self . usecols = usecols self . dtype = dtype self . engine = engine self . converters = converters or {} self . skiprows = skiprows self . skipfooter = skipfooter self . infer_datetime_format = infer_datetime_format self . lineterminator = lineterminator self . kwargs = kwargs if names is not None : self . 
header = 0 if time_col is not None : self . converters [ time_col ] = lambda x : ciso8601 . parse_datetime_as_naive ( x ) def __call__ ( self , data : Union [ str , bytes ]) -> pd . DataFrame : if isinstance ( data , str ): # treat data as string stream = io . StringIO ( data ) else : stream = io . StringIO ( data . decode ( self . encoding )) df = pd . read_csv ( stream , sep = self . sep , header = self . header , names = self . names , index_col = self . index_col , usecols = self . usecols , dtype = self . dtype , engine = self . engine , converters = self . converters , skiprows = self . skiprows , skipfooter = self . skipfooter , infer_datetime_format = self . infer_datetime_format , lineterminator = self . lineterminator , ** self . kwargs , ) if self . usecols : df = df [ list ( self . usecols )] if self . sort_values is not None : return df . sort_values ( self . sort_values ) else : return df","title":"DataFrameDeserializer"},{"location":"api/dal/serialize/#omicron.dal.influx.serialize.DataframeDeserializer.__init__","text":"constructor a deserializer which convert a csv-like bytes array to pandas.DataFrame the args are the same as pandas.read_csv. for details, please refer to the official doc: pandas.read_csv for performance consideration, please specify the following args: - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested. 1 - specify dtype when possible use usecols to specify the columns to read, and names to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line. when names is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in names to access the data (instead of which in usecols ). Examples: >>> data = \",result,table,_time,code,name \\r\\n ,_result,0,2019-01-01T09:31:00Z,000002.XSHE,\u56fd\u8054\u8bc1\u5238\" >>> des = DataframeDeserializer ( names = [ \"_\" , \"result\" , \"table\" , \"frame\" , \"code\" , \"name\" ], usecols = [ \"frame\" , \"code\" , \"name\" ]) >>> des ( data ) frame code name 0 2019 - 01 - 01 T09 : 31 : 00 Z 000002. XSHE \u56fd\u8054\u8bc1\u5238 Parameters: Name Type Description Default sort_values Union[str, List[str]] sort the dataframe by the specified columns None encoding str if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array 'utf-8' sep str the separator/delimiter of each fields ',' header Union[int, List[int], str] the row number of the header, default is 'infer' 'infer' names List[str] the column names of the dataframe None index_col Union[int, str, List[int], List[str], bool] the column number or name of the index column None usecols Union[List[int], List[str]] the column name of the columns to use None dtype dict the dtype of the columns None engine str the engine of the csv file, default is None None converters dict specify converter for columns. 
None skiprows Union[int, List[int], Callable] the row number to skip None skipfooter the row number to skip at the end of the file 0 time_col Union[int, str] the columns to parse as dates None infer_datetime_format whether to infer the datetime format True lineterminator str the line terminator of the csv file, only valid when engine is 'c' None kwargs other arguments {} Source code in omicron/dal/influx/serialize.py def __init__ ( self , sort_values : Union [ str , List [ str ]] = None , encoding : str = \"utf-8\" , names : List [ str ] = None , usecols : Union [ List [ int ], List [ str ]] = None , dtype : dict = None , time_col : Union [ int , str ] = None , sep : str = \",\" , header : Union [ int , List [ int ], str ] = \"infer\" , engine : str = None , infer_datetime_format = True , lineterminator : str = None , converters : dict = None , skipfooter = 0 , index_col : Union [ int , str , List [ int ], List [ str ], bool ] = None , skiprows : Union [ int , List [ int ], Callable ] = None , ** kwargs , ): \"\"\"constructor a deserializer which convert a csv-like bytes array to pandas.DataFrame the args are the same as pandas.read_csv. for details, please refer to the official doc: [pandas.read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html) for performance consideration, please specify the following args: - engine = 'c' or 'pyarrow' when possible. Be noticed that 'pyarrow' is the fastest (multi-threaded supported) but may be error-prone. Only use it when you have thoroughly tested. - specify dtype when possible use `usecols` to specify the columns to read, and `names` to specify the column names (i.e., rename the columns), otherwise, the column names will be inferred from the first line. when `names` is specified, it has to be as same length as actual columns of the data. If this causes column renaming, then you should always use column name specified in `names` to access the data (instead of which in `usecols`). Examples: >>> data = \",result,table,_time,code,name\\\\r\\\\n,_result,0,2019-01-01T09:31:00Z,000002.XSHE,\u56fd\u8054\u8bc1\u5238\" >>> des = DataframeDeserializer(names=[\"_\", \"result\", \"table\", \"frame\", \"code\", \"name\"], usecols=[\"frame\", \"code\", \"name\"]) >>> des(data) frame code name 0 2019-01-01T09:31:00Z 000002.XSHE \u56fd\u8054\u8bc1\u5238 Args: sort_values: sort the dataframe by the specified columns encoding: if the data is bytes, then encoding is required, due to pandas.read_csv only handle string array sep: the separator/delimiter of each fields header: the row number of the header, default is 'infer' names: the column names of the dataframe index_col: the column number or name of the index column usecols: the column name of the columns to use dtype: the dtype of the columns engine: the engine of the csv file, default is None converters: specify converter for columns. skiprows: the row number to skip skipfooter: the row number to skip at the end of the file time_col: the columns to parse as dates infer_datetime_format: whether to infer the datetime format lineterminator: the line terminator of the csv file, only valid when engine is 'c' kwargs: other arguments \"\"\" self . sort_values = sort_values self . encoding = encoding self . sep = sep self . header = header self . names = names self . index_col = index_col self . usecols = usecols self . dtype = dtype self . engine = engine self . converters = converters or {} self . skiprows = skiprows self . skipfooter = skipfooter self . 
infer_datetime_format = infer_datetime_format self . lineterminator = lineterminator self . kwargs = kwargs if names is not None : self . header = 0 if time_col is not None : self . converters [ time_col ] = lambda x : ciso8601 . parse_datetime_as_naive ( x )","title":"__init__()"},{"location":"api/dal/serialize/#numpydeserializer","text":"Source code in omicron/dal/influx/serialize.py class NumpyDeserializer ( Serializer ): def __init__ ( self , dtype : List [ tuple ] = \"float\" , sort_values : Union [ str , List [ str ]] = None , use_cols : Union [ List [ str ], List [ int ]] = None , parse_date : Union [ int , str ] = \"_time\" , sep : str = \",\" , encoding : str = \"utf-8\" , skip_rows : Union [ int , List [ int ]] = 1 , header_line : int = 1 , comments : str = \"#\" , converters : Mapping [ int , Callable ] = None , ): \"\"\"construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype. by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g. ``` dtype = [('col_1', 'datetime64[s]'), ('col_2', ' 1 : assert all ( [ isinstance ( x , int ) for x in self . converters . keys ()] ), \"converters must be a dict of column index to converter function, if there's no header\" self . _parsed_headers = None def _parse_header_once ( self , stream ): \"\"\"parse header and convert use_cols, if columns is specified in string. And if parse_date is required, add it into converters Args: stream : [description] Raises: SerializationError: [description] \"\"\" if self . header_line is None or self . _parsed_headers is not None : return try : line = stream . readlines ( self . header_line )[ - 1 ] cols = line . strip () . split ( self . sep ) self . _parsed_headers = cols use_cols = self . use_cols if use_cols is not None and isinstance ( use_cols [ 0 ], str ): self . use_cols = [ cols . index ( col ) for col in self . use_cols ] # convert keys of converters to int converters = { cols . index ( k ): v for k , v in self . converters . items ()} self . converters = converters if isinstance ( self . parse_date , str ): parse_date = cols . index ( self . parse_date ) if parse_date in self . converters . keys (): logger . debug ( \"specify duplicated converter in both parse_date and converters for col %s , use converters.\" , self . parse_date , ) else : # \u589e\u52a0parse_date\u5230converters self . converters [ parse_date ] = lambda x : ciso8601 . parse_datetime_as_naive ( x ) stream . seek ( 0 ) except ( IndexError , ValueError ): if line . strip () == \"\" : content = \"\" . join ( stream . readlines ()) . strip () if len ( content ) > 0 : raise SerializationError ( f \"specified heder line { self . header_line } is empty\" ) else : raise EmptyResult () else : raise SerializationError ( f \"bad header[ { self . header_line } ]: { line } \" ) def __call__ ( self , data : bytes ) -> np . ndarray : if self . encoding and isinstance ( data , bytes ): stream = io . StringIO ( data . decode ( self . encoding )) else : stream = io . StringIO ( data ) try : self . _parse_header_once ( stream ) except EmptyResult : return np . empty (( 0 ,), dtype = self . dtype ) arr = np . loadtxt ( stream . 
readlines (), delimiter = self . sep , skiprows = self . skip_rows , dtype = self . dtype , usecols = self . use_cols , converters = self . converters , encoding = self . encoding , ) # \u5982\u679c\u8fd4\u56de\u4ec5\u4e00\u6761\u8bb0\u5f55\uff0c\u6709\u65f6\u4f1a\u51fa\u73b0 shape == () if arr . shape == tuple (): arr = arr . reshape (( - 1 ,)) if self . sort_values is not None and arr . size > 1 : return np . sort ( arr , order = self . sort_values ) else : return arr","title":"NumpyDeserializer"},{"location":"api/dal/serialize/#omicron.dal.influx.serialize.NumpyDeserializer.__init__","text":"construct a deserializer, which will convert a csv like multiline string/bytes array to a numpy array the data to be deserialized will be first split into array of fields, then use use_cols to select which fields to use, and re-order them by the order of use_cols. After that, the fields will be converted to numpy array and converted into dtype. by default dtype is float, which means the data will be converted to float. If you need to convert to a numpy structured array, then you can specify the dtype as a list of tuples, e.g. 1 dtype = [('col_1', 'datetime64[s]'), ('col_2', ' 1 : assert all ( [ isinstance ( x , int ) for x in self . converters . keys ()] ), \"converters must be a dict of column index to converter function, if there's no header\" self . _parsed_headers = None","title":"__init__()"},{"location":"api/plotting/candlestick/","text":"\u7ed8\u5236K\u7ebf\u56fe\u3002 \u7528\u6cd5\u793a\u4f8b \u00b6 \u6ce8\u610f\u793a\u4f8b\u9700\u8981\u5728notebook\u4e2d\u8fd0\u884c\uff0c\u5426\u5219\u65e0\u6cd5\u751f\u6210\u56fe\u3002 1 2 3 4 5 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars ) cs . plot () \u8fd9\u5c06\u751f\u6210\u4e0b\u56fe\uff1a \u9ed8\u8ba4\u5730\uff0c\u5c06\u663e\u793a\u6210\u4ea4\u91cf\u548cRSI\u6307\u6807\u4e24\u4e2a\u526f\u56fe\u3002\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u65b9\u5f0f\u6765\u5b9a\u5236\uff1a 1 2 3 4 5 cs = Candlestick ( bars , show_volume = True , show_rsi = True , show_peaks = False } cs . plot () \u589e\u52a0\u6807\u8bb0 \u00b6 1 2 3 4 5 6 7 8 9 10 11 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars , show_volume = True , show_rsi = False , show_peaks = True ) cs . add_marks ([ 20 , 50 ]) cs . plot () \u8fd9\u5c06\u5728k\u7ebf\u4e0a\u663e\u793a\u4e24\u4e2a\u52a0\u53f7\uff1a \u663e\u793a\u5e03\u6797\u5e26 \u00b6 1 2 3 4 5 6 7 8 9 10 11 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars , show_volume = True , show_rsi = False , show_peaks = True ) cs . add_indicator ( \"bbands\" , 20 ) cs . plot () \u663e\u793a\u5e73\u53f0 \u00b6 1 2 3 4 5 6 7 8 9 10 11 12 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars , show_volume = True , show_rsi = False , show_peaks = True ) cs . mark_bbox () cs . plot () Candlestick \u00b6 Source code in omicron/plotting/candlestick.py class Candlestick : RED = \"#FF4136\" GREEN = \"#3DAA70\" TRANSPARENT = \"rgba(0,0,0,0)\" LIGHT_GRAY = \"rgba(0, 0, 0, 0.1)\" MA_COLORS = { 5 : \"#1432F5\" , 10 : \"#EB52F7\" , 20 : \"#C0C0C0\" , 30 : \"#882111\" , 60 : \"#5E8E28\" , 120 : \"#4294F7\" , 250 : \"#F09937\" , } def __init__ ( self , bars : np . 
ndarray , ma_groups : List [ int ] = None , title : str = None , show_volume = True , show_rsi = True , show_peaks = False , width = None , height = None , ** kwargs , ): \"\"\"\u6784\u9020\u51fd\u6570 Args: bars: \u884c\u60c5\u6570\u636e ma_groups: \u5747\u7ebf\u7ec4\u53c2\u6570\u3002\u6bd4\u5982[5, 10, 20]\u8868\u660e\u5411k\u7ebf\u56fe\u4e2d\u6dfb\u52a05, 10, 20\u65e5\u5747\u7ebf\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4ece\u6570\u7ec4[5, 10, 20, 30, 60, 120, 250]\u4e2d\u53d6\u76f4\u5230\u4e0e`len(bars) - 5`\u5339\u914d\u7684\u53c2\u6570\u4e3a\u6b62\u3002\u6bd4\u5982bars\u957f\u5ea6\u4e3a30\uff0c\u5219\u5c06\u53d6[5, 10, 20]\u6765\u7ed8\u5236\u5747\u7ebf\u3002 title: k\u7ebf\u56fe\u7684\u6807\u9898 show_volume: \u662f\u5426\u663e\u793a\u6210\u4ea4\u91cf\u56fe show_rsi: \u662f\u5426\u663e\u793aRSI\u56fe\u3002\u7f3a\u7701\u663e\u793a\u53c2\u6570\u4e3a6\u7684RSI\u56fe\u3002 show_peaks: \u662f\u5426\u6807\u8bb0\u68c0\u6d4b\u51fa\u6765\u7684\u5cf0\u8ddf\u8c37\u3002 width: the width in 'px' units of the figure height: the height in 'px' units of the figure Keyword Args: rsi_win int: default is 6 \"\"\" self . title = title self . bars = bars self . width = width self . height = height # traces for main area self . main_traces = {} # traces for indicator area self . ind_traces = {} self . ticks = self . _format_tick ( bars [ \"frame\" ]) self . _bar_close = array_math_round ( bars [ \"close\" ], 2 ) . astype ( np . float64 ) # for every candlestick, it must contain a candlestick plot cs = go . Candlestick ( x = self . ticks , open = bars [ \"open\" ], high = bars [ \"high\" ], low = bars [ \"low\" ], close = self . _bar_close , line = dict ({ \"width\" : 1 }), name = \"K\u7ebf\" , ** kwargs , ) # Set line and fill colors cs . increasing . fillcolor = \"rgba(255,255,255,0.9)\" cs . increasing . line . color = self . RED cs . decreasing . fillcolor = self . GREEN cs . decreasing . line . color = self . GREEN self . main_traces [ \"ohlc\" ] = cs if show_volume : self . add_indicator ( \"volume\" ) if show_peaks : self . add_main_trace ( \"peaks\" ) if show_rsi : self . add_indicator ( \"rsi\" , win = kwargs . get ( \"rsi_win\" , 6 )) # \u589e\u52a0\u5747\u7ebf if ma_groups is None : nbars = len ( bars ) if nbars < 9 : ma_groups = [] else : groups = np . array ([ 5 , 10 , 20 , 30 , 60 , 120 , 250 ]) idx = max ( np . argwhere ( groups < ( nbars - 5 ))) . item () + 1 ma_groups = groups [: idx ] for win in ma_groups : name = f \"ma { win } \" if win > len ( bars ): continue ma = moving_average ( self . _bar_close , win ) line = go . Scatter ( y = ma , x = self . ticks , name = name , line = dict ( width = 1 , color = self . MA_COLORS . get ( win )), ) self . main_traces [ name ] = line @property def figure ( self ): \"\"\"\u8fd4\u56de\u4e00\u4e2afigure\u5bf9\u8c61\"\"\" rows = len ( self . ind_traces ) + 1 specs = [[{ \"secondary_y\" : False }]] * rows specs [ 0 ][ 0 ][ \"secondary_y\" ] = True row_heights = [ 0.7 , * ([ 0.3 / ( rows - 1 )] * ( rows - 1 ))] print ( row_heights ) cols = 1 fig = make_subplots ( rows = rows , cols = cols , shared_xaxes = True , vertical_spacing = 0.1 , subplot_titles = ( self . title , * self . ind_traces . keys ()), row_heights = row_heights , specs = specs , ) for _ , trace in self . main_traces . items (): fig . add_trace ( trace , row = 1 , col = 1 ) for i , ( _ , trace ) in enumerate ( self . ind_traces . items ()): fig . add_trace ( trace , row = i + 2 , col = 1 ) ymin = np . min ( self . bars [ \"low\" ]) ymax = np . max ( self . 
bars [ \"high\" ]) ylim = [ ymin * 0.95 , ymax * 1.05 ] # \u663e\u793a\u5341\u5b57\u5149\u6807 fig . update_xaxes ( showgrid = False , showspikes = True , spikemode = \"across\" , spikesnap = \"cursor\" , spikecolor = \"grey\" , spikedash = \"solid\" , spikethickness = 1 , ) fig . update_yaxes ( showspikes = True , spikemode = \"across\" , spikesnap = \"cursor\" , spikedash = \"solid\" , spikecolor = \"grey\" , spikethickness = 1 , showgrid = True , gridcolor = self . LIGHT_GRAY , ) fig . update_xaxes ( nticks = len ( self . bars ) // 10 , ticklen = 10 , ticks = \"outside\" , minor = dict ( nticks = 5 , ticklen = 5 , ticks = \"outside\" ), row = rows , col = 1 , ) # \u8bbe\u7f6eK\u7ebf\u663e\u793a\u533a\u57df if self . width : win_size = int ( self . width // 10 ) else : win_size = 120 fig . update_xaxes ( type = \"category\" , range = [ len ( self . bars ) - win_size , len ( self . bars ) - 1 ] ) fig . update_layout ( yaxis = dict ( range = ylim ), hovermode = \"x unified\" , plot_bgcolor = self . TRANSPARENT , xaxis_rangeslider_visible = False , ) if self . width : fig . update_layout ( width = self . width ) if self . height : fig . update_layout ( height = self . height ) return fig def _format_tick ( self , tm : np . array ) -> NDArray : if tm . item ( 0 ) . hour == 0 : # assume it's date return np . array ( [ f \" { x . item () . year : 02 } - { x . item () . month : 02 } - { x . item () . day : 02 } \" for x in tm ] ) else : return np . array ( [ f \" { x . item () . month : 02 } - { x . item () . day : 02 } { x . item () . hour : 02 } : { x . item () . minute : 02 } \" for x in tm ] ) def _remove_ma ( self ): traces = {} for name in self . main_traces : if not name . startswith ( \"ma\" ): traces [ name ] = self . main_traces [ name ] self . main_traces = traces def add_main_trace ( self , trace_name : str , ** kwargs ): \"\"\"add trace to main plot \u652f\u6301\u7684\u56fe\u4f8b\u7c7b\u522b\u6709peaks, bbox\uff08bounding-box), bt(\u56de\u6d4b), support_line, resist_line Args: trace_name : \u56fe\u4f8b\u540d\u79f0 **kwargs : \u5176\u4ed6\u53c2\u6570 \"\"\" if trace_name == \"peaks\" : self . mark_peaks_and_valleys ( kwargs . get ( \"up_thres\" , 0.03 ), kwargs . get ( \"down_thres\" , - 0.03 ) ) # \u6807\u6ce8\u77e9\u5f62\u6846 elif trace_name == \"bbox\" : self . add_bounding_box ( kwargs . get ( \"boxes\" )) # \u56de\u6d4b\u7ed3\u679c elif trace_name == \"bt\" : self . add_backtest_result ( kwargs . get ( \"bt\" )) # \u589e\u52a0\u76f4\u7ebf elif trace_name == \"support_line\" : self . add_line ( \"\u652f\u6491\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) elif trace_name == \"resist_line\" : self . add_line ( \"\u538b\u529b\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) def add_line ( self , trace_name : str , x : List [ int ], y : List [ float ]): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u589e\u52a0\u4ee5`x`,`y`\u8868\u793a\u7684\u4e00\u6761\u76f4\u7ebf Args: trace_name : \u56fe\u4f8b\u540d\u79f0 x : x\u8f74\u5750\u6807\uff0c\u6240\u6709\u7684x\u503c\u90fd\u5fc5\u987b\u5c5e\u4e8e[0, len(self.bars)] y : y\u503c \"\"\" line = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"lines\" , name = trace_name ) self . 
main_traces [ trace_name ] = line def mark_support_resist_lines ( self , upthres : float = None , downthres : float = None , use_close = True , win = 60 ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u652f\u6491\u7ebf\u548c\u538b\u529b\u7ebf \u5728`win`\u4e2ak\u7ebf\u5185\uff0c\u627e\u51fa\u6240\u6709\u7684\u5c40\u90e8\u5cf0\u8c37\u70b9\uff0c\u5e76\u4ee5\u6700\u9ad8\u7684\u4e24\u4e2a\u5cf0\u8fde\u7ebf\u751f\u6210\u538b\u529b\u7ebf\uff0c\u4ee5\u6700\u4f4e\u7684\u4e24\u4e2a\u8c37\u8fde\u7ebf\u751f\u6210\u652f\u6491\u7ebf\u3002 Args: upthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys` downthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys`. use_close : \u662f\u5426\u4f7f\u7528\u6536\u76d8\u4ef7\u6765\u8fdb\u884c\u68c0\u6d4b\u3002\u5982\u679c\u4e3aFalse\uff0c\u5219\u4f7f\u7528high\u6765\u68c0\u6d4b\u538b\u529b\u7ebf\uff0c\u4f7f\u7528low\u6765\u68c0\u6d4b\u652f\u6491\u7ebf. win : \u68c0\u6d4b\u5c40\u90e8\u9ad8\u4f4e\u70b9\u7684\u7a97\u53e3. \"\"\" bars = self . bars [ - win :] clipped = len ( self . bars ) - win if use_close : support , resist , x_start = support_resist_lines ( self . _bar_close , upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) else : # \u4f7f\u7528\"high\"\u548c\"low\" bars = self . bars [ - win :] support , _ , x_start = support_resist_lines ( bars [ \"low\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) _ , resist , x_start = support_resist_lines ( bars [ \"high\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) def mark_bbox ( self , min_size : int = 20 ): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u68c0\u6d4b\u5e76\u6807\u6ce8\u77e9\u5f62\u6846 Args: min_size : \u77e9\u5f62\u6846\u7684\u6700\u5c0f\u957f\u5ea6 \"\"\" boxes = plateaus ( self . _bar_close , min_size ) self . add_main_trace ( \"bbox\" , boxes = boxes ) def mark_backtest_result ( self , result : dict ): \"\"\"\u6807\u8bb0\u4e70\u5356\u70b9\u548c\u56de\u6d4b\u6570\u636e TODO: \u6b64\u65b9\u6cd5\u53ef\u80fd\u672a\u4e0ebacktest\u8fd4\u56de\u503c\u540c\u6b65\u3002\u6b64\u5916\uff0c\u5728portofolio\u56de\u6d4b\u4e2d\uff0c\u4e0d\u53ef\u80fd\u5728k\u7ebf\u56fe\u4e2d\u4f7f\u7528\u6b64\u65b9\u6cd5\u3002 Args: points : \u4e70\u5356\u70b9\u7684\u5750\u6807\u3002 \"\"\" trades = result . get ( \"trades\" ) assets = result . get ( \"assets\" ) x , y , labels = [], [], [] hover = [] labels_color = defaultdict ( list ) for trade in trades : trade_date = arrow . get ( trade [ \"time\" ]) . date () asset = assets . get ( trade_date ) security = trade [ \"security\" ] price = trade [ \"price\" ] volume = trade [ \"volume\" ] side = trade [ \"order_side\" ] x . append ( self . _format_tick ( trade_date )) bar = self . bars [ self . bars [ \"frame\" ] == trade_date ] if side == \"\u4e70\u5165\" : hover . append ( f \"\u603b\u8d44\u4ea7: { asset }

    { side } : { security } \u4e70\u5165\u4ef7: { price } \u80a1\u6570: { volume } \" ) y . append ( bar [ \"high\" ][ 0 ] * 1.1 ) labels . append ( \"B\" ) labels_color [ \"color\" ] . append ( self . RED ) else : y . append ( bar [ \"low\" ][ 0 ] * 0.99 ) hover . append ( f \"\u603b\u8d44\u4ea7: { asset } { side } : { security } \u5356\u51fa\u4ef7: { price } \u80a1\u6570: { volume } \" ) labels . append ( \"S\" ) labels_color [ \"color\" ] . append ( self . GREEN ) labels_color . append ( self . GREEN ) # txt.append(f'{side}:{security} \u5356\u51fa\u4ef7:{price}
    \u80a1\u6570:{volume}') trace = go . Scatter ( x = x , y = y , mode = \"text\" , text = labels , name = \"backtest\" , hovertext = hover , textfont = labels_color , ) self . main_traces [ \"bs\" ] = trace def mark_peaks_and_valleys ( self , up_thres : Optional [ float ] = None , down_thres : Optional [ float ] = None ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u5cf0\u8c37\u70b9 Args: up_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] down_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] \"\"\" bars = self . bars flags = peaks_and_valleys ( self . _bar_close , up_thres , down_thres ) # \u79fb\u9664\u9996\u5c3e\u7684\u9876\u5e95\u6807\u8bb0\uff0c\u4e00\u822c\u60c5\u51b5\u4e0b\u5b83\u4eec\u90fd\u4e0d\u662f\u771f\u6b63\u7684\u9876\u548c\u5e95\u3002 flags [ 0 ] = 0 flags [ - 1 ] = 0 marker_margin = ( max ( bars [ \"high\" ]) - min ( bars [ \"low\" ])) * 0.05 ticks_up = self . ticks [ flags == 1 ] y_up = bars [ \"high\" ][ flags == 1 ] + marker_margin ticks_down = self . ticks [ flags == - 1 ] y_down = bars [ \"low\" ][ flags == - 1 ] - marker_margin trace = go . Scatter ( mode = \"markers\" , x = ticks_up , y = y_up , marker_symbol = \"triangle-down\" , name = \"\u5cf0\" ) self . main_traces [ \"peaks\" ] = trace trace = go . Scatter ( mode = \"markers\" , x = ticks_down , y = y_down , marker_symbol = \"triangle-up\" , name = \"\u8c37\" , ) self . main_traces [ \"valleys\" ] = trace def add_bounding_box ( self , boxes : List [ Tuple ]): \"\"\"bbox\u662f\u6807\u8bb0\u5728k\u7ebf\u56fe\u4e0a\u67d0\u4e2a\u533a\u95f4\u5185\u7684\u77e9\u5f62\u6846\uff0c\u5b83\u4ee5\u8be5\u533a\u95f4\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\u4e3a\u4e0a\u4e0b\u8fb9\u3002 Args: boxes: \u6bcf\u4e2a\u5143\u7d20(start, width)\u8868\u793a\u5404\u4e2abbox\u7684\u8d77\u70b9\u548c\u5bbd\u5ea6\u3002 \"\"\" for j , box in enumerate ( boxes ): x , y = [], [] i , width = box if len ( x ): x . append ( None ) y . append ( None ) group = self . bars [ i : i + width ] mean = np . mean ( group [ \"close\" ]) std = 2 * np . std ( group [ \"close\" ]) # \u843d\u5728\u4e24\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u5b9e\u4f53\u6700\u4e0a\u65b9\u548c\u6700\u4e0b\u65b9\u503c hc = np . max ( group [ group [ \"close\" ] < mean + std ][ \"close\" ]) lc = np . min ( group [ group [ \"close\" ] > mean - std ][ \"close\" ]) ho = np . max ( group [ group [ \"open\" ] < mean + std ][ \"open\" ]) lo = np . min ( group [ group [ \"open\" ] > mean - std ][ \"open\" ]) h = max ( hc , ho ) low = min ( lo , lc ) x . extend ( self . ticks [[ i , i + width - 1 , i + width - 1 , i , i ]]) y . extend (( h , h , low , low , h )) hover = f \"\u5bbd\u5ea6: { width }
    \u632f\u5e45: { h / low - 1 : .2% } \" trace = go . Scatter ( x = x , y = y , fill = \"toself\" , name = f \"\u5e73\u53f0\u6574\u7406 { j } \" , text = hover ) self . main_traces [ f \"bbox- { j } \" ] = trace def add_indicator ( self , indicator : str , ** kwargs ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6280\u672f\u6307\u6807 Args: indicator: \u5f53\u524d\u652f\u6301\u503c\u6709'volume', 'rsi', 'bbands' kwargs: \u8ba1\u7b97\u67d0\u4e2aindicator\u65f6\uff0c\u9700\u8981\u7684\u53c2\u6570\u3002\u6bd4\u5982\u8ba1\u7b97bbands\u65f6\uff0c\u9700\u8981\u4f20\u5165\u5747\u7ebf\u7684window \"\"\" if indicator == \"volume\" : colors = np . repeat ( self . RED , len ( self . bars )) colors [ self . bars [ \"close\" ] <= self . bars [ \"open\" ]] = self . GREEN trace = go . Bar ( x = self . ticks , y = self . bars [ \"volume\" ], showlegend = False , marker = { \"color\" : colors }, ) elif indicator == \"rsi\" : win = kwargs . get ( \"win\" ) rsi = talib . RSI ( self . _bar_close , win ) # type: ignore trace = go . Scatter ( x = self . ticks , y = rsi , showlegend = False ) elif indicator == \"bbands\" : self . _remove_ma () win = kwargs . get ( \"win\" ) for name , ind in zip ( [ \"bbands-high\" , \"bbands-mean\" , \"bbands-low\" ], talib . BBANDS ( self . _bar_close , win ), # type: ignore ): trace = go . Scatter ( x = self . ticks , y = ind , showlegend = True , name = name ) self . main_traces [ name ] = trace return else : raise ValueError ( f \" { indicator } not supported\" ) self . ind_traces [ indicator ] = trace def add_marks ( self , x : List [ int ], y : List [ float ], name : str , marker : str = \"cross\" , color : Optional [ str ] = None , ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6807\u8bb0\u70b9\"\"\" trace = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"markers\" , marker_symbol = marker , marker_color = color , name = name , ) self . main_traces [ name ] = trace def plot ( self ): \"\"\"\u7ed8\u5236\u56fe\u8868\"\"\" fig = self . figure fig . show () figure property readonly \u00b6 \u8fd4\u56de\u4e00\u4e2afigure\u5bf9\u8c61 __init__ ( self , bars , ma_groups = None , title = None , show_volume = True , show_rsi = True , show_peaks = False , width = None , height = None , ** kwargs ) special \u00b6 \u6784\u9020\u51fd\u6570 Parameters: Name Type Description Default bars ndarray \u884c\u60c5\u6570\u636e required ma_groups List[int] \u5747\u7ebf\u7ec4\u53c2\u6570\u3002\u6bd4\u5982[5, 10, 20]\u8868\u660e\u5411k\u7ebf\u56fe\u4e2d\u6dfb\u52a05, 10, 20\u65e5\u5747\u7ebf\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4ece\u6570\u7ec4[5, 10, 20, 30, 60, 120, 250]\u4e2d\u53d6\u76f4\u5230\u4e0e len(bars) - 5 \u5339\u914d\u7684\u53c2\u6570\u4e3a\u6b62\u3002\u6bd4\u5982bars\u957f\u5ea6\u4e3a30\uff0c\u5219\u5c06\u53d6[5, 10, 20]\u6765\u7ed8\u5236\u5747\u7ebf\u3002 None title str k\u7ebf\u56fe\u7684\u6807\u9898 None show_volume \u662f\u5426\u663e\u793a\u6210\u4ea4\u91cf\u56fe True show_rsi \u662f\u5426\u663e\u793aRSI\u56fe\u3002\u7f3a\u7701\u663e\u793a\u53c2\u6570\u4e3a6\u7684RSI\u56fe\u3002 True show_peaks \u662f\u5426\u6807\u8bb0\u68c0\u6d4b\u51fa\u6765\u7684\u5cf0\u8ddf\u8c37\u3002 False width the width in 'px' units of the figure None height the height in 'px' units of the figure None Keyword arguments: Name Type Description rsi_win int default is 6 Source code in omicron/plotting/candlestick.py def __init__ ( self , bars : np . 
ndarray , ma_groups : List [ int ] = None , title : str = None , show_volume = True , show_rsi = True , show_peaks = False , width = None , height = None , ** kwargs , ): \"\"\"\u6784\u9020\u51fd\u6570 Args: bars: \u884c\u60c5\u6570\u636e ma_groups: \u5747\u7ebf\u7ec4\u53c2\u6570\u3002\u6bd4\u5982[5, 10, 20]\u8868\u660e\u5411k\u7ebf\u56fe\u4e2d\u6dfb\u52a05, 10, 20\u65e5\u5747\u7ebf\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4ece\u6570\u7ec4[5, 10, 20, 30, 60, 120, 250]\u4e2d\u53d6\u76f4\u5230\u4e0e`len(bars) - 5`\u5339\u914d\u7684\u53c2\u6570\u4e3a\u6b62\u3002\u6bd4\u5982bars\u957f\u5ea6\u4e3a30\uff0c\u5219\u5c06\u53d6[5, 10, 20]\u6765\u7ed8\u5236\u5747\u7ebf\u3002 title: k\u7ebf\u56fe\u7684\u6807\u9898 show_volume: \u662f\u5426\u663e\u793a\u6210\u4ea4\u91cf\u56fe show_rsi: \u662f\u5426\u663e\u793aRSI\u56fe\u3002\u7f3a\u7701\u663e\u793a\u53c2\u6570\u4e3a6\u7684RSI\u56fe\u3002 show_peaks: \u662f\u5426\u6807\u8bb0\u68c0\u6d4b\u51fa\u6765\u7684\u5cf0\u8ddf\u8c37\u3002 width: the width in 'px' units of the figure height: the height in 'px' units of the figure Keyword Args: rsi_win int: default is 6 \"\"\" self . title = title self . bars = bars self . width = width self . height = height # traces for main area self . main_traces = {} # traces for indicator area self . ind_traces = {} self . ticks = self . _format_tick ( bars [ \"frame\" ]) self . _bar_close = array_math_round ( bars [ \"close\" ], 2 ) . astype ( np . float64 ) # for every candlestick, it must contain a candlestick plot cs = go . Candlestick ( x = self . ticks , open = bars [ \"open\" ], high = bars [ \"high\" ], low = bars [ \"low\" ], close = self . _bar_close , line = dict ({ \"width\" : 1 }), name = \"K\u7ebf\" , ** kwargs , ) # Set line and fill colors cs . increasing . fillcolor = \"rgba(255,255,255,0.9)\" cs . increasing . line . color = self . RED cs . decreasing . fillcolor = self . GREEN cs . decreasing . line . color = self . GREEN self . main_traces [ \"ohlc\" ] = cs if show_volume : self . add_indicator ( \"volume\" ) if show_peaks : self . add_main_trace ( \"peaks\" ) if show_rsi : self . add_indicator ( \"rsi\" , win = kwargs . get ( \"rsi_win\" , 6 )) # \u589e\u52a0\u5747\u7ebf if ma_groups is None : nbars = len ( bars ) if nbars < 9 : ma_groups = [] else : groups = np . array ([ 5 , 10 , 20 , 30 , 60 , 120 , 250 ]) idx = max ( np . argwhere ( groups < ( nbars - 5 ))) . item () + 1 ma_groups = groups [: idx ] for win in ma_groups : name = f \"ma { win } \" if win > len ( bars ): continue ma = moving_average ( self . _bar_close , win ) line = go . Scatter ( y = ma , x = self . ticks , name = name , line = dict ( width = 1 , color = self . MA_COLORS . get ( win )), ) self . 
main_traces [ name ] = line add_bounding_box ( self , boxes ) \u00b6 bbox\u662f\u6807\u8bb0\u5728k\u7ebf\u56fe\u4e0a\u67d0\u4e2a\u533a\u95f4\u5185\u7684\u77e9\u5f62\u6846\uff0c\u5b83\u4ee5\u8be5\u533a\u95f4\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\u4e3a\u4e0a\u4e0b\u8fb9\u3002 Parameters: Name Type Description Default boxes List[Tuple] \u6bcf\u4e2a\u5143\u7d20(start, width)\u8868\u793a\u5404\u4e2abbox\u7684\u8d77\u70b9\u548c\u5bbd\u5ea6\u3002 required Source code in omicron/plotting/candlestick.py def add_bounding_box ( self , boxes : List [ Tuple ]): \"\"\"bbox\u662f\u6807\u8bb0\u5728k\u7ebf\u56fe\u4e0a\u67d0\u4e2a\u533a\u95f4\u5185\u7684\u77e9\u5f62\u6846\uff0c\u5b83\u4ee5\u8be5\u533a\u95f4\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\u4e3a\u4e0a\u4e0b\u8fb9\u3002 Args: boxes: \u6bcf\u4e2a\u5143\u7d20(start, width)\u8868\u793a\u5404\u4e2abbox\u7684\u8d77\u70b9\u548c\u5bbd\u5ea6\u3002 \"\"\" for j , box in enumerate ( boxes ): x , y = [], [] i , width = box if len ( x ): x . append ( None ) y . append ( None ) group = self . bars [ i : i + width ] mean = np . mean ( group [ \"close\" ]) std = 2 * np . std ( group [ \"close\" ]) # \u843d\u5728\u4e24\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u5b9e\u4f53\u6700\u4e0a\u65b9\u548c\u6700\u4e0b\u65b9\u503c hc = np . max ( group [ group [ \"close\" ] < mean + std ][ \"close\" ]) lc = np . min ( group [ group [ \"close\" ] > mean - std ][ \"close\" ]) ho = np . max ( group [ group [ \"open\" ] < mean + std ][ \"open\" ]) lo = np . min ( group [ group [ \"open\" ] > mean - std ][ \"open\" ]) h = max ( hc , ho ) low = min ( lo , lc ) x . extend ( self . ticks [[ i , i + width - 1 , i + width - 1 , i , i ]]) y . extend (( h , h , low , low , h )) hover = f \"\u5bbd\u5ea6: { width }
    \u632f\u5e45: { h / low - 1 : .2% } \" trace = go . Scatter ( x = x , y = y , fill = \"toself\" , name = f \"\u5e73\u53f0\u6574\u7406 { j } \" , text = hover ) self . main_traces [ f \"bbox- { j } \" ] = trace add_indicator ( self , indicator , ** kwargs ) \u00b6 \u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6280\u672f\u6307\u6807 Parameters: Name Type Description Default indicator str \u5f53\u524d\u652f\u6301\u503c\u6709'volume', 'rsi', 'bbands' required kwargs \u8ba1\u7b97\u67d0\u4e2aindicator\u65f6\uff0c\u9700\u8981\u7684\u53c2\u6570\u3002\u6bd4\u5982\u8ba1\u7b97bbands\u65f6\uff0c\u9700\u8981\u4f20\u5165\u5747\u7ebf\u7684window {} Source code in omicron/plotting/candlestick.py def add_indicator ( self , indicator : str , ** kwargs ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6280\u672f\u6307\u6807 Args: indicator: \u5f53\u524d\u652f\u6301\u503c\u6709'volume', 'rsi', 'bbands' kwargs: \u8ba1\u7b97\u67d0\u4e2aindicator\u65f6\uff0c\u9700\u8981\u7684\u53c2\u6570\u3002\u6bd4\u5982\u8ba1\u7b97bbands\u65f6\uff0c\u9700\u8981\u4f20\u5165\u5747\u7ebf\u7684window \"\"\" if indicator == \"volume\" : colors = np . repeat ( self . RED , len ( self . bars )) colors [ self . bars [ \"close\" ] <= self . bars [ \"open\" ]] = self . GREEN trace = go . Bar ( x = self . ticks , y = self . bars [ \"volume\" ], showlegend = False , marker = { \"color\" : colors }, ) elif indicator == \"rsi\" : win = kwargs . get ( \"win\" ) rsi = talib . RSI ( self . _bar_close , win ) # type: ignore trace = go . Scatter ( x = self . ticks , y = rsi , showlegend = False ) elif indicator == \"bbands\" : self . _remove_ma () win = kwargs . get ( \"win\" ) for name , ind in zip ( [ \"bbands-high\" , \"bbands-mean\" , \"bbands-low\" ], talib . BBANDS ( self . _bar_close , win ), # type: ignore ): trace = go . Scatter ( x = self . ticks , y = ind , showlegend = True , name = name ) self . main_traces [ name ] = trace return else : raise ValueError ( f \" { indicator } not supported\" ) self . ind_traces [ indicator ] = trace add_line ( self , trace_name , x , y ) \u00b6 \u5728k\u7ebf\u56fe\u4e0a\u589e\u52a0\u4ee5 x , y \u8868\u793a\u7684\u4e00\u6761\u76f4\u7ebf Parameters: Name Type Description Default trace_name \u56fe\u4f8b\u540d\u79f0 required x x\u8f74\u5750\u6807\uff0c\u6240\u6709\u7684x\u503c\u90fd\u5fc5\u987b\u5c5e\u4e8e[0, len(self.bars)] required y y\u503c required Source code in omicron/plotting/candlestick.py def add_line ( self , trace_name : str , x : List [ int ], y : List [ float ]): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u589e\u52a0\u4ee5`x`,`y`\u8868\u793a\u7684\u4e00\u6761\u76f4\u7ebf Args: trace_name : \u56fe\u4f8b\u540d\u79f0 x : x\u8f74\u5750\u6807\uff0c\u6240\u6709\u7684x\u503c\u90fd\u5fc5\u987b\u5c5e\u4e8e[0, len(self.bars)] y : y\u503c \"\"\" line = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"lines\" , name = trace_name ) self . 
main_traces [ trace_name ] = line add_main_trace ( self , trace_name , ** kwargs ) \u00b6 add trace to main plot \u652f\u6301\u7684\u56fe\u4f8b\u7c7b\u522b\u6709peaks, bbox\uff08bounding-box), bt(\u56de\u6d4b), support_line, resist_line Parameters: Name Type Description Default trace_name \u56fe\u4f8b\u540d\u79f0 required **kwargs \u5176\u4ed6\u53c2\u6570 {} Source code in omicron/plotting/candlestick.py def add_main_trace ( self , trace_name : str , ** kwargs ): \"\"\"add trace to main plot \u652f\u6301\u7684\u56fe\u4f8b\u7c7b\u522b\u6709peaks, bbox\uff08bounding-box), bt(\u56de\u6d4b), support_line, resist_line Args: trace_name : \u56fe\u4f8b\u540d\u79f0 **kwargs : \u5176\u4ed6\u53c2\u6570 \"\"\" if trace_name == \"peaks\" : self . mark_peaks_and_valleys ( kwargs . get ( \"up_thres\" , 0.03 ), kwargs . get ( \"down_thres\" , - 0.03 ) ) # \u6807\u6ce8\u77e9\u5f62\u6846 elif trace_name == \"bbox\" : self . add_bounding_box ( kwargs . get ( \"boxes\" )) # \u56de\u6d4b\u7ed3\u679c elif trace_name == \"bt\" : self . add_backtest_result ( kwargs . get ( \"bt\" )) # \u589e\u52a0\u76f4\u7ebf elif trace_name == \"support_line\" : self . add_line ( \"\u652f\u6491\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) elif trace_name == \"resist_line\" : self . add_line ( \"\u538b\u529b\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) add_marks ( self , x , y , name , marker = 'cross' , color = None ) \u00b6 \u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6807\u8bb0\u70b9 Source code in omicron/plotting/candlestick.py def add_marks ( self , x : List [ int ], y : List [ float ], name : str , marker : str = \"cross\" , color : Optional [ str ] = None , ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6807\u8bb0\u70b9\"\"\" trace = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"markers\" , marker_symbol = marker , marker_color = color , name = name , ) self . main_traces [ name ] = trace mark_backtest_result ( self , result ) \u00b6 \u6807\u8bb0\u4e70\u5356\u70b9\u548c\u56de\u6d4b\u6570\u636e Todo \u6b64\u65b9\u6cd5\u53ef\u80fd\u672a\u4e0ebacktest\u8fd4\u56de\u503c\u540c\u6b65\u3002\u6b64\u5916\uff0c\u5728portofolio\u56de\u6d4b\u4e2d\uff0c\u4e0d\u53ef\u80fd\u5728k\u7ebf\u56fe\u4e2d\u4f7f\u7528\u6b64\u65b9\u6cd5\u3002 Parameters: Name Type Description Default points \u4e70\u5356\u70b9\u7684\u5750\u6807\u3002 required Source code in omicron/plotting/candlestick.py def mark_backtest_result ( self , result : dict ): \"\"\"\u6807\u8bb0\u4e70\u5356\u70b9\u548c\u56de\u6d4b\u6570\u636e TODO: \u6b64\u65b9\u6cd5\u53ef\u80fd\u672a\u4e0ebacktest\u8fd4\u56de\u503c\u540c\u6b65\u3002\u6b64\u5916\uff0c\u5728portofolio\u56de\u6d4b\u4e2d\uff0c\u4e0d\u53ef\u80fd\u5728k\u7ebf\u56fe\u4e2d\u4f7f\u7528\u6b64\u65b9\u6cd5\u3002 Args: points : \u4e70\u5356\u70b9\u7684\u5750\u6807\u3002 \"\"\" trades = result . get ( \"trades\" ) assets = result . get ( \"assets\" ) x , y , labels = [], [], [] hover = [] labels_color = defaultdict ( list ) for trade in trades : trade_date = arrow . get ( trade [ \"time\" ]) . date () asset = assets . get ( trade_date ) security = trade [ \"security\" ] price = trade [ \"price\" ] volume = trade [ \"volume\" ] side = trade [ \"order_side\" ] x . append ( self . _format_tick ( trade_date )) bar = self . bars [ self . bars [ \"frame\" ] == trade_date ] if side == \"\u4e70\u5165\" : hover . append ( f \"\u603b\u8d44\u4ea7: { asset }

    { side } : { security } \u4e70\u5165\u4ef7: { price } \u80a1\u6570: { volume } \" ) y . append ( bar [ \"high\" ][ 0 ] * 1.1 ) labels . append ( \"B\" ) labels_color [ \"color\" ] . append ( self . RED ) else : y . append ( bar [ \"low\" ][ 0 ] * 0.99 ) hover . append ( f \"\u603b\u8d44\u4ea7: { asset } { side } : { security } \u5356\u51fa\u4ef7: { price } \u80a1\u6570: { volume } \" ) labels . append ( \"S\" ) labels_color [ \"color\" ] . append ( self . GREEN ) labels_color . append ( self . GREEN ) # txt.append(f'{side}:{security} \u5356\u51fa\u4ef7:{price}
    \u80a1\u6570:{volume}') trace = go . Scatter ( x = x , y = y , mode = \"text\" , text = labels , name = \"backtest\" , hovertext = hover , textfont = labels_color , ) self . main_traces [ \"bs\" ] = trace mark_bbox ( self , min_size = 20 ) \u00b6 \u5728k\u7ebf\u56fe\u4e0a\u68c0\u6d4b\u5e76\u6807\u6ce8\u77e9\u5f62\u6846 Parameters: Name Type Description Default min_size \u77e9\u5f62\u6846\u7684\u6700\u5c0f\u957f\u5ea6 20 Source code in omicron/plotting/candlestick.py def mark_bbox ( self , min_size : int = 20 ): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u68c0\u6d4b\u5e76\u6807\u6ce8\u77e9\u5f62\u6846 Args: min_size : \u77e9\u5f62\u6846\u7684\u6700\u5c0f\u957f\u5ea6 \"\"\" boxes = plateaus ( self . _bar_close , min_size ) self . add_main_trace ( \"bbox\" , boxes = boxes ) mark_peaks_and_valleys ( self , up_thres = None , down_thres = None ) \u00b6 \u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u5cf0\u8c37\u70b9 Parameters: Name Type Description Default up_thres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys None down_thres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys None Source code in omicron/plotting/candlestick.py def mark_peaks_and_valleys ( self , up_thres : Optional [ float ] = None , down_thres : Optional [ float ] = None ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u5cf0\u8c37\u70b9 Args: up_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] down_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] \"\"\" bars = self . bars flags = peaks_and_valleys ( self . _bar_close , up_thres , down_thres ) # \u79fb\u9664\u9996\u5c3e\u7684\u9876\u5e95\u6807\u8bb0\uff0c\u4e00\u822c\u60c5\u51b5\u4e0b\u5b83\u4eec\u90fd\u4e0d\u662f\u771f\u6b63\u7684\u9876\u548c\u5e95\u3002 flags [ 0 ] = 0 flags [ - 1 ] = 0 marker_margin = ( max ( bars [ \"high\" ]) - min ( bars [ \"low\" ])) * 0.05 ticks_up = self . ticks [ flags == 1 ] y_up = bars [ \"high\" ][ flags == 1 ] + marker_margin ticks_down = self . ticks [ flags == - 1 ] y_down = bars [ \"low\" ][ flags == - 1 ] - marker_margin trace = go . Scatter ( mode = \"markers\" , x = ticks_up , y = y_up , marker_symbol = \"triangle-down\" , name = \"\u5cf0\" ) self . main_traces [ \"peaks\" ] = trace trace = go . Scatter ( mode = \"markers\" , x = ticks_down , y = y_down , marker_symbol = \"triangle-up\" , name = \"\u8c37\" , ) self . main_traces [ \"valleys\" ] = trace mark_support_resist_lines ( self , upthres = None , downthres = None , use_close = True , win = 60 ) \u00b6 \u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u652f\u6491\u7ebf\u548c\u538b\u529b\u7ebf \u5728 win \u4e2ak\u7ebf\u5185\uff0c\u627e\u51fa\u6240\u6709\u7684\u5c40\u90e8\u5cf0\u8c37\u70b9\uff0c\u5e76\u4ee5\u6700\u9ad8\u7684\u4e24\u4e2a\u5cf0\u8fde\u7ebf\u751f\u6210\u538b\u529b\u7ebf\uff0c\u4ee5\u6700\u4f4e\u7684\u4e24\u4e2a\u8c37\u8fde\u7ebf\u751f\u6210\u652f\u6491\u7ebf\u3002 Parameters: Name Type Description Default upthres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys None downthres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys . 
None use_close \u662f\u5426\u4f7f\u7528\u6536\u76d8\u4ef7\u6765\u8fdb\u884c\u68c0\u6d4b\u3002\u5982\u679c\u4e3aFalse\uff0c\u5219\u4f7f\u7528high\u6765\u68c0\u6d4b\u538b\u529b\u7ebf\uff0c\u4f7f\u7528low\u6765\u68c0\u6d4b\u652f\u6491\u7ebf. True win \u68c0\u6d4b\u5c40\u90e8\u9ad8\u4f4e\u70b9\u7684\u7a97\u53e3. 60 Source code in omicron/plotting/candlestick.py def mark_support_resist_lines ( self , upthres : float = None , downthres : float = None , use_close = True , win = 60 ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u652f\u6491\u7ebf\u548c\u538b\u529b\u7ebf \u5728`win`\u4e2ak\u7ebf\u5185\uff0c\u627e\u51fa\u6240\u6709\u7684\u5c40\u90e8\u5cf0\u8c37\u70b9\uff0c\u5e76\u4ee5\u6700\u9ad8\u7684\u4e24\u4e2a\u5cf0\u8fde\u7ebf\u751f\u6210\u538b\u529b\u7ebf\uff0c\u4ee5\u6700\u4f4e\u7684\u4e24\u4e2a\u8c37\u8fde\u7ebf\u751f\u6210\u652f\u6491\u7ebf\u3002 Args: upthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys` downthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys`. use_close : \u662f\u5426\u4f7f\u7528\u6536\u76d8\u4ef7\u6765\u8fdb\u884c\u68c0\u6d4b\u3002\u5982\u679c\u4e3aFalse\uff0c\u5219\u4f7f\u7528high\u6765\u68c0\u6d4b\u538b\u529b\u7ebf\uff0c\u4f7f\u7528low\u6765\u68c0\u6d4b\u652f\u6491\u7ebf. win : \u68c0\u6d4b\u5c40\u90e8\u9ad8\u4f4e\u70b9\u7684\u7a97\u53e3. \"\"\" bars = self . bars [ - win :] clipped = len ( self . bars ) - win if use_close : support , resist , x_start = support_resist_lines ( self . _bar_close , upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) else : # \u4f7f\u7528\"high\"\u548c\"low\" bars = self . bars [ - win :] support , _ , x_start = support_resist_lines ( bars [ \"low\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) _ , resist , x_start = support_resist_lines ( bars [ \"high\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) plot ( self ) \u00b6 \u7ed8\u5236\u56fe\u8868 Source code in omicron/plotting/candlestick.py def plot ( self ): \"\"\"\u7ed8\u5236\u56fe\u8868\"\"\" fig = self . figure fig . show ()","title":"CandleStick"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick--\u7528\u6cd5\u793a\u4f8b","text":"\u6ce8\u610f\u793a\u4f8b\u9700\u8981\u5728notebook\u4e2d\u8fd0\u884c\uff0c\u5426\u5219\u65e0\u6cd5\u751f\u6210\u56fe\u3002 1 2 3 4 5 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars ) cs . plot () \u8fd9\u5c06\u751f\u6210\u4e0b\u56fe\uff1a \u9ed8\u8ba4\u5730\uff0c\u5c06\u663e\u793a\u6210\u4ea4\u91cf\u548cRSI\u6307\u6807\u4e24\u4e2a\u526f\u56fe\u3002\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u65b9\u5f0f\u6765\u5b9a\u5236\uff1a 1 2 3 4 5 cs = Candlestick ( bars , show_volume = True , show_rsi = True , show_peaks = False } cs . plot ()","title":"\u7528\u6cd5\u793a\u4f8b"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick--\u589e\u52a0\u6807\u8bb0","text":"1 2 3 4 5 6 7 8 9 10 11 from omicron.plotting.candlestick import Candlestick bars = await Stock . 
get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars , show_volume = True , show_rsi = False , show_peaks = True ) cs . add_marks ([ 20 , 50 ]) cs . plot () \u8fd9\u5c06\u5728k\u7ebf\u4e0a\u663e\u793a\u4e24\u4e2a\u52a0\u53f7\uff1a","title":"\u589e\u52a0\u6807\u8bb0"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick--\u663e\u793a\u5e03\u6797\u5e26","text":"1 2 3 4 5 6 7 8 9 10 11 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars , show_volume = True , show_rsi = False , show_peaks = True ) cs . add_indicator ( \"bbands\" , 20 ) cs . plot ()","title":"\u663e\u793a\u5e03\u6797\u5e26"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick--\u663e\u793a\u5e73\u53f0","text":"1 2 3 4 5 6 7 8 9 10 11 12 from omicron.plotting.candlestick import Candlestick bars = await Stock . get_bars ( \"000001.XSHE\" , 120 , FrameType . DAY ) cs = Candlestick ( bars , show_volume = True , show_rsi = False , show_peaks = True ) cs . mark_bbox () cs . plot ()","title":"\u663e\u793a\u5e73\u53f0"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick","text":"Source code in omicron/plotting/candlestick.py class Candlestick : RED = \"#FF4136\" GREEN = \"#3DAA70\" TRANSPARENT = \"rgba(0,0,0,0)\" LIGHT_GRAY = \"rgba(0, 0, 0, 0.1)\" MA_COLORS = { 5 : \"#1432F5\" , 10 : \"#EB52F7\" , 20 : \"#C0C0C0\" , 30 : \"#882111\" , 60 : \"#5E8E28\" , 120 : \"#4294F7\" , 250 : \"#F09937\" , } def __init__ ( self , bars : np . ndarray , ma_groups : List [ int ] = None , title : str = None , show_volume = True , show_rsi = True , show_peaks = False , width = None , height = None , ** kwargs , ): \"\"\"\u6784\u9020\u51fd\u6570 Args: bars: \u884c\u60c5\u6570\u636e ma_groups: \u5747\u7ebf\u7ec4\u53c2\u6570\u3002\u6bd4\u5982[5, 10, 20]\u8868\u660e\u5411k\u7ebf\u56fe\u4e2d\u6dfb\u52a05, 10, 20\u65e5\u5747\u7ebf\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4ece\u6570\u7ec4[5, 10, 20, 30, 60, 120, 250]\u4e2d\u53d6\u76f4\u5230\u4e0e`len(bars) - 5`\u5339\u914d\u7684\u53c2\u6570\u4e3a\u6b62\u3002\u6bd4\u5982bars\u957f\u5ea6\u4e3a30\uff0c\u5219\u5c06\u53d6[5, 10, 20]\u6765\u7ed8\u5236\u5747\u7ebf\u3002 title: k\u7ebf\u56fe\u7684\u6807\u9898 show_volume: \u662f\u5426\u663e\u793a\u6210\u4ea4\u91cf\u56fe show_rsi: \u662f\u5426\u663e\u793aRSI\u56fe\u3002\u7f3a\u7701\u663e\u793a\u53c2\u6570\u4e3a6\u7684RSI\u56fe\u3002 show_peaks: \u662f\u5426\u6807\u8bb0\u68c0\u6d4b\u51fa\u6765\u7684\u5cf0\u8ddf\u8c37\u3002 width: the width in 'px' units of the figure height: the height in 'px' units of the figure Keyword Args: rsi_win int: default is 6 \"\"\" self . title = title self . bars = bars self . width = width self . height = height # traces for main area self . main_traces = {} # traces for indicator area self . ind_traces = {} self . ticks = self . _format_tick ( bars [ \"frame\" ]) self . _bar_close = array_math_round ( bars [ \"close\" ], 2 ) . astype ( np . float64 ) # for every candlestick, it must contain a candlestick plot cs = go . Candlestick ( x = self . ticks , open = bars [ \"open\" ], high = bars [ \"high\" ], low = bars [ \"low\" ], close = self . _bar_close , line = dict ({ \"width\" : 1 }), name = \"K\u7ebf\" , ** kwargs , ) # Set line and fill colors cs . increasing . fillcolor = \"rgba(255,255,255,0.9)\" cs . increasing . line . color = self . RED cs . decreasing . fillcolor = self . GREEN cs . decreasing . line . color = self . GREEN self . 
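# A minimal usage sketch assuming a notebook where omicron has been initialized (Stock and FrameType
# imported as in the examples above); it follows the add_marks(x, y, name, ...) signature documented below.
from omicron.plotting.candlestick import Candlestick

bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY)
cs = Candlestick(bars, show_volume=True, show_rsi=False, show_peaks=True)
x = [20, 50]                 # bar offsets to annotate
y = bars["high"][x] * 1.02   # place the markers slightly above the highs
cs.add_marks(x, y, name="signal", marker="cross")
cs.plot()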
main_traces [ \"ohlc\" ] = cs if show_volume : self . add_indicator ( \"volume\" ) if show_peaks : self . add_main_trace ( \"peaks\" ) if show_rsi : self . add_indicator ( \"rsi\" , win = kwargs . get ( \"rsi_win\" , 6 )) # \u589e\u52a0\u5747\u7ebf if ma_groups is None : nbars = len ( bars ) if nbars < 9 : ma_groups = [] else : groups = np . array ([ 5 , 10 , 20 , 30 , 60 , 120 , 250 ]) idx = max ( np . argwhere ( groups < ( nbars - 5 ))) . item () + 1 ma_groups = groups [: idx ] for win in ma_groups : name = f \"ma { win } \" if win > len ( bars ): continue ma = moving_average ( self . _bar_close , win ) line = go . Scatter ( y = ma , x = self . ticks , name = name , line = dict ( width = 1 , color = self . MA_COLORS . get ( win )), ) self . main_traces [ name ] = line @property def figure ( self ): \"\"\"\u8fd4\u56de\u4e00\u4e2afigure\u5bf9\u8c61\"\"\" rows = len ( self . ind_traces ) + 1 specs = [[{ \"secondary_y\" : False }]] * rows specs [ 0 ][ 0 ][ \"secondary_y\" ] = True row_heights = [ 0.7 , * ([ 0.3 / ( rows - 1 )] * ( rows - 1 ))] print ( row_heights ) cols = 1 fig = make_subplots ( rows = rows , cols = cols , shared_xaxes = True , vertical_spacing = 0.1 , subplot_titles = ( self . title , * self . ind_traces . keys ()), row_heights = row_heights , specs = specs , ) for _ , trace in self . main_traces . items (): fig . add_trace ( trace , row = 1 , col = 1 ) for i , ( _ , trace ) in enumerate ( self . ind_traces . items ()): fig . add_trace ( trace , row = i + 2 , col = 1 ) ymin = np . min ( self . bars [ \"low\" ]) ymax = np . max ( self . bars [ \"high\" ]) ylim = [ ymin * 0.95 , ymax * 1.05 ] # \u663e\u793a\u5341\u5b57\u5149\u6807 fig . update_xaxes ( showgrid = False , showspikes = True , spikemode = \"across\" , spikesnap = \"cursor\" , spikecolor = \"grey\" , spikedash = \"solid\" , spikethickness = 1 , ) fig . update_yaxes ( showspikes = True , spikemode = \"across\" , spikesnap = \"cursor\" , spikedash = \"solid\" , spikecolor = \"grey\" , spikethickness = 1 , showgrid = True , gridcolor = self . LIGHT_GRAY , ) fig . update_xaxes ( nticks = len ( self . bars ) // 10 , ticklen = 10 , ticks = \"outside\" , minor = dict ( nticks = 5 , ticklen = 5 , ticks = \"outside\" ), row = rows , col = 1 , ) # \u8bbe\u7f6eK\u7ebf\u663e\u793a\u533a\u57df if self . width : win_size = int ( self . width // 10 ) else : win_size = 120 fig . update_xaxes ( type = \"category\" , range = [ len ( self . bars ) - win_size , len ( self . bars ) - 1 ] ) fig . update_layout ( yaxis = dict ( range = ylim ), hovermode = \"x unified\" , plot_bgcolor = self . TRANSPARENT , xaxis_rangeslider_visible = False , ) if self . width : fig . update_layout ( width = self . width ) if self . height : fig . update_layout ( height = self . height ) return fig def _format_tick ( self , tm : np . array ) -> NDArray : if tm . item ( 0 ) . hour == 0 : # assume it's date return np . array ( [ f \" { x . item () . year : 02 } - { x . item () . month : 02 } - { x . item () . day : 02 } \" for x in tm ] ) else : return np . array ( [ f \" { x . item () . month : 02 } - { x . item () . day : 02 } { x . item () . hour : 02 } : { x . item () . minute : 02 } \" for x in tm ] ) def _remove_ma ( self ): traces = {} for name in self . main_traces : if not name . startswith ( \"ma\" ): traces [ name ] = self . main_traces [ name ] self . 
main_traces = traces def add_main_trace ( self , trace_name : str , ** kwargs ): \"\"\"add trace to main plot \u652f\u6301\u7684\u56fe\u4f8b\u7c7b\u522b\u6709peaks, bbox\uff08bounding-box), bt(\u56de\u6d4b), support_line, resist_line Args: trace_name : \u56fe\u4f8b\u540d\u79f0 **kwargs : \u5176\u4ed6\u53c2\u6570 \"\"\" if trace_name == \"peaks\" : self . mark_peaks_and_valleys ( kwargs . get ( \"up_thres\" , 0.03 ), kwargs . get ( \"down_thres\" , - 0.03 ) ) # \u6807\u6ce8\u77e9\u5f62\u6846 elif trace_name == \"bbox\" : self . add_bounding_box ( kwargs . get ( \"boxes\" )) # \u56de\u6d4b\u7ed3\u679c elif trace_name == \"bt\" : self . add_backtest_result ( kwargs . get ( \"bt\" )) # \u589e\u52a0\u76f4\u7ebf elif trace_name == \"support_line\" : self . add_line ( \"\u652f\u6491\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) elif trace_name == \"resist_line\" : self . add_line ( \"\u538b\u529b\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) def add_line ( self , trace_name : str , x : List [ int ], y : List [ float ]): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u589e\u52a0\u4ee5`x`,`y`\u8868\u793a\u7684\u4e00\u6761\u76f4\u7ebf Args: trace_name : \u56fe\u4f8b\u540d\u79f0 x : x\u8f74\u5750\u6807\uff0c\u6240\u6709\u7684x\u503c\u90fd\u5fc5\u987b\u5c5e\u4e8e[0, len(self.bars)] y : y\u503c \"\"\" line = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"lines\" , name = trace_name ) self . main_traces [ trace_name ] = line def mark_support_resist_lines ( self , upthres : float = None , downthres : float = None , use_close = True , win = 60 ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u652f\u6491\u7ebf\u548c\u538b\u529b\u7ebf \u5728`win`\u4e2ak\u7ebf\u5185\uff0c\u627e\u51fa\u6240\u6709\u7684\u5c40\u90e8\u5cf0\u8c37\u70b9\uff0c\u5e76\u4ee5\u6700\u9ad8\u7684\u4e24\u4e2a\u5cf0\u8fde\u7ebf\u751f\u6210\u538b\u529b\u7ebf\uff0c\u4ee5\u6700\u4f4e\u7684\u4e24\u4e2a\u8c37\u8fde\u7ebf\u751f\u6210\u652f\u6491\u7ebf\u3002 Args: upthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys` downthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys`. use_close : \u662f\u5426\u4f7f\u7528\u6536\u76d8\u4ef7\u6765\u8fdb\u884c\u68c0\u6d4b\u3002\u5982\u679c\u4e3aFalse\uff0c\u5219\u4f7f\u7528high\u6765\u68c0\u6d4b\u538b\u529b\u7ebf\uff0c\u4f7f\u7528low\u6765\u68c0\u6d4b\u652f\u6491\u7ebf. win : \u68c0\u6d4b\u5c40\u90e8\u9ad8\u4f4e\u70b9\u7684\u7a97\u53e3. \"\"\" bars = self . bars [ - win :] clipped = len ( self . bars ) - win if use_close : support , resist , x_start = support_resist_lines ( self . _bar_close , upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) else : # \u4f7f\u7528\"high\"\u548c\"low\" bars = self . bars [ - win :] support , _ , x_start = support_resist_lines ( bars [ \"low\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) _ , resist , x_start = support_resist_lines ( bars [ \"high\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . 
add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) def mark_bbox ( self , min_size : int = 20 ): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u68c0\u6d4b\u5e76\u6807\u6ce8\u77e9\u5f62\u6846 Args: min_size : \u77e9\u5f62\u6846\u7684\u6700\u5c0f\u957f\u5ea6 \"\"\" boxes = plateaus ( self . _bar_close , min_size ) self . add_main_trace ( \"bbox\" , boxes = boxes ) def mark_backtest_result ( self , result : dict ): \"\"\"\u6807\u8bb0\u4e70\u5356\u70b9\u548c\u56de\u6d4b\u6570\u636e TODO: \u6b64\u65b9\u6cd5\u53ef\u80fd\u672a\u4e0ebacktest\u8fd4\u56de\u503c\u540c\u6b65\u3002\u6b64\u5916\uff0c\u5728portofolio\u56de\u6d4b\u4e2d\uff0c\u4e0d\u53ef\u80fd\u5728k\u7ebf\u56fe\u4e2d\u4f7f\u7528\u6b64\u65b9\u6cd5\u3002 Args: points : \u4e70\u5356\u70b9\u7684\u5750\u6807\u3002 \"\"\" trades = result . get ( \"trades\" ) assets = result . get ( \"assets\" ) x , y , labels = [], [], [] hover = [] labels_color = defaultdict ( list ) for trade in trades : trade_date = arrow . get ( trade [ \"time\" ]) . date () asset = assets . get ( trade_date ) security = trade [ \"security\" ] price = trade [ \"price\" ] volume = trade [ \"volume\" ] side = trade [ \"order_side\" ] x . append ( self . _format_tick ( trade_date )) bar = self . bars [ self . bars [ \"frame\" ] == trade_date ] if side == \"\u4e70\u5165\" : hover . append ( f \"\u603b\u8d44\u4ea7: { asset }

<br> { side } : { security } <br>\u4e70\u5165\u4ef7: { price } <br>\u80a1\u6570: { volume } \" ) y . append ( bar [ \"high\" ][ 0 ] * 1.1 ) labels . append ( \"B\" ) labels_color [ \"color\" ] . append ( self . RED ) else : y . append ( bar [ \"low\" ][ 0 ] * 0.99 ) hover . append ( f \"\u603b\u8d44\u4ea7: { asset } <br> { side } : { security } <br>\u5356\u51fa\u4ef7: { price } <br>\u80a1\u6570: { volume } \" ) labels . append ( \"S\" ) labels_color [ \"color\" ] . append ( self . GREEN ) # txt.append(f'{side}:{security}<br>\u5356\u51fa\u4ef7:{price}<br>
    \u80a1\u6570:{volume}') trace = go . Scatter ( x = x , y = y , mode = \"text\" , text = labels , name = \"backtest\" , hovertext = hover , textfont = labels_color , ) self . main_traces [ \"bs\" ] = trace def mark_peaks_and_valleys ( self , up_thres : Optional [ float ] = None , down_thres : Optional [ float ] = None ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u5cf0\u8c37\u70b9 Args: up_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] down_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] \"\"\" bars = self . bars flags = peaks_and_valleys ( self . _bar_close , up_thres , down_thres ) # \u79fb\u9664\u9996\u5c3e\u7684\u9876\u5e95\u6807\u8bb0\uff0c\u4e00\u822c\u60c5\u51b5\u4e0b\u5b83\u4eec\u90fd\u4e0d\u662f\u771f\u6b63\u7684\u9876\u548c\u5e95\u3002 flags [ 0 ] = 0 flags [ - 1 ] = 0 marker_margin = ( max ( bars [ \"high\" ]) - min ( bars [ \"low\" ])) * 0.05 ticks_up = self . ticks [ flags == 1 ] y_up = bars [ \"high\" ][ flags == 1 ] + marker_margin ticks_down = self . ticks [ flags == - 1 ] y_down = bars [ \"low\" ][ flags == - 1 ] - marker_margin trace = go . Scatter ( mode = \"markers\" , x = ticks_up , y = y_up , marker_symbol = \"triangle-down\" , name = \"\u5cf0\" ) self . main_traces [ \"peaks\" ] = trace trace = go . Scatter ( mode = \"markers\" , x = ticks_down , y = y_down , marker_symbol = \"triangle-up\" , name = \"\u8c37\" , ) self . main_traces [ \"valleys\" ] = trace def add_bounding_box ( self , boxes : List [ Tuple ]): \"\"\"bbox\u662f\u6807\u8bb0\u5728k\u7ebf\u56fe\u4e0a\u67d0\u4e2a\u533a\u95f4\u5185\u7684\u77e9\u5f62\u6846\uff0c\u5b83\u4ee5\u8be5\u533a\u95f4\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\u4e3a\u4e0a\u4e0b\u8fb9\u3002 Args: boxes: \u6bcf\u4e2a\u5143\u7d20(start, width)\u8868\u793a\u5404\u4e2abbox\u7684\u8d77\u70b9\u548c\u5bbd\u5ea6\u3002 \"\"\" for j , box in enumerate ( boxes ): x , y = [], [] i , width = box if len ( x ): x . append ( None ) y . append ( None ) group = self . bars [ i : i + width ] mean = np . mean ( group [ \"close\" ]) std = 2 * np . std ( group [ \"close\" ]) # \u843d\u5728\u4e24\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u5b9e\u4f53\u6700\u4e0a\u65b9\u548c\u6700\u4e0b\u65b9\u503c hc = np . max ( group [ group [ \"close\" ] < mean + std ][ \"close\" ]) lc = np . min ( group [ group [ \"close\" ] > mean - std ][ \"close\" ]) ho = np . max ( group [ group [ \"open\" ] < mean + std ][ \"open\" ]) lo = np . min ( group [ group [ \"open\" ] > mean - std ][ \"open\" ]) h = max ( hc , ho ) low = min ( lo , lc ) x . extend ( self . ticks [[ i , i + width - 1 , i + width - 1 , i , i ]]) y . extend (( h , h , low , low , h )) hover = f \"\u5bbd\u5ea6: { width }
    \u632f\u5e45: { h / low - 1 : .2% } \" trace = go . Scatter ( x = x , y = y , fill = \"toself\" , name = f \"\u5e73\u53f0\u6574\u7406 { j } \" , text = hover ) self . main_traces [ f \"bbox- { j } \" ] = trace def add_indicator ( self , indicator : str , ** kwargs ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6280\u672f\u6307\u6807 Args: indicator: \u5f53\u524d\u652f\u6301\u503c\u6709'volume', 'rsi', 'bbands' kwargs: \u8ba1\u7b97\u67d0\u4e2aindicator\u65f6\uff0c\u9700\u8981\u7684\u53c2\u6570\u3002\u6bd4\u5982\u8ba1\u7b97bbands\u65f6\uff0c\u9700\u8981\u4f20\u5165\u5747\u7ebf\u7684window \"\"\" if indicator == \"volume\" : colors = np . repeat ( self . RED , len ( self . bars )) colors [ self . bars [ \"close\" ] <= self . bars [ \"open\" ]] = self . GREEN trace = go . Bar ( x = self . ticks , y = self . bars [ \"volume\" ], showlegend = False , marker = { \"color\" : colors }, ) elif indicator == \"rsi\" : win = kwargs . get ( \"win\" ) rsi = talib . RSI ( self . _bar_close , win ) # type: ignore trace = go . Scatter ( x = self . ticks , y = rsi , showlegend = False ) elif indicator == \"bbands\" : self . _remove_ma () win = kwargs . get ( \"win\" ) for name , ind in zip ( [ \"bbands-high\" , \"bbands-mean\" , \"bbands-low\" ], talib . BBANDS ( self . _bar_close , win ), # type: ignore ): trace = go . Scatter ( x = self . ticks , y = ind , showlegend = True , name = name ) self . main_traces [ name ] = trace return else : raise ValueError ( f \" { indicator } not supported\" ) self . ind_traces [ indicator ] = trace def add_marks ( self , x : List [ int ], y : List [ float ], name : str , marker : str = \"cross\" , color : Optional [ str ] = None , ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6807\u8bb0\u70b9\"\"\" trace = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"markers\" , marker_symbol = marker , marker_color = color , name = name , ) self . main_traces [ name ] = trace def plot ( self ): \"\"\"\u7ed8\u5236\u56fe\u8868\"\"\" fig = self . figure fig . show ()","title":"Candlestick"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.figure","text":"\u8fd4\u56de\u4e00\u4e2afigure\u5bf9\u8c61","title":"figure"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.__init__","text":"\u6784\u9020\u51fd\u6570 Parameters: Name Type Description Default bars ndarray \u884c\u60c5\u6570\u636e required ma_groups List[int] \u5747\u7ebf\u7ec4\u53c2\u6570\u3002\u6bd4\u5982[5, 10, 20]\u8868\u660e\u5411k\u7ebf\u56fe\u4e2d\u6dfb\u52a05, 10, 20\u65e5\u5747\u7ebf\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4ece\u6570\u7ec4[5, 10, 20, 30, 60, 120, 250]\u4e2d\u53d6\u76f4\u5230\u4e0e len(bars) - 5 \u5339\u914d\u7684\u53c2\u6570\u4e3a\u6b62\u3002\u6bd4\u5982bars\u957f\u5ea6\u4e3a30\uff0c\u5219\u5c06\u53d6[5, 10, 20]\u6765\u7ed8\u5236\u5747\u7ebf\u3002 None title str k\u7ebf\u56fe\u7684\u6807\u9898 None show_volume \u662f\u5426\u663e\u793a\u6210\u4ea4\u91cf\u56fe True show_rsi \u662f\u5426\u663e\u793aRSI\u56fe\u3002\u7f3a\u7701\u663e\u793a\u53c2\u6570\u4e3a6\u7684RSI\u56fe\u3002 True show_peaks \u662f\u5426\u6807\u8bb0\u68c0\u6d4b\u51fa\u6765\u7684\u5cf0\u8ddf\u8c37\u3002 False width the width in 'px' units of the figure None height the height in 'px' units of the figure None Keyword arguments: Name Type Description rsi_win int default is 6 Source code in omicron/plotting/candlestick.py def __init__ ( self , bars : np . 
ndarray , ma_groups : List [ int ] = None , title : str = None , show_volume = True , show_rsi = True , show_peaks = False , width = None , height = None , ** kwargs , ): \"\"\"\u6784\u9020\u51fd\u6570 Args: bars: \u884c\u60c5\u6570\u636e ma_groups: \u5747\u7ebf\u7ec4\u53c2\u6570\u3002\u6bd4\u5982[5, 10, 20]\u8868\u660e\u5411k\u7ebf\u56fe\u4e2d\u6dfb\u52a05, 10, 20\u65e5\u5747\u7ebf\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4ece\u6570\u7ec4[5, 10, 20, 30, 60, 120, 250]\u4e2d\u53d6\u76f4\u5230\u4e0e`len(bars) - 5`\u5339\u914d\u7684\u53c2\u6570\u4e3a\u6b62\u3002\u6bd4\u5982bars\u957f\u5ea6\u4e3a30\uff0c\u5219\u5c06\u53d6[5, 10, 20]\u6765\u7ed8\u5236\u5747\u7ebf\u3002 title: k\u7ebf\u56fe\u7684\u6807\u9898 show_volume: \u662f\u5426\u663e\u793a\u6210\u4ea4\u91cf\u56fe show_rsi: \u662f\u5426\u663e\u793aRSI\u56fe\u3002\u7f3a\u7701\u663e\u793a\u53c2\u6570\u4e3a6\u7684RSI\u56fe\u3002 show_peaks: \u662f\u5426\u6807\u8bb0\u68c0\u6d4b\u51fa\u6765\u7684\u5cf0\u8ddf\u8c37\u3002 width: the width in 'px' units of the figure height: the height in 'px' units of the figure Keyword Args: rsi_win int: default is 6 \"\"\" self . title = title self . bars = bars self . width = width self . height = height # traces for main area self . main_traces = {} # traces for indicator area self . ind_traces = {} self . ticks = self . _format_tick ( bars [ \"frame\" ]) self . _bar_close = array_math_round ( bars [ \"close\" ], 2 ) . astype ( np . float64 ) # for every candlestick, it must contain a candlestick plot cs = go . Candlestick ( x = self . ticks , open = bars [ \"open\" ], high = bars [ \"high\" ], low = bars [ \"low\" ], close = self . _bar_close , line = dict ({ \"width\" : 1 }), name = \"K\u7ebf\" , ** kwargs , ) # Set line and fill colors cs . increasing . fillcolor = \"rgba(255,255,255,0.9)\" cs . increasing . line . color = self . RED cs . decreasing . fillcolor = self . GREEN cs . decreasing . line . color = self . GREEN self . main_traces [ \"ohlc\" ] = cs if show_volume : self . add_indicator ( \"volume\" ) if show_peaks : self . add_main_trace ( \"peaks\" ) if show_rsi : self . add_indicator ( \"rsi\" , win = kwargs . get ( \"rsi_win\" , 6 )) # \u589e\u52a0\u5747\u7ebf if ma_groups is None : nbars = len ( bars ) if nbars < 9 : ma_groups = [] else : groups = np . array ([ 5 , 10 , 20 , 30 , 60 , 120 , 250 ]) idx = max ( np . argwhere ( groups < ( nbars - 5 ))) . item () + 1 ma_groups = groups [: idx ] for win in ma_groups : name = f \"ma { win } \" if win > len ( bars ): continue ma = moving_average ( self . _bar_close , win ) line = go . Scatter ( y = ma , x = self . ticks , name = name , line = dict ( width = 1 , color = self . MA_COLORS . get ( win )), ) self . 
main_traces [ name ] = line","title":"__init__()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.add_bounding_box","text":"bbox\u662f\u6807\u8bb0\u5728k\u7ebf\u56fe\u4e0a\u67d0\u4e2a\u533a\u95f4\u5185\u7684\u77e9\u5f62\u6846\uff0c\u5b83\u4ee5\u8be5\u533a\u95f4\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\u4e3a\u4e0a\u4e0b\u8fb9\u3002 Parameters: Name Type Description Default boxes List[Tuple] \u6bcf\u4e2a\u5143\u7d20(start, width)\u8868\u793a\u5404\u4e2abbox\u7684\u8d77\u70b9\u548c\u5bbd\u5ea6\u3002 required Source code in omicron/plotting/candlestick.py def add_bounding_box ( self , boxes : List [ Tuple ]): \"\"\"bbox\u662f\u6807\u8bb0\u5728k\u7ebf\u56fe\u4e0a\u67d0\u4e2a\u533a\u95f4\u5185\u7684\u77e9\u5f62\u6846\uff0c\u5b83\u4ee5\u8be5\u533a\u95f4\u6700\u9ad8\u4ef7\u548c\u6700\u4f4e\u4ef7\u4e3a\u4e0a\u4e0b\u8fb9\u3002 Args: boxes: \u6bcf\u4e2a\u5143\u7d20(start, width)\u8868\u793a\u5404\u4e2abbox\u7684\u8d77\u70b9\u548c\u5bbd\u5ea6\u3002 \"\"\" for j , box in enumerate ( boxes ): x , y = [], [] i , width = box if len ( x ): x . append ( None ) y . append ( None ) group = self . bars [ i : i + width ] mean = np . mean ( group [ \"close\" ]) std = 2 * np . std ( group [ \"close\" ]) # \u843d\u5728\u4e24\u4e2a\u6807\u51c6\u5dee\u4ee5\u5185\u7684\u5b9e\u4f53\u6700\u4e0a\u65b9\u548c\u6700\u4e0b\u65b9\u503c hc = np . max ( group [ group [ \"close\" ] < mean + std ][ \"close\" ]) lc = np . min ( group [ group [ \"close\" ] > mean - std ][ \"close\" ]) ho = np . max ( group [ group [ \"open\" ] < mean + std ][ \"open\" ]) lo = np . min ( group [ group [ \"open\" ] > mean - std ][ \"open\" ]) h = max ( hc , ho ) low = min ( lo , lc ) x . extend ( self . ticks [[ i , i + width - 1 , i + width - 1 , i , i ]]) y . extend (( h , h , low , low , h )) hover = f \"\u5bbd\u5ea6: { width }
    \u632f\u5e45: { h / low - 1 : .2% } \" trace = go . Scatter ( x = x , y = y , fill = \"toself\" , name = f \"\u5e73\u53f0\u6574\u7406 { j } \" , text = hover ) self . main_traces [ f \"bbox- { j } \" ] = trace","title":"add_bounding_box()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.add_indicator","text":"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6280\u672f\u6307\u6807 Parameters: Name Type Description Default indicator str \u5f53\u524d\u652f\u6301\u503c\u6709'volume', 'rsi', 'bbands' required kwargs \u8ba1\u7b97\u67d0\u4e2aindicator\u65f6\uff0c\u9700\u8981\u7684\u53c2\u6570\u3002\u6bd4\u5982\u8ba1\u7b97bbands\u65f6\uff0c\u9700\u8981\u4f20\u5165\u5747\u7ebf\u7684window {} Source code in omicron/plotting/candlestick.py def add_indicator ( self , indicator : str , ** kwargs ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6280\u672f\u6307\u6807 Args: indicator: \u5f53\u524d\u652f\u6301\u503c\u6709'volume', 'rsi', 'bbands' kwargs: \u8ba1\u7b97\u67d0\u4e2aindicator\u65f6\uff0c\u9700\u8981\u7684\u53c2\u6570\u3002\u6bd4\u5982\u8ba1\u7b97bbands\u65f6\uff0c\u9700\u8981\u4f20\u5165\u5747\u7ebf\u7684window \"\"\" if indicator == \"volume\" : colors = np . repeat ( self . RED , len ( self . bars )) colors [ self . bars [ \"close\" ] <= self . bars [ \"open\" ]] = self . GREEN trace = go . Bar ( x = self . ticks , y = self . bars [ \"volume\" ], showlegend = False , marker = { \"color\" : colors }, ) elif indicator == \"rsi\" : win = kwargs . get ( \"win\" ) rsi = talib . RSI ( self . _bar_close , win ) # type: ignore trace = go . Scatter ( x = self . ticks , y = rsi , showlegend = False ) elif indicator == \"bbands\" : self . _remove_ma () win = kwargs . get ( \"win\" ) for name , ind in zip ( [ \"bbands-high\" , \"bbands-mean\" , \"bbands-low\" ], talib . BBANDS ( self . _bar_close , win ), # type: ignore ): trace = go . Scatter ( x = self . ticks , y = ind , showlegend = True , name = name ) self . main_traces [ name ] = trace return else : raise ValueError ( f \" { indicator } not supported\" ) self . ind_traces [ indicator ] = trace","title":"add_indicator()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.add_line","text":"\u5728k\u7ebf\u56fe\u4e0a\u589e\u52a0\u4ee5 x , y \u8868\u793a\u7684\u4e00\u6761\u76f4\u7ebf Parameters: Name Type Description Default trace_name \u56fe\u4f8b\u540d\u79f0 required x x\u8f74\u5750\u6807\uff0c\u6240\u6709\u7684x\u503c\u90fd\u5fc5\u987b\u5c5e\u4e8e[0, len(self.bars)] required y y\u503c required Source code in omicron/plotting/candlestick.py def add_line ( self , trace_name : str , x : List [ int ], y : List [ float ]): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u589e\u52a0\u4ee5`x`,`y`\u8868\u793a\u7684\u4e00\u6761\u76f4\u7ebf Args: trace_name : \u56fe\u4f8b\u540d\u79f0 x : x\u8f74\u5750\u6807\uff0c\u6240\u6709\u7684x\u503c\u90fd\u5fc5\u987b\u5c5e\u4e8e[0, len(self.bars)] y : y\u503c \"\"\" line = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"lines\" , name = trace_name ) self . 
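# A minimal sketch of add_indicator() usage, assuming `cs` is a Candlestick built as in the examples above:
cs.add_indicator("volume")             # volume sub-plot, bars colored red/green by close vs. open
cs.add_indicator("rsi", win=14)        # RSI sub-plot with a non-default window
cs.add_indicator("bbands", win=20)     # Bollinger bands on the main plot; replaces the moving-average traces
cs.plot()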
main_traces [ trace_name ] = line","title":"add_line()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.add_main_trace","text":"add trace to main plot \u652f\u6301\u7684\u56fe\u4f8b\u7c7b\u522b\u6709peaks, bbox\uff08bounding-box), bt(\u56de\u6d4b), support_line, resist_line Parameters: Name Type Description Default trace_name \u56fe\u4f8b\u540d\u79f0 required **kwargs \u5176\u4ed6\u53c2\u6570 {} Source code in omicron/plotting/candlestick.py def add_main_trace ( self , trace_name : str , ** kwargs ): \"\"\"add trace to main plot \u652f\u6301\u7684\u56fe\u4f8b\u7c7b\u522b\u6709peaks, bbox\uff08bounding-box), bt(\u56de\u6d4b), support_line, resist_line Args: trace_name : \u56fe\u4f8b\u540d\u79f0 **kwargs : \u5176\u4ed6\u53c2\u6570 \"\"\" if trace_name == \"peaks\" : self . mark_peaks_and_valleys ( kwargs . get ( \"up_thres\" , 0.03 ), kwargs . get ( \"down_thres\" , - 0.03 ) ) # \u6807\u6ce8\u77e9\u5f62\u6846 elif trace_name == \"bbox\" : self . add_bounding_box ( kwargs . get ( \"boxes\" )) # \u56de\u6d4b\u7ed3\u679c elif trace_name == \"bt\" : self . add_backtest_result ( kwargs . get ( \"bt\" )) # \u589e\u52a0\u76f4\u7ebf elif trace_name == \"support_line\" : self . add_line ( \"\u652f\u6491\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" )) elif trace_name == \"resist_line\" : self . add_line ( \"\u538b\u529b\u7ebf\" , kwargs . get ( \"x\" ), kwargs . get ( \"y\" ))","title":"add_main_trace()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.add_marks","text":"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6807\u8bb0\u70b9 Source code in omicron/plotting/candlestick.py def add_marks ( self , x : List [ int ], y : List [ float ], name : str , marker : str = \"cross\" , color : Optional [ str ] = None , ): \"\"\"\u5411k\u7ebf\u56fe\u4e2d\u589e\u52a0\u6807\u8bb0\u70b9\"\"\" trace = go . Scatter ( x = self . ticks [ x ], y = y , mode = \"markers\" , marker_symbol = marker , marker_color = color , name = name , ) self . main_traces [ name ] = trace","title":"add_marks()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.mark_backtest_result","text":"\u6807\u8bb0\u4e70\u5356\u70b9\u548c\u56de\u6d4b\u6570\u636e Todo \u6b64\u65b9\u6cd5\u53ef\u80fd\u672a\u4e0ebacktest\u8fd4\u56de\u503c\u540c\u6b65\u3002\u6b64\u5916\uff0c\u5728portofolio\u56de\u6d4b\u4e2d\uff0c\u4e0d\u53ef\u80fd\u5728k\u7ebf\u56fe\u4e2d\u4f7f\u7528\u6b64\u65b9\u6cd5\u3002 Parameters: Name Type Description Default points \u4e70\u5356\u70b9\u7684\u5750\u6807\u3002 required Source code in omicron/plotting/candlestick.py def mark_backtest_result ( self , result : dict ): \"\"\"\u6807\u8bb0\u4e70\u5356\u70b9\u548c\u56de\u6d4b\u6570\u636e TODO: \u6b64\u65b9\u6cd5\u53ef\u80fd\u672a\u4e0ebacktest\u8fd4\u56de\u503c\u540c\u6b65\u3002\u6b64\u5916\uff0c\u5728portofolio\u56de\u6d4b\u4e2d\uff0c\u4e0d\u53ef\u80fd\u5728k\u7ebf\u56fe\u4e2d\u4f7f\u7528\u6b64\u65b9\u6cd5\u3002 Args: points : \u4e70\u5356\u70b9\u7684\u5750\u6807\u3002 \"\"\" trades = result . get ( \"trades\" ) assets = result . get ( \"assets\" ) x , y , labels = [], [], [] hover = [] labels_color = defaultdict ( list ) for trade in trades : trade_date = arrow . get ( trade [ \"time\" ]) . date () asset = assets . get ( trade_date ) security = trade [ \"security\" ] price = trade [ \"price\" ] volume = trade [ \"volume\" ] side = trade [ \"order_side\" ] x . append ( self . _format_tick ( trade_date )) bar = self . bars [ self . 
bars [ \"frame\" ] == trade_date ] if side == \"\u4e70\u5165\" : hover . append ( f \"\u603b\u8d44\u4ea7: { asset } <br> { side } : { security } <br>\u4e70\u5165\u4ef7: { price } <br>\u80a1\u6570: { volume } \" ) y . append ( bar [ \"high\" ][ 0 ] * 1.1 ) labels . append ( \"B\" ) labels_color [ \"color\" ] . append ( self . RED ) else : y . append ( bar [ \"low\" ][ 0 ] * 0.99 ) hover . append ( f \"\u603b\u8d44\u4ea7: { asset } <br> { side } : { security } <br>\u5356\u51fa\u4ef7: { price } <br>\u80a1\u6570: { volume } \" ) labels . append ( \"S\" ) labels_color [ \"color\" ] . append ( self . GREEN ) # txt.append(f'{side}:{security}<br>\u5356\u51fa\u4ef7:{price}<br>
    \u80a1\u6570:{volume}') trace = go . Scatter ( x = x , y = y , mode = \"text\" , text = labels , name = \"backtest\" , hovertext = hover , textfont = labels_color , ) self . main_traces [ \"bs\" ] = trace","title":"mark_backtest_result()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.mark_bbox","text":"\u5728k\u7ebf\u56fe\u4e0a\u68c0\u6d4b\u5e76\u6807\u6ce8\u77e9\u5f62\u6846 Parameters: Name Type Description Default min_size \u77e9\u5f62\u6846\u7684\u6700\u5c0f\u957f\u5ea6 20 Source code in omicron/plotting/candlestick.py def mark_bbox ( self , min_size : int = 20 ): \"\"\"\u5728k\u7ebf\u56fe\u4e0a\u68c0\u6d4b\u5e76\u6807\u6ce8\u77e9\u5f62\u6846 Args: min_size : \u77e9\u5f62\u6846\u7684\u6700\u5c0f\u957f\u5ea6 \"\"\" boxes = plateaus ( self . _bar_close , min_size ) self . add_main_trace ( \"bbox\" , boxes = boxes )","title":"mark_bbox()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.mark_peaks_and_valleys","text":"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u5cf0\u8c37\u70b9 Parameters: Name Type Description Default up_thres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys None down_thres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys None Source code in omicron/plotting/candlestick.py def mark_peaks_and_valleys ( self , up_thres : Optional [ float ] = None , down_thres : Optional [ float ] = None ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u5cf0\u8c37\u70b9 Args: up_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] down_thres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1[omicron.talib.morph.peaks_and_valleys][] \"\"\" bars = self . bars flags = peaks_and_valleys ( self . _bar_close , up_thres , down_thres ) # \u79fb\u9664\u9996\u5c3e\u7684\u9876\u5e95\u6807\u8bb0\uff0c\u4e00\u822c\u60c5\u51b5\u4e0b\u5b83\u4eec\u90fd\u4e0d\u662f\u771f\u6b63\u7684\u9876\u548c\u5e95\u3002 flags [ 0 ] = 0 flags [ - 1 ] = 0 marker_margin = ( max ( bars [ \"high\" ]) - min ( bars [ \"low\" ])) * 0.05 ticks_up = self . ticks [ flags == 1 ] y_up = bars [ \"high\" ][ flags == 1 ] + marker_margin ticks_down = self . ticks [ flags == - 1 ] y_down = bars [ \"low\" ][ flags == - 1 ] - marker_margin trace = go . Scatter ( mode = \"markers\" , x = ticks_up , y = y_up , marker_symbol = \"triangle-down\" , name = \"\u5cf0\" ) self . main_traces [ \"peaks\" ] = trace trace = go . Scatter ( mode = \"markers\" , x = ticks_down , y = y_down , marker_symbol = \"triangle-up\" , name = \"\u8c37\" , ) self . 
main_traces [ \"valleys\" ] = trace","title":"mark_peaks_and_valleys()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.mark_support_resist_lines","text":"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u652f\u6491\u7ebf\u548c\u538b\u529b\u7ebf \u5728 win \u4e2ak\u7ebf\u5185\uff0c\u627e\u51fa\u6240\u6709\u7684\u5c40\u90e8\u5cf0\u8c37\u70b9\uff0c\u5e76\u4ee5\u6700\u9ad8\u7684\u4e24\u4e2a\u5cf0\u8fde\u7ebf\u751f\u6210\u538b\u529b\u7ebf\uff0c\u4ee5\u6700\u4f4e\u7684\u4e24\u4e2a\u8c37\u8fde\u7ebf\u751f\u6210\u652f\u6491\u7ebf\u3002 Parameters: Name Type Description Default upthres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys None downthres \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1 omicron.talib.morph.peaks_and_valleys . None use_close \u662f\u5426\u4f7f\u7528\u6536\u76d8\u4ef7\u6765\u8fdb\u884c\u68c0\u6d4b\u3002\u5982\u679c\u4e3aFalse\uff0c\u5219\u4f7f\u7528high\u6765\u68c0\u6d4b\u538b\u529b\u7ebf\uff0c\u4f7f\u7528low\u6765\u68c0\u6d4b\u652f\u6491\u7ebf. True win \u68c0\u6d4b\u5c40\u90e8\u9ad8\u4f4e\u70b9\u7684\u7a97\u53e3. 60 Source code in omicron/plotting/candlestick.py def mark_support_resist_lines ( self , upthres : float = None , downthres : float = None , use_close = True , win = 60 ): \"\"\"\u5728K\u7ebf\u56fe\u4e0a\u6807\u6ce8\u652f\u6491\u7ebf\u548c\u538b\u529b\u7ebf \u5728`win`\u4e2ak\u7ebf\u5185\uff0c\u627e\u51fa\u6240\u6709\u7684\u5c40\u90e8\u5cf0\u8c37\u70b9\uff0c\u5e76\u4ee5\u6700\u9ad8\u7684\u4e24\u4e2a\u5cf0\u8fde\u7ebf\u751f\u6210\u538b\u529b\u7ebf\uff0c\u4ee5\u6700\u4f4e\u7684\u4e24\u4e2a\u8c37\u8fde\u7ebf\u751f\u6210\u652f\u6491\u7ebf\u3002 Args: upthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys` downthres : \u7528\u6765\u68c0\u6d4b\u5cf0\u8c37\u65f6\u4f7f\u7528\u7684\u9608\u503c\uff0c\u53c2\u89c1`omicron.talib.morph.peaks_and_valleys`. use_close : \u662f\u5426\u4f7f\u7528\u6536\u76d8\u4ef7\u6765\u8fdb\u884c\u68c0\u6d4b\u3002\u5982\u679c\u4e3aFalse\uff0c\u5219\u4f7f\u7528high\u6765\u68c0\u6d4b\u538b\u529b\u7ebf\uff0c\u4f7f\u7528low\u6765\u68c0\u6d4b\u652f\u6491\u7ebf. win : \u68c0\u6d4b\u5c40\u90e8\u9ad8\u4f4e\u70b9\u7684\u7a97\u53e3. \"\"\" bars = self . bars [ - win :] clipped = len ( self . bars ) - win if use_close : support , resist , x_start = support_resist_lines ( self . _bar_close , upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x )) else : # \u4f7f\u7528\"high\"\u548c\"low\" bars = self . bars [ - win :] support , _ , x_start = support_resist_lines ( bars [ \"low\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"support_line\" , x = x + clipped , y = support ( x )) _ , resist , x_start = support_resist_lines ( bars [ \"high\" ], upthres , downthres ) x = np . arange ( len ( bars ))[ x_start :] self . add_main_trace ( \"resist_line\" , x = x + clipped , y = resist ( x ))","title":"mark_support_resist_lines()"},{"location":"api/plotting/candlestick/#omicron.plotting.candlestick.Candlestick.plot","text":"\u7ed8\u5236\u56fe\u8868 Source code in omicron/plotting/candlestick.py def plot ( self ): \"\"\"\u7ed8\u5236\u56fe\u8868\"\"\" fig = self . figure fig . 
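# A minimal sketch combining the annotation helpers above, assuming `cs` wraps at least 60 daily bars:
cs.mark_peaks_and_valleys(up_thres=0.03, down_thres=-0.03)  # thresholds are passed through to peaks_and_valleys
cs.mark_support_resist_lines(use_close=True, win=60)        # fit support/resistance over the last 60 bars
cs.mark_bbox(min_size=20)                                   # detect and draw plateau bounding boxes
cs.plot()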
show ()","title":"plot()"},{"location":"api/plotting/metrics/","text":"\u7ed8\u5236\u56de\u6d4b\u8d44\u4ea7\u66f2\u7ebf\u548c\u6307\u6807\u56fe\u3002 \u793a\u4f8b: 1 2 3 4 5 6 from omicron.plotting import MetricsGraph # calling some strategy's backtest and get bills/metrics mg = MetricsGraph ( bills , metrics ) await mg . plot () \u6ce8\u610f\u6b64\u65b9\u6cd5\u9700\u8981\u5728notebook\u4e2d\u8c03\u7528\u3002 MetricsGraph \u00b6 Source code in omicron/plotting/metrics.py class MetricsGraph : def __init__ ( self , bills : dict , metrics : dict , baseline_code : str = \"399300.XSHE\" , indicator : Optional [ pd . DataFrame ] = None , ): \"\"\" Args: bills: \u56de\u6d4b\u751f\u6210\u7684\u8d26\u5355\uff0c\u901a\u8fc7Strategy.bills\u83b7\u5f97 metrics: \u56de\u6d4b\u751f\u6210\u7684\u6307\u6807\uff0c\u901a\u8fc7strategy.metrics\u83b7\u5f97 baseline_code: \u57fa\u51c6\u8bc1\u5238\u4ee3\u7801 indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u503c\u5217\u540d\u4e3a\"value\"\u7684pandas.DataFrame\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4e0d\u4f1a\u7ed8\u5236\u6307\u6807\u56fe \"\"\" self . metrics = metrics self . trades = bills [ \"trades\" ] self . positions = bills [ \"positions\" ] self . start = arrow . get ( bills [ \"assets\" ][ 0 ][ 0 ]) . date () self . end = arrow . get ( bills [ \"assets\" ][ - 1 ][ 0 ]) . date () self . frames = [ tf . int2date ( f ) for f in tf . get_frames ( self . start , self . end , FrameType . DAY ) ] if indicator is not None : self . indicator = indicator . join ( pd . Series ( index = self . frames , name = \"frames\" , dtype = np . float64 ), how = \"right\" , ) else : self . indicator = None # \u8bb0\u5f55\u65e5\u671f\u5230\u4e0b\u6807\u7684\u53cd\u5411\u6620\u5c04 self . _frame2pos = { f : i for i , f in enumerate ( self . frames )} self . ticks = self . _format_tick ( self . frames ) # TODO: there's bug in backtesting, temporarily fix here df = pd . DataFrame ( self . frames , columns = [ \"frame\" ]) df [ \"assets\" ] = np . nan assets = pd . DataFrame ( bills [ \"assets\" ], columns = [ \"frame\" , \"assets\" ]) df [ \"assets\" ] = assets [ \"assets\" ] self . assets = df . fillna ( method = \"ffill\" )[ \"assets\" ] . to_numpy () self . nv = self . assets / self . assets [ 0 ] self . baseline_code = baseline_code or \"399300.XSHE\" def _fill_missing_prices ( self , bars : BarsArray , frames : Union [ List , NDArray ]): \"\"\"\u5c06bars\u4e2d\u7f3a\u5931\u503c\u91c7\u7528\u5176\u524d\u503c\u66ff\u6362 \u5f53baseline\u4e3a\u4e2a\u80a1\u65f6\uff0c\u53ef\u80fd\u5b58\u5728\u505c\u724c\u7684\u60c5\u51b5\uff0c\u8fd9\u6837\u5bfc\u81f4\u7531\u6b64\u8ba1\u7b97\u7684\u53c2\u8003\u6536\u76ca\u65e0\u6cd5\u4e0e\u56de\u6d4b\u7684\u8d44\u4ea7\u6536\u76ca\u5bf9\u9f50\uff0c\u56e0\u6b64\u9700\u8981\u8fdb\u884c\u8c03\u6574\u3002 \u51fa\u4e8e\u8fd9\u4e2a\u76ee\u7684\uff0c\u672c\u51fd\u6570\u53ea\u8fd4\u56de\u5904\u7406\u540e\u7684\u6536\u76d8\u4ef7\u3002 Args: bars: \u57fa\u7ebf\u884c\u60c5\u6570\u636e\u3002 frames: \u65e5\u671f\u7d22\u5f15 Returns: \u8865\u5145\u7f3a\u5931\u503c\u540e\u7684\u6536\u76d8\u4ef7\u5e8f\u5217 \"\"\" _close = pd . DataFrame ( { \"close\" : pd . Series ( bars [ \"close\" ], index = bars [ \"frame\" ]), \"frame\" : pd . Series ( np . empty (( len ( frames ),)), index = frames ), } )[ \"close\" ] . 
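# A minimal sketch of the MetricsGraph workflow, assuming a notebook, an already backtested strategy
# object exposing `bills` and `metrics`, and a hypothetical date-indexed pandas Series `my_indicator`:
import pandas as pd
from omicron.plotting import MetricsGraph

indicator = pd.DataFrame({"value": my_indicator})  # must be date-indexed with a "value" column
mg = MetricsGraph(strategy.bills, strategy.metrics, baseline_code="399300.XSHE", indicator=indicator)
await mg.plot()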
to_numpy () # \u8fd9\u91cc\u4f7f\u7528omicron\u4e2d\u7684fill_nan\uff0c\u662f\u56e0\u4e3a\u5982\u679c\u6570\u7ec4\u7684\u7b2c\u4e00\u4e2a\u5143\u7d20\u5373\u4e3aNaN\u7684\u8bdd\uff0c\u90a3\u4e48DataFrame.fillna(method='ffill')\u5c06\u65e0\u6cd5\u5904\u7406\u8fd9\u6837\u7684\u60c5\u51b5(\u4ecd\u7136\u4fdd\u6301\u4e3anan) return fill_nan ( _close ) def _format_tick ( self , frames : Union [ Frame , List [ Frame ]]) -> Union [ str , NDArray ]: if type ( frames ) == datetime . date : x = frames return f \" { x . year : 02 } - { x . month : 02 } - { x . day : 02 } \" elif type ( frames ) == datetime . datetime : x = frames return f \" { x . month : 02 } - { x . day : 02 } { x . hour : 02 } : { x . minute : 02 } \" elif type ( frames [ 0 ]) == datetime . date : # type: ignore return np . array ([ f \" { x . year : 02 } - { x . month : 02 } - { x . day : 02 } \" for x in frames ]) else : return np . array ( [ f \" { x . month : 02 } - { x . day : 02 } { x . hour : 02 } : { x . minute : 02 } \" for x in frames ] # type: ignore ) async def _metrics_trace ( self ): metric_names = { \"start\" : \"\u8d77\u59cb\u65e5\" , \"end\" : \"\u7ed3\u675f\u65e5\" , \"window\" : \"\u8d44\u4ea7\u66b4\u9732\u7a97\u53e3\" , \"total_tx\" : \"\u4ea4\u6613\u6b21\u6570\" , \"total_profit\" : \"\u603b\u5229\u6da6\" , \"total_profit_rate\" : \"\u5229\u6da6\u7387\" , \"win_rate\" : \"\u80dc\u7387\" , \"mean_return\" : \"\u65e5\u5747\u56de\u62a5\" , \"sharpe\" : \"\u590f\u666e\u7387\" , \"max_drawdown\" : \"\u6700\u5927\u56de\u64a4\" , \"annual_return\" : \"\u5e74\u5316\u56de\u62a5\" , \"volatility\" : \"\u6ce2\u52a8\u7387\" , \"sortino\" : \"sortino\" , \"calmar\" : \"calmar\" , } # bug: plotly go.Table.Cells format not work here metric_formatter = { \"start\" : \" {} \" , \"end\" : \" {} \" , \"window\" : \" {} \" , \"total_tx\" : \" {} \" , \"total_profit\" : \" {:.2f} \" , \"total_profit_rate\" : \" {:.2%} \" , \"win_rate\" : \" {:.2%} \" , \"mean_return\" : \" {:.2%} \" , \"sharpe\" : \" {:.2f} \" , \"max_drawdown\" : \" {:.2%} \" , \"annual_return\" : \" {:.2%} \" , \"volatility\" : \" {:.2%} \" , \"sortino\" : \" {:.2f} \" , \"calmar\" : \" {:.2f} \" , } metrics = deepcopy ( self . metrics ) baseline = metrics [ \"baseline\" ] or {} del metrics [ \"baseline\" ] baseline_name = ( await Security . alias ( self . baseline_code ) if self . baseline_code else \"\u57fa\u51c6\" ) metrics_formatted = [] for k in metric_names . keys (): if metrics . get ( k ): metrics_formatted . append ( metric_formatter [ k ] . format ( metrics . get ( k ))) else : metrics_formatted . append ( \"-\" ) baseline_formatted = [] for k in metric_names . keys (): if baseline . get ( k ): baseline_formatted . append ( metric_formatter [ k ] . format ( baseline . get ( k ))) else : baseline_formatted . append ( \"-\" ) return go . Table ( header = dict ( values = [ \"\u6307\u6807\u540d\" , \"\u7b56\u7565\" , baseline_name ]), cells = dict ( values = [ [ v for _ , v in metric_names . items ()], metrics_formatted , baseline_formatted , ], font_size = 10 , ), ) async def _trade_info_trace ( self ): \"\"\"\u6784\u5efahover text \u5e8f\u5217\"\"\" # convert trades into hover_info buys = defaultdict ( list ) sells = defaultdict ( list ) for _ , trade in self . trades . items (): trade_date = arrow . get ( trade [ \"time\" ]) . date () ipos = self . _frame2pos . get ( trade_date ) if ipos is None : logger . warning ( \"date %s in trade record not in backtest range\" , trade_date ) continue name = await Security . 
alias ( trade [ \"security\" ]) price = trade [ \"price\" ] side = trade [ \"order_side\" ] filled = trade [ \"filled\" ] trade_text = f \" { side } : { name } { filled / 100 : .0f } \u624b \u4ef7\u683c: { price : .02f } \u6210\u4ea4\u989d: { filled * price / 10000 : .1f } \u4e07\" if side == \"\u5356\u51fa\" : sells [ trade_date ] . append ( trade_text ) elif side in ( \"\u4e70\u5165\" , \"\u5206\u7ea2\u914d\u80a1\" ): buys [ trade_date ] . append ( trade_text ) X_buy , Y_buy , data_buy = [], [], [] X_sell , Y_sell , data_sell = [], [], [] for dt , text in buys . items (): ipos = self . _frame2pos . get ( dt ) Y_buy . append ( self . nv [ ipos ]) X_buy . append ( self . _format_tick ( dt )) asset = self . assets [ ipos ] hover = f \"\u8d44\u4ea7: { asset / 10000 : .1f } \u4e07
<br> { '<br>' . join ( text ) } \" data_buy . append ( hover ) trace_buy = go . Scatter ( x = X_buy , y = Y_buy , mode = \"markers\" , text = data_buy , name = \"\u4e70\u5165\u6210\u4ea4\" , marker = dict ( color = \"red\" , symbol = \"triangle-up\" ), hovertemplate = \"<br>% {text} \" , ) for dt , text in sells . items (): ipos = self . _frame2pos . get ( dt ) Y_sell . append ( self . nv [ ipos ]) X_sell . append ( self . _format_tick ( dt )) asset = self . assets [ ipos ] hover = f \"\u8d44\u4ea7: { asset / 10000 : .1f } \u4e07<br> { '<br>' . join ( text ) } \" data_sell . append ( hover ) trace_sell = go . Scatter ( x = X_sell , y = Y_sell , mode = \"markers\" , text = data_sell , name = \"\u5356\u51fa\u6210\u4ea4\" , marker = dict ( color = \"green\" , symbol = \"triangle-down\" ), hovertemplate = \"<br>
    % {text} \" , ) return trace_buy , trace_sell async def plot ( self ): \"\"\"\u7ed8\u5236\u8d44\u4ea7\u66f2\u7ebf\u53ca\u56de\u6d4b\u6307\u6807\u56fe\"\"\" n = len ( self . assets ) bars = await Stock . get_bars ( self . baseline_code , n , FrameType . DAY , self . end ) baseline_prices = self . _fill_missing_prices ( bars , self . frames ) baseline_prices /= baseline_prices [ 0 ] fig = make_subplots ( rows = 1 , cols = 2 , shared_xaxes = False , specs = [ [{ \"secondary_y\" : True }, { \"type\" : \"table\" }], ], column_width = [ 0.75 , 0.25 ], horizontal_spacing = 0.01 , subplot_titles = ( \"\u8d44\u4ea7\u66f2\u7ebf\" , \"\u7b56\u7565\u6307\u6807\" ), ) fig . add_trace ( await self . _metrics_trace (), row = 1 , col = 2 ) if self . indicator is not None : indicator_on_hover = self . indicator [ \"value\" ] else : indicator_on_hover = None baseline_name = ( await Security . alias ( self . baseline_code ) if self . baseline_code else \"\u57fa\u51c6\" ) baseline_trace = go . Scatter ( y = baseline_prices , x = self . ticks , mode = \"lines\" , name = baseline_name , showlegend = True , text = indicator_on_hover , hovertemplate = \"
<br>\u51c0\u503c:% {y:.2f} \" + \"<br>\u6307\u6807:% {text:.1f} \" , ) fig . add_trace ( baseline_trace , row = 1 , col = 1 ) nv_trace = go . Scatter ( y = self . nv , x = self . ticks , mode = \"lines\" , name = \"\u7b56\u7565\" , showlegend = True , hovertemplate = \"<br>
    \u51c0\u503c:% {y:.2f} \" , ) fig . add_trace ( nv_trace , row = 1 , col = 1 ) if self . indicator is not None : ind_trace = go . Scatter ( y = self . indicator [ \"value\" ], x = self . ticks , mode = \"lines\" , name = \"indicator\" , showlegend = True , visible = \"legendonly\" , ) fig . add_trace ( ind_trace , row = 1 , col = 1 , secondary_y = True ) for trace in await self . _trade_info_trace (): fig . add_trace ( trace , row = 1 , col = 1 ) fig . update_xaxes ( type = \"category\" , tickangle = 45 , nticks = len ( self . ticks ) // 5 ) fig . update_layout ( margin = dict ( l = 20 , r = 20 , t = 50 , b = 50 ), width = 1040 , height = 435 ) fig . update_layout ( hovermode = \"x unified\" , hoverlabel = dict ( bgcolor = \"rgba(255,255,255,0.8)\" ) ) fig . show () __init__ ( self , bills , metrics , baseline_code = '399300.XSHE' , indicator = None ) special \u00b6 Parameters: Name Type Description Default bills dict \u56de\u6d4b\u751f\u6210\u7684\u8d26\u5355\uff0c\u901a\u8fc7Strategy.bills\u83b7\u5f97 required metrics dict \u56de\u6d4b\u751f\u6210\u7684\u6307\u6807\uff0c\u901a\u8fc7strategy.metrics\u83b7\u5f97 required baseline_code str \u57fa\u51c6\u8bc1\u5238\u4ee3\u7801 '399300.XSHE' indicator Optional[pandas.core.frame.DataFrame] \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u503c\u5217\u540d\u4e3a\"value\"\u7684pandas.DataFrame\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4e0d\u4f1a\u7ed8\u5236\u6307\u6807\u56fe None Source code in omicron/plotting/metrics.py def __init__ ( self , bills : dict , metrics : dict , baseline_code : str = \"399300.XSHE\" , indicator : Optional [ pd . DataFrame ] = None , ): \"\"\" Args: bills: \u56de\u6d4b\u751f\u6210\u7684\u8d26\u5355\uff0c\u901a\u8fc7Strategy.bills\u83b7\u5f97 metrics: \u56de\u6d4b\u751f\u6210\u7684\u6307\u6807\uff0c\u901a\u8fc7strategy.metrics\u83b7\u5f97 baseline_code: \u57fa\u51c6\u8bc1\u5238\u4ee3\u7801 indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u503c\u5217\u540d\u4e3a\"value\"\u7684pandas.DataFrame\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4e0d\u4f1a\u7ed8\u5236\u6307\u6807\u56fe \"\"\" self . metrics = metrics self . trades = bills [ \"trades\" ] self . positions = bills [ \"positions\" ] self . start = arrow . get ( bills [ \"assets\" ][ 0 ][ 0 ]) . date () self . end = arrow . get ( bills [ \"assets\" ][ - 1 ][ 0 ]) . date () self . frames = [ tf . int2date ( f ) for f in tf . get_frames ( self . start , self . end , FrameType . DAY ) ] if indicator is not None : self . indicator = indicator . join ( pd . Series ( index = self . frames , name = \"frames\" , dtype = np . float64 ), how = \"right\" , ) else : self . indicator = None # \u8bb0\u5f55\u65e5\u671f\u5230\u4e0b\u6807\u7684\u53cd\u5411\u6620\u5c04 self . _frame2pos = { f : i for i , f in enumerate ( self . frames )} self . ticks = self . _format_tick ( self . frames ) # TODO: there's bug in backtesting, temporarily fix here df = pd . DataFrame ( self . frames , columns = [ \"frame\" ]) df [ \"assets\" ] = np . nan assets = pd . DataFrame ( bills [ \"assets\" ], columns = [ \"frame\" , \"assets\" ]) df [ \"assets\" ] = assets [ \"assets\" ] self . assets = df . 
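# The constructor forward-fills the per-day asset series so that frames without a bill entry carry the
# previous day's value; a standalone sketch of that idea with hypothetical numbers:
import numpy as np
import pandas as pd

frames = pd.date_range("2023-01-03", periods=5, freq="B")
assets = pd.Series([1.00e6, np.nan, 1.02e6, np.nan, 1.05e6], index=frames)
assets = assets.ffill()  # -> 1.00e6, 1.00e6, 1.02e6, 1.02e6, 1.05e6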
fillna ( method = \"ffill\" )[ \"assets\" ] . to_numpy () self . nv = self . assets / self . assets [ 0 ] self . baseline_code = baseline_code or \"399300.XSHE\" plot ( self ) async \u00b6 \u7ed8\u5236\u8d44\u4ea7\u66f2\u7ebf\u53ca\u56de\u6d4b\u6307\u6807\u56fe Source code in omicron/plotting/metrics.py async def plot ( self ): \"\"\"\u7ed8\u5236\u8d44\u4ea7\u66f2\u7ebf\u53ca\u56de\u6d4b\u6307\u6807\u56fe\"\"\" n = len ( self . assets ) bars = await Stock . get_bars ( self . baseline_code , n , FrameType . DAY , self . end ) baseline_prices = self . _fill_missing_prices ( bars , self . frames ) baseline_prices /= baseline_prices [ 0 ] fig = make_subplots ( rows = 1 , cols = 2 , shared_xaxes = False , specs = [ [{ \"secondary_y\" : True }, { \"type\" : \"table\" }], ], column_width = [ 0.75 , 0.25 ], horizontal_spacing = 0.01 , subplot_titles = ( \"\u8d44\u4ea7\u66f2\u7ebf\" , \"\u7b56\u7565\u6307\u6807\" ), ) fig . add_trace ( await self . _metrics_trace (), row = 1 , col = 2 ) if self . indicator is not None : indicator_on_hover = self . indicator [ \"value\" ] else : indicator_on_hover = None baseline_name = ( await Security . alias ( self . baseline_code ) if self . baseline_code else \"\u57fa\u51c6\" ) baseline_trace = go . Scatter ( y = baseline_prices , x = self . ticks , mode = \"lines\" , name = baseline_name , showlegend = True , text = indicator_on_hover , hovertemplate = \"
<br>\u51c0\u503c:% {y:.2f} \" + \"<br>\u6307\u6807:% {text:.1f} \" , ) fig . add_trace ( baseline_trace , row = 1 , col = 1 ) nv_trace = go . Scatter ( y = self . nv , x = self . ticks , mode = \"lines\" , name = \"\u7b56\u7565\" , showlegend = True , hovertemplate = \"<br>
    \u51c0\u503c:% {y:.2f} \" , ) fig . add_trace ( nv_trace , row = 1 , col = 1 ) if self . indicator is not None : ind_trace = go . Scatter ( y = self . indicator [ \"value\" ], x = self . ticks , mode = \"lines\" , name = \"indicator\" , showlegend = True , visible = \"legendonly\" , ) fig . add_trace ( ind_trace , row = 1 , col = 1 , secondary_y = True ) for trace in await self . _trade_info_trace (): fig . add_trace ( trace , row = 1 , col = 1 ) fig . update_xaxes ( type = \"category\" , tickangle = 45 , nticks = len ( self . ticks ) // 5 ) fig . update_layout ( margin = dict ( l = 20 , r = 20 , t = 50 , b = 50 ), width = 1040 , height = 435 ) fig . update_layout ( hovermode = \"x unified\" , hoverlabel = dict ( bgcolor = \"rgba(255,255,255,0.8)\" ) ) fig . show ()","title":"MetricsGraph"},{"location":"api/plotting/metrics/#omicron.plotting.metrics.MetricsGraph","text":"Source code in omicron/plotting/metrics.py class MetricsGraph : def __init__ ( self , bills : dict , metrics : dict , baseline_code : str = \"399300.XSHE\" , indicator : Optional [ pd . DataFrame ] = None , ): \"\"\" Args: bills: \u56de\u6d4b\u751f\u6210\u7684\u8d26\u5355\uff0c\u901a\u8fc7Strategy.bills\u83b7\u5f97 metrics: \u56de\u6d4b\u751f\u6210\u7684\u6307\u6807\uff0c\u901a\u8fc7strategy.metrics\u83b7\u5f97 baseline_code: \u57fa\u51c6\u8bc1\u5238\u4ee3\u7801 indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u503c\u5217\u540d\u4e3a\"value\"\u7684pandas.DataFrame\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4e0d\u4f1a\u7ed8\u5236\u6307\u6807\u56fe \"\"\" self . metrics = metrics self . trades = bills [ \"trades\" ] self . positions = bills [ \"positions\" ] self . start = arrow . get ( bills [ \"assets\" ][ 0 ][ 0 ]) . date () self . end = arrow . get ( bills [ \"assets\" ][ - 1 ][ 0 ]) . date () self . frames = [ tf . int2date ( f ) for f in tf . get_frames ( self . start , self . end , FrameType . DAY ) ] if indicator is not None : self . indicator = indicator . join ( pd . Series ( index = self . frames , name = \"frames\" , dtype = np . float64 ), how = \"right\" , ) else : self . indicator = None # \u8bb0\u5f55\u65e5\u671f\u5230\u4e0b\u6807\u7684\u53cd\u5411\u6620\u5c04 self . _frame2pos = { f : i for i , f in enumerate ( self . frames )} self . ticks = self . _format_tick ( self . frames ) # TODO: there's bug in backtesting, temporarily fix here df = pd . DataFrame ( self . frames , columns = [ \"frame\" ]) df [ \"assets\" ] = np . nan assets = pd . DataFrame ( bills [ \"assets\" ], columns = [ \"frame\" , \"assets\" ]) df [ \"assets\" ] = assets [ \"assets\" ] self . assets = df . fillna ( method = \"ffill\" )[ \"assets\" ] . to_numpy () self . nv = self . assets / self . assets [ 0 ] self . 
baseline_code = baseline_code or \"399300.XSHE\" def _fill_missing_prices ( self , bars : BarsArray , frames : Union [ List , NDArray ]): \"\"\"\u5c06bars\u4e2d\u7f3a\u5931\u503c\u91c7\u7528\u5176\u524d\u503c\u66ff\u6362 \u5f53baseline\u4e3a\u4e2a\u80a1\u65f6\uff0c\u53ef\u80fd\u5b58\u5728\u505c\u724c\u7684\u60c5\u51b5\uff0c\u8fd9\u6837\u5bfc\u81f4\u7531\u6b64\u8ba1\u7b97\u7684\u53c2\u8003\u6536\u76ca\u65e0\u6cd5\u4e0e\u56de\u6d4b\u7684\u8d44\u4ea7\u6536\u76ca\u5bf9\u9f50\uff0c\u56e0\u6b64\u9700\u8981\u8fdb\u884c\u8c03\u6574\u3002 \u51fa\u4e8e\u8fd9\u4e2a\u76ee\u7684\uff0c\u672c\u51fd\u6570\u53ea\u8fd4\u56de\u5904\u7406\u540e\u7684\u6536\u76d8\u4ef7\u3002 Args: bars: \u57fa\u7ebf\u884c\u60c5\u6570\u636e\u3002 frames: \u65e5\u671f\u7d22\u5f15 Returns: \u8865\u5145\u7f3a\u5931\u503c\u540e\u7684\u6536\u76d8\u4ef7\u5e8f\u5217 \"\"\" _close = pd . DataFrame ( { \"close\" : pd . Series ( bars [ \"close\" ], index = bars [ \"frame\" ]), \"frame\" : pd . Series ( np . empty (( len ( frames ),)), index = frames ), } )[ \"close\" ] . to_numpy () # \u8fd9\u91cc\u4f7f\u7528omicron\u4e2d\u7684fill_nan\uff0c\u662f\u56e0\u4e3a\u5982\u679c\u6570\u7ec4\u7684\u7b2c\u4e00\u4e2a\u5143\u7d20\u5373\u4e3aNaN\u7684\u8bdd\uff0c\u90a3\u4e48DataFrame.fillna(method='ffill')\u5c06\u65e0\u6cd5\u5904\u7406\u8fd9\u6837\u7684\u60c5\u51b5(\u4ecd\u7136\u4fdd\u6301\u4e3anan) return fill_nan ( _close ) def _format_tick ( self , frames : Union [ Frame , List [ Frame ]]) -> Union [ str , NDArray ]: if type ( frames ) == datetime . date : x = frames return f \" { x . year : 02 } - { x . month : 02 } - { x . day : 02 } \" elif type ( frames ) == datetime . datetime : x = frames return f \" { x . month : 02 } - { x . day : 02 } { x . hour : 02 } : { x . minute : 02 } \" elif type ( frames [ 0 ]) == datetime . date : # type: ignore return np . array ([ f \" { x . year : 02 } - { x . month : 02 } - { x . day : 02 } \" for x in frames ]) else : return np . array ( [ f \" { x . month : 02 } - { x . day : 02 } { x . hour : 02 } : { x . minute : 02 } \" for x in frames ] # type: ignore ) async def _metrics_trace ( self ): metric_names = { \"start\" : \"\u8d77\u59cb\u65e5\" , \"end\" : \"\u7ed3\u675f\u65e5\" , \"window\" : \"\u8d44\u4ea7\u66b4\u9732\u7a97\u53e3\" , \"total_tx\" : \"\u4ea4\u6613\u6b21\u6570\" , \"total_profit\" : \"\u603b\u5229\u6da6\" , \"total_profit_rate\" : \"\u5229\u6da6\u7387\" , \"win_rate\" : \"\u80dc\u7387\" , \"mean_return\" : \"\u65e5\u5747\u56de\u62a5\" , \"sharpe\" : \"\u590f\u666e\u7387\" , \"max_drawdown\" : \"\u6700\u5927\u56de\u64a4\" , \"annual_return\" : \"\u5e74\u5316\u56de\u62a5\" , \"volatility\" : \"\u6ce2\u52a8\u7387\" , \"sortino\" : \"sortino\" , \"calmar\" : \"calmar\" , } # bug: plotly go.Table.Cells format not work here metric_formatter = { \"start\" : \" {} \" , \"end\" : \" {} \" , \"window\" : \" {} \" , \"total_tx\" : \" {} \" , \"total_profit\" : \" {:.2f} \" , \"total_profit_rate\" : \" {:.2%} \" , \"win_rate\" : \" {:.2%} \" , \"mean_return\" : \" {:.2%} \" , \"sharpe\" : \" {:.2f} \" , \"max_drawdown\" : \" {:.2%} \" , \"annual_return\" : \" {:.2%} \" , \"volatility\" : \" {:.2%} \" , \"sortino\" : \" {:.2f} \" , \"calmar\" : \" {:.2f} \" , } metrics = deepcopy ( self . metrics ) baseline = metrics [ \"baseline\" ] or {} del metrics [ \"baseline\" ] baseline_name = ( await Security . alias ( self . baseline_code ) if self . baseline_code else \"\u57fa\u51c6\" ) metrics_formatted = [] for k in metric_names . keys (): if metrics . get ( k ): metrics_formatted . 
append ( metric_formatter [ k ] . format ( metrics . get ( k ))) else : metrics_formatted . append ( \"-\" ) baseline_formatted = [] for k in metric_names . keys (): if baseline . get ( k ): baseline_formatted . append ( metric_formatter [ k ] . format ( baseline . get ( k ))) else : baseline_formatted . append ( \"-\" ) return go . Table ( header = dict ( values = [ \"\u6307\u6807\u540d\" , \"\u7b56\u7565\" , baseline_name ]), cells = dict ( values = [ [ v for _ , v in metric_names . items ()], metrics_formatted , baseline_formatted , ], font_size = 10 , ), ) async def _trade_info_trace ( self ): \"\"\"\u6784\u5efahover text \u5e8f\u5217\"\"\" # convert trades into hover_info buys = defaultdict ( list ) sells = defaultdict ( list ) for _ , trade in self . trades . items (): trade_date = arrow . get ( trade [ \"time\" ]) . date () ipos = self . _frame2pos . get ( trade_date ) if ipos is None : logger . warning ( \"date %s in trade record not in backtest range\" , trade_date ) continue name = await Security . alias ( trade [ \"security\" ]) price = trade [ \"price\" ] side = trade [ \"order_side\" ] filled = trade [ \"filled\" ] trade_text = f \" { side } : { name } { filled / 100 : .0f } \u624b \u4ef7\u683c: { price : .02f } \u6210\u4ea4\u989d: { filled * price / 10000 : .1f } \u4e07\" if side == \"\u5356\u51fa\" : sells [ trade_date ] . append ( trade_text ) elif side in ( \"\u4e70\u5165\" , \"\u5206\u7ea2\u914d\u80a1\" ): buys [ trade_date ] . append ( trade_text ) X_buy , Y_buy , data_buy = [], [], [] X_sell , Y_sell , data_sell = [], [], [] for dt , text in buys . items (): ipos = self . _frame2pos . get ( dt ) Y_buy . append ( self . nv [ ipos ]) X_buy . append ( self . _format_tick ( dt )) asset = self . assets [ ipos ] hover = f \"\u8d44\u4ea7: { asset / 10000 : .1f } \u4e07
    { '
    ' . join ( text ) } \" data_buy . append ( hover ) trace_buy = go . Scatter ( x = X_buy , y = Y_buy , mode = \"markers\" , text = data_buy , name = \"\u4e70\u5165\u6210\u4ea4\" , marker = dict ( color = \"red\" , symbol = \"triangle-up\" ), hovertemplate = \"
    % {text} \" , ) for dt , text in sells . items (): ipos = self . _frame2pos . get ( dt ) Y_sell . append ( self . nv [ ipos ]) X_sell . append ( self . _format_tick ( dt )) asset = self . assets [ ipos ] hover = f \"\u8d44\u4ea7: { asset / 10000 : .1f } \u4e07
    { '
    ' . join ( text ) } \" data_sell . append ( hover ) trace_sell = go . Scatter ( x = X_sell , y = Y_sell , mode = \"markers\" , text = data_sell , name = \"\u5356\u51fa\u6210\u4ea4\" , marker = dict ( color = \"green\" , symbol = \"triangle-down\" ), hovertemplate = \"
    % {text} \" , ) return trace_buy , trace_sell async def plot ( self ): \"\"\"\u7ed8\u5236\u8d44\u4ea7\u66f2\u7ebf\u53ca\u56de\u6d4b\u6307\u6807\u56fe\"\"\" n = len ( self . assets ) bars = await Stock . get_bars ( self . baseline_code , n , FrameType . DAY , self . end ) baseline_prices = self . _fill_missing_prices ( bars , self . frames ) baseline_prices /= baseline_prices [ 0 ] fig = make_subplots ( rows = 1 , cols = 2 , shared_xaxes = False , specs = [ [{ \"secondary_y\" : True }, { \"type\" : \"table\" }], ], column_width = [ 0.75 , 0.25 ], horizontal_spacing = 0.01 , subplot_titles = ( \"\u8d44\u4ea7\u66f2\u7ebf\" , \"\u7b56\u7565\u6307\u6807\" ), ) fig . add_trace ( await self . _metrics_trace (), row = 1 , col = 2 ) if self . indicator is not None : indicator_on_hover = self . indicator [ \"value\" ] else : indicator_on_hover = None baseline_name = ( await Security . alias ( self . baseline_code ) if self . baseline_code else \"\u57fa\u51c6\" ) baseline_trace = go . Scatter ( y = baseline_prices , x = self . ticks , mode = \"lines\" , name = baseline_name , showlegend = True , text = indicator_on_hover , hovertemplate = \"
    \u51c0\u503c:% {y:.2f} \" + \"
    \u6307\u6807:% {text:.1f} \" , ) fig . add_trace ( baseline_trace , row = 1 , col = 1 ) nv_trace = go . Scatter ( y = self . nv , x = self . ticks , mode = \"lines\" , name = \"\u7b56\u7565\" , showlegend = True , hovertemplate = \"
    \u51c0\u503c:% {y:.2f} \" , ) fig . add_trace ( nv_trace , row = 1 , col = 1 ) if self . indicator is not None : ind_trace = go . Scatter ( y = self . indicator [ \"value\" ], x = self . ticks , mode = \"lines\" , name = \"indicator\" , showlegend = True , visible = \"legendonly\" , ) fig . add_trace ( ind_trace , row = 1 , col = 1 , secondary_y = True ) for trace in await self . _trade_info_trace (): fig . add_trace ( trace , row = 1 , col = 1 ) fig . update_xaxes ( type = \"category\" , tickangle = 45 , nticks = len ( self . ticks ) // 5 ) fig . update_layout ( margin = dict ( l = 20 , r = 20 , t = 50 , b = 50 ), width = 1040 , height = 435 ) fig . update_layout ( hovermode = \"x unified\" , hoverlabel = dict ( bgcolor = \"rgba(255,255,255,0.8)\" ) ) fig . show ()","title":"MetricsGraph"},{"location":"api/plotting/metrics/#omicron.plotting.metrics.MetricsGraph.__init__","text":"Parameters: Name Type Description Default bills dict \u56de\u6d4b\u751f\u6210\u7684\u8d26\u5355\uff0c\u901a\u8fc7Strategy.bills\u83b7\u5f97 required metrics dict \u56de\u6d4b\u751f\u6210\u7684\u6307\u6807\uff0c\u901a\u8fc7strategy.metrics\u83b7\u5f97 required baseline_code str \u57fa\u51c6\u8bc1\u5238\u4ee3\u7801 '399300.XSHE' indicator Optional[pandas.core.frame.DataFrame] \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u503c\u5217\u540d\u4e3a\"value\"\u7684pandas.DataFrame\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4e0d\u4f1a\u7ed8\u5236\u6307\u6807\u56fe None Source code in omicron/plotting/metrics.py def __init__ ( self , bills : dict , metrics : dict , baseline_code : str = \"399300.XSHE\" , indicator : Optional [ pd . DataFrame ] = None , ): \"\"\" Args: bills: \u56de\u6d4b\u751f\u6210\u7684\u8d26\u5355\uff0c\u901a\u8fc7Strategy.bills\u83b7\u5f97 metrics: \u56de\u6d4b\u751f\u6210\u7684\u6307\u6807\uff0c\u901a\u8fc7strategy.metrics\u83b7\u5f97 baseline_code: \u57fa\u51c6\u8bc1\u5238\u4ee3\u7801 indicator: \u56de\u6d4b\u65f6\u4f7f\u7528\u7684\u6307\u6807\u3002\u5982\u679c\u5b58\u5728\uff0c\u5c06\u53e0\u52a0\u5230\u7b56\u7565\u56de\u6d4b\u56fe\u4e0a\u3002\u5b83\u5e94\u8be5\u662f\u4e00\u4e2a\u4ee5\u65e5\u671f\u4e3a\u7d22\u5f15\uff0c\u6307\u6807\u503c\u5217\u540d\u4e3a\"value\"\u7684pandas.DataFrame\u3002\u5982\u679c\u4e0d\u63d0\u4f9b\uff0c\u5c06\u4e0d\u4f1a\u7ed8\u5236\u6307\u6807\u56fe \"\"\" self . metrics = metrics self . trades = bills [ \"trades\" ] self . positions = bills [ \"positions\" ] self . start = arrow . get ( bills [ \"assets\" ][ 0 ][ 0 ]) . date () self . end = arrow . get ( bills [ \"assets\" ][ - 1 ][ 0 ]) . date () self . frames = [ tf . int2date ( f ) for f in tf . get_frames ( self . start , self . end , FrameType . DAY ) ] if indicator is not None : self . indicator = indicator . join ( pd . Series ( index = self . frames , name = \"frames\" , dtype = np . float64 ), how = \"right\" , ) else : self . indicator = None # \u8bb0\u5f55\u65e5\u671f\u5230\u4e0b\u6807\u7684\u53cd\u5411\u6620\u5c04 self . _frame2pos = { f : i for i , f in enumerate ( self . frames )} self . ticks = self . _format_tick ( self . frames ) # TODO: there's bug in backtesting, temporarily fix here df = pd . DataFrame ( self . frames , columns = [ \"frame\" ]) df [ \"assets\" ] = np . nan assets = pd . DataFrame ( bills [ \"assets\" ], columns = [ \"frame\" , \"assets\" ]) df [ \"assets\" ] = assets [ \"assets\" ] self . 
assets = df . fillna ( method = \"ffill\" )[ \"assets\" ] . to_numpy () self . nv = self . assets / self . assets [ 0 ] self . baseline_code = baseline_code or \"399300.XSHE\"","title":"__init__()"},{"location":"api/plotting/metrics/#omicron.plotting.metrics.MetricsGraph.plot","text":"\u7ed8\u5236\u8d44\u4ea7\u66f2\u7ebf\u53ca\u56de\u6d4b\u6307\u6807\u56fe Source code in omicron/plotting/metrics.py async def plot ( self ): \"\"\"\u7ed8\u5236\u8d44\u4ea7\u66f2\u7ebf\u53ca\u56de\u6d4b\u6307\u6807\u56fe\"\"\" n = len ( self . assets ) bars = await Stock . get_bars ( self . baseline_code , n , FrameType . DAY , self . end ) baseline_prices = self . _fill_missing_prices ( bars , self . frames ) baseline_prices /= baseline_prices [ 0 ] fig = make_subplots ( rows = 1 , cols = 2 , shared_xaxes = False , specs = [ [{ \"secondary_y\" : True }, { \"type\" : \"table\" }], ], column_width = [ 0.75 , 0.25 ], horizontal_spacing = 0.01 , subplot_titles = ( \"\u8d44\u4ea7\u66f2\u7ebf\" , \"\u7b56\u7565\u6307\u6807\" ), ) fig . add_trace ( await self . _metrics_trace (), row = 1 , col = 2 ) if self . indicator is not None : indicator_on_hover = self . indicator [ \"value\" ] else : indicator_on_hover = None baseline_name = ( await Security . alias ( self . baseline_code ) if self . baseline_code else \"\u57fa\u51c6\" ) baseline_trace = go . Scatter ( y = baseline_prices , x = self . ticks , mode = \"lines\" , name = baseline_name , showlegend = True , text = indicator_on_hover , hovertemplate = \"
    \u51c0\u503c:% {y:.2f} \" + \"
    \u6307\u6807:% {text:.1f} \" , ) fig . add_trace ( baseline_trace , row = 1 , col = 1 ) nv_trace = go . Scatter ( y = self . nv , x = self . ticks , mode = \"lines\" , name = \"\u7b56\u7565\" , showlegend = True , hovertemplate = \"
    \u51c0\u503c:% {y:.2f} \" , ) fig . add_trace ( nv_trace , row = 1 , col = 1 ) if self . indicator is not None : ind_trace = go . Scatter ( y = self . indicator [ \"value\" ], x = self . ticks , mode = \"lines\" , name = \"indicator\" , showlegend = True , visible = \"legendonly\" , ) fig . add_trace ( ind_trace , row = 1 , col = 1 , secondary_y = True ) for trace in await self . _trade_info_trace (): fig . add_trace ( trace , row = 1 , col = 1 ) fig . update_xaxes ( type = \"category\" , tickangle = 45 , nticks = len ( self . ticks ) // 5 ) fig . update_layout ( margin = dict ( l = 20 , r = 20 , t = 50 , b = 50 ), width = 1040 , height = 435 ) fig . update_layout ( hovermode = \"x unified\" , hoverlabel = dict ( bgcolor = \"rgba(255,255,255,0.8)\" ) ) fig . show ()","title":"plot()"}]} \ No newline at end of file diff --git a/2.0.0/sitemap.xml b/2.0.0/sitemap.xml new file mode 100644 index 00000000..93dbe17d --- /dev/null +++ b/2.0.0/sitemap.xml @@ -0,0 +1,103 @@ + + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + + None + 2023-11-21 + daily + + \ No newline at end of file diff --git a/2.0.0/sitemap.xml.gz b/2.0.0/sitemap.xml.gz new file mode 100644 index 00000000..164bd567 Binary files /dev/null and b/2.0.0/sitemap.xml.gz differ diff --git a/2.0.0/usage/index.html b/2.0.0/usage/index.html new file mode 100644 index 00000000..c5b4aa30 --- /dev/null +++ b/2.0.0/usage/index.html @@ -0,0 +1,1692 @@ + + + + + + + + + + + + + + + + 使用教程 - Omicron + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Usage Tutorial
1. Configuring, initializing and closing Omicron

Omicron relies on the zillionare-omega service for its data, but it does not talk to the Omega service directly. Instead, it reads the InfluxDB and Redis databases that the Omega server writes to. Before using Omicron you therefore need to supply the connection settings for these two servers and run the initialization.

1.1. Configuration and initialization

Omicron uses cfg4py to manage its configuration.

cfg4py stores configuration items in a yaml file. Before using Omicron, initialize cfg4py somewhere in your code, then initialize omicron:

Tip

For brevity, the snippets below use async/await at the top level. They can usually be run as-is in a notebook; to run them in a plain Python script, wrap them in an async function and execute it with asyncio.run.
In a script:

import asyncio
import cfg4py
import omicron

async def main():
    cfg4py.init('path/to/your/config/dir')
    await omicron.init()
    # do your work with omicron here

asyncio.run(main())

In a notebook:

import cfg4py
import omicron

cfg4py.init('path/to/your/config/dir')
await omicron.init()

Note that when initializing cfg4py you must pass the path of the folder that contains the configuration file, not the path of the file itself. The configuration file must be named defaults.yml.

At a minimum you should configure the Redis connection string and the InfluxDB connection string for omicron. A typical configuration looks like this:
# defaults.yml
redis:
  dsn: redis://${REDIS_HOST}:${REDIS_PORT}

influxdb:
  url: http://${INFLUXDB_HOST}:${INFLUXDB_PORT}
  token: ${INFLUXDB_TOKEN}
  org: ${INFLUXDB_ORG}
  bucket_name: ${INFLUXDB_BUCKET_NAME}
  enable_compress: true
  max_query_size: 150000

notify:
  mail_from: ${MAIL_FROM}
  mail_to:
    - ${MAIL_TO}
  mail_server: ${MAIL_SERVER}
  dingtalk_access_token: ${DINGTALK_ACCESS_TOKEN}
  dingtalk_secret: ${DINGTALK_SECRET}

Adapt the file to your actual environment. In the configuration above, ${REDIS_HOST} refers to an environment variable. On Windows, set it under System > Environment Variables; on Linux or macOS, add it to your .bashrc, for example:

export REDIS_HOST=localhost

1.2. Closing omicron

Before your process exits, remember to close omicron. If you are using omicron in a notebook, you can skip this step.

await omicron.close()

2. Reading data

2.1. The securities list

Security and Query provide the securities list and query operations. Queries are designed as a chained API: you typically call Security.select() to obtain a Query object, apply filters to it, and finally call query.eval() to end the chain and return the result.
2.1.1. Querying all security codes

You can get the securities list for a given day as follows:
# assumes omicron has been initialized
dt = datetime.date(2022, 5, 20)

query = Security.select(dt)
codes = await query.eval()
print(codes)
# the output looks like ["000001.XSHE", "000004.XSHE", ...]

If dt is omitted, the latest securities list is used. In backtesting, however, you usually need the securities list as of different dates, so dt is required in that case; otherwise you would introduce future data.
2.1.2. Returning all stocks or indices

query = Security.select(dt)
codes = await query.types(["stock"]).eval()
print(codes)
2.1.3. Excluding certain kinds of securities

query = Security.select(dt)
codes = await query.exclude_st().exclude_kcb().exclude_cyb().eval()
print(codes)
2.1.4. Keeping only certain kinds of securities

query = Security.select(dt)
codes = await query.only_kcb().only_st().only_cyb().eval()
print(codes)
# returns an empty list
2.1.5. Fuzzy queries by alias

A-share securities generally carry three identifiers: a code (code or symbol), a pinyin abbreviation (name), and a Chinese display name (display_name). For example, 中国平安 has the code 601318.XSHG and the pinyin abbreviation ZGPA; its Chinese name 中国平安 is also referred to as its alias.

To query all stocks whose names begin with 中:
query = Security.select(dt)
codes = await query.alias_like("中").eval()
print(codes)
2.1.6. Looking up details by code

The queries above return a list of codes. To get the details of a security, use the info interface:
dt = datetime.date(2022, 5, 20)
info = await Security.info("688001.XSHG", dt)
print(info)

The output is:

{
    'type': 'stock',
    'display_name': '华兴源创',
    'alias': '华兴源创',
    'end': datetime.date(2200, 1, 1),
    'start': datetime.date(2019, 7, 22),
    'name': 'HXYC'
}

2.2. Trading calendar and time-frame arithmetic

Omicron provides not only the trading calendar but also, compared with other quant frameworks, a rich set of time-related operations. They are documented with examples; see TimeFrame for details.

Throughout omicron you will encounter the concept of a time frame. Market data is organized in slices of fixed length, such as 5 minutes or 1 day, so omicron identifies each slice by the timestamp at which it ends and calls it a frame (Time Frame).

The supported frame types include the intraday frames FrameType.MIN1 and FrameType.MIN5, as well as 15-, 30- and 60-minute frames, plus day-level types such as FrameType.DAY and FrameType.WEEK. See coretypes for the full list of types.

The trading calendar provided by omicron starts on January 4, 2005, which is also the earliest date for which market data is available.

Roughly speaking, omicron provides the following time-frame operations:
2.2.1. Shifting trading days

If today is May 20, 2022 and you want the trading day 100 trading days earlier, use day_shift:
from omicron import tf

dt = datetime.date(2022, 5, 20)
tf.day_shift(dt, -100)
The output is datetime.date(2021, 12, 16). The second argument n of day_shift is the offset: when it is negative, the function returns the nth trading day before dt; when it is positive, the nth trading day after dt.

The case n == 0 is interesting. For the dt above, day_shift(dt, 0) returns the same day; but if dt were Saturday, May 21, 2022, then day_shift(datetime.date(2022, 5, 21), 0) would return May 20, 2022. Because May 21 is a Saturday and not a trading day, day_shift maps it to the corresponding trading day, which is convenient in most situations.

Besides day_shift, timeframe provides similar functions such as week_shift. In general you can use shift(dt, n, frame_type) to shift along any supported frame type, as sketched below.
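For instance, a minimal sketch of shifting along the 30-minute frame type (this assumes omicron has been initialized, and follows the shift(dt, n, frame_type) form stated above; treat the exact result as an assumption):

import datetime
from coretypes import FrameType
from omicron import tf

moment = datetime.datetime(2022, 5, 20, 10, 0)
# shift forward by two 30-minute frames; a negative offset shifts backward
print(tf.shift(moment, 2, FrameType.MIN30))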
2.2.2. Boundary operations: ceiling and floor

We often need to know which frame a given moment belongs to. To get the frame at or before the moment, use floor; for the frame at or after it, use ceiling.
tf.ceiling(datetime.date(2005, 1, 4), FrameType.WEEK)
# output: datetime.date(2005, 1, 7)
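Correspondingly, a minimal floor sketch (assuming the symmetric tf.floor API; verify the exact return value against the TimeFrame docs):

import datetime
from coretypes import FrameType
from omicron import tf

tf.floor(datetime.date(2005, 1, 7), FrameType.WEEK)
# returns the week frame ending on or before the given day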
2.2.3. Time conversions

For speed and for convenient persistence, timeframe sometimes represents times as integers. For example, 20220520 represents May 20, 2022, and 202205200931 represents 09:31 on May 20, 2022.

This representation sometimes requires conversions:
# convert an integer-encoded date to a date
tf.int2date(20220522)  # datetime.date(2022, 5, 22)
# convert an integer-encoded time to a datetime
tf.int2time(202205220931)  # datetime.datetime(2022, 5, 22, 9, 31)

# convert a date to an integer
tf.date2int(datetime.date(2022, 5, 22))  # 20220522

# convert a datetime to an integer
tf.time2int(datetime.datetime(2022, 5, 22, 9, 21))  # 202205220921
2.2.4. Listing all frames in an interval

Sometimes we need all frames of a given frame type between start and end:
start = arrow.get('2020-1-13 10:00').naive
end = arrow.get('2020-1-13 13:30').naive
tf.get_frames(start, end, FrameType.MIN30)
# [202001131000, 202001131030, 202001131100, 202001131130, 202001131330]
Important

The example above uses the naive attribute, which you may not be familiar with. It returns the time without timezone information. In Python, times can be timezone-aware or naive.

If you write datetime.datetime(2022, 5, 20), it is naive unless you explicitly attach a timezone.

In omicron, times are almost always naive, i.e. without a timezone, and are assumed to be in UTC+8 (Beijing time).

If you only know the end time and need the n frames leading up to it, use get_frames_by_count, sketched below.
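A minimal sketch (this assumes the argument order get_frames_by_count(end, n, frame_type) and an initialized omicron; check the TimeFrame docs for the exact signature):

import arrow
from coretypes import FrameType
from omicron import tf

end = arrow.get('2020-1-13 13:30').naive
# the last 3 thirty-minute frames ending at or before `end`
tf.get_frames_by_count(end, 3, FrameType.MIN30)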

If you only need to know how many frames lie between start and end, use count_frames:

start = datetime.date(2019, 12, 21)
end = datetime.date(2019, 12, 21)
tf.count_frames(start, end, FrameType.DAY)

The output is 1. There is also a shortcut, count_day_frames, and the same applies to week, month and quarter frames.

2.3. Reading market data

Now let's fetch some market data:
code = "000001.XSHE"

end = datetime.date(2022, 5, 20)
bars = await Stock.get_bars(code, 10, FrameType.DAY, end)

The returned bars is a numpy structured array of type bars_dtype. It typically contains the following fields:
* frame (the frame timestamp)
* open (open price)
* high (high price)
* low (low price)
* close (close price)
* volume (volume, in shares)
* amount (turnover)
* factor (adjustment factor)

By default the returned data is forward-adjusted up to end. You can turn this off with fq = False to get unadjusted data, from which you can compute backward-adjusted prices yourself.

To fetch data for a specific time range, use get_bars_in_range, sketched below.
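A minimal sketch of a ranged query (the argument order security, frame type, start, end is an assumption made by analogy with get_bars above; check the Stock API docs):

import datetime
from coretypes import FrameType
from omicron.models.stock import Stock

start = datetime.date(2022, 5, 9)
end = datetime.date(2022, 5, 20)
# daily bars for 000001.XSHE between start and end, forward-adjusted by default
bars = await Stock.get_bars_in_range("000001.XSHE", FrameType.DAY, start, end)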

The methods above return data that is as fresh as possible when end is the current time, but because Omega synchronizes with a one-minute delay, the data can lag by up to one minute. For more up-to-date quotes, use get_latest_price, sketched below.
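A minimal sketch (assuming get_latest_price accepts an iterable of security codes; check the Stock API docs for the exact signature and return shape):

from omicron.models.stock import Stock

# latest prices for the given codes
prices = await Stock.get_latest_price(["000001.XSHE"])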

To get the limit-up/limit-down prices and flags, see the corresponding methods in the Stock API documentation.

2.4. Board (sector) data

Omicron provides Tonghuashun (同花顺) industry-board and concept-board data. Before using this module, initialize it:
# initialize omicron first (omitted)
from omicron.models.board import Board, BoardType

Board.init('192.168.100.101')

The IP here is the IP of the server on which the Omega service is installed.

Use board_list to query all boards, as sketched below.
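A minimal sketch (assuming Board has been initialized as above):

from omicron.models.board import Board, BoardType

# list all industry boards; use BoardType.CONCEPT for concept boards
boards = await Board.board_list(BoardType.INDUSTRY)
print(boards[:3])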

For other methods, see the API documentation.

3. Writing strategies

omicron provides a strategy framework through its strategy module. A strategy written with this framework switches seamlessly between live trading and backtesting, depending on which server it is initialized with.

omicron ships a simple dual moving-average (SMA) strategy as a writing example. Read its source code together with the complete strategy example in this document, and run it in a notebook.

The strategy framework provides the backtest driver logic and some basic helpers. To write your own strategy, derive a subclass from the base class BaseStrategy and override its predict method to implement position adjustment.

The framework depends on zillionare-trader-client. For backtesting it requires the zillionare-backtesting service; for live trading it requires zillionare-gm-adaptor or another live trading gateway.

The strategy code itself can be used in both backtesting and live trading without modification.

3.1. Backtesting

A typical backtest involves the following steps:

1. Derive a strategy subclass from the base class, e.g. sma.py.
2. Override the predict method in the subclass: based on the current frame and frame type passed in, fetch and process data and derive trading signals.
3. Inside predict, act on the signals by calling the base-class buy and sell methods.
4. Create a strategy instance and call its backtest method. Using the backtest start time, end time and frame type given at construction, backtest generates each frame in turn and calls the subclass's predict method. If prefetch_stocks is passed, backtest also prefetches data (the prefetch length is determined by warmup_period) and passes in the data up to the current backtest frame.
5. When trading ends, call plot_metrics to get the backtest metrics report.

For how to derive the subclass, see the sma source code.

from omicron.strategy.sma import SMAStrategy

sma = SMAStrategy(
    url="",  # the url of either the backtest server or the trade server
    is_backtest=True,
    start=datetime.date(2023, 2, 3),
    end=datetime.date(2023, 4, 28),
    frame_type=FrameType.DAY,
    warmup_period=20
)

await sma.backtest(prefetch_stocks=["600000.XSHG"])

When backtesting, is_backtest=True and the start and end parameters are required.

3.2. The backtest report

After the backtest finishes, you can draw the backtest report in a notebook:

await sma.plot_metrics()

This draws the backtest metrics report figure.
3.2.1. Adding technical indicators to the backtest report

Info

Since 2.0.0.a76

First, compute the technical indicator inside the strategy's predict method and store it in a member variable. In the example below we store the indicator value together with its timestamp in an indicators array (mind the order!); after the backtest, pass it to plot_metrics:
indicators = [
    (datetime.date(2021, 2, 3), 20.1),
    (datetime.date(2021, 2, 4), 20.2),
    ...,
    (datetime.date(2021, 4, 1), 20.3)
]
await sma.plot_metrics(indicators)

Only timestamps of the main frame type may be used; otherwise the indicator may fail to align with the x-axis.

By default the added indicator only appears in the legend; to show it on the main chart, click it in the legend.

Besides being overlaid on the main chart, the indicator also appears in the hover info of the baseline trace (even if the indicator has nothing to do with the baseline); see the "indicator" row in the report figure.

3.3. Using data prefetch

Info

Since version 2.0.0-alpha76

During backtesting you can prefetch data for the main frame type to speed up the run. It works as follows:

If the strategy specifies warmup_period and prefetch_stocks is passed to backtest, then before the backtest starts, backtest prefetches the portfolio's market data for the range [start - warmup_period * frame_type, end]. On each call to predict, the data for [start - warmup_period * frame_type, start + i * frame_type] is passed in through the barss parameter. The data passed in is already forward-adjusted.

If you need to peek at future data during a backtest, use the peek method.

3.4. A complete SMA backtest example

The following strategy must be run in a notebook; it requires an Omega server installed with synchronized data and a correctly configured omicron.

The example runs as-is in the courseware environment of the 大富翁量化课程.
import cfg4py
import omicron
import datetime
from omicron.strategy.sma import SMAStrategy
from coretypes import FrameType

cfg = cfg4py.init("/etc/zillionare")
await omicron.init()

sec = "600000.XSHG"
start = datetime.date(2022, 1, 4)
end = datetime.date(2023, 1, 1)

sma = SMAStrategy(sec, url=cfg.backtest.url, is_backtest=True, start=start, end=end, frame_type=FrameType.DAY, warmup_period=10)
await sma.backtest(portfolio=[sec], stop_on_error=False)
await sma.plot_metrics(sma.indicators)

3.5. Live trading

In a live environment you also need to add a periodic task in your subclass (for example, one that runs every minute) and call predict from it to trade, as in the following example:

import cfg4py
import omicron
import datetime
from omicron.strategy.sma import SMAStrategy
from omicron.models.stock import Stock
from coretypes import FrameType
from apscheduler.schedulers.asyncio import AsyncIOScheduler


cfg = cfg4py.init("/etc/zillionare")
await omicron.init()

sec = "600000.XSHG"  # the security traded, as in the backtest example above

async def daily_job():
    sma = SMAStrategy(sec, url=cfg.traderserver.url, is_backtest=False, frame_type=FrameType.DAY)
    bars = await Stock.get_bars(sma._sec, 20, FrameType.DAY)
    await sma.predict(barss={sma._sec: bars})

async def main():
    scheduler = AsyncIOScheduler()
    scheduler.add_job(daily_job, 'cron', hour=14, minute=55)
    scheduler.start()
    # keep the event loop running so the scheduler can fire (in a notebook the loop is already running)

The strategy code itself needs no changes.

The job runs automatically every day at 14:55 to decide whether to adjust positions. You still need to check separately whether the day is a trading day.

4. Plotting

omicron provides candlestick plotting through Candlestick. By default it draws a chart showing 120 bars; the chart can be dragged to load more bars, and it supports additional sub-plots as well as indicator overlays on the main plot, as sketched below.
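A minimal sketch of drawing a candlestick chart in a notebook; the constructor argument and the plot call are assumptions made by analogy with the other examples in this document, so check the Candlestick API docs for the exact signature:

import datetime
from coretypes import FrameType
from omicron.models.stock import Stock
from omicron.plotting.candlestick import Candlestick

bars = await Stock.get_bars("000001.XSHE", 120, FrameType.DAY, datetime.date(2022, 5, 20))
cs = Candlestick(bars)  # assumed: takes a bars array as returned by Stock.get_bars
cs.plot()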

The rendered chart also shows automatically detected platforms (consolidation ranges); tops and bottoms can be detected and annotated automatically as well.

Note

The width parameter controls how many bars are loaded initially.

omicron provides the backtest report through metrics. The report is likewise a draggable plot, and hovering over a buy or sell point shows the trade details.

omicron's plotting features can only be used in a notebook.

5. Evaluation metrics

omicron provides the mean_absolute_error and pct_error functions. They are also available in scipy and other libraries; we include these common metrics for the convenience of users who are not familiar with those third-party packages.

For common strategy-evaluation metrics we use the corresponding functions from empyrical, such as alpha, beta, sharpe_ratio and calmar_ratio, as sketched below.
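A quick sketch using empyrical directly (empyrical.sharpe_ratio and empyrical.max_drawdown take an array of period returns; the hypothetical returns below are for illustration only):

import numpy as np
from empyrical import sharpe_ratio, max_drawdown

returns = np.array([0.01, -0.02, 0.015, 0.003, -0.001])
print(sharpe_ratio(returns), max_drawdown(returns))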

6. The TALIB module

Treat the functions provided here as experimental. These APIs may some day be deprecated, renamed or modified, may turn out not to be very useful, or may contain implementation errors.

However, if we do drop any of these APIs in the future, we will warn about it in advance through the deprecated mechanism.

7. Extensions

Python's built-in rounding causes serious problems when used for securities trading. For example, for round(0.3/2, 1) we expect 0.2 but actually get 0.1. Once such an error hits a low-priced stock, it introduces large uncertainty: rounding 1.945 to two decimals should give 1.95, and if it is instead rounded to 1.94 the error is close to 0.5%, which is unacceptable for investing. A quick demonstration:
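This is plain standard-library Python, nothing omicron-specific:

print(round(0.3 / 2, 1))   # 0.1, not the expected 0.2
print(round(1.945, 2))     # 1.94, not the expected 1.95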
    +

    Info

    +

    如果一天只进行一次交易,一次交易误差为0.5%,一年累积下来,误差将达到2.5倍。

    +
    +

In decimals we provide versions suited to securities trading: math_round and the price-comparison function price_equal, sketched below.
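A minimal sketch (the import path omicron.extensions.decimals is an assumption; check the API docs for the exact location):

from omicron.extensions.decimals import math_round, price_equal  # assumed path

print(math_round(1.945, 2))          # expected: 1.95, rounding half away from zero
print(price_equal(9.420001, 9.42))   # expected: True, prices compared with a small tolerance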

We also supplement numpy with some missing helpers in the np module, such as numpy_append_fields and fill_nan, sketched below.
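A minimal sketch of fill_nan, which fills NaN values (including a leading NaN) with the previous valid value; the import path omicron.extensions is an assumption, check the API docs:

import numpy as np
from omicron.extensions import fill_nan  # assumed path

arr = np.array([np.nan, 1.0, np.nan, 3.0])
print(fill_nan(arr))  # expected: leading NaN handled, gaps filled from neighbouring values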

    + + + + + + + + + + \ No newline at end of file diff --git a/latest/404.html b/latest/404.html new file mode 100644 index 00000000..6fe58906 --- /dev/null +++ b/latest/404.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../2.0.0/404.html... + + \ No newline at end of file diff --git a/latest/api/board/index.html b/latest/api/board/index.html new file mode 100644 index 00000000..00739f76 --- /dev/null +++ b/latest/api/board/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/board/... + + \ No newline at end of file diff --git a/latest/api/dal/flux/index.html b/latest/api/dal/flux/index.html new file mode 100644 index 00000000..3ac52327 --- /dev/null +++ b/latest/api/dal/flux/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../../2.0.0/api/dal/flux/... + + \ No newline at end of file diff --git a/latest/api/dal/influxclient/index.html b/latest/api/dal/influxclient/index.html new file mode 100644 index 00000000..45f27920 --- /dev/null +++ b/latest/api/dal/influxclient/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../../2.0.0/api/dal/influxclient/... + + \ No newline at end of file diff --git a/latest/api/dal/serialize/index.html b/latest/api/dal/serialize/index.html new file mode 100644 index 00000000..b9c5d9d9 --- /dev/null +++ b/latest/api/dal/serialize/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../../2.0.0/api/dal/serialize/... + + \ No newline at end of file diff --git a/latest/api/extensions/index.html b/latest/api/extensions/index.html new file mode 100644 index 00000000..16f26247 --- /dev/null +++ b/latest/api/extensions/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/extensions/... + + \ No newline at end of file diff --git a/latest/api/metrics/index.html b/latest/api/metrics/index.html new file mode 100644 index 00000000..ef8052e4 --- /dev/null +++ b/latest/api/metrics/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/metrics/... + + \ No newline at end of file diff --git a/latest/api/omicron/index.html b/latest/api/omicron/index.html new file mode 100644 index 00000000..79c40987 --- /dev/null +++ b/latest/api/omicron/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/omicron/... + + \ No newline at end of file diff --git a/latest/api/plotting/candlestick/index.html b/latest/api/plotting/candlestick/index.html new file mode 100644 index 00000000..c451a6d3 --- /dev/null +++ b/latest/api/plotting/candlestick/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../../2.0.0/api/plotting/candlestick/... + + \ No newline at end of file diff --git a/latest/api/plotting/metrics/index.html b/latest/api/plotting/metrics/index.html new file mode 100644 index 00000000..6144b9a0 --- /dev/null +++ b/latest/api/plotting/metrics/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../../2.0.0/api/plotting/metrics/... + + \ No newline at end of file diff --git a/latest/api/security/index.html b/latest/api/security/index.html new file mode 100644 index 00000000..d32b2696 --- /dev/null +++ b/latest/api/security/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/security/... 
+ + \ No newline at end of file diff --git a/latest/api/stock/index.html b/latest/api/stock/index.html new file mode 100644 index 00000000..056586ae --- /dev/null +++ b/latest/api/stock/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/stock/... + + \ No newline at end of file diff --git a/latest/api/strategy/index.html b/latest/api/strategy/index.html new file mode 100644 index 00000000..cc2ae6f6 --- /dev/null +++ b/latest/api/strategy/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/strategy/... + + \ No newline at end of file diff --git a/latest/api/talib/index.html b/latest/api/talib/index.html new file mode 100644 index 00000000..b7bb22d1 --- /dev/null +++ b/latest/api/talib/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/talib/... + + \ No newline at end of file diff --git a/latest/api/timeframe/index.html b/latest/api/timeframe/index.html new file mode 100644 index 00000000..06c94044 --- /dev/null +++ b/latest/api/timeframe/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/timeframe/... + + \ No newline at end of file diff --git a/latest/api/triggers/index.html b/latest/api/triggers/index.html new file mode 100644 index 00000000..9a9ab4ca --- /dev/null +++ b/latest/api/triggers/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../../2.0.0/api/triggers/... + + \ No newline at end of file diff --git a/latest/developer/index.html b/latest/developer/index.html new file mode 100644 index 00000000..d307c99a --- /dev/null +++ b/latest/developer/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../2.0.0/developer/... + + \ No newline at end of file diff --git a/latest/history/index.html b/latest/history/index.html new file mode 100644 index 00000000..a0b0691d --- /dev/null +++ b/latest/history/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../2.0.0/history/... + + \ No newline at end of file diff --git a/latest/index.html b/latest/index.html new file mode 100644 index 00000000..452c107d --- /dev/null +++ b/latest/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../2.0.0/... + + \ No newline at end of file diff --git a/latest/installation/index.html b/latest/installation/index.html new file mode 100644 index 00000000..5037e533 --- /dev/null +++ b/latest/installation/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../2.0.0/installation/... + + \ No newline at end of file diff --git a/latest/usage/index.html b/latest/usage/index.html new file mode 100644 index 00000000..472fb295 --- /dev/null +++ b/latest/usage/index.html @@ -0,0 +1,16 @@ + + + + + Redirecting + + + + + Redirecting to ../../2.0.0/usage/... 
+ + \ No newline at end of file diff --git a/versions.json b/versions.json index 2e8a835b..e0989df8 100644 --- a/versions.json +++ b/versions.json @@ -1 +1 @@ -[{"version": "2.0.0.a76", "title": "2.0.0.a76", "aliases": []}, {"version": "2.0.0a73", "title": "2.0.0a73", "aliases": []}, {"version": "2.0.0a69.dev", "title": "2.0.0a69.dev", "aliases": []}, {"version": "2.0.0a68.dev", "title": "2.0.0a68.dev", "aliases": []}, {"version": "2.0.0a67.dev", "title": "2.0.0a67.dev", "aliases": []}, {"version": "2.0.0a65.dev", "title": "2.0.0a65.dev", "aliases": []}] \ No newline at end of file +[{"version": "2.0.0", "title": "2.0.0", "aliases": ["latest"]}, {"version": "2.0.0.a76", "title": "2.0.0.a76", "aliases": []}, {"version": "2.0.0a73", "title": "2.0.0a73", "aliases": []}, {"version": "2.0.0a69.dev", "title": "2.0.0a69.dev", "aliases": []}, {"version": "2.0.0a68.dev", "title": "2.0.0a68.dev", "aliases": []}, {"version": "2.0.0a67.dev", "title": "2.0.0a67.dev", "aliases": []}, {"version": "2.0.0a65.dev", "title": "2.0.0a65.dev", "aliases": []}] \ No newline at end of file