* Update README.md

* Retire the pandas stubs and the associated scripts.

* Remove the pandas tests.

* Update README.md
Graham Wheeler 2022-06-02 18:30:05 -07:00 committed by GitHub
Parent 5221f7c4fb
Commit 0aa15ddf9f
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
173 changed files: 3 additions and 14164 deletions

.github/workflows/docify.yml (vendored)

@@ -1,42 +0,0 @@
name: 'Docify'
on: [push, pull_request]
jobs:
  test:
    runs-on: ubuntu-18.04
    timeout-minutes: 10
    steps:
      - name: Check out code
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.9"
      - name: Cache pip
        uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install dependencies
        run:
          python -m pip install -r utils/requirements.txt
      - name: Install partial pandas stubs
        run:
          cp -R partial/ typings
      - name: Install docify
        run:
          PBR_VERSION=1.0.0 pip install utils/docify
      - name: Run docify
        run:
          docify utils/build_bundle/docify-pandas.cfg typings/pandas

.github/workflows/test.yml (vendored)

@@ -1,52 +0,0 @@
name: 'Test'
on: [push, pull_request, workflow_dispatch]
jobs:
  test:
    runs-on: ubuntu-18.04
    timeout-minutes: 10
    steps:
      - name: Check out code
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.9"
      - name: Cache pip
        uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install dependencies
        run:
          python -m pip install -r tests/requirements.txt
      - name: Install partial pandas stubs
        run:
          cp -R partial/ typings
      - name: Run pyright tests
        uses: gramster/pyright-action@main
        with:
          project: pyrighttestconfig.json
          warn-partial: true
      - name: Run pytest
        run:
          pytest tests/pandas
      - name: rename matplotlib for mypy test
        run:
          mv matplotlib savmpl
      - name: Run mypy
        run:
          mypy tests/pandas

@@ -40,10 +40,12 @@ and are no longer maintained here:
- freezegun
- markdown
- packaging
- pandas (see https://github.com/pandas-dev/pandas-stubs; please open pandas stub issues there)
- PIL
- retry
- slugify
# Trademarks
This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow Microsoft's Trademark & Brand Guidelines. Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party's policies.

@@ -1,116 +0,0 @@
from ._config import (
describe_option as describe_option,
get_option as get_option,
option_context as option_context,
options as options,
reset_option as reset_option,
set_option as set_option)
from .core.api import (
BooleanDtype as BooleanDtype,
Categorical as Categorical,
CategoricalDtype as CategoricalDtype,
CategoricalIndex as CategoricalIndex,
DataFrame as DataFrame,
DateOffset as DateOffset,
DatetimeIndex as DatetimeIndex,
DatetimeTZDtype as DatetimeTZDtype,
Float64Index as Float64Index,
Grouper as Grouper,
Index as Index,
IndexSlice as IndexSlice,
Int16Dtype as Int16Dtype,
Int32Dtype as Int32Dtype,
Int64Dtype as Int64Dtype,
Int64Index as Int64Index,
Int8Dtype as Int8Dtype,
Interval as Interval,
IntervalDtype as IntervalDtype,
IntervalIndex as IntervalIndex,
MultiIndex as MultiIndex,
NA as NA,
NaT as NaT,
NamedAgg as NamedAgg,
Period as Period,
PeriodDtype as PeriodDtype,
PeriodIndex as PeriodIndex,
RangeIndex as RangeIndex,
Series as Series,
StringDtype as StringDtype,
Timedelta as Timedelta,
TimedeltaIndex as TimedeltaIndex,
Timestamp as Timestamp,
UInt16Dtype as UInt16Dtype,
UInt32Dtype as UInt32Dtype,
UInt64Dtype as UInt64Dtype,
UInt64Index as UInt64Index,
UInt8Dtype as UInt8Dtype,
array as array,
bdate_range as bdate_range,
date_range as date_range,
factorize as factorize,
interval_range as interval_range,
isna as isna,
isnull as isnull,
notna as notna,
notnull as notnull,
period_range as period_range,
set_eng_float_format as set_eng_float_format,
timedelta_range as timedelta_range,
to_numeric as to_numeric,
unique as unique,
value_counts as value_counts)
from .core.tools import (
to_datetime as to_datetime,
to_timedelta as to_timedelta
)
from .core.arrays.sparse import SparseDtype as SparseDtype
from .tseries import offsets as offsets
from .tseries.api import infer_freq as infer_freq
from .core.computation.api import eval as eval
from .core.reshape.api import (
concat as concat,
crosstab as crosstab,
cut as cut,
get_dummies as get_dummies,
lreshape as lreshape,
melt as melt,
merge as merge,
merge_asof as merge_asof,
merge_ordered as merge_ordered,
pivot as pivot,
pivot_table as pivot_table,
qcut as qcut,
wide_to_long as wide_to_long)
from .util._print_versions import show_versions as show_versions
from .io.json import json_normalize as json_normalize
from .io.api import (
ExcelFile as ExcelFile,
ExcelWriter as ExcelWriter,
HDFStore as HDFStore,
read_clipboard as read_clipboard,
read_csv as read_csv,
read_excel as read_excel,
read_feather as read_feather,
read_fwf as read_fwf,
read_gbq as read_gbq,
read_hdf as read_hdf,
read_html as read_html,
read_json as read_json,
read_orc as read_orc,
read_parquet as read_parquet,
read_pickle as read_pickle,
read_sas as read_sas,
read_spss as read_spss,
read_sql as read_sql,
read_sql_query as read_sql_query,
read_sql_table as read_sql_table,
read_stata as read_stata,
read_table as read_table,
to_pickle as to_pickle)
from .util._tester import test as test
import pandas.testing as testing
__version__ : str
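
For orientation only (this sketch is not part of the deleted stub): the names re-exported above are the usual top-level pandas entry points. Values below are made up for the example.

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})
s = pd.to_numeric(pd.Series(["1", "2"]))   # to_numeric comes from core.tools
print(pd.isna(pd.NA))                      # True
print(df["a"].sum(), s.dtype)              # 6 int64
print(pd.__version__)                      # declared above as a plain str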

@@ -1,4 +0,0 @@
from .tslibs import NaT as NaT, NaTType as NaTType, OutOfBoundsDatetime as OutOfBoundsDatetime, Period as Period, Timedelta as Timedelta, Timestamp as Timestamp, iNaT as iNaT
from .interval import Interval as Interval

@@ -1,153 +0,0 @@
from __future__ import annotations
from typing import (
Any,
Generic,
TypeVar,
Union,
overload,
)
import numpy as np
from pandas._typing import npt
from pandas._typing import (
IntervalClosedType,
Timedelta,
Timestamp,
)
VALID_CLOSED: frozenset[str]
_OrderableScalarT = TypeVar("_OrderableScalarT", int, float)
_OrderableTimesT = TypeVar("_OrderableTimesT", Timestamp, Timedelta)
_OrderableT = TypeVar("_OrderableT", int, float, Timestamp, Timedelta)
class _LengthDescriptor:
@overload
def __get__(self, instance: Interval[_OrderableScalarT], owner: Any) -> _OrderableScalarT: ...
@overload
def __get__(self, instance: Interval[_OrderableTimesT], owner: Any) -> Timedelta: ...
@overload
def __get__(self, instance: IntervalTree, owner: Any) -> np.ndarray: ...
class _MidDescriptor:
@overload
def __get__(self, instance: Interval[_OrderableScalarT], owner: Any) -> float: ...
@overload
def __get__(self, instance: Interval[_OrderableTimesT], owner: Any) -> _OrderableTimesT: ...
@overload
def __get__(self, instance: IntervalTree, owner: Any) -> np.ndarray: ...
class IntervalMixin:
@property
def closed_left(self) -> bool: ...
@property
def closed_right(self) -> bool: ...
@property
def open_left(self) -> bool: ...
@property
def open_right(self) -> bool: ...
@property
def is_empty(self) -> bool: ...
def _check_closed_matches(self, other: IntervalMixin, name: str = ...) -> None: ...
class Interval(IntervalMixin, Generic[_OrderableT]):
@property
def left(self: Interval[_OrderableT]) -> _OrderableT: ...
@property
def right(self: Interval[_OrderableT]) -> _OrderableT: ...
@property
def closed(self) -> IntervalClosedType: ...
mid: _MidDescriptor
length: _LengthDescriptor
def __init__(
self,
left: _OrderableT,
right: _OrderableT,
closed: IntervalClosedType = ...,
): ...
def __hash__(self) -> int: ...
@overload
def __contains__(self: Interval[_OrderableTimesT], key: _OrderableTimesT) -> bool: ...
@overload
def __contains__(self: Interval[_OrderableScalarT], key: Union[int, float]) -> bool: ...
def __repr__(self) -> str: ...
def __str__(self) -> str: ...
@overload
def __add__(self: Interval[_OrderableTimesT], y: Timedelta) -> Interval[_OrderableTimesT]: ...
@overload
def __add__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __add__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __add__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __radd__(self: Interval[_OrderableTimesT], y: Timedelta) -> Interval[_OrderableTimesT]: ...
@overload
def __radd__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __radd__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __radd__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __sub__(self: Interval[_OrderableTimesT], y: Timedelta) -> Interval[_OrderableTimesT]: ...
@overload
def __sub__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __sub__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __sub__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __rsub__(self: Interval[_OrderableTimesT], y: Timedelta) -> Interval[_OrderableTimesT]: ...
@overload
def __rsub__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __rsub__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __rsub__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __mul__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __mul__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __mul__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __rmul__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __rmul__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __rmul__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __truediv__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __truediv__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __truediv__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
@overload
def __floordiv__(self: Interval[int], y: int) -> Interval[int]: ...
@overload
def __floordiv__(self: Interval[int], y: float) -> Interval[float]: ...
@overload
def __floordiv__(self: Interval[float], y: Union[int, float]) -> Interval[float]: ...
def overlaps(self: Interval[_OrderableT], other: Interval[_OrderableT]) -> bool: ...
def intervals_to_interval_bounds(intervals: np.ndarray, validate_closed: bool = ...) -> tuple[np.ndarray, np.ndarray, str]: ...
class IntervalTree(IntervalMixin):
def __init__(
self,
left: np.ndarray,
right: np.ndarray,
closed: IntervalClosedType = ...,
leaf_size: int = ...,
): ...
def get_indexer(self, target) -> npt.NDArray[np.intp]: ...
def get_indexer_non_unique(self, target) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
_na_count: int
@property
def is_overlapping(self) -> bool: ...
@property
def is_monotonic_increasing(self) -> bool: ...
def clear_mapping(self) -> None: ...
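
As an aside (not part of the deleted stub), a small sketch of the runtime behaviour these Interval overloads describe; the values are invented for the example.

import pandas as pd

iv = pd.Interval(0, 5, closed="left")    # an Interval[int] under this stub
print(iv + 2)                            # Interval(2, 7, ...): int + int stays Interval[int]
print(iv * 1.5)                          # Interval(0.0, 7.5, ...): widens to Interval[float]
print(2 in iv)                           # True, via __contains__
ts_iv = pd.Interval(pd.Timestamp("2022-01-01"), pd.Timestamp("2022-02-01"))
print(ts_iv.length)                      # Timedelta('31 days ...'), per _LengthDescriptor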

@@ -1,4 +0,0 @@
def decode(*args, **kwargs): ...
def dumps(*args, **kwargs): ...
def encode(*args, **kwargs): ...
def loads(*args, **kwargs): ...

@@ -1,48 +0,0 @@
from __future__ import annotations
from typing import Union
class NAType:
def __new__(cls, *args, **kwargs) -> NAType: ...
def __repr__(self) -> str: ...
def __format__(self, format_spec: str) -> str: ...
def __bool__(self) -> None: ...
def __hash__(self) -> int: ...
def __reduce__(self) -> str: ...
def __add__(self, other) -> NAType: ...
def __radd__(self, other) -> NAType: ...
def __sub__(self, other) -> NAType: ...
def __rsub__(self, other) -> NAType: ...
def __mul__(self, other) -> NAType: ...
def __rmul__(self, other) -> NAType: ...
def __matmul__(self, other) -> NAType: ...
def __rmatmul__(self, other) -> NAType: ...
def __truediv__(self, other) -> NAType: ...
def __rtruediv__(self, other) -> NAType: ...
def __floordiv__(self, other) -> NAType: ...
def __rfloordiv__(self, other) -> NAType: ...
def __mod__(self, other) -> NAType: ...
def __rmod__(self, other) -> NAType: ...
def __divmod__(self, other) -> NAType: ...
def __rdivmod__(self, other) -> NAType: ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other) -> bool: ...
def __le__(self, other) -> bool: ...
def __lt__(self, other) -> bool: ...
def __gt__(self, other) -> bool: ...
def __ge__(self, other) -> bool: ...
def __neg__(self) -> NAType: ...
def __pos__(self) -> NAType: ...
def __abs__(self) -> NAType: ...
def __invert__(self) -> NAType: ...
def __pow__(self, other) -> NAType: ...
def __rpow__(self, other) -> NAType: ...
def __and__(self, other) -> Union[None, NAType]: ...
__rand__ = __and__
def __or__(self, other) -> Union[bool, NAType]: ...
__ror__ = __or__
def __xor__(self, other) -> NAType: ...
__rxor__ = __xor__
__array_priority__: int
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
NA: NAType = ...
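
For illustration only (not part of the deleted stub), the propagation behaviour these NAType signatures model:

import pandas as pd

print(pd.NA + 1)       # <NA>: arithmetic with NA propagates NA
print(pd.NA | True)    # True: three-valued "or" with True
print(pd.NA & False)   # False: three-valued "and" with False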

@@ -1,13 +0,0 @@
from typing import Callable
class CachedProperty:
def __init__(self, func: Callable) -> None: ...
def __get__(self, obj, typ): ...
def __set__(self, obj, value) -> None: ...
cache_readonly: CachedProperty = ...
class AxisProperty:
def __init__(self, axis: int = ..., doc: str = ...) -> None: ...
def __get__(self, obj, typ): ...
def __set__(self, obj, value) -> None: ...

@@ -1,24 +0,0 @@
__all__ = [
"Period",
"Timestamp",
"Timedelta",
"NaT",
"NaTType",
"iNaT",
"nat_strings",
"BaseOffset",
"Tick",
"OutofBoundsDatetime",
]
from .period import Period
from .timestamps import Timestamp
from .timedeltas import Timedelta
from .nattype import (
NaT,
NaTType,
iNaT,
nat_strings,
)
from .offsets import BaseOffset, Tick
from .np_datetime import OutOfBoundsDatetime as OutOfBoundsDatetime

@@ -1,123 +0,0 @@
from datetime import (
datetime,
timedelta,
tzinfo as _tzinfo,
)
from typing import (
Any,
Union,
)
import numpy as np
from pandas._libs.tslibs.period import Period
NaT: NaTType
iNaT: int
nat_strings: set[str]
def is_null_datetimelike(val: object, inat_is_null: bool = ...) -> bool: ...
_NaTComparisonTypes = Union[datetime, timedelta, Period, np.datetime64, np.timedelta64]
class _NatComparison:
def __call__(self, other: _NaTComparisonTypes) -> bool: ...
class NaTType:
value: np.int64
def asm8(self) -> np.datetime64: ...
def to_datetime64(self) -> np.datetime64: ...
def to_numpy(self, dtype: np.dtype | str | None = ..., copy: bool = ...) -> np.datetime64 | np.timedelta64: ...
@property
def is_leap_year(self) -> bool: ...
@property
def is_month_start(self) -> bool: ...
@property
def is_quarter_start(self) -> bool: ...
@property
def is_year_start(self) -> bool: ...
@property
def is_month_end(self) -> bool: ...
@property
def is_quarter_end(self) -> bool: ...
@property
def is_year_end(self) -> bool: ...
@property
def day_of_year(self) -> float: ...
@property
def dayofyear(self) -> float: ...
@property
def days_in_month(self) -> float: ...
@property
def daysinmonth(self) -> float: ...
@property
def day_of_week(self) -> float: ...
@property
def dayofweek(self) -> float: ...
@property
def week(self) -> float: ...
@property
def weekofyear(self) -> float: ...
def day_name(self) -> float: ...
def month_name(self) -> float: ...
def weekday(self) -> float: ...
def isoweekday(self) -> float: ...
def total_seconds(self) -> float: ...
def today(self, *args, **kwargs) -> NaTType: ...
def now(self, *args, **kwargs) -> NaTType: ...
def to_pydatetime(self) -> NaTType: ...
def date(self) -> NaTType: ...
def round(self) -> NaTType: ...
def floor(self) -> NaTType: ...
def ceil(self) -> NaTType: ...
def tz_convert(self) -> NaTType: ...
def tz_localize(self) -> NaTType: ...
def replace(
self,
year: int | None = ...,
month: int | None = ...,
day: int | None = ...,
hour: int | None = ...,
minute: int | None = ...,
second: int | None = ...,
microsecond: int | None = ...,
nanosecond: int | None = ...,
tzinfo: _tzinfo | None = ...,
fold: int | None = ...,
) -> NaTType: ...
@property
def year(self) -> float: ...
@property
def quarter(self) -> float: ...
@property
def month(self) -> float: ...
@property
def day(self) -> float: ...
@property
def hour(self) -> float: ...
@property
def minute(self) -> float: ...
@property
def second(self) -> float: ...
@property
def millisecond(self) -> float: ...
@property
def microsecond(self) -> float: ...
@property
def nanosecond(self) -> float: ...
# inject Timedelta properties
@property
def days(self) -> float: ...
@property
def microseconds(self) -> float: ...
@property
def nanoseconds(self) -> float: ...
# inject Period properties
@property
def qyear(self) -> float: ...
def __eq__(self, other: Any) -> bool: ...
def __ne__(self, other: Any) -> bool: ...
__lt__: _NatComparison
__le__: _NatComparison
__gt__: _NatComparison
__ge__: _NatComparison
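
A brief illustrative aside (not part of the deleted stub) showing the NaT semantics the signatures above encode:

import pandas as pd

print(pd.NaT == pd.NaT)                       # False: NaT never compares equal
print(pd.NaT < pd.Timestamp("2022-01-01"))    # False: ordering comparisons with NaT are False
print(pd.isna(pd.NaT))                        # True
print(pd.NaT + pd.Timedelta("1D"))            # NaT: datetime arithmetic propagates NaT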

@@ -1 +0,0 @@
class OutOfBoundsDatetime(ValueError): ...

@@ -1,241 +0,0 @@
from __future__ import annotations
from datetime import (
datetime,
timedelta,
)
from typing import (
TYPE_CHECKING,
Any,
Collection,
Literal,
Tuple,
TypeVar,
Union,
overload,
)
import numpy as np
from pandas._typing import npt
from .timedeltas import Timedelta
if TYPE_CHECKING:
from pandas.core.indexes.datetimes import DatetimeIndex
_BaseOffsetT = TypeVar("_BaseOffsetT", bound="BaseOffset")
_DatetimeT = TypeVar("_DatetimeT", bound=datetime)
_TimedeltaT = TypeVar("_TimedeltaT", bound=timedelta)
_relativedelta_kwds: set[str]
prefix_mapping: dict[str, type]
class ApplyTypeError(TypeError): ...
class BaseOffset:
n: int
def __init__(self, n: int = ..., normalize: bool = ...) -> None: ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other) -> bool: ...
def __hash__(self) -> int: ...
@property
def kwds(self) -> dict: ...
@property
def base(self) -> BaseOffset: ...
@overload
def __add__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
@overload
def __add__(self: _BaseOffsetT, other: BaseOffset) -> _BaseOffsetT: ...
@overload
def __add__(self, other: _DatetimeT) -> _DatetimeT: ...
@overload
def __add__(self, other: _TimedeltaT) -> _TimedeltaT: ...
@overload
def __radd__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
@overload
def __radd__(self: _BaseOffsetT, other: BaseOffset) -> _BaseOffsetT: ...
@overload
def __radd__(self, other: _DatetimeT) -> _DatetimeT: ...
@overload
def __radd__(self, other: _TimedeltaT) -> _TimedeltaT: ...
def __sub__(self: _BaseOffsetT, other: BaseOffset) -> _BaseOffsetT: ...
@overload
def __rsub__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
@overload
def __rsub__(self: _BaseOffsetT, other: BaseOffset) -> _BaseOffsetT: ...
@overload
def __rsub__(self, other: _DatetimeT) -> _DatetimeT: ...
@overload
def __rsub__(self, other: _TimedeltaT) -> _TimedeltaT: ...
def __call__(self, other): ...
@overload
def __mul__(self, other: np.ndarray) -> np.ndarray: ...
@overload
def __mul__(self: _BaseOffsetT, other: int) -> _BaseOffsetT: ...
@overload
def __rmul__(self, other: np.ndarray) -> np.ndarray: ...
@overload
def __rmul__(self: _BaseOffsetT, other: int) -> _BaseOffsetT: ...
def __neg__(self: _BaseOffsetT) -> _BaseOffsetT: ...
def copy(self: _BaseOffsetT) -> _BaseOffsetT: ...
def __repr__(self) -> str: ...
@property
def name(self) -> str: ...
@property
def rule_code(self) -> str: ...
def freqstr(self) -> str: ...
def apply_index(self, dtindex: "DatetimeIndex") -> "DatetimeIndex": ...
def _apply_array(self, dtarr) -> None: ...
def rollback(self, dt: datetime) -> datetime: ...
def rollforward(self, dt: datetime) -> datetime: ...
def is_on_offset(self, dt: datetime) -> bool: ...
def __setstate__(self, state) -> None: ...
def __getstate__(self): ...
@property
def nanos(self) -> int: ...
def onOffset(self, dt: datetime) -> bool: ...
def isAnchored(self) -> bool: ...
def is_anchored(self) -> bool: ...
def _get_offset(name: str) -> BaseOffset: ...
class SingleConstructorOffset(BaseOffset):
@classmethod
def _from_name(cls, suffix=...): ...
def __reduce__(self): ...
@overload
def to_offset(freq: None) -> None: ...
@overload
def to_offset(freq: timedelta | BaseOffset | str) -> BaseOffset: ...
class Tick(SingleConstructorOffset):
def __init__(self, n: int = ..., normalize: bool = ...) -> None: ...
@property
def delta(self) -> Timedelta: ...
@property
def nanos(self) -> int: ...
def delta_to_tick(delta: timedelta) -> Tick: ...
class Day(Tick): ...
class Hour(Tick): ...
class Minute(Tick): ...
class Second(Tick): ...
class Milli(Tick): ...
class Micro(Tick): ...
class Nano(Tick): ...
class RelativeDeltaOffset(BaseOffset):
def __init__(self, n: int = ..., normalize: bool = ..., **kwds: Any) -> None: ...
class BusinessMixin(SingleConstructorOffset):
def __init__(self, n: int = ..., normalize: bool = ..., offset: timedelta = ...): ...
class BusinessDay(BusinessMixin): ...
class BusinessHour(BusinessMixin):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
start: str | Collection[str] = ...,
end: str | Collection[str] = ...,
offset: timedelta = ...,
): ...
class WeekOfMonthMixin(SingleConstructorOffset): ...
class YearOffset(SingleConstructorOffset):
def __init__(self, n: int = ..., normalize: bool = ..., month: int | None = ...): ...
class BYearEnd(YearOffset): ...
class BYearBegin(YearOffset): ...
class YearEnd(YearOffset): ...
class YearBegin(YearOffset): ...
class QuarterOffset(SingleConstructorOffset):
def __init__(self, n: int = ..., normalize: bool = ..., startingMonth: int | None = ...) -> None: ...
class BQuarterEnd(QuarterOffset): ...
class BQuarterBegin(QuarterOffset): ...
class QuarterEnd(QuarterOffset): ...
class QuarterBegin(QuarterOffset): ...
class MonthOffset(SingleConstructorOffset): ...
class MonthEnd(MonthOffset): ...
class MonthBegin(MonthOffset): ...
class BusinessMonthEnd(MonthOffset): ...
class BusinessMonthBegin(MonthOffset): ...
class SemiMonthOffset(SingleConstructorOffset):
def __init__(self, n: int = ..., normalize: bool = ..., day_of_month: int | None = ...) -> None: ...
class SemiMonthEnd(SemiMonthOffset): ...
class SemiMonthBegin(SemiMonthOffset): ...
class Week(SingleConstructorOffset):
def __init__(self, n: int = ..., normalize: bool = ..., weekday: int | None = ...) -> None: ...
class WeekOfMonth(WeekOfMonthMixin): ...
class LastWeekOfMonth(WeekOfMonthMixin): ...
class FY5253Mixin(SingleConstructorOffset):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
weekday: int = ...,
startingMonth: int = ...,
variation: str = ...,
) -> None: ...
class FY5253(FY5253Mixin): ...
class FY5253Quarter(FY5253Mixin): ...
class Easter(SingleConstructorOffset): ...
class _CustomBusinessMonth(BusinessMixin):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
offset: timedelta = ...,
holidays: None | list = ...,
): ...
class CustomBusinessDay(BusinessDay):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
offset: timedelta = ...,
weekmask: str = ...,
): ...
class CustomBusinessHour(BusinessHour):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
start: str = ...,
end: str = ...,
offset: timedelta = ...,
holidays: None | list = ...,
): ...
class CustomBusinessMonthEnd(_CustomBusinessMonth): ...
class CustomBusinessMonthBegin(_CustomBusinessMonth): ...
class DateOffset(RelativeDeltaOffset): ...
BDay = BusinessDay
BMonthEnd = BusinessMonthEnd
BMonthBegin = BusinessMonthBegin
CBMonthEnd = CustomBusinessMonthEnd
CBMonthBegin = CustomBusinessMonthBegin
CDay = CustomBusinessDay
def roll_qtrday(other: datetime, n: int, month: int, day_opt: str, modby: int) -> int: ...
INVALID_FREQ_ERR_MSG: Literal["Invalid frequency: {0}"]
def shift_months(dtindex: npt.NDArray[np.int64], months: int, day_opt: str | None = ...) -> npt.NDArray[np.int64]: ...
_offset_map: dict[str, BaseOffset]
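
For context only (not part of the deleted stub), typical use of the offset classes declared above; the dates are arbitrary examples.

import pandas as pd
from pandas.tseries.offsets import BDay, MonthEnd
from pandas.tseries.frequencies import to_offset

ts = pd.Timestamp("2022-06-02")                  # a Thursday
print(ts + BDay(1))                              # 2022-06-03, the next business day
print(ts + MonthEnd(1))                          # 2022-06-30, rolled forward to month end
print(to_offset("2D") == 2 * pd.offsets.Day())   # True: string aliases map to offset objects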

@@ -1,89 +0,0 @@
from __future__ import annotations
from datetime import datetime
from typing import Any
def is_period_object(obj: object) -> bool: ...
def get_period_ordinal(dts: datetime, freq: int) -> int: ...
class IncompatibleFrequency(ValueError): ...
class Period:
def __init__(
self,
value: Any = ...,
freqstr: Any = ...,
ordinal: Any = ...,
year: Any = ...,
month: int = ...,
quarter: Any = ...,
day: int = ...,
hour: int = ...,
minute: int = ...,
second: int = ...,
) -> None: ...
def __add__(self, other) -> Period: ...
def __eq__(self, other) -> bool: ...
def __ge__(self, other) -> bool: ...
def __gt__(self, other) -> bool: ...
def __hash__(self) -> int: ...
def __le__(self, other) -> bool: ...
def __lt__(self, other) -> bool: ...
def __new__(cls, *args, **kwargs) -> Period: ...
def __ne__(self, other) -> bool: ...
def __radd__(self, other) -> Period: ...
def __reduce__(self, *args, **kwargs) -> Any: ... # what should this be?
def __repr__(self) -> str: ...
def __rsub__(self, other) -> Period: ...
def __setstate__(self, *args, **kwargs) -> Any: ... # what should this be?
def __str__(self) -> str: ...
@property
def day(self) -> int: ...
@property
def dayofweek(self) -> int: ...
@property
def dayofyear(self) -> int: ...
@property
def daysinmonth(self) -> int: ...
@property
def days_in_month(self) -> int: ...
@property
def end_time(self) -> Timestamp: ...
@property
def freq(self) -> Any: ...
@property
def freqstr(self) -> str: ...
@property
def hour(self) -> int: ...
@property
def minute(self) -> int: ...
@property
def month(self) -> int: ...
@property
def quarter(self) -> int: ...
@property
def qyear(self) -> int: ...
@property
def second(self) -> int: ...
@property
def ordinal(self) -> int: ...
@property
def is_leap_year(self) -> bool: ...
@property
def start_time(self) -> Timestamp: ...
@property
def week(self) -> int: ...
@property
def weekday(self) -> int: ...
@property
def weekofyear(self) -> int: ...
@property
def year(self) -> int: ...
# Static methods
@classmethod
def now(cls) -> Period: ...
# Methods
def asfreq(self, freq: str, how: str = ...) -> Period: ...
def strftime(self, fmt: str) -> str: ...
def to_timestamp(self, freq: str, how: str = ...) -> Timestamp: ...
from .timestamps import Timestamp
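
An illustrative aside (not part of the deleted stub) exercising the Period API covered above; the period chosen is arbitrary.

import pandas as pd

p = pd.Period("2022-06", freq="M")
print(p.start_time, p.end_time)     # 2022-06-01 00:00:00  2022-06-30 23:59:59.999999999
print(p.asfreq("D", how="end"))     # Period('2022-06-30', 'D')
print((p + 1).strftime("%Y-%m"))    # '2022-07'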

@@ -1,148 +0,0 @@
from datetime import timedelta
from typing import (
ClassVar,
Literal,
Type,
TypeVar,
overload,
)
import numpy as np
from pandas._libs.tslibs import (
NaTType,
Tick,
)
from pandas._typing import npt
# This should be kept consistent with the keys in the dict timedelta_abbrevs
# in pandas/_libs/tslibs/timedeltas.pyx
UnitChoices = Literal[
"Y",
"y",
"M",
"W",
"w",
"D",
"d",
"days",
"day",
"hours",
"hour",
"hr",
"h",
"m",
"minute",
"min",
"minutes",
"t",
"s",
"seconds",
"sec",
"second",
"ms",
"milliseconds",
"millisecond",
"milli",
"millis",
"l",
"us",
"microseconds",
"microsecond",
"µs",
"micro",
"micros",
"u",
"ns",
"nanoseconds",
"nano",
"nanos",
"nanosecond",
"n",
]
_S = TypeVar("_S", bound=timedelta)
def ints_to_pytimedelta(
arr: npt.NDArray[np.int64], # const int64_t[:]
box: bool = ...,
) -> npt.NDArray[np.object_]: ...
def array_to_timedelta64(
values: npt.NDArray[np.object_],
unit: str | None = ...,
errors: str = ...,
) -> np.ndarray: ... # np.ndarray[m8ns]
def parse_timedelta_unit(unit: str | None) -> UnitChoices: ...
def delta_to_nanoseconds(delta: np.timedelta64 | timedelta | Tick) -> int: ...
class Timedelta(timedelta):
min: ClassVar[Timedelta]
max: ClassVar[Timedelta]
resolution: ClassVar[Timedelta]
value: int # np.int64
def __new__(
cls: Type[_S],
value=...,
unit: str = ...,
**kwargs: int | float | np.integer | np.floating,
) -> _S: ...
# GH 46171
# While Timedelta can return pd.NaT, having the constructor return
# a Union with NaTType makes things awkward for users of pandas
@property
def days(self) -> int: ...
@property
def seconds(self) -> int: ...
@property
def microseconds(self) -> int: ...
def total_seconds(self) -> float: ...
def to_pytimedelta(self) -> timedelta: ...
def to_timedelta64(self) -> np.timedelta64: ...
@property
def asm8(self) -> np.timedelta64: ...
# TODO: round/floor/ceil could return NaT?
def round(self: _S, freq: str) -> _S: ...
def floor(self: _S, freq: str) -> _S: ...
def ceil(self: _S, freq: str) -> _S: ...
@property
def resolution_string(self) -> str: ...
def __add__(self, other: timedelta) -> Timedelta: ...
def __radd__(self, other: timedelta) -> Timedelta: ...
def __sub__(self, other: timedelta) -> Timedelta: ...
def __rsub__(self, other: timedelta) -> Timedelta: ...
def __neg__(self) -> Timedelta: ...
def __pos__(self) -> Timedelta: ...
def __abs__(self) -> Timedelta: ...
def __mul__(self, other: float) -> Timedelta: ...
def __rmul__(self, other: float) -> Timedelta: ...
# error: Signature of "__floordiv__" incompatible with supertype "timedelta"
@overload # type: ignore[override]
def __floordiv__(self, other: timedelta) -> int: ...
@overload
def __floordiv__(self, other: int | float) -> Timedelta: ...
@overload
def __floordiv__(
self, other: npt.NDArray[np.timedelta64]
) -> npt.NDArray[np.intp]: ...
@overload
def __floordiv__(
self, other: npt.NDArray[np.number]
) -> npt.NDArray[np.timedelta64] | Timedelta: ...
@overload
def __rfloordiv__(self, other: timedelta | str) -> int: ...
@overload
def __rfloordiv__(self, other: None | NaTType) -> NaTType: ...
@overload
def __rfloordiv__(self, other: np.ndarray) -> npt.NDArray[np.timedelta64]: ...
@overload
def __truediv__(self, other: timedelta) -> float: ...
@overload
def __truediv__(self, other: float) -> Timedelta: ...
def __mod__(self, other: timedelta) -> Timedelta: ...
def __divmod__(self, other: timedelta) -> tuple[int, Timedelta]: ...
def __le__(self, other: timedelta) -> bool: ...
def __lt__(self, other: timedelta) -> bool: ...
def __ge__(self, other: timedelta) -> bool: ...
def __gt__(self, other: timedelta) -> bool: ...
def __hash__(self) -> int: ...
def isoformat(self) -> str: ...
def to_numpy(self) -> np.timedelta64: ...
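
For illustration only (not part of the deleted stub), a few of the Timedelta operations whose overloads appear above; values are examples.

import pandas as pd

td = pd.Timedelta(90, unit="m")     # Timedelta('0 days 01:30:00')
print(td.total_seconds())           # 5400.0
print(td // pd.Timedelta("1h"))     # 1: timedelta // timedelta returns int
print(td / 3)                       # Timedelta('0 days 00:30:00'): division by a number stays Timedelta
print(td.isoformat())               # 'P0DT1H30M0S'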

@@ -1,201 +0,0 @@
from datetime import (
date as _date,
datetime,
time as _time,
timedelta,
tzinfo as _tzinfo,
)
from time import struct_time
from typing import (
ClassVar,
TypeVar,
overload,
)
import numpy as np
from pandas._libs.tslibs import (
BaseOffset,
NaTType,
Period,
Tick,
Timedelta,
)
_DatetimeT = TypeVar("_DatetimeT", bound=datetime)
def integer_op_not_supported(obj: object) -> TypeError: ...
class Timestamp(datetime):
min: ClassVar[Timestamp]
max: ClassVar[Timestamp]
resolution: ClassVar[Timedelta]
value: int # np.int64
def __new__(
cls: type[_DatetimeT],
ts_input: int | np.integer | float | str | _date | datetime | np.datetime64 = ...,
freq: int | None | str | BaseOffset = ...,
tz: str | _tzinfo | None | int = ...,
unit: str | int | None = ...,
year: int | None = ...,
month: int | None = ...,
day: int | None = ...,
hour: int | None = ...,
minute: int | None = ...,
second: int | None = ...,
microsecond: int | None = ...,
nanosecond: int | None = ...,
tzinfo: _tzinfo | None = ...,
*,
fold: int | None = ...,
) -> _DatetimeT: ...
# GH 46171
# While Timestamp can return pd.NaT, having the constructor return
# a Union with NaTType makes things awkward for users of pandas
def _set_freq(self, freq: BaseOffset | None) -> None: ...
@property
def year(self) -> int: ...
@property
def month(self) -> int: ...
@property
def day(self) -> int: ...
@property
def hour(self) -> int: ...
@property
def minute(self) -> int: ...
@property
def second(self) -> int: ...
@property
def microsecond(self) -> int: ...
@property
def tzinfo(self) -> _tzinfo | None: ...
@property
def tz(self) -> _tzinfo | None: ...
@property
def fold(self) -> int: ...
@classmethod
def fromtimestamp(cls: type[_DatetimeT], t: float, tz: _tzinfo | None = ...) -> _DatetimeT: ...
@classmethod
def utcfromtimestamp(cls: type[_DatetimeT], t: float) -> _DatetimeT: ...
@classmethod
def today(cls: type[_DatetimeT], tz: _tzinfo | str | None = ...) -> _DatetimeT: ...
@classmethod
def fromordinal(
cls: type[_DatetimeT],
ordinal: int,
freq: str | BaseOffset | None = ...,
tz: _tzinfo | str | None = ...,
) -> _DatetimeT: ...
@classmethod
def now(cls: type[_DatetimeT], tz: _tzinfo | str | None = ...) -> _DatetimeT: ...
@classmethod
def utcnow(cls: type[_DatetimeT]) -> _DatetimeT: ...
# error: Signature of "combine" incompatible with supertype "datetime"
@classmethod
def combine(cls, date: _date, time: _time) -> datetime: ... # type: ignore[override]
@classmethod
def fromisoformat(cls: type[_DatetimeT], date_string: str) -> _DatetimeT: ...
def strftime(self, format: str) -> str: ...
def __format__(self, fmt: str) -> str: ...
def toordinal(self) -> int: ...
def timetuple(self) -> struct_time: ...
def timestamp(self) -> float: ...
def utctimetuple(self) -> struct_time: ...
def date(self) -> _date: ...
def time(self) -> _time: ...
def timetz(self) -> _time: ...
def replace(
self,
year: int = ...,
month: int = ...,
day: int = ...,
hour: int = ...,
minute: int = ...,
second: int = ...,
microsecond: int = ...,
tzinfo: _tzinfo | None = ...,
fold: int = ...,
) -> Timestamp: ...
def astimezone(self: _DatetimeT, tz: _tzinfo | None = ...) -> _DatetimeT: ...
def ctime(self) -> str: ...
def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ...
@classmethod
def strptime(cls, date_string: str, format: str) -> datetime: ...
def utcoffset(self) -> timedelta | None: ...
def tzname(self) -> str | None: ...
def dst(self) -> timedelta | None: ...
def __le__(self, other: datetime) -> bool: ... # type: ignore[override]
def __lt__(self, other: datetime) -> bool: ... # type: ignore[override]
def __ge__(self, other: datetime) -> bool: ... # type: ignore[override]
def __gt__(self, other: datetime) -> bool: ... # type: ignore[override]
# error: Signature of "__add__" incompatible with supertype "date"/"datetime"
@overload # type: ignore[override]
def __add__(self, other: np.ndarray) -> np.ndarray: ...
@overload
def __add__(self: _DatetimeT, other: timedelta | np.timedelta64 | Tick) -> _DatetimeT: ...
def __radd__(self: _DatetimeT, other: timedelta) -> _DatetimeT: ...
@overload # type: ignore[override]
def __sub__(self, other: datetime) -> Timedelta: ...
@overload
def __sub__(self: _DatetimeT, other: timedelta | np.timedelta64 | Tick) -> _DatetimeT: ...
def __hash__(self) -> int: ...
def weekday(self) -> int: ...
def isoweekday(self) -> int: ...
def isocalendar(self) -> tuple[int, int, int]: ...
@property
def is_leap_year(self) -> bool: ...
@property
def is_month_start(self) -> bool: ...
@property
def is_quarter_start(self) -> bool: ...
@property
def is_year_start(self) -> bool: ...
@property
def is_month_end(self) -> bool: ...
@property
def is_quarter_end(self) -> bool: ...
@property
def is_year_end(self) -> bool: ...
def to_pydatetime(self, warn: bool = ...) -> datetime: ...
def to_datetime64(self) -> np.datetime64: ...
def to_period(self, freq: BaseOffset | str | None = ...) -> Period: ...
def to_julian_date(self) -> np.float64: ...
@property
def asm8(self) -> np.datetime64: ...
def tz_convert(self: _DatetimeT, tz: _tzinfo | str | None) -> _DatetimeT: ...
# TODO: could return NaT?
def tz_localize(
self: _DatetimeT,
tz: _tzinfo | str | None,
ambiguous: str = ...,
nonexistent: str = ...,
) -> _DatetimeT: ...
def normalize(self: _DatetimeT) -> _DatetimeT: ...
# TODO: round/floor/ceil could return NaT?
def round(self: _DatetimeT, freq: str, ambiguous: bool | str = ..., nonexistent: str = ...) -> _DatetimeT: ...
def floor(self: _DatetimeT, freq: str, ambiguous: bool | str = ..., nonexistent: str = ...) -> _DatetimeT: ...
def ceil(self: _DatetimeT, freq: str, ambiguous: bool | str = ..., nonexistent: str = ...) -> _DatetimeT: ...
def day_name(self, locale: str | None = ...) -> str: ...
def month_name(self, locale: str | None = ...) -> str: ...
@property
def day_of_week(self) -> int: ...
@property
def dayofweek(self) -> int: ...
@property
def day_of_month(self) -> int: ...
@property
def day_of_year(self) -> int: ...
@property
def dayofyear(self) -> int: ...
@property
def quarter(self) -> int: ...
@property
def week(self) -> int: ...
def to_numpy(self, dtype: np.dtype | None = ..., copy: bool = ...) -> np.datetime64: ...
@property
def _date_repr(self) -> str: ...
@property
def days_in_month(self) -> int: ...
@property
def daysinmonth(self) -> int: ...
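
A short illustrative sketch (not part of the deleted stub) of the Timestamp methods declared above; the timestamp is an arbitrary example.

import pandas as pd

ts = pd.Timestamp("2022-06-02 18:30:05", tz="US/Pacific")
print(ts.day_name())                    # 'Thursday'
print(ts.tz_convert("UTC"))             # 2022-06-03 01:30:05+00:00
print(ts.floor("H"))                    # 2022-06-02 18:00:00-07:00
print(ts - pd.Timestamp("2022-06-01", tz="US/Pacific"))   # Timedelta('1 days 18:30:05')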

@@ -1,121 +0,0 @@
from pandas.core.frame import DataFrame
from pandas.core.indexes.base import Index
from pandas.core.series import Series
from pandas._typing import FilePathOrBuffer as FilePathOrBuffer, FrameOrSeries as FrameOrSeries
from typing import Any, List, Optional, Union
lzma = ...
N: int = ...
K: int = ...
def set_testing_mode() -> None: ...
def reset_testing_mode() -> None: ...
def reset_display_options() -> None: ...
def round_trip_pickle(obj, path: Optional[FilePathOrBuffer]=...) -> FrameOrSeries: ...
def round_trip_pathlib(writer, reader, path: Optional[str]=...) : ...
def round_trip_localpath(writer, reader, path: Optional[str]=...) : ...
def decompress_file(path, compression) -> None: ...
def write_to_compressed(compression, path, data, dest: str = ...) -> None: ...
def assert_almost_equal(left, right, check_dtype: Union[bool, str]=..., check_less_precise: Union[bool, int]=..., **kwargs) : ...
def assert_dict_equal(left, right, compare_keys: bool=...) : ...
def randbool(size=..., p: float=...) : ...
RANDS_CHARS = ...
RANDU_CHARS = ...
def rands_array(nchars, size, dtype: str = ...): ...
def randu_array(nchars, size, dtype: str = ...): ...
def rands(nchars): ...
def randu(nchars): ...
def close(fignum = ...) -> None: ...
def ensure_clean(filename = ..., return_filelike: bool = ...) -> None: ...
def ensure_clean_dir() -> None: ...
def ensure_safe_environment_variables() -> None: ...
def equalContents(arr1, arr2) -> bool: ...
def assert_index_equal(left: Index[Any], right: Index[Any]) -> None: ...
def assert_class_equal(left, right, exact: Union[bool, str]=..., obj=...) : ...
def assert_attr_equal(attr, left, right, obj: str = ...): ...
def assert_is_valid_plot_return_object(objs) -> None: ...
def isiterable(obj) -> bool: ...
def assert_is_sorted(seq) -> None: ...
def assert_categorical_equal(left, right, check_dtype: bool = ..., check_category_order: bool = ..., obj: str = ...) -> None: ...
def assert_interval_array_equal(left, right, exact: str = ..., obj: str = ...) -> None: ...
def assert_period_array_equal(left, right, obj: str = ...) -> None: ...
def assert_datetime_array_equal(left, right, obj: str = ...) -> None: ...
def assert_timedelta_array_equal(left, right, obj: str = ...) -> None: ...
def raise_assert_detail(obj, message, left, right, diff = ...) -> None: ...
def assert_numpy_array_equal(left, right, strict_nan: bool = ..., check_dtype: bool = ..., err_msg = ..., check_same = ..., obj: str = ...): ...
def assert_extension_array_equal(
left, right, check_dtype: bool = ..., check_less_precise: bool = ..., check_exact: bool = ...,
) -> None: ...
def assert_series_equal(left: Series, right: Series,
check_dtype: bool = ...,
check_index_type: bool|str = ...,
check_series_type: bool = ...,
check_less_precise: bool|int = ...,
check_names : bool = ...,
check_exact: bool = ...,
check_datetimelike_compat: bool = ...,
check_categorical: bool = ...,
check_category_order: bool = ...,
check_freq: bool = ...,
check_flags: bool = ...,
rtol: float = ...,
atol: float = ...,
obj: str = ...,
*,
check_index: bool = ...) -> None: ...
def assert_frame_equal(left: DataFrame, right: DataFrame, check_like: Optional[bool] = ...) -> None: ...
def assert_equal(left, right, **kwargs) -> None: ...
def box_expected(expected, box_cls, transpose: bool = ...): ...
def to_array(obj): ...
def assert_sp_array_equal(left, right, check_dtype: bool = ..., check_kind: bool = ..., check_fill_value: bool = ..., consolidate_block_indices: bool = ...) -> None: ...
def assert_contains_all(iterable, dic) -> None: ...
def assert_copy(iter1, iter2, **eql_kwargs) -> None: ...
def getCols(k): ...
def makeStringIndex(k: int = ..., name = ...): ...
def makeUnicodeIndex(k: int = ..., name = ...): ...
def makeCategoricalIndex(k: int = ..., n: int = ..., name = ..., **kwargs): ...
def makeIntervalIndex(k: int = ..., name = ..., **kwargs): ...
def makeBoolIndex(k: int = ..., name = ...): ...
def makeIntIndex(k: int = ..., name = ...): ...
def makeUIntIndex(k: int = ..., name = ...): ...
def makeRangeIndex(k: int = ..., name = ..., **kwargs): ...
def makeFloatIndex(k: int = ..., name = ...): ...
def makeDateIndex(k: int = ..., freq: str = ..., name = ..., **kwargs): ...
def makeTimedeltaIndex(k: int = ..., freq: str = ..., name = ..., **kwargs): ...
def makePeriodIndex(k: int = ..., name = ..., **kwargs): ...
def makeMultiIndex(k: int = ..., names = ..., **kwargs): ...
def all_index_generator(k: int = ...) -> None: ...
def index_subclass_makers_generator() -> None: ...
def all_timeseries_index_generator(k: int = ...) -> None: ...
def makeFloatSeries(name = ...): ...
def makeStringSeries(name = ...): ...
def makeObjectSeries(name = ...): ...
def getSeriesData(): ...
def makeTimeSeries(nper = ..., freq: str = ..., name = ...): ...
def makePeriodSeries(nper = ..., name = ...): ...
def getTimeSeriesData(nper = ..., freq: str = ...): ...
def getPeriodData(nper = ...): ...
def makeTimeDataFrame(nper = ..., freq: str = ...): ...
def makeDataFrame(): ...
def getMixedTypeDict(): ...
def makeMixedDataFrame(): ...
def makePeriodFrame(nper = ...): ...
def makeCustomIndex(nentries, nlevels, prefix: str = ..., names: bool = ..., ndupe_l = ..., idx_type = ...): ...
def makeCustomDataframe(nrows, ncols, c_idx_names: bool = ..., r_idx_names: bool = ..., c_idx_nlevels: int = ..., r_idx_nlevels: int = ..., data_gen_f = ..., c_ndupe_l = ..., r_ndupe_l = ..., dtype = ..., c_idx_type = ..., r_idx_type = ...): ...
def makeMissingCustomDataframe(nrows, ncols, density: float = ..., random_state = ..., c_idx_names: bool = ..., r_idx_names: bool = ..., c_idx_nlevels: int = ..., r_idx_nlevels: int = ..., data_gen_f = ..., c_ndupe_l = ..., r_ndupe_l = ..., dtype = ..., c_idx_type = ..., r_idx_type = ...): ...
def makeMissingDataframe(density: float = ..., random_state = ...): ...
def optional_args(decorator): ...
def can_connect(url, error_classes = ...): ...
def network(t, url: str = ..., raise_on_error = ..., check_before_test: bool = ..., error_classes = ..., skip_errnos = ..., _skip_on_messages = ...): ...
with_connectivity_check = network
def assert_produces_warning(expected_warning = ..., filter_level: str = ..., clear = ..., check_stacklevel: bool = ..., raise_on_extra_warnings: bool = ...) -> None: ...
def with_csv_dialect(name, **kwargs) -> None: ...
def use_numexpr(use, min_elements = ...) -> None: ...
def test_parallel(num_threads: int = ..., kwargs_list = ...): ...
def set_timezone(tz: str) : ...
def convert_rows_list_to_csv_str(rows_list: List[str]) : ...
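
For context only (not part of the deleted stub): these helpers live in the private pandas._testing module and are typically used as below; the frames are trivial examples.

import pandas as pd
import pandas._testing as tm

left = pd.DataFrame({"a": [1, 2]})
right = pd.DataFrame({"a": [1, 2]})
tm.assert_frame_equal(left, right)               # passes silently when equal
tm.assert_series_equal(left["a"], right["a"])
tm.assert_index_equal(left.index, right.index)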

@@ -1,141 +0,0 @@
import datetime
from io import BufferedIOBase, RawIOBase, TextIOBase, TextIOWrapper
from mmap import mmap
import numpy as np
from numpy import typing as npt
import sys
from os import PathLike
from pathlib import Path
from typing import (
Any,
AnyStr,
Callable,
Collection,
Dict,
Hashable,
IO,
List,
Mapping,
NewType,
Optional,
Protocol,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
from pandas.core.generic import NDFrame
from pandas._libs.tslibs import Period, Timedelta as Timedelta, Timestamp as Timestamp
from pandas.core.arrays import ExtensionArray as ExtensionArray
from pandas.core.series import Series as Series
from pandas.core.frame import DataFrame as DataFrame
from pandas.core.indexes.base import Index as Index
from pandas.core.dtypes.dtypes import ExtensionDtype
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
ArrayLike = Union[ExtensionArray, np.ndarray]
AnyArrayLike = Union[Index, Series]
PythonScalar = Union[str, int, float, bool, complex]
DatetimeLikeScalar = TypeVar("DatetimeLikeScalar", Period, Timestamp, Timedelta)
PandasScalar = Union[bytes, datetime.date, datetime.datetime, datetime.timedelta]
# Scalar = Union[PythonScalar, PandasScalar]
# dtypes
NpDtype = Union[str, np.dtype[np.generic], Type[Union[str, float, int, complex, bool, object]]]
Dtype = Union[ExtensionDtype, NpDtype]
AstypeArg = Union[ExtensionDtype, npt.DTypeLike]
# DtypeArg specifies all allowable dtypes in a function's dtype argument
DtypeArg = Union[Dtype, Dict[Any, Dtype]]
DtypeObj = Union[np.dtype[np.generic], "ExtensionDtype"]
# filenames and file-like-objects
AnyStr_cov = TypeVar("AnyStr_cov", str, bytes, covariant=True)
AnyStr_con = TypeVar("AnyStr_con", str, bytes, contravariant=True)
class BaseBuffer(Protocol): ...
class ReadBuffer(BaseBuffer, Protocol[AnyStr_cov]): ...
class WriteBuffer(BaseBuffer, Protocol[AnyStr_cov]): ...
FilePath = Union[str, PathLike[str]]
Buffer = Union[IO[AnyStr], RawIOBase, BufferedIOBase, TextIOBase, TextIOWrapper, mmap]
FileOrBuffer = Union[str, Buffer[AnyStr]]
FilePathOrBuffer = Union["PathLike[str]", FileOrBuffer[AnyStr]]
FilePathOrBytesBuffer = Union[PathLike[str], WriteBuffer[bytes]]
FrameOrSeries = TypeVar("FrameOrSeries", bound=NDFrame)
FrameOrSeriesUnion = Union[DataFrame, Series]
Axis = Union[str, int]
IndexLevel = Union[Hashable, Sequence[Hashable]]
Label = Optional[Hashable]
Level = Union[Hashable, int]
Ordered = Optional[bool]
JSONSerializable = Union[PythonScalar, List, Dict]
Axes = Collection
Renamer = Union[Mapping[Any, Label], Callable[[Any], Label]]
T = TypeVar("T")
FuncType = Callable[..., Any]
F = TypeVar("F", bound=FuncType)
AggFuncTypeBase = Union[Callable, str]
AggFuncTypeDict = Dict[Hashable, Union[AggFuncTypeBase, List[AggFuncTypeBase]]]
AggFuncType = Union[
AggFuncTypeBase,
List[AggFuncTypeBase],
AggFuncTypeDict,
]
num = Union[int, float]
SeriesAxisType = Literal["index", 0] # Restricted subset of _AxisType for series
AxisType = Literal["columns", "index", 0, 1]
DtypeNp = TypeVar("DtypeNp", bound=np.dtype[np.generic])
KeysArgType = Any
ListLike = TypeVar("ListLike", Sequence, np.ndarray, "Series")
StrLike = Union[str, np.str_]
Scalar = Union[str, bytes, datetime.date, datetime.datetime, datetime.timedelta, bool, int, float, complex, Timestamp, Timedelta]
# Refine the next 3 in 3.9 to use the specialized type.
np_ndarray_int64 = npt.NDArray[np.int64]
np_ndarray_bool = npt.NDArray[np.bool_]
np_ndarray_str = npt.NDArray[np.str_]
IndexType = Union[slice, np_ndarray_int64, Index, List[int], Series[int]]
MaskType = Union[Series[bool], np_ndarray_bool, List[bool]]
# Scratch types for generics
S1 = TypeVar(
"S1",
str,
bytes,
datetime.date,
datetime.datetime,
datetime.timedelta,
bool,
int,
float,
complex,
Timestamp,
Timedelta,
np.datetime64,
)
T1 = TypeVar("T1", str, int, np.int64, np.uint64, np.float64, float, np.dtype[np.generic])
T2 = TypeVar("T2", str, int)
# Interval closed type
IntervalClosedType = Literal["left", "right", "both", "neither"]
DateTimeErrorChoices = Literal["ignore", "raise", "coerce"]
# Shared by functions such as drop and astype
IgnoreRaise = Literal["ignore", "raise"]
# for arbitrary kwargs passed during reading/writing files
StorageOptions = Optional[Dict[str, Any]]
# compression keywords and compression
CompressionDict = Dict[str, Any]
CompressionOptions = Optional[Union[Literal["infer", "gzip", "bz2", "zip", "xz", "zstd"], CompressionDict]]

@@ -1,8 +0,0 @@
import pickle as pkl
from pandas import DataFrame as DataFrame, Index as Index, Series as Series
from typing import Optional
def load_reduce(self) -> None: ...
def load_newobj(self) -> None: ...
def load_newobj_ex(self) -> None: ...
def load(fh, encoding: Optional[str] = ..., is_verbose: bool = ...): ...

@@ -1,16 +0,0 @@
from typing import Any
class DirNamesMixin:
def __dir__(self): ...
class PandasDelegate: ...
def delegate_names(delegate: Any, accessors: Any, typ: str, overwrite: bool=...) -> Any: ...
class CachedAccessor:
def __init__(self, name: str, accessor: Any) -> None: ...
def __get__(self, obj: Any, cls: Any): ...
def register_dataframe_accessor(name: Any): ...
def register_series_accessor(name: Any): ...
def register_index_accessor(name: Any): ...

@@ -1,48 +0,0 @@
from __future__ import annotations
import numpy as np
from pandas.core.dtypes.generic import ABCIndex as ABCIndex
from pandas.core.indexes.base import Index
from typing import Any, Tuple, Union
def unique(values): ...
unique1d = unique
def isin(comps, values) -> np.ndarray: ...
def factorize(
values: Any, sort: bool = ..., na_sentinel: int = ..., size_hint: Union[int, None] = None,
) -> Tuple[np.ndarray, Union[np.ndarray, Index]]: ...
def value_counts(values, sort: bool=..., ascending: bool=..., normalize: bool=..., bins=..., dropna: bool=...) -> Series: ...
def duplicated(values, keep=...) -> np.ndarray: ...
def mode(values, dropna: bool=...) -> Series: ...
def rank(values, axis: int=..., method: str=..., na_option: str=..., ascending: bool=..., pct: bool=...) : ...
def checked_add_with_arr(arr, b, arr_mask = ..., b_mask = ...): ...
def quantile(x, q, interpolation_method: str = ...): ...
class SelectN:
obj = ...
n = ...
keep = ...
def __init__(self, obj, n: int, keep: str) -> None: ...
def nlargest(self): ...
def nsmallest(self): ...
@staticmethod
def is_valid_dtype_n_method(dtype) -> bool: ...
class SelectNSeries(SelectN):
def compute(self, method): ...
class SelectNFrame(SelectN):
columns = ...
def __init__(self, obj, n: int, keep: str, columns) -> None: ...
def compute(self, method): ...
def take(arr, indices, axis: int=..., allow_fill: bool=..., fill_value=...) : ...
def take_nd(arr, indexer, axis: int=..., out=..., fill_value=..., allow_fill: bool=...) : ...
take_1d = take_nd
def take_2d_multi(arr, indexer, fill_value = ...): ...
def searchsorted(arr, value, side: str = ..., sorter = ...): ...
def diff(arr, n: int, axis: int=..., stacklevel=...) : ...
def safe_sort(values, codes=..., na_sentinel: int=..., assume_unique: bool=..., verify: bool=...) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]: ...
from pandas import Series as Series
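
An illustrative aside (not part of the deleted stub) showing the public entry points to the algorithms declared above; inputs are arbitrary examples.

import pandas as pd

codes, uniques = pd.factorize(["b", "a", "b", "c"])
print(codes)                                      # [0 1 0 2]
print(uniques)                                    # ['b' 'a' 'c']
print(pd.unique([1, 1, 2]))                       # [1 2]
print(pd.Series(["x", "x", "y"]).value_counts())  # x: 2, y: 1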

@@ -1,52 +0,0 @@
from pandas._libs import NaT as NaT, Period as Period, Timedelta as Timedelta
from pandas._libs.tslibs import Timestamp as Timestamp
from pandas._libs.missing import NA as NA
from pandas.core.arrays.boolean import BooleanDtype as BooleanDtype
from pandas.core.arrays.integer import (
Int16Dtype as Int16Dtype,
Int32Dtype as Int32Dtype,
Int64Dtype as Int64Dtype,
Int8Dtype as Int8Dtype,
UInt16Dtype as UInt16Dtype,
UInt32Dtype as UInt32Dtype,
UInt64Dtype as UInt64Dtype,
UInt8Dtype as UInt8Dtype,
)
from pandas.core.arrays.string_ import StringDtype as StringDtype
from pandas.core.construction import array as array
from pandas.core.dtypes.dtypes import (
CategoricalDtype as CategoricalDtype,
DatetimeTZDtype as DatetimeTZDtype,
IntervalDtype as IntervalDtype,
PeriodDtype as PeriodDtype,
)
from pandas.core.dtypes.missing import isna as isna, isnull as isnull, notna as notna, notnull as notnull
from pandas.core.indexes.datetimes import bdate_range as bdate_range, date_range as date_range
from pandas.core.tools import to_datetime as to_datetime
from pandas.core.tools.numeric import to_numeric as to_numeric
from pandas.core.tools.timedeltas import to_timedelta as to_timedelta
from pandas.io.formats.format import set_eng_float_format as set_eng_float_format
from pandas.core.indexes.interval import Interval as Interval, interval_range as interval_range
from pandas.core.indexes.period import period_range as period_range
from pandas.core.indexes.timedeltas import timedelta_range as timedelta_range
from pandas.core.arrays import Categorical as Categorical
from pandas.core.groupby import Grouper as Grouper, NamedAgg as NamedAgg
from pandas.core.indexes.api import (
CategoricalIndex as CategoricalIndex,
DatetimeIndex as DatetimeIndex,
Float64Index as Float64Index,
Index as Index,
Int64Index as Int64Index,
IntervalIndex as IntervalIndex,
MultiIndex as MultiIndex,
PeriodIndex as PeriodIndex,
RangeIndex as RangeIndex,
TimedeltaIndex as TimedeltaIndex,
UInt64Index as UInt64Index,
)
from pandas.core.indexing import IndexSlice as IndexSlice
from pandas.core.series import Series as Series
from pandas.core.frame import DataFrame as DataFrame
from pandas.tseries.offsets import DateOffset as DateOffset
from pandas.core.algorithms import factorize as factorize, unique as unique, value_counts as value_counts

@@ -1,38 +0,0 @@
from typing import Any, Protocol, Tuple
from pandas._typing import FrameOrSeries, DataFrame
class OpsMixinProtocol(Protocol): ...
class OpsMixin:
def __eq__(self: OpsMixinProtocol, other: Any) -> DataFrame: ... # type: ignore
def __ne__(self: OpsMixinProtocol, other: Any) -> DataFrame: ... # type: ignore
def __lt__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __le__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __gt__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __ge__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
# -------------------------------------------------------------
# Logical Methods
def __and__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rand__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __or__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __ror__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __xor__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rxor__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
# -------------------------------------------------------------
# Arithmetic Methods
def __add__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __radd__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __sub__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rsub__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __mul__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rmul__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __truediv__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rtruediv__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __floordiv__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rfloordiv__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __mod__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rmod__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __divmod__(self: OpsMixinProtocol, other: DataFrame) -> Tuple[DataFrame, DataFrame]: ...
def __rdivmod__(self: OpsMixinProtocol, other: DataFrame) -> Tuple[DataFrame, DataFrame]: ...
def __pow__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...
def __rpow__(self: OpsMixinProtocol, other: Any) -> DataFrame: ...

@@ -1,14 +0,0 @@
from .base import (
ExtensionArray as ExtensionArray,
ExtensionOpsMixin as ExtensionOpsMixin,
ExtensionScalarOpsMixin as ExtensionScalarOpsMixin)
from .boolean import BooleanArray as BooleanArray
from .categorical import Categorical as Categorical
from .datetimes import DatetimeArray as DatetimeArray
from .integer import IntegerArray as IntegerArray, integer_array as integer_array
from .interval import IntervalArray as IntervalArray
from .numpy_ import PandasArray as PandasArray, PandasDtype as PandasDtype
from .period import PeriodArray as PeriodArray, period_array as period_array
from .sparse import SparseArray as SparseArray
from .string_ import StringArray as StringArray
from .timedeltas import TimedeltaArray as TimedeltaArray

@@ -1,39 +0,0 @@
import numpy as np
from pandas._typing import ArrayLike as ArrayLike
from pandas.core.dtypes.dtypes import ExtensionDtype as ExtensionDtype
from pandas.core.dtypes.generic import ABCExtensionArray as ABCExtensionArray
from typing import Sequence, Tuple, Union
def try_cast_to_ea(cls_or_instance, obj, dtype=...): ...
class ExtensionArray:
def __getitem__(self, item) -> None: ...
def __setitem__(self, key: Union[int, slice, np.ndarray], value) -> None: ...
def __len__(self) -> int: ...
def __iter__(self): ...
def to_numpy(self, dtype=..., copy: bool = ..., na_value=...): ...
@property
def dtype(self) -> ExtensionDtype: ...
@property
def shape(self) -> Tuple[int, ...]: ...
@property
def ndim(self) -> int: ...
@property
def nbytes(self) -> int: ...
def astype(self, dtype, copy: bool = ...): ...
def isna(self) -> ArrayLike: ...
def argsort(self, ascending: bool = ..., kind: str = ..., *args, **kwargs) -> np.ndarray: ...
def fillna(self, value=..., method=..., limit=...): ...
def dropna(self): ...
def shift(self, periods: int = ..., fill_value: object = ...) -> ABCExtensionArray: ...
def unique(self): ...
def searchsorted(self, value, side: str = ..., sorter=...): ...
def factorize(self, na_sentinel: int = ...) -> Tuple[np.ndarray, ABCExtensionArray]: ...
def repeat(self, repeats, axis=...): ...
def take(self, indices: Sequence[int], allow_fill: bool = ..., fill_value=...) -> ABCExtensionArray: ...
def copy(self) -> ABCExtensionArray: ...
def view(self, dtype=...) -> Union[ABCExtensionArray, np.ndarray]: ...
def ravel(self, order=...) -> ABCExtensionArray: ...
class ExtensionOpsMixin: ...
class ExtensionScalarOpsMixin(ExtensionOpsMixin): ...


@ -1,30 +0,0 @@
from __future__ import annotations
import numpy as np
from .masked import BaseMaskedArray as BaseMaskedArray
from pandas._typing import Scalar as Scalar
from pandas.core.dtypes.base import ExtensionDtype as ExtensionDtype
from typing import Type
class BooleanDtype(ExtensionDtype):
name: str = ...
@property
def na_value(self) -> Scalar: ...
@property
def type(self) -> Type: ...
@property
def kind(self) -> str: ...
@classmethod
def construct_array_type(cls) -> Type[BooleanArray]: ...
def __from_arrow__(self, array): ...
def coerce_to_array(values, mask=..., copy: bool=...) : ...
class BooleanArray(BaseMaskedArray):
def __init__(self, values: np.ndarray, mask: np.ndarray, copy: bool=...) -> None: ...
@property
def dtype(self): ...
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
def __setitem__(self, key, value) -> None: ...
def astype(self, dtype, copy: bool = ...): ...
def any(self, skipna: bool=..., **kwargs) : ...
def all(self, skipna: bool=..., **kwargs) : ...
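
As an illustrative sketch of the nullable boolean API these stubs describe (example values are made up; assumes pandas 1.x or later): any()/all() default to skipna=True, and with skipna=False Kleene logic can return pd.NA.

import pandas as pd

flags = pd.array([True, None, True], dtype="boolean")   # BooleanArray with BooleanDtype
print(flags.any())               # True
print(flags.all())               # True  -> the missing value is skipped
print(flags.all(skipna=False))   # <NA>  -> result is unknown under Kleene logic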


@ -1,161 +0,0 @@
import numpy as np
from pandas._typing import ArrayLike as ArrayLike, Dtype as Dtype, Ordered as Ordered, Scalar as Scalar, np_ndarray_bool
from pandas.core.accessor import PandasDelegate as PandasDelegate
from pandas.core.arrays.base import ExtensionArray as ExtensionArray
from pandas.core.base import NoNewAttributesMixin as NoNewAttributesMixin
from pandas.core.base import PandasObject as PandasObject
from pandas.core.dtypes.dtypes import CategoricalDtype as CategoricalDtype
from pandas.core.indexes.base import Index
from typing import List, Literal, Optional, Sequence, Union, overload
def contains(cat, key, container): ...
class Categorical(ExtensionArray, PandasObject):
__array_priority__: int = ...
def __init__(
self,
values: Sequence,
categories=...,
ordered: Optional[bool] = ...,
dtype: Optional[CategoricalDtype] = ...,
fastpath: bool = ...,
) -> None: ...
@property
def categories(self): ...
@categories.setter
def categories(self, categories) -> None: ...
@property
def ordered(self) -> Ordered: ...
@property
def dtype(self) -> CategoricalDtype: ...
def astype(self, dtype: Dtype, copy: bool = ...) -> ArrayLike: ...
def size(self) -> int: ...
def itemsize(self) -> int: ...
def tolist(self) -> List[Scalar]: ...
to_list = ...
@classmethod
def from_codes(
cls,
codes: Sequence[int],
categories: Optional[Index] = ...,
ordered: Optional[bool] = ...,
dtype: Optional[CategoricalDtype] = ...,
fastpath: bool = ...,
) -> Categorical: ...
@property
def codes(self) -> List[int]: ...
@overload
def set_ordered(self, value, inplace: Literal[True]) -> None: ...
@overload
def set_ordered(self, value, inplace: Literal[False]) -> Categorical: ...
@overload
def set_ordered(self, value, inplace: bool) -> Union[None, Categorical]: ...
@overload
def as_ordered(self, inplace: Literal[True]) -> None: ...
@overload
def as_ordered(self, inplace: Literal[False]) -> Categorical: ...
@overload
def as_ordered(self, inplace: bool) -> Union[None, Categorical]: ...
@overload
def as_unordered(self, inplace: Literal[True]) -> None: ...
@overload
def as_unordered(self, inplace: Literal[False] = ...) -> Categorical: ...
@overload
def set_categories(self, new_categories, ordered=..., rename: bool = ..., *, inplace: Literal[True]) -> None: ...
@overload
def set_categories(self, new_categories, ordered=..., rename: bool = ..., inplace: Literal[False] = ...) -> Categorical: ...
@overload
def set_categories(
self, new_categories, ordered=..., rename: bool = ..., inplace: bool = ...
) -> Union[None, Categorical]: ...
@overload
def rename_categories(self, new_categories, inplace: Literal[True]) -> None: ...
@overload
def rename_categories(self, new_categories, inplace: Literal[False] = ...) -> Categorical: ...
@overload
def rename_categories(self, new_categories, inplace: bool = ...) -> Union[None, Categorical]: ...
@overload
def reorder_categories(self, new_categories, ordered=..., *, inplace: Literal[True]) -> None: ...
@overload
def reorder_categories(self, new_categories, ordered=..., inplace: Literal[False] = ...) -> Categorical: ...
@overload
def reorder_categories(self, new_categories, ordered=..., inplace: bool = ...) -> Union[None, Categorical]: ...
@overload
def add_categories(self, new_categories, inplace: Literal[True]) -> None: ...
@overload
def add_categories(self, new_categories, inplace: Literal[False] = ...) -> Categorical: ...
@overload
def add_categories(self, new_categories, inplace: bool = ...) -> Union[None, Categorical]: ...
@overload
def remove_categories(self, removals, inplace: Literal[True]) -> None: ...
@overload
def remove_categories(self, removals, inplace: Literal[False] = ...) -> Categorical: ...
@overload
def remove_categories(self, removals, inplace: bool = ...) -> Union[None, Categorical]: ...
@overload
def remove_unused_categories(self, inplace: Literal[True]) -> None: ...
@overload
def remove_unused_categories(self, inplace: Literal[False] = ...) -> Categorical: ...
@overload
def remove_unused_categories(self, inplace: bool = ...) -> Union[None, Categorical]: ...
def map(self, mapper): ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other) -> bool: ...
def __lt__(self, other) -> bool: ...
def __gt__(self, other) -> bool: ...
def __le__(self, other) -> bool: ...
def __ge__(self, other) -> bool: ...
@property
def shape(self): ...
def shift(self, periods, fill_value=...): ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
@property
def T(self): ...
@property
def nbytes(self) -> int: ...
def memory_usage(self, deep: bool = ...): ...
def searchsorted(self, value, side: str = ..., sorter=...): ...
def isna(self) -> np_ndarray_bool: ...
def isnull(self) -> np_ndarray_bool: ...
def notna(self) -> np_ndarray_bool: ...
def notnull(self) -> np_ndarray_bool: ...
def put(self, *args, **kwargs) -> None: ...
def dropna(self): ...
def value_counts(self, dropna: bool = ...): ...
def check_for_ordered(self, op) -> None: ...
def argsort(self, ascending: bool = ..., kind: str = ..., *args, **kwargs): ...
def sort_values(self, inplace: bool = ..., ascending: bool = ..., na_position: str = ...): ...
def view(self, dtype=...): ...
def to_dense(self): ...
def fillna(self, value=..., method=..., limit=...): ...
def take(self, indexer, allow_fill: bool = ..., fill_value=...): ...
def take_nd(self, indexer, allow_fill: bool = ..., fill_value=...): ...
def __len__(self) -> int: ...
def __iter__(self): ...
def __contains__(self, key) -> bool: ...
def __getitem__(self, key): ...
def __setitem__(self, key, value) -> None: ...
def min(self, skipna: bool = ...): ...
def max(self, skipna: bool = ...): ...
def mode(self, dropna: bool = ...): ...
def unique(self): ...
def equals(self, other): ...
def is_dtype_equal(self, other): ...
def describe(self): ...
def repeat(self, repeats, axis=...): ...
def isin(self, values): ...
@overload
def replace(self, to_replace, value, inplace: Literal[True]) -> None: ...
@overload
def replace(self, to_replace, value, inplace: Literal[False]) -> Categorical: ...
@overload
def replace(self, to_replace, value, inplace: bool = ...) -> Union[None, Categorical]: ...
class CategoricalAccessor(PandasDelegate, PandasObject, NoNewAttributesMixin):
def __init__(self, data) -> None: ...
@property
def codes(self) -> Sequence[int]: ...
def factorize_from_iterable(values): ...
def factorize_from_iterables(iterables): ...
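
The inplace parameter above is typed with Literal[True]/Literal[False] overloads so a checker can tell a None return from a Categorical one. A minimal sketch with made-up data (inplace left at its default, so the Literal[False] overloads apply and results can be chained):

import pandas as pd

cat = pd.Categorical(["a", "b", "a"], categories=["a", "b", "c"])
ordered = cat.as_ordered()                            # -> Categorical
renamed = ordered.rename_categories(["x", "y", "z"])  # -> Categorical

print(ordered.ordered)                 # True
print(renamed.categories.tolist())     # ['x', 'y', 'z']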


@ -1,81 +0,0 @@
import numpy as np
from pandas._libs import NaT as NaT, NaTType as NaTType, Timestamp as Timestamp
from pandas.core.arrays.base import ExtensionArray as ExtensionArray, ExtensionOpsMixin as ExtensionOpsMixin
from typing import Sequence, Union
class AttributesMixin: ...
class DatelikeOps:
def strftime(self, date_format): ...
class TimelikeOps:
def round(self, freq, ambiguous: str = ..., nonexistent: str = ...): ...
def floor(self, freq, ambiguous: str = ..., nonexistent: str = ...): ...
def ceil(self, freq, ambiguous: str = ..., nonexistent: str = ...): ...
class DatetimeLikeArrayMixin(ExtensionOpsMixin, AttributesMixin, ExtensionArray):
@property
def ndim(self) -> int: ...
@property
def shape(self): ...
def reshape(self, *args, **kwargs): ...
def ravel(self, *args, **kwargs): ...
def __iter__(self): ...
@property
def asi8(self) -> np.ndarray: ...
@property
def nbytes(self): ...
def __array__(self, dtype=...) -> np.ndarray: ...
@property
def size(self) -> int: ...
def __len__(self) -> int: ...
def __getitem__(self, key): ...
def __setitem__(self, key: Union[int, Sequence[int], Sequence[bool], slice], value) -> None: ... # type: ignore[override]
def astype(self, dtype, copy: bool = ...): ...
def view(self, dtype=...): ...
def unique(self): ...
def take(self, indices, allow_fill: bool = ..., fill_value=...): ...
def copy(self): ...
def shift(self, periods: int = ..., fill_value=..., axis: int = ...): ...
def searchsorted(self, value, side: str = ..., sorter=...): ...
def repeat(self, repeats, *args, **kwargs): ...
def value_counts(self, dropna: bool = ...): ...
def map(self, mapper): ...
def isna(self): ...
def fillna(self, value=..., method=..., limit=...): ...
@property
def freq(self): ...
@freq.setter
def freq(self, value) -> None: ...
@property
def freqstr(self): ...
@property
def inferred_freq(self): ...
@property
def resolution(self): ...
__pow__ = ...
__rpow__ = ...
__mul__ = ...
__rmul__ = ...
__truediv__ = ...
__rtruediv__ = ...
__floordiv__ = ...
__rfloordiv__ = ...
__mod__ = ...
__rmod__ = ...
__divmod__ = ...
__rdivmod__ = ...
def __add__(self, other): ...
def __radd__(self, other): ...
def __sub__(self, other): ...
def __rsub__(self, other): ...
def __iadd__(self, other): ...
def __isub__(self, other): ...
def min(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
def max(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
def mean(self, skipna: bool = ...): ...
def validate_periods(periods): ...
def validate_endpoints(closed): ...
def validate_inferred_freq(freq, inferred_freq, freq_infer): ...
def maybe_infer_freq(freq): ...


@ -1,72 +0,0 @@
import numpy as np
from pandas.core.arrays import datetimelike as dtl
from pandas.core.dtypes.dtypes import DatetimeTZDtype as DatetimeTZDtype
from typing import Union
def tz_to_dtype(tz): ...
class DatetimeArray(dtl.DatetimeLikeArrayMixin, dtl.TimelikeOps, dtl.DatelikeOps):
__array_priority__: int = ...
def __init__(self, values, dtype=..., freq=..., copy: bool = ...) -> None: ...
# the type: ignore on dtype() below is carried over from the pandas source
@property
def dtype(self) -> Union[np.dtype, DatetimeTZDtype]: ... # type: ignore[override]
@property
def tz(self): ...
@tz.setter
def tz(self, value) -> None: ...
@property
def tzinfo(self): ...
@property
def is_normalized(self): ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __iter__(self): ...
def astype(self, dtype, copy: bool = ...): ...
def tz_convert(self, tz): ...
def tz_localize(self, tz, ambiguous: str = ..., nonexistent: str = ...): ...
def to_pydatetime(self): ...
def normalize(self): ...
def to_period(self, freq=...): ...
def to_perioddelta(self, freq): ...
def month_name(self, locale=...): ...
def day_name(self, locale=...): ...
@property
def time(self): ...
@property
def timetz(self): ...
@property
def date(self): ...
year = ...
month = ...
day = ...
hour = ...
minute = ...
second = ...
microsecond = ...
nanosecond = ...
weekofyear = ...
week = ...
dayofweek = ...
weekday = ...
dayofyear = ...
quarter = ...
days_in_month = ...
daysinmonth = ...
is_month_start = ...
is_month_end = ...
is_quarter_start = ...
is_quarter_end = ...
is_year_start = ...
is_year_end = ...
is_leap_year = ...
def to_julian_date(self): ...
def sequence_to_dt64ns(
data, dtype=..., copy: bool = ..., tz=..., dayfirst: bool = ..., yearfirst: bool = ..., ambiguous: str = ...
): ...
def objects_to_datetime64ns(
data, dayfirst, yearfirst, utc: bool = ..., errors: str = ..., require_iso8601: bool = ..., allow_object: bool = ...
): ...
def maybe_convert_dtype(data, copy): ...
def maybe_infer_tz(tz, inferred_tz): ...
def validate_tz_from_dtype(dtype, tz): ...
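
A brief, hedged example of the DatetimeArray surface stubbed above (dates are arbitrary; assumes a standard pandas install): tz_localize attaches a timezone, switching the dtype to DatetimeTZDtype, and tz_convert then translates between zones.

import pandas as pd

arr = pd.date_range("2021-01-01", periods=2, freq="D").array   # DatetimeArray
local = arr.tz_localize("Europe/Berlin")
print(local.dtype)                 # datetime64[ns, Europe/Berlin]
print(local.tz_convert("UTC")[0])  # first element as a UTC Timestamp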


@ -1,52 +0,0 @@
from .masked import BaseMaskedArray as BaseMaskedArray
from pandas.core.dtypes.base import ExtensionDtype as ExtensionDtype
from typing import Type
class _IntegerDtype(ExtensionDtype):
name: str
base = ...
type: Type
na_value = ...
def is_signed_integer(self): ...
def is_unsigned_integer(self): ...
def numpy_dtype(self): ...
def kind(self): ...
def itemsize(self): ...
@classmethod
def construct_array_type(cls): ...
def __from_arrow__(self, array): ...
def integer_array(values, dtype = ..., copy: bool = ...): ...
def safe_cast(values, dtype, copy): ...
def coerce_to_array(values, dtype, mask = ..., copy: bool = ...): ...
class IntegerArray(BaseMaskedArray):
def dtype(self): ...
def __init__(self, values, mask, copy: bool = ...) -> None: ...
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
def __setitem__(self, key, value) -> None: ...
def astype(self, dtype, copy: bool = ...): ...
class Int8Dtype:
...
class Int16Dtype:
...
class Int32Dtype:
...
class Int64Dtype:
...
class UInt8Dtype:
...
class UInt16Dtype:
...
class UInt32Dtype:
...
class UInt64Dtype:
...
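
For orientation, a small sketch of the nullable integer arrays described here (values are illustrative): pd.array with an "Int64" dtype builds an IntegerArray whose missing entries are real NA values rather than NaN-coerced floats.

import pandas as pd

ints = pd.array([1, 2, None], dtype="Int64")   # IntegerArray with Int64Dtype
print(ints.dtype)                # Int64
print(ints.isna().tolist())      # [False, False, True]
print(pd.Series(ints).sum())     # 3 -> the missing value is skipped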


@ -1,61 +0,0 @@
from __future__ import annotations
import numpy as np
from pandas._libs.interval import (
Interval as Interval,
IntervalMixin as IntervalMixin,
)
from pandas.core.arrays.base import ExtensionArray as ExtensionArray
from pandas.core.dtypes.generic import ABCExtensionArray as ABCExtensionArray
from pandas._typing import Axis, Scalar, Index as Index
from typing import Optional
class IntervalArray(IntervalMixin, ExtensionArray):
ndim: int = ...
can_hold_na: bool = ...
def __new__(cls, data, closed=..., dtype=..., copy: bool = ..., verify_integrity: bool = ...): ...
@classmethod
def from_breaks(cls, breaks, closed: str = ..., copy: bool = ..., dtype=...): ...
@classmethod
def from_arrays(cls, left, right, closed: str = ..., copy: bool = ..., dtype=...): ...
@classmethod
def from_tuples(cls, data, closed: str = ..., copy: bool = ..., dtype=...): ...
def __iter__(self): ...
def __len__(self) -> int: ...
def __getitem__(self, value): ...
def __setitem__(self, key, value) -> None: ...
def __eq__(self, other): ...
def __ne__(self, other): ...
def fillna(self, value=..., method=..., limit=...): ...
@property
def dtype(self): ...
def astype(self, dtype, copy: bool = ...): ...
def copy(self): ...
def isna(self): ...
@property
def nbytes(self) -> int: ...
@property
def size(self) -> int: ...
def shift(self, periods: int = ..., fill_value: object = ...) -> ABCExtensionArray: ...
def take(self, indices, allow_fill: bool = ..., fill_value=..., axis=..., **kwargs): ...
def value_counts(self, dropna: bool = ...): ...
@property
def left(self) -> Index: ...
@property
def right(self) -> Index: ...
@property
def closed(self) -> bool: ...
def set_closed(self, closed): ...
@property
def length(self) -> Index: ...
@property
def mid(self) -> Index: ...
@property
def is_non_overlapping_monotonic(self) -> bool: ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __arrow_array__(self, type=...): ...
def to_tuples(self, na_tuple: bool = ...): ...
def repeat(self, repeats, axis: Optional[Axis] = ...): ...
def contains(self, other): ...
def overlaps(self, other: Interval) -> bool: ...
def maybe_convert_platform_interval(values): ...
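
A short illustrative sketch of the IntervalArray constructors and predicates stubbed above (break points are made up):

import pandas as pd

bins = pd.arrays.IntervalArray.from_breaks([0, 10, 20, 30], closed="right")
print(bins.left.tolist(), bins.right.tolist())   # [0, 10, 20] [10, 20, 30]
print(bins.contains(15).tolist())                # [False, True, False]
print(bins.overlaps(pd.Interval(5, 12)))         # elementwise overlap check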


@ -1,19 +0,0 @@
import numpy as np
from pandas._typing import Scalar as Scalar
from pandas.core.arrays import ExtensionArray as ExtensionArray, ExtensionOpsMixin as ExtensionOpsMixin
class BaseMaskedArray(ExtensionArray, ExtensionOpsMixin):
def __getitem__(self, item): ...
def __iter__(self) : ...
def __len__(self) -> int: ...
def __invert__(self): ...
def to_numpy(self, dtype=..., copy=..., na_value: Scalar=...) : ...
__array_priority__: int = ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __arrow_array__(self, type = ...): ...
def isna(self): ...
@property
def nbytes(self) -> int: ...
def take(self, indexer, allow_fill: bool = ..., fill_value = ...): ...
def copy(self): ...
def value_counts(self, dropna: bool = ...): ...


@ -1,56 +0,0 @@
import numpy as np
from numpy.lib.mixins import NDArrayOperatorsMixin
from pandas.core.arrays.base import ExtensionArray as ExtensionArray, ExtensionOpsMixin as ExtensionOpsMixin
from pandas.core.dtypes.dtypes import ExtensionDtype as ExtensionDtype
from typing import Union
class PandasDtype(ExtensionDtype):
def __init__(self, dtype) -> None: ...
@property
def numpy_dtype(self): ...
@property
def name(self): ...
@property
def type(self): ...
@classmethod
def construct_from_string(cls, string): ...
@classmethod
def construct_array_type(cls): ...
@property
def kind(self): ...
@property
def itemsize(self): ...
class PandasArray(ExtensionArray, ExtensionOpsMixin, NDArrayOperatorsMixin):
__array_priority__: int = ...
def __init__(self, values: Union[np.ndarray, PandasArray], copy: bool=...) -> None: ...
@property
def dtype(self): ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
def __getitem__(self, item): ...
def __setitem__(self, key, value) -> None: ...
def __len__(self) -> int: ...
@property
def nbytes(self) -> int: ...
def isna(self): ...
def fillna(self, value = ..., method = ..., limit = ...): ...
def take(self, indices, allow_fill: bool = ..., fill_value = ...): ...
def copy(self): ...
def unique(self): ...
def any(self, axis = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def all(self, axis = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def min(self, axis = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def max(self, axis = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def sum(self, axis = ..., dtype = ..., out = ..., keepdims: bool = ..., initial = ..., skipna: bool = ..., min_count: int = ...): ...
def prod(self, axis = ..., dtype = ..., out = ..., keepdims: bool = ..., initial = ..., skipna: bool = ..., min_count: int = ...): ...
def mean(self, axis = ..., dtype = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def median(self, axis = ..., out = ..., overwrite_input: bool = ..., keepdims: bool = ..., skipna: bool = ...): ...
def std(self, axis = ..., dtype = ..., out = ..., ddof: int = ..., keepdims: bool = ..., skipna: bool = ...): ...
def var(self, axis = ..., dtype = ..., out = ..., ddof: int = ..., keepdims: bool = ..., skipna: bool = ...): ...
def sem(self, axis = ..., dtype = ..., out = ..., ddof: int = ..., keepdims: bool = ..., skipna: bool = ...): ...
def kurt(self, axis = ..., dtype = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def skew(self, axis = ..., dtype = ..., out = ..., keepdims: bool = ..., skipna: bool = ...): ...
def to_numpy(self, dtype = ..., copy: bool = ..., na_value = ...): ...
def searchsorted(self, value, side: str = ..., sorter = ...): ...
def __invert__(self): ...


@ -1,43 +0,0 @@
import numpy as np
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.period import Period as Period
from pandas.core.arrays import datetimelike as dtl
from pandas.tseries.offsets import Tick as Tick
from typing import Optional, Sequence, Union
class PeriodArray(dtl.DatetimeLikeArrayMixin, dtl.DatelikeOps):
__array_priority__: int = ...
def __init__(self, values, freq = ..., dtype = ..., copy: bool = ...) -> None: ...
def dtype(self): ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __arrow_array__(self, type = ...): ...
year: int = ...
month: int = ...
day: int = ...
hour: int = ...
minute: int = ...
second: int = ...
weekofyear: int = ...
week: int = ...
dayofweek: int = ...
weekday: int = ...
dayofyear: int = ...
day_of_year = ...
quarter: int = ...
qyear: int = ...
days_in_month: int = ...
daysinmonth: int = ...
@property
def is_leap_year(self) -> bool: ...
@property
def start_time(self) -> Timestamp: ...
@property
def end_time(self) -> Timestamp: ...
def to_timestamp(self, freq: Optional[str] = ..., how: str = ...) -> Timestamp: ...
def asfreq(self, freq: Optional[str] = ..., how: str = ...) -> Period: ...
def astype(self, dtype, copy: bool = ...): ...
def raise_on_incompatible(left, right): ...
def period_array(data: Sequence[Optional[Period]], freq: Optional[Union[str, Tick]]=..., copy: bool=...) -> PeriodArray: ...
def validate_dtype_freq(dtype, freq): ...
def dt64arr_to_periodarr(data, freq, tz = ...): ...
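
A hedged sketch of the period API these stubs cover (the date range is arbitrary): a PeriodIndex is backed by a PeriodArray, and asfreq/start_time behave as annotated above.

import pandas as pd

months = pd.period_range("2021-01", periods=3, freq="M")   # PeriodIndex over a PeriodArray
print(months.array)                 # <PeriodArray> ['2021-01', '2021-02', '2021-03']
print(months.asfreq("Q").tolist())  # the same periods at quarterly frequency
print(months.start_time[0])         # Timestamp('2021-01-01 00:00:00')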


@ -1,3 +0,0 @@
from .accessor import SparseAccessor as SparseAccessor, SparseFrameAccessor as SparseFrameAccessor
from .array import BlockIndex as BlockIndex, IntIndex as IntIndex, SparseArray as SparseArray
from .dtype import SparseDtype as SparseDtype


@ -1,22 +0,0 @@
from pandas.compat._optional import import_optional_dependency as import_optional_dependency
from pandas.core.accessor import PandasDelegate as PandasDelegate, delegate_names as delegate_names
from pandas.core.arrays.sparse.array import SparseArray as SparseArray
from pandas.core.arrays.sparse.dtype import SparseDtype as SparseDtype
from pandas.core.dtypes.cast import find_common_type as find_common_type
class BaseAccessor:
def __init__(self, data = ...) -> None: ...
class SparseAccessor(BaseAccessor, PandasDelegate):
@classmethod
def from_coo(cls, A, dense_index: bool = ...): ...
def to_coo(self, row_levels = ..., column_levels = ..., sort_labels: bool = ...): ...
def to_dense(self): ...
class SparseFrameAccessor(BaseAccessor, PandasDelegate):
@classmethod
def from_spmatrix(cls, data, index = ..., columns = ...): ...
def to_dense(self): ...
def to_coo(self): ...
@property
def density(self) -> float: ...


@ -1,84 +0,0 @@
import numpy as np
from pandas._libs.sparse import BlockIndex as BlockIndex, IntIndex as IntIndex, SparseIndex as SparseIndex
from pandas._libs.tslibs import NaT as NaT
from pandas.core.arrays import ExtensionArray as ExtensionArray, ExtensionOpsMixin as ExtensionOpsMixin
from pandas.core.arrays.sparse.dtype import SparseDtype as SparseDtype
from pandas.core.base import PandasObject as PandasObject
from pandas.core.construction import sanitize_array as sanitize_array
from pandas.core.dtypes.cast import (
astype_nansafe as astype_nansafe,
construct_1d_arraylike_from_scalar as construct_1d_arraylike_from_scalar,
find_common_type as find_common_type,
infer_dtype_from_scalar as infer_dtype_from_scalar,
)
from pandas.core.dtypes.common import (
is_array_like as is_array_like,
is_bool_dtype as is_bool_dtype,
is_datetime64_any_dtype as is_datetime64_any_dtype,
is_dtype_equal as is_dtype_equal,
is_integer as is_integer,
is_object_dtype as is_object_dtype,
is_scalar as is_scalar,
is_string_dtype as is_string_dtype,
pandas_dtype as pandas_dtype,
)
from pandas.core.dtypes.generic import ABCIndexClass as ABCIndexClass, ABCSeries as ABCSeries, ABCSparseArray as ABCSparseArray
from pandas.core.dtypes.missing import isna as isna, na_value_for_dtype as na_value_for_dtype, notna as notna
from pandas.core.indexers import check_array_indexer as check_array_indexer
from pandas.core.missing import interpolate_2d as interpolate_2d
from pandas.core.ops.common import unpack_zerodim_and_defer as unpack_zerodim_and_defer
from pandas.errors import PerformanceWarning as PerformanceWarning
class SparseArray(PandasObject, ExtensionArray, ExtensionOpsMixin):
def __init__(
self, data, sparse_index=..., index=..., fill_value=..., kind: str = ..., dtype=..., copy: bool = ...
) -> None: ...
@classmethod
def from_spmatrix(cls, data): ...
def __array__(self, dtype=..., copy=...) -> np.ndarray: ...
def __setitem__(self, key, value) -> None: ...
@property
def sp_index(self): ...
@property
def sp_values(self): ...
@property
def dtype(self): ...
@property
def fill_value(self): ...
@fill_value.setter
def fill_value(self, value) -> None: ...
@property
def kind(self) -> str: ...
def __len__(self) -> int: ...
@property
def nbytes(self) -> int: ...
@property
def density(self): ...
@property
def npoints(self) -> int: ...
def isna(self): ...
def fillna(self, value=..., method=..., limit=...): ...
def shift(self, periods: int = ..., fill_value=...): ...
def unique(self): ...
def factorize(self, na_sentinel: int = ...): ...
def value_counts(self, dropna: bool = ...): ...
def __getitem__(self, key): ...
def take(self, indices, allow_fill: bool = ..., fill_value=...): ...
def searchsorted(self, v, side: str = ..., sorter=...): ...
def copy(self): ...
def astype(self, dtype=..., copy: bool = ...): ...
def map(self, mapper): ...
def to_dense(self): ...
def nonzero(self): ...
def all(self, axis=..., *args, **kwargs): ...
def any(self, axis: int = ..., *args, **kwargs): ...
def sum(self, axis: int = ..., *args, **kwargs): ...
def cumsum(self, axis: int = ..., *args, **kwargs): ...
def mean(self, axis: int = ..., *args, **kwargs): ...
def transpose(self, *axes): ...
@property
def T(self): ...
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): ...
def __abs__(self): ...
def make_sparse(arr, kind: str = ..., fill_value=..., dtype=..., copy: bool = ...): ...
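
As a quick, illustrative example of the sparse container stubbed above (data is made up): only values different from fill_value are stored, which npoints and density report.

import numpy as np
import pandas as pd

sp = pd.arrays.SparseArray([0, 0, 1, 2], fill_value=0)
print(sp.dtype)                  # Sparse[int64, 0]
print(sp.npoints, sp.density)    # 2 0.5
print(np.asarray(sp))            # [0 0 1 2]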


@ -1,31 +0,0 @@
# merged types from pylance
from pandas._typing import Dtype as Dtype, Scalar
from pandas.core.dtypes.base import ExtensionDtype as ExtensionDtype
from pandas.core.dtypes.cast import astype_nansafe as astype_nansafe
from pandas.core.dtypes.common import is_bool_dtype as is_bool_dtype, is_object_dtype as is_object_dtype, is_scalar as is_scalar, is_string_dtype as is_string_dtype, pandas_dtype as pandas_dtype
from pandas.core.dtypes.dtypes import register_extension_dtype as register_extension_dtype
from pandas.core.dtypes.missing import isna as isna, na_value_for_dtype as na_value_for_dtype
from typing import Optional
class SparseDtype(ExtensionDtype):
def __init__(self, dtype: Dtype=..., fill_value: Optional[Scalar]=...) -> None: ...
def __hash__(self) : ...
def __eq__(self, other) -> bool: ...
@property
def fill_value(self): ...
@property
def kind(self): ...
@property
def type(self): ...
@property
def subtype(self): ...
@property
def name(self): ...
@classmethod
def construct_array_type(cls): ...
@classmethod
def construct_from_string(cls, string): ...
@classmethod
def is_dtype(cls, dtype): ...
def update_dtype(self, dtype): ...


@ -1,2 +0,0 @@
from pandas.core.indexes.api import Index as Index, MultiIndex as MultiIndex
from pandas.core.series import Series as Series


@ -1,20 +0,0 @@
from pandas.core.arrays import PandasArray as PandasArray
from pandas.core.dtypes.base import ExtensionDtype as ExtensionDtype
from typing import Type
class StringDtype(ExtensionDtype):
name: str = ...
na_value = ...
@property
def type(self) -> Type: ...
@classmethod
def construct_array_type(cls) -> Type[StringArray]: ...
def __from_arrow__(self, array): ...
class StringArray(PandasArray):
def __init__(self, values, copy: bool = ...) -> None: ...
def __arrow_array__(self, type = ...): ...
def __setitem__(self, key, value) -> None: ...
def fillna(self, value = ..., method = ..., limit = ...): ...
def astype(self, dtype, copy: bool = ...): ...
def value_counts(self, dropna: bool = ...): ...
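
A minimal sketch of the dedicated string dtype these stubs describe (names are invented; assumes pandas 1.0 or later): "string"-typed data is held in a StringArray and keeps NA rather than mixing in NaN.

import pandas as pd

names = pd.array(["Ada", None, "Bob"], dtype="string")   # StringArray with StringDtype
print(names.dtype)                              # string
print(pd.Series(names).str.upper().tolist())    # ['ADA', <NA>, 'BOB']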


@ -1,38 +0,0 @@
from datetime import timedelta
from pandas.core.arrays import datetimelike as dtl
from typing import Sequence
class TimedeltaArray(dtl.DatetimeLikeArrayMixin, dtl.TimelikeOps):
__array_priority__: int = ...
@property
def dtype(self): ...
def __init__(self, values, dtype = ..., freq = ..., copy: bool = ...) -> None: ...
def astype(self, dtype, copy: bool = ...): ...
def sum(self, axis=..., dtype=..., out=..., keepdims: bool=..., initial=..., skipna: bool=..., min_count: int=...) : ...
def std(self, axis=..., dtype=..., out=..., ddof: int=..., keepdims: bool=..., skipna: bool=...) : ...
def median(self, axis=..., out=..., overwrite_input: bool=..., keepdims: bool=..., skipna: bool=...) : ...
def __mul__(self, other): ...
__rmul__ = ...
def __truediv__(self, other): ...
def __rtruediv__(self, other): ...
def __floordiv__(self, other): ...
def __rfloordiv__(self, other): ...
def __mod__(self, other): ...
def __rmod__(self, other): ...
def __divmod__(self, other): ...
def __rdivmod__(self, other): ...
def __neg__(self): ...
def __pos__(self): ...
def __abs__(self): ...
def total_seconds(self) -> int: ...
def to_pytimedelta(self) -> Sequence[timedelta]: ...
days: int = ...
seconds: int = ...
microseconds: int = ...
nanoseconds: int = ...
@property
def components(self) -> int: ...
def sequence_to_td64ns(data, copy: bool = ..., unit: str = ..., errors: str = ...): ...
def ints_to_td64ns(data, unit: str = ...): ...
def objects_to_td64ns(data, unit: str = ..., errors: str = ...): ...


@ -1,77 +0,0 @@
from __future__ import annotations
import numpy as np
from pandas.core.arrays.categorical import Categorical
from pandas._typing import (
Scalar,
SeriesAxisType,
T1 as T1,
np_ndarray_int64,
np_ndarray_str,
Index as Index,
Series as Series,
DataFrame as DataFrame,
)
from pandas.core.accessor import DirNamesMixin as DirNamesMixin
from pandas.core.arrays import ExtensionArray as ExtensionArray
from typing import Callable, Generic, List, Literal, Optional, Tuple, Union, overload
class PandasObject(DirNamesMixin):
def __sizeof__(self) -> int: ...
class NoNewAttributesMixin:
def __setattr__(self, key, value) -> None: ...
class GroupByError(Exception): ...
class DataError(GroupByError): ...
class SpecificationError(GroupByError): ...
class SelectionMixin:
def ndim(self) -> int: ...
def __getitem__(self, key): ...
def aggregate(self, func: Optional[Callable] = ..., *args, **kwargs) -> Union[Scalar, Series, DataFrame]: ...
agg = aggregate
class ShallowMixin: ...
class IndexOpsMixin:
__array_priority__: int = ...
def transpose(self, *args, **kwargs) -> IndexOpsMixin: ...
@property
def T(self) -> IndexOpsMixin: ...
@property
def shape(self) -> tuple: ...
@property
def ndim(self) -> int: ...
def item(self): ...
@property
def nbytes(self) -> int: ...
@property
def size(self) -> int: ...
@property
def array(self) -> ExtensionArray: ...
def to_numpy(self) -> np.ndarray: ...
@property
def empty(self) -> bool: ...
def max(self, axis=..., skipna: bool = ..., **kwargs): ...
def min(self, axis=..., skipna: bool = ..., **kwargs): ...
def argmax(self, axis: Optional[SeriesAxisType] = ..., skipna: bool = ..., *args, **kwargs) -> np.ndarray: ...
def argmin(self, axis: Optional[SeriesAxisType] = ..., skipna: bool = ..., *args, **kwargs) -> np.ndarray: ...
def tolist(self) -> List: ...
def to_list(self) -> List: ...
def __iter__(self): ...
def hasnans(self) -> bool: ...
def value_counts(self, normalize: bool = ..., sort: bool = ..., ascending: bool = ..., bins=..., dropna: bool = ...): ...
def nunique(self, dropna: bool = ...) -> int: ...
@property
def is_unique(self) -> bool: ...
@property
def is_monotonic(self) -> bool: ...
@property
def is_monotonic_decreasing(self) -> bool: ...
@property
def is_monotonic_increasing(self) -> bool: ...
def factorize(self, sort: bool = ..., na_sentinel: int = ...) -> Tuple[np.ndarray, Union[np.ndarray, Index, Categorical]]: ...
def searchsorted(self, value, side: str = ..., sorter=...) -> Union[int, List[int]]: ...
def drop_duplicates(self, keep: Literal["first", "last", False] = ...) -> IndexOpsMixin: ...
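
IndexOpsMixin is the shared surface behind both Series and Index, so the annotations above flow into everyday calls. A small sketch with made-up data:

import pandas as pd

s = pd.Series(["a", "b", "a", None])
codes, uniques = s.factorize()            # (ndarray of codes, Index of uniques)
print(codes.tolist(), uniques.tolist())   # [0, 1, 0, -1] ['a', 'b']
print(s.nunique())                        # 2 -> missing values excluded
print(s.value_counts().to_dict())         # {'a': 2, 'b': 1}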


@ -1,2 +0,0 @@
def align_terms(terms): ...
def reconstruct_object(typ, obj, axes, dtype): ...


@ -1,3 +0,0 @@
def result_type_many(*arrays_and_dtypes): ...
class NameResolutionError(NameError): ...


@ -1,19 +0,0 @@
import abc
class NumExprClobberingError(NameError): ...
class AbstractEngine(metaclass=abc.ABCMeta):
has_neg_frac: bool = ...
expr = ...
aligned_axes = ...
result_type = ...
def __init__(self, expr) -> None: ...
def convert(self) -> str: ...
def evaluate(self) -> object: ...
class NumExprEngine(AbstractEngine):
has_neg_frac: bool = ...
class PythonEngine(AbstractEngine):
has_neg_frac: bool = ...
def evaluate(self): ...


@ -1,6 +0,0 @@
def set_use_numexpr(v: bool = ...) -> None: ...
def set_numexpr_threads(n=...) -> None: ...
def evaluate(op, op_str, a, b, use_numexpr: bool = ...): ...
def where(cond, a, b, use_numexpr: bool = ...): ...
def set_test_mode(v: bool = ...) -> None: ...
def get_test_result(): ...


@ -1,12 +0,0 @@
import tokenize
from typing import Iterator, Tuple
BACKTICK_QUOTED_STRING: int
def create_valid_python_identifier(name: str) -> str: ...
def clean_backtick_quoted_toks(tok: Tuple[int, str]) -> Tuple[int, str]: ...
def clean_column_name(name: str) -> str: ...
def tokenize_backtick_quoted_string(
token_generator: Iterator[tokenize.TokenInfo], source: str, string_start: int
) -> Tuple[int, str]: ...
def tokenize_string(source: str) -> Iterator[Tuple[int, str]]: ...
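
These helpers back the backtick syntax in query()/eval(), which rewrites a quoted column name into a valid temporary identifier before evaluation. A hedged example with an invented frame:

import pandas as pd

df = pd.DataFrame({"unit price": [3.0, 12.5], "qty": [4, 1]})
print(df.query("`unit price` > 5"))   # selects the second row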


@ -1,62 +0,0 @@
use_bottleneck_doc: str = ...
def use_bottleneck_cb(key) -> None: ...
use_numexpr_doc: str = ...
def use_numexpr_cb(key) -> None: ...
pc_precision_doc: str = ...
pc_colspace_doc: str = ...
pc_max_rows_doc: str = ...
pc_min_rows_doc: str = ...
pc_max_cols_doc: str = ...
pc_max_categories_doc: str = ...
pc_max_info_cols_doc: str = ...
pc_nb_repr_h_doc: str = ...
pc_pprint_nest_depth: str = ...
pc_multi_sparse_doc: str = ...
float_format_doc: str = ...
max_colwidth_doc: str = ...
colheader_justify_doc: str = ...
pc_expand_repr_doc: str = ...
pc_show_dimensions_doc: str = ...
pc_east_asian_width_doc: str = ...
pc_ambiguous_as_wide_doc: str = ...
pc_latex_repr_doc: str = ...
pc_table_schema_doc: str = ...
pc_html_border_doc: str = ...
pc_html_use_mathjax_doc: str = ...
pc_width_doc: str = ...
pc_chop_threshold_doc: str = ...
pc_max_seq_items: str = ...
pc_max_info_rows_doc: str = ...
pc_large_repr_doc: str = ...
pc_memory_usage_doc: str = ...
pc_latex_escape: str = ...
pc_latex_longtable: str = ...
pc_latex_multicolumn: str = ...
pc_latex_multicolumn_format: str = ...
pc_latex_multirow: str = ...
def table_schema_cb(key) -> None: ...
def is_terminal() -> bool: ...
max_cols: int = ...
tc_sim_interactive_doc: str = ...
use_inf_as_null_doc: str = ...
use_inf_as_na_doc: str = ...
def use_inf_as_na_cb(key) -> None: ...
chained_assignment: str = ...
reader_engine_doc: str = ...
writer_engine_doc: str = ...
parquet_engine_doc: str = ...
plotting_backend_doc: str = ...
def register_plotting_backend_cb(key) -> None: ...
register_converter_doc: str = ...
def register_converter_cb(key) -> None: ...


@ -1,42 +0,0 @@
from pandas.core.dtypes.common import (
is_array_like as is_array_like,
is_bool as is_bool,
is_bool_dtype as is_bool_dtype,
is_categorical as is_categorical,
is_categorical_dtype as is_categorical_dtype,
is_complex as is_complex,
is_complex_dtype as is_complex_dtype,
is_datetime64_any_dtype as is_datetime64_any_dtype,
is_datetime64_dtype as is_datetime64_dtype,
is_datetime64_ns_dtype as is_datetime64_ns_dtype,
is_datetime64tz_dtype as is_datetime64tz_dtype,
is_dict_like as is_dict_like,
is_dtype_equal as is_dtype_equal,
is_extension_array_dtype as is_extension_array_dtype,
is_extension_type as is_extension_type,
is_file_like as is_file_like,
is_float as is_float,
is_float_dtype as is_float_dtype,
is_hashable as is_hashable,
is_int64_dtype as is_int64_dtype,
is_integer as is_integer,
is_integer_dtype as is_integer_dtype,
is_interval as is_interval,
is_interval_dtype as is_interval_dtype,
is_iterator as is_iterator,
is_list_like as is_list_like,
is_named_tuple as is_named_tuple,
is_number as is_number,
is_numeric_dtype as is_numeric_dtype,
is_object_dtype as is_object_dtype,
is_period_dtype as is_period_dtype,
is_re as is_re,
is_re_compilable as is_re_compilable,
is_scalar as is_scalar,
is_signed_integer_dtype as is_signed_integer_dtype,
is_sparse as is_sparse,
is_string_dtype as is_string_dtype,
is_timedelta64_dtype as is_timedelta64_dtype,
is_timedelta64_ns_dtype as is_timedelta64_ns_dtype,
is_unsigned_integer_dtype as is_unsigned_integer_dtype,
pandas_dtype as pandas_dtype)


@ -1,23 +0,0 @@
from typing import List, Optional, Type
from pandas._typing import ExtensionArray
class ExtensionDtype:
def __eq__(self, other) -> bool: ...
def __hash__(self) -> int: ...
def __ne__(self, other) -> bool: ...
@property
def na_value(self): ...
@property
def type(self) -> Type: ...
@property
def kind(self) -> str: ...
@property
def name(self) -> str: ...
@property
def names(self) -> Optional[List[str]]: ...
@classmethod
def construct_array_type(cls) -> Type[ExtensionArray]: ...
@classmethod
def construct_from_string(cls, string: str): ...
@classmethod
def is_dtype(cls, dtype) -> bool: ...


@ -1,30 +0,0 @@
import numpy as np
from pandas._typing import Dtype as Dtype
def maybe_convert_platform(values): ...
def is_nested_object(obj) -> bool: ...
def maybe_downcast_to_dtype(result, dtype): ...
def maybe_downcast_numeric(result, dtype, do_round: bool=...) : ...
def maybe_upcast_putmask(result: np.ndarray, mask: np.ndarray, other) : ...
def maybe_promote(dtype, fill_value = ...): ...
def infer_dtype_from(val, pandas_dtype: bool=...) : ...
def infer_dtype_from_scalar(val, pandas_dtype: bool=...) : ...
def infer_dtype_from_array(arr, pandas_dtype: bool=...) : ...
def maybe_infer_dtype_type(element): ...
def maybe_upcast(values, fill_value=..., dtype=..., copy: bool=...) : ...
def invalidate_string_dtypes(dtype_set) -> None: ...
def coerce_indexer_dtype(indexer, categories): ...
def coerce_to_dtypes(result, dtypes): ...
def astype_nansafe(arr, dtype, copy: bool=..., skipna: bool=...) : ...
def maybe_convert_objects(values: np.ndarray, convert_numeric: bool=...) : ...
def soft_convert_objects(values: np.ndarray, datetime: bool=..., numeric: bool=..., timedelta: bool=..., coerce: bool=..., copy: bool=...) : ...
def convert_dtypes(input_array, convert_string: bool=..., convert_integer: bool=..., convert_boolean: bool=...) -> Dtype: ...
def maybe_castable(arr) -> bool: ...
def maybe_infer_to_datetimelike(value, convert_dates: bool=...) : ...
def maybe_cast_to_datetime(value, dtype, errors: str=...) : ...
def find_common_type(types): ...
def cast_scalar_to_array(shape, value, dtype = ...): ...
def construct_1d_arraylike_from_scalar(value, length: int, dtype) : ...
def construct_1d_object_array_from_listlike(values): ...
def construct_1d_ndarray_preserving_na(values, dtype=..., copy: bool=...) : ...
def maybe_cast_to_integer_array(arr, dtype, copy: bool=...) : ...


@ -1,75 +0,0 @@
import numpy as np
from pandas._typing import ArrayLike as ArrayLike
from pandas.core.dtypes.inference import (
is_array_like as is_array_like,
is_bool as is_bool,
is_complex as is_complex,
is_dict_like as is_dict_like,
is_file_like as is_file_like,
is_float as is_float,
is_hashable as is_hashable,
is_integer as is_integer,
is_interval as is_interval,
is_iterator as is_iterator,
is_list_like as is_list_like,
is_named_tuple as is_named_tuple,
is_number as is_number,
is_re as is_re,
is_re_compilable as is_re_compilable,
is_scalar as is_scalar,
)
from typing import Callable, Union
ensure_float64 = ...
ensure_float32 = ...
def ensure_float(arr): ...
ensure_uint64 = ...
ensure_int64 = ...
ensure_int32 = ...
ensure_int16 = ...
ensure_int8 = ...
ensure_platform_int = ...
ensure_object = ...
def ensure_str(value) -> str: ...
def ensure_categorical(arr): ...
def ensure_python_int(value: Union[int, np.integer]) -> int: ...
def classes(*klasses) -> Callable: ...
def classes_and_not_datetimelike(*klasses) -> Callable: ...
def is_object_dtype(arr_or_dtype) -> bool: ...
def is_sparse(arr) -> bool: ...
def is_scipy_sparse(arr) -> bool: ...
def is_categorical(arr) -> bool: ...
def is_datetime64_dtype(arr_or_dtype) -> bool: ...
def is_datetime64tz_dtype(arr_or_dtype) -> bool: ...
def is_timedelta64_dtype(arr_or_dtype) -> bool: ...
def is_period_dtype(arr_or_dtype) -> bool: ...
def is_interval_dtype(arr_or_dtype) -> bool: ...
def is_categorical_dtype(arr_or_dtype) -> bool: ...
def is_string_dtype(arr_or_dtype) -> bool: ...
def is_period_arraylike(arr) -> bool: ...
def is_datetime_arraylike(arr) -> bool: ...
def is_dtype_equal(source, target) -> bool: ...
def is_any_int_dtype(arr_or_dtype) -> bool: ...
def is_integer_dtype(arr_or_dtype) -> bool: ...
def is_signed_integer_dtype(arr_or_dtype) -> bool: ...
def is_unsigned_integer_dtype(arr_or_dtype) -> bool: ...
def is_int64_dtype(arr_or_dtype) -> bool: ...
def is_datetime64_any_dtype(arr_or_dtype) -> bool: ...
def is_datetime64_ns_dtype(arr_or_dtype) -> bool: ...
def is_timedelta64_ns_dtype(arr_or_dtype) -> bool: ...
def is_datetime_or_timedelta_dtype(arr_or_dtype) -> bool: ...
def is_numeric_v_string_like(a, b): ...
def is_datetimelike_v_numeric(a, b): ...
def needs_i8_conversion(arr_or_dtype) -> bool: ...
def is_numeric_dtype(arr_or_dtype) -> bool: ...
def is_string_like_dtype(arr_or_dtype) -> bool: ...
def is_float_dtype(arr_or_dtype) -> bool: ...
def is_bool_dtype(arr_or_dtype) -> bool: ...
def is_extension_type(arr) -> bool: ...
def is_extension_array_dtype(arr_or_dtype) -> bool: ...
def is_complex_dtype(arr_or_dtype) -> bool: ...
def infer_dtype_from_object(dtype): ...
def pandas_dtype(dtype): ...
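
The same predicates are re-exported through pandas.api.types, which is the usual way user code reaches them. An illustrative check against a nullable integer Series:

import pandas as pd
from pandas.api.types import (
    is_integer_dtype,
    is_extension_array_dtype,
    is_datetime64_any_dtype,
)

s = pd.Series([1, 2, 3], dtype="Int64")
print(is_integer_dtype(s))            # True
print(is_extension_array_dtype(s))    # True -> nullable Int64 is an extension dtype
print(is_datetime64_any_dtype(s))     # False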


@ -1,5 +0,0 @@
def get_dtype_kinds(l): ...
def concat_compat(to_concat, axis: int=...) : ...
def concat_categorical(to_concat, axis: int=...) : ...
def union_categoricals(to_union, sort_categories: bool=..., ignore_order: bool=...) : ...
def concat_datetime(to_concat, axis: int = ..., typs = ...): ...


@ -1,120 +0,0 @@
from pandas._typing import Ordered as Ordered
from .base import ExtensionDtype as ExtensionDtype
from pandas._libs.tslibs import NaT as NaT, Period as Period, Timestamp as Timestamp # , timezones as timezones
from pandas.core.indexes.base import Index
from typing import Any, Optional, Sequence, Tuple, Type, Union
_str = str
def register_extension_dtype(cls: Type[ExtensionDtype]) -> Type[ExtensionDtype]: ...
class Registry:
dtypes = ...
def __init__(self) -> None: ...
def register(self, dtype: Type[ExtensionDtype]) -> None: ...
def find(self, dtype: Union[Type[ExtensionDtype], str]) -> Optional[Type[ExtensionDtype]]: ...
registry = ...
class PandasExtensionDtype(ExtensionDtype):
subdtype = ...
str: Optional[_str] = ...
num: int = ...
shape: Tuple[int, ...] = ...
itemsize: int = ...
base = ...
isbuiltin: int = ...
isnative: int = ...
def __hash__(self) -> int: ...
@classmethod
def reset_cache(cls) -> None: ...
class CategoricalDtypeType(type): ...
class CategoricalDtype(PandasExtensionDtype, ExtensionDtype):
name: _str = ...
type: Type[CategoricalDtypeType] = ...
kind: _str = ...
str: _str = ...
base = ...
def __init__(self, categories: Optional[Sequence[Any]] = ..., ordered: Ordered = ...) -> None: ...
@classmethod
def construct_from_string(cls, string: _str) -> CategoricalDtype: ...
def __hash__(self) -> int: ...
def __eq__(self, other) -> bool: ...
@classmethod
def construct_array_type(cls): ...
@staticmethod
def validate_ordered(ordered: Ordered) -> None: ...
@staticmethod
def validate_categories(categories, fastpath: bool = ...): ...
def update_dtype(self, dtype: Union[_str, CategoricalDtype]) -> CategoricalDtype: ...
@property
def categories(self) -> Index: ...
@property
def ordered(self) -> Ordered: ...
class DatetimeTZDtype(PandasExtensionDtype):
type: Type[Timestamp] = ...
kind: _str = ...
str: _str = ...
num: int = ...
base = ...
na_value = ...
def __init__(self, unit: _str = ..., tz=...) -> None: ...
@property
def unit(self): ...
@property
def tz(self): ...
@classmethod
def construct_array_type(cls): ...
@classmethod
def construct_from_string(cls, string: _str): ...
@property
def name(self) -> _str: ...
def __hash__(self) -> int: ...
def __eq__(self, other) -> bool: ...
class PeriodDtype(PandasExtensionDtype):
type: Type[Period] = ...
kind: _str = ...
str: _str = ...
base = ...
num: int = ...
def __new__(cls, freq=...): ...
@property
def freq(self): ...
@classmethod
def construct_from_string(cls, string: _str): ...
@property
def name(self) -> _str: ...
@property
def na_value(self): ...
def __hash__(self) -> int: ...
def __eq__(self, other) -> bool: ...
@classmethod
def is_dtype(cls, dtype) -> bool: ...
@classmethod
def construct_array_type(cls): ...
def __from_arrow__(self, array): ...
class IntervalDtype(PandasExtensionDtype):
name: _str = ...
kind: _str = ...
str: _str = ...
base = ...
num: int = ...
def __new__(cls, subtype=...): ...
@property
def subtype(self): ...
@classmethod
def construct_array_type(cls): ...
@classmethod
def construct_from_string(cls, string: _str): ...
@property
def type(self): ...
def __hash__(self) -> int: ...
def __eq__(self, other) -> bool: ...
@classmethod
def is_dtype(cls, dtype) -> bool: ...
def __from_arrow__(self, array): ...


@ -1,76 +0,0 @@
def create_pandas_abc_type(name, attr, comp): ...
class ABCIndex: ...
class ABCInt64Index: ...
class ABCUInt64Index: ...
class ABCRangeIndex: ...
class ABCFloat64Index:
...
class ABCMultiIndex:
...
class ABCDatetimeIndex:
...
class ABCTimedeltaIndex:
...
class ABCPeriodIndex:
...
class ABCCategoricalIndex:
...
class ABCIntervalIndex:
...
class ABCIndexClass:
...
class ABCSeries:
...
class ABCDataFrame:
...
class ABCSparseArray:
...
class ABCCategorical:
...
class ABCDatetimeArray:
...
class ABCTimedeltaArray:
...
class ABCPeriodArray:
...
class ABCPeriod:
...
class ABCDateOffset:
...
class ABCInterval:
...
class ABCExtensionArray:
...
class ABCPandasArray:
...
class _ABCGeneric(type):
def __instancecheck__(cls, inst) -> bool: ...
class ABCGeneric:
...


@ -1,19 +0,0 @@
def is_bool(obj) -> bool: ...
def is_integer(obj) -> bool: ...
def is_float(obj) -> bool: ...
def is_complex(obj) -> bool: ...
def is_scalar(obj) -> bool: ...
def is_decimal(obj) -> bool: ...
def is_interval(obj) -> bool: ...
def is_list_like(obj) -> bool: ...
def is_number(obj) -> bool: ...
def is_iterator(obj) -> bool: ...
def is_file_like(obj) -> bool: ...
def is_re(obj) -> bool: ...
def is_re_compilable(obj) -> bool: ...
def is_array_like(obj) -> bool: ...
def is_nested_list_like(obj) -> bool: ...
def is_dict_like(obj) -> bool: ...
def is_named_tuple(obj) -> bool: ...
def is_hashable(obj) -> bool: ...
def is_sequence(obj) -> bool: ...


@ -1,33 +0,0 @@
import numpy as np
from typing import Union, overload, List
from pandas._typing import Scalar as Scalar, Series as Series, Index as Index, ArrayLike as ArrayLike, DataFrame as DataFrame
isposinf_scalar = ...
isneginf_scalar = ...
@overload
def isna(obj: DataFrame) -> DataFrame: ...
@overload
def isna(obj: Series) -> Series[bool]: ...
@overload
def isna(obj: Union[Index, List, ArrayLike]) -> np.ndarray: ...
@overload
def isna(obj: Scalar) -> bool: ...
isnull = isna
@overload
def notna(obj: DataFrame) -> DataFrame: ...
@overload
def notna(obj: Series) -> Series[bool]: ...
@overload
def notna(obj: Union[Index, List, ArrayLike]) -> np.ndarray: ...
@overload
def notna(obj: Scalar) -> bool: ...
notnull = notna
def array_equivalent(left, right, strict_nan: bool = ...) -> bool: ...
def na_value_for_dtype(dtype, compat: bool = ...): ...
def remove_na_arraylike(arr): ...
def is_valid_nat_for_dtype(obj, dtype) -> bool: ...
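
The overloads above let a checker narrow pd.isna/pd.notna by argument type. A short sketch with made-up data showing the three shapes of result:

import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1.0, np.nan]})
print(pd.isna(df))                     # DataFrame overload -> frame of booleans
print(pd.isna(df["a"]).tolist())       # Series overload -> [False, True]
print(pd.isna(np.nan), pd.notna(3))    # scalar overload -> True True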

Diff for this file not shown because of its size.


@ -1,383 +0,0 @@
import numpy as np
import sys
import pandas.core.indexing as indexing
from pandas._typing import (
ArrayLike as ArrayLike,
Axis as Axis,
AxisType as AxisType,
Dtype as Dtype,
FilePathOrBuffer as FilePathOrBuffer,
FrameOrSeriesUnion as FrameOrSeriesUnion,
IgnoreRaise as IgnoreRaise,
JSONSerializable as JSONSerializable,
Level as Level,
Renamer as Renamer,
ListLike as ListLike,
Scalar as Scalar,
SeriesAxisType as SeriesAxisType,
FrameOrSeries as FrameOrSeries,
S1 as S1,
Timestamp as Timestamp,
Timedelta as Timedelta,
T,
)
from pandas.core.base import PandasObject as PandasObject
from pandas.core.indexes.base import Index as Index
from pandas.core.internals import BlockManager as BlockManager
from pandas.core.resample import Resampler
from typing import Any, Callable, Dict, Hashable, Iterator, List, Mapping, Optional, Sequence, Tuple, TypeVar, Union, overload
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
_bool = bool
_str = str
class NDFrame(PandasObject, indexing.IndexingMixin):
def __new__(
cls,
data: BlockManager,
axes: Optional[List[Index]] = ...,
copy: _bool = ...,
dtype: Optional[Dtype] = ...,
attrs: Optional[Mapping[Optional[Hashable], Any]] = ...,
fastpath: _bool = ...,
) -> NDFrame: ...
def set_flags(self: FrameOrSeries, *, copy: bool = ..., allows_duplicate_labels: Optional[bool] = ...) -> FrameOrSeries: ...
@property
def attrs(self) -> Dict[Optional[Hashable], Any]: ...
@attrs.setter
def attrs(self, value: Mapping[Optional[Hashable], Any]) -> None: ...
@property
def shape(self) -> Tuple[int, ...]: ...
@property
def axes(self) -> List[Index]: ...
@property
def ndim(self) -> int: ...
@property
def size(self) -> int: ...
def swapaxes(self, axis1: SeriesAxisType, axis2: SeriesAxisType, copy: _bool = ...) -> NDFrame: ...
def droplevel(self, level: Level, axis: SeriesAxisType = ...) -> NDFrame: ...
def pop(self, item: _str) -> NDFrame: ...
def squeeze(self, axis=...): ...
def swaplevel(self, i=..., j=..., axis=...) -> NDFrame: ...
def equals(self, other: Series[S1]) -> _bool: ...
def __neg__(self) -> None: ...
def __pos__(self) -> None: ...
def __nonzero__(self) -> None: ...
def bool(self) -> _bool: ...
def __abs__(self) -> NDFrame: ...
def __round__(self, decimals: int = ...) -> NDFrame: ...
def __hash__(self): ...
def __iter__(self) -> Iterator: ...
def keys(self): ...
def iteritems(self): ...
def __len__(self) -> int: ...
def __contains__(self, key) -> _bool: ...
@property
def empty(self) -> _bool: ...
__array_priority__: int = ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __array_wrap__(self, result, context=...): ...
def to_excel(
self,
excel_writer,
sheet_name: _str = ...,
na_rep: _str = ...,
float_format: Optional[_str] = ...,
columns: Optional[Union[_str, Sequence[_str]]] = ...,
header: _bool = ...,
index: _bool = ...,
index_label: Optional[Union[_str, Sequence[_str]]] = ...,
startrow: int = ...,
startcol: int = ...,
engine: Optional[_str] = ...,
merge_cells: _bool = ...,
encoding: Optional[_str] = ...,
inf_rep: _str = ...,
verbose: _bool = ...,
freeze_panes: Optional[Tuple[int, int]] = ...,
) -> None: ...
@overload
def to_json(
self,
path_or_buf: Optional[FilePathOrBuffer],
orient: Optional[Union[_str, Literal["split", "records", "index", "columns", "values", "table"]]] = ...,
date_format: Optional[Union[_str, Literal["epoch", "iso"]]] = ...,
double_precision: int = ...,
force_ascii: _bool = ...,
date_unit: Union[_str, Literal["s", "ms", "us", "ns"]] = ...,
default_handler: Optional[Callable[[Any], Union[_str, int, float, _bool, List, Dict]]] = ...,
lines: _bool = ...,
compression: Union[_str, Literal["infer", "gzip", "bz2", "zip", "xz"]] = ...,
index: _bool = ...,
indent: Optional[int] = ...,
) -> None: ...
@overload
def to_json(
self,
orient: Optional[Union[_str, Literal["split", "records", "index", "columns", "values", "table"]]] = ...,
date_format: Optional[Union[_str, Literal["epoch", "iso"]]] = ...,
double_precision: int = ...,
force_ascii: _bool = ...,
date_unit: Union[_str, Literal["s", "ms", "us", "ns"]] = ...,
default_handler: Optional[Callable[[Any], Union[_str, int, float, _bool, List, Dict]]] = ...,
lines: _bool = ...,
compression: Optional[Union[_str, Literal["infer", "gzip", "bz2", "zip", "xz"]]] = ...,
index: _bool = ...,
indent: Optional[int] = ...,
) -> _str: ...
def to_hdf(
self,
path_or_buf: FilePathOrBuffer,
key: _str,
mode: _str = ...,
complevel: Optional[int] = ...,
complib: Optional[_str] = ...,
append: _bool = ...,
format: Optional[_str] = ...,
index: _bool = ...,
min_itemsize: Optional[Union[int, Dict[_str, int]]] = ...,
nan_rep=...,
dropna: Optional[_bool] = ...,
data_columns: Optional[List[_str]] = ...,
errors: _str = ...,
encoding: _str = ...,
) -> None: ...
def to_sql(
self,
name: _str,
con,
schema: Optional[_str] = ...,
if_exists: _str = ...,
index: _bool = ...,
index_label: Optional[Union[_str, Sequence[_str]]] = ...,
chunksize: Optional[int] = ...,
dtype: Optional[Union[Dict, Scalar]] = ...,
method: Optional[Union[_str, Callable]] = ...,
) -> None: ...
def to_pickle(
self,
path: _str,
compression: Union[_str, Literal["infer", "gzip", "bz2", "zip", "xz"]] = ...,
protocol: int = ...,
) -> None: ...
def to_clipboard(self, excel: _bool = ..., sep: Optional[_str] = ..., **kwargs) -> None: ...
def to_xarray(self): ...
@overload
def to_latex(
self,
buf: Optional[FilePathOrBuffer],
columns: Optional[List[_str]] = ...,
col_space: Optional[int] = ...,
header: _bool = ...,
index: _bool = ...,
na_rep: _str = ...,
formatters=...,
float_format=...,
sparsify: Optional[_bool] = ...,
index_names: _bool = ...,
bold_rows: _bool = ...,
column_format: Optional[_str] = ...,
longtable: Optional[_bool] = ...,
escape: Optional[_bool] = ...,
encoding: Optional[_str] = ...,
decimal: _str = ...,
multicolumn: Optional[_bool] = ...,
multicolumn_format: Optional[_str] = ...,
multirow: Optional[_bool] = ...,
caption: Optional[Union[_str, Tuple[_str, _str]]] = ...,
label: Optional[_str] = ...,
position: Optional[_str] = ...,
) -> None: ...
@overload
def to_latex(
self,
columns: Optional[List[_str]] = ...,
col_space: Optional[int] = ...,
header: _bool = ...,
index: _bool = ...,
na_rep: _str = ...,
formatters=...,
float_format=...,
sparsify: Optional[_bool] = ...,
index_names: _bool = ...,
bold_rows: _bool = ...,
column_format: Optional[_str] = ...,
longtable: Optional[_bool] = ...,
escape: Optional[_bool] = ...,
encoding: Optional[_str] = ...,
decimal: _str = ...,
multicolumn: Optional[_bool] = ...,
multicolumn_format: Optional[_str] = ...,
multirow: Optional[_bool] = ...,
caption: Optional[Union[_str, Tuple[_str, _str]]] = ...,
label: Optional[_str] = ...,
position: Optional[_str] = ...,
) -> _str: ...
@overload
def to_csv(
self,
path_or_buf: Optional[FilePathOrBuffer],
sep: _str = ...,
na_rep: _str = ...,
float_format: Optional[_str] = ...,
columns: Optional[Sequence[Hashable]] = ...,
header: Union[_bool, List[_str]] = ...,
index: _bool = ...,
index_label: Optional[Union[_bool, _str, Sequence[Hashable]]] = ...,
mode: _str = ...,
encoding: Optional[_str] = ...,
compression: Union[_str, Mapping[_str, _str]] = ...,
quoting: Optional[int] = ...,
quotechar: _str = ...,
line_terminator: Optional[_str] = ...,
chunksize: Optional[int] = ...,
date_format: Optional[_str] = ...,
doublequote: _bool = ...,
escapechar: Optional[_str] = ...,
decimal: _str = ...,
errors: _str = ...,
storage_options: Optional[Dict[_str, Any]] = ...,
) -> None: ...
@overload
def to_csv(
self,
sep: _str = ...,
na_rep: _str = ...,
float_format: Optional[_str] = ...,
columns: Optional[Sequence[Hashable]] = ...,
header: Union[_bool, List[_str]] = ...,
index: _bool = ...,
index_label: Optional[Union[_bool, _str, Sequence[Hashable]]] = ...,
mode: _str = ...,
encoding: Optional[_str] = ...,
compression: Union[_str, Mapping[_str, _str]] = ...,
quoting: Optional[int] = ...,
quotechar: _str = ...,
line_terminator: Optional[_str] = ...,
chunksize: Optional[int] = ...,
date_format: Optional[_str] = ...,
doublequote: _bool = ...,
escapechar: Optional[_str] = ...,
decimal: _str = ...,
errors: _str = ...,
storage_options: Optional[Dict[_str, Any]] = ...,
) -> _str: ...
def take(self, indices, axis=..., is_copy: Optional[_bool] = ..., **kwargs) -> NDFrame: ...
def xs(
self,
key: Union[_str, Tuple[_str]],
axis: SeriesAxisType = ...,
level: Optional[Level] = ...,
drop_level: _bool = ...,
) -> FrameOrSeriesUnion: ...
def __delitem__(self, idx: Hashable): ...
def get(self, key: object, default: Optional[Dtype] = ...) -> Dtype: ...
def reindex_like(self, other, method: Optional[_str] = ..., copy: _bool = ..., limit=..., tolerance=...) -> NDFrame: ...
@overload
def drop(
self,
labels: Hashable | list[Hashable] = ...,
*,
axis: Axis = ...,
index: Hashable | list[Hashable] = ...,
columns: Hashable | list[Hashable] = ...,
level: Level | None = ...,
inplace: Literal[True],
errors: IgnoreRaise = ...,
) -> None: ...
@overload
def drop(
self: NDFrame,
labels: Hashable | list[Hashable] = ...,
*,
axis: Axis = ...,
index: Hashable | list[Hashable] = ...,
columns: Hashable | list[Hashable] = ...,
level: Level | None = ...,
inplace: Literal[False] = ...,
errors: IgnoreRaise = ...,
) -> NDFrame: ...
@overload
def drop(
self: NDFrame,
labels: Hashable | list[Hashable] = ...,
*,
axis: Axis = ...,
index: Hashable | list[Hashable] = ...,
columns: Hashable | list[Hashable] = ...,
level: Level | None = ...,
inplace: _bool = ...,
errors: IgnoreRaise = ...,
) -> NDFrame | None: ...
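A minimal sketch (hypothetical frame) of how a type checker reads the drop overloads above: inplace=True selects the None-returning variant, everything else the frame-returning one.
# Illustrative only; not part of the stub file.
import pandas as pd

df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
trimmed = df.drop(columns=["b"])               # inplace defaults to False -> a new frame
result = df.drop(columns=["b"], inplace=True)  # Literal[True] overload -> None
assert result is None and "b" not in df.columns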
def add_prefix(self, prefix: _str) -> NDFrame: ...
def add_suffix(self, suffix: _str) -> NDFrame: ...
def sort_index(
self,
axis=...,
level=...,
ascending: _bool = ...,
inplace: _bool = ...,
kind: _str = ...,
na_position: _str = ...,
sort_remaining: _bool = ...,
ignore_index: _bool = ...,
): ...
def filter(self, items=..., like: Optional[_str] = ..., regex: Optional[_str] = ..., axis=...) -> NDFrame: ...
def head(self: FrameOrSeries, n: int = ...) -> FrameOrSeries: ...
def tail(self: FrameOrSeries, n: int = ...) -> FrameOrSeries: ...
def pipe(self, func: Callable[..., T] | tuple[Callable[..., T], str], *args, **kwargs) -> T: ...
def __finalize__(self, other, method=..., **kwargs) -> NDFrame: ...
def __getattr__(self, name: _str): ...
def __setattr__(self, name: _str, value) -> None: ...
@property
def values(self) -> ArrayLike: ...
@property
def dtypes(self): ...
def astype(self: FrameOrSeries, dtype, copy: _bool = ..., errors: str = ...) -> FrameOrSeries: ...
def copy(self: FrameOrSeries, deep: _bool = ...) -> FrameOrSeries: ...
def __copy__(self, deep: _bool = ...) -> NDFrame: ...
def __deepcopy__(self, memo=...) -> NDFrame: ...
def infer_objects(self) -> NDFrame: ...
def convert_dtypes(self: FrameOrSeries, infer_objects: _bool = ..., convert_string: _bool = ..., convert_integer: _bool = ..., convert_boolean: _bool = ...) -> FrameOrSeries: ...
def fillna(self, value=..., method=..., axis=..., inplace: _bool = ..., limit=..., downcast=...) -> Optional[NDFrame]: ...
def replace(self, to_replace=..., value=..., inplace: _bool = ..., limit=..., regex: _bool = ..., method: _str = ...): ...
def asof(self, where, subset=...): ...
def isna(self) -> NDFrame: ...
def isnull(self) -> NDFrame: ...
def notna(self) -> NDFrame: ...
def notnull(self) -> NDFrame: ...
def clip(self, lower=..., upper=..., axis=..., inplace: _bool = ..., *args, **kwargs) -> NDFrame: ...
def asfreq(self, freq, method=..., how: Optional[_str] = ..., normalize: _bool = ..., fill_value=...) -> NDFrame: ...
def at_time(self, time, asof: _bool = ..., axis=...) -> NDFrame: ...
def between_time(self, start_time, end_time, include_start: _bool = ..., include_end: _bool = ..., axis=...) -> NDFrame: ...
def first(self, offset) -> NDFrame: ...
def last(self, offset) -> NDFrame: ...
def rank(
self,
axis=...,
method: _str = ...,
numeric_only: Optional[_bool] = ...,
na_option: _str = ...,
ascending: _bool = ...,
pct: _bool = ...,
) -> NDFrame: ...
def where(self, cond, other=..., inplace: _bool = ..., axis=..., level=..., errors: _str = ..., try_cast: _bool = ...): ...
def mask(self, cond, other=..., inplace: _bool = ..., axis=..., level=..., errors: _str = ..., try_cast: _bool = ...): ...
def shift(self, periods=..., freq=..., axis=..., fill_value=...) -> NDFrame: ...
def slice_shift(self, periods: int = ..., axis=...) -> NDFrame: ...
def tshift(self, periods: int = ..., freq=..., axis=...) -> NDFrame: ...
def truncate(self, before=..., after=..., axis=..., copy: _bool = ...) -> NDFrame: ...
def tz_convert(self, tz, axis=..., level=..., copy: _bool = ...) -> NDFrame: ...
def tz_localize(self, tz, axis=..., level=..., copy: _bool = ..., ambiguous=..., nonexistent: str = ...) -> NDFrame: ...
def abs(self) -> NDFrame: ...
def describe(self, percentiles=..., include=..., exclude=..., datetime_is_numeric: Optional[_bool] = ...) -> NDFrame: ...
def pct_change(self, periods=..., fill_method=..., limit=..., freq=..., **kwargs) -> NDFrame: ...
def transform(self, func, *args, **kwargs): ...
def first_valid_index(self): ...
def last_valid_index(self): ...
from pandas.core.series import Series as Series


@ -1,2 +0,0 @@
from pandas.core.groupby.generic import NamedAgg as NamedAgg
from pandas.core.groupby.grouper import Grouper as Grouper


@ -1,4 +0,0 @@
from pandas.core.arrays.categorical import Categorical as Categorical # , CategoricalDtype as CategoricalDtype
def recode_for_groupby(c: Categorical, sort: bool, observed: bool): ...
def recode_from_groupby(c: Categorical, sort: bool, ci): ...


@ -1,238 +0,0 @@
from matplotlib.axes import Axes as PlotAxes, SubplotBase as AxesSubplot
import numpy as np
import sys
from pandas._typing import FrameOrSeries as FrameOrSeries, AxisType, Dtype, Level, F, AggFuncType, S1
from pandas.core.frame import DataFrame as DataFrame
from pandas.core.groupby.groupby import GroupBy as GroupBy # , get_groupby as get_groupby
from pandas.core.groupby.grouper import Grouper as Grouper
from pandas.core.series import Series as Series
from typing import Any, Callable, Dict, FrozenSet, List, NamedTuple, Optional, Sequence, Tuple, Type, Union, overload
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
AggScalar = Union[str, Callable[..., Any]]
ScalarResult = ...
class NamedAgg(NamedTuple):
column: str = ...
aggfunc: AggScalar = ...
def generate_property(name: str, klass: Type[FrameOrSeries]): ...
def pin_whitelisted_properties(klass: Type[FrameOrSeries], whitelist: FrozenSet[str]): ...
class SeriesGroupBy(GroupBy):
def any(self, skipna: bool = ...) -> Series[bool]: ...
def all(self, skipna: bool = ...) -> Series[bool]: ...
def apply(self, func, *args, **kwargs) -> Series: ...
@overload
def aggregate(self, func: Union[List, Dict], *args, **kwargs) -> DataFrame: ...
@overload
def aggregate(self, func: Union[str, Callable], *args, **kwargs) -> Series: ...
agg = aggregate
def transform(self, func, *args, **kwargs): ...
def filter(self, func, dropna: bool = ..., *args, **kwargs): ...
def nunique(self, dropna: bool = ...) -> Series: ...
def describe(self, **kwargs) -> DataFrame: ...
def value_counts(
self,
normalize: bool = ...,
sort: bool = ...,
ascending: bool = ...,
bins=...,
dropna: bool = ...,
) -> DataFrame: ...
def count(self) -> Series[int]: ...
def pct_change(
self,
periods: int = ...,
fill_method: str = ...,
limit=...,
freq=...,
axis: AxisType = ...,
) -> Series[float]: ...
# Overrides and others from original pylance stubs
@property
def is_monotonic_increasing(self) -> bool: ...
@property
def is_monotonic_decreasing(self) -> bool: ...
def bfill(self, limit: Optional[int] = ...) -> Series[S1]: ...
def cummax(self, axis: AxisType = ..., **kwargs) -> Series[S1]: ...
def cummin(self, axis: AxisType = ..., **kwargs) -> Series[S1]: ...
def cumprod(self, axis: AxisType = ..., **kwargs) -> Series[S1]: ...
def cumsum(self, axis: AxisType = ..., **kwargs) -> Series[S1]: ...
def ffill(self, limit: Optional[int] = ...) -> Series[S1]: ...
def first(self, **kwargs) -> Series[S1]: ...
def head(self, n: int = ...) -> Series[S1]: ...
def last(self, **kwargs) -> Series[S1]: ...
def max(self, **kwargs) -> Series[S1]: ...
def mean(self, **kwargs) -> Series[S1]: ...
def median(self, **kwargs) -> Series[S1]: ...
def min(self, **kwargs) -> Series[S1]: ...
def nlargest(self, n: int = ..., keep: str = ...) -> Series[S1]: ...
def nsmallest(self, n: int = ..., keep: str = ...) -> Series[S1]: ...
def nth(self, n: Union[int, Sequence[int]], dropna: Optional[str] = ...) -> Series[S1]: ...
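Illustrative only (toy data assumed): the aggregate overloads above widen the result to a DataFrame for a list or dict of functions and keep a Series for a single name or callable.
# Sketch, not part of the stubs.
import pandas as pd

grouped = pd.DataFrame({"key": ["x", "x", "y"], "val": [1, 2, 3]}).groupby("key")["val"]
as_series = grouped.agg("sum")           # str -> Series
as_frame = grouped.agg(["sum", "mean"])  # list -> DataFrame
print(type(as_series).__name__, type(as_frame).__name__)  # Series DataFrame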
class DataFrameGroupBy(GroupBy):
def any(self, skipna: bool = ...) -> DataFrame: ...
def all(self, skipna: bool = ...) -> DataFrame: ...
def apply(self, func, *args, **kwargs) -> DataFrame: ...
@overload
def aggregate(self, arg: str, *args, **kwargs) -> DataFrame: ...
@overload
def aggregate(self, arg: Dict, *args, **kwargs) -> DataFrame: ...
@overload
def aggregate(self, arg: Callable[[], Any], *args, **kwargs) -> DataFrame: ...
@overload
def agg(self, arg: str, *args, **kwargs) -> DataFrame: ...
@overload
def agg(self, arg: Dict, *args, **kwargs) -> DataFrame: ...
@overload
def agg(self, arg: F, *args, **kwargs) -> DataFrame: ...
def transform(self, func, *args, **kwargs): ...
def filter(self, func: Callable, dropna: bool = ..., *args, **kwargs) -> DataFrame: ...
def nunique(self, dropna: bool = ...) -> DataFrame: ...
@overload
def __getitem__(self, item: str) -> SeriesGroupBy: ...
@overload
def __getitem__(self, item: List[str]) -> DataFrameGroupBy: ...
def count(self) -> DataFrame: ...
def boxplot(
self,
grouped: DataFrame,
subplots: bool = ...,
column: Optional[Union[str, Sequence]] = ...,
fontsize: Union[int, str] = ...,
rot: float = ...,
grid: bool = ...,
ax: Optional[PlotAxes] = ...,
figsize: Optional[Tuple[float, float]] = ...,
layout: Optional[Tuple[int, int]] = ...,
sharex: bool = ...,
sharey: bool = ...,
bins: Union[int, Sequence] = ...,
backend: Optional[str] = ...,
**kwargs,
) -> Union[AxesSubplot, Sequence[AxesSubplot]]: ...
# Overrides and others from original pylance stubs
## These are "properties" upstream, but properties cannot take all of these arguments.
def corr(self, method: Union[str, Callable], min_periods: int = ...) -> DataFrame: ...
def cov(self, min_periods: int = ...) -> DataFrame: ...
def diff(self, periods: int = ..., axis: AxisType = ...) -> DataFrame: ...
def bfill(self, limit: Optional[int] = ...) -> DataFrame: ...
def corrwith(
self,
other: DataFrame,
axis: AxisType = ...,
drop: bool = ...,
method: str = ...,
) -> Series: ...
def cummax(self, axis: AxisType = ..., **kwargs) -> DataFrame: ...
def cummin(self, axis: AxisType = ..., **kwargs) -> DataFrame: ...
def cumprod(self, axis: AxisType = ..., **kwargs) -> DataFrame: ...
def cumsum(self, axis: AxisType = ..., **kwargs) -> DataFrame: ...
def describe(self, **kwargs) -> DataFrame: ...
def ffill(self, limit: Optional[int] = ...) -> DataFrame: ...
@overload
def fillna(
self,
value,
method: Optional[str] = ...,
axis: AxisType = ...,
limit: Optional[int] = ...,
downcast: Optional[Dict] = ...,
*,
inplace: Literal[True],
) -> None: ...
@overload
def fillna(
self,
value,
method: Optional[str] = ...,
axis: AxisType = ...,
limit: Optional[int] = ...,
downcast: Optional[Dict] = ...,
*,
inplace: Literal[False],
) -> DataFrame: ...
@overload
def fillna(
self,
value,
method: Optional[str] = ...,
axis: AxisType = ...,
inplace: bool = ...,
limit: Optional[int] = ...,
downcast: Optional[Dict] = ...,
) -> Union[None, DataFrame]: ...
def first(self, **kwargs) -> DataFrame: ...
def head(self, n: int = ...) -> DataFrame: ...
def hist(
self,
data: DataFrame,
column: Optional[Union[str, Sequence]] = ...,
by=...,
grid: bool = ...,
xlabelsize: Optional[int] = ...,
xrot: Optional[float] = ...,
ylabelsize: Optional[int] = ...,
yrot: Optional[float] = ...,
ax: Optional[PlotAxes] = ...,
sharex: bool = ...,
sharey: bool = ...,
figsize: Optional[Tuple[float, float]] = ...,
layout: Optional[Tuple[int, int]] = ...,
bins: Union[int, Sequence] = ...,
backend: Optional[str] = ...,
**kwargs,
) -> Union[AxesSubplot, Sequence[AxesSubplot]]: ...
def idxmax(self, axis: AxisType = ..., skipna: bool = ...) -> Series: ...
def idxmin(self, axis: AxisType = ..., skipna: bool = ...) -> Series: ...
def last(self, **kwargs) -> DataFrame: ...
@overload
def mad(
self, axis: AxisType = ..., skipna: bool = ..., numeric_only: Optional[bool] = ..., *, level: Level, **kwargs
) -> DataFrame: ...
@overload
def mad(
self, axis: AxisType = ..., skipna: bool = ..., level: None = ..., numeric_only: Optional[bool] = ..., **kwargs
) -> Series: ...
def max(self, **kwargs) -> DataFrame: ...
def mean(self, **kwargs) -> DataFrame: ...
def median(self, **kwargs) -> DataFrame: ...
def min(self, **kwargs) -> DataFrame: ...
def nth(self, n: Union[int, Sequence[int]], dropna: Optional[str] = ...) -> DataFrame: ...
def pct_change(
self,
periods: int = ...,
fill_method: str = ...,
limit=...,
freq=...,
axis: AxisType = ...,
) -> DataFrame: ...
def prod(self, **kwargs) -> DataFrame: ...
def quantile(self, q: float = ..., interpolation: str = ...) -> DataFrame: ...
def resample(self, rule, *args, **kwargs) -> Grouper: ...
def sem(self, ddof: int = ...) -> DataFrame: ...
def shift(
self,
periods: int = ...,
freq: str = ...,
axis: AxisType = ...,
fill_value=...,
) -> DataFrame: ...
def size(self) -> Series[int]: ...
@overload
def skew(
self, axis: AxisType = ..., skipna: bool = ..., numeric_only: bool = ..., *, level: Level, **kwargs
) -> DataFrame: ...
@overload
def skew(self, axis: AxisType = ..., skipna: bool = ..., level: None = ..., numeric_only: bool = ..., **kwargs) -> Series: ...
def std(self, ddof: int = ...) -> DataFrame: ...
def sum(self, **kwargs) -> DataFrame: ...
def tail(self, n: int = ...) -> DataFrame: ...
def take(self, indices: Sequence, axis: AxisType = ..., **kwargs) -> DataFrame: ...
def tshift(self, periods: int, freq=..., axis: AxisType = ...) -> DataFrame: ...
def var(self, ddof: int = ...) -> DataFrame: ...
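A quick sketch (toy data) of the __getitem__ overloads declared above: a single column label yields a SeriesGroupBy, a list of labels keeps a DataFrameGroupBy.
# Illustrative only.
import pandas as pd

g = pd.DataFrame({"k": ["a", "a", "b"], "x": [1, 2, 3], "y": [4, 5, 6]}).groupby("k")
one = g["x"]          # str -> SeriesGroupBy
many = g[["x", "y"]]  # List[str] -> DataFrameGroupBy
print(type(one).__name__, type(many).__name__)  # SeriesGroupBy DataFrameGroupBy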


@ -1,131 +0,0 @@
from pandas._typing import (
FrameOrSeries as FrameOrSeries,
FrameOrSeriesUnion as FrameOrSeriesUnion,
Scalar as Scalar,
AxisType as AxisType,
KeysArgType,
)
from pandas.core.base import PandasObject as PandasObject, SelectionMixin as SelectionMixin
from pandas.core.frame import DataFrame as DataFrame
from pandas.core.generic import NDFrame as NDFrame
from pandas.core.groupby import ops as ops
from pandas.core.indexes.api import Index as Index
from pandas.core.series import Series as Series
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
class GroupByPlot(PandasObject):
def __init__(self, groupby) -> None: ...
def __call__(self, *args, **kwargs): ...
def __getattr__(self, name: str): ...
class _GroupBy(PandasObject):
level = ...
as_index = ...
keys = ...
sort = ...
group_keys = ...
squeeze = ...
observed = ...
mutated = ...
obj = ...
axis = ...
grouper = ...
exclusions = ...
def __init__(
self,
obj: NDFrame,
keys: Optional[KeysArgType] = ...,
axis: int = ...,
level=...,
grouper: Optional[ops.BaseGrouper] = ...,
exclusions=...,
selection=...,
as_index: bool = ...,
sort: bool = ...,
group_keys: bool = ...,
squeeze: bool = ...,
observed: bool = ...,
mutated: bool = ...,
) -> None: ...
def __len__(self) -> int: ...
@property
def groups(self) -> Dict[str, str]: ...
@property
def ngroups(self): ...
@property
def indices(self) -> Dict[str, Index]: ...
def __getattr__(self, attr: str): ...
def pipe(self, func: Callable, *args, **kwargs): ...
plot = ...
def get_group(self, name, obj: Optional[DataFrame] = ...) -> DataFrame: ...
def __iter__(self) -> Generator[Tuple[str, Any], None, None]: ...
def apply(self, func: Callable, *args, **kwargs) -> FrameOrSeriesUnion: ...
class GroupBy(_GroupBy):
def count(self) -> FrameOrSeriesUnion: ...
def mean(self, **kwargs) -> FrameOrSeriesUnion: ...
def median(self, **kwargs) -> FrameOrSeriesUnion: ...
def std(self, ddof: int = ...) -> FrameOrSeriesUnion: ...
def var(self, ddof: int = ...) -> FrameOrSeriesUnion: ...
def sem(self, ddof: int = ...) -> FrameOrSeriesUnion: ...
def size(self) -> Series: ...
def ohlc(self) -> DataFrame: ...
def describe(self, **kwargs) -> FrameOrSeriesUnion: ...
def resample(self, rule, *args, **kwargs): ...
def rolling(self, *args, **kwargs): ...
def expanding(self, *args, **kwargs): ...
def pad(self, limit: Optional[int] = ...): ...
def ffill(self, limit: Optional[int] = ...) -> FrameOrSeriesUnion: ...
def backfill(self, limit: Optional[int] = ...) -> FrameOrSeriesUnion: ...
def bfill(self, limit: Optional[int] = ...) -> FrameOrSeriesUnion: ...
def nth(self, n: Union[int, List[int]], dropna: Optional[str] = ...) -> FrameOrSeriesUnion: ...
def quantile(self, q=..., interpolation: str = ...): ...
def ngroup(self, ascending: bool = ...) -> Series: ...
def cumcount(self, ascending: bool = ...) -> Series: ...
def rank(
self,
method: str = ...,
ascending: bool = ...,
na_option: str = ...,
pct: bool = ...,
axis: int = ...,
) -> DataFrame: ...
def cummax(self, axis: AxisType = ..., **kwargs) -> FrameOrSeriesUnion: ...
def cummin(self, axis: AxisType = ..., **kwargs) -> FrameOrSeriesUnion: ...
def cumprod(self, axis: AxisType = ..., **kwargs) -> FrameOrSeriesUnion: ...
def cumsum(self, axis: AxisType = ..., **kwargs) -> FrameOrSeriesUnion: ...
def shift(self, periods: int = ..., freq=..., axis: AxisType = ..., fill_value=...): ...
def pct_change(
self,
periods: int = ...,
fill_method: str = ...,
limit=...,
freq=...,
axis: AxisType = ...,
) -> FrameOrSeriesUnion: ...
def head(self, n: int = ...) -> FrameOrSeriesUnion: ...
def tail(self, n: int = ...) -> FrameOrSeriesUnion: ...
# Surplus methods from original pylance stubs; should they go away?
def first(self, **kwargs) -> FrameOrSeriesUnion: ...
def last(self, **kwargs) -> FrameOrSeriesUnion: ...
def max(self, **kwargs) -> FrameOrSeriesUnion: ...
def min(self, **kwargs) -> FrameOrSeriesUnion: ...
def prod(self, **kwargs) -> FrameOrSeriesUnion: ...
def sum(self, **kwargs) -> FrameOrSeriesUnion: ...
def get_groupby(
obj: NDFrame,
by: Optional[KeysArgType] = ...,
axis: int = ...,
level=...,
grouper: Optional[ops.BaseGrouper] = ...,
exclusions=...,
selection=...,
as_index: bool = ...,
sort: bool = ...,
group_keys: bool = ...,
squeeze: bool = ...,
observed: bool = ...,
mutated: bool = ...,
) -> GroupBy: ...


@ -1,82 +0,0 @@
import numpy as np
from pandas._typing import FrameOrSeries as FrameOrSeries
from pandas.core.groupby import grouper as grouper
from pandas.core.indexes.api import Index as Index
from pandas.core.series import Series as Series
from typing import List, Optional, Sequence, Tuple
class BaseGrouper:
axis = ...
sort = ...
group_keys = ...
mutated = ...
indexer = ...
def __init__(self, axis: Index, groupings: Sequence[grouper.Grouping], sort: bool = ..., group_keys: bool = ..., mutated: bool = ..., indexer: Optional[np.ndarray] = ...) -> None: ...
@property
def groupings(self) -> List[grouper.Grouping]: ...
@property
def shape(self): ...
def __iter__(self): ...
@property
def nkeys(self) -> int: ...
def get_iterator(self, data: FrameOrSeries, axis: int = ...): ...
def apply(self, f, data: FrameOrSeries, axis: int = ...): ...
def indices(self): ...
@property
def codes(self) -> List[np.ndarray]: ...
@property
def levels(self) -> List[Index]: ...
@property
def names(self): ...
def size(self) -> Series: ...
def groups(self): ...
def is_monotonic(self) -> bool: ...
def group_info(self): ...
def codes_info(self) -> np.ndarray: ...
def ngroups(self) -> int: ...
@property
def reconstructed_codes(self) -> List[np.ndarray]: ...
def result_index(self) -> Index: ...
def get_group_levels(self): ...
def aggregate(self, values, how: str, axis: int = ..., min_count: int = ...) -> Tuple[np.ndarray, Optional[List[str]]]: ...
def transform(self, values, how: str, axis: int = ..., **kwargs): ...
def agg_series(self, obj: Series, func): ...
class BinGrouper(BaseGrouper):
bins = ...
binlabels = ...
mutated = ...
indexer = ...
def __init__(self, bins, binlabels, filter_empty: bool = ..., mutated: bool = ..., indexer=...) -> None: ...
def groups(self): ...
@property
def nkeys(self) -> int: ...
def get_iterator(self, data: FrameOrSeries, axis: int = ...): ...
def indices(self): ...
def group_info(self): ...
def reconstructed_codes(self) -> List[np.ndarray]: ...
def result_index(self): ...
@property
def levels(self): ...
@property
def names(self): ...
@property
def groupings(self) -> List[grouper.Grouping]: ...
def agg_series(self, obj: Series, func): ...
class DataSplitter:
data = ...
labels = ...
ngroups = ...
axis = ...
def __init__(self, data: FrameOrSeries, labels, ngroups: int, axis: int = ...) -> None: ...
def slabels(self): ...
def sort_idx(self): ...
def __iter__(self): ...
class SeriesSplitter(DataSplitter): ...
class FrameSplitter(DataSplitter):
def fast_apply(self, f, names): ...
def get_splitter(data: FrameOrSeries, *args, **kwargs) -> DataSplitter: ...


@ -1,25 +0,0 @@
from pandas.core.accessor import PandasDelegate as PandasDelegate
from pandas.core.base import NoNewAttributesMixin as NoNewAttributesMixin, PandasObject as PandasObject
from pandas.core.series import Series
class Properties(PandasDelegate, PandasObject, NoNewAttributesMixin):
orig = ...
name = ...
def __init__(self, data: Series, orig) -> None: ...
class DatetimeProperties(Properties):
def to_pydatetime(self): ...
@property
def freq(self): ...
class TimedeltaProperties(Properties):
def to_pytimedelta(self): ...
@property
def components(self): ...
@property
def freq(self): ...
class PeriodProperties(Properties): ...
class CombinedDatetimelikeProperties(DatetimeProperties, TimedeltaProperties, PeriodProperties):
def __new__(cls, data: Series): ...


@ -1,190 +0,0 @@
import numpy as np
from pandas._typing import (
Dtype as Dtype,
DtypeArg as DtypeArg,
Label as Label,
Level as Level,
Scalar as Scalar,
T1 as T1,
np_ndarray_str,
np_ndarray_int64,
np_ndarray_bool,
)
from pandas._typing import Series as Series, DataFrame as DataFrame, DtypeObj as DtypeObj
from pandas.core.arrays import ExtensionArray
from pandas.core.base import IndexOpsMixin, PandasObject
from pandas.core.strings import StringMethods
from typing import (
Callable,
Dict,
Generic,
Hashable,
Iterable,
Iterator,
List,
Literal,
Optional,
Sequence,
Tuple,
Union,
overload,
)
class InvalidIndexError(Exception): ...
_str = str
class Index(IndexOpsMixin, PandasObject):
def __new__(
cls, data: Iterable = ..., dtype=..., copy: bool = ..., name=..., tupleize_cols: bool = ..., **kwargs
) -> Index: ...
def __init__(
self,
data: Iterable,
dtype=...,
copy: bool = ...,
name=...,
tupleize_cols: bool = ...,
): ...
@property
def str(self) -> StringMethods[Index]: ...
@property
def asi8(self) -> np_ndarray_int64: ...
def is_(self, other) -> bool: ...
def __len__(self) -> int: ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __array_wrap__(self, result, context=...): ...
@property
def dtype(self) -> DtypeObj: ...
def ravel(self, order: _str = ...): ...
def view(self, cls=...): ...
@overload
def astype(self, dtype: DtypeArg) -> Index: ...
@overload
def astype(self, dtype: T1) -> Index: ...
def take(self, indices, axis: int = ..., allow_fill: bool = ..., fill_value=..., **kwargs): ...
def repeat(self, repeats, axis=...): ...
def copy(self, name=..., deep: bool = ...) -> Index: ...
def __copy__(self, **kwargs): ...
def __deepcopy__(self, memo=...): ...
def format(self, name: bool = ..., formatter: Optional[Callable] = ..., na_rep: _str = ...) -> List[_str]: ...
def to_native_types(self, slicer=..., **kwargs): ...
def to_flat_index(self): ...
def to_series(self, index=..., name=...): ...
def to_frame(self, index: bool = ..., name=...) -> DataFrame: ...
@property
def name(self): ...
@name.setter
def name(self, value) -> None: ...
@property
def names(self) -> List[_str]: ...
@names.setter
def names(self, names: List[_str]): ...
def set_names(self, names, level=..., inplace: bool = ...): ...
def rename(self, name, inplace: bool = ...): ...
@property
def nlevels(self) -> int: ...
def sortlevel(self, level=..., ascending: bool = ..., sort_remaining=...): ...
def get_level_values(self, level: Union[int, _str]) -> Index: ...
def droplevel(self, level: Union[Level, List[Level]] = ...): ...
@property
def is_monotonic(self) -> bool: ...
@property
def is_monotonic_increasing(self) -> bool: ...
@property
def is_monotonic_decreasing(self) -> bool: ...
def is_unique(self) -> bool: ...
@property
def has_duplicates(self) -> bool: ...
def is_boolean(self) -> bool: ...
def is_integer(self) -> bool: ...
def is_floating(self) -> bool: ...
def is_numeric(self) -> bool: ...
def is_object(self) -> bool: ...
def is_categorical(self) -> bool: ...
def is_interval(self) -> bool: ...
def is_mixed(self) -> bool: ...
def holds_integer(self): ...
def inferred_type(self): ...
def is_all_dates(self) -> bool: ...
def __reduce__(self): ...
def hasnans(self) -> bool: ...
def isna(self): ...
isnull = ...
def notna(self): ...
notnull = ...
def fillna(self, value=..., downcast=...): ...
def dropna(self, how: _str = ...): ...
def unique(self, level=...) -> Index: ...
def drop_duplicates(self, keep: Literal["first", "last", False] = ...) -> IndexOpsMixin: ...
def duplicated(self, keep: _str = ...): ...
def __add__(self, other) -> Index: ...
def __radd__(self, other) -> Index: ...
def __iadd__(self, other) -> Index: ...
def __sub__(self, other) -> Index: ...
def __rsub__(self, other) -> Index: ...
def __and__(self, other) -> Index: ...
def __or__(self, other) -> Index: ...
def __xor__(self, other) -> Index: ...
def __nonzero__(self) -> None: ...
__bool__ = ...
def union(self, other: Union[List[T1], Index], sort=...) -> Index: ...
def intersection(self, other: Union[List[T1], Index], sort: bool = ...) -> Index: ...
def difference(self, other: Union[List[T1], Index]) -> Index: ...
def symmetric_difference(self, other: Union[List[T1], Index], result_name=..., sort=...) -> Index: ...
def get_loc(self, key, method=..., tolerance=...): ...
def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
def reindex(self, target, method=..., level=..., limit=..., tolerance=...): ...
def join(self, other, how: _str = ..., level=..., return_indexers: bool = ..., sort: bool = ...): ...
@property
def values(self) -> np.ndarray: ...
def array(self) -> ExtensionArray: ...
def memory_usage(self, deep: bool = ...): ...
def where(self, cond, other=...): ...
def is_type_compatible(self, kind) -> bool: ...
def __contains__(self, key) -> bool: ...
def __hash__(self) -> int: ...
def __setitem__(self, key, value) -> None: ...
@overload
def __getitem__(self, idx: Union[slice, np_ndarray_int64, Index]) -> Index: ...
@overload
def __getitem__(self, idx: Union[int, Tuple[np_ndarray_int64, ...]]) -> Hashable: ...
def append(self, other): ...
def putmask(self, mask, value): ...
def equals(self, other) -> bool: ...
def identical(self, other) -> bool: ...
def asof(self, label): ...
def asof_locs(self, where, mask): ...
def sort_values(self, return_indexer: bool = ..., ascending: bool = ...): ...
def sort(self, *args, **kwargs) -> None: ...
def shift(self, periods: int = ..., freq=...) -> None: ...
def argsort(self, *args, **kwargs): ...
def get_value(self, series, key): ...
def set_value(self, arr, key, value) -> None: ...
def get_indexer_non_unique(self, target): ...
def get_indexer_for(self, target, **kwargs): ...
def groupby(self, values) -> Dict[Hashable, np.ndarray]: ...
def map(self, mapper, na_action=...) -> Index: ...
def isin(self, values, level=...) -> np_ndarray_bool: ...
def slice_indexer(self, start=..., end=..., step=..., kind=...): ...
def get_slice_bound(self, label, side, kind): ...
def slice_locs(self, start=..., end=..., step=..., kind=...): ...
def delete(self, loc): ...
def insert(self, loc, item): ...
def drop(self, labels, *, errors: _str = ...) -> Index: ...
@property
def shape(self) -> Tuple[int, ...]: ...
# Extra methods from old stubs
def __eq__(self, other: object) -> bool: ... # Series: ... # type: ignore
def __iter__(self) -> Iterator: ...
def __ne__(self, other: _str) -> Index: ... # type: ignore
def to_numpy(self) -> np.ndarray: ...
def ensure_index_from_sequences(sequences: Sequence[Sequence[Dtype]], names: Sequence[str] = ...) -> Index: ...
def ensure_index(index_like: Union[Sequence, Index], copy: bool = ...) -> Index: ...
def maybe_extract_name(name, obj, cls) -> Label: ...
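A short sketch (toy index) of the __getitem__ overloads above: slices and integer arrays return another Index, a single integer position returns one label.
# Illustrative only.
import pandas as pd

idx = pd.Index(["a", "b", "c"])
sub = idx[1:]   # slice -> Index
one = idx[0]    # int -> a single label, "a"
print(list(sub), one)  # ['b', 'c'] a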


@ -1,34 +0,0 @@
from pandas.core.accessor import PandasDelegate as PandasDelegate
from pandas.core.indexes.extension import ExtensionIndex as ExtensionIndex
from pandas.core.indexes.numeric import Int64Index as Int64Index
from pandas.tseries.frequencies import DateOffset as DateOffset
from typing import List, Optional
class DatetimeIndexOpsMixin(ExtensionIndex):
freq: Optional[DateOffset]
freqstr: Optional[str]
@property
def is_all_dates(self) -> bool: ...
@property
def values(self): ...
def __array_wrap__(self, result, context=...): ...
def equals(self, other) -> bool: ...
def __contains__(self, key): ...
def sort_values(self, return_indexer: bool = ..., ascending: bool = ...): ...
def take(self, indices, axis: int = ..., allow_fill: bool = ..., fill_value=..., **kwargs): ...
def tolist(self) -> List: ...
def min(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
def argmin(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
def max(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
def argmax(self, axis=..., skipna: bool = ..., *args, **kwargs): ...
def isin(self, values, level=...): ...
def where(self, cond, other=...): ...
def shift(self, periods: int = ..., freq=...): ...
def delete(self, loc): ...
class DatetimeTimedeltaMixin(DatetimeIndexOpsMixin, Int64Index):
def difference(self, other, sort=...): ...
def intersection(self, other, sort: bool = ...): ...
def join(self, other, how: str = ..., level=..., return_indexers=..., sort=...): ...
class DatetimelikeDelegateMixin(PandasDelegate): ...


@ -1,66 +0,0 @@
import numpy as np
from datetime import tzinfo as tzinfo
from pandas.core.indexes.datetimelike import (
DatetimeTimedeltaMixin as DatetimeTimedeltaMixin,
DatetimelikeDelegateMixin as DatetimelikeDelegateMixin,
)
from pandas.core.indexes.timedeltas import TimedeltaIndex as TimedeltaIndex
from pandas.core.series import Series as Series, TimedeltaSeries, TimestampSeries
from pandas._typing import Timestamp as Timestamp, Timedelta as Timedelta
from typing import Optional, Union, overload
class DatetimeDelegateMixin(DatetimelikeDelegateMixin): ...
class DatetimeIndex(DatetimeTimedeltaMixin, DatetimeDelegateMixin):
tz: Optional[tzinfo]
def __init__(
self,
data=...,
freq=...,
tz=...,
normalize: bool = ...,
closed=...,
ambiguous: str = ...,
dayfirst: bool = ...,
yearfirst: bool = ...,
dtype=...,
copy: bool = ...,
name=...,
): ...
def __array__(self, dtype=...) -> np.ndarray: ...
def __reduce__(self): ...
@overload
def __add__(self, other: TimedeltaSeries) -> TimestampSeries: ...
@overload
def __add__(self, other: Union[Timedelta, TimedeltaIndex]) -> DatetimeIndex: ...
def union_many(self, others): ...
def to_series(self, keep_tz=..., index=..., name=...): ...
def snap(self, freq: str = ...): ...
def get_value(self, series, key): ...
def get_value_maybe_box(self, series, key): ...
def get_loc(self, key, method=..., tolerance=...): ...
def slice_indexer(self, start=..., end=..., step=..., kind=...): ...
def searchsorted(self, value, side: str = ..., sorter=...): ...
def is_type_compatible(self, typ) -> bool: ...
@property
def inferred_type(self) -> str: ...
def insert(self, loc, item): ...
def indexer_at_time(self, time, asof: bool = ...): ...
def indexer_between_time(self, start_time, end_time, include_start: bool = ..., include_end: bool = ...): ...
def strftime(self, date_format: str = ...) -> np.ndarray: ...
def date_range(
start=..., end=..., periods=..., freq=..., tz=..., normalize=..., name=..., closed=..., **kwargs
) -> DatetimeIndex: ...
def bdate_range(
start=...,
end=...,
periods=...,
freq: str = ...,
tz=...,
normalize: bool = ...,
name=...,
weekmask=...,
holidays=...,
closed=...,
) -> DatetimeIndex: ...
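A small sketch (ad-hoc dates) of the __add__ overloads above: adding a Timedelta or a TimedeltaIndex to a DatetimeIndex keeps a DatetimeIndex.
# Illustrative only.
import pandas as pd

dti = pd.date_range("2021-01-01", periods=3, freq="D")
shifted = dti + pd.Timedelta(days=1)                  # Timedelta -> DatetimeIndex
offset = dti + pd.to_timedelta([1, 2, 3], unit="D")   # TimedeltaIndex -> DatetimeIndex
print(shifted[0], offset[-1])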


@ -1,16 +0,0 @@
from pandas.core.indexes.base import Index as Index
from typing import List
def inherit_from_data(name: str, delegate, cache: bool = ..., wrap: bool = ...): ...
def inherit_names(names: List[str], delegate, cache: bool = ..., wrap: bool = ...): ...
def make_wrapped_arith_op(opname): ...
class ExtensionIndex(Index):
def __getitem__(self, key): ...
def __iter__(self): ...
def dropna(self, how: str = ...): ...
def repeat(self, repeats, axis=...): ...
def take(self, indices, axis: int = ..., allow_fill: bool = ..., fill_value=..., **kwargs): ...
def unique(self, level=...): ...
def map(self, mapper, na_action=...): ...
def astype(self, dtype, copy: bool = ...): ...


@ -1,25 +0,0 @@
from pandas.core.base import PandasObject as PandasObject
class FrozenList(PandasObject, list):
def union(self, other) -> FrozenList: ...
def difference(self, other) -> FrozenList: ...
__add__ = ...
__iadd__ = ...
def __getitem__(self, n): ...
def __radd__(self, other): ...
def __eq__(self, other) -> bool: ...
__req__ = ...
def __mul__(self, other): ...
__imul__ = ...
def __reduce__(self): ...
def __hash__(self): ...
__setitem__ = ...
__setslice__ = ...
__delitem__ = ...
__delslice__ = ...
pop = ...
append = ...
extend = ...
remove = ...
sort = ...
insert = ...


@ -1,117 +0,0 @@
import numpy as np
from pandas.core.indexes.base import Index as Index
from typing import Callable, Hashable, List, Optional, Sequence, Union
from pandas._typing import np_ndarray_bool, DtypeArg as DtypeArg, T1 as T1
class MultiIndex(Index):
def __new__(
cls,
levels=...,
codes=...,
sortorder=...,
names=...,
dtype=...,
copy=...,
name=...,
verify_integrity: bool = ...,
_set_identity: bool = ...,
) -> MultiIndex: ...
def __init__(
self,
levels=...,
codes=...,
sortorder=...,
names=...,
dtype=...,
copy=...,
name=...,
verify_integrity: bool = ...,
_set_identity: bool = ...,
) -> None: ...
@classmethod
def from_arrays(cls, arrays, sortorder=..., names=...) -> MultiIndex: ...
@classmethod
def from_tuples(cls, tuples, sortorder=..., names=...) -> MultiIndex: ...
@classmethod
def from_product(cls, iterables, sortorder=..., names=...) -> MultiIndex: ...
@classmethod
def from_frame(cls, df, sortorder=..., names=...) -> MultiIndex: ...
@property
def shape(self): ...
@property # Should be read-only
def levels(self) -> List[Index]: ...
def set_levels(self, levels, level=..., inplace: bool = ..., verify_integrity: bool = ...): ...
@property
def codes(self): ...
def set_codes(self, codes, level=..., inplace: bool = ..., verify_integrity: bool = ...): ...
def copy(self, names=..., deep: bool = ...) -> MultiIndex: ...
def __array__(self, dtype=...) -> np.ndarray: ...
def view(self, cls=...): ...
def __contains__(self, key) -> bool: ...
def dtype(self) -> np.dtype: ...
def memory_usage(self, deep: bool = ...) -> int: ...
def nbytes(self) -> int: ...
def format(
self,
name: Optional[bool] = ...,
formatter: Optional[Callable] = ...,
na_rep: Optional[str] = ...,
names: bool = ...,
space: int = ...,
sparsify: Optional[bool] = ...,
adjoin: bool = ...,
) -> List: ...
def __len__(self) -> int: ...
def inferred_type(self) -> str: ...
@property
def values(self): ...
def is_monotonic_increasing(self) -> bool: ...
def is_monotonic_decreasing(self) -> bool: ...
def duplicated(self, keep: str = ...): ...
def fillna(self, value=..., downcast=...) -> None: ...
def dropna(self, how: str = ...): ...
def get_value(self, series, key): ...
def get_level_values(self, level: Union[str, int]) -> Index: ...
def unique(self, level=...): ...
def to_frame(self, index: bool = ..., name=...): ...
def to_flat_index(self): ...
@property
def is_all_dates(self) -> bool: ...
def is_lexsorted(self) -> bool: ...
def lexsort_depth(self): ...
def remove_unused_levels(self): ...
@property
def nlevels(self) -> int: ...
@property
def levshape(self): ...
def __reduce__(self): ...
def __getitem__(self, key): ...
def take(self, indices, axis: int = ..., allow_fill: bool = ..., fill_value=..., **kwargs): ...
def append(self, other): ...
def argsort(self, *args, **kwargs): ...
def repeat(self, repeats, axis=...): ...
def where(self, cond, other=...) -> None: ...
def drop(self, codes, *, level=..., errors: str = ...) -> MultiIndex: ...
def swaplevel(self, i: int = ..., j: int = ...): ...
def reorder_levels(self, order): ...
def sortlevel(self, level: int = ..., ascending: bool = ..., sort_remaining: bool = ...): ...
def get_indexer(self, target, method=..., limit=..., tolerance=...): ...
def get_indexer_non_unique(self, target): ...
def reindex(self, target, method=..., level=..., limit=..., tolerance=...): ...
def get_slice_bound(self, label: Union[Hashable, Sequence[Hashable]], side: str, kind: str) -> int: ...
def slice_locs(self, start=..., end=..., step=..., kind=...): ...
def get_loc(self, key, method=...): ...
def get_loc_level(self, key, level=..., drop_level: bool = ...): ...
def get_locs(self, seq): ...
def truncate(self, before=..., after=...): ...
def equals(self, other) -> bool: ...
def equal_levels(self, other): ...
def union(self, other, sort=...): ...
def intersection(self, other, sort: bool = ...): ...
def difference(self, other, sort=...): ...
def astype(self, dtype: Union[DtypeArg, T1], copy: bool = ...) -> MultiIndex: ...
def insert(self, loc, item): ...
def delete(self, loc): ...
def isin(self, values, level=...) -> np_ndarray_bool: ...
def maybe_droplevels(index, key): ...
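A brief sketch (hypothetical levels) of the MultiIndex classmethod constructors declared above.
# Illustrative only.
import pandas as pd

mi = pd.MultiIndex.from_product([["a", "b"], [1, 2]], names=["letter", "number"])
print(mi.nlevels, len(mi))            # 2 4
print(mi.get_level_values("letter"))  # the first level repeated for every combination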


@ -1,36 +0,0 @@
import numpy as np
from pandas.core.indexes.base import Index as Index
from typing import Iterable, TypeVar
from pandas._typing import T1 as T1, np_ndarray_int64
class NumericIndex(Index):
def __init__(self, data: Iterable = ..., dtype=..., copy: bool = ..., name=...): ...
@property
def is_all_dates(self) -> bool: ...
def insert(self, loc, item): ...
class IntegerIndex(NumericIndex):
def __contains__(self, key) -> bool: ...
class Int64Index(IntegerIndex):
@property
def inferred_type(self) -> str: ...
@property
def asi8(self) -> np_ndarray_int64: ...
class UInt64Index(IntegerIndex):
@property
def inferred_type(self) -> str: ...
@property
def asi8(self) -> np_ndarray_int64: ...
class Float64Index(NumericIndex):
@property
def inferred_type(self) -> str: ...
def astype(self, dtype, copy: bool = ...): ...
def get_value(self, series, key): ...
def equals(self, other) -> bool: ...
def __contains__(self, other) -> bool: ...
def get_loc(self, key, method=..., tolerance=...): ...
def is_unique(self) -> bool: ...
def isin(self, values, level=...): ...


@ -1,33 +0,0 @@
from typing import Union, overload
from pandas._libs import Timedelta as Timedelta
from pandas.core.arrays import datetimelike as dtl
from pandas.core.indexes.datetimelike import (
DatetimeIndexOpsMixin as DatetimeIndexOpsMixin,
DatetimeTimedeltaMixin as DatetimeTimedeltaMixin,
DatetimelikeDelegateMixin as DatetimelikeDelegateMixin,
)
from pandas.core.indexes.datetimes import DatetimeIndex as DatetimeIndex
from pandas._typing import num
class TimedeltaDelegateMixin(DatetimelikeDelegateMixin): ...
class TimedeltaIndex(DatetimeTimedeltaMixin, dtl.TimelikeOps, TimedeltaDelegateMixin):
def __new__(cls, data=..., unit=..., freq=..., closed=..., dtype=..., copy: bool = ..., name=...): ...
@overload
def __add__(self, other: DatetimeIndex) -> DatetimeIndex: ...
@overload
def __add__(self, other: Union[Timedelta, TimedeltaIndex]) -> TimedeltaIndex: ...
def __sub__(self, other: Union[Timedelta, TimedeltaIndex]) -> TimedeltaIndex: ...
def __mul__(self, other: num) -> TimedeltaIndex: ...
def __truediv__(self, other: num) -> TimedeltaIndex: ...
def astype(self, dtype, copy: bool = ...): ...
def get_value(self, series, key): ...
def get_value_maybe_box(self, series, key: Timedelta): ...
def get_loc(self, key, method=..., tolerance=...): ...
def searchsorted(self, value, side: str = ..., sorter=...): ...
def is_type_compatible(self, typ) -> bool: ...
@property
def inferred_type(self) -> str: ...
def insert(self, loc, item): ...
def timedelta_range(start=..., end=..., periods=..., freq=..., name=..., closed=...) -> TimedeltaIndex: ...
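A short sketch (toy values) of the arithmetic declared above: multiplying by a number stays a TimedeltaIndex, while adding a DatetimeIndex produces a DatetimeIndex.
# Illustrative only.
import pandas as pd

tdi = pd.timedelta_range("1 day", periods=3)
doubled = tdi * 2                                        # num -> TimedeltaIndex
anchored = tdi + pd.date_range("2021-01-01", periods=3)  # DatetimeIndex -> DatetimeIndex
print(doubled[-1], anchored[0])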


@ -1,50 +0,0 @@
import numpy as np
from pandas._libs.indexing import _NDFrameIndexerBase
from pandas.core.indexes.api import Index as Index
from pandas._typing import StrLike, Scalar
from typing import Tuple, Union
class _IndexSlice:
def __getitem__(self, arg) -> Tuple[Union[StrLike, Scalar, slice], ...]: ...
IndexSlice: _IndexSlice
class IndexingError(Exception): ...
class IndexingMixin:
@property
def iloc(self) -> _iLocIndexer: ...
@property
def loc(self) -> _LocIndexer: ...
@property
def at(self) -> _AtIndexer: ...
@property
def iat(self) -> _iAtIndexer: ...
class _NDFrameIndexer(_NDFrameIndexerBase):
axis = ...
def __call__(self, axis=...): ...
def __getitem__(self, key): ...
def __setitem__(self, key, value) -> None: ...
class _LocationIndexer(_NDFrameIndexer):
def __getitem__(self, key): ...
class _LocIndexer(_LocationIndexer): ...
class _iLocIndexer(_LocationIndexer): ...
class _ScalarAccessIndexer(_NDFrameIndexerBase):
def __getitem__(self, key): ...
def __setitem__(self, key, value) -> None: ...
class _AtIndexer(_ScalarAccessIndexer): ...
class _iAtIndexer(_ScalarAccessIndexer): ...
def convert_to_index_sliceable(obj, key): ...
def check_bool_indexer(index: Index, key) -> np.ndarray: ...
def convert_missing_indexer(indexer): ...
def convert_from_missing_indexer_tuple(indexer, axes): ...
def maybe_convert_ix(*args): ...
def is_nested_tuple(tup, labels) -> bool: ...
def is_label_like(key) -> bool: ...
def need_slice(obj) -> bool: ...
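A minimal sketch (toy MultiIndex) of what _IndexSlice.__getitem__ is for: pd.IndexSlice builds the tuple of slices that .loc consumes on a MultiIndex.
# Illustrative only.
import pandas as pd

mi = pd.MultiIndex.from_product([["a", "b"], [1, 2]])
df = pd.DataFrame({"v": range(4)}, index=mi)
idx = pd.IndexSlice
print(df.loc[idx[:, 2], "v"])  # rows whose second level equals 2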


@ -1,17 +0,0 @@
from .blocks import (
Block as Block,
BoolBlock as BoolBlock,
CategoricalBlock as CategoricalBlock,
DatetimeBlock as DatetimeBlock,
DatetimeTZBlock as DatetimeTZBlock,
ExtensionBlock as ExtensionBlock,
ObjectBlock as ObjectBlock,
make_block as make_block,
)
from .managers import (
BlockManager as BlockManager,
SingleBlockManager as SingleBlockManager,
concatenate_block_managers as concatenate_block_managers,
create_block_manager_from_arrays as create_block_manager_from_arrays,
create_block_manager_from_blocks as create_block_manager_from_blocks,
)


@ -1,182 +0,0 @@
from pandas.core.arrays import ExtensionArray as ExtensionArray
from pandas.core.base import PandasObject as PandasObject
from typing import List
class Block(PandasObject):
is_numeric: bool = ...
is_float: bool = ...
is_integer: bool = ...
is_complex: bool = ...
is_datetime: bool = ...
is_datetimetz: bool = ...
is_timedelta: bool = ...
is_bool: bool = ...
is_object: bool = ...
is_categorical: bool = ...
is_extension: bool = ...
ndim = ...
values = ...
def __init__(self, values, placement, ndim=...) -> None: ...
@property
def is_view(self): ...
@property
def is_datelike(self): ...
def is_categorical_astype(self, dtype): ...
def external_values(self, dtype=...): ...
def internal_values(self, dtype=...): ...
def array_values(self) -> ExtensionArray: ...
def get_values(self, dtype=...): ...
def get_block_values(self, dtype=...): ...
def to_dense(self): ...
@property
def fill_value(self): ...
@property
def mgr_locs(self): ...
@mgr_locs.setter
def mgr_locs(self, new_mgr_locs) -> None: ...
@property
def array_dtype(self): ...
def make_block(self, values, placement=...) -> Block: ...
def make_block_same_class(self, values, placement=..., ndim=...): ...
def __len__(self) -> int: ...
def getitem_block(self, slicer, new_mgr_locs=...): ...
@property
def shape(self): ...
@property
def dtype(self): ...
@property
def ftype(self): ...
def merge(self, other): ...
def concat_same_type(self, to_concat, placement=...): ...
def iget(self, i): ...
def set(self, locs, values) -> None: ...
def delete(self, loc) -> None: ...
def apply(self, func, **kwargs): ...
def fillna(self, value, limit=..., inplace: bool = ..., downcast=...): ...
def split_and_operate(self, mask, f, inplace: bool): ...
def downcast(self, dtypes=...): ...
def astype(self, dtype, copy: bool = ..., errors: str = ...): ...
def convert(self, copy: bool = ..., datetime: bool = ..., numeric: bool = ..., timedelta: bool = ..., coerce: bool = ...): ...
def to_native_types(self, slicer=..., na_rep: str = ..., quoting=..., **kwargs): ...
def copy(self, deep: bool = ...): ...
def replace(self, to_replace, value, inplace: bool = ..., filter=..., regex: bool = ..., convert: bool = ...): ...
def setitem(self, indexer, value): ...
def putmask(self, mask, new, align: bool = ..., inplace: bool = ..., axis: int = ..., transpose: bool = ...): ...
def coerce_to_target_dtype(self, other): ...
def interpolate(
self,
*,
method: str = ...,
axis: int = ...,
index=...,
inplace: bool = ...,
limit=...,
limit_direction: str = ...,
limit_area=...,
fill_value=...,
downcast=...,
**kwargs,
): ...
def take_nd(self, indexer, axis, new_mgr_locs=..., fill_tuple=...): ...
def diff(self, n: int, axis: int = ...) -> List[Block]: ...
def shift(self, periods, axis: int = ..., fill_value=...): ...
def where(self, other, cond, align=..., errors=..., try_cast: bool = ..., axis: int = ...) -> List[Block]: ...
def equals(self, other) -> bool: ...
def quantile(self, qs, interpolation: str = ..., axis: int = ...): ...
class NonConsolidatableMixIn:
def __init__(self, values, placement, ndim=...) -> None: ...
@property
def shape(self): ...
def iget(self, col): ...
def should_store(self, value): ...
values = ...
def set(self, locs, values, check: bool = ...) -> None: ...
def putmask(self, mask, new, align: bool = ..., inplace: bool = ..., axis: int = ..., transpose: bool = ...): ...
class ExtensionBlock(NonConsolidatableMixIn, Block):
is_extension: bool = ...
def __init__(self, values, placement, ndim=...) -> None: ...
@property
def fill_value(self): ...
@property
def is_view(self): ...
@property
def is_numeric(self): ...
def setitem(self, indexer, value): ...
def get_values(self, dtype=...): ...
def array_values(self) -> ExtensionArray: ...
def to_dense(self): ...
def to_native_types(self, slicer=..., na_rep: str = ..., quoting=..., **kwargs): ...
def take_nd(self, indexer, axis: int = ..., new_mgr_locs=..., fill_tuple=...): ...
def concat_same_type(self, to_concat, placement=...): ...
def fillna(self, value, limit=..., inplace: bool = ..., downcast=...): ...
def interpolate(self, *, method: str = ..., axis: int = ..., inplace: bool = ..., limit=..., fill_value=..., **kwargs): ...
def diff(self, n: int, axis: int = ...) -> List[Block]: ...
def shift(self, periods: int, axis: int = ..., fill_value=...) -> List[ExtensionBlock]: ...
def where(self, other, cond, align=..., errors=..., try_cast: bool = ..., axis: int = ...) -> List[Block]: ...
class ObjectValuesExtensionBlock(ExtensionBlock):
def external_values(self, dtype=...): ...
class NumericBlock(Block):
is_numeric: bool = ...
class DatetimeLikeBlockMixin:
@property
def fill_value(self): ...
def get_values(self, dtype=...): ...
def iget(self, key): ...
def shift(self, periods, axis: int = ..., fill_value=...): ...
class DatetimeBlock(DatetimeLikeBlockMixin, Block):
is_datetime: bool = ...
def __init__(self, values, placement, ndim=...) -> None: ...
def astype(self, dtype, copy: bool = ..., errors: str = ...): ...
def to_native_types(self, slicer=..., na_rep=..., date_format=..., quoting=..., **kwargs): ...
def should_store(self, value): ...
def set(self, locs, values) -> None: ...
def external_values(self): ...
def array_values(self) -> ExtensionArray: ...
class DatetimeTZBlock(DatetimeBlock):
is_datetimetz: bool = ...
is_extension: bool = ...
fill_value = ...
@property
def is_view(self): ...
def get_values(self, dtype=...): ...
def to_dense(self): ...
def diff(self, n: int, axis: int = ...) -> List[Block]: ...
def concat_same_type(self, to_concat, placement=...): ...
def fillna(self, value, limit=..., inplace: bool = ..., downcast=...): ...
def setitem(self, indexer, value): ...
def equals(self, other) -> bool: ...
def quantile(self, qs, interpolation: str = ..., axis: int = ...): ...
class BoolBlock(NumericBlock):
is_bool: bool = ...
def should_store(self, value): ...
def replace(self, to_replace, value, inplace: bool = ..., filter=..., regex: bool = ..., convert: bool = ...): ...
class ObjectBlock(Block):
is_object: bool = ...
def __init__(self, values, placement=..., ndim: int = ...) -> None: ...
@property
def is_bool(self): ...
def convert(self, copy: bool = ..., datetime: bool = ..., numeric: bool = ..., timedelta: bool = ..., coerce: bool = ...): ...
def should_store(self, value): ...
def replace(self, to_replace, value, inplace: bool = ..., filter=..., regex: bool = ..., convert: bool = ...): ...
class CategoricalBlock(ExtensionBlock):
is_categorical: bool = ...
def __init__(self, values, placement, ndim=...) -> None: ...
@property
def array_dtype(self): ...
def to_dense(self): ...
def to_native_types(self, slicer=..., na_rep: str = ..., quoting=..., **kwargs): ...
def concat_same_type(self, to_concat, placement=...): ...
def replace(self, to_replace, value, inplace: bool = ..., filter=..., regex: bool = ..., convert: bool = ...): ...
def get_block_type(values, dtype=...): ...
def make_block(values, placement, klass=..., ndim=..., dtype=...): ...


@ -1,15 +0,0 @@
def get_mgr_concatenation_plan(mgr, indexers): ...
class JoinUnit:
block = ...
indexers = ...
shape = ...
def __init__(self, block, shape, indexers=...) -> None: ...
def needs_filling(self): ...
def dtype(self): ...
def is_na(self): ...
def get_reindexed_values(self, empty_dtype, upcasted_na): ...
def concatenate_join_units(join_units, concat_axis, copy): ...
def is_uniform_join_units(join_units): ...
def combine_concat_plans(plans, concat_axis): ...


@ -1,12 +0,0 @@
import numpy as np
def arrays_to_mgr(arrays, arr_names, index, columns, dtype=...): ...
def masked_rec_array_to_mgr(data, index, columns, dtype, copy): ...
def init_ndarray(values, index, columns, dtype=..., copy: bool = ...): ...
def init_dict(data, index, columns, dtype=...): ...
def prep_ndarray(values, copy=...) -> np.ndarray: ...
def extract_index(data): ...
def reorder_arrays(arrays, arr_columns, columns): ...
def get_names_from_index(data): ...
def to_arrays(data, columns, coerce_float: bool = ..., dtype=...): ...
def sanitize_index(data, index, copy: bool = ...): ...


@ -1,100 +0,0 @@
from pandas.core.base import PandasObject as PandasObject
from pandas.core.indexes.api import Index as Index
from pandas.core.internals.blocks import Block as Block
from typing import List, Sequence, Union
class BlockManager(PandasObject):
axes = ...
blocks = ...
def __init__(self, blocks: Sequence[Block], axes: Sequence[Index], do_integrity_check: bool = ...) -> None: ...
def make_empty(self, axes = ...): ...
def __nonzero__(self): ...
__bool__ = ...
@property
def shape(self): ...
@property
def ndim(self) -> int: ...
def set_axis(self, axis, new_labels) -> None: ...
def rename_axis(self, mapper, axis, copy: bool = ..., level = ...): ...
@property
def items(self): ...
def get_dtype_counts(self): ...
def get_dtypes(self): ...
def __len__(self) -> int: ...
def reduce(self, func, *args, **kwargs): ...
def apply(self, f, filter = ..., **kwargs): ...
def quantile(self, axis: int = ..., consolidate: bool = ..., transposed: bool = ..., interpolation: str = ..., qs = ..., numeric_only = ...): ...
def isna(self, func): ...
def where(self, **kwargs): ...
def setitem(self, **kwargs): ...
def putmask(self, **kwargs): ...
def diff(self, **kwargs): ...
def interpolate(self, **kwargs): ...
def shift(self, **kwargs): ...
def fillna(self, **kwargs): ...
def downcast(self, **kwargs): ...
def astype(self, dtype, copy: bool = ..., errors: str = ...): ...
def convert(self, **kwargs): ...
def replace(self, value, **kwargs): ...
def replace_list(self, src_list, dest_list, inplace: bool = ..., regex: bool = ...): ...
def is_consolidated(self): ...
@property
def is_mixed_type(self): ...
@property
def is_numeric_mixed_type(self): ...
@property
def is_datelike_mixed_type(self): ...
@property
def any_extension_types(self): ...
@property
def is_view(self): ...
def get_bool_data(self, copy: bool = ...): ...
def get_numeric_data(self, copy: bool = ...): ...
def combine(self, blocks, copy: bool = ...): ...
def get_slice(self, slobj: slice, axis: int = ...): ...
def __contains__(self, item) -> bool: ...
@property
def nblocks(self) -> int: ...
def copy(self, deep: bool = ...): ...
def as_array(self, transpose: bool = ..., items = ...): ...
def to_dict(self, copy: bool = ...): ...
def fast_xs(self, loc): ...
def consolidate(self): ...
def get(self, item): ...
def iget(self, i): ...
def delete(self, item) -> None: ...
def set(self, item, value): ...
def insert(self, loc: int, item, value, allow_duplicates: bool = ...): ...
def reindex_axis(self, new_index, axis, method = ..., limit = ..., fill_value = ..., copy: bool = ...): ...
def reindex_indexer(self, new_axis, indexer, axis, fill_value = ..., allow_dups: bool = ..., copy: bool = ...): ...
def take(self, indexer, axis: int = ..., verify: bool = ..., convert: bool = ...): ...
def equals(self, other): ...
def unstack(self, unstacker_func, fill_value): ...
class SingleBlockManager(BlockManager):
ndim: int = ...
axes = ...
blocks = ...
def __init__(self, block: Block, axis: Union[Index, List[Index]], do_integrity_check: bool = ..., fastpath: bool = ...) -> None: ...
def get_slice(self, slobj, axis: int = ...): ...
@property
def index(self): ...
@property
def dtype(self): ...
@property
def array_dtype(self): ...
def get_dtype_counts(self): ...
def get_dtypes(self): ...
def external_values(self): ...
def internal_values(self): ...
def get_values(self): ...
def is_consolidated(self): ...
def delete(self, item) -> None: ...
def fast_xs(self, loc): ...
def concat(self, to_concat, new_axis): ...
def create_block_manager_from_blocks(blocks, axes): ...
def create_block_manager_from_arrays(arrays, names, axes): ...
def construction_error(tot_items, block_shape, axes, e = ...) -> None: ...
def form_blocks(arrays, names, axes): ...
def concatenate_block_managers(mgrs_indexers, axes, concat_axis, copy): ...


@ -1,9 +0,0 @@
from typing import Any, Optional, Set, Tuple
ARITHMETIC_BINOPS: Set[str] = ...
COMPARISON_BINOPS: Set[str] = ...
def get_op_result_name(left: Any, right: Any): ...
def maybe_upcast_for_op(obj: Any, shape: Tuple[int, ...]) -> Any: ...
def fill_binop(left: Any, right: Any, fill_value: Any): ...
def dispatch_to_series(left: Any, right: Any, func: Any, str_rep: Optional[Any] = ..., axis: Optional[Any] = ...): ...


@ -1,13 +0,0 @@
import numpy as np
from pandas.core.dtypes.generic import ABCExtensionArray as ABCExtensionArray
from typing import Optional, Union
def comp_method_OBJECT_ARRAY(op, x, y): ...
def masked_arith_op(x, y, op): ...
def define_na_arithmetic_op(op, str_rep: str): ...
def na_arithmetic_op(left, right, op, str_rep: str): ...
def arithmetic_op(left: Union[np.ndarray, ABCExtensionArray], right, op, str_rep: str): ...
def comparison_op(left: Union[np.ndarray, ABCExtensionArray], right, op) -> Union[np.ndarray, ABCExtensionArray]: ...
def na_logical_op(x: np.ndarray, y, op): ...
def logical_op(left: Union[np.ndarray, ABCExtensionArray], right, op) -> Union[np.ndarray, ABCExtensionArray]: ...
def get_array_op(op, str_rep: Optional[str] = ...): ...


@ -1,7 +0,0 @@
import numpy as np
from pandas.core.dtypes.generic import ABCExtensionArray as ABCExtensionArray, ABCSeries as ABCSeries
from typing import Union
def should_extension_dispatch(left: ABCSeries, right) -> bool: ...
def should_series_dispatch(left, right, op): ...
def dispatch_to_extension_op(op, left: Union[ABCExtensionArray, np.ndarray], right): ...


@ -1 +0,0 @@
reverse_op = ...


@ -1,2 +0,0 @@
def invalid_comparison(left, right, op): ...
def make_invalid_op(name: str): ...


@ -1,23 +0,0 @@
import numpy as np
from pandas._libs import lib as lib, missing as libmissing
from typing import Optional, Union
def kleene_or(
left: Union[bool, np.ndarray],
right: Union[bool, np.ndarray],
left_mask: Optional[np.ndarray],
right_mask: Optional[np.ndarray],
): ...
def kleene_xor(
left: Union[bool, np.ndarray],
right: Union[bool, np.ndarray],
left_mask: Optional[np.ndarray],
right_mask: Optional[np.ndarray],
): ...
def kleene_and(
left: Union[bool, libmissing.NAType, np.ndarray],
right: Union[bool, libmissing.NAType, np.ndarray],
left_mask: Optional[np.ndarray],
right_mask: Optional[np.ndarray],
): ...
def raise_for_nan(value, method) -> None: ...
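These helpers implement Kleene (three-valued) logic for the masked boolean arrays; a hedged sketch of the same semantics through the public nullable boolean dtype:
# Illustrative only.
import pandas as pd

a = pd.array([True, False, None], dtype="boolean")
print(a | True)  # [True, True, True]   - True wins even against NA
print(a & True)  # [True, False, <NA>]  - NA stays unknown
print(a ^ True)  # [False, True, <NA>]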


@ -1,2 +0,0 @@
def add_special_arithmetic_methods(cls): ...
def add_flex_arithmetic_methods(cls) -> None: ...


@ -1,3 +0,0 @@
def fill_zeros(result, x, y): ...
def mask_zero_div_zero(x, y, result): ...
def dispatch_fill_zeros(op, left, right, result): ...


@ -1,12 +0,0 @@
def radd(left, right): ...
def rsub(left, right): ...
def rmul(left, right): ...
def rdiv(left, right): ...
def rtruediv(left, right): ...
def rfloordiv(left, right): ...
def rmod(left, right): ...
def rdivmod(left, right): ...
def rpow(left, right): ...
def rand_(left, right): ...
def ror_(left, right): ...
def rxor(left, right): ...
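The reflected helpers above simply swap their operands; a tiny standalone sketch of the idea (not the pandas implementation):
def radd(left, right):
    # reflected add: evaluate the operation with the operands swapped
    return right + left

assert radd("tail", "head") == "headtail"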


@ -1,58 +0,0 @@
from pandas.core.base import ShallowMixin as ShallowMixin
from pandas.core.groupby.base import GroupByMixin as GroupByMixin
from pandas.core.groupby.groupby import _GroupBy
from pandas.core.groupby.grouper import Grouper as Grouper
from pandas._typing import FrameOrSeriesUnion
class Resampler(_GroupBy, ShallowMixin):
def __init__(self, obj, groupby=..., axis: int = ..., kind=..., **kwargs) -> None: ...
def __getattr__(self, attr: str): ...
def __iter__(self): ...
@property
def obj(self): ...
@property
def ax(self): ...
def pipe(self, func, *args, **kwargs): ...
def aggregate(self, func, *args, **kwargs): ...
agg = aggregate
def transform(self, arg, *args, **kwargs): ...
def pad(self, limit=...): ...
def nearest(self, limit=...): ...
def backfill(self, limit=...): ...
bfill = backfill
def fillna(self, method, limit=...): ...
def interpolate(
self,
method: str = ...,
axis: int = ...,
limit=...,
inplace: bool = ...,
limit_direction: str = ...,
limit_area=...,
downcast=...,
**kwargs,
): ...
def asfreq(self, fill_value=...): ...
def std(self, ddof: int = ..., *args, **kwargs): ...
def var(self, ddof: int = ..., *args, **kwargs): ...
def size(self): ...
def count(self): ...
def quantile(self, q: float = ..., **kwargs): ...
def sum(self, _method=..., min_count: int = ..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def prod(self, _method=..., min_count: int = ..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def min(self, _method=..., min_count: int = ..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def max(self, _method=..., min_count: int = ..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def first(self, _method=..., min_count: int = ..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def last(self, _method=..., min_count: int = ..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def mean(self, _method=..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def sem(self, _method=..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def median(self, _method=..., *args, **kwargs) -> FrameOrSeriesUnion: ...
def ohlc(self, _method=..., *args, **kwargs) -> FrameOrSeriesUnion: ...
class _GroupByMixin(GroupByMixin):
groupby = ...
def __init__(self, obj, *args, **kwargs) -> None: ...
def resample(obj, kind=..., **kwds): ...
def get_resampler_for_grouping(groupby, rule, how=..., fill_method=..., limit=..., kind=..., **kwargs): ...
def asfreq(obj, freq, method=..., how=..., normalize: bool = ..., fill_value=...): ...
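
A short usage sketch of the Resampler API declared above (the index and values are invented for illustration):

import numpy as np
import pandas as pd

idx = pd.date_range("2022-01-01", periods=10, freq="D")
s = pd.Series(np.arange(10), index=idx)

r = s.resample("W")           # returns a Resampler
print(r.mean())               # weekly means
print(r.agg(["sum", "max"]))  # several reducers at once -> DataFrame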



@@ -1,6 +0,0 @@
from pandas.core.reshape.concat import concat as concat
from pandas.core.reshape.melt import lreshape as lreshape, melt as melt, wide_to_long as wide_to_long
from pandas.core.reshape.merge import merge as merge, merge_asof as merge_asof, merge_ordered as merge_ordered
from pandas.core.reshape.pivot import crosstab as crosstab, pivot as pivot, pivot_table as pivot_table
from pandas.core.reshape.reshape import get_dummies as get_dummies
from pandas.core.reshape.tile import cut as cut, qcut as qcut


@@ -1,44 +0,0 @@
from pandas import DataFrame as DataFrame, Series as Series
from typing import Hashable, Iterable, Mapping, Optional, Union, overload, Literal, TypeVar
HashableT = TypeVar("HashableT", bound=Hashable)
@overload
def concat(
objs: Union[Iterable[Optional[Series]], Mapping[HashableT, Optional[Series]]],
join: str = ...,
ignore_index: bool = ...,
keys=...,
levels=...,
names=...,
verify_integrity: bool = ...,
sort: bool = ...,
copy: bool = ...,
axis: Literal[0, "index"] = ...,
) -> Series: ...
@overload
def concat(
objs: Union[Iterable[Optional[Series]], Mapping[HashableT, Optional[Series]]],
axis: Literal[1, "columns"],
join: str = ...,
ignore_index: bool = ...,
keys=...,
levels=...,
names=...,
verify_integrity: bool = ...,
sort: bool = ...,
copy: bool = ...,
) -> DataFrame: ...
@overload
def concat(
objs: Union[Iterable[Optional[Union[DataFrame, Series]]], Mapping[HashableT, Optional[Union[DataFrame, Series]]]],
axis: Literal[0, "index", 1, "columns"] = ...,
join: str = ...,
ignore_index: bool = ...,
keys=...,
levels=...,
names=...,
verify_integrity: bool = ...,
sort: bool = ...,
copy: bool = ...,
) -> DataFrame: ...
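
The overloads encode that concatenating only Series along the index keeps a Series, while axis=1 (or any DataFrame input) yields a DataFrame. An illustrative check:

import pandas as pd

s1 = pd.Series([1, 2], name="a")
s2 = pd.Series([3, 4], name="b")

out0 = pd.concat([s1, s2])          # default axis=0: still a Series
out1 = pd.concat([s1, s2], axis=1)  # axis=1: columns "a" and "b" -> DataFrame

print(type(out0).__name__)  # Series
print(type(out1).__name__)  # DataFrame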


@@ -1,15 +0,0 @@
import numpy as np
from pandas.core.frame import DataFrame as DataFrame
from typing import List, Optional, Tuple, Union
def melt(
frame: DataFrame,
id_vars: Optional[Union[Tuple, List, np.ndarray]] = ...,
value_vars: Optional[Union[Tuple, List, np.ndarray]] = ...,
var_name: Optional[str] = ...,
value_name: str = ...,
col_level: Optional[Union[int, str]] = ...,
ignore_index: bool = ...
) -> DataFrame: ...
def lreshape(data: DataFrame, groups, dropna: bool=..., label=...) -> DataFrame: ...
def wide_to_long(df: DataFrame, stubnames, i, j, sep: str=..., suffix: str=...) -> DataFrame: ...
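
A small example of the wide-to-long reshaping that melt performs (the column names here are invented):

import pandas as pd

df = pd.DataFrame({"id": [1, 2], "height": [170, 180], "weight": [60, 80]})

long_df = pd.melt(df, id_vars=["id"], value_vars=["height", "weight"],
                  var_name="measure", value_name="reading")
print(long_df)
#    id measure  reading
# 0   1  height      170
# 1   2  height      180
# 2   1  weight       60
# 3   2  weight       80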


@@ -1,130 +0,0 @@
from pandas._libs.tslibs import Timedelta
from pandas import DataFrame as DataFrame, Series as Series
from pandas._typing import Label
from typing import Optional, Sequence, Union
def merge(
left: Union[DataFrame, Series],
right: Union[DataFrame, Series],
how: str = ...,
on: Optional[Union[Label, Sequence]] = ...,
left_on: Optional[Union[Label, Sequence]] = ...,
right_on: Optional[Union[Label, Sequence]] = ...,
left_index: bool = ...,
right_index: bool = ...,
sort: bool = ...,
suffixes: Sequence[Union[str, None]] = ...,
copy: bool = ...,
indicator: Union[bool, str] = ...,
validate: str = ...,
) -> DataFrame: ...
def merge_ordered(
left: Union[DataFrame, Series],
right: Union[DataFrame, Series],
on: Optional[Union[Label, Sequence]] = ...,
left_on: Optional[Union[Label, Sequence]] = ...,
right_on: Optional[Union[Label, Sequence]] = ...,
left_by: Optional[Union[str, Sequence[str]]] = ...,
right_by: Optional[Union[str, Sequence[str]]] = ...,
fill_method: Optional[str] = ...,
suffixes: Sequence[Union[str, None]] = ...,
how: str = ...,
) -> DataFrame: ...
def merge_asof(
left: Union[DataFrame, Series],
right: Union[DataFrame, Series],
on: Optional[Label] = ...,
left_on: Optional[Label] = ...,
right_on: Optional[Label] = ...,
left_index: bool = ...,
right_index: bool = ...,
by: Optional[Union[str, Sequence[str]]] = ...,
left_by: Optional[str] = ...,
right_by: Optional[str] = ...,
suffixes: Sequence[Union[str, None]] = ...,
tolerance: Optional[Union[int, Timedelta]] = ...,
allow_exact_matches: bool = ...,
direction: str = ...,
) -> DataFrame: ...
class _MergeOperation:
left = ...
right = ...
how = ...
axis = ...
on = ...
left_on = ...
right_on = ...
copy = ...
suffixes = ...
sort = ...
left_index = ...
right_index = ...
indicator = ...
indicator_name = ...
def __init__(
self,
left: Union[Series, DataFrame],
right: Union[Series, DataFrame],
how: str = ...,
on=...,
left_on=...,
right_on=...,
axis=...,
left_index: bool = ...,
right_index: bool = ...,
sort: bool = ...,
suffixes=...,
copy: bool = ...,
indicator: bool = ...,
validate=...,
) -> None: ...
def get_result(self): ...
class _OrderedMerge(_MergeOperation):
fill_method = ...
def __init__(
self,
left,
right,
on=...,
left_on=...,
right_on=...,
left_index: bool = ...,
right_index: bool = ...,
axis=...,
suffixes=...,
copy: bool = ...,
fill_method=...,
how: str = ...,
) -> None: ...
def get_result(self): ...
class _AsOfMerge(_OrderedMerge):
by = ...
left_by = ...
right_by = ...
tolerance = ...
allow_exact_matches = ...
direction = ...
def __init__(
self,
left,
right,
on=...,
left_on=...,
right_on=...,
left_index: bool = ...,
right_index: bool = ...,
by=...,
left_by=...,
right_by=...,
axis=...,
suffixes=...,
copy: bool = ...,
fill_method=...,
how: str = ...,
tolerance=...,
allow_exact_matches: bool = ...,
direction: str = ...,
) -> None: ...
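
A brief sketch of the public merge functions these declarations cover (the frames below are invented for illustration):

import pandas as pd

left = pd.DataFrame({"key": ["a", "b", "c"], "x": [1, 2, 3]})
right = pd.DataFrame({"key": ["b", "c", "d"], "y": [20, 30, 40]})

# Database-style join on a shared key column; only "b" and "c" survive an inner join.
print(pd.merge(left, right, on="key", how="inner"))

# merge_asof matches each left row to the nearest earlier (or equal) right key;
# both inputs must be sorted on the merge key.
quotes = pd.DataFrame({"time": pd.to_datetime(["09:00", "09:02"]), "bid": [99.0, 100.0]})
trades = pd.DataFrame({"time": pd.to_datetime(["09:01", "09:03"]), "qty": [10, 5]})
print(pd.merge_asof(trades, quotes, on="time"))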


@@ -1,36 +0,0 @@
from pandas.core.series import Series
from pandas.core.frame import DataFrame
from pandas.core.groupby.grouper import Grouper
from pandas._typing import Scalar
from typing import Callable, Optional, Sequence, Union
def pivot_table(
data: DataFrame,
values: Optional[str] = ...,
index: Optional[Union[str, Sequence, Grouper]] = ...,
columns: Optional[Union[str, Sequence, Grouper]] = ...,
aggfunc = ...,
fill_value: Optional[Scalar] = ...,
margins: bool = ...,
dropna: bool = ...,
margins_name: str = ...,
observed: bool = ...) -> DataFrame: ...
def pivot(
data: DataFrame,
index: Optional[str] = ...,
columns: Optional[str] = ...,
values: Optional[Union[str, Sequence[str]]] = ...,
) -> DataFrame: ...
def crosstab(
index: Union[Sequence, Series],
columns: Union[Sequence, Series],
values: Optional[Sequence] = ...,
rownames: Optional[Sequence] = ...,
colnames: Optional[Sequence] = ...,
aggfunc: Optional[Callable] = ...,
margins: bool = ...,
margins_name: str = ...,
dropna: bool = ...,
normalize: bool = ...) -> DataFrame: ...
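
A usage sketch of pivot_table and crosstab (the example data is invented):

import pandas as pd

df = pd.DataFrame({
    "city": ["NY", "NY", "LA", "LA"],
    "year": [2021, 2022, 2021, 2022],
    "sales": [10, 20, 30, 40],
})

# One row per city, one column per year, cells filled by the aggregate:
print(pd.pivot_table(df, values="sales", index="city", columns="year", aggfunc="sum"))

# crosstab counts co-occurrences of two factors:
print(pd.crosstab(df["city"], df["year"]))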

Some files were not shown because too many files changed in this diff.