@@ -17,10 +17,7 @@
 from __future__ import annotations
 
 from collections import abc
-from datetime import (
-    datetime,
-    timedelta,
-)
+from datetime import datetime
 import sys
 from typing import TYPE_CHECKING
 
@@ -44,7 +41,6 @@
 from pandas import (
     DataFrame,
     Timestamp,
-    isna,
 )
 
 from pandas.io.common import get_handle
@@ -55,7 +51,6 @@
 from pandas._typing import (
     CompressionOptions,
     FilePath,
-    NaTType,
     ReadBuffer,
 )
 
@@ -64,20 +59,6 @@
 _sas_origin = Timestamp("1960-01-01")
 
 
-def _parse_datetime(sas_datetime: float, unit: str) -> datetime | NaTType:
-    if isna(sas_datetime):
-        return pd.NaT
-
-    if unit == "s":
-        return datetime(1960, 1, 1) + timedelta(seconds=sas_datetime)
-
-    elif unit == "d":
-        return datetime(1960, 1, 1) + timedelta(days=sas_datetime)
-
-    else:
-        raise ValueError("unit must be 'd' or 's'")
-
-
 def _convert_datetimes(sas_datetimes: pd.Series, unit: str) -> pd.Series:
     """
     Convert to Timestamp if possible, otherwise to datetime.datetime.
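
This hunk drops the scalar `_parse_datetime` helper, leaving the vectorized `_convert_datetimes` shown in context. For reference only (not the module's implementation), the same SAS-epoch arithmetic can be sketched with `pd.to_datetime` and the 1960-01-01 origin; the sample values below are invented:

    import numpy as np
    import pandas as pd

    # Vectorized sketch of what the removed scalar helper did per value:
    # SAS stores datetimes as seconds (unit="s") or dates as days (unit="d")
    # counted from the 1960-01-01 origin, and missing inputs become NaT.
    sas_seconds = pd.Series([0.0, 86400.0, np.nan])
    converted = pd.to_datetime(
        sas_seconds, unit="s", origin=pd.Timestamp("1960-01-01")
    )
    # 0.0 -> 1960-01-01, 86400.0 -> 1960-01-02, NaN -> NaT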
@@ -370,11 +351,6 @@ def _read_bytes(self, offset: int, length: int):
             raise ValueError("The cached page is too small.")
         return self._cached_page[offset : offset + length]
 
-    def _read_and_convert_header_text(self, offset: int, length: int) -> str | bytes:
-        return self._convert_header_text(
-            self._read_bytes(offset, length).rstrip(b"\x00 ")
-        )
-
     def _parse_metadata(self) -> None:
        done = False
        while not done:
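
The removed `_read_and_convert_header_text` wrapper only stripped trailing NUL and space padding from the raw bytes before handing them to `_convert_header_text`, so call sites can perform the same trimming inline. A tiny standalone illustration (the byte string here is made up):

    # Trailing NUL/space padding is stripped before the bytes are decoded.
    raw = b"SAS FILE\x00\x00\x00   "
    print(raw.rstrip(b"\x00 "))  # b'SAS FILE'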
|