@mytec: pushed back before 1.1

This commit is contained in:
lazard36
2026-01-30 20:12:52 +00:00
parent d6988e370e
commit e8ae5bc1db
5228 changed files with 1191766 additions and 0 deletions

View File

@@ -0,0 +1,178 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python driver for MongoDB."""
from __future__ import annotations
from typing import ContextManager, Optional
# Names exported by ``from pymongo import *``; this is the package's
# declared public API and should stay in sync with the imports below.
__all__ = [
    "ASCENDING",
    "DESCENDING",
    "GEO2D",
    "GEOSPHERE",
    "HASHED",
    "TEXT",
    "version_tuple",
    "get_version_string",
    "__version__",
    "version",
    "ReturnDocument",
    "MAX_SUPPORTED_WIRE_VERSION",
    "MIN_SUPPORTED_WIRE_VERSION",
    "CursorType",
    "MongoClient",
    "AsyncMongoClient",
    "DeleteMany",
    "DeleteOne",
    "IndexModel",
    "InsertOne",
    "ReplaceOne",
    "UpdateMany",
    "UpdateOne",
    "ReadPreference",
    "WriteConcern",
    "has_c",
    "timeout",
]
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`_.
.. _geospatial index: https://mongodb.com/docs/manual/core/2d/
"""
GEOSPHERE = "2dsphere"
"""Index specifier for a `spherical geospatial index`_.
.. versionadded:: 2.5
.. _spherical geospatial index: https://mongodb.com/docs/manual/core/2dsphere/
"""
HASHED = "hashed"
"""Index specifier for a `hashed index`_.
.. versionadded:: 2.5
.. _hashed index: https://mongodb.com/docs/manual/core/index-hashed/
"""
TEXT = "text"
"""Index specifier for a `text index`_.
.. seealso:: MongoDB's `Atlas Search
<https://docs.atlas.mongodb.com/atlas-search/>`_ which offers more advanced
text search functionality.
.. versionadded:: 2.7.1
.. _text index: https://mongodb.com/docs/manual/core/index-text/
"""
from pymongo import _csot
from pymongo._version import __version__, get_version_string, version_tuple
from pymongo.asynchronous.mongo_client import AsyncMongoClient
from pymongo.common import MAX_SUPPORTED_WIRE_VERSION, MIN_SUPPORTED_WIRE_VERSION, has_c
from pymongo.cursor import CursorType
from pymongo.operations import (
DeleteMany,
DeleteOne,
IndexModel,
InsertOne,
ReplaceOne,
UpdateMany,
UpdateOne,
)
from pymongo.read_preferences import ReadPreference
from pymongo.synchronous.collection import ReturnDocument
from pymongo.synchronous.mongo_client import MongoClient
from pymongo.write_concern import WriteConcern
# Public module compatibility imports
# isort: off
from pymongo import uri_parser # noqa: F401
from pymongo import change_stream # noqa: F401
from pymongo import client_session # noqa: F401
from pymongo import collection # noqa: F401
from pymongo import command_cursor # noqa: F401
from pymongo import database # noqa: F401
# isort: on
# NOTE(review): plain alias of ``__version__`` — presumably kept for backward
# compatibility with older callers; prefer ``__version__`` in new code.
version = __version__
"""Current version of PyMongo."""
def timeout(seconds: Optional[float]) -> ContextManager[None]:
    """**(Provisional)** Apply the given timeout for a block of operations.

    .. note:: :func:`~pymongo.timeout` is currently provisional. Backwards
      incompatible changes may occur before becoming officially supported.

    Use :func:`~pymongo.timeout` in a with-statement::

        with pymongo.timeout(5):
            client.db.coll.insert_one({})
            client.db.coll2.insert_one({})

    When the with-statement is entered, a deadline is set for the entire
    block. When that deadline is exceeded, any blocking pymongo operation
    will raise a timeout exception. For example::

        try:
            with pymongo.timeout(5):
                client.db.coll.insert_one({})
                time.sleep(5)
                # The deadline has now expired, the next operation will raise
                # a timeout exception.
                client.db.coll2.insert_one({})
        except PyMongoError as exc:
            if exc.timeout:
                print(f"block timed out: {exc!r}")
            else:
                print(f"failed with non-timeout error: {exc!r}")

    When nesting :func:`~pymongo.timeout`, the nested deadline is capped by
    the outer deadline. The deadline can only be shortened, not extended.
    When exiting the block, the previous deadline is restored::

        with pymongo.timeout(5):
            coll.find_one()  # Uses the 5 second deadline.
            with pymongo.timeout(3):
                coll.find_one()  # Uses the 3 second deadline.
            coll.find_one()  # Uses the original 5 second deadline.
            with pymongo.timeout(10):
                coll.find_one()  # Still uses the original 5 second deadline.
            coll.find_one()  # Uses the original 5 second deadline.

    :param seconds: A non-negative floating point number expressing seconds, or None.

    :raises: :py:class:`ValueError`: When `seconds` is negative.

    See `Limit Server Execution Time <https://www.mongodb.com/docs/languages/python/pymongo-driver/current/connect/connection-options/csot/#overview>`_ for more examples.

    .. versionadded:: 4.2
    """
    # Validate the argument up front so misuse fails loudly and early.
    if not isinstance(seconds, (int, float, type(None))):
        raise TypeError(f"timeout must be None, an int, or a float, not {type(seconds)}")
    if seconds is not None and seconds < 0:
        raise ValueError("timeout cannot be negative")
    # Normalize ints to float; None means "no timeout".
    normalized = float(seconds) if seconds is not None else None
    return _csot._TimeoutContext(normalized)

View File

@@ -0,0 +1,309 @@
# Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved
"""Lock and Condition classes vendored from https://github.com/python/cpython/blob/main/Lib/asyncio/locks.py
to port 3.13 fixes to older versions of Python.
Can be removed once we drop Python 3.12 support."""
from __future__ import annotations
import collections
import threading
from asyncio import events, exceptions
from typing import Any, Coroutine, Optional
_global_lock = threading.Lock()
class _LoopBoundMixin:
_loop = None
def _get_loop(self) -> Any:
loop = events._get_running_loop()
if self._loop is None:
with _global_lock:
if self._loop is None:
self._loop = loop
if loop is not self._loop:
raise RuntimeError(f"{self!r} is bound to a different event loop")
return loop
class _ContextManagerMixin:
async def __aenter__(self) -> None:
await self.acquire() # type: ignore[attr-defined]
# We have no use for the "as ..." clause in the with
# statement for locks.
return
async def __aexit__(self, exc_type: Any, exc: Any, tb: Any) -> None:
self.release() # type: ignore[attr-defined]
class Lock(_ContextManagerMixin, _LoopBoundMixin):
    """Primitive lock objects.

    A primitive lock is a synchronization primitive that is not owned
    by a particular task when locked. A primitive lock is in one
    of two states, 'locked' or 'unlocked'.

    It is created in the unlocked state. It has two basic methods,
    acquire() and release(). When the state is unlocked, acquire()
    changes the state to locked and returns immediately. When the
    state is locked, acquire() blocks until a call to release() in
    another task changes it to unlocked, then the acquire() call
    resets it to locked and returns. The release() method should only
    be called in the locked state; it changes the state to unlocked
    and returns immediately. If an attempt is made to release an
    unlocked lock, a RuntimeError will be raised.

    When more than one task is blocked in acquire() waiting for
    the state to turn to unlocked, only one task proceeds when a
    release() call resets the state to unlocked; successive release()
    calls will unblock tasks in FIFO order.

    Locks also support the asynchronous context management protocol.
    'async with lock' statement should be used.

    Usage:

        lock = Lock()
        ...
        await lock.acquire()
        try:
            ...
        finally:
            lock.release()

    Context manager usage:

        lock = Lock()
        ...
        async with lock:
            ...

    Lock objects can be tested for locking state:

        if not lock.locked():
            await lock.acquire()
        else:
            # lock is acquired
            ...
    """

    def __init__(self) -> None:
        # Waiter futures (FIFO), created lazily on first contention.
        self._waiters: Optional[collections.deque[Any]] = None
        self._locked = False

    def __repr__(self) -> str:
        res = super().__repr__()
        extra = "locked" if self._locked else "unlocked"
        if self._waiters:
            extra = f"{extra}, waiters:{len(self._waiters)}"
        return f"<{res[1:-1]} [{extra}]>"

    def locked(self) -> bool:
        """Return True if lock is acquired."""
        return self._locked

    async def acquire(self) -> bool:
        """Acquire a lock.

        This method blocks until the lock is unlocked, then sets it to
        locked and returns True.
        """
        # Implement fair scheduling, where thread always waits
        # its turn. Jumping the queue if all are cancelled is an optimization.
        if not self._locked and (
            self._waiters is None or all(w.cancelled() for w in self._waiters)
        ):
            self._locked = True
            return True
        if self._waiters is None:
            self._waiters = collections.deque()
        fut = self._get_loop().create_future()
        self._waiters.append(fut)
        try:
            try:
                await fut
            finally:
                # Always drop our future from the queue, whether we were
                # woken normally or cancelled.
                self._waiters.remove(fut)
        except exceptions.CancelledError:
            # Currently the only exception designed to be able to occur here.
            # Ensure the lock invariant: If lock is not claimed (or about
            # to be claimed by us) and there is a Task in waiters,
            # ensure that the Task at the head will run.
            if not self._locked:
                self._wake_up_first()
            raise
        # assert self._locked is False
        self._locked = True
        return True

    def release(self) -> None:
        """Release a lock.

        When the lock is locked, reset it to unlocked, and return.
        If any other tasks are blocked waiting for the lock to become
        unlocked, allow exactly one of them to proceed.

        When invoked on an unlocked lock, a RuntimeError is raised.

        There is no return value.
        """
        if self._locked:
            self._locked = False
            self._wake_up_first()
        else:
            raise RuntimeError("Lock is not acquired")

    def _wake_up_first(self) -> None:
        """Ensure that the first waiter will wake up."""
        if not self._waiters:
            return
        try:
            fut = next(iter(self._waiters))
        except StopIteration:
            return
        # .done() means that the waiter is already set to wake up.
        if not fut.done():
            fut.set_result(True)
class Condition(_ContextManagerMixin, _LoopBoundMixin):
    """Asynchronous equivalent to threading.Condition.

    This class implements condition variable objects. A condition variable
    allows one or more tasks to wait until they are notified by another
    task.

    A new Lock object is created and used as the underlying lock.
    """

    def __init__(self, lock: Optional[Lock] = None) -> None:
        if lock is None:
            lock = Lock()
        self._lock = lock
        # Export the lock's locked(), acquire() and release() methods.
        self.locked = lock.locked
        self.acquire = lock.acquire
        self.release = lock.release
        # Futures for tasks blocked in wait(), in FIFO order.
        self._waiters: collections.deque[Any] = collections.deque()

    def __repr__(self) -> str:
        res = super().__repr__()
        extra = "locked" if self.locked() else "unlocked"
        if self._waiters:
            extra = f"{extra}, waiters:{len(self._waiters)}"
        return f"<{res[1:-1]} [{extra}]>"

    async def wait(self) -> bool:
        """Wait until notified.

        If the calling task has not acquired the lock when this
        method is called, a RuntimeError is raised.

        This method releases the underlying lock, and then blocks
        until it is awakened by a notify() or notify_all() call for
        the same condition variable in another task. Once
        awakened, it re-acquires the lock and returns True.

        This method may return spuriously,
        which is why the caller should always
        re-check the state and be prepared to wait() again.
        """
        if not self.locked():
            raise RuntimeError("cannot wait on un-acquired lock")
        fut = self._get_loop().create_future()
        self.release()
        try:
            try:
                self._waiters.append(fut)
                try:
                    await fut
                    return True
                finally:
                    # Remove our future whether we were notified or cancelled.
                    self._waiters.remove(fut)
            finally:
                # Must re-acquire lock even if wait is cancelled.
                # We only catch CancelledError here, since we don't want any
                # other (fatal) errors with the future to cause us to spin.
                err = None
                while True:
                    try:
                        await self.acquire()
                        break
                    except exceptions.CancelledError as e:
                        err = e
                if err is not None:
                    try:
                        raise err  # Re-raise most recent exception instance.
                    finally:
                        err = None  # Break reference cycles.
        except BaseException:
            # Any error raised out of here _may_ have occurred after this Task
            # believed to have been successfully notified.
            # Make sure to notify another Task instead. This may result
            # in a "spurious wakeup", which is allowed as part of the
            # Condition Variable protocol.
            self._notify(1)
            raise

    async def wait_for(self, predicate: Any) -> Coroutine[Any, Any, Any]:
        """Wait until a predicate becomes true.

        The predicate should be a callable whose result will be
        interpreted as a boolean value. The method will repeatedly
        wait() until it evaluates to true. The final predicate value is
        the return value.
        """
        result = predicate()
        while not result:
            await self.wait()
            result = predicate()
        return result

    def notify(self, n: int = 1) -> None:
        """By default, wake up one task waiting on this condition, if any.

        If the calling task has not acquired the lock when this method
        is called, a RuntimeError is raised.

        This method wakes up n of the tasks waiting for the condition
        variable; if fewer than n are waiting, they are all awoken.

        Note: an awakened task does not actually return from its
        wait() call until it can reacquire the lock. Since notify() does
        not release the lock, its caller should.
        """
        if not self.locked():
            raise RuntimeError("cannot notify on un-acquired lock")
        self._notify(n)

    def _notify(self, n: int) -> None:
        # Wake up to n waiters that are not already scheduled to wake.
        idx = 0
        for fut in self._waiters:
            if idx >= n:
                break
            if not fut.done():
                idx += 1
                fut.set_result(False)

    def notify_all(self) -> None:
        """Wake up all tasks waiting on this condition. This method acts
        like notify(), but wakes up all waiting tasks instead of one. If the
        calling task has not acquired the lock when this method is called,
        a RuntimeError is raised.
        """
        self.notify(len(self._waiters))

View File

@@ -0,0 +1,49 @@
# Copyright 2024-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A custom asyncio.Task that allows checking if a task has been sent a cancellation request.
Can be removed once we drop Python 3.10 support in favor of asyncio.Task.cancelling."""
from __future__ import annotations
import asyncio
import sys
from typing import Any, Coroutine, Optional
# TODO (https://jira.mongodb.org/browse/PYTHON-4981): Revisit once the underlying cause of the swallowed cancellations is uncovered
class _Task(asyncio.Task[Any]):
    """An asyncio.Task that counts cancellation requests (pre-3.11 backport)."""

    def __init__(self, coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> None:
        super().__init__(coro, name=name)
        # Outstanding cancel() requests, mirroring 3.11's Task.cancelling().
        self._cancel_requests = 0
        asyncio._register_task(self)

    def cancel(self, msg: Optional[str] = None) -> bool:
        # Record the request before delegating to the base implementation.
        self._cancel_requests += 1
        return super().cancel(msg=msg)

    def uncancel(self) -> int:
        # Never drop below zero, matching asyncio.Task.uncancel semantics.
        if self._cancel_requests > 0:
            self._cancel_requests -= 1
        return self._cancel_requests

    def cancelling(self) -> int:
        return self._cancel_requests


def create_task(coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> asyncio.Task[Any]:
    """Schedule *coro*, using _Task on Pythons that lack Task.cancelling()."""
    if sys.version_info < (3, 11):
        return _Task(coro, name=name)
    return asyncio.create_task(coro, name=name)

View File

@@ -0,0 +1,57 @@
# Copyright 2023-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Azure helpers."""
from __future__ import annotations
import json
from typing import Any, Optional
def _get_azure_response(
    resource: str, client_id: Optional[str] = None, timeout: float = 5
) -> dict[str, Any]:
    """Request an access token for *resource* from the Azure IMDS endpoint.

    :param resource: The Azure resource the token is requested for.
    :param client_id: Optional managed-identity client id.
    :param timeout: Socket timeout in seconds.
    :raises ValueError: if the request fails, the response is not JSON, or
        required fields are missing.
    """
    # Deferred import to save overall import time.
    from urllib.request import Request, urlopen

    query = f"?api-version=2018-02-01&resource={resource}"
    if client_id:
        query += f"&client_id={client_id}"
    url = "http://169.254.169.254/metadata/identity/oauth2/token" + query
    headers = {"Metadata": "true", "Accept": "application/json"}
    request = Request(url, headers=headers)  # noqa: S310
    try:
        with urlopen(request, timeout=timeout) as response:  # noqa: S310
            status = response.status
            body = response.read().decode("utf8")
    except Exception as e:
        # Normalize any transport failure into a ValueError for callers.
        raise ValueError("Failed to acquire IMDS access token: %s" % e) from None
    if status != 200:
        raise ValueError("Failed to acquire IMDS access token.")
    try:
        data = json.loads(body)
    except Exception:
        raise ValueError("Azure IMDS response must be in JSON format") from None
    for key in ("access_token", "expires_in"):
        if not data.get(key):
            raise ValueError("Azure IMDS response must contain %s, but was %s." % (key, body))
    return data

View File

@@ -0,0 +1,79 @@
# Copyright 2024-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Constants, types, and classes shared across Client Bulk Write API implementations."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, NoReturn
from pymongo.errors import ClientBulkWriteException, OperationFailure
from pymongo.helpers_shared import _get_wce_doc
if TYPE_CHECKING:
from pymongo.typings import _DocumentOut
def _merge_command(
    ops: list[tuple[str, Mapping[str, Any]]],
    offset: int,
    full_result: MutableMapping[str, Any],
    result: Mapping[str, Any],
) -> None:
    """Merge result of a single bulk write batch into the full result."""
    error = result.get("error")
    if error:
        full_result["error"] = error
    # Accumulate per-batch operation counters into the running totals.
    for counter in ("nInserted", "nDeleted", "nMatched", "nModified", "nUpserted"):
        full_result[counter] += result.get(counter, 0)
    for doc in result.get("writeErrors") or []:
        # Leave the server response intact for APM.
        replacement = doc.copy()
        # Map the batch-relative index back to the client-level operation index.
        client_index = doc["idx"] + offset
        replacement["idx"] = client_index
        # Add the failed operation to the error document.
        replacement["op"] = ops[client_index][1]
        full_result["writeErrors"].append(replacement)
    wce = _get_wce_doc(result)
    if wce:
        full_result["writeConcernErrors"].append(wce)
def _throw_client_bulk_write_exception(
    full_result: _DocumentOut, verbose_results: bool
) -> NoReturn:
    """Raise a ClientBulkWriteException from the full result."""
    write_errors = full_result["writeErrors"]
    if write_errors:
        # Report errors in client-level operation order.
        write_errors.sort(key=lambda error: error["idx"])
        first = write_errors[0]
        code = first["code"]
        msg = first["errmsg"]
        # retryWrites on MMAPv1 should raise an actionable error.
        if code == 20 and msg.startswith("Transaction numbers"):
            errmsg = (
                "This MongoDB deployment does not support "
                "retryable writes. Please add retryWrites=false "
                "to your connection string."
            )
            raise OperationFailure(errmsg, code, full_result)
    cause = full_result["error"]
    if isinstance(cause, BaseException):
        # Preserve the original exception as the cause for debugging.
        raise ClientBulkWriteException(full_result, verbose_results) from cause
    raise ClientBulkWriteException(full_result, verbose_results)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,167 @@
# Copyright 2022-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Internal helpers for CSOT."""
from __future__ import annotations
import functools
import inspect
import time
from collections import deque
from contextlib import AbstractContextManager
from contextvars import ContextVar, Token
from typing import TYPE_CHECKING, Any, Callable, Deque, MutableMapping, Optional, TypeVar, cast
if TYPE_CHECKING:
from pymongo.write_concern import WriteConcern
# Per-context CSOT state: the active client timeout, the latest RTT sample,
# and the absolute monotonic-clock deadline for the current block.
TIMEOUT: ContextVar[Optional[float]] = ContextVar("TIMEOUT", default=None)
RTT: ContextVar[float] = ContextVar("RTT", default=0.0)
DEADLINE: ContextVar[float] = ContextVar("DEADLINE", default=float("inf"))


def reset_all() -> None:
    """Restore every CSOT context variable to its default value."""
    TIMEOUT.set(None)
    RTT.set(0.0)
    DEADLINE.set(float("inf"))


def get_timeout() -> Optional[float]:
    """Return the active timeout in seconds, or None when no timeout is set."""
    return TIMEOUT.get(None)


def get_rtt() -> float:
    """Return the most recent round-trip-time sample for this context."""
    return RTT.get()


def get_deadline() -> float:
    """Return the absolute monotonic-clock deadline (inf when unset)."""
    return DEADLINE.get()


def set_rtt(rtt: float) -> None:
    """Record the latest round-trip-time sample for this context."""
    RTT.set(rtt)


def remaining() -> Optional[float]:
    """Return seconds left until the deadline, or None when no timeout is active."""
    if not get_timeout():
        return None
    return DEADLINE.get() - time.monotonic()


def clamp_remaining(max_timeout: float) -> float:
    """Return the remaining timeout clamped to a max value."""
    left = remaining()
    return max_timeout if left is None else min(left, max_timeout)
class _TimeoutContext(AbstractContextManager[Any]):
    """Internal timeout context manager.

    Use :func:`pymongo.timeout` instead::

        with pymongo.timeout(0.5):
            client.test.test.insert_one({})
    """

    def __init__(self, timeout: Optional[float]):
        self._timeout = timeout
        # Tokens from ContextVar.set(), kept so __exit__ can restore state.
        self._tokens: Optional[tuple[Token[Optional[float]], Token[float], Token[float]]] = None

    def __enter__(self) -> None:
        # A nested timeout may only shorten the deadline, never extend it.
        prev_deadline = DEADLINE.get()
        if self._timeout:
            next_deadline = time.monotonic() + self._timeout
        else:
            next_deadline = float("inf")
        self._tokens = (
            TIMEOUT.set(self._timeout),
            DEADLINE.set(min(prev_deadline, next_deadline)),
            RTT.set(0.0),
        )

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        # Restore the previous timeout/deadline/RTT values on exit.
        if self._tokens:
            timeout_token, deadline_token, rtt_token = self._tokens
            TIMEOUT.reset(timeout_token)
            DEADLINE.reset(deadline_token)
            RTT.reset(rtt_token)
# See https://mypy.readthedocs.io/en/stable/generics.html?#decorator-factories
F = TypeVar("F", bound=Callable[..., Any])


def apply(func: F) -> F:
    """Apply the client's timeoutMS to this operation. Can wrap both asynchronous and synchronous methods"""
    if inspect.iscoroutinefunction(func):

        @functools.wraps(func)
        async def csot_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
            # Respect an already-active timeout context; never re-enter.
            if get_timeout() is not None:
                return await func(self, *args, **kwargs)
            client_timeout = self._timeout
            if client_timeout is None:
                return await func(self, *args, **kwargs)
            with _TimeoutContext(client_timeout):
                return await func(self, *args, **kwargs)

    else:

        @functools.wraps(func)
        def csot_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
            # Respect an already-active timeout context; never re-enter.
            if get_timeout() is not None:
                return func(self, *args, **kwargs)
            client_timeout = self._timeout
            if client_timeout is None:
                return func(self, *args, **kwargs)
            with _TimeoutContext(client_timeout):
                return func(self, *args, **kwargs)

    return cast(F, csot_wrapper)
def apply_write_concern(
    cmd: MutableMapping[str, Any], write_concern: Optional[WriteConcern]
) -> None:
    """Apply the given write concern to a command."""
    if not write_concern or write_concern.is_server_default:
        # Nothing to add: absent or server-default write concern.
        return
    doc = write_concern.document
    if get_timeout() is not None:
        # Under CSOT the client-side timeout governs; drop a conflicting wtimeout.
        doc.pop("wtimeout", None)
    if doc:
        cmd["writeConcern"] = doc
# Window size and the minimum number of samples required before reporting.
_MAX_RTT_SAMPLES: int = 10
_MIN_RTT_SAMPLES: int = 2


class MovingMinimum:
    """Tracks a minimum RTT within the last 10 RTT samples."""

    samples: Deque[float]

    def __init__(self) -> None:
        # Oldest samples fall off automatically once the deque is full.
        self.samples = deque(maxlen=_MAX_RTT_SAMPLES)

    def add_sample(self, sample: float) -> None:
        """Record one RTT sample; durations must be non-negative."""
        if sample < 0:
            raise ValueError(f"duration cannot be negative {sample}")
        self.samples.append(sample)

    def get(self) -> float:
        """Get the min, or 0.0 if there aren't enough samples yet."""
        return min(self.samples) if len(self.samples) >= _MIN_RTT_SAMPLES else 0.0

    def reset(self) -> None:
        """Discard all recorded samples."""
        self.samples.clear()

View File

@@ -0,0 +1,40 @@
# Copyright 2024-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GCP helpers."""
from __future__ import annotations
from typing import Any
def _get_gcp_response(resource: str, timeout: float = 5) -> dict[str, Any]:
    """Request an identity token for *resource* from the GCP metadata server.

    :param resource: The audience the identity token is requested for.
    :param timeout: Socket timeout in seconds.
    :raises ValueError: if the request fails or returns a non-200 status.
    """
    # Deferred import to keep module import time low.
    from urllib.request import Request, urlopen

    url = (
        "http://metadata/computeMetadata/v1/instance/service-accounts/default/identity"
        f"?audience={resource}"
    )
    headers = {"Metadata-Flavor": "Google"}
    request = Request(url, headers=headers)  # noqa: S310
    try:
        with urlopen(request, timeout=timeout) as response:  # noqa: S310
            status = response.status
            body = response.read().decode("utf8")
    except Exception as e:
        # Normalize any transport failure into a ValueError for callers.
        raise ValueError("Failed to acquire IMDS access token: %s" % e) from None
    if status != 200:
        raise ValueError("Failed to acquire IMDS access token.")
    return dict(access_token=body)

View File

@@ -0,0 +1,43 @@
# Copyright 2022-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Current version of PyMongo."""
from __future__ import annotations
import re
from typing import List, Tuple, Union
__version__ = "4.16.0"


def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]:
    """Parse a version string into a tuple of ints plus an optional suffix.

    "4.16.0" -> (4, 16, 0); "4.2.0b0" -> (4, 2, 0, "b0"); "4.2" -> (4, 2).

    :param version: A dotted version string, optionally with a trailing
        pre-release suffix attached to the patch component.
    :raises ValueError: when *version* is not a dotted numeric version.
    """
    # The separator dots are escaped: an unescaped "." matches any character,
    # which previously allowed malformed strings such as "4x16y0" to parse
    # as a valid version instead of raising ValueError.
    pattern = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<rest>.*)"
    match = re.match(pattern, version)
    if match:
        parts: List[Union[int, str]] = [int(match[part]) for part in ["major", "minor", "patch"]]
        if match["rest"]:
            # Keep any pre-release/build suffix (e.g. "b0", ".dev1") as a string.
            parts.append(match["rest"])
    elif re.fullmatch(r"\d+(\.\d+)+", version):
        # Purely numeric short form such as "4.2".
        parts = [int(part) for part in version.split(".")]
    else:
        raise ValueError("Could not parse version")
    return tuple(parts)


version_tuple = get_version_tuple(__version__)

# Kept as an alias of __version__ for backward compatibility.
version = __version__


def get_version_string() -> str:
    """Return the current PyMongo version string."""
    return __version__

Some files were not shown because too many files have changed in this diff Show More