# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Database level operations."""
from __future__ import annotations

from copy import deepcopy
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    Generic,
    List,
    Mapping,
    MutableMapping,
    NoReturn,
    Optional,
    Sequence,
    TypeVar,
    Union,
    cast,
)

from bson.codec_options import DEFAULT_CODEC_OPTIONS
from bson.dbref import DBRef
from bson.son import SON
from bson.timestamp import Timestamp
from pymongo import _csot, common
from pymongo.aggregation import _DatabaseAggregationCommand
from pymongo.change_stream import DatabaseChangeStream
from pymongo.collection import Collection
from pymongo.command_cursor import CommandCursor
from pymongo.common import _ecoc_coll_name, _esc_coll_name
from pymongo.errors import CollectionInvalid, InvalidName
from pymongo.read_preferences import ReadPreference, _ServerMode
from pymongo.typings import _CollationIn, _DocumentType, _DocumentTypeArg, _Pipeline


def _check_name(name):
    """Check if a database name is valid."""
    if not name:
        raise InvalidName("database name cannot be the empty string")

    for invalid_char in [" ", ".", "$", "/", "\\", "\x00", '"']:
        if invalid_char in name:
            raise InvalidName("database names cannot contain the character %r" % invalid_char)


if TYPE_CHECKING:
    import bson
    import bson.codec_options
    from pymongo.client_session import ClientSession
    from pymongo.mongo_client import MongoClient
    from pymongo.read_concern import ReadConcern
    from pymongo.write_concern import WriteConcern


_CodecDocumentType = TypeVar("_CodecDocumentType", bound=Mapping[str, Any])


class Database(common.BaseObject, Generic[_DocumentType]):
    """A Mongo database."""

    def __init__(
        self,
        client: "MongoClient[_DocumentType]",
        name: str,
        codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None,
        read_preference: Optional[_ServerMode] = None,
        write_concern: Optional[WriteConcern] = None,
        read_concern: Optional[ReadConcern] = None,
    ) -> None:
        """Get a database by client and name.

        Raises :class:`TypeError` if `name` is not an instance of
        :class:`str`. Raises :class:`~pymongo.errors.InvalidName` if `name`
        is not a valid database name.

        :Parameters:
          - `client`: A :class:`~pymongo.mongo_client.MongoClient` instance.
          - `name`: The database name.
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`. If ``None`` (the
            default) client.codec_options is used.
          - `read_preference` (optional): The read preference to use. If
            ``None`` (the default) client.read_preference is used.
          - `write_concern` (optional): An instance of
            :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
            default) client.write_concern is used.
          - `read_concern` (optional): An instance of
            :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the
            default) client.read_concern is used.

        .. seealso:: The MongoDB documentation on `databases `_.
        .. versionchanged:: 4.0
           Removed the eval, system_js, error, last_status, previous_error,
           reset_error_history, authenticate, logout, collection_names,
           current_op, add_user, remove_user, profiling_level,
           set_profiling_level, and profiling_info methods.
           See the :ref:`pymongo4-migration-guide`.

        .. versionchanged:: 3.2
           Added the read_concern option.

        .. versionchanged:: 3.0
           Added the codec_options, read_preference, and write_concern options.
           :class:`~pymongo.database.Database` no longer returns an instance
           of :class:`~pymongo.collection.Collection` for attribute names
           with leading underscores. You must use dict-style lookups instead::

               db['__my_collection__']

           Not:

               db.__my_collection__
        """
        super().__init__(
            codec_options or client.codec_options,
            read_preference or client.read_preference,
            write_concern or client.write_concern,
            read_concern or client.read_concern,
        )

        if not isinstance(name, str):
            raise TypeError("name must be an instance of str")

        if name != "$external":
            _check_name(name)

        self.__name = name
        self.__client: MongoClient[_DocumentType] = client
        self._timeout = client.options.timeout

    @property
    def client(self) -> "MongoClient[_DocumentType]":
        """The client instance for this :class:`Database`."""
        return self.__client

    @property
    def name(self) -> str:
        """The name of this :class:`Database`."""
        return self.__name

    def with_options(
        self,
        codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None,
        read_preference: Optional[_ServerMode] = None,
        write_concern: Optional[WriteConcern] = None,
        read_concern: Optional[ReadConcern] = None,
    ) -> "Database[_DocumentType]":
        """Get a clone of this database changing the specified settings.

          >>> db1.read_preference
          Primary()
          >>> from pymongo.read_preferences import Secondary
          >>> db2 = db1.with_options(read_preference=Secondary([{'node': 'analytics'}]))
          >>> db1.read_preference
          Primary()
          >>> db2.read_preference
          Secondary(tag_sets=[{'node': 'analytics'}], max_staleness=-1, hedge=None)

        :Parameters:
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`. If ``None`` (the
            default) the :attr:`codec_options` of this :class:`Database` is used.
          - `read_preference` (optional): The read preference to use. If
            ``None`` (the default) the :attr:`read_preference` of this
            :class:`Database` is used. See :mod:`~pymongo.read_preferences`
            for options.
          - `write_concern` (optional): An instance of
            :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
            default) the :attr:`write_concern` of this :class:`Database` is used.
          - `read_concern` (optional): An instance of
            :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the
            default) the :attr:`read_concern` of this :class:`Database` is used.

        .. versionadded:: 3.8
        """
        return Database(
            self.client,
            self.__name,
            codec_options or self.codec_options,
            read_preference or self.read_preference,
            write_concern or self.write_concern,
            read_concern or self.read_concern,
        )

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, Database):
            return self.__client == other.client and self.__name == other.name
        return NotImplemented

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __hash__(self) -> int:
        return hash((self.__client, self.__name))

    def __repr__(self):
        return f"Database({self.__client!r}, {self.__name!r})"
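    # Illustrative usage (not part of the library): a minimal sketch of opening a
    # database and deriving a clone with different options, per the docstrings
    # above. The URI, database name, and collection names are hypothetical.
    #
    #   from pymongo import MongoClient, ReadPreference
    #   from pymongo.write_concern import WriteConcern
    #
    #   client = MongoClient("mongodb://localhost:27017")
    #   db = client["app"]                      # same as Database(client, "app")
    #   reporting = db.with_options(
    #       read_preference=ReadPreference.SECONDARY_PREFERRED,
    #       write_concern=WriteConcern(w="majority"),
    #   )
    #   coll = reporting["__private__"]         # dict-style lookup is required for
    #                                           # names with leading underscores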
    def __getattr__(self, name: str) -> Collection[_DocumentType]:
        """Get a collection of this database by name.

        Raises InvalidName if an invalid collection name is used.

        :Parameters:
          - `name`: the name of the collection to get
        """
        if name.startswith("_"):
            raise AttributeError(
                "Database has no attribute {!r}. To access the {}"
                " collection, use database[{!r}].".format(name, name, name)
            )
        return self.__getitem__(name)

    def __getitem__(self, name: str) -> "Collection[_DocumentType]":
        """Get a collection of this database by name.

        Raises InvalidName if an invalid collection name is used.

        :Parameters:
          - `name`: the name of the collection to get
        """
        return Collection(self, name)

    def get_collection(
        self,
        name: str,
        codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None,
        read_preference: Optional[_ServerMode] = None,
        write_concern: Optional[WriteConcern] = None,
        read_concern: Optional[ReadConcern] = None,
    ) -> Collection[_DocumentType]:
        """Get a :class:`~pymongo.collection.Collection` with the given name
        and options.

        Useful for creating a :class:`~pymongo.collection.Collection` with
        different codec options, read preference, and/or write concern from
        this :class:`Database`.

          >>> db.read_preference
          Primary()
          >>> coll1 = db.test
          >>> coll1.read_preference
          Primary()
          >>> from pymongo import ReadPreference
          >>> coll2 = db.get_collection(
          ...     'test', read_preference=ReadPreference.SECONDARY)
          >>> coll2.read_preference
          Secondary(tag_sets=None)

        :Parameters:
          - `name`: The name of the collection - a string.
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`. If ``None`` (the
            default) the :attr:`codec_options` of this :class:`Database` is used.
          - `read_preference` (optional): The read preference to use. If
            ``None`` (the default) the :attr:`read_preference` of this
            :class:`Database` is used. See :mod:`~pymongo.read_preferences`
            for options.
          - `write_concern` (optional): An instance of
            :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
            default) the :attr:`write_concern` of this :class:`Database` is used.
          - `read_concern` (optional): An instance of
            :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the
            default) the :attr:`read_concern` of this :class:`Database` is used.
        """
        return Collection(
            self,
            name,
            False,
            codec_options,
            read_preference,
            write_concern,
            read_concern,
        )
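    # Illustrative usage (not part of the library): a minimal sketch showing how a
    # collection handle can be opened with options that differ from the database's
    # defaults. The collection name and option choices are hypothetical.
    #
    #   from bson.codec_options import CodecOptions
    #   from pymongo.read_concern import ReadConcern
    #
    #   events = db.get_collection(
    #       "events",
    #       codec_options=CodecOptions(tz_aware=True),
    #       read_concern=ReadConcern("majority"),
    #   )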
    def _get_encrypted_fields(self, kwargs, coll_name, ask_db):
        encrypted_fields = kwargs.get("encryptedFields")
        if encrypted_fields:
            return deepcopy(encrypted_fields)
        if (
            self.client.options.auto_encryption_opts
            and self.client.options.auto_encryption_opts._encrypted_fields_map
            and self.client.options.auto_encryption_opts._encrypted_fields_map.get(
                f"{self.name}.{coll_name}"
            )
        ):
            return deepcopy(
                self.client.options.auto_encryption_opts._encrypted_fields_map[
                    f"{self.name}.{coll_name}"
                ]
            )
        if ask_db and self.client.options.auto_encryption_opts:
            options = self[coll_name].options()
            if options.get("encryptedFields"):
                return deepcopy(options["encryptedFields"])
        return None

    @_csot.apply
    def create_collection(
        self,
        name: str,
        codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None,
        read_preference: Optional[_ServerMode] = None,
        write_concern: Optional[WriteConcern] = None,
        read_concern: Optional[ReadConcern] = None,
        session: Optional[ClientSession] = None,
        check_exists: Optional[bool] = True,
        **kwargs: Any,
    ) -> Collection[_DocumentType]:
        """Create a new :class:`~pymongo.collection.Collection` in this
        database.

        Normally collection creation is automatic. This method should
        only be used to specify options on creation.
        :class:`~pymongo.errors.CollectionInvalid` will be
        raised if the collection already exists.

        :Parameters:
          - `name`: the name of the collection to create
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`. If ``None`` (the
            default) the :attr:`codec_options` of this :class:`Database` is used.
          - `read_preference` (optional): The read preference to use. If
            ``None`` (the default) the :attr:`read_preference` of this
            :class:`Database` is used.
          - `write_concern` (optional): An instance of
            :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
            default) the :attr:`write_concern` of this :class:`Database` is used.
          - `read_concern` (optional): An instance of
            :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the
            default) the :attr:`read_concern` of this :class:`Database` is used.
          - `collation` (optional): An instance of
            :class:`~pymongo.collation.Collation`.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - ``check_exists`` (optional): if True (the default), send a
            listCollections command to check if the collection already exists
            before creation.
          - `**kwargs` (optional): additional keyword arguments will
            be passed as options for the `create collection command`_

        All optional `create collection command`_ parameters should be passed
        as keyword arguments to this method. Valid options include, but are
        not limited to:

          - ``size`` (int): desired initial size for the collection (in
            bytes). For capped collections this size is the max
            size of the collection.
          - ``capped`` (bool): if True, this is a capped collection
          - ``max`` (int): maximum number of objects if capped (optional)
          - ``timeseries`` (dict): a document specifying configuration options for
            timeseries collections
          - ``expireAfterSeconds`` (int): the number of seconds after which a
            document in a timeseries collection expires
          - ``validator`` (dict): a document specifying validation rules or
            expressions for the collection
          - ``validationLevel`` (str): how strictly to apply the
            validation rules to existing documents during an update. The default
            level is "strict"
          - ``validationAction`` (str): whether to "error" on invalid documents
            (the default) or just "warn" about the violations but allow invalid
            documents to be inserted
          - ``indexOptionDefaults`` (dict): a document specifying a default
            configuration for indexes when creating a collection
          - ``viewOn`` (str): the name of the source collection or view from which
            to create the view
          - ``pipeline`` (list): a list of aggregation pipeline stages
          - ``comment`` (str): a user-provided comment to attach to this command.
            This option is only supported on MongoDB >= 4.4.
          - ``encryptedFields`` (dict): **(BETA)** Document that describes the
            encrypted fields for Queryable Encryption. For example::

                {
                  "escCollection": "enxcol_.encryptedCollection.esc",
                  "ecocCollection": "enxcol_.encryptedCollection.ecoc",
                  "fields": [
                      {
                          "path": "firstName",
                          "keyId": Binary.from_uuid(UUID('00000000-0000-0000-0000-000000000000')),
                          "bsonType": "string",
                          "queries": {"queryType": "equality"}
                      },
                      {
                          "path": "ssn",
                          "keyId": Binary.from_uuid(UUID('04104104-1041-0410-4104-104104104104')),
                          "bsonType": "string"
                      }
                  ]
                }
          - ``clusteredIndex`` (dict): Document that specifies the clustered index
            configuration. It must have the following form::

                {
                    // key pattern must be {_id: 1}
                    key: <key pattern>,  // required
                    unique: <bool>,  // required, must be `true`
                    name: <string>,  // optional, otherwise automatically generated
                    v: <int>,  // optional, must be `2` if provided
                }
          - ``changeStreamPreAndPostImages`` (dict): a document with a boolean field
            ``enabled`` for enabling pre- and post-images.
        .. versionchanged:: 4.2
           Added the ``check_exists``, ``clusteredIndex``, and ``encryptedFields``
           parameters.

        .. versionchanged:: 3.11
           This method is now supported inside multi-document transactions
           with MongoDB 4.4+.

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.4
           Added the collation option.

        .. versionchanged:: 3.0
           Added the codec_options, read_preference, and write_concern options.

        .. _create collection command:
            https://mongodb.com/docs/manual/reference/command/create
        """
        encrypted_fields = self._get_encrypted_fields(kwargs, name, False)
        if encrypted_fields:
            common.validate_is_mapping("encryptedFields", encrypted_fields)
            kwargs["encryptedFields"] = encrypted_fields

        clustered_index = kwargs.get("clusteredIndex")
        if clustered_index:
            common.validate_is_mapping("clusteredIndex", clustered_index)

        with self.__client._tmp_session(session) as s:
            # Skip this check in a transaction where listCollections is not
            # supported.
            if (
                check_exists
                and (not s or not s.in_transaction)
                and name in self.list_collection_names(filter={"name": name}, session=s)
            ):
                raise CollectionInvalid("collection %s already exists" % name)
            return Collection(
                self,
                name,
                True,
                codec_options,
                read_preference,
                write_concern,
                read_concern,
                session=s,
                **kwargs,
            )
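    # Illustrative usage (not part of the library): a minimal sketch of creating
    # collections with explicit options from the list documented above. The
    # collection names, sizes, and validator document are hypothetical.
    #
    #   # A capped collection of roughly 1 MiB holding at most 1000 documents.
    #   db.create_collection("log", capped=True, size=1024 * 1024, max=1000)
    #
    #   # A collection with server-side schema validation.
    #   db.create_collection(
    #       "users",
    #       validator={"$jsonSchema": {"bsonType": "object", "required": ["email"]}},
    #       validationAction="error",
    #   )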
    def aggregate(
        self, pipeline: _Pipeline, session: Optional[ClientSession] = None, **kwargs: Any
    ) -> CommandCursor[_DocumentType]:
        """Perform a database-level aggregation.

        See the `aggregation pipeline`_ documentation for a list of stages
        that are supported.

        .. code-block:: python

           # Lists all operations currently running on the server.
           with client.admin.aggregate([{"$currentOp": {}}]) as cursor:
               for operation in cursor:
                   print(operation)

        The :meth:`aggregate` method obeys the :attr:`read_preference` of this
        :class:`Database`, except when ``$out`` or ``$merge`` are used, in
        which case :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`
        is used.

        .. note:: This method does not support the 'explain' option. Please
           use :meth:`~pymongo.database.Database.command` instead.

        .. note:: The :attr:`~pymongo.database.Database.write_concern` of this
           database is automatically applied to this operation.

        :Parameters:
          - `pipeline`: a list of aggregation pipeline stages
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `**kwargs` (optional): extra `aggregate command`_ parameters.

        All optional `aggregate command`_ parameters should be passed as
        keyword arguments to this method. Valid options include, but are not
        limited to:

          - `allowDiskUse` (bool): Enables writing to temporary files. When set
            to True, aggregation stages can write data to the _tmp subdirectory
            of the --dbpath directory. The default is False.
          - `maxTimeMS` (int): The maximum amount of time to allow the operation
            to run in milliseconds.
          - `batchSize` (int): The maximum number of documents to return per
            batch. Ignored if the connected mongod or mongos does not support
            returning aggregate results using a cursor.
          - `collation` (optional): An instance of
            :class:`~pymongo.collation.Collation`.
          - `let` (dict): A dict of parameter names and values. Values must be
            constant or closed expressions that do not reference document
            fields. Parameters can then be accessed as variables in an
            aggregate expression context (e.g. ``"$$var"``). This option is
            only supported on MongoDB >= 5.0.

        :Returns:
          A :class:`~pymongo.command_cursor.CommandCursor` over the result set.

        .. versionadded:: 3.9

        .. _aggregation pipeline:
            https://mongodb.com/docs/manual/reference/operator/aggregation-pipeline

        .. _aggregate command:
            https://mongodb.com/docs/manual/reference/command/aggregate
        """
        with self.client._tmp_session(session, close=False) as s:
            cmd = _DatabaseAggregationCommand(
                self,
                CommandCursor,
                pipeline,
                kwargs,
                session is not None,
                user_fields={"cursor": {"firstBatch": 1}},
            )
            return self.client._retryable_read(
                cmd.get_cursor, cmd.get_read_preference(s), s, retryable=not cmd._performs_write
            )
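    # Illustrative usage (not part of the library): a minimal sketch of passing
    # extra aggregate command options as keyword arguments, building on the
    # $currentOp example above. The pipeline and limits shown are hypothetical.
    #
    #   with client.admin.aggregate(
    #       [{"$currentOp": {"allUsers": False}}, {"$match": {"active": True}}],
    #       maxTimeMS=5000,
    #       batchSize=100,
    #   ) as cursor:
    #       for op in cursor:
    #           print(op.get("opid"))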
    def watch(
        self,
        pipeline: Optional[_Pipeline] = None,
        full_document: Optional[str] = None,
        resume_after: Optional[Mapping[str, Any]] = None,
        max_await_time_ms: Optional[int] = None,
        batch_size: Optional[int] = None,
        collation: Optional[_CollationIn] = None,
        start_at_operation_time: Optional[Timestamp] = None,
        session: Optional[ClientSession] = None,
        start_after: Optional[Mapping[str, Any]] = None,
        comment: Optional[Any] = None,
        full_document_before_change: Optional[str] = None,
        show_expanded_events: Optional[bool] = None,
    ) -> DatabaseChangeStream[_DocumentType]:
        """Watch changes on this database.

        Performs an aggregation with an implicit initial ``$changeStream``
        stage and returns a
        :class:`~pymongo.change_stream.DatabaseChangeStream` cursor which
        iterates over changes on all collections in this database.

        Introduced in MongoDB 4.0.

        .. code-block:: python

           with db.watch() as stream:
               for change in stream:
                   print(change)

        The :class:`~pymongo.change_stream.DatabaseChangeStream` iterable
        blocks until the next change document is returned or an error is
        raised. If the
        :meth:`~pymongo.change_stream.DatabaseChangeStream.next` method
        encounters a network error when retrieving a batch from the server,
        it will automatically attempt to recreate the cursor such that no
        change events are missed. Any error encountered during the resume
        attempt indicates there may be an outage and will be raised.

        .. code-block:: python

           try:
               with db.watch([{"$match": {"operationType": "insert"}}]) as stream:
                   for insert_change in stream:
                       print(insert_change)
           except pymongo.errors.PyMongoError:
               # The ChangeStream encountered an unrecoverable error or the
               # resume attempt failed to recreate the cursor.
               logging.error("...")

        For a precise description of the resume process see the
        `change streams specification`_.

        :Parameters:
          - `pipeline` (optional): A list of aggregation pipeline stages to
            append to an initial ``$changeStream`` stage. Not all
            pipeline stages are valid after a ``$changeStream`` stage, see the
            MongoDB documentation on change streams for the supported stages.
          - `full_document` (optional): The fullDocument to pass as an option
            to the ``$changeStream`` stage. Allowed values: 'updateLookup',
            'whenAvailable', 'required'. When set to 'updateLookup', the
            change notification for partial updates will include both a delta
            describing the changes to the document, as well as a copy of the
            entire document that was changed from some time after the change
            occurred.
          - `full_document_before_change`: Allowed values: 'whenAvailable'
            and 'required'. Change events may now result in a
            'fullDocumentBeforeChange' response field.
          - `resume_after` (optional): A resume token. If provided, the
            change stream will start returning changes that occur directly
            after the operation specified in the resume token. A resume token
            is the _id value of a change document.
          - `max_await_time_ms` (optional): The maximum time in milliseconds
            for the server to wait for changes before responding to a getMore
            operation.
          - `batch_size` (optional): The maximum number of documents to return
            per batch.
          - `collation` (optional): The :class:`~pymongo.collation.Collation`
            to use for the aggregation.
          - `start_at_operation_time` (optional): If provided, the resulting
            change stream will only return changes that occurred at or after
            the specified :class:`~bson.timestamp.Timestamp`. Requires
            MongoDB >= 4.0.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `start_after` (optional): The same as `resume_after` except that
            `start_after` can resume notifications after an invalidate event.
            This option and `resume_after` are mutually exclusive.
          - `comment` (optional): A user-provided comment to attach to this
            command.
          - `show_expanded_events` (optional): Include expanded events such as
            DDL events like `dropIndexes`.

        :Returns:
          A :class:`~pymongo.change_stream.DatabaseChangeStream` cursor.

        .. versionchanged:: 4.3
           Added `show_expanded_events` parameter.

        .. versionchanged:: 4.2
           Added ``full_document_before_change`` parameter.

        .. versionchanged:: 4.1
           Added ``comment`` parameter.

        .. versionchanged:: 3.9
           Added the ``start_after`` parameter.

        .. versionadded:: 3.7

        .. seealso:: The MongoDB documentation on `changeStreams `_.

        .. _change streams specification:
            https://github.com/mongodb/specifications/blob/master/source/change-streams/change-streams.rst
        """
        return DatabaseChangeStream(
            self,
            pipeline,
            full_document,
            resume_after,
            max_await_time_ms,
            batch_size,
            collation,
            start_at_operation_time,
            session,
            start_after,
            comment,
            full_document_before_change,
            show_expanded_events=show_expanded_events,
        )
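    # Illustrative usage (not part of the library): a minimal sketch of resuming a
    # database-level change stream from a stored resume token. How the token is
    # persisted and the handle() callback are hypothetical.
    #
    #   saved_token = None
    #   with db.watch(full_document="updateLookup") as stream:
    #       for change in stream:
    #           handle(change)                     # hypothetical application callback
    #           saved_token = stream.resume_token  # persist for later restarts
    #
    #   # Later, pick up where we left off without missing events.
    #   with db.watch(resume_after=saved_token) as stream:
    #       for change in stream:
    #           handle(change)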
    def _command(
        self,
        sock_info,
        command,
        value=1,
        check=True,
        allowable_errors=None,
        read_preference=ReadPreference.PRIMARY,
        codec_options=DEFAULT_CODEC_OPTIONS,
        write_concern=None,
        parse_write_concern_error=False,
        session=None,
        **kwargs,
    ):
        """Internal command helper."""
        if isinstance(command, str):
            command = SON([(command, value)])

        command.update(kwargs)
        with self.__client._tmp_session(session) as s:
            return sock_info.command(
                self.__name,
                command,
                read_preference,
                codec_options,
                check,
                allowable_errors,
                write_concern=write_concern,
                parse_write_concern_error=parse_write_concern_error,
                session=s,
                client=self.__client,
            )

    @_csot.apply
    def command(
        self,
        command: Union[str, MutableMapping[str, Any]],
        value: Any = 1,
        check: bool = True,
        allowable_errors: Optional[Sequence[Union[str, int]]] = None,
        read_preference: Optional[_ServerMode] = None,
        codec_options: "Optional[bson.codec_options.CodecOptions[_CodecDocumentType]]" = None,
        session: Optional[ClientSession] = None,
        comment: Optional[Any] = None,
        **kwargs: Any,
    ) -> _CodecDocumentType:
        """Issue a MongoDB command.

        Send command `command` to the database and return the
        response. If `command` is an instance of :class:`str` then the
        command {`command`: `value`} will be sent. Otherwise, `command` must
        be an instance of :class:`dict` and will be sent as is.

        Any additional keyword arguments will be added to the final
        command document before it is sent.

        For example, a command like ``{buildinfo: 1}`` can be sent using:

        >>> db.command("buildinfo")

        For a command where the value matters, like ``{count:
        collection_name}`` we can do:

        >>> db.command("count", collection_name)

        For commands that take additional arguments we can use
        kwargs. So ``{filemd5: object_id, root: file_root}`` becomes:

        >>> db.command("filemd5", object_id, root=file_root)

        :Parameters:
          - `command`: document representing the command to be issued,
            or the name of the command (for simple commands only).

            .. note:: the order of keys in the `command` document is
               significant (the "verb" must come first), so commands
               which require multiple keys (e.g. `findandmodify`)
               should use an instance of :class:`~bson.son.SON` or
               a string and kwargs instead of a Python `dict`.

          - `value` (optional): value to use for the command verb when
            `command` is passed as a string
          - `check` (optional): check the response for errors, raising
            :class:`~pymongo.errors.OperationFailure` if there are any
          - `allowable_errors`: if `check` is ``True``, error messages
            in this list will be ignored by error-checking
          - `read_preference` (optional): The read preference for this
            operation. See :mod:`~pymongo.read_preferences` for options.
            If the provided `session` is in a transaction, defaults to the
            read preference configured for the transaction. Otherwise,
            defaults to
            :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
          - `codec_options`: A :class:`~bson.codec_options.CodecOptions`
            instance.
          - `session` (optional): A
            :class:`~pymongo.client_session.ClientSession`.
          - `comment` (optional): A user-provided comment to attach to this
            command.
          - `**kwargs` (optional): additional keyword arguments will
            be added to the command document before it is sent

        .. note:: :meth:`command` does **not** obey this Database's
           :attr:`read_preference` or :attr:`codec_options`. You must use the
           ``read_preference`` and ``codec_options`` parameters instead.

        .. note:: :meth:`command` does **not** apply any custom TypeDecoders
           when decoding the command response.

        .. note:: If this client has been configured to use MongoDB Stable
           API (see :ref:`versioned-api-ref`), then :meth:`command` will
           automatically add API versioning options to the given command.
           Explicitly adding API versioning options in the command and
           declaring an API version on the client is not supported.

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.0
           Removed the `as_class`, `fields`, `uuid_subtype`, `tag_sets`,
           and `secondary_acceptable_latency_ms` options.
           Removed `compile_re` option: PyMongo now always represents BSON
           regular expressions as :class:`~bson.regex.Regex` objects. Use
           :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
           BSON regular expression to a Python regular expression object.
           Added the ``codec_options`` parameter.

        .. seealso:: The MongoDB documentation on `commands `_.
        """
        opts = codec_options or DEFAULT_CODEC_OPTIONS
        if comment is not None:
            kwargs["comment"] = comment

        if read_preference is None:
            read_preference = (session and session._txn_read_preference()) or ReadPreference.PRIMARY
        with self.__client._socket_for_reads(read_preference, session) as (
            sock_info,
            read_preference,
        ):
            return self._command(
                sock_info,
                command,
                value,
                check,
                allowable_errors,
                read_preference,
                opts,
                session=session,
                **kwargs,
            )
    def _retryable_read_command(
        self,
        command,
        value=1,
        check=True,
        allowable_errors=None,
        read_preference=None,
        codec_options=DEFAULT_CODEC_OPTIONS,
        session=None,
        **kwargs,
    ):
        """Same as command but used for retryable read commands."""
        if read_preference is None:
            read_preference = (session and session._txn_read_preference()) or ReadPreference.PRIMARY

        def _cmd(session, server, sock_info, read_preference):
            return self._command(
                sock_info,
                command,
                value,
                check,
                allowable_errors,
                read_preference,
                codec_options,
                session=session,
                **kwargs,
            )

        return self.__client._retryable_read(_cmd, read_preference, session)

    def _list_collections(self, sock_info, session, read_preference, **kwargs):
        """Internal listCollections helper."""
        coll = self.get_collection("$cmd", read_preference=read_preference)
        cmd = SON([("listCollections", 1), ("cursor", {})])
        cmd.update(kwargs)
        with self.__client._tmp_session(session, close=False) as tmp_session:
            cursor = self._command(
                sock_info, cmd, read_preference=read_preference, session=tmp_session
            )["cursor"]
            cmd_cursor = CommandCursor(
                coll,
                cursor,
                sock_info.address,
                session=tmp_session,
                explicit_session=session is not None,
                comment=cmd.get("comment"),
            )
        cmd_cursor._maybe_pin_connection(sock_info)
        return cmd_cursor

    def list_collections(
        self,
        session: Optional[ClientSession] = None,
        filter: Optional[Mapping[str, Any]] = None,
        comment: Optional[Any] = None,
        **kwargs: Any,
    ) -> CommandCursor[Dict[str, Any]]:
        """Get a cursor over the collections of this database.

        :Parameters:
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `filter` (optional): A query document to filter the list of
            collections returned from the listCollections command.
          - `comment` (optional): A user-provided comment to attach to this
            command.
          - `**kwargs` (optional): Optional parameters of the
            `listCollections command `_
            can be passed as keyword arguments to this method. The supported
            options differ by server version.

        :Returns:
          An instance of :class:`~pymongo.command_cursor.CommandCursor`.

        .. versionadded:: 3.6
        """
        if filter is not None:
            kwargs["filter"] = filter
        read_pref = (session and session._txn_read_preference()) or ReadPreference.PRIMARY
        if comment is not None:
            kwargs["comment"] = comment

        def _cmd(session, server, sock_info, read_preference):
            return self._list_collections(
                sock_info, session, read_preference=read_preference, **kwargs
            )

        return self.__client._retryable_read(_cmd, read_pref, session)

    def list_collection_names(
        self,
        session: Optional[ClientSession] = None,
        filter: Optional[Mapping[str, Any]] = None,
        comment: Optional[Any] = None,
        **kwargs: Any,
    ) -> List[str]:
        """Get a list of all the collection names in this database.

        For example, to list all non-system collections::

            filter = {"name": {"$regex": r"^(?!system\\.)"}}
            db.list_collection_names(filter=filter)

        :Parameters:
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `filter` (optional): A query document to filter the list of
            collections returned from the listCollections command.
          - `comment` (optional): A user-provided comment to attach to this
            command.
          - `**kwargs` (optional): Optional parameters of the
            `listCollections command `_
            can be passed as keyword arguments to this method. The supported
            options differ by server version.

        .. versionchanged:: 3.8
           Added the ``filter`` and ``**kwargs`` parameters.

        .. versionadded:: 3.6
        """
        if comment is not None:
            kwargs["comment"] = comment
        if filter is None:
            kwargs["nameOnly"] = True
        else:
            # The enumerate collections spec states that "drivers MUST NOT set
            # nameOnly if a filter specifies any keys other than name."
            common.validate_is_mapping("filter", filter)
            kwargs["filter"] = filter
            if not filter or (len(filter) == 1 and "name" in filter):
                kwargs["nameOnly"] = True

        return [result["name"] for result in self.list_collections(session=session, **kwargs)]

    def _drop_helper(self, name, session=None, comment=None):
        command = SON([("drop", name)])
        if comment is not None:
            command["comment"] = comment

        with self.__client._socket_for_writes(session) as sock_info:
            return self._command(
                sock_info,
                command,
                allowable_errors=["ns not found", 26],
                write_concern=self._write_concern_for(session),
                parse_write_concern_error=True,
                session=session,
            )

    @_csot.apply
    def drop_collection(
        self,
        name_or_collection: Union[str, Collection[_DocumentTypeArg]],
        session: Optional[ClientSession] = None,
        comment: Optional[Any] = None,
        encrypted_fields: Optional[Mapping[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Drop a collection.

        :Parameters:
          - `name_or_collection`: the name of a collection to drop or the
            collection object itself
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `comment` (optional): A user-provided comment to attach to this
            command.
          - `encrypted_fields`: **(BETA)** Document that describes the
            encrypted fields for Queryable Encryption. For example::

                {
                  "escCollection": "enxcol_.encryptedCollection.esc",
                  "ecocCollection": "enxcol_.encryptedCollection.ecoc",
                  "fields": [
                      {
                          "path": "firstName",
                          "keyId": Binary.from_uuid(UUID('00000000-0000-0000-0000-000000000000')),
                          "bsonType": "string",
                          "queries": {"queryType": "equality"}
                      },
                      {
                          "path": "ssn",
                          "keyId": Binary.from_uuid(UUID('04104104-1041-0410-4104-104104104104')),
                          "bsonType": "string"
                      }
                  ]
                }

        .. note:: The :attr:`~pymongo.database.Database.write_concern` of
           this database is automatically applied to this operation.

        .. versionchanged:: 4.2
           Added ``encrypted_fields`` parameter.

        .. versionchanged:: 4.1
           Added ``comment`` parameter.

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.4
           Apply this database's write concern automatically to this operation
           when connected to MongoDB >= 3.4.
        """
        name = name_or_collection
        if isinstance(name, Collection):
            name = name.name

        if not isinstance(name, str):
            raise TypeError("name_or_collection must be an instance of str")
        encrypted_fields = self._get_encrypted_fields(
            {"encryptedFields": encrypted_fields},
            name,
            True,
        )
        if encrypted_fields:
            common.validate_is_mapping("encrypted_fields", encrypted_fields)
            self._drop_helper(
                _esc_coll_name(encrypted_fields, name), session=session, comment=comment
            )
            self._drop_helper(
                _ecoc_coll_name(encrypted_fields, name), session=session, comment=comment
            )

        return self._drop_helper(name, session, comment)
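    # Illustrative usage (not part of the library): a minimal sketch of listing
    # collection names with a filter and dropping the matches; the "tmp_" naming
    # convention is hypothetical.
    #
    #   names = db.list_collection_names(filter={"name": {"$regex": "^tmp_"}})
    #   for tmp_name in names:
    #       db.drop_collection(tmp_name)
    #   # Dropping a collection that does not exist is not an error: the drop
    #   # helper treats the server's "ns not found" reply as allowable.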
    def validate_collection(
        self,
        name_or_collection: Union[str, Collection[_DocumentTypeArg]],
        scandata: bool = False,
        full: bool = False,
        session: Optional[ClientSession] = None,
        background: Optional[bool] = None,
        comment: Optional[Any] = None,
    ) -> Dict[str, Any]:
        """Validate a collection.

        Returns a dict of validation info. Raises CollectionInvalid if
        validation fails.

        See also the MongoDB documentation on the `validate command`_.

        :Parameters:
          - `name_or_collection`: A Collection object or the name of a
            collection to validate.
          - `scandata`: Do extra checks beyond checking the overall structure
            of the collection.
          - `full`: Have the server do a more thorough scan of the
            collection. Use with `scandata` for a thorough scan
            of the structure of the collection and the individual
            documents.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `background` (optional): A boolean flag that determines whether
            the command runs in the background. Requires MongoDB 4.4+.
          - `comment` (optional): A user-provided comment to attach to this
            command.

        .. versionchanged:: 4.1
           Added ``comment`` parameter.

        .. versionchanged:: 3.11
           Added ``background`` parameter.

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. _validate command: https://mongodb.com/docs/manual/reference/command/validate/
        """
        name = name_or_collection
        if isinstance(name, Collection):
            name = name.name

        if not isinstance(name, str):
            raise TypeError("name_or_collection must be an instance of str or Collection")
        cmd = SON([("validate", name), ("scandata", scandata), ("full", full)])
        if comment is not None:
            cmd["comment"] = comment

        if background is not None:
            cmd["background"] = background

        result = cast(dict, self.command(cmd, session=session))

        valid = True
        # Pre 1.9 results
        if "result" in result:
            info = result["result"]
            if info.find("exception") != -1 or info.find("corrupt") != -1:
                raise CollectionInvalid(f"{name} invalid: {info}")
        # Sharded results
        elif "raw" in result:
            for _, res in result["raw"].items():
                if "result" in res:
                    info = res["result"]
                    if info.find("exception") != -1 or info.find("corrupt") != -1:
                        raise CollectionInvalid(f"{name} invalid: {info}")
                elif not res.get("valid", False):
                    valid = False
                    break
        # Post 1.9 non-sharded results.
        elif not result.get("valid", False):
            valid = False

        if not valid:
            raise CollectionInvalid(f"{name} invalid: {result!r}")

        return result

    # See PYTHON-3084.
    __iter__ = None

    def __next__(self) -> NoReturn:
        raise TypeError("'Database' object is not iterable")

    next = __next__

    def __bool__(self) -> NoReturn:
        raise NotImplementedError(
            "Database objects do not implement truth "
            "value testing or bool(). Please compare "
            "with None instead: database is not None"
        )

    def dereference(
        self,
        dbref: DBRef,
        session: Optional[ClientSession] = None,
        comment: Optional[Any] = None,
        **kwargs: Any,
    ) -> Optional[_DocumentType]:
        """Dereference a :class:`~bson.dbref.DBRef`, getting the
        document it points to.

        Raises :class:`TypeError` if `dbref` is not an instance of
        :class:`~bson.dbref.DBRef`. Returns a document, or ``None`` if
        the reference does not point to a valid document. Raises
        :class:`ValueError` if `dbref` has a database specified that
        is different from the current database.

        :Parameters:
          - `dbref`: the reference
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`.
          - `comment` (optional): A user-provided comment to attach to this
            command.
          - `**kwargs` (optional): any additional keyword arguments
            are the same as the arguments to
            :meth:`~pymongo.collection.Collection.find`.

        .. versionchanged:: 4.1
           Added ``comment`` parameter.

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        if not isinstance(dbref, DBRef):
            raise TypeError("cannot dereference a %s" % type(dbref))
        if dbref.database is not None and dbref.database != self.__name:
            raise ValueError(
                "trying to dereference a DBRef that points to "
                "another database ({!r} not {!r})".format(dbref.database, self.__name)
            )
        return self[dbref.collection].find_one(
            {"_id": dbref.id}, session=session, comment=comment, **kwargs
        )