Package exchangelib

Expand source code
import sys

from .account import Account, Identity
from .attachments import FileAttachment, ItemAttachment
from .autodiscover import discover
from .configuration import Configuration
from .credentials import DELEGATE, IMPERSONATION, Credentials, OAuth2Credentials, \
    OAuth2AuthorizationCodeCredentials
from .ewsdatetime import EWSDate, EWSDateTime, EWSTimeZone, UTC, UTC_NOW
from .extended_properties import ExtendedProperty
from .folders import Folder, RootOfHierarchy, FolderCollection, SHALLOW, DEEP
from .items import AcceptItem, TentativelyAcceptItem, DeclineItem, CalendarItem, CancelCalendarItem, Contact, \
    DistributionList, Message, PostItem, Task, ForwardItem, ReplyToItem, ReplyAllToItem
from .properties import Body, HTMLBody, ItemId, Mailbox, Attendee, Room, RoomList, UID, DLMailbox
from .protocol import FaultTolerance, FailFast, BaseProtocol, NoVerifyHTTPAdapter, TLSClientAuth
from .restriction import Q
from .settings import OofSettings
from .transport import BASIC, DIGEST, NTLM, GSSAPI, SSPI, OAUTH2, CBA
from .version import Build, Version

__version__ = '4.5.1'

__all__ = [
    '__version__',
    'Account', 'Identity',
    'FileAttachment', 'ItemAttachment',
    'discover',
    'Configuration',
    'DELEGATE', 'IMPERSONATION', 'Credentials', 'OAuth2AuthorizationCodeCredentials', 'OAuth2Credentials',
    'EWSDate', 'EWSDateTime', 'EWSTimeZone', 'UTC', 'UTC_NOW',
    'ExtendedProperty',
    'Folder', 'RootOfHierarchy', 'FolderCollection', 'SHALLOW', 'DEEP',
    'AcceptItem', 'TentativelyAcceptItem', 'DeclineItem', 'CalendarItem', 'CancelCalendarItem', 'Contact',
    'DistributionList', 'Message', 'PostItem', 'Task', 'ForwardItem', 'ReplyToItem', 'ReplyAllToItem',
    'ItemId', 'Mailbox', 'DLMailbox', 'Attendee', 'Room', 'RoomList', 'Body', 'HTMLBody', 'UID',
    'FailFast', 'FaultTolerance', 'BaseProtocol', 'NoVerifyHTTPAdapter', 'TLSClientAuth',
    'OofSettings',
    'Q',
    'BASIC', 'DIGEST', 'NTLM', 'GSSAPI', 'SSPI', 'OAUTH2', 'CBA',
    'Build', 'Version',
]

# Set a default user agent, e.g. "exchangelib/3.1.1 (python-requests/2.22.0)"
import requests.utils
BaseProtocol.USERAGENT = "%s/%s (%s)" % (__name__, __version__, requests.utils.default_user_agent())

# Support fromisoformat() in Python < 3.7
if sys.version_info[:2] < (3, 7):
    from backports.datetime_fromisoformat import MonkeyPatch
    MonkeyPatch.patch_fromisoformat()


def close_connections():
    from .autodiscover import close_connections as close_autodiscover_connections
    from .protocol import close_connections as close_protocol_connections
    close_autodiscover_connections()
    close_protocol_connections()
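
A minimal usage sketch of the top-level API; the server name, addresses and credentials below are placeholders, not values from this package:

from exchangelib import Account, Configuration, Credentials, DELEGATE, close_connections

credentials = Credentials('alice@example.com', 'topsecret')
config = Configuration(server='mail.example.com', credentials=credentials)
account = Account('alice@example.com', config=config, autodiscover=False, access_type=DELEGATE)

# Print the subjects of the 10 most recent inbox items
for item in account.inbox.all().order_by('-datetime_received')[:10]:
    print(item.subject)

# Close cached autodiscover and protocol connections when done
close_connections()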

Sub-modules

exchangelib.account
exchangelib.attachments
exchangelib.autodiscover
exchangelib.configuration
exchangelib.credentials

Implements an Exchange user object and access types. Exchange provides two different ways of granting access for a login to a specific account …

exchangelib.errors

Stores errors specific to this package, and mirrors all the possible errors that EWS can return.

exchangelib.ewsdatetime
exchangelib.extended_properties
exchangelib.fields
exchangelib.folders
exchangelib.indexed_properties
exchangelib.items
exchangelib.properties
exchangelib.protocol

A protocol is an endpoint for EWS service connections. It contains all necessary information to make HTTPS connections …

exchangelib.queryset
exchangelib.recurrence
exchangelib.restriction
exchangelib.services

Implement a selection of EWS services (operations) …

exchangelib.settings
exchangelib.transport
exchangelib.util
exchangelib.version
exchangelib.winzone

A dict to translate from IANA location name to Windows timezone name. Translations taken from …

Functions

def UTC_NOW()
Expand source code
UTC_NOW = lambda: EWSDateTime.now(tz=UTC)  # noqa: E731
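
For example:

now = UTC_NOW()  # a timezone-aware EWSDateTime in UTC, equivalent to EWSDateTime.now(tz=UTC)
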
def discover(email, credentials=None, auth_type=None, retry_policy=None)
Expand source code
def discover(email, credentials=None, auth_type=None, retry_policy=None):
    return Autodiscovery(
        email=email, credentials=credentials, auth_type=auth_type, retry_policy=retry_policy
    ).discover()
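
A sketch of calling discover() directly; the address and credentials are placeholders. As in Account.__init__, the call returns the autodiscover response and a ready-to-use Protocol:

from exchangelib import Credentials, discover

ad_response, protocol = discover(
    'alice@example.com',
    credentials=Credentials('alice@example.com', 'topsecret'),
)
print(ad_response.autodiscover_smtp_address)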

Classes

class AcceptItem (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/acceptitem

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs: 'account' is optional but allows calling 'send()' and 'delete()'. 'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.
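
A minimal sketch of accepting a meeting request with this class. 'account' is an existing Account, 'request' is assumed to be a meeting request item fetched from the inbox, and the 'reference_item_id' and 'body' field names plus the ReferenceItemId import path are assumptions taken from BaseMeetingReplyItem:

from exchangelib import AcceptItem
from exchangelib.properties import ReferenceItemId  # assumed import path

accept = AcceptItem(
    account=account,
    reference_item_id=ReferenceItemId(id=request.id, changekey=request.changekey),
    body='Looking forward to it',
)
accept.send()  # available because 'account' is set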

Expand source code
class AcceptItem(BaseMeetingReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/acceptitem"""

    ELEMENT_NAME = 'AcceptItem'

Ancestors

Class variables

var ELEMENT_NAME

Inherited members

class Account (primary_smtp_address, fullname=None, access_type=None, autodiscover=False, credentials=None, config=None, locale=None, default_timezone=None)

Models an Exchange server user account.

:param primary_smtp_address: The primary email address associated with the account on the Exchange server
:param fullname: The full name of the account. Optional. (Default value = None)
:param access_type: The access type granted to 'credentials' for this account. Valid options are 'delegate' and 'impersonation'. 'delegate' is default if 'credentials' is set. Otherwise, 'impersonation' is default.
:param autodiscover: Whether to look up the EWS endpoint automatically using the autodiscover protocol. (Default value = False)
:param credentials: A Credentials object containing valid credentials for this account. (Default value = None)
:param config: A Configuration object containing EWS endpoint information. Required if autodiscover is disabled. (Default value = None)
:param locale: The locale of the user, e.g. 'en_US'. Defaults to the locale of the host, if available.
:param default_timezone: EWS may return some datetime values without timezone information. In this case, we will assume values to be in the provided timezone. Defaults to the timezone of the host.
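
Two construction sketches; the addresses, server and credentials are placeholders:

from exchangelib import Account, Configuration, Credentials, EWSTimeZone

credentials = Credentials('alice@example.com', 'topsecret')

# With autodiscover, the EWS endpoint is looked up from the email address
account = Account('alice@example.com', credentials=credentials, autodiscover=True)

# Without autodiscover, a Configuration pointing at the server is required
config = Configuration(server='mail.example.com', credentials=credentials)
account = Account(
    'alice@example.com', config=config, autodiscover=False,
    default_timezone=EWSTimeZone('Europe/Copenhagen'),  # IANA key; assumed to be accepted by EWSTimeZone here
)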

Expand source code
class Account:
    """Models an Exchange server user account."""

    def __init__(self, primary_smtp_address, fullname=None, access_type=None, autodiscover=False, credentials=None,
                 config=None, locale=None, default_timezone=None):
        """

        :param primary_smtp_address: The primary email address associated with the account on the Exchange server
        :param fullname: The full name of the account. Optional. (Default value = None)
        :param access_type: The access type granted to 'credentials' for this account. Valid options are 'delegate'
            and 'impersonation'. 'delegate' is default if 'credentials' is set. Otherwise, 'impersonation' is default.
        :param autodiscover: Whether to look up the EWS endpoint automatically using the autodiscover protocol.
            (Default value = False)
        :param credentials: A Credentials object containing valid credentials for this account. (Default value = None)
        :param config: A Configuration object containing EWS endpoint information. Required if autodiscover is disabled
            (Default value = None)
        :param locale: The locale of the user, e.g. 'en_US'. Defaults to the locale of the host, if available.
        :param default_timezone: EWS may return some datetime values without timezone information. In this case, we will
            assume values to be in the provided timezone. Defaults to the timezone of the host.
        :return:
        """
        if '@' not in primary_smtp_address:
            raise ValueError("primary_smtp_address %r is not an email address" % primary_smtp_address)
        self.fullname = fullname
        # Assume delegate access if individual credentials are provided. Else, assume service user with impersonation
        self.access_type = access_type or (DELEGATE if credentials else IMPERSONATION)
        if self.access_type not in ACCESS_TYPES:
            raise ValueError("'access_type' %r must be one of %s" % (self.access_type, ACCESS_TYPES))
        try:
            self.locale = locale or getlocale()[0] or None  # get_locale() might not be able to determine the locale
        except ValueError as e:
            # getlocale() may throw ValueError if it fails to parse the system locale
            log.warning('Failed to get locale (%s)', e)
            self.locale = None
        if not isinstance(self.locale, (type(None), str)):
            raise ValueError("Expected 'locale' to be a string, got %r" % self.locale)
        if default_timezone:
            try:
                self.default_timezone = EWSTimeZone.from_timezone(default_timezone)
            except TypeError:
                raise ValueError("Expected 'default_timezone' to be an EWSTimeZone, got %r" % default_timezone)
        else:
            try:
                self.default_timezone = EWSTimeZone.localzone()
            except (ValueError, UnknownTimeZone) as e:
                # There is no translation from local timezone name to Windows timezone name, or we failed to find the
                # local timezone.
                log.warning('%s. Fallback to UTC', e.args[0])
                self.default_timezone = UTC
        if not isinstance(config, (Configuration, type(None))):
            raise ValueError("Expected 'config' to be a Configuration, got %r" % config)
        if autodiscover:
            if config:
                retry_policy, auth_type = config.retry_policy, config.auth_type
                if not credentials:
                    credentials = config.credentials
            else:
                retry_policy, auth_type = None, None
            self.ad_response, self.protocol = discover(
                email=primary_smtp_address, credentials=credentials, auth_type=auth_type, retry_policy=retry_policy
            )
            primary_smtp_address = self.ad_response.autodiscover_smtp_address
        else:
            if not config:
                raise AttributeError('non-autodiscover requires a config')
            self.ad_response = None
            self.protocol = Protocol(config=config)

        # Other ways of identifying the account can be added later
        self.identity = Identity(primary_smtp_address=primary_smtp_address)

        # We may need to override the default server version on a per-account basis because Microsoft may report one
        # server version up-front but delegate account requests to an older backend server.
        self.version = self.protocol.version
        log.debug('Added account: %s', self)

    @property
    def primary_smtp_address(self):
        return self.identity.primary_smtp_address

    @threaded_cached_property
    def admin_audit_logs(self):
        return self.root.get_default_folder(AdminAuditLogs)

    @threaded_cached_property
    def archive_deleted_items(self):
        return self.archive_root.get_default_folder(ArchiveDeletedItems)

    @threaded_cached_property
    def archive_inbox(self):
        return self.archive_root.get_default_folder(ArchiveInbox)

    @threaded_cached_property
    def archive_msg_folder_root(self):
        return self.archive_root.get_default_folder(ArchiveMsgFolderRoot)

    @threaded_cached_property
    def archive_recoverable_items_deletions(self):
        return self.archive_root.get_default_folder(ArchiveRecoverableItemsDeletions)

    @threaded_cached_property
    def archive_recoverable_items_purges(self):
        return self.archive_root.get_default_folder(ArchiveRecoverableItemsPurges)

    @threaded_cached_property
    def archive_recoverable_items_root(self):
        return self.archive_root.get_default_folder(ArchiveRecoverableItemsRoot)

    @threaded_cached_property
    def archive_recoverable_items_versions(self):
        return self.archive_root.get_default_folder(ArchiveRecoverableItemsVersions)

    @threaded_cached_property
    def archive_root(self):
        return ArchiveRoot.get_distinguished(account=self)

    @threaded_cached_property
    def calendar(self):
        # If the account contains a shared calendar from a different user, that calendar will be in the folder list.
        # Attempt not to return one of those. An account may not always have a calendar called "Calendar", but a
        # Calendar folder with a localized name instead. Return that, if it's available, but always prefer any
        # distinguished folder returned by the server.
        return self.root.get_default_folder(Calendar)

    @threaded_cached_property
    def conflicts(self):
        return self.root.get_default_folder(Conflicts)

    @threaded_cached_property
    def contacts(self):
        return self.root.get_default_folder(Contacts)

    @threaded_cached_property
    def conversation_history(self):
        return self.root.get_default_folder(ConversationHistory)

    @threaded_cached_property
    def directory(self):
        return self.root.get_default_folder(Directory)

    @threaded_cached_property
    def drafts(self):
        return self.root.get_default_folder(Drafts)

    @threaded_cached_property
    def favorites(self):
        return self.root.get_default_folder(Favorites)

    @threaded_cached_property
    def im_contact_list(self):
        return self.root.get_default_folder(IMContactList)

    @threaded_cached_property
    def inbox(self):
        return self.root.get_default_folder(Inbox)

    @threaded_cached_property
    def journal(self):
        return self.root.get_default_folder(Journal)

    @threaded_cached_property
    def junk(self):
        return self.root.get_default_folder(JunkEmail)

    @threaded_cached_property
    def local_failures(self):
        return self.root.get_default_folder(LocalFailures)

    @threaded_cached_property
    def msg_folder_root(self):
        return self.root.get_default_folder(MsgFolderRoot)

    @threaded_cached_property
    def my_contacts(self):
        return self.root.get_default_folder(MyContacts)

    @threaded_cached_property
    def notes(self):
        return self.root.get_default_folder(Notes)

    @threaded_cached_property
    def outbox(self):
        return self.root.get_default_folder(Outbox)

    @threaded_cached_property
    def people_connect(self):
        return self.root.get_default_folder(PeopleConnect)

    @threaded_cached_property
    def public_folders_root(self):
        return PublicFoldersRoot.get_distinguished(account=self)

    @threaded_cached_property
    def quick_contacts(self):
        return self.root.get_default_folder(QuickContacts)

    @threaded_cached_property
    def recipient_cache(self):
        return self.root.get_default_folder(RecipientCache)

    @threaded_cached_property
    def recoverable_items_deletions(self):
        return self.root.get_default_folder(RecoverableItemsDeletions)

    @threaded_cached_property
    def recoverable_items_purges(self):
        return self.root.get_default_folder(RecoverableItemsPurges)

    @threaded_cached_property
    def recoverable_items_root(self):
        return self.root.get_default_folder(RecoverableItemsRoot)

    @threaded_cached_property
    def recoverable_items_versions(self):
        return self.root.get_default_folder(RecoverableItemsVersions)

    @threaded_cached_property
    def root(self):
        return Root.get_distinguished(account=self)

    @threaded_cached_property
    def search_folders(self):
        return self.root.get_default_folder(SearchFolders)

    @threaded_cached_property
    def sent(self):
        return self.root.get_default_folder(SentItems)

    @threaded_cached_property
    def server_failures(self):
        return self.root.get_default_folder(ServerFailures)

    @threaded_cached_property
    def sync_issues(self):
        return self.root.get_default_folder(SyncIssues)

    @threaded_cached_property
    def tasks(self):
        return self.root.get_default_folder(Tasks)

    @threaded_cached_property
    def todo_search(self):
        return self.root.get_default_folder(ToDoSearch)

    @threaded_cached_property
    def trash(self):
        return self.root.get_default_folder(DeletedItems)

    @threaded_cached_property
    def voice_mail(self):
        return self.root.get_default_folder(VoiceMail)

    @property
    def domain(self):
        return get_domain(self.primary_smtp_address)

    @property
    def oof_settings(self):
        # We don't want to cache this property because then we can't easily get updates. 'threaded_cached_property'
        # supports the 'del self.oof_settings' syntax to invalidate the cache, but does not support custom setter
        # methods. Having a non-cached service call here goes against the assumption that properties are cheap, but the
        # alternative is to create get_oof_settings() and set_oof_settings(), and that's just too Java-ish for my taste.
        return GetUserOofSettings(account=self).get(
            mailbox=Mailbox(email_address=self.primary_smtp_address),
        )

    @oof_settings.setter
    def oof_settings(self, value):
        SetUserOofSettings(account=self).get(
            oof_settings=value,
            mailbox=Mailbox(email_address=self.primary_smtp_address),
        )

    def _consume_item_service(self, service_cls, items, chunk_size, kwargs):
        if isinstance(items, QuerySet):
            # We just want an iterator over the results
            items = iter(items)
        is_empty, items = peek(items)
        if is_empty:
            # We accept generators, so it's not always convenient for caller to know up-front if 'ids' is empty. Allow
            # empty 'ids' and return early.
            return
        kwargs['items'] = items
        yield from service_cls(account=self, chunk_size=chunk_size).call(**kwargs)

    def export(self, items, chunk_size=None):
        """Return export strings of the given items.

        :param items: An iterable containing the Items we want to export
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: A list of strings, the exported representation of the object
        """
        return list(
            self._consume_item_service(service_cls=ExportItems, items=items, chunk_size=chunk_size, kwargs={})
        )

    def upload(self, data, chunk_size=None):
        """Upload objects retrieved from an export to the given folders.

        :param data: An iterable of tuples containing the folder we want to upload the data to and the string outputs of
            exports. If you want to update items instead of create, the data must be a tuple of
            (ItemId, is_associated, data) values.
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: A list of tuples with the new ids and changekeys

          Example:
          account.upload([
              (account.inbox, "AABBCC..."),
              (account.inbox, (ItemId('AA', 'BB'), False, "XXYYZZ...")),
              (account.inbox, (('CC', 'DD'), None, "XXYYZZ...")),
              (account.calendar, "ABCXYZ..."),
          ])
          -> [("idA", "changekey"), ("idB", "changekey"), ("idC", "changekey")]
        """
        items = ((f, (None, False, d) if isinstance(d, str) else d) for f, d in data)
        return list(
            self._consume_item_service(service_cls=UploadItems, items=items, chunk_size=chunk_size, kwargs={})
        )

    def bulk_create(self, folder, items, message_disposition=SAVE_ONLY, send_meeting_invitations=SEND_TO_NONE,
                    chunk_size=None):
        """Create new items in 'folder'.

        :param folder: the folder to create the items in
        :param items: an iterable of Item objects
        :param message_disposition: only applicable to Message items. Possible values are specified in
            MESSAGE_DISPOSITION_CHOICES (Default value = SAVE_ONLY)
        :param send_meeting_invitations: only applicable to CalendarItem items. Possible values are specified in
            SEND_MEETING_INVITATIONS_CHOICES (Default value = SEND_TO_NONE)
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: a list of either BulkCreateResult or exception instances in the same order as the input. The returned
          BulkCreateResult objects are normal Item objects except they only contain the 'id' and 'changekey'
          of the created item, and the 'id' of any attachments that were also created.
        """
        if isinstance(items, QuerySet):
            # bulk_create() on a queryset does not make sense because it returns items that have already been created
            raise ValueError('Cannot bulk create items from a QuerySet')
        log.debug(
            'Adding items for %s (folder %s, message_disposition: %s, send_meeting_invitations: %s)',
            self,
            folder,
            message_disposition,
            send_meeting_invitations,
        )
        return list(self._consume_item_service(service_cls=CreateItem, items=items, chunk_size=chunk_size, kwargs=dict(
            folder=folder,
            message_disposition=message_disposition,
            send_meeting_invitations=send_meeting_invitations,
        )))

    def bulk_update(self, items, conflict_resolution=AUTO_RESOLVE, message_disposition=SAVE_ONLY,
                    send_meeting_invitations_or_cancellations=SEND_TO_NONE, suppress_read_receipts=True,
                    chunk_size=None):
        """Bulk update existing items.

        :param items: a list of (Item, fieldnames) tuples, where 'Item' is an Item object, and 'fieldnames' is a list
            containing the attributes on this Item object that we want to be updated.
        :param conflict_resolution: Possible values are specified in CONFLICT_RESOLUTION_CHOICES
            (Default value = AUTO_RESOLVE)
        :param message_disposition: only applicable to Message items. Possible values are specified in
            MESSAGE_DISPOSITION_CHOICES (Default value = SAVE_ONLY)
        :param send_meeting_invitations_or_cancellations: only applicable to CalendarItem items. Possible values are
            specified in SEND_MEETING_INVITATIONS_AND_CANCELLATIONS_CHOICES (Default value = SEND_TO_NONE)
        :param suppress_read_receipts: only supported from Exchange 2013. True or False (Default value = True)
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: a list of either (id, changekey) tuples or exception instances, in the same order as the input
        """
        # bulk_update() on a queryset does not make sense because there would be no opportunity to alter the items. In
        # fact, it could be dangerous if the queryset contains an '.only()'. This would wipe out certain fields
        # entirely.
        if isinstance(items, QuerySet):
            raise ValueError('Cannot bulk update on a queryset')
        log.debug(
            'Updating items for %s (conflict_resolution %s, message_disposition: %s, send_meeting_invitations: %s)',
            self,
            conflict_resolution,
            message_disposition,
            send_meeting_invitations_or_cancellations,
        )
        return list(self._consume_item_service(service_cls=UpdateItem, items=items, chunk_size=chunk_size, kwargs=dict(
            conflict_resolution=conflict_resolution,
            message_disposition=message_disposition,
            send_meeting_invitations_or_cancellations=send_meeting_invitations_or_cancellations,
            suppress_read_receipts=suppress_read_receipts,
        )))

    def bulk_delete(self, ids, delete_type=HARD_DELETE, send_meeting_cancellations=SEND_TO_NONE,
                    affected_task_occurrences=ALL_OCCURRENCIES, suppress_read_receipts=True, chunk_size=None):
        """Bulk delete items.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param delete_type: the type of delete to perform. Possible values are specified in DELETE_TYPE_CHOICES
            (Default value = HARD_DELETE)
        :param send_meeting_cancellations: only applicable to CalendarItem. Possible values are specified in
            SEND_MEETING_CANCELLATIONS_CHOICES. (Default value = SEND_TO_NONE)
        :param affected_task_occurrences: only applicable for recurring Task items. Possible values are specified in
            AFFECTED_TASK_OCCURRENCES_CHOICES. (Default value = ALL_OCCURRENCIES)
        :param suppress_read_receipts: only supported from Exchange 2013. True or False. (Default value = True)
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: a list of either True or exception instances, in the same order as the input
        """
        log.debug(
            'Deleting items for %s (delete_type: %s, send_meeting_cancellations: %s, affected_task_occurrences: %s)',
            self,
            delete_type,
            send_meeting_cancellations,
            affected_task_occurrences,
        )
        return list(
            self._consume_item_service(service_cls=DeleteItem, items=ids, chunk_size=chunk_size, kwargs=dict(
                delete_type=delete_type,
                send_meeting_cancellations=send_meeting_cancellations,
                affected_task_occurrences=affected_task_occurrences,
                suppress_read_receipts=suppress_read_receipts,
            ))
        )

    def bulk_send(self, ids, save_copy=True, copy_to_folder=None, chunk_size=None):
        """Send existing draft messages. If requested, save a copy in 'copy_to_folder'.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param save_copy: If true, saves a copy of the message (Default value = True)
        :param copy_to_folder: If requested, save a copy of the message in this folder. Default is the Sent folder
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: Status for each send operation, in the same order as the input
        """
        if copy_to_folder and not save_copy:
            raise AttributeError("'save_copy' must be True when 'copy_to_folder' is set")
        if save_copy and not copy_to_folder:
            copy_to_folder = self.sent  # 'Sent' is default EWS behaviour
        return list(
            self._consume_item_service(service_cls=SendItem, items=ids, chunk_size=chunk_size, kwargs=dict(
                saved_item_folder=copy_to_folder,
            ))
        )

    def bulk_copy(self, ids, to_folder, chunk_size=None):
        """Copy items to another folder.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param to_folder: The destination folder of the copy operation
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: Status for each send operation, in the same order as the input
        """
        return list(self._consume_item_service(service_cls=CopyItem, items=ids, chunk_size=chunk_size, kwargs=dict(
            to_folder=to_folder,
        )))

    def bulk_move(self, ids, to_folder, chunk_size=None):
        """Move items to another folder.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param to_folder: The destination folder of the copy operation
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: The new IDs of the moved items, in the same order as the input. If 'to_folder' is a public folder or a
          folder in a different mailbox, an empty list is returned.
        """
        return list(self._consume_item_service(service_cls=MoveItem, items=ids, chunk_size=chunk_size, kwargs=dict(
            to_folder=to_folder,
        )))

    def bulk_archive(self, ids, to_folder, chunk_size=None):
        """Archive items to a folder in the archive mailbox. An archive mailbox must be enabled in order for this
        to work.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param to_folder: The destination folder of the archive operation
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: A list containing True or an exception instance in stable order of the requested items
        """
        return list(self._consume_item_service(service_cls=ArchiveItem, items=ids, chunk_size=chunk_size, kwargs=dict(
                to_folder=to_folder,
            ))
        )

    def bulk_mark_as_junk(self, ids, is_junk, move_item, chunk_size=None):
        """Mark or un-mark message items as junk email and add or remove the sender from the blocked sender list.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param is_junk: Whether the messages are junk or not
        :param move_item: Whether to move the messages to the junk folder or not
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: A list containing the new IDs of the moved items, if items were moved, or True, or an exception
          instance, in stable order of the requested items.
        """
        return list(self._consume_item_service(service_cls=MarkAsJunk, items=ids, chunk_size=chunk_size, kwargs=dict(
            is_junk=is_junk,
            move_item=move_item,
        )))

    def fetch(self, ids, folder=None, only_fields=None, chunk_size=None):
        """Fetch items by ID.

        :param ids: an iterable of either (id, changekey) tuples or Item objects.
        :param folder: used for validating 'only_fields' (Default value = None)
        :param only_fields: A list of string or FieldPath items specifying the fields to fetch. Default to all fields
        :param chunk_size: The number of items to send to the server in a single request (Default value = None)

        :return: A generator of Item objects, in the same order as the input
        """
        validation_folder = folder or Folder(root=self.root)  # Default to a folder type that supports all item types
        # 'ids' could be an unevaluated QuerySet, e.g. if we ended up here via `fetch(ids=some_folder.filter(...))`. In
        # that case, we want to use its iterator. Otherwise, peek() will start a count() which is wasteful because we
        # need the item IDs immediately afterwards. iterator() will only do the bare minimum.
        if only_fields is None:
            # We didn't restrict list of field paths. Get all fields from the server, including extended properties.
            additional_fields = {
                FieldPath(field=f) for f in validation_folder.allowed_item_fields(version=self.version)
            }
        else:
            for field in only_fields:
                validation_folder.validate_item_field(field=field, version=self.version)
            # Remove ItemId and ChangeKey. We get them unconditionally
            additional_fields = {f for f in validation_folder.normalize_fields(fields=only_fields)
                                 if not f.field.is_attribute}
        # Always use IdOnly here, because AllProperties doesn't actually get *all* properties
        yield from self._consume_item_service(service_cls=GetItem, items=ids, chunk_size=chunk_size, kwargs=dict(
                additional_fields=additional_fields,
                shape=ID_ONLY,
        ))

    def fetch_personas(self, ids):
        """Fetch personas by ID.

        :param ids: an iterable of either (id, changekey) tuples or Persona objects.
        :return: A generator of Persona objects, in the same order as the input
        """
        if isinstance(ids, QuerySet):
            # We just want an iterator over the results
            ids = iter(ids)
        is_empty, ids = peek(ids)
        if is_empty:
            # We accept generators, so it's not always convenient for caller to know up-front if 'ids' is empty. Allow
            # empty 'ids' and return early.
            return
        # GetPersona only accepts one persona ID per request. Crazy.
        svc = GetPersona(account=self)
        for i in ids:
            yield svc.call(persona=i)

    @property
    def mail_tips(self):
        """See self.oof_settings about caching considerations."""
        # mail_tips_requested must be one of properties.MAIL_TIPS_TYPES
        return GetMailTips(protocol=self.protocol).get(
            sending_as=SendingAs(email_address=self.primary_smtp_address),
            recipients=[Mailbox(email_address=self.primary_smtp_address)],
            mail_tips_requested='All',
        )

    @property
    def delegates(self):
        """Return a list of DelegateUser objects representing the delegates that are set on this account."""
        delegates = []
        for d in GetDelegate(account=self).call(user_ids=None, include_permissions=True):
            if isinstance(d, Exception):
                raise d
            delegates.append(d)
        return delegates

    def __str__(self):
        txt = '%s' % self.primary_smtp_address
        if self.fullname:
            txt += ' (%s)' % self.fullname
        return txt

Instance variables

var admin_audit_logs
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_deleted_items
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_inbox
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_msg_folder_root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_recoverable_items_deletions
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_recoverable_items_purges
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_recoverable_items_root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_recoverable_items_versions
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var archive_root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var calendar
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var conflicts
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var contacts
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var conversation_history
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var delegates

Return a list of DelegateUser objects representing the delegates that are set on this account.

Expand source code
@property
def delegates(self):
    """Return a list of DelegateUser objects representing the delegates that are set on this account."""
    delegates = []
    for d in GetDelegate(account=self).call(user_ids=None, include_permissions=True):
        if isinstance(d, Exception):
            raise d
        delegates.append(d)
    return delegates
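
For example (each access performs a GetDelegate service call, and any error in the response is raised):

for delegate in account.delegates:
    print(delegate)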
var directory
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var domain
Expand source code
@property
def domain(self):
    return get_domain(self.primary_smtp_address)
var drafts
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var favorites
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var im_contact_list
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var inbox
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var journal
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var junk
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var local_failures
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var mail_tips

See self.oof_settings about caching considerations.

Expand source code
@property
def mail_tips(self):
    """See self.oof_settings about caching considerations."""
    # mail_tips_requested must be one of properties.MAIL_TIPS_TYPES
    return GetMailTips(protocol=self.protocol).get(
        sending_as=SendingAs(email_address=self.primary_smtp_address),
        recipients=[Mailbox(email_address=self.primary_smtp_address)],
        mail_tips_requested='All',
    )
var msg_folder_root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var my_contacts
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var notes
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var oof_settings
Expand source code
@property
def oof_settings(self):
    # We don't want to cache this property because then we can't easily get updates. 'threaded_cached_property'
    # supports the 'del self.oof_settings' syntax to invalidate the cache, but does not support custom setter
    # methods. Having a non-cached service call here goes against the assumption that properties are cheap, but the
    # alternative is to create get_oof_settings() and set_oof_settings(), and that's just too Java-ish for my taste.
    return GetUserOofSettings(account=self).get(
        mailbox=Mailbox(email_address=self.primary_smtp_address),
    )
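
A sketch of reading and scheduling out-of-office replies; the OofSettings field names and the SCHEDULED constant below are assumptions about that model:

from exchangelib import EWSDateTime, OofSettings, UTC

print(account.oof_settings.state)    # triggers a GetUserOofSettings call

account.oof_settings = OofSettings(  # triggers a SetUserOofSettings call
    state=OofSettings.SCHEDULED,     # assumed constant on OofSettings
    start=EWSDateTime(2021, 12, 24, tzinfo=UTC),
    end=EWSDateTime(2022, 1, 2, tzinfo=UTC),
    internal_reply='Out of office until January',
    external_reply='Out of office until January',
)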
var outbox
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var people_connect
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var primary_smtp_address
Expand source code
@property
def primary_smtp_address(self):
    return self.identity.primary_smtp_address
var public_folders_root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var quick_contacts
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var recipient_cache
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var recoverable_items_deletions
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var recoverable_items_purges
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var recoverable_items_root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var recoverable_items_versions
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var root
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var search_folders
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var sent
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var server_failures
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var sync_issues
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var tasks
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var todo_search
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var trash
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var voice_mail
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))

Methods

def bulk_archive(self, ids, to_folder, chunk_size=None)

Archive items to a folder in the archive mailbox. An archive mailbox must be enabled in order for this to work.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param to_folder: The destination folder of the archive operation
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: A list containing True or an exception instance in stable order of the requested items
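
A sketch, assuming the account has an archive mailbox enabled:

ids = account.inbox.all().only('id', 'changekey')
results = account.bulk_archive(ids=ids, to_folder=account.archive_inbox)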

Expand source code
def bulk_archive(self, ids, to_folder, chunk_size=None):
    """Archive items to a folder in the archive mailbox. An archive mailbox must be enabled in order for this
    to work.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param to_folder: The destination folder of the archive operation
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: A list containing True or an exception instance in stable order of the requested items
    """
    return list(self._consume_item_service(service_cls=ArchiveItem, items=ids, chunk_size=chunk_size, kwargs=dict(
            to_folder=to_folder,
        ))
    )
def bulk_copy(self, ids, to_folder, chunk_size=None)

Copy items to another folder.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param to_folder: The destination folder of the copy operation
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: Status for each send operation, in the same order as the input
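
A sketch; 'Reports' is a placeholder subfolder name:

to_folder = account.inbox / 'Reports'
new_ids = account.bulk_copy(ids=account.drafts.all(), to_folder=to_folder)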

Expand source code
def bulk_copy(self, ids, to_folder, chunk_size=None):
    """Copy items to another folder.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param to_folder: The destination folder of the copy operation
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: Status for each send operation, in the same order as the input
    """
    return list(self._consume_item_service(service_cls=CopyItem, items=ids, chunk_size=chunk_size, kwargs=dict(
        to_folder=to_folder,
    )))
def bulk_create(self, folder, items, message_disposition='SaveOnly', send_meeting_invitations='SendToNone', chunk_size=None)

Create new items in 'folder'.

:param folder: the folder to create the items in
:param items: an iterable of Item objects
:param message_disposition: only applicable to Message items. Possible values are specified in MESSAGE_DISPOSITION_CHOICES (Default value = SAVE_ONLY)
:param send_meeting_invitations: only applicable to CalendarItem items. Possible values are specified in SEND_MEETING_INVITATIONS_CHOICES (Default value = SEND_TO_NONE)
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: a list of either BulkCreateResult or exception instances in the same order as the input. The returned BulkCreateResult objects are normal Item objects except they only contain the 'id' and 'changekey' of the created item, and the 'id' of any attachments that were also created.

Expand source code
def bulk_create(self, folder, items, message_disposition=SAVE_ONLY, send_meeting_invitations=SEND_TO_NONE,
                chunk_size=None):
    """Create new items in 'folder'.

    :param folder: the folder to create the items in
    :param items: an iterable of Item objects
    :param message_disposition: only applicable to Message items. Possible values are specified in
        MESSAGE_DISPOSITION_CHOICES (Default value = SAVE_ONLY)
    :param send_meeting_invitations: only applicable to CalendarItem items. Possible values are specified in
        SEND_MEETING_INVITATIONS_CHOICES (Default value = SEND_TO_NONE)
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: a list of either BulkCreateResult or exception instances in the same order as the input. The returned
      BulkCreateResult objects are normal Item objects except they only contain the 'id' and 'changekey'
      of the created item, and the 'id' of any attachments that were also created.
    """
    if isinstance(items, QuerySet):
        # bulk_create() on a queryset does not make sense because it returns items that have already been created
        raise ValueError('Cannot bulk create items from a QuerySet')
    log.debug(
        'Adding items for %s (folder %s, message_disposition: %s, send_meeting_invitations: %s)',
        self,
        folder,
        message_disposition,
        send_meeting_invitations,
    )
    return list(self._consume_item_service(service_cls=CreateItem, items=items, chunk_size=chunk_size, kwargs=dict(
        folder=folder,
        message_disposition=message_disposition,
        send_meeting_invitations=send_meeting_invitations,
    )))
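A usage sketch, assuming an authenticated Account named account; the meeting subjects and dates are placeholders:

    from exchangelib import CalendarItem, EWSDateTime, EWSTimeZone

    # 'account' is an authenticated exchangelib Account instance
    tz = EWSTimeZone('Europe/Copenhagen')
    items = [
        CalendarItem(
            folder=account.calendar,
            subject='Planning meeting #%s' % i,
            start=EWSDateTime(2021, 9, 1, 8 + i, 0, tzinfo=tz),
            end=EWSDateTime(2021, 9, 1, 9 + i, 0, tzinfo=tz),
        )
        for i in range(3)
    ]
    results = account.bulk_create(folder=account.calendar, items=items)
    # Each result is a BulkCreateResult carrying only 'id' and 'changekey', or an exception instance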
def bulk_delete(self, ids, delete_type='HardDelete', send_meeting_cancellations='SendToNone', affected_task_occurrences='AllOccurrences', suppress_read_receipts=True, chunk_size=None)

Bulk delete items.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param delete_type: the type of delete to perform. Possible values are specified in DELETE_TYPE_CHOICES (Default value = HARD_DELETE)
:param send_meeting_cancellations: only applicable to CalendarItem. Possible values are specified in SEND_MEETING_CANCELLATIONS_CHOICES. (Default value = SEND_TO_NONE)
:param affected_task_occurrences: only applicable for recurring Task items. Possible values are specified in AFFECTED_TASK_OCCURRENCES_CHOICES. (Default value = ALL_OCCURRENCIES)
:param suppress_read_receipts: only supported from Exchange 2013. True or False. (Default value = True)
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: a list of either True or exception instances, in the same order as the input

Expand source code
def bulk_delete(self, ids, delete_type=HARD_DELETE, send_meeting_cancellations=SEND_TO_NONE,
                affected_task_occurrences=ALL_OCCURRENCIES, suppress_read_receipts=True, chunk_size=None):
    """Bulk delete items.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param delete_type: the type of delete to perform. Possible values are specified in DELETE_TYPE_CHOICES
        (Default value = HARD_DELETE)
    :param send_meeting_cancellations: only applicable to CalendarItem. Possible values are specified in
        SEND_MEETING_CANCELLATIONS_CHOICES. (Default value = SEND_TO_NONE)
    :param affected_task_occurrences: only applicable for recurring Task items. Possible values are specified in
        AFFECTED_TASK_OCCURRENCES_CHOICES. (Default value = ALL_OCCURRENCIES)
    :param suppress_read_receipts: only supported from Exchange 2013. True or False. (Default value = True)
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: a list of either True or exception instances, in the same order as the input
    """
    log.debug(
        'Deleting items for %s (delete_type: %s, send_meeting_cancellations: %s, affected_task_occurrences: %s)',
        self,
        delete_type,
        send_meeting_cancellations,
        affected_task_occurrences,
    )
    return list(
        self._consume_item_service(service_cls=DeleteItem, items=ids, chunk_size=chunk_size, kwargs=dict(
            delete_type=delete_type,
            send_meeting_cancellations=send_meeting_cancellations,
            affected_task_occurrences=affected_task_occurrences,
            suppress_read_receipts=suppress_read_receipts,
        ))
    )
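A usage sketch, assuming an authenticated Account named account; the cutoff date is arbitrary:

    from exchangelib import EWSDateTime, UTC

    # 'account' is an authenticated exchangelib Account instance
    cutoff = EWSDateTime(2020, 1, 1, tzinfo=UTC)
    old_items = account.trash.filter(datetime_received__lt=cutoff).only('id', 'changekey')
    results = account.bulk_delete(ids=[(i.id, i.changekey) for i in old_items])
    # Pass e.g. delete_type='MoveToDeletedItems' for a soft delete instead of the default hard delete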
def bulk_mark_as_junk(self, ids, is_junk, move_item, chunk_size=None)

Mark or un-mark message items as junk email and add or remove the sender from the blocked sender list.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param is_junk: Whether the messages are junk or not
:param move_item: Whether to move the messages to the junk folder or not
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: A list containing the new IDs of the moved items, if items were moved, or True, or an exception instance, in stable order of the requested items.

Expand source code
def bulk_mark_as_junk(self, ids, is_junk, move_item, chunk_size=None):
    """Mark or un-mark message items as junk email and add or remove the sender from the blocked sender list.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param is_junk: Whether the messages are junk or not
    :param move_item: Whether to move the messages to the junk folder or not
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: A list containing the new IDs of the moved items, if items were moved, or True, or an exception
      instance, in stable order of the requested items.
    """
    return list(self._consume_item_service(service_cls=MarkAsJunk, items=ids, chunk_size=chunk_size, kwargs=dict(
        is_junk=is_junk,
        move_item=move_item,
    )))
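A usage sketch, assuming an authenticated Account named account; the filter is just an example:

    # 'account' is an authenticated exchangelib Account instance
    spam = account.inbox.filter(subject__contains='lottery').only('id', 'changekey')
    results = account.bulk_mark_as_junk(
        ids=[(m.id, m.changekey) for m in spam],
        is_junk=True,    # add the senders to the blocked sender list
        move_item=True,  # also move the messages to the Junk Email folder
    )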
def bulk_move(self, ids, to_folder, chunk_size=None)

Move items to another folder.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param to_folder: The destination folder of the move operation
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: The new IDs of the moved items, in the same order as the input. If 'to_folder' is a public folder or a folder in a different mailbox, an empty list is returned.

Expand source code
def bulk_move(self, ids, to_folder, chunk_size=None):
    """Move items to another folder.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param to_folder: The destination folder of the move operation
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: The new IDs of the moved items, in the same order as the input. If 'to_folder' is a public folder or a
      folder in a different mailbox, an empty list is returned.
    """
    return list(self._consume_item_service(service_cls=MoveItem, items=ids, chunk_size=chunk_size, kwargs=dict(
        to_folder=to_folder,
    )))
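A usage sketch, assuming an authenticated Account named account and an existing 'Processed' subfolder of the inbox (both assumptions):

    # 'account' is an authenticated exchangelib Account instance
    processed = account.inbox / 'Processed'  # assumed to exist
    ids = [(m.id, m.changekey) for m in account.inbox.filter(is_read=True).only('id', 'changekey')]
    new_ids = account.bulk_move(ids=ids, to_folder=processed)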
def bulk_send(self, ids, save_copy=True, copy_to_folder=None, chunk_size=None)

Send existing draft messages. If requested, save a copy in 'copy_to_folder'.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param save_copy: If true, saves a copy of the message (Default value = True)
:param copy_to_folder: If requested, save a copy of the message in this folder. Default is the Sent folder
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: Status for each send operation, in the same order as the input

Expand source code
def bulk_send(self, ids, save_copy=True, copy_to_folder=None, chunk_size=None):
    """Send existing draft messages. If requested, save a copy in 'copy_to_folder'.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param save_copy: If true, saves a copy of the message (Default value = True)
    :param copy_to_folder: If requested, save a copy of the message in this folder. Default is the Sent folder
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: Status for each send operation, in the same order as the input
    """
    if copy_to_folder and not save_copy:
        raise AttributeError("'save_copy' must be True when 'copy_to_folder' is set")
    if save_copy and not copy_to_folder:
        copy_to_folder = self.sent  # 'Sent' is default EWS behaviour
    return list(
        self._consume_item_service(service_cls=SendItem, items=ids, chunk_size=chunk_size, kwargs=dict(
            saved_item_folder=copy_to_folder,
        ))
    )
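A usage sketch, assuming an authenticated Account named account with some matching messages in the Drafts folder (the subject filter is an example):

    # 'account' is an authenticated exchangelib Account instance
    drafts = account.drafts.filter(subject__startswith='Weekly report').only('id', 'changekey')
    statuses = account.bulk_send(ids=[(m.id, m.changekey) for m in drafts])
    # By default, a copy of each sent message is saved in the Sent folder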
def bulk_update(self, items, conflict_resolution='AutoResolve', message_disposition='SaveOnly', send_meeting_invitations_or_cancellations='SendToNone', suppress_read_receipts=True, chunk_size=None)

Bulk update existing items.

:param items: a list of (Item, fieldnames) tuples, where 'Item' is an Item object, and 'fieldnames' is a list containing the attributes on this Item object that we want to be updated.
:param conflict_resolution: Possible values are specified in CONFLICT_RESOLUTION_CHOICES (Default value = AUTO_RESOLVE)
:param message_disposition: only applicable to Message items. Possible values are specified in MESSAGE_DISPOSITION_CHOICES (Default value = SAVE_ONLY)
:param send_meeting_invitations_or_cancellations: only applicable to CalendarItem items. Possible values are specified in SEND_MEETING_INVITATIONS_AND_CANCELLATIONS_CHOICES (Default value = SEND_TO_NONE)
:param suppress_read_receipts: only supported from Exchange 2013. True or False (Default value = True)
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: a list of either (id, changekey) tuples or exception instances, in the same order as the input

Expand source code
def bulk_update(self, items, conflict_resolution=AUTO_RESOLVE, message_disposition=SAVE_ONLY,
                send_meeting_invitations_or_cancellations=SEND_TO_NONE, suppress_read_receipts=True,
                chunk_size=None):
    """Bulk update existing items.

    :param items: a list of (Item, fieldnames) tuples, where 'Item' is an Item object, and 'fieldnames' is a list
        containing the attributes on this Item object that we want to be updated.
    :param conflict_resolution: Possible values are specified in CONFLICT_RESOLUTION_CHOICES
        (Default value = AUTO_RESOLVE)
    :param message_disposition: only applicable to Message items. Possible values are specified in
        MESSAGE_DISPOSITION_CHOICES (Default value = SAVE_ONLY)
    :param send_meeting_invitations_or_cancellations: only applicable to CalendarItem items. Possible values are
        specified in SEND_MEETING_INVITATIONS_AND_CANCELLATIONS_CHOICES (Default value = SEND_TO_NONE)
    :param suppress_read_receipts: only supported from Exchange 2013. True or False (Default value = True)
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: a list of either (id, changekey) tuples or exception instances, in the same order as the input
    """
    # bulk_update() on a queryset does not make sense because there would be no opportunity to alter the items. In
    # fact, it could be dangerous if the queryset contains an '.only()'. This would wipe out certain fields
    # entirely.
    if isinstance(items, QuerySet):
        raise ValueError('Cannot bulk update on a queryset')
    log.debug(
        'Updating items for %s (conflict_resolution %s, message_disposition: %s, send_meeting_invitations: %s)',
        self,
        conflict_resolution,
        message_disposition,
        send_meeting_invitations_or_cancellations,
    )
    return list(self._consume_item_service(service_cls=UpdateItem, items=items, chunk_size=chunk_size, kwargs=dict(
        conflict_resolution=conflict_resolution,
        message_disposition=message_disposition,
        send_meeting_invitations_or_cancellations=send_meeting_invitations_or_cancellations,
        suppress_read_receipts=suppress_read_receipts,
    )))
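A usage sketch, assuming an authenticated Account named account; the filter and category name are examples:

    # 'account' is an authenticated exchangelib Account instance
    updates = []
    for item in account.inbox.filter(subject__contains='invoice'):
        item.categories = ['Processed']
        updates.append((item, ['categories']))  # only the 'categories' field will be written back
    new_ids = account.bulk_update(items=updates)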
def export(self, items, chunk_size=None)

Return export strings of the given items.

:param items: An iterable containing the Items we want to export
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: A list of strings, the exported representation of the object

Expand source code
def export(self, items, chunk_size=None):
    """Return export strings of the given items.

    :param items: An iterable containing the Items we want to export
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: A list of strings, the exported representation of the object
    """
    return list(
        self._consume_item_service(service_cls=ExportItems, items=items, chunk_size=chunk_size, kwargs={})
    )
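A usage sketch, assuming an authenticated Account named account:

    # 'account' is an authenticated exchangelib Account instance
    items = list(account.inbox.all()[:10])
    blobs = account.export(items)  # opaque strings that can later be passed to Account.upload()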
def fetch(self, ids, folder=None, only_fields=None, chunk_size=None)

Fetch items by ID.

:param ids: an iterable of either (id, changekey) tuples or Item objects.
:param folder: used for validating 'only_fields' (Default value = None)
:param only_fields: A list of string or FieldPath items specifying the fields to fetch. Defaults to all fields
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: A generator of Item objects, in the same order as the input

Expand source code
def fetch(self, ids, folder=None, only_fields=None, chunk_size=None):
    """Fetch items by ID.

    :param ids: an iterable of either (id, changekey) tuples or Item objects.
    :param folder: used for validating 'only_fields' (Default value = None)
    :param only_fields: A list of string or FieldPath items specifying the fields to fetch. Defaults to all fields
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: A generator of Item objects, in the same order as the input
    """
    validation_folder = folder or Folder(root=self.root)  # Default to a folder type that supports all item types
    # 'ids' could be an unevaluated QuerySet, e.g. if we ended up here via `fetch(ids=some_folder.filter(...))`. In
    # that case, we want to use its iterator. Otherwise, peek() will start a count() which is wasteful because we
    # need the item IDs immediately afterwards. iterator() will only do the bare minimum.
    if only_fields is None:
        # We didn't restrict list of field paths. Get all fields from the server, including extended properties.
        additional_fields = {
            FieldPath(field=f) for f in validation_folder.allowed_item_fields(version=self.version)
        }
    else:
        for field in only_fields:
            validation_folder.validate_item_field(field=field, version=self.version)
        # Remove ItemId and ChangeKey. We get them unconditionally
        additional_fields = {f for f in validation_folder.normalize_fields(fields=only_fields)
                             if not f.field.is_attribute}
    # Always use IdOnly here, because AllProperties doesn't actually get *all* properties
    yield from self._consume_item_service(service_cls=GetItem, items=ids, chunk_size=chunk_size, kwargs=dict(
            additional_fields=additional_fields,
            shape=ID_ONLY,
    ))
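A usage sketch, assuming an authenticated Account named account; the field selection is just an example:

    # 'account' is an authenticated exchangelib Account instance
    ids = [(m.id, m.changekey) for m in account.inbox.all().only('id', 'changekey')[:20]]
    for item in account.fetch(ids=ids, only_fields=['subject', 'datetime_received']):
        print(item.subject, item.datetime_received)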
def fetch_personas(self, ids)

Fetch personas by ID.

:param ids: an iterable of either (id, changekey) tuples or Persona objects.
:return: A generator of Persona objects, in the same order as the input

Expand source code
def fetch_personas(self, ids):
    """Fetch personas by ID.

    :param ids: an iterable of either (id, changekey) tuples or Persona objects.
    :return: A generator of Persona objects, in the same order as the input
    """
    if isinstance(ids, QuerySet):
        # We just want an iterator over the results
        ids = iter(ids)
    is_empty, ids = peek(ids)
    if is_empty:
        # We accept generators, so it's not always convenient for caller to know up-front if 'ids' is empty. Allow
        # empty 'ids' and return early.
        return
    # GetPersona only accepts one persona ID per request. Crazy.
    svc = GetPersona(account=self)
    for i in ids:
        yield svc.call(persona=i)
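A usage sketch, assuming an authenticated Account named account and that personas were found via a FindPeople search on the contacts folder:

    # 'account' is an authenticated exchangelib Account instance
    persona_ids = [(p.id, p.changekey) for p in account.contacts.people()]
    for persona in account.fetch_personas(persona_ids):
        print(persona.display_name)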
def upload(self, data, chunk_size=None)

Upload objects retrieved from an export to the given folders.

:param data: An iterable of tuples containing the folder we want to upload the data to and the string outputs of exports. If you want to update items instead of create, the data must be a tuple of (ItemId, is_associated, data) values.
:param chunk_size: The number of items to send to the server in a single request (Default value = None)

:return: A list of tuples with the new ids and changekeys

Example:

    account.upload([
        (account.inbox, "AABBCC..."),
        (account.inbox, (ItemId('AA', 'BB'), False, "XXYYZZ...")),
        (account.inbox, (('CC', 'DD'), None, "XXYYZZ...")),
        (account.calendar, "ABCXYZ..."),
    ])
    -> [("idA", "changekey"), ("idB", "changekey"), ("idC", "changekey")]

Expand source code
def upload(self, data, chunk_size=None):
    """Upload objects retrieved from an export to the given folders.

    :param data: An iterable of tuples containing the folder we want to upload the data to and the string outputs of
        exports. If you want to update items instead of create, the data must be a tuple of
        (ItemId, is_associated, data) values.
    :param chunk_size: The number of items to send to the server in a single request (Default value = None)

    :return: A list of tuples with the new ids and changekeys

      Example:
      account.upload([
          (account.inbox, "AABBCC..."),
          (account.inbox, (ItemId('AA', 'BB'), False, "XXYYZZ...")),
          (account.inbox, (('CC', 'DD'), None, "XXYYZZ...")),
          (account.calendar, "ABCXYZ..."),
      ])
      -> [("idA", "changekey"), ("idB", "changekey"), ("idC", "changekey")]
    """
    items = ((f, (None, False, d) if isinstance(d, str) else d) for f, d in data)
    return list(
        self._consume_item_service(service_cls=UploadItems, items=items, chunk_size=chunk_size, kwargs={})
    )
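A round-trip sketch combining export() and upload(), assuming an authenticated Account named account; the 'Restored' subfolder is assumed to exist:

    # 'account' is an authenticated exchangelib Account instance
    items = list(account.inbox.all()[:5])
    blobs = account.export(items)
    # Re-create the exported items in another folder; a plain string means "create new"
    restore_folder = account.inbox / 'Restored'  # assumed to exist
    new_ids = account.upload([(restore_folder, blob) for blob in blobs])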
class Attendee (**kwargs)
Expand source code
class Attendee(EWSElement):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/attendee"""

    ELEMENT_NAME = 'Attendee'
    RESPONSE_TYPES = {'Unknown', 'Organizer', 'Tentative', 'Accept', 'Decline', 'NoResponseReceived'}

    mailbox = MailboxField(is_required=True)
    response_type = ChoiceField(field_uri='ResponseType', choices={Choice(c) for c in RESPONSE_TYPES},
                                default='Unknown')
    last_response_time = DateTimeField(field_uri='LastResponseTime')

    def __hash__(self):
        return hash(self.mailbox)

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS
var RESPONSE_TYPES

Instance variables

var last_response_time
var mailbox
var response_type

Inherited members

class BaseProtocol (config)

Base class for Protocol which implements the bare essentials.

Expand source code
class BaseProtocol:
    """Base class for Protocol which implements the bare essentials."""

    # The maximum number of sessions (== TCP connections, see below) we will open to this service endpoint. Keep this
    # low unless you have an agreement with the Exchange admin on the receiving end to hammer the server and
    # rate-limiting policies have been disabled for the connecting user. Changing this setting only makes sense if
    # you are using a thread pool to run multiple concurrent workers in this process.
    SESSION_POOLSIZE = 1
    # We want only 1 TCP connection per Session object. We may have lots of different credentials hitting the server and
    # each credential needs its own session (NTLM auth will only send credentials once and then secure the connection,
    # so a connection can only handle requests for one credential). Having multiple connections per Session could
    # quickly exhaust the maximum number of concurrent connections the Exchange server allows from one client.
    CONNECTIONS_PER_SESSION = 1
    # The number of times a session may be reused before creating a new session object. 'None' means "infinite".
    # Discarding sessions after a certain number of usages may limit memory leaks in the Session object.
    MAX_SESSION_USAGE_COUNT = None
    # Timeout for HTTP requests
    TIMEOUT = 120

    # The adapter class to use for HTTP requests. Override this if you need e.g. proxy support or specific TLS versions
    HTTP_ADAPTER_CLS = requests.adapters.HTTPAdapter

    # The User-Agent header to use for HTTP requests. Override this to set an app-specific one
    USERAGENT = None

    def __init__(self, config):
        from .configuration import Configuration
        if not isinstance(config, Configuration):
            raise ValueError("'config' %r must be a Configuration instance" % config)
        if not config.service_endpoint:
            raise AttributeError("'config.service_endpoint' must be set")
        self.config = config
        self._session_pool_size = 0
        self._session_pool_maxsize = config.max_connections or self.SESSION_POOLSIZE

        # Try to behave nicely with the remote server. We want to keep the connection open between requests.
        # We also want to re-use sessions, to avoid the NTLM auth handshake on every request. We must know the
        # authentication method to create sessions.
        self._session_pool = LifoQueue()
        self._session_pool_lock = Lock()

    @property
    def service_endpoint(self):
        return self.config.service_endpoint

    @property
    def auth_type(self):
        return self.config.auth_type

    @property
    def credentials(self):
        return self.config.credentials

    @credentials.setter
    def credentials(self, value):
        # We are updating credentials, but that doesn't automatically propagate to the session objects. The simplest
        # solution is to just kill the sessions in the pool.
        with self._session_pool_lock:
            self.config._credentials = value
            self.close()

    @property
    def retry_policy(self):
        return self.config.retry_policy

    @property
    def server(self):
        return self.config.server

    def __getstate__(self):
        # The session pool and lock cannot be pickled
        state = self.__dict__.copy()
        del state['_session_pool']
        del state['_session_pool_lock']
        return state

    def __setstate__(self, state):
        # Restore the session pool and lock
        self.__dict__.update(state)
        self._session_pool = LifoQueue()
        self._session_pool_lock = Lock()

    def __del__(self):
        # pylint: disable=bare-except
        try:
            self.close()
        except Exception:  # nosec
            # __del__ should never fail
            pass

    def close(self):
        log.debug('Server %s: Closing sessions', self.server)
        while True:
            try:
                session = self._session_pool.get(block=False)
                self.close_session(session)
                self._session_pool_size -= 1
            except Empty:
                break

    @classmethod
    def get_adapter(cls):
        # We want just one connection per session. No retries, since we wrap all requests in our own retry handler
        return cls.HTTP_ADAPTER_CLS(
            pool_block=True,
            pool_connections=cls.CONNECTIONS_PER_SESSION,
            pool_maxsize=cls.CONNECTIONS_PER_SESSION,
            max_retries=0,
        )

    @property
    def session_pool_size(self):
        return self._session_pool_size

    def increase_poolsize(self):
        """Increases the session pool size. We increase by one session per call."""
        # Create a single session and insert it into the pool. We need to protect this with a lock while we are changing
        # the pool size variable, to avoid race conditions. We must not exceed the pool size limit.
        if self._session_pool_size == self._session_pool_maxsize:
            raise SessionPoolMaxSizeReached('Session pool size cannot be increased further')
        with self._session_pool_lock:
            if self._session_pool_size >= self._session_pool_maxsize:
                log.debug('Session pool size was increased in another thread')
                return
            log.debug('Server %s: Increasing session pool size from %s to %s', self.server, self._session_pool_size,
                      self._session_pool_size + 1)
            self._session_pool.put(self.create_session(), block=False)
            self._session_pool_size += 1

    def decrease_poolsize(self):
        """Decreases the session pool size in response to error messages from the server requesting to rate-limit
        requests. We decrease by one session per call.
        """
        # Take a single session from the pool and discard it. We need to protect this with a lock while we are changing
        # the pool size variable, to avoid race conditions. We must keep at least one session in the pool.
        if self._session_pool_size <= 1:
            raise SessionPoolMinSizeReached('Session pool size cannot be decreased further')
        with self._session_pool_lock:
            if self._session_pool_size <= 1:
                log.debug('Session pool size was decreased in another thread')
                return
            log.warning('Server %s: Decreasing session pool size from %s to %s', self.server, self._session_pool_size,
                        self._session_pool_size - 1)
            session = self.get_session()
            self.close_session(session)
            self._session_pool_size -= 1

    def get_session(self):
        # Try to get a session from the queue. If the queue is empty, try to add one more session to the queue. If the
        # queue is already at its max, wait until a session becomes available.
        _timeout = 60  # Rate-limit messages about session starvation
        try:
            session = self._session_pool.get(block=False)
            log.debug('Server %s: Got session immediately', self.server)
        except Empty:
            try:
                self.increase_poolsize()
            except SessionPoolMaxSizeReached:
                pass
            while True:
                try:
                    log.debug('Server %s: Waiting for session', self.server)
                    session = self._session_pool.get(timeout=_timeout)
                    break
                except Empty:
                    # This is normal when we have many worker threads starving for available sessions
                    log.debug('Server %s: No sessions available for %s seconds', self.server, _timeout)
        log.debug('Server %s: Got session %s', self.server, session.session_id)
        session.usage_count += 1
        return session

    def release_session(self, session):
        # This should never fail, as we don't have more sessions than the queue contains
        log.debug('Server %s: Releasing session %s', self.server, session.session_id)
        if self.MAX_SESSION_USAGE_COUNT and session.usage_count > self.MAX_SESSION_USAGE_COUNT:
            log.debug('Server %s: session %s usage exceeded limit. Discarding', self.server, session.session_id)
            session = self.renew_session(session)
        try:
            self._session_pool.put(session, block=False)
        except Full:
            log.debug('Server %s: Session pool was already full %s', self.server, session.session_id)

    @staticmethod
    def close_session(session):
        session.close()
        del session

    def retire_session(self, session):
        # The session is useless. Close it completely and place a fresh session in the pool
        log.debug('Server %s: Retiring session %s', self.server, session.session_id)
        self.close_session(session)
        self.release_session(self.create_session())

    def renew_session(self, session):
        # The session is useless. Close it completely and place a fresh session in the pool
        log.debug('Server %s: Renewing session %s', self.server, session.session_id)
        self.close_session(session)
        return self.create_session()

    def refresh_credentials(self, session):
        # Credentials need to be refreshed, probably due to an OAuth
        # access token expiring. If we've gotten here, it's because the
        # application didn't provide an OAuth client secret, so we can't
        # handle token refreshing for it.
        with self.credentials.lock:
            if self.credentials.sig() == session.credentials_sig:
                # Credentials have not been refreshed by another thread:
                # they're the same as the session was created with. If
                # this isn't the case, we can just go ahead with a new
                # session using the already-updated credentials.
                self.credentials.refresh(session=session)
        return self.renew_session(session)

    def create_session(self):
        if self.auth_type is None:
            raise ValueError('Cannot create session without knowing the auth type')
        if self.credentials is None:
            if self.auth_type in CREDENTIALS_REQUIRED:
                raise ValueError('Auth type %r requires credentials' % self.auth_type)
            session = self.raw_session(self.service_endpoint)
            session.auth = get_auth_instance(auth_type=self.auth_type)
        else:
            with self.credentials.lock:
                if isinstance(self.credentials, OAuth2Credentials):
                    session = self.create_oauth2_session()
                    # Keep track of the credentials used to create this session. If
                    # and when we need to renew credentials (for example, refreshing
                    # an OAuth access token), this lets us easily determine whether
                    # the credentials have already been refreshed in another thread
                    # by the time this session tries.
                    session.credentials_sig = self.credentials.sig()
                else:
                    if self.auth_type == NTLM and self.credentials.type == self.credentials.EMAIL:
                        username = '\\' + self.credentials.username
                    else:
                        username = self.credentials.username
                    session = self.raw_session(self.service_endpoint)
                    session.auth = get_auth_instance(auth_type=self.auth_type, username=username,
                                                     password=self.credentials.password)

        # Add some extra info
        session.session_id = sum(map(ord, str(os.urandom(100))))  # Used for debugging messages in services
        session.usage_count = 0
        session.protocol = self
        log.debug('Server %s: Created session %s', self.server, session.session_id)
        return session

    def create_oauth2_session(self):
        if self.auth_type != OAUTH2:
            raise ValueError(
                'Auth type must be %r for credentials type %s' % (OAUTH2, self.credentials.__class__.__name__)
            )

        has_token = False
        scope = ['https://outlook.office365.com/.default']
        session_params = {}
        token_params = {}

        if isinstance(self.credentials, OAuth2AuthorizationCodeCredentials):
            # Ask for a refresh token
            scope.append('offline_access')

            # We don't know (or need) the Microsoft tenant ID. Use
            # common/ to let Microsoft select the appropriate tenant
            # for the provided authorization code or refresh token.
            #
            # Suppress looks-like-password warning from Bandit.
            token_url = 'https://login.microsoftonline.com/common/oauth2/v2.0/token'  # nosec

            client_params = {}
            has_token = self.credentials.access_token is not None
            if has_token:
                session_params['token'] = self.credentials.access_token
            elif self.credentials.authorization_code is not None:
                token_params['code'] = self.credentials.authorization_code
                self.credentials.authorization_code = None

            if self.credentials.client_id is not None and self.credentials.client_secret is not None:
                # If we're given a client ID and secret, we have enough
                # to refresh access tokens ourselves. In other cases the
                # session will raise TokenExpiredError and we'll need to
                # ask the calling application to refresh the token (that
                # covers cases where the caller doesn't have access to
                # the client secret but is working with a service that
                # can provide it refreshed tokens on a limited basis).
                session_params.update({
                    'auto_refresh_kwargs': {
                        'client_id': self.credentials.client_id,
                        'client_secret': self.credentials.client_secret,
                    },
                    'auto_refresh_url': token_url,
                    'token_updater': self.credentials.on_token_auto_refreshed,
                })
            client = WebApplicationClient(self.credentials.client_id, **client_params)
        else:
            token_url = 'https://login.microsoftonline.com/%s/oauth2/v2.0/token' % self.credentials.tenant_id
            client = BackendApplicationClient(client_id=self.credentials.client_id)

        session = self.raw_session(self.service_endpoint, oauth2_client=client, oauth2_session_params=session_params)
        if not has_token:
            # Fetch the token explicitly -- it doesn't occur implicitly
            token = session.fetch_token(token_url=token_url, client_id=self.credentials.client_id,
                                        client_secret=self.credentials.client_secret, scope=scope,
                                        timeout=self.TIMEOUT, **token_params)
            # Allow the credentials object to update its copy of the new
            # token, and give the application an opportunity to cache it
            self.credentials.on_token_auto_refreshed(token)
        session.auth = get_auth_instance(auth_type=OAUTH2, client=client)

        return session

    @classmethod
    def raw_session(cls, prefix, oauth2_client=None, oauth2_session_params=None):
        if oauth2_client:
            session = OAuth2Session(client=oauth2_client, **(oauth2_session_params or {}))
        else:
            session = requests.sessions.Session()
        session.headers.update(DEFAULT_HEADERS)
        session.headers['User-Agent'] = cls.USERAGENT
        session.mount(prefix, adapter=cls.get_adapter())
        return session

    def __repr__(self):
        return self.__class__.__name__ + repr((self.service_endpoint, self.credentials, self.auth_type))

Subclasses

Class variables

var CONNECTIONS_PER_SESSION
var HTTP_ADAPTER_CLS

The built-in HTTP Adapter for urllib3.

Provides a general-case interface for Requests sessions to contact HTTP and HTTPS URLs by implementing the Transport Adapter interface. This class will usually be created by the Session class under the covers.

:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param max_retries: The maximum number of retries each connection should attempt. Note, this applies only to failed DNS lookups, socket connections and connection timeouts, never to requests where data has made it to the server. By default, Requests does not retry failed connections. If you need granular control over the conditions under which we retry a request, import urllib3's Retry class and pass that instead.
:param pool_block: Whether the connection pool should block for connections.

Usage:

    import requests
    s = requests.Session()
    a = requests.adapters.HTTPAdapter(max_retries=3)
    s.mount('http://', a)

var MAX_SESSION_USAGE_COUNT
var SESSION_POOLSIZE
var TIMEOUT
var USERAGENT

Static methods

def close_session(session)
Expand source code
@staticmethod
def close_session(session):
    session.close()
    del session
def get_adapter()
Expand source code
@classmethod
def get_adapter(cls):
    # We want just one connection per session. No retries, since we wrap all requests in our own retry handler
    return cls.HTTP_ADAPTER_CLS(
        pool_block=True,
        pool_connections=cls.CONNECTIONS_PER_SESSION,
        pool_maxsize=cls.CONNECTIONS_PER_SESSION,
        max_retries=0,
    )
def raw_session(prefix, oauth2_client=None, oauth2_session_params=None)
Expand source code
@classmethod
def raw_session(cls, prefix, oauth2_client=None, oauth2_session_params=None):
    if oauth2_client:
        session = OAuth2Session(client=oauth2_client, **(oauth2_session_params or {}))
    else:
        session = requests.sessions.Session()
    session.headers.update(DEFAULT_HEADERS)
    session.headers['User-Agent'] = cls.USERAGENT
    session.mount(prefix, adapter=cls.get_adapter())
    return session

Instance variables

var auth_type
Expand source code
@property
def auth_type(self):
    return self.config.auth_type
var credentials
Expand source code
@property
def credentials(self):
    return self.config.credentials
var retry_policy
Expand source code
@property
def retry_policy(self):
    return self.config.retry_policy
var server
Expand source code
@property
def server(self):
    return self.config.server
var service_endpoint
Expand source code
@property
def service_endpoint(self):
    return self.config.service_endpoint
var session_pool_size
Expand source code
@property
def session_pool_size(self):
    return self._session_pool_size

Methods

def close(self)
Expand source code
def close(self):
    log.debug('Server %s: Closing sessions', self.server)
    while True:
        try:
            session = self._session_pool.get(block=False)
            self.close_session(session)
            self._session_pool_size -= 1
        except Empty:
            break
def create_oauth2_session(self)
Expand source code
def create_oauth2_session(self):
    if self.auth_type != OAUTH2:
        raise ValueError(
            'Auth type must be %r for credentials type %s' % (OAUTH2, self.credentials.__class__.__name__)
        )

    has_token = False
    scope = ['https://outlook.office365.com/.default']
    session_params = {}
    token_params = {}

    if isinstance(self.credentials, OAuth2AuthorizationCodeCredentials):
        # Ask for a refresh token
        scope.append('offline_access')

        # We don't know (or need) the Microsoft tenant ID. Use
        # common/ to let Microsoft select the appropriate tenant
        # for the provided authorization code or refresh token.
        #
        # Suppress looks-like-password warning from Bandit.
        token_url = 'https://login.microsoftonline.com/common/oauth2/v2.0/token'  # nosec

        client_params = {}
        has_token = self.credentials.access_token is not None
        if has_token:
            session_params['token'] = self.credentials.access_token
        elif self.credentials.authorization_code is not None:
            token_params['code'] = self.credentials.authorization_code
            self.credentials.authorization_code = None

        if self.credentials.client_id is not None and self.credentials.client_secret is not None:
            # If we're given a client ID and secret, we have enough
            # to refresh access tokens ourselves. In other cases the
            # session will raise TokenExpiredError and we'll need to
            # ask the calling application to refresh the token (that
            # covers cases where the caller doesn't have access to
            # the client secret but is working with a service that
            # can provide it refreshed tokens on a limited basis).
            session_params.update({
                'auto_refresh_kwargs': {
                    'client_id': self.credentials.client_id,
                    'client_secret': self.credentials.client_secret,
                },
                'auto_refresh_url': token_url,
                'token_updater': self.credentials.on_token_auto_refreshed,
            })
        client = WebApplicationClient(self.credentials.client_id, **client_params)
    else:
        token_url = 'https://login.microsoftonline.com/%s/oauth2/v2.0/token' % self.credentials.tenant_id
        client = BackendApplicationClient(client_id=self.credentials.client_id)

    session = self.raw_session(self.service_endpoint, oauth2_client=client, oauth2_session_params=session_params)
    if not has_token:
        # Fetch the token explicitly -- it doesn't occur implicitly
        token = session.fetch_token(token_url=token_url, client_id=self.credentials.client_id,
                                    client_secret=self.credentials.client_secret, scope=scope,
                                    timeout=self.TIMEOUT, **token_params)
        # Allow the credentials object to update its copy of the new
        # token, and give the application an opportunity to cache it
        self.credentials.on_token_auto_refreshed(token)
    session.auth = get_auth_instance(auth_type=OAUTH2, client=client)

    return session
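Session creation is driven entirely by the credentials object on the protocol's configuration. A hedged sketch of setting up an OAuth2-authenticated Account with client credentials; all IDs, secrets and addresses are placeholders:

    from exchangelib import Account, Configuration, Identity, IMPERSONATION, OAuth2Credentials, OAUTH2

    credentials = OAuth2Credentials(
        client_id='MY_CLIENT_ID',          # placeholder values from your Azure AD app registration
        client_secret='MY_CLIENT_SECRET',
        tenant_id='MY_TENANT_ID',
        identity=Identity(primary_smtp_address='user@example.com'),
    )
    config = Configuration(server='outlook.office365.com', credentials=credentials, auth_type=OAUTH2)
    account = Account('user@example.com', config=config, access_type=IMPERSONATION)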
def create_session(self)
Expand source code
def create_session(self):
    if self.auth_type is None:
        raise ValueError('Cannot create session without knowing the auth type')
    if self.credentials is None:
        if self.auth_type in CREDENTIALS_REQUIRED:
            raise ValueError('Auth type %r requires credentials' % self.auth_type)
        session = self.raw_session(self.service_endpoint)
        session.auth = get_auth_instance(auth_type=self.auth_type)
    else:
        with self.credentials.lock:
            if isinstance(self.credentials, OAuth2Credentials):
                session = self.create_oauth2_session()
                # Keep track of the credentials used to create this session. If
                # and when we need to renew credentials (for example, refreshing
                # an OAuth access token), this lets us easily determine whether
                # the credentials have already been refreshed in another thread
                # by the time this session tries.
                session.credentials_sig = self.credentials.sig()
            else:
                if self.auth_type == NTLM and self.credentials.type == self.credentials.EMAIL:
                    username = '\\' + self.credentials.username
                else:
                    username = self.credentials.username
                session = self.raw_session(self.service_endpoint)
                session.auth = get_auth_instance(auth_type=self.auth_type, username=username,
                                                 password=self.credentials.password)

    # Add some extra info
    session.session_id = sum(map(ord, str(os.urandom(100))))  # Used for debugging messages in services
    session.usage_count = 0
    session.protocol = self
    log.debug('Server %s: Created session %s', self.server, session.session_id)
    return session
def decrease_poolsize(self)

Decreases the session pool size in response to error messages from the server requesting to rate-limit requests. We decrease by one session per call.

Expand source code
def decrease_poolsize(self):
    """Decreases the session pool size in response to error messages from the server requesting to rate-limit
    requests. We decrease by one session per call.
    """
    # Take a single session from the pool and discard it. We need to protect this with a lock while we are changing
    # the pool size variable, to avoid race conditions. We must keep at least one session in the pool.
    if self._session_pool_size <= 1:
        raise SessionPoolMinSizeReached('Session pool size cannot be decreased further')
    with self._session_pool_lock:
        if self._session_pool_size <= 1:
            log.debug('Session pool size was decreased in another thread')
            return
        log.warning('Server %s: Decreasing session pool size from %s to %s', self.server, self._session_pool_size,
                    self._session_pool_size - 1)
        session = self.get_session()
        self.close_session(session)
        self._session_pool_size -= 1
def get_session(self)
Expand source code
def get_session(self):
    # Try to get a session from the queue. If the queue is empty, try to add one more session to the queue. If the
    # queue is already at its max, wait until a session becomes available.
    _timeout = 60  # Rate-limit messages about session starvation
    try:
        session = self._session_pool.get(block=False)
        log.debug('Server %s: Got session immediately', self.server)
    except Empty:
        try:
            self.increase_poolsize()
        except SessionPoolMaxSizeReached:
            pass
        while True:
            try:
                log.debug('Server %s: Waiting for session', self.server)
                session = self._session_pool.get(timeout=_timeout)
                break
            except Empty:
                # This is normal when we have many worker threads starving for available sessions
                log.debug('Server %s: No sessions available for %s seconds', self.server, _timeout)
    log.debug('Server %s: Got session %s', self.server, session.session_id)
    session.usage_count += 1
    return session
def increase_poolsize(self)

Increases the session pool size. We increase by one session per call.

Expand source code
def increase_poolsize(self):
    """Increases the session pool size. We increase by one session per call."""
    # Create a single session and insert it into the pool. We need to protect this with a lock while we are changing
    # the pool size variable, to avoid race conditions. We must not exceed the pool size limit.
    if self._session_pool_size == self._session_pool_maxsize:
        raise SessionPoolMaxSizeReached('Session pool size cannot be increased further')
    with self._session_pool_lock:
        if self._session_pool_size >= self._session_pool_maxsize:
            log.debug('Session pool size was increased in another thread')
            return
        log.debug('Server %s: Increasing session pool size from %s to %s', self.server, self._session_pool_size,
                  self._session_pool_size + 1)
        self._session_pool.put(self.create_session(), block=False)
        self._session_pool_size += 1
def refresh_credentials(self, session)
Expand source code
def refresh_credentials(self, session):
    # Credentials need to be refreshed, probably due to an OAuth
    # access token expiring. If we've gotten here, it's because the
    # application didn't provide an OAuth client secret, so we can't
    # handle token refreshing for it.
    with self.credentials.lock:
        if self.credentials.sig() == session.credentials_sig:
            # Credentials have not been refreshed by another thread:
            # they're the same as the session was created with. If
            # this isn't the case, we can just go ahead with a new
            # session using the already-updated credentials.
            self.credentials.refresh(session=session)
    return self.renew_session(session)
def release_session(self, session)
Expand source code
def release_session(self, session):
    # This should never fail, as we don't have more sessions than the queue contains
    log.debug('Server %s: Releasing session %s', self.server, session.session_id)
    if self.MAX_SESSION_USAGE_COUNT and session.usage_count > self.MAX_SESSION_USAGE_COUNT:
        log.debug('Server %s: session %s usage exceeded limit. Discarding', self.server, session.session_id)
        session = self.renew_session(session)
    try:
        self._session_pool.put(session, block=False)
    except Full:
        log.debug('Server %s: Session pool was already full %s', self.server, session.session_id)
def renew_session(self, session)
Expand source code
def renew_session(self, session):
    # The session is useless. Close it completely and place a fresh session in the pool
    log.debug('Server %s: Renewing session %s', self.server, session.session_id)
    self.close_session(session)
    return self.create_session()
def retire_session(self, session)
Expand source code
def retire_session(self, session):
    # The session is useless. Close it completely and place a fresh session in the pool
    log.debug('Server %s: Retiring session %s', self.server, session.session_id)
    self.close_session(session)
    self.release_session(self.create_session())
class Body (...)

Helper to mark the 'body' field as a complex attribute.

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/body

Expand source code
class Body(str):
    """Helper to mark the 'body' field as a complex attribute.

    MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/body
    """

    body_type = 'Text'

    def __add__(self, other):
        # Make sure Body('') + 'foo' returns a Body type
        return self.__class__(super().__add__(other))

    def __mod__(self, other):
        # Make sure Body('%s') % 'foo' returns a Body type
        return self.__class__(super().__mod__(other))

    def format(self, *args, **kwargs):
        # Make sure Body('{}').format('foo') returns a Body type
        return self.__class__(super().format(*args, **kwargs))

Ancestors

  • builtins.str

Subclasses

Class variables

var body_type

Methods

def format(self, *args, **kwargs)

S.format(*args, **kwargs) -> str

Return a formatted version of S, using substitutions from args and kwargs. The substitutions are identified by braces ('{' and '}').

Expand source code
def format(self, *args, **kwargs):
    # Make sure Body('{}').format('foo') returns a Body type
    return self.__class__(super().format(*args, **kwargs))
class Build (major_version, minor_version, major_build=0, minor_build=0)

Holds methods for working with build numbers.

Expand source code
class Build:
    """Holds methods for working with build numbers."""

    # List of build numbers here: https://docs.microsoft.com/en-us/exchange/new-features/build-numbers-and-release-dates
    API_VERSION_MAP = {
        8: {
            0: 'Exchange2007',
            1: 'Exchange2007_SP1',
            2: 'Exchange2007_SP1',
            3: 'Exchange2007_SP1',
        },
        14: {
            0: 'Exchange2010',
            1: 'Exchange2010_SP1',
            2: 'Exchange2010_SP2',
            3: 'Exchange2010_SP2',
        },
        15: {
            0: 'Exchange2013',  # Minor builds starting from 847 are Exchange2013_SP1, see api_version()
            1: 'Exchange2016',
            2: 'Exchange2019',
            20: 'Exchange2016',  # This is Office365. See issue #221
        },
    }

    __slots__ = 'major_version', 'minor_version', 'major_build', 'minor_build'

    def __init__(self, major_version, minor_version, major_build=0, minor_build=0):
        if not isinstance(major_version, int):
            raise ValueError("'major_version' must be an integer")
        if not isinstance(minor_version, int):
            raise ValueError("'minor_version' must be an integer")
        if not isinstance(major_build, int):
            raise ValueError("'major_build' must be an integer")
        if not isinstance(minor_build, int):
            raise ValueError("'minor_build' must be an integer")
        self.major_version = major_version
        self.minor_version = minor_version
        self.major_build = major_build
        self.minor_build = minor_build
        if major_version < 8:
            raise ValueError("Exchange major versions below 8 don't support EWS (%s)" % self)

    @classmethod
    def from_xml(cls, elem):
        xml_elems_map = {
            'major_version': 'MajorVersion',
            'minor_version': 'MinorVersion',
            'major_build': 'MajorBuildNumber',
            'minor_build': 'MinorBuildNumber',
        }
        kwargs = {}
        for k, xml_elem in xml_elems_map.items():
            v = elem.get(xml_elem)
            if v is None:
                raise ValueError()
            kwargs[k] = int(v)  # Also raises ValueError
        return cls(**kwargs)

    @classmethod
    def from_hex_string(cls, s):
        """Parse a server version string as returned in an autodiscover response. The process is described here:
        https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/serverversion-pox#example

        The string is a hex string that, converted to a 32-bit binary, encodes the server version. The rules are:
            * The first 4 bits contain the version number structure version. Can be ignored
            * The next 6 bits contain the major version number
            * The next 6 bits contain the minor version number
            * The next bit contains a flag. Can be ignored
            * The next 15 bits contain the major build number

        :param s:
        """
        bin_s = '{:032b}'.format(int(s, 16))  # Convert string to 32-bit binary string
        major_version = int(bin_s[4:10], 2)
        minor_version = int(bin_s[10:16], 2)
        build_number = int(bin_s[17:32], 2)
        return cls(major_version=major_version, minor_version=minor_version, major_build=build_number)

    def api_version(self):
        if EXCHANGE_2013_SP1 <= self < EXCHANGE_2016:
            return 'Exchange2013_SP1'
        try:
            return self.API_VERSION_MAP[self.major_version][self.minor_version]
        except KeyError:
            raise ValueError('API version for build %s is unknown' % self)

    def fullname(self):
        return VERSIONS[self.api_version()][1]

    def __cmp__(self, other):
        # __cmp__ is not a magic method in Python3. We'll just use it here to implement comparison operators
        c = (self.major_version > other.major_version) - (self.major_version < other.major_version)
        if c != 0:
            return c
        c = (self.minor_version > other.minor_version) - (self.minor_version < other.minor_version)
        if c != 0:
            return c
        c = (self.major_build > other.major_build) - (self.major_build < other.major_build)
        if c != 0:
            return c
        return (self.minor_build > other.minor_build) - (self.minor_build < other.minor_build)

    def __eq__(self, other):
        return self.__cmp__(other) == 0

    def __hash__(self):
        return hash(repr(self))

    def __ne__(self, other):
        return self.__cmp__(other) != 0

    def __lt__(self, other):
        return self.__cmp__(other) < 0

    def __le__(self, other):
        return self.__cmp__(other) <= 0

    def __gt__(self, other):
        return self.__cmp__(other) > 0

    def __ge__(self, other):
        return self.__cmp__(other) >= 0

    def __str__(self):
        return '%s.%s.%s.%s' % (self.major_version, self.minor_version, self.major_build, self.minor_build)

    def __repr__(self):
        return self.__class__.__name__ \
               + repr((self.major_version, self.minor_version, self.major_build, self.minor_build))

Class variables

var API_VERSION_MAP

Static methods

def from_hex_string(s)

Parse a server version string as returned in an autodiscover response. The process is described here: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/serverversion-pox#example

The string is a hex string that, converted to a 32-bit binary, encodes the server version. The rules are: * The first 4 bits contain the version number structure version. Can be ignored * The next 6 bits contain the major version number * The next 6 bits contain the minor version number * The next bit contains a flag. Can be ignored * The next 15 bits contain the major build number

:param s: The hex-encoded version string

Expand source code
@classmethod
def from_hex_string(cls, s):
    """Parse a server version string as returned in an autodiscover response. The process is described here:
    https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/serverversion-pox#example

    The string is a hex string that, converted to a 32-bit binary, encodes the server version. The rules are:
        * The first 4 bits contain the version number structure version. Can be ignored
        * The next 6 bits contain the major version number
        * The next 6 bits contain the minor version number
        * The next bit contains a flag. Can be ignored
        * The next 15 bits contain the major build number

    :param s: The hex-encoded version string
    """
    bin_s = '{:032b}'.format(int(s, 16))  # Convert string to 32-bit binary string
    major_version = int(bin_s[4:10], 2)
    minor_version = int(bin_s[10:16], 2)
    build_number = int(bin_s[17:32], 2)
    return cls(major_version=major_version, minor_version=minor_version, major_build=build_number)
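
A minimal decoding sketch. The hex value below is constructed for illustration from the bit layout above (structure version 0, major version 15, minor version 1, flag 0, major build 2176); it is not taken from a real autodiscover response:

from exchangelib import Build

build = Build.from_hex_string('03C10880')
print(build)  # -> '15.1.2176.0', assuming the minor build defaults to 0
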
def from_xml(elem)
Expand source code
@classmethod
def from_xml(cls, elem):
    xml_elems_map = {
        'major_version': 'MajorVersion',
        'minor_version': 'MinorVersion',
        'major_build': 'MajorBuildNumber',
        'minor_build': 'MinorBuildNumber',
    }
    kwargs = {}
    for k, xml_elem in xml_elems_map.items():
        v = elem.get(xml_elem)
        if v is None:
            raise ValueError()
        kwargs[k] = int(v)  # Also raises ValueError
    return cls(**kwargs)

Instance variables

var major_build

Return an attribute of instance, which is of type owner.

var major_version

Return an attribute of instance, which is of type owner.

var minor_build

Return an attribute of instance, which is of type owner.

var minor_version

Return an attribute of instance, which is of type owner.

Methods

def api_version(self)
Expand source code
def api_version(self):
    if EXCHANGE_2013_SP1 <= self < EXCHANGE_2016:
        return 'Exchange2013_SP1'
    try:
        return self.API_VERSION_MAP[self.major_version][self.minor_version]
    except KeyError:
        raise ValueError('API version for build %s is unknown' % self)
def fullname(self)
Expand source code
def fullname(self):
    return VERSIONS[self.api_version()][1]
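
A short usage sketch. The named build constants (EXCHANGE_2010, EXCHANGE_2016, ...) are assumed to live in exchangelib.version, as referenced by api_version() above:

from exchangelib import Build
from exchangelib.version import EXCHANGE_2016

b = Build(15, 1, 2176, 9)   # major version, minor version, major build, minor build
print(b.api_version())      # 'Exchange2016' for a 15.1 build, per API_VERSION_MAP
print(b >= EXCHANGE_2016)   # Build supports all comparison operators, see __cmp__ above
print(b.fullname())         # the human-readable name for the API version
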
class CalendarItem (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/calendaritem

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs: 'account' is optional but allows calling 'send()' and 'delete()'. 'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.
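
A minimal sketch of creating and saving an appointment, assuming an already-configured Account instance named account:

from exchangelib import CalendarItem, EWSDateTime, EWSTimeZone

tz = EWSTimeZone('Europe/Copenhagen')
item = CalendarItem(
    folder=account.calendar,                         # 'account' is assumed to exist
    start=EWSDateTime(2021, 3, 1, 9, 0, tzinfo=tz),
    end=EWSDateTime(2021, 3, 1, 10, 0, tzinfo=tz),
    subject='Status meeting',
)
item.save()                                          # possible because 'folder' is set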

Expand source code
class CalendarItem(Item, AcceptDeclineMixIn):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/calendaritem"""

    ELEMENT_NAME = 'CalendarItem'

    uid = TextField(field_uri='calendar:UID', is_required_after_save=True, is_searchable=False)
    start = DateOrDateTimeField(field_uri='calendar:Start', is_required=True)
    end = DateOrDateTimeField(field_uri='calendar:End', is_required=True)
    original_start = DateTimeField(field_uri='calendar:OriginalStart', is_read_only=True)
    is_all_day = BooleanField(field_uri='calendar:IsAllDayEvent', is_required=True, default=False)
    legacy_free_busy_status = FreeBusyStatusField(field_uri='calendar:LegacyFreeBusyStatus', is_required=True,
                                                  default='Busy')
    location = TextField(field_uri='calendar:Location')
    when = TextField(field_uri='calendar:When')
    is_meeting = BooleanField(field_uri='calendar:IsMeeting', is_read_only=True)
    is_cancelled = BooleanField(field_uri='calendar:IsCancelled', is_read_only=True)
    is_recurring = BooleanField(field_uri='calendar:IsRecurring', is_read_only=True)
    meeting_request_was_sent = BooleanField(field_uri='calendar:MeetingRequestWasSent', is_read_only=True)
    is_response_requested = BooleanField(field_uri='calendar:IsResponseRequested', default=None,
                                         is_required_after_save=True, is_searchable=False)
    type = ChoiceField(field_uri='calendar:CalendarItemType', choices={Choice(c) for c in CALENDAR_ITEM_CHOICES},
                       is_read_only=True)
    my_response_type = ChoiceField(field_uri='calendar:MyResponseType', choices={
            Choice(c) for c in Attendee.RESPONSE_TYPES
    }, is_read_only=True)
    organizer = MailboxField(field_uri='calendar:Organizer', is_read_only=True)
    required_attendees = AttendeesField(field_uri='calendar:RequiredAttendees', is_searchable=False)
    optional_attendees = AttendeesField(field_uri='calendar:OptionalAttendees', is_searchable=False)
    resources = AttendeesField(field_uri='calendar:Resources', is_searchable=False)
    conflicting_meeting_count = IntegerField(field_uri='calendar:ConflictingMeetingCount', is_read_only=True)
    adjacent_meeting_count = IntegerField(field_uri='calendar:AdjacentMeetingCount', is_read_only=True)
    conflicting_meetings = EWSElementListField(field_uri='calendar:ConflictingMeetings', value_cls='CalendarItem',
                                               namespace=Item.NAMESPACE, is_read_only=True)
    adjacent_meetings = EWSElementListField(field_uri='calendar:AdjacentMeetings', value_cls='CalendarItem',
                                            namespace=Item.NAMESPACE, is_read_only=True)
    duration = CharField(field_uri='calendar:Duration', is_read_only=True)
    appointment_reply_time = DateTimeField(field_uri='calendar:AppointmentReplyTime', is_read_only=True)
    appointment_sequence_number = IntegerField(field_uri='calendar:AppointmentSequenceNumber', is_read_only=True)
    appointment_state = AppointmentStateField(field_uri='calendar:AppointmentState', is_read_only=True)
    recurrence = RecurrenceField(field_uri='calendar:Recurrence', is_searchable=False)
    first_occurrence = OccurrenceField(field_uri='calendar:FirstOccurrence', value_cls=FirstOccurrence,
                                       is_read_only=True)
    last_occurrence = OccurrenceField(field_uri='calendar:LastOccurrence', value_cls=LastOccurrence,
                                      is_read_only=True)
    modified_occurrences = OccurrenceListField(field_uri='calendar:ModifiedOccurrences', value_cls=Occurrence,
                                               is_read_only=True)
    deleted_occurrences = OccurrenceListField(field_uri='calendar:DeletedOccurrences', value_cls=DeletedOccurrence,
                                              is_read_only=True)
    _meeting_timezone = TimeZoneField(field_uri='calendar:MeetingTimeZone', deprecated_from=EXCHANGE_2010,
                                      is_searchable=False)
    _start_timezone = TimeZoneField(field_uri='calendar:StartTimeZone', supported_from=EXCHANGE_2010,
                                    is_searchable=False)
    _end_timezone = TimeZoneField(field_uri='calendar:EndTimeZone', supported_from=EXCHANGE_2010,
                                  is_searchable=False)
    conference_type = EnumAsIntField(field_uri='calendar:ConferenceType', enum=CONFERENCE_TYPES, min=0,
                                     default=None, is_required_after_save=True)
    allow_new_time_proposal = BooleanField(field_uri='calendar:AllowNewTimeProposal', default=None,
                                           is_required_after_save=True, is_searchable=False)
    is_online_meeting = BooleanField(field_uri='calendar:IsOnlineMeeting', default=None,
                                     is_read_only=True)
    meeting_workspace_url = URIField(field_uri='calendar:MeetingWorkspaceUrl')
    net_show_url = URIField(field_uri='calendar:NetShowUrl')

    def occurrence(self, index):
        """Get an occurrence of a recurring master by index. No query is sent to the server to actually fetch the item.
        Call refresh() on the item to do so.

        Only call this method on a recurring master.

        :param index: The index, which is 1-based

        :return: The occurrence
        """
        return self.__class__(
            account=self.account,
            folder=self.folder,
            _id=OccurrenceItemId(id=self.id, changekey=self.changekey, instance_index=index),
        )

    def recurring_master(self):
        """Get the recurring master of an occurrence. No query is sent to the server to actually fetch the item.
        Call refresh() on the item to do so.

        Only call this method on an occurrence of a recurring master.

        :return: The master occurrence
        """
        return self.__class__(
            account=self.account,
            folder=self.folder,
            _id=RecurringMasterItemId(id=self.id, changekey=self.changekey),
        )

    @classmethod
    def timezone_fields(cls):
        return [f for f in cls.FIELDS if isinstance(f, TimeZoneField)]

    def clean_timezone_fields(self, version):
        # Sets proper values on the timezone fields if they are not already set
        if self.start is None:
            start_tz = None
        elif type(self.start) in (EWSDate, datetime.date):
            start_tz = self.account.default_timezone
        else:
            start_tz = self.start.tzinfo
        if self.end is None:
            end_tz = None
        elif type(self.end) in (EWSDate, datetime.date):
            end_tz = self.account.default_timezone
        else:
            end_tz = self.end.tzinfo
        if version.build < EXCHANGE_2010:
            if self._meeting_timezone is None:
                self._meeting_timezone = start_tz
            self._start_timezone = None
            self._end_timezone = None
        else:
            self._meeting_timezone = None
            if self._start_timezone is None:
                self._start_timezone = start_tz
            if self._end_timezone is None:
                self._end_timezone = end_tz

    def clean(self, version=None):
        super().clean(version=version)
        if self.start and self.end and self.end < self.start:
            raise ValueError("'end' must be greater than 'start' (%s -> %s)" % (self.start, self.end))
        if version:
            self.clean_timezone_fields(version=version)

    def cancel(self, **kwargs):
        return CancelCalendarItem(
            account=self.account,
            reference_item_id=ReferenceItemId(id=self.id, changekey=self.changekey),
            **kwargs
        ).send()

    def _update_fieldnames(self):
        update_fields = super()._update_fieldnames()
        if self.type == OCCURRENCE:
            # Some CalendarItem fields cannot be updated when the item is an occurrence. The values are empty when we
            # receive the item, so they would otherwise be flagged as updated simply because they are set to None.
            update_fields.remove('recurrence')
            update_fields.remove('uid')
        return update_fields

    @classmethod
    def from_xml(cls, elem, account):
        item = super().from_xml(elem=elem, account=account)
        # EWS returns the start and end values as a datetime regardless of the is_all_day status. Convert to date if
        # applicable.
        if not item.is_all_day:
            return item
        for field_name in ('start', 'end'):
            val = getattr(item, field_name)
            if val is None:
                continue
            # Return just the date part of the value. Subtract 1 day from the date if this is the end field. This is
            # the inverse of what we do in .to_xml(). Convert to the local timezone before getting the date.
            if field_name == 'end':
                val -= datetime.timedelta(days=1)
            tz = getattr(item, '_%s_timezone' % field_name)
            setattr(item, field_name, val.astimezone(tz).date())
        return item

    def tz_field_for_field_name(self, field_name):
        meeting_tz_field, start_tz_field, end_tz_field = CalendarItem.timezone_fields()
        if self.account.version.build < EXCHANGE_2010:
            return meeting_tz_field
        if field_name == 'start':
            return start_tz_field
        if field_name == 'end':
            return end_tz_field
        raise ValueError('Unsupported field_name')

    def date_to_datetime(self, field_name):
        # EWS always expects a datetime. If we have a date value, then convert it to datetime in the local
        # timezone. Additionally, if this is the end field, add 1 day to the date. We could add 12 hours to both
        # start and end values and let EWS apply its logic, but that seems hacky.
        value = getattr(self, field_name)
        tz = getattr(self, self.tz_field_for_field_name(field_name).name)
        value = EWSDateTime.combine(value, datetime.time(0, 0)).replace(tzinfo=tz)
        if field_name == 'end':
            value += datetime.timedelta(days=1)
        return value

    def to_xml(self, version):
        # EWS has some special logic related to all-day start and end values. Non-midnight start values are pushed to
        # the previous midnight. Non-midnight end values are pushed to the following midnight. Midnight in this context
        # refers to midnight in the local timezone. See
        #
        # https://docs.microsoft.com/en-us/exchange/client-developer/exchange-web-services/how-to-create-all-day-events-by-using-ews-in-exchange
        #
        elem = super().to_xml(version=version)
        if not self.is_all_day:
            return elem
        for field_name in ('start', 'end'):
            value = getattr(self, field_name)
            if value is None:
                continue
            if type(value) in (EWSDate, datetime.date):
                # EWS always expects a datetime
                value = self.date_to_datetime(field_name=field_name)
                # We already generated an XML element for this field, but it contains a plain date at this point, which
                # is invalid. Replace the value.
                field = self.get_field_by_fieldname(field_name)
                set_xml_value(elem=elem.find(field.response_tag()), value=value, version=version)
        return elem

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Static methods

def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    item = super().from_xml(elem=elem, account=account)
    # EWS returns the start and end values as a datetime regardless of the is_all_day status. Convert to date if
    # applicable.
    if not item.is_all_day:
        return item
    for field_name in ('start', 'end'):
        val = getattr(item, field_name)
        if val is None:
            continue
        # Return just the date part of the value. Subtract 1 day from the date if this is the end field. This is
        # the inverse of what we do in .to_xml(). Convert to the local timezone before getting the date.
        if field_name == 'end':
            val -= datetime.timedelta(days=1)
        tz = getattr(item, '_%s_timezone' % field_name)
        setattr(item, field_name, val.astimezone(tz).date())
    return item
def timezone_fields()
Expand source code
@classmethod
def timezone_fields(cls):
    return [f for f in cls.FIELDS if isinstance(f, TimeZoneField)]

Instance variables

var adjacent_meeting_count
var adjacent_meetings
var allow_new_time_proposal
var appointment_reply_time
var appointment_sequence_number
var appointment_state
var conference_type
var conflicting_meeting_count
var conflicting_meetings
var deleted_occurrences
var duration
var end
var first_occurrence
var is_all_day
var is_cancelled
var is_meeting
var is_online_meeting
var is_recurring
var is_response_requested
var last_occurrence
var legacy_free_busy_status
var location
var meeting_request_was_sent
var meeting_workspace_url
var modified_occurrences
var my_response_type
var net_show_url
var optional_attendees
var organizer
var original_start
var recurrence
var required_attendees
var resources
var start
var type
var uid
var when

Methods

def cancel(self, **kwargs)
Expand source code
def cancel(self, **kwargs):
    return CancelCalendarItem(
        account=self.account,
        reference_item_id=ReferenceItemId(id=self.id, changekey=self.changekey),
        **kwargs
    ).send()
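
A quick sketch, assuming item is a previously fetched or saved CalendarItem for a meeting that this account organizes:

item.cancel()  # builds a CancelCalendarItem referencing this item's id/changekey and sends it
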
def clean(self, version=None)
Expand source code
def clean(self, version=None):
    super().clean(version=version)
    if self.start and self.end and self.end < self.start:
        raise ValueError("'end' must be greater than 'start' (%s -> %s)" % (self.start, self.end))
    if version:
        self.clean_timezone_fields(version=version)
def clean_timezone_fields(self, version)
Expand source code
def clean_timezone_fields(self, version):
    # Sets proper values on the timezone fields if they are not already set
    if self.start is None:
        start_tz = None
    elif type(self.start) in (EWSDate, datetime.date):
        start_tz = self.account.default_timezone
    else:
        start_tz = self.start.tzinfo
    if self.end is None:
        end_tz = None
    elif type(self.end) in (EWSDate, datetime.date):
        end_tz = self.account.default_timezone
    else:
        end_tz = self.end.tzinfo
    if version.build < EXCHANGE_2010:
        if self._meeting_timezone is None:
            self._meeting_timezone = start_tz
        self._start_timezone = None
        self._end_timezone = None
    else:
        self._meeting_timezone = None
        if self._start_timezone is None:
            self._start_timezone = start_tz
        if self._end_timezone is None:
            self._end_timezone = end_tz
def date_to_datetime(self, field_name)
Expand source code
def date_to_datetime(self, field_name):
    # EWS always expects a datetime. If we have a date value, then convert it to datetime in the local
    # timezone. Additionally, if this is the end field, add 1 day to the date. We could add 12 hours to both
    # start and end values and let EWS apply its logic, but that seems hacky.
    value = getattr(self, field_name)
    tz = getattr(self, self.tz_field_for_field_name(field_name).name)
    value = EWSDateTime.combine(value, datetime.time(0, 0)).replace(tzinfo=tz)
    if field_name == 'end':
        value += datetime.timedelta(days=1)
    return value
def occurrence(self, index)

Get an occurrence of a recurring master by index. No query is sent to the server to actually fetch the item. Call refresh() on the item to do so.

Only call this method on a recurring master.

:param index: The index, which is 1-based

:return: The occurrence

Expand source code
def occurrence(self, index):
    """Get an occurrence of a recurring master by index. No query is sent to the server to actually fetch the item.
    Call refresh() on the item to do so.

    Only call this method on a recurring master.

    :param index: The index, which is 1-based

    :return: The occurrence
    """
    return self.__class__(
        account=self.account,
        folder=self.folder,
        _id=OccurrenceItemId(id=self.id, changekey=self.changekey, instance_index=index),
    )
def recurring_master(self)

Get the recurring master of an occurrence. No query is sent to the server to actually fetch the item. Call refresh() on the item to do so.

Only call this method on an occurrence of a recurring master.

:return: The master occurrence

Expand source code
def recurring_master(self):
    """Get the recurring master of an occurrence. No query is sent to the server to actually fetch the item.
    Call refresh() on the item to do so.

    Only call this method on an occurrence of a recurring master.

    :return: The master occurrence
    """
    return self.__class__(
        account=self.account,
        folder=self.folder,
        _id=RecurringMasterItemId(id=self.id, changekey=self.changekey),
    )
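
A small sketch tying the two methods together. master is assumed to be a recurring master CalendarItem fetched earlier; neither call below contacts the server until refresh() is called:

third = master.occurrence(index=3)         # lazy reference to the third occurrence
third.refresh()                            # fetch the actual occurrence from the server
back_to_master = third.recurring_master()  # lazy reference back to the master
back_to_master.refresh()
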
def to_xml(self, version)
Expand source code
def to_xml(self, version):
    # EWS has some special logic related to all-day start and end values. Non-midnight start values are pushed to
    # the previous midnight. Non-midnight end values are pushed to the following midnight. Midnight in this context
    # refers to midnight in the local timezone. See
    #
    # https://docs.microsoft.com/en-us/exchange/client-developer/exchange-web-services/how-to-create-all-day-events-by-using-ews-in-exchange
    #
    elem = super().to_xml(version=version)
    if not self.is_all_day:
        return elem
    for field_name in ('start', 'end'):
        value = getattr(self, field_name)
        if value is None:
            continue
        if type(value) in (EWSDate, datetime.date):
            # EWS always expects a datetime
            value = self.date_to_datetime(field_name=field_name)
            # We already generated an XML element for this field, but it contains a plain date at this point, which
            # is invalid. Replace the value.
            field = self.get_field_by_fieldname(field_name)
            set_xml_value(elem=elem.find(field.response_tag()), value=value, version=version)
    return elem
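
For all-day events, start and end may be given as plain dates; the conversions above then translate between those dates and the datetimes EWS expects on the wire. A sketch, assuming an existing Account instance named account:

from exchangelib import CalendarItem, EWSDate

item = CalendarItem(
    folder=account.calendar,
    start=EWSDate(2021, 5, 1),
    end=EWSDate(2021, 5, 1),    # same day; to_xml() adds one day to 'end' before sending
    is_all_day=True,
    subject='Public holiday',
)
item.save()
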
def tz_field_for_field_name(self, field_name)
Expand source code
def tz_field_for_field_name(self, field_name):
    meeting_tz_field, start_tz_field, end_tz_field = CalendarItem.timezone_fields()
    if self.account.version.build < EXCHANGE_2010:
        return meeting_tz_field
    if field_name == 'start':
        return start_tz_field
    if field_name == 'end':
        return end_tz_field
    raise ValueError('Unsupported field_name')

Inherited members

class CancelCalendarItem (**kwargs)
Expand source code
class CancelCalendarItem(BaseReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/cancelcalendaritem"""

    ELEMENT_NAME = 'CancelCalendarItem'
    author_idx = BaseReplyItem.FIELDS.index_by_name('author')
    FIELDS = BaseReplyItem.FIELDS[:author_idx] + BaseReplyItem.FIELDS[author_idx + 1:]

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS
var author_idx

Inherited members

class Configuration (credentials=None, server=None, service_endpoint=None, auth_type=None, version=None, retry_policy=None, max_connections=None)

Contains information needed to create an authenticated connection to an EWS endpoint.

The 'credentials' argument contains the credentials needed to authenticate with the server. Multiple credentials implementations are available in 'exchangelib.credentials'.

config = Configuration(credentials=Credentials('john@example.com', 'MY_SECRET'), …)

The 'server' and 'service_endpoint' arguments are mutually exclusive. The former must contain only a domain name, the latter a full URL:

config = Configuration(server='example.com', ...)
config = Configuration(service_endpoint='https://mail.example.com/EWS/Exchange.asmx', ...)

If you know which authentication type the server uses, you can add that as a hint in 'auth_type'. Likewise, you can add the server version as a hint. This allows skipping the auth type and version guessing routines:

config = Configuration(auth_type=NTLM, ...)
config = Configuration(version=Version(build=Build(15, 1, 2, 3)), ...)

You can use 'retry_policy' to define a custom retry policy for handling server connection failures:

config = Configuration(retry_policy=FaultTolerance(max_wait=3600), ...)

'max_connections' defines the max number of connections allowed for this server. This may be restricted by policies on the Exchange server.

Expand source code
class Configuration:
    """Contains information needed to create an authenticated connection to an EWS endpoint.

    The 'credentials' argument contains the credentials needed to authenticate with the server. Multiple credentials
    implementations are available in 'exchangelib.credentials'.

    config = Configuration(credentials=Credentials('john@example.com', 'MY_SECRET'), ...)

    The 'server' and 'service_endpoint' arguments are mutually exclusive. The former must contain only a domain name,
    the latter a full URL:

        config = Configuration(server='example.com', ...)
        config = Configuration(service_endpoint='https://mail.example.com/EWS/Exchange.asmx', ...)

    If you know which authentication type the server uses, you can add that as a hint in 'auth_type'. Likewise, you
    can add the server version as a hint. This allows skipping the auth type and version guessing routines:

        config = Configuration(auth_type=NTLM, ...)
        config = Configuration(version=Version(build=Build(15, 1, 2, 3)), ...)

    You can use 'retry_policy' to define a custom retry policy for handling server connection failures:

        config = Configuration(retry_policy=FaultTolerance(max_wait=3600), ...)

    'max_connections' defines the max number of connections allowed for this server. This may be restricted by
    policies on the Exchange server.
    """

    def __init__(self, credentials=None, server=None, service_endpoint=None, auth_type=None, version=None,
                 retry_policy=None, max_connections=None):
        if not isinstance(credentials, (BaseCredentials, type(None))):
            raise ValueError("'credentials' %r must be a Credentials instance" % credentials)
        if isinstance(credentials, OAuth2Credentials) and auth_type is None:
            # This type of credentials *must* use the OAuth auth type
            auth_type = OAUTH2
        if server and service_endpoint:
            raise AttributeError("Only one of 'server' or 'service_endpoint' must be provided")
        if auth_type is not None and auth_type not in AUTH_TYPE_MAP:
            raise ValueError("'auth_type' %r must be one of %s"
                             % (auth_type, ', '.join("'%s'" % k for k in sorted(AUTH_TYPE_MAP))))
        if not retry_policy:
            retry_policy = FailFast()
        if not isinstance(version, (Version, type(None))):
            raise ValueError("'version' %r must be a Version instance" % version)
        if not isinstance(retry_policy, RetryPolicy):
            raise ValueError("'retry_policy' %r must be a RetryPolicy instance" % retry_policy)
        if not isinstance(max_connections, (int, type(None))):
            raise ValueError("'max_connections' must be an integer")
        self._credentials = credentials
        if server:
            self.service_endpoint = 'https://%s/EWS/Exchange.asmx' % server
        else:
            self.service_endpoint = service_endpoint
        self.auth_type = auth_type
        self.version = version
        self.retry_policy = retry_policy
        self.max_connections = max_connections

    @property
    def credentials(self):
        # Do not update credentials from this class. Instead, do it from Protocol
        return self._credentials

    @threaded_cached_property
    def server(self):
        if not self.service_endpoint:
            return None
        return split_url(self.service_endpoint)[1]

    def __repr__(self):
        return self.__class__.__name__ + '(%s)' % ', '.join('%s=%r' % (k, getattr(self, k)) for k in (
            'credentials', 'service_endpoint', 'auth_type', 'version', 'retry_policy'
        ))
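
A minimal end-to-end sketch of tying a Configuration to an Account; the server name and credentials are placeholders:

from exchangelib import Account, Configuration, Credentials, DELEGATE, FaultTolerance

config = Configuration(
    server='mail.example.com',
    credentials=Credentials('john@example.com', 'MY_SECRET'),
    retry_policy=FaultTolerance(max_wait=3600),
)
account = Account(
    primary_smtp_address='john@example.com',
    config=config,
    autodiscover=False,    # the server was given explicitly above
    access_type=DELEGATE,
)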

Instance variables

var credentials
Expand source code
@property
def credentials(self):
    # Do not update credentials from this class. Instead, do it from Protocol
    return self._credentials
var server
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
class Contact (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/contact

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs: 'account' is optional but allows calling 'send()' and 'delete()'. 'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.

Expand source code
class Contact(Item):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/contact"""

    ELEMENT_NAME = 'Contact'

    file_as = TextField(field_uri='contacts:FileAs')
    file_as_mapping = ChoiceField(field_uri='contacts:FileAsMapping', choices={
        Choice('None'), Choice('LastCommaFirst'), Choice('FirstSpaceLast'), Choice('Company'),
        Choice('LastCommaFirstCompany'), Choice('CompanyLastFirst'), Choice('LastFirst'),
        Choice('LastFirstCompany'), Choice('CompanyLastCommaFirst'), Choice('LastFirstSuffix'),
        Choice('LastSpaceFirstCompany'), Choice('CompanyLastSpaceFirst'), Choice('LastSpaceFirst'),
        Choice('DisplayName'), Choice('FirstName'), Choice('LastFirstMiddleSuffix'), Choice('LastName'),
        Choice('Empty'),
    })
    display_name = TextField(field_uri='contacts:DisplayName', is_required=True)
    given_name = CharField(field_uri='contacts:GivenName')
    initials = TextField(field_uri='contacts:Initials')
    middle_name = CharField(field_uri='contacts:MiddleName')
    nickname = TextField(field_uri='contacts:Nickname')
    complete_name = EWSElementField(field_uri='contacts:CompleteName', value_cls=CompleteName, is_read_only=True)
    company_name = TextField(field_uri='contacts:CompanyName')
    email_addresses = EmailAddressesField(field_uri='contacts:EmailAddress')
    physical_addresses = PhysicalAddressField(field_uri='contacts:PhysicalAddress')
    phone_numbers = PhoneNumberField(field_uri='contacts:PhoneNumber')
    assistant_name = TextField(field_uri='contacts:AssistantName')
    birthday = DateTimeBackedDateField(field_uri='contacts:Birthday', default_time=datetime.time(11, 59))
    business_homepage = URIField(field_uri='contacts:BusinessHomePage')
    children = TextListField(field_uri='contacts:Children')
    companies = TextListField(field_uri='contacts:Companies', is_searchable=False)
    contact_source = ChoiceField(field_uri='contacts:ContactSource', choices={
        Choice('Store'), Choice('ActiveDirectory')
    }, is_read_only=True)
    department = TextField(field_uri='contacts:Department')
    generation = TextField(field_uri='contacts:Generation')
    im_addresses = CharField(field_uri='contacts:ImAddresses', is_read_only=True)
    job_title = TextField(field_uri='contacts:JobTitle')
    manager = TextField(field_uri='contacts:Manager')
    mileage = TextField(field_uri='contacts:Mileage')
    office = TextField(field_uri='contacts:OfficeLocation')
    postal_address_index = ChoiceField(field_uri='contacts:PostalAddressIndex', choices={
        Choice('Business'), Choice('Home'), Choice('Other'), Choice('None')
    }, default='None', is_required_after_save=True)
    profession = TextField(field_uri='contacts:Profession')
    spouse_name = TextField(field_uri='contacts:SpouseName')
    surname = CharField(field_uri='contacts:Surname')
    wedding_anniversary = DateTimeBackedDateField(field_uri='contacts:WeddingAnniversary',
                                                  default_time=datetime.time(11, 59))
    has_picture = BooleanField(field_uri='contacts:HasPicture', supported_from=EXCHANGE_2010, is_read_only=True)
    phonetic_full_name = TextField(field_uri='contacts:PhoneticFullName', supported_from=EXCHANGE_2010_SP2,
                                   is_read_only=True)
    phonetic_first_name = TextField(field_uri='contacts:PhoneticFirstName', supported_from=EXCHANGE_2010_SP2,
                                    is_read_only=True)
    phonetic_last_name = TextField(field_uri='contacts:PhoneticLastName', supported_from=EXCHANGE_2010_SP2,
                                   is_read_only=True)
    email_alias = EmailAddressField(field_uri='contacts:Alias', is_read_only=True,
                                    supported_from=EXCHANGE_2010_SP2)
    # 'notes' is documented in MSDN but apparently unused. Writing to it raises ErrorInvalidPropertyRequest. OWA
    # puts entries from the 'notes' form field into the 'body' field.
    notes = CharField(field_uri='contacts:Notes', supported_from=EXCHANGE_2010_SP2, is_read_only=True)
    # 'photo' is documented in MSDN but apparently unused. Writing to it raises ErrorInvalidPropertyRequest. OWA
    # adds photos as FileAttachments on the contact item (with 'is_contact_photo=True'), which automatically flips
    # the 'has_picture' field.
    photo = Base64Field(field_uri='contacts:Photo', supported_from=EXCHANGE_2010_SP2, is_read_only=True)
    user_smime_certificate = Base64Field(field_uri='contacts:UserSMIMECertificate', supported_from=EXCHANGE_2010_SP2,
                                         is_read_only=True)
    ms_exchange_certificate = Base64Field(field_uri='contacts:MSExchangeCertificate', supported_from=EXCHANGE_2010_SP2,
                                          is_read_only=True)
    directory_id = TextField(field_uri='contacts:DirectoryId', supported_from=EXCHANGE_2010_SP2, is_read_only=True)
    manager_mailbox = MailboxField(field_uri='contacts:ManagerMailbox', supported_from=EXCHANGE_2010_SP2,
                                   is_read_only=True)
    direct_reports = MailboxListField(field_uri='contacts:DirectReports', supported_from=EXCHANGE_2010_SP2,
                                      is_read_only=True)
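
A small sketch of creating a contact in the default contacts folder, assuming an existing Account instance named account:

from exchangelib import Contact

contact = Contact(
    folder=account.contacts,
    display_name='Jane Doe',     # the only required field above
    given_name='Jane',
    surname='Doe',
    company_name='Example Corp',
)
contact.save()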

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Instance variables

var assistant_name
var birthday
var business_homepage
var children
var companies
var company_name
var complete_name
var contact_source
var department
var direct_reports
var directory_id
var display_name
var email_addresses
var email_alias
var file_as
var file_as_mapping
var generation
var given_name
var has_picture
var im_addresses
var initials
var job_title
var manager
var manager_mailbox
var middle_name
var mileage
var ms_exchange_certificate
var nickname
var notes
var office
var phone_numbers
var phonetic_first_name
var phonetic_full_name
var phonetic_last_name
var photo
var physical_addresses
var postal_address_index
var profession
var spouse_name
var surname
var user_smime_certificate
var wedding_anniversary

Inherited members

class Credentials (username, password)

Keeps login info the way Exchange likes it.

Usernames for authentication are of one of these forms:
* PrimarySMTPAddress
* WINDOMAIN\username
* User Principal Name (UPN)

password: Clear-text password

Expand source code
class Credentials(BaseCredentials):
    r"""Keeps login info the way Exchange likes it.

    Usernames for authentication are of one of these forms:
    * PrimarySMTPAddress
    * WINDOMAIN\username
    * User Principal Name (UPN)
      password: Clear-text password
    """

    EMAIL = 'email'
    DOMAIN = 'domain'
    UPN = 'upn'

    def __init__(self, username, password):
        super().__init__()
        if username.count('@') == 1:
            self.type = self.EMAIL
        elif username.count('\\') == 1:
            self.type = self.DOMAIN
        else:
            self.type = self.UPN
        self.username = username
        self.password = password

    def refresh(self, session):
        pass

    def __repr__(self):
        return self.__class__.__name__ + repr((self.username, '********'))

    def __str__(self):
        return self.username
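
A short sketch of the three username forms and the 'type' attribute they set:

from exchangelib import Credentials

Credentials('john@example.com', 'MY_SECRET').type  # Credentials.EMAIL (PrimarySMTPAddress)
Credentials('EXAMPLE\\john', 'MY_SECRET').type     # Credentials.DOMAIN (WINDOMAIN\username)
Credentials('john', 'MY_SECRET').type              # Credentials.UPN (anything else)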

Ancestors

Class variables

var DOMAIN
var EMAIL
var UPN

Inherited members

class DLMailbox (**kwargs)

Like Mailbox, but creates elements in the 'messages' namespace when sending requests.

Expand source code
class DLMailbox(Mailbox):
    """Like Mailbox, but creates elements in the 'messages' namespace when sending requests."""

    NAMESPACE = MNS

Ancestors

Class variables

var NAMESPACE

Inherited members

class DeclineItem (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/declineitem

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs: 'account' is optional but allows calling 'send()' and 'delete()'. 'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.

Expand source code
class DeclineItem(BaseMeetingReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/declineitem"""

    ELEMENT_NAME = 'DeclineItem'

Ancestors

Class variables

var ELEMENT_NAME

Inherited members

class DistributionList (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/distributionlist

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs: 'account' is optional but allows calling 'send()' and 'delete()'. 'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.

Expand source code
class DistributionList(Item):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/distributionlist"""

    ELEMENT_NAME = 'DistributionList'

    display_name = CharField(field_uri='contacts:DisplayName', is_required=True)
    file_as = CharField(field_uri='contacts:FileAs', is_read_only=True)
    contact_source = ChoiceField(field_uri='contacts:ContactSource', choices={
        Choice('Store'), Choice('ActiveDirectory')
    }, is_read_only=True)
    members = MemberListField(field_uri='distributionlist:Members')

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Instance variables

var contact_source
var display_name
var file_as
var members

Inherited members

class EWSDate (...)

Extends the normal date implementation to satisfy EWS.

Expand source code
class EWSDate(datetime.date):
    """Extends the normal date implementation to satisfy EWS."""

    __slots__ = '_year', '_month', '_day', '_hashcode'

    def ewsformat(self):
        """ISO 8601 format to satisfy xs:date as interpreted by EWS. Example: 2009-01-15."""
        return self.isoformat()

    def __add__(self, other):
        dt = super().__add__(other)
        if isinstance(dt, self.__class__):
            return dt
        return self.from_date(dt)  # We want to return EWSDate objects

    def __iadd__(self, other):
        return self + other

    def __sub__(self, other):
        dt = super().__sub__(other)
        if isinstance(dt, datetime.timedelta):
            return dt
        if isinstance(dt, self.__class__):
            return dt
        return self.from_date(dt)  # We want to return EWSDate objects

    def __isub__(self, other):
        return self - other

    @classmethod
    def fromordinal(cls, n):
        dt = super().fromordinal(n)
        if isinstance(dt, cls):
            return dt
        return cls.from_date(dt)  # We want to return EWSDate objects

    @classmethod
    def from_date(cls, d):
        if type(d) is not datetime.date:
            raise ValueError("%r must be a date instance" % d)
        return cls(d.year, d.month, d.day)

    @classmethod
    def from_string(cls, date_string):
        # Sometimes, we'll receive a date string with timezone information. Not very useful.
        if date_string.endswith('Z'):
            date_fmt = '%Y-%m-%dZ'
        elif ':' in date_string:
            if '+' in date_string:
                date_fmt = '%Y-%m-%d+%H:%M'
            else:
                date_fmt = '%Y-%m-%d-%H:%M'
        else:
            date_fmt = '%Y-%m-%d'
        d = datetime.datetime.strptime(date_string, date_fmt).date()
        if isinstance(d, cls):
            return d
        return cls.from_date(d)  # We want to return EWSDate objects

Ancestors

  • datetime.date

Static methods

def from_date(d)
Expand source code
@classmethod
def from_date(cls, d):
    if type(d) is not datetime.date:
        raise ValueError("%r must be a date instance" % d)
    return cls(d.year, d.month, d.day)
def from_string(date_string)
Expand source code
@classmethod
def from_string(cls, date_string):
    # Sometimes, we'll receive a date string with timezone information. Not very useful.
    if date_string.endswith('Z'):
        date_fmt = '%Y-%m-%dZ'
    elif ':' in date_string:
        if '+' in date_string:
            date_fmt = '%Y-%m-%d+%H:%M'
        else:
            date_fmt = '%Y-%m-%d-%H:%M'
    else:
        date_fmt = '%Y-%m-%d'
    d = datetime.datetime.strptime(date_string, date_fmt).date()
    if isinstance(d, cls):
        return d
    return cls.from_date(d)  # We want to return EWSDate objects
def fromordinal(n)

int -> date corresponding to a proleptic Gregorian ordinal.

Expand source code
@classmethod
def fromordinal(cls, n):
    dt = super().fromordinal(n)
    if isinstance(dt, cls):
        return dt
    return cls.from_date(dt)  # We want to return EWSDate objects

Methods

def ewsformat(self)

ISO 8601 format to satisfy xs:date as interpreted by EWS. Example: 2009-01-15.

Expand source code
def ewsformat(self):
    """ISO 8601 format to satisfy xs:date as interpreted by EWS. Example: 2009-01-15."""
    return self.isoformat()
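
A short usage sketch:

from exchangelib import EWSDate

d = EWSDate(2009, 1, 15)
print(d.ewsformat())                      # '2009-01-15'
print(EWSDate.from_string('2009-01-15'))  # parses plain and timezone-suffixed date strings
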
class EWSDateTime (*args, **kwargs)

Extends the normal datetime implementation to satisfy EWS.

Expand source code
class EWSDateTime(datetime.datetime):
    """Extends the normal datetime implementation to satisfy EWS."""

    __slots__ = '_year', '_month', '_day', '_hour', '_minute', '_second', '_microsecond', '_tzinfo', '_hashcode'

    def __new__(cls, *args, **kwargs):
        # pylint: disable=arguments-differ

        if len(args) == 8:
            tzinfo = args[7]
        else:
            tzinfo = kwargs.get('tzinfo')
        if isinstance(tzinfo, zoneinfo.ZoneInfo):
            # Don't allow pytz or dateutil timezones here. They are not safe to use as direct input for datetime()
            tzinfo = EWSTimeZone.from_timezone(tzinfo)
        if not isinstance(tzinfo, (EWSTimeZone, type(None))):
            raise ValueError('tzinfo %r must be an EWSTimeZone instance' % tzinfo)
        if len(args) == 8:
            args = list(args)
            args[7] = tzinfo
            args = tuple(args)
        else:
            kwargs['tzinfo'] = tzinfo
        return super().__new__(cls, *args, **kwargs)

    def ewsformat(self):
        """ISO 8601 format to satisfy xs:datetime as interpreted by EWS. Examples:
        * 2009-01-15T13:45:56Z
        * 2009-01-15T13:45:56+01:00
        """
        if not self.tzinfo:
            raise ValueError('%r must be timezone-aware' % self)
        if self.tzinfo.key == 'UTC':
            return self.strftime('%Y-%m-%dT%H:%M:%SZ')
        return self.replace(microsecond=0).isoformat()

    @classmethod
    def from_datetime(cls, d):
        if type(d) is not datetime.datetime:
            raise ValueError("%r must be a datetime instance" % d)
        if d.tzinfo is None:
            tz = None
        elif isinstance(d.tzinfo, EWSTimeZone):
            tz = d.tzinfo
        else:
            tz = EWSTimeZone.from_timezone(d.tzinfo)
        return cls(d.year, d.month, d.day, d.hour, d.minute, d.second, d.microsecond, tzinfo=tz)

    def astimezone(self, tz=None):
        if tz is None:
            tz = EWSTimeZone.localzone()
        t = super().astimezone(tz=tz).replace(tzinfo=tz)
        if isinstance(t, self.__class__):
            return t
        return self.from_datetime(t)  # We want to return EWSDateTime objects

    def __add__(self, other):
        t = super().__add__(other)
        if isinstance(t, self.__class__):
            return t
        return self.from_datetime(t)  # We want to return EWSDateTime objects

    def __iadd__(self, other):
        return self + other

    def __sub__(self, other):
        t = super().__sub__(other)
        if isinstance(t, datetime.timedelta):
            return t
        if isinstance(t, self.__class__):
            return t
        return self.from_datetime(t)  # We want to return EWSDateTime objects

    def __isub__(self, other):
        return self - other

    @classmethod
    def from_string(cls, date_string):
        # Parses several common datetime formats and returns timezone-aware EWSDateTime objects
        if date_string.endswith('Z'):
            # UTC datetime
            return super().strptime(date_string, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=UTC)
        if len(date_string) == 19:
            # This is probably a naive datetime. Don't allow this, but signal caller with an appropriate error
            local_dt = super().strptime(date_string, '%Y-%m-%dT%H:%M:%S')
            raise NaiveDateTimeNotAllowed(local_dt)
        # This is probably a datetime value with timezone information. This comes in the form '+/-HH:MM'.
        aware_dt = datetime.datetime.fromisoformat(date_string).astimezone(UTC).replace(tzinfo=UTC)
        if isinstance(aware_dt, cls):
            return aware_dt
        return cls.from_datetime(aware_dt)

    @classmethod
    def fromtimestamp(cls, t, tz=None):
        dt = super().fromtimestamp(t, tz=tz)
        if isinstance(dt, cls):
            return dt
        return cls.from_datetime(dt)  # We want to return EWSDateTime objects

    @classmethod
    def utcfromtimestamp(cls, t):
        dt = super().utcfromtimestamp(t)
        if isinstance(dt, cls):
            return dt
        return cls.from_datetime(dt)  # We want to return EWSDateTime objects

    @classmethod
    def now(cls, tz=None):
        t = super().now(tz=tz)
        if isinstance(t, cls):
            return t
        return cls.from_datetime(t)  # We want to return EWSDateTime objects

    @classmethod
    def utcnow(cls):
        t = super().utcnow()
        if isinstance(t, cls):
            return t
        return cls.from_datetime(t)  # We want to return EWSDateTime objects

    def date(self):
        d = super().date()
        if isinstance(d, EWSDate):
            return d
        return EWSDate.from_date(d)  # We want to return EWSDate objects

Ancestors

  • datetime.datetime
  • datetime.date

Static methods

def from_datetime(d)
Expand source code
@classmethod
def from_datetime(cls, d):
    if type(d) is not datetime.datetime:
        raise ValueError("%r must be a datetime instance" % d)
    if d.tzinfo is None:
        tz = None
    elif isinstance(d.tzinfo, EWSTimeZone):
        tz = d.tzinfo
    else:
        tz = EWSTimeZone.from_timezone(d.tzinfo)
    return cls(d.year, d.month, d.day, d.hour, d.minute, d.second, d.microsecond, tzinfo=tz)
def from_string(date_string)
Expand source code
@classmethod
def from_string(cls, date_string):
    # Parses several common datetime formats and returns timezone-aware EWSDateTime objects
    if date_string.endswith('Z'):
        # UTC datetime
        return super().strptime(date_string, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=UTC)
    if len(date_string) == 19:
        # This is probably a naive datetime. Don't allow this, but signal caller with an appropriate error
        local_dt = super().strptime(date_string, '%Y-%m-%dT%H:%M:%S')
        raise NaiveDateTimeNotAllowed(local_dt)
    # This is probably a datetime value with timezone information. This comes in the form '+/-HH:MM'.
    aware_dt = datetime.datetime.fromisoformat(date_string).astimezone(UTC).replace(tzinfo=UTC)
    if isinstance(aware_dt, cls):
        return aware_dt
    return cls.from_datetime(aware_dt)
def fromtimestamp(t, tz=None)

timestamp[, tz] -> tz's local time from POSIX timestamp.

Expand source code
@classmethod
def fromtimestamp(cls, t, tz=None):
    dt = super().fromtimestamp(t, tz=tz)
    if isinstance(dt, cls):
        return dt
    return cls.from_datetime(dt)  # We want to return EWSDateTime objects
def now(tz=None)

Returns new datetime object representing current time local to tz.

tz Timezone object.

If no tz is specified, uses local timezone.

Expand source code
@classmethod
def now(cls, tz=None):
    t = super().now(tz=tz)
    if isinstance(t, cls):
        return t
    return cls.from_datetime(t)  # We want to return EWSDateTime objects
def utcfromtimestamp(t)

Construct a naive UTC datetime from a POSIX timestamp.

Expand source code
@classmethod
def utcfromtimestamp(cls, t):
    dt = super().utcfromtimestamp(t)
    if isinstance(dt, cls):
        return dt
    return cls.from_datetime(dt)  # We want to return EWSDateTime objects
def utcnow()

Return a new datetime representing UTC day and time.

Expand source code
@classmethod
def utcnow(cls):
    t = super().utcnow()
    if isinstance(t, cls):
        return t
    return cls.from_datetime(t)  # We want to return EWSDateTime objects

Methods

def astimezone(self, tz=None)

tz -> convert to local time in new timezone tz

Expand source code
def astimezone(self, tz=None):
    if tz is None:
        tz = EWSTimeZone.localzone()
    t = super().astimezone(tz=tz).replace(tzinfo=tz)
    if isinstance(t, self.__class__):
        return t
    return self.from_datetime(t)  # We want to return EWSDateTime objects
def date(self)

Return date object with same year, month and day.

Expand source code
def date(self):
    d = super().date()
    if isinstance(d, EWSDate):
        return d
    return EWSDate.from_date(d)  # We want to return EWSDate objects
def ewsformat(self)

ISO 8601 format to satisfy xs:datetime as interpreted by EWS. Examples:
* 2009-01-15T13:45:56Z
* 2009-01-15T13:45:56+01:00

Expand source code
def ewsformat(self):
    """ISO 8601 format to satisfy xs:datetime as interpreted by EWS. Examples:
    * 2009-01-15T13:45:56Z
    * 2009-01-15T13:45:56+01:00
    """
    if not self.tzinfo:
        raise ValueError('%r must be timezone-aware' % self)
    if self.tzinfo.key == 'UTC':
        return self.strftime('%Y-%m-%dT%H:%M:%SZ')
    return self.replace(microsecond=0).isoformat()
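
A short usage sketch; Copenhagen is at UTC+01:00 on this date:

from exchangelib import EWSDateTime, EWSTimeZone, UTC

tz = EWSTimeZone('Europe/Copenhagen')
dt = EWSDateTime(2009, 1, 15, 13, 45, 56, tzinfo=tz)
print(dt.ewsformat())                  # '2009-01-15T13:45:56+01:00'
print(dt.astimezone(UTC).ewsformat())  # '2009-01-15T12:45:56Z'
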
class EWSTimeZone (*args, **kwargs)

Represents a timezone as expected by the EWS TimezoneContext / TimezoneDefinition XML element, and returned by services.GetServerTimeZones.

Expand source code
class EWSTimeZone(zoneinfo.ZoneInfo):
    """Represents a timezone as expected by the EWS TimezoneContext / TimezoneDefinition XML element, and returned by
    services.GetServerTimeZones.
    """

    IANA_TO_MS_MAP = IANA_TO_MS_TIMEZONE_MAP
    MS_TO_IANA_MAP = MS_TIMEZONE_TO_IANA_MAP

    def __new__(cls, *args, **kwargs):
        try:
            instance = super().__new__(cls, *args, **kwargs)
        except zoneinfo.ZoneInfoNotFoundError as e:
            raise UnknownTimeZone(e.args[0])
        try:
            instance.ms_id = cls.IANA_TO_MS_MAP[instance.key][0]
        except KeyError:
            raise UnknownTimeZone('No Windows timezone name found for timezone "%s"' % instance.key)

        # We don't need the long-format Windows timezone name here. It's used in timezone XML elements, but
        # EWS happily accepts empty strings. For a full list of timezones supported by the target server, including
        # long-format names, see output of services.GetServerTimeZones(account.protocol).call()
        instance.ms_name = ''
        return instance

    def __eq__(self, other):
        # Microsoft timezones are less granular than IANA, so an EWSTimeZone created from 'Europe/Copenhagen' may come
        # back from the server as a different but equivalent IANA timezone. We're catering for Microsoft here, so base
        # equality on the Microsoft timezone ID.
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.ms_id == other.ms_id

    @classmethod
    def from_ms_id(cls, ms_id):
        # Create a timezone instance from a Microsoft timezone ID. This is lossy because there is not a 1:1 translation
        # from MS timezone ID to IANA timezone.
        try:
            return cls(cls.MS_TO_IANA_MAP[ms_id])
        except KeyError:
            if '/' in ms_id:
                # EWS sometimes returns an ID that has a region/location format, e.g. 'Europe/Copenhagen'. Try the
                # string unaltered.
                return cls(ms_id)
            raise UnknownTimeZone("Windows timezone ID '%s' is unknown by CLDR" % ms_id)

    @classmethod
    def from_pytz(cls, tz):
        return cls(tz.zone)

    @classmethod
    def from_dateutil(cls, tz):
        # Objects returned by dateutil.tz.tzlocal() and dateutil.tz.gettz() are not supported. They
        # don't contain enough information to reliably match them with a CLDR timezone.
        if hasattr(tz, '_filename'):
            key = '/'.join(tz._filename.split('/')[-2:])
            return cls(key)
        return cls(tz.tzname(datetime.datetime.now()))

    @classmethod
    def from_zoneinfo(cls, tz):
        return cls(tz.key)

    @classmethod
    def from_timezone(cls, tz):
        # Support multiple tzinfo implementations. We could use isinstance(), but then we'd have to have pytz
        # and dateutil as dependencies for this package.
        tz_module = tz.__class__.__module__.split('.')[0]
        try:
            return {
                cls.__module__.split('.')[0]: lambda z: z,
                'backports': cls.from_zoneinfo,
                'dateutil': cls.from_dateutil,
                'pytz': cls.from_pytz,
                'zoneinfo': cls.from_zoneinfo,
            }[tz_module](tz)
        except KeyError:
            raise TypeError('Unsupported tzinfo type: %r' % tz)

    @classmethod
    def localzone(cls):
        try:
            tz = tzlocal.get_localzone()
        except zoneinfo.ZoneInfoNotFoundError:
            # Older versions of tzlocal will raise a pytz exception. Let's not depend on pytz just for that.
            raise UnknownTimeZone("Failed to guess local timezone")
        # Handle both old and new versions of tzlocal that may return pytz or zoneinfo objects, respectively
        return cls.from_timezone(tz)

    @classmethod
    def timezone(cls, location):
        warnings.warn('replace EWSTimeZone.timezone() with just EWSTimeZone()', DeprecationWarning, stacklevel=2)
        return cls(location)

    def normalize(self, dt, is_dst=False):
        warnings.warn('normalization is now handled gracefully', DeprecationWarning, stacklevel=2)
        return dt

    def localize(self, dt, is_dst=False):
        warnings.warn('replace tz.localize() with dt.replace(tzinfo=tz)', DeprecationWarning, stacklevel=2)
        if dt.tzinfo is not None:
            raise ValueError('%r must be timezone-unaware' % dt)
        dt = dt.replace(tzinfo=self)
        if is_dst is not None:
            # DST dates are assumed to always be after non-DST dates
            dt_before = dt.replace(fold=0)
            dt_after = dt.replace(fold=1)
            dst_before = dt_before.dst()
            dst_after = dt_after.dst()
            if dst_before > dst_after:
                dt = dt_before if is_dst else dt_after
            elif dst_before < dst_after:
                dt = dt_after if is_dst else dt_before
        return dt

    def fromutc(self, dt):
        t = super().fromutc(dt)
        if isinstance(t, EWSDateTime):
            return t
        return EWSDateTime.from_datetime(t)  # We want to return EWSDateTime objects
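
A short usage sketch; the exact Microsoft ID printed depends on IANA_TO_MS_MAP:

from exchangelib import EWSTimeZone

tz = EWSTimeZone('Europe/Copenhagen')         # construct from an IANA key
print(tz.ms_id)                               # the matching Windows/Microsoft timezone ID
local_tz = EWSTimeZone.localzone()            # best guess at the machine's local timezone
roundtrip = EWSTimeZone.from_ms_id(tz.ms_id)  # lossy: one representative IANA zone per MS ID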

Ancestors

  • backports.zoneinfo.ZoneInfo
  • datetime.tzinfo

Class variables

var IANA_TO_MS_MAP
var MS_TO_IANA_MAP

Static methods

def from_dateutil(tz)
Expand source code
@classmethod
def from_dateutil(cls, tz):
    # Objects returned by dateutil.tz.tzlocal() and dateutil.tz.gettz() are not supported. They
    # don't contain enough information to reliably match them with a CLDR timezone.
    if hasattr(tz, '_filename'):
        key = '/'.join(tz._filename.split('/')[-2:])
        return cls(key)
    return cls(tz.tzname(datetime.datetime.now()))
def from_ms_id(ms_id)
Expand source code
@classmethod
def from_ms_id(cls, ms_id):
    # Create a timezone instance from a Microsoft timezone ID. This is lossy because there is not a 1:1 translation
    # from MS timezone ID to IANA timezone.
    try:
        return cls(cls.MS_TO_IANA_MAP[ms_id])
    except KeyError:
        if '/' in ms_id:
            # EWS sometimes returns an ID that has a region/location format, e.g. 'Europe/Copenhagen'. Try the
            # string unaltered.
            return cls(ms_id)
        raise UnknownTimeZone("Windows timezone ID '%s' is unknown by CLDR" % ms_id)
def from_pytz(tz)
Expand source code
@classmethod
def from_pytz(cls, tz):
    return cls(tz.zone)
def from_timezone(tz)
Expand source code
@classmethod
def from_timezone(cls, tz):
    # Support multiple tzinfo implementations. We could use isinstance(), but then we'd have to have pytz
    # and dateutil as dependencies for this package.
    tz_module = tz.__class__.__module__.split('.')[0]
    try:
        return {
            cls.__module__.split('.')[0]: lambda z: z,
            'backports': cls.from_zoneinfo,
            'dateutil': cls.from_dateutil,
            'pytz': cls.from_pytz,
            'zoneinfo': cls.from_zoneinfo,
        }[tz_module](tz)
    except KeyError:
        raise TypeError('Unsupported tzinfo type: %r' % tz)
def from_zoneinfo(tz)
Expand source code
@classmethod
def from_zoneinfo(cls, tz):
    return cls(tz.key)
def localzone()
Expand source code
@classmethod
def localzone(cls):
    try:
        tz = tzlocal.get_localzone()
    except zoneinfo.ZoneInfoNotFoundError:
        # Older versions of tzlocal will raise a pytz exception. Let's not depend on pytz just for that.
        raise UnknownTimeZone("Failed to guess local timezone")
    # Handle both old and new versions of tzlocal that may return pytz or zoneinfo objects, respectively
    return cls.from_timezone(tz)
def timezone(location)
Expand source code
@classmethod
def timezone(cls, location):
    warnings.warn('replace EWSTimeZone.timezone() with just EWSTimeZone()', DeprecationWarning, stacklevel=2)
    return cls(location)

Methods

def fromutc(self, dt)

Given a datetime with local time in UTC, retrieve an adjusted datetime in local time.

Expand source code
def fromutc(self, dt):
    t = super().fromutc(dt)
    if isinstance(t, EWSDateTime):
        return t
    return EWSDateTime.from_datetime(t)  # We want to return EWSDateTime objects
def localize(self, dt, is_dst=False)
Expand source code
def localize(self, dt, is_dst=False):
    warnings.warn('replace tz.localize() with dt.replace(tzinfo=tz)', DeprecationWarning, stacklevel=2)
    if dt.tzinfo is not None:
        raise ValueError('%r must be timezone-unaware' % dt)
    dt = dt.replace(tzinfo=self)
    if is_dst is not None:
        # DST dates are assumed to always be after non-DST dates
        dt_before = dt.replace(fold=0)
        dt_after = dt.replace(fold=1)
        dst_before = dt_before.dst()
        dst_after = dt_after.dst()
        if dst_before > dst_after:
            dt = dt_before if is_dst else dt_after
        elif dst_before < dst_after:
            dt = dt_after if is_dst else dt_before
    return dt
def normalize(self, dt, is_dst=False)
Expand source code
def normalize(self, dt, is_dst=False):
    warnings.warn('normalization is now handled gracefully', DeprecationWarning, stacklevel=2)
    return dt
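The deprecated timezone(), localize() and normalize() methods above only exist for backwards compatibility with the old pytz-based API. A minimal sketch of the non-deprecated equivalents (the timezone name, Microsoft timezone ID and datetime values are only examples):

    tz = EWSTimeZone('Europe/Copenhagen')                     # instead of EWSTimeZone.timezone('Europe/Copenhagen')
    dt = EWSDateTime(2021, 8, 1, 12, 30).replace(tzinfo=tz)   # instead of tz.localize(EWSDateTime(2021, 8, 1, 12, 30))
    local_tz = EWSTimeZone.localzone()                        # best-effort guess of the machine's local timezone
    ms_tz = EWSTimeZone.from_ms_id('Romance Standard Time')   # lossy translation of a Microsoft timezone ID

As the warnings note, ambiguous local times are now handled via the datetime fold attribute, so no explicit normalize() step is needed.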
class ExtendedProperty (*args, **kwargs)
Expand source code
class ExtendedProperty(EWSElement):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/extendedproperty"""

    ELEMENT_NAME = 'ExtendedProperty'

    # Enum values: https://docs.microsoft.com/en-us/dotnet/api/exchangewebservices.distinguishedpropertysettype
    DISTINGUISHED_SETS = {
        'Address',
        'Appointment',
        'CalendarAssistant',
        'Common',
        'InternetHeaders',
        'Meeting',
        'PublicStrings',
        'Sharing',
        'Task',
        'UnifiedMessaging',
    }
    # Enum values: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/extendedfielduri
    PROPERTY_TYPES = {
        'ApplicationTime',
        'Binary',
        'BinaryArray',
        'Boolean',
        'CLSID',
        'CLSIDArray',
        'Currency',
        'CurrencyArray',
        'Double',
        'DoubleArray',
        # 'Error',
        'Float',
        'FloatArray',
        'Integer',
        'IntegerArray',
        'Long',
        'LongArray',
        # 'Null',
        # 'Object',
        # 'ObjectArray',
        'Short',
        'ShortArray',
        'SystemTime',
        'SystemTimeArray',
        'String',
        'StringArray',
    }  # The commented-out types cannot be used for setting or getting (see docs) and are thus not very useful here

    # Translation table between common distinguished_property_set_id and property_set_id values. See
    # https://docs.microsoft.com/en-us/office/client-developer/outlook/mapi/commonly-used-property-sets
    # ID values must be lowercase.
    DISTINGUISHED_SET_NAME_TO_ID_MAP = {
        'Address': '00062004-0000-0000-c000-000000000046',
        'AirSync': '71035549-0739-4dcb-9163-00f0580dbbdf',
        'Appointment': '00062002-0000-0000-c000-000000000046',
        'Common': '00062008-0000-0000-c000-000000000046',
        'InternetHeaders': '00020386-0000-0000-c000-000000000046',
        'Log': '0006200a-0000-0000-c000-000000000046',
        'Mapi': '00020328-0000-0000-c000-000000000046',
        'Meeting': '6ed8da90-450b-101b-98da-00aa003f1305',
        'Messaging': '41f28f13-83f4-4114-a584-eedb5a6b0bff',
        'Note': '0006200e-0000-0000-c000-000000000046',
        'PostRss': '00062041-0000-0000-c000-000000000046',
        'PublicStrings': '00020329-0000-0000-c000-000000000046',
        'Remote': '00062014-0000-0000-c000-000000000046',
        'Report': '00062013-0000-0000-c000-000000000046',
        'Sharing': '00062040-0000-0000-c000-000000000046',
        'Task': '00062003-0000-0000-c000-000000000046',
        'UnifiedMessaging': '4442858e-a9e3-4e80-b900-317a210cc15b',
    }
    DISTINGUISHED_SET_ID_TO_NAME_MAP = {v: k for k, v in DISTINGUISHED_SET_NAME_TO_ID_MAP.items()}

    distinguished_property_set_id = None
    property_set_id = None
    property_tag = None  # hex integer (e.g. 0x8000) or string ('0x8000')
    property_name = None
    property_id = None  # integer as hex-formatted int (e.g. 0x8000) or normal int (32768)
    property_type = ''

    __slots__ = 'value',

    def __init__(self, *args, **kwargs):
        if not kwargs:
            # Allow setting attributes positionally, without keywords
            kwargs = dict(zip(self._slots_keys, args))
        self.value = kwargs.pop('value')
        super().__init__(**kwargs)

    @classmethod
    def validate_cls(cls):
        # Validate values of class attributes and their inter-dependencies
        cls._validate_distinguished_property_set_id()
        cls._validate_property_set_id()
        cls._validate_property_tag()
        cls._validate_property_name()
        cls._validate_property_id()
        cls._validate_property_type()

    @classmethod
    def _validate_distinguished_property_set_id(cls):
        if cls.distinguished_property_set_id:
            if any([cls.property_set_id, cls.property_tag]):
                raise ValueError(
                    "When 'distinguished_property_set_id' is set, 'property_set_id' and 'property_tag' must be None"
                )
            if not any([cls.property_id, cls.property_name]):
                raise ValueError(
                    "When 'distinguished_property_set_id' is set, 'property_id' or 'property_name' must also be set"
                )
            if cls.distinguished_property_set_id not in cls.DISTINGUISHED_SETS:
                raise ValueError(
                    "'distinguished_property_set_id' %r must be one of %s"
                    % (cls.distinguished_property_set_id, sorted(cls.DISTINGUISHED_SETS))
                )

    @classmethod
    def _validate_property_set_id(cls):
        if cls.property_set_id:
            if any([cls.distinguished_property_set_id, cls.property_tag]):
                raise ValueError(
                    "When 'property_set_id' is set, 'distinguished_property_set_id' and 'property_tag' must be None"
                )
            if not any([cls.property_id, cls.property_name]):
                raise ValueError(
                    "When 'property_set_id' is set, 'property_id' or 'property_name' must also be set"
                )

    @classmethod
    def _validate_property_tag(cls):
        if cls.property_tag:
            if any([
                cls.distinguished_property_set_id, cls.property_set_id, cls.property_name, cls.property_id
            ]):
                raise ValueError("When 'property_tag' is set, only 'property_type' must be set")
            if 0x8000 <= cls.property_tag_as_int() <= 0xFFFE:
                raise ValueError(
                    "'property_tag' value '%s' is reserved for custom properties" % cls.property_tag_as_hex()
                )

    @classmethod
    def _validate_property_name(cls):
        if cls.property_name:
            if any([cls.property_id, cls.property_tag]):
                raise ValueError("When 'property_name' is set, 'property_id' and 'property_tag' must be None")
            if not any([cls.distinguished_property_set_id, cls.property_set_id]):
                raise ValueError(
                    "When 'property_name' is set, 'distinguished_property_set_id' or 'property_set_id' must also be set"
                )

    @classmethod
    def _validate_property_id(cls):
        if cls.property_id:
            if any([cls.property_name, cls.property_tag]):
                raise ValueError("When 'property_id' is set, 'property_name' and 'property_tag' must be None")
            if not any([cls.distinguished_property_set_id, cls.property_set_id]):
                raise ValueError(
                    "When 'property_id' is set, 'distinguished_property_set_id' or 'property_set_id' must also be set"
                )

    @classmethod
    def _validate_property_type(cls):
        if cls.property_type not in cls.PROPERTY_TYPES:
            raise ValueError(
                "'property_type' %r must be one of %s" % (cls.property_type, sorted(cls.PROPERTY_TYPES))
            )

    def clean(self, version=None):
        self.validate_cls()
        python_type = self.python_type()
        if self.is_array_type():
            if not is_iterable(self.value):
                raise ValueError("'%s' value %r must be a list" % (self.__class__.__name__, self.value))
            for v in self.value:
                if not isinstance(v, python_type):
                    raise TypeError(
                        "'%s' value element %r must be an instance of %s" % (self.__class__.__name__, v, python_type))
        else:
            if not isinstance(self.value, python_type):
                raise TypeError(
                    "'%s' value %r must be an instance of %s" % (self.__class__.__name__, self.value, python_type))

    @classmethod
    def _normalize_obj(cls, obj):
        # Sometimes, EWS will helpfully translate a 'distinguished_property_set_id' value to a 'property_set_id' value
        # and vice versa. Align these values on an ExtendedFieldURI instance.
        try:
            obj.property_set_id = cls.DISTINGUISHED_SET_NAME_TO_ID_MAP[obj.distinguished_property_set_id]
        except KeyError:
            try:
                obj.distinguished_property_set_id = cls.DISTINGUISHED_SET_ID_TO_NAME_MAP[obj.property_set_id]
            except KeyError:
                pass
        return obj

    @classmethod
    def is_property_instance(cls, elem):
        """Return whether an 'ExtendedProperty' element matches the definition for this class. Extended property fields
        do not have a name, so we must match on the cls.property_* attributes to match a field in the request with a
        field in the response.
        """
        # We can't use ExtendedFieldURI.from_xml(). It clears the XML element but we may not want to consume it here.
        kwargs = {
            f.name: f.from_xml(elem=elem.find(ExtendedFieldURI.response_tag()), account=None)
            for f in ExtendedFieldURI.FIELDS
        }
        xml_obj = ExtendedFieldURI(**kwargs)
        cls_obj = cls.as_object()
        return cls._normalize_obj(cls_obj) == cls._normalize_obj(xml_obj)

    @classmethod
    def from_xml(cls, elem, account):
        # Gets value of this specific ExtendedProperty from a list of 'ExtendedProperty' XML elements
        python_type = cls.python_type()
        if cls.is_array_type():
            values = elem.find('{%s}Values' % TNS)
            return [
                xml_text_to_value(value=val, value_type=python_type)
                for val in get_xml_attrs(values, '{%s}Value' % TNS)
            ]
        extended_field_value = xml_text_to_value(value=get_xml_attr(elem, '{%s}Value' % TNS), value_type=python_type)
        if python_type == str and not extended_field_value:
            # For string types, we want to return the empty string instead of None if the element was
            # actually found, but there was no XML value. For other types, it would be more problematic
            # to make that distinction, e.g. return False for bool, 0 for int, etc.
            return ''
        return extended_field_value

    def to_xml(self, version):
        if self.is_array_type():
            values = create_element('t:Values')
            for v in self.value:
                add_xml_child(values, 't:Value', v)
            return values
        return set_xml_value(create_element('t:Value'), self.value, version=version)

    @classmethod
    def is_array_type(cls):
        return cls.property_type.endswith('Array')

    @classmethod
    def property_tag_as_int(cls):
        if isinstance(cls.property_tag, str):
            return int(cls.property_tag, base=16)
        return cls.property_tag

    @classmethod
    def property_tag_as_hex(cls):
        return hex(cls.property_tag) if isinstance(cls.property_tag, int) else cls.property_tag

    @classmethod
    def python_type(cls):
        # Return the best equivalent for a Python type for the property type of this class
        base_type = cls.property_type[:-5] if cls.is_array_type() else cls.property_type
        return {
            'ApplicationTime': Decimal,
            'Binary': bytes,
            'Boolean': bool,
            'CLSID': str,
            'Currency': int,
            'Double': Decimal,
            'Float': Decimal,
            'Integer': int,
            'Long': int,
            'Short': int,
            'SystemTime': EWSDateTime,
            'String': str,
        }[base_type]

    @classmethod
    def as_object(cls):
        # Return an object we can use to match with the incoming object from XML
        return ExtendedFieldURI(
            distinguished_property_set_id=cls.distinguished_property_set_id,
            property_set_id=cls.property_set_id.lower() if cls.property_set_id else None,
            property_tag=cls.property_tag_as_hex(),
            property_name=cls.property_name,
            property_id=value_to_xml_text(cls.property_id) if cls.property_id else None,
            property_type=cls.property_type,
        )
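A subclass of ExtendedProperty only needs a consistent subset of the property_* class attributes; validate_cls() enforces the combinations checked by the _validate_* methods above. A minimal sketch (the tag value is an example of a non-reserved MAPI tag, not something defined by this module):

    class FlagStatus(ExtendedProperty):
        # 'property_tag' cannot be combined with the other property_* attributes,
        # and values in the reserved 0x8000-0xFFFE range are rejected.
        property_tag = 0x1090
        property_type = 'Integer'

    FlagStatus.validate_cls()   # passes: only 'property_tag' and 'property_type' are set
    FlagStatus.is_array_type()  # False ('Integer' does not end with 'Array')
    FlagStatus.python_type()    # int

Attaching such a subclass as a custom field is sketched further down, under Folder.register().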

Ancestors

Subclasses

Class variables

var DISTINGUISHED_SETS
var DISTINGUISHED_SET_ID_TO_NAME_MAP
var DISTINGUISHED_SET_NAME_TO_ID_MAP
var ELEMENT_NAME
var PROPERTY_TYPES
var distinguished_property_set_id
var property_id
var property_name
var property_set_id
var property_tag
var property_type

Static methods

def as_object()
Expand source code
@classmethod
def as_object(cls):
    # Return an object we can use to match with the incoming object from XML
    return ExtendedFieldURI(
        distinguished_property_set_id=cls.distinguished_property_set_id,
        property_set_id=cls.property_set_id.lower() if cls.property_set_id else None,
        property_tag=cls.property_tag_as_hex(),
        property_name=cls.property_name,
        property_id=value_to_xml_text(cls.property_id) if cls.property_id else None,
        property_type=cls.property_type,
    )
def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    # Gets value of this specific ExtendedProperty from a list of 'ExtendedProperty' XML elements
    python_type = cls.python_type()
    if cls.is_array_type():
        values = elem.find('{%s}Values' % TNS)
        return [
            xml_text_to_value(value=val, value_type=python_type)
            for val in get_xml_attrs(values, '{%s}Value' % TNS)
        ]
    extended_field_value = xml_text_to_value(value=get_xml_attr(elem, '{%s}Value' % TNS), value_type=python_type)
    if python_type == str and not extended_field_value:
        # For string types, we want to return the empty string instead of None if the element was
        # actually found, but there was no XML value. For other types, it would be more problematic
        # to make that distinction, e.g. return False for bool, 0 for int, etc.
        return ''
    return extended_field_value
def is_array_type()
Expand source code
@classmethod
def is_array_type(cls):
    return cls.property_type.endswith('Array')
def is_property_instance(elem)

Return whether an 'ExtendedProperty' element matches the definition for this class. Extended property fields do not have a name, so we must match on the cls.property_* attributes to match a field in the request with a field in the response.

Expand source code
@classmethod
def is_property_instance(cls, elem):
    """Return whether an 'ExtendedProperty' element matches the definition for this class. Extended property fields
    do not have a name, so we must match on the cls.property_* attributes to match a field in the request with a
    field in the response.
    """
    # We can't use ExtendedFieldURI.from_xml(). It clears the XML element but we may not want to consume it here.
    kwargs = {
        f.name: f.from_xml(elem=elem.find(ExtendedFieldURI.response_tag()), account=None)
        for f in ExtendedFieldURI.FIELDS
    }
    xml_obj = ExtendedFieldURI(**kwargs)
    cls_obj = cls.as_object()
    return cls._normalize_obj(cls_obj) == cls._normalize_obj(xml_obj)
def property_tag_as_hex()
Expand source code
@classmethod
def property_tag_as_hex(cls):
    return hex(cls.property_tag) if isinstance(cls.property_tag, int) else cls.property_tag
def property_tag_as_int()
Expand source code
@classmethod
def property_tag_as_int(cls):
    if isinstance(cls.property_tag, str):
        return int(cls.property_tag, base=16)
    return cls.property_tag
def python_type()
Expand source code
@classmethod
def python_type(cls):
    # Return the best equivalent for a Python type for the property type of this class
    base_type = cls.property_type[:-5] if cls.is_array_type() else cls.property_type
    return {
        'ApplicationTime': Decimal,
        'Binary': bytes,
        'Boolean': bool,
        'CLSID': str,
        'Currency': int,
        'Double': Decimal,
        'Float': Decimal,
        'Integer': int,
        'Long': int,
        'Short': int,
        'SystemTime': EWSDateTime,
        'String': str,
    }[base_type]
def validate_cls()
Expand source code
@classmethod
def validate_cls(cls):
    # Validate values of class attributes and their inter-dependencies
    cls._validate_distinguished_property_set_id()
    cls._validate_property_set_id()
    cls._validate_property_tag()
    cls._validate_property_name()
    cls._validate_property_id()
    cls._validate_property_type()

Instance variables

var value

The value of this extended property instance.

Methods

def clean(self, version=None)
Expand source code
def clean(self, version=None):
    self.validate_cls()
    python_type = self.python_type()
    if self.is_array_type():
        if not is_iterable(self.value):
            raise ValueError("'%s' value %r must be a list" % (self.__class__.__name__, self.value))
        for v in self.value:
            if not isinstance(v, python_type):
                raise TypeError(
                    "'%s' value element %r must be an instance of %s" % (self.__class__.__name__, v, python_type))
    else:
        if not isinstance(self.value, python_type):
            raise TypeError(
                "'%s' value %r must be an instance of %s" % (self.__class__.__name__, self.value, python_type))
def to_xml(self, version)
Expand source code
def to_xml(self, version):
    if self.is_array_type():
        values = create_element('t:Values')
        for v in self.value:
            add_xml_child(values, 't:Value', v)
        return values
    return set_xml_value(create_element('t:Value'), self.value, version=version)

Inherited members

class FailFast

Fail immediately on server errors.

Expand source code
class FailFast(RetryPolicy):
    """Fail immediately on server errors."""

    @property
    def fail_fast(self):
        return True

    @property
    def back_off_until(self):
        return None

    def back_off(self, seconds):
        raise ValueError('Cannot back off with fail-fast policy')

    def may_retry_on_error(self, response, wait):
        log.debug('No retry: no fail-fast policy')
        return False

Ancestors

Instance variables

var back_off_until
Expand source code
@property
def back_off_until(self):
    return None
var fail_fast
Expand source code
@property
def fail_fast(self):
    return True

Methods

def back_off(self, seconds)
Expand source code
def back_off(self, seconds):
    raise ValueError('Cannot back off with fail-fast policy')
def may_retry_on_error(self, response, wait)
Expand source code
def may_retry_on_error(self, response, wait):
    log.debug('No retry: no fail-fast policy')
    return False
class FaultTolerance (max_wait=3600)

Enables fault-tolerant error handling. Tells internal methods to do an exponential back off when requests start failing, and wait up to max_wait seconds before failing.

Expand source code
class FaultTolerance(RetryPolicy):
    """Enables fault-tolerant error handling. Tells internal methods to do an exponential back off when requests start
    failing, and wait up to max_wait seconds before failing.
    """

    # Back off 60 seconds if we didn't get an explicit suggested value
    DEFAULT_BACKOFF = 60

    def __init__(self, max_wait=3600):
        self.max_wait = max_wait
        self._back_off_until = None
        self._back_off_lock = Lock()

    def __getstate__(self):
        # Locks cannot be pickled
        state = self.__dict__.copy()
        del state['_back_off_lock']
        return state

    def __setstate__(self, state):
        # Restore the lock
        self.__dict__.update(state)
        self._back_off_lock = Lock()

    @property
    def fail_fast(self):
        return False

    @property
    def back_off_until(self):
        """Return the back off value as a datetime. Reset the current back off value if it has expired."""
        if self._back_off_until is None:
            return None
        with self._back_off_lock:
            if self._back_off_until is None:
                return None
            if self._back_off_until < datetime.datetime.now():
                self._back_off_until = None  # The back off value has expired. Reset
                return None
            return self._back_off_until

    @back_off_until.setter
    def back_off_until(self, value):
        with self._back_off_lock:
            self._back_off_until = value

    def back_off(self, seconds):
        if seconds is None:
            seconds = self.DEFAULT_BACKOFF
        value = datetime.datetime.now() + datetime.timedelta(seconds=seconds)
        with self._back_off_lock:
            self._back_off_until = value

    def may_retry_on_error(self, response, wait):
        if response.status_code not in (301, 302, 401, 500, 503):
            # Don't retry if we didn't get a status code that we can hope to recover from
            log.debug('No retry: wrong status code %s', response.status_code)
            return False
        if wait > self.max_wait:
            # We lost patience. Session is cleaned up in outer loop
            raise RateLimitError(
                'Max timeout reached', url=response.url, status_code=response.status_code, total_wait=wait)
        if response.status_code == 401:
            # EWS sometimes throws 401's when it wants us to throttle connections. OK to retry.
            return True
        if response.headers.get('connection') == 'close':
            # Connection closed. OK to retry.
            return True
        if response.status_code == 302 and response.headers.get('location', '').lower() \
                == '/ews/genericerrorpage.htm?aspxerrorpath=/ews/exchange.asmx':
            # The genericerrorpage.htm/internalerror.asp is ridiculous behaviour for random outages. OK to retry.
            #
            # Redirect to '/internalsite/internalerror.asp' or '/internalsite/initparams.aspx' is caused by e.g. TLS
            # certificate f*ckups on the Exchange server. We should not retry those.
            return True
        if response.status_code == 503:
            # Service unavailable. OK to retry.
            return True
        if response.status_code == 500 and b"Server Error in '/EWS' Application" in response.content:
            # "Server Error in '/EWS' Application" has been seen in highly concurrent settings. OK to retry.
            log.debug('Retry allowed: conditions met')
            return True
        return False

Ancestors

Class variables

var DEFAULT_BACKOFF

Instance variables

var back_off_until

Return the back off value as a datetime. Reset the current back off value if it has expired.

Expand source code
@property
def back_off_until(self):
    """Return the back off value as a datetime. Reset the current back off value if it has expired."""
    if self._back_off_until is None:
        return None
    with self._back_off_lock:
        if self._back_off_until is None:
            return None
        if self._back_off_until < datetime.datetime.now():
            self._back_off_until = None  # The back off value has expired. Reset
            return None
        return self._back_off_until
var fail_fast
Expand source code
@property
def fail_fast(self):
    return False

Methods

def back_off(self, seconds)
Expand source code
def back_off(self, seconds):
    if seconds is None:
        seconds = self.DEFAULT_BACKOFF
    value = datetime.datetime.now() + datetime.timedelta(seconds=seconds)
    with self._back_off_lock:
        self._back_off_until = value
def may_retry_on_error(self, response, wait)
Expand source code
def may_retry_on_error(self, response, wait):
    if response.status_code not in (301, 302, 401, 500, 503):
        # Don't retry if we didn't get a status code that we can hope to recover from
        log.debug('No retry: wrong status code %s', response.status_code)
        return False
    if wait > self.max_wait:
        # We lost patience. Session is cleaned up in outer loop
        raise RateLimitError(
            'Max timeout reached', url=response.url, status_code=response.status_code, total_wait=wait)
    if response.status_code == 401:
        # EWS sometimes throws 401's when it wants us to throttle connections. OK to retry.
        return True
    if response.headers.get('connection') == 'close':
        # Connection closed. OK to retry.
        return True
    if response.status_code == 302 and response.headers.get('location', '').lower() \
            == '/ews/genericerrorpage.htm?aspxerrorpath=/ews/exchange.asmx':
        # The genericerrorpage.htm/internalerror.asp is ridiculous behaviour for random outages. OK to retry.
        #
        # Redirect to '/internalsite/internalerror.asp' or '/internalsite/initparams.aspx' is caused by e.g. TLS
        # certificate f*ckups on the Exchange server. We should not retry those.
        return True
    if response.status_code == 503:
        # Service unavailable. OK to retry.
        return True
    if response.status_code == 500 and b"Server Error in '/EWS' Application" in response.content:
        # "Server Error in '/EWS' Application" has been seen in highly concurrent settings. OK to retry.
        log.debug('Retry allowed: conditions met')
        return True
    return False
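Both retry policies are intended to be passed to the protocol configuration. A hedged sketch, assuming Configuration accepts a retry_policy keyword as in recent exchangelib versions (server name and credentials are placeholders):

    from exchangelib import Configuration, Credentials, FailFast, FaultTolerance

    credentials = Credentials('user@example.com', 'topsecret')

    # Back off exponentially on server errors, giving up after 10 minutes in total
    config = Configuration(server='mail.example.com', credentials=credentials,
                           retry_policy=FaultTolerance(max_wait=600))

    # Or fail immediately on the first server error
    config = Configuration(server='mail.example.com', credentials=credentials,
                           retry_policy=FailFast())

FaultTolerance guards its back-off state with a lock, so one policy instance can safely be shared by multiple threads using the same protocol.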
class FileAttachment (**kwargs)
Expand source code
class FileAttachment(Attachment):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/fileattachment"""

    ELEMENT_NAME = 'FileAttachment'

    is_contact_photo = BooleanField(field_uri='IsContactPhoto')
    _content = Base64Field(field_uri='Content')

    __slots__ = '_fp',

    def __init__(self, **kwargs):
        kwargs['_content'] = kwargs.pop('content', None)
        super().__init__(**kwargs)
        self._fp = None

    @property
    def fp(self):
        # Return a file-like object for the content. This avoids creating multiple in-memory copies of the content.
        if self._fp is None:
            self._init_fp()
        return self._fp

    def _init_fp(self):
        # Create a file-like object for the attachment content. We try hard to reduce memory consumption so we never
        # store the full attachment content in-memory.
        if not self.parent_item or not self.parent_item.account:
            raise ValueError('%s must have an account' % self.__class__.__name__)
        self._fp = FileAttachmentIO(attachment=self)

    @property
    def content(self):
        """Return the attachment content. Stores a local copy of the content in case you want to upload the attachment
        again later.
        """
        if self.attachment_id is None:
            return self._content
        if self._content is not None:
            return self._content
        # We have an ID to the data but still haven't called GetAttachment to get the actual data. Do that now.
        with self.fp as fp:
            self._content = fp.read()
        return self._content

    @content.setter
    def content(self, value):
        """Replace the attachment content."""
        if not isinstance(value, bytes):
            raise ValueError("'value' %r must be a bytes object" % value)
        self._content = value

    @classmethod
    def from_xml(cls, elem, account):
        kwargs = {f.name: f.from_xml(elem=elem, account=account) for f in cls.FIELDS}
        kwargs['content'] = kwargs.pop('_content')
        cls._clear(elem)
        return cls(**kwargs)

    def to_xml(self, version):
        self._content = self.content  # Make sure content is available, to avoid ErrorRequiredPropertyMissing
        return super().to_xml(version=version)

    def __getstate__(self):
        # The fp does not need to be pickled
        state = {k: getattr(self, k) for k in self._slots_keys}
        del state['_fp']
        return state

    def __setstate__(self, state):
        # Restore the fp
        for k in self._slots_keys:
            setattr(self, k, state.get(k))
        self._fp = None
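The content property below loads and caches the entire payload in memory, while fp exposes the same data as a file-like object. A minimal sketch of streaming a fetched attachment to disk instead (the item variable and target path are placeholders; fp is used as a context manager, exactly as in the content property):

    for attachment in item.attachments:
        if isinstance(attachment, FileAttachment):
            with attachment.fp as fp, open('/tmp/%s' % attachment.name, 'wb') as f:
                chunk = fp.read(1024)
                while chunk:
                    f.write(chunk)
                    chunk = fp.read(1024)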

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Static methods

def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    kwargs = {f.name: f.from_xml(elem=elem, account=account) for f in cls.FIELDS}
    kwargs['content'] = kwargs.pop('_content')
    cls._clear(elem)
    return cls(**kwargs)

Instance variables

var content

Return the attachment content. Stores a local copy of the content in case you want to upload the attachment again later.

Expand source code
@property
def content(self):
    """Return the attachment content. Stores a local copy of the content in case you want to upload the attachment
    again later.
    """
    if self.attachment_id is None:
        return self._content
    if self._content is not None:
        return self._content
    # We have an ID to the data but still haven't called GetAttachment to get the actual data. Do that now.
    with self.fp as fp:
        self._content = fp.read()
    return self._content
var fp
Expand source code
@property
def fp(self):
    # Return a file-like object for the content. This avoids creating multiple in-memory copies of the content.
    if self._fp is None:
        self._init_fp()
    return self._fp
var is_contact_photo

Methods

def to_xml(self, version)
Expand source code
def to_xml(self, version):
    self._content = self.content  # Make sure content is available, to avoid ErrorRequiredPropertyMissing
    return super().to_xml(version=version)

Inherited members

class Folder (**kwargs)
Expand source code
class Folder(BaseFolder):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/folder"""

    permission_set = PermissionSetField(field_uri='folder:PermissionSet', supported_from=EXCHANGE_2007_SP1)
    effective_rights = EffectiveRightsField(field_uri='folder:EffectiveRights', is_read_only=True,
                                            supported_from=EXCHANGE_2007_SP1)

    __slots__ = '_root',

    def __init__(self, **kwargs):
        self._root = kwargs.pop('root', None)  # This is a pointer to the root of the folder hierarchy
        parent = kwargs.pop('parent', None)
        if parent:
            if self.root:
                if parent.root != self.root:
                    raise ValueError("'parent.root' must match 'root'")
            else:
                self.root = parent.root
            if 'parent_folder_id' in kwargs and parent.id != kwargs['parent_folder_id']:
                raise ValueError("'parent_folder_id' must match 'parent' ID")
            kwargs['parent_folder_id'] = ParentFolderId(id=parent.id, changekey=parent.changekey)
        super().__init__(**kwargs)

    @property
    def account(self):
        if self.root is None:
            return None
        return self.root.account

    @property
    def root(self):
        return self._root

    @root.setter
    def root(self, value):
        self._root = value

    @classmethod
    def register(cls, *args, **kwargs):
        if cls is not Folder:
            raise TypeError('For folders, custom fields must be registered on the Folder class')
        return super().register(*args, **kwargs)

    @classmethod
    def deregister(cls, *args, **kwargs):
        if cls is not Folder:
            raise TypeError('For folders, custom fields must be registered on the Folder class')
        return super().deregister(*args, **kwargs)

    @classmethod
    def get_distinguished(cls, root):
        """Get the distinguished folder for this folder class.

        :param root:
        :return:
        """
        try:
            return cls.resolve(
                account=root.account,
                folder=cls(root=root, name=cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
            )
        except MISSING_FOLDER_ERRORS:
            raise ErrorFolderNotFound('Could not find distinguished folder %r' % cls.DISTINGUISHED_FOLDER_ID)

    @property
    def parent(self):
        if not self.parent_folder_id:
            return None
        if self.parent_folder_id.id == self.id:
            # Some folders have a parent that references itself. Avoid circular references here
            return None
        return self.root.get_folder(self.parent_folder_id)

    @parent.setter
    def parent(self, value):
        if value is None:
            self.parent_folder_id = None
        else:
            if not isinstance(value, BaseFolder):
                raise ValueError("'value' %r must be a Folder instance" % value)
            self.root = value.root
            self.parent_folder_id = ParentFolderId(id=value.id, changekey=value.changekey)

    def clean(self, version=None):
        from .roots import RootOfHierarchy
        super().clean(version=version)
        if self.root and not isinstance(self.root, RootOfHierarchy):
            raise ValueError("'root' %r must be a RootOfHierarchy instance" % self.root)

    @classmethod
    def from_xml_with_root(cls, elem, root):
        folder = cls.from_xml(elem=elem, account=root.account)
        folder_cls = cls
        if cls == Folder:
            # We were called on the generic Folder class. Try to find a more specific class to return objects as.
            #
            # The "FolderClass" element value is the only indication we have in the FindFolder response of which
            # folder class we should create the folder with. And many folders share the same 'FolderClass' value, e.g.
            # Inbox and DeletedItems. We want to distinguish between these because otherwise we can't locate the right
            # folder types for e.g. Account.inbox and Account.trash.
            #
            # We should be able to just use the name, but apparently default folder names can be renamed to a set of
            # localized names using a PowerShell command:
            # https://docs.microsoft.com/en-us/powershell/module/exchange/client-access/Set-MailboxRegionalConfiguration
            #
            # Instead, search for a folder class using the localized name. If none are found, fall back to getting the
            # folder class by the "FolderClass" value.
            #
            # The returned XML may contain neither folder class nor name. In that case, we default to the generic
            # Folder class.
            if folder.name:
                try:
                    # TODO: fld_class.LOCALIZED_NAMES is most definitely neither complete nor authoritative
                    folder_cls = root.folder_cls_from_folder_name(folder_name=folder.name,
                                                                  locale=root.account.locale)
                    log.debug('Folder class %s matches localized folder name %s', folder_cls, folder.name)
                except KeyError:
                    pass
            if folder.folder_class and folder_cls == Folder:
                try:
                    folder_cls = cls.folder_cls_from_container_class(container_class=folder.folder_class)
                    log.debug('Folder class %s matches container class %s (%s)', folder_cls, folder.folder_class,
                              folder.name)
                except KeyError:
                    pass
            if folder_cls == Folder:
                log.debug('Fallback to class Folder (folder_class %s, name %s)', folder.folder_class, folder.name)
        return folder_cls(root=root, **{f.name: getattr(folder, f.name) for f in folder.FIELDS})
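register() and deregister() tie in with the ExtendedProperty machinery documented earlier: custom folder fields must be registered on the generic Folder class, never on a subclass. A hedged sketch, assuming the inherited register() signature is (attr_name, attr_cls) and using a hypothetical property definition:

    class FolderOwner(ExtendedProperty):
        property_set_id = 'c11ff724-aa03-4555-9952-8fa248a11c3e'  # example GUID, not a well-known property set
        property_name = 'Owner'
        property_type = 'String'

    Folder.register('owner', FolderOwner)
    # ... the 'owner' attribute is now available on folder instances ...
    Folder.deregister('owner')

Calling register() on a Folder subclass raises the TypeError shown in the source above.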

Ancestors

Subclasses

Class variables

var FIELDS

Static methods

def from_xml_with_root(elem, root)
Expand source code
@classmethod
def from_xml_with_root(cls, elem, root):
    folder = cls.from_xml(elem=elem, account=root.account)
    folder_cls = cls
    if cls == Folder:
        # We were called on the generic Folder class. Try to find a more specific class to return objects as.
        #
        # The "FolderClass" element value is the only indication we have in the FindFolder response of which
        # folder class we should create the folder with. And many folders share the same 'FolderClass' value, e.g.
        # Inbox and DeletedItems. We want to distinguish between these because otherwise we can't locate the right
        # folder types for e.g. Account.inbox and Account.trash.
        #
        # We should be able to just use the name, but apparently default folder names can be renamed to a set of
        # localized names using a PowerShell command:
        # https://docs.microsoft.com/en-us/powershell/module/exchange/client-access/Set-MailboxRegionalConfiguration
        #
        # Instead, search for a folder class using the localized name. If none are found, fall back to getting the
        # folder class by the "FolderClass" value.
        #
        # The returned XML may contain neither folder class nor name. In that case, we default to the generic
        # Folder class.
        if folder.name:
            try:
                # TODO: fld_class.LOCALIZED_NAMES is most definitely neither complete nor authoritative
                folder_cls = root.folder_cls_from_folder_name(folder_name=folder.name,
                                                              locale=root.account.locale)
                log.debug('Folder class %s matches localized folder name %s', folder_cls, folder.name)
            except KeyError:
                pass
        if folder.folder_class and folder_cls == Folder:
            try:
                folder_cls = cls.folder_cls_from_container_class(container_class=folder.folder_class)
                log.debug('Folder class %s matches container class %s (%s)', folder_cls, folder.folder_class,
                          folder.name)
            except KeyError:
                pass
        if folder_cls == Folder:
            log.debug('Fallback to class Folder (folder_class %s, name %s)', folder.folder_class, folder.name)
    return folder_cls(root=root, **{f.name: getattr(folder, f.name) for f in folder.FIELDS})
def get_distinguished(root)

Get the distinguished folder for this folder class.

:param root:
:return:

Expand source code
@classmethod
def get_distinguished(cls, root):
    """Get the distinguished folder for this folder class.

    :param root:
    :return:
    """
    try:
        return cls.resolve(
            account=root.account,
            folder=cls(root=root, name=cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
        )
    except MISSING_FOLDER_ERRORS:
        raise ErrorFolderNotFound('Could not find distinguished folder %r' % cls.DISTINGUISHED_FOLDER_ID)

Instance variables

var account
Expand source code
@property
def account(self):
    if self.root is None:
        return None
    return self.root.account
var effective_rights
var parent
Expand source code
@property
def parent(self):
    if not self.parent_folder_id:
        return None
    if self.parent_folder_id.id == self.id:
        # Some folders have a parent that references itself. Avoid circular references here
        return None
    return self.root.get_folder(self.parent_folder_id)
var permission_set
var root
Expand source code
@property
def root(self):
    return self._root

Methods

def clean(self, version=None)
Expand source code
def clean(self, version=None):
    from .roots import RootOfHierarchy
    super().clean(version=version)
    if self.root and not isinstance(self.root, RootOfHierarchy):
        raise ValueError("'root' %r must be a RootOfHierarchy instance" % self.root)

Inherited members

class FolderCollection (account, folders)

A class that implements an API for searching folders.

Implement a search API on a collection of folders.

:param account: An Account object
:param folders: An iterable of folders, e.g. Folder.walk(), Folder.glob(), or [a.calendar, a.inbox]

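A minimal usage sketch, assuming 'account' is an existing Account instance (inbox and sent are the usual distinguished-folder shortcuts on Account):

    coll = FolderCollection(account=account, folders=[account.inbox, account.sent])
    for item in coll.filter(subject__icontains='invoice'):
        print(item.subject)

get(), all(), filter(), exclude() and the other query methods below all delegate to a QuerySet built over this collection.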
Expand source code
class FolderCollection(SearchableMixIn):
    """A class that implements an API for searching folders."""

    # These fields are required in a FindFolder or GetFolder call to properly identify folder types
    REQUIRED_FOLDER_FIELDS = ('name', 'folder_class')

    def __init__(self, account, folders):
        """Implement a search API on a collection of folders.

        :param account: An Account object
        :param folders: An iterable of folders, e.g. Folder.walk(), Folder.glob(), or [a.calendar, a.inbox]
        """
        self.account = account
        self._folders = folders

    @threaded_cached_property
    def folders(self):
        # Resolve the list of folders, in case it's a generator
        return list(self._folders)

    def __len__(self):
        return len(self.folders)

    def __iter__(self):
        yield from self.folders

    def get(self, *args, **kwargs):
        return QuerySet(self).get(*args, **kwargs)

    def all(self):
        return QuerySet(self).all()

    def none(self):
        return QuerySet(self).none()

    def filter(self, *args, **kwargs):
        """Find items in the folder(s).

        Non-keyword args may be a list of Q instances.

        Optional extra keyword arguments follow a Django-like QuerySet filter syntax (see
           https://docs.djangoproject.com/en/1.10/ref/models/querysets/#field-lookups).

        We don't support '__year' and other date-related lookups. We also don't support '__endswith' or '__iendswith'.

        We support the additional '__not' lookup in place of Django's exclude() for simple cases. For more complicated
        cases you need to create a Q object and use ~Q().

        Examples:

            my_account.inbox.filter(datetime_received__gt=EWSDateTime(2016, 1, 1))
            my_account.calendar.filter(start__range=(EWSDateTime(2016, 1, 1), EWSDateTime(2017, 1, 1)))
            my_account.tasks.filter(subject='Hi mom')
            my_account.tasks.filter(subject__not='Hi mom')
            my_account.tasks.filter(subject__contains='Foo')
            my_account.tasks.filter(subject__icontains='foo')

        'endswith' and 'iendswith' could be emulated by searching with 'contains' or 'icontains' and then
        post-processing items. Fetch the field in question with additional_fields and remove items where the search
        string is not a postfix.
        """
        return QuerySet(self).filter(*args, **kwargs)

    def exclude(self, *args, **kwargs):
        return QuerySet(self).exclude(*args, **kwargs)

    def people(self):
        return QuerySet(self).people()

    def view(self, start, end, max_items=None, *args, **kwargs):
        """Implement the CalendarView option to FindItem. The difference between 'filter' and 'view' is that 'filter'
        only returns the master CalendarItem for recurring items, while 'view' unfolds recurring items and returns all
        CalendarItem occurrences as one would normally expect when presenting a calendar.

        Supports the same semantics as filter, except for 'start' and 'end' keyword attributes which are both required
        and behave differently than filter. Here, they denote the start and end of the timespan of the view. All items
        that overlap the timespan are returned (items that end exactly on 'start' are also returned, for some reason).

        EWS does not allow combining CalendarView with search restrictions (filter and exclude).

        'max_items' defines the maximum number of items returned in this view. Optional.

        :param start:
        :param end:
        :param max_items:  (Default value = None)
        :return:
        """
        qs = QuerySet(self).filter(*args, **kwargs)
        qs.calendar_view = CalendarView(start=start, end=end, max_items=max_items)
        return qs

    def allowed_item_fields(self):
        # Return non-ID fields of all item classes allowed in this folder type
        fields = set()
        for item_model in self.supported_item_models:
            fields.update(set(item_model.supported_fields(version=self.account.version)))
        return fields

    @property
    def supported_item_models(self):
        return tuple(item_model for folder in self.folders for item_model in folder.supported_item_models)

    def validate_item_field(self, field, version):
        # For each field, check if the field is valid for any of the item models supported by this folder
        for item_model in self.supported_item_models:
            try:
                item_model.validate_field(field=field, version=version)
                break
            except InvalidField:
                continue
        else:
            raise InvalidField("%r is not a valid field on %s" % (field, self.supported_item_models))

    def find_items(self, q, shape=ID_ONLY, depth=None, additional_fields=None, order_fields=None,
                   calendar_view=None, page_size=None, max_items=None, offset=0):
        """Private method to call the FindItem service.

        :param q: a Q instance containing any restrictions
        :param shape: controls whether to return (id, changekey) tuples or Item objects. If additional_fields is
          non-null, we always return Item objects. (Default value = ID_ONLY)
        :param depth: controls whether to return soft-deleted items. (Default value = None)
        :param additional_fields: the extra properties we want on the return objects. Default is no properties. Be aware
          that complex fields can only be fetched with fetch() (i.e. the GetItem service).
        :param order_fields: the SortOrder fields, if any (Default value = None)
        :param calendar_view: a CalendarView instance, if any (Default value = None)
        :param page_size: the requested number of items per page (Default value = None)
        :param max_items: the max number of items to return (Default value = None)
        :param offset: the offset relative to the first item in the item collection (Default value = 0)

        :return: a generator for the returned item IDs or items
        """
        if not self.folders:
            log.debug('Folder list is empty')
            return
        if q.is_never():
            log.debug('Query will never return results')
            return
        if shape not in SHAPE_CHOICES:
            raise ValueError("'shape' %s must be one of %s" % (shape, SHAPE_CHOICES))
        if depth is None:
            depth = self._get_default_item_traversal_depth()
        if depth not in ITEM_TRAVERSAL_CHOICES:
            raise ValueError("'depth' %s must be one of %s" % (depth, ITEM_TRAVERSAL_CHOICES))
        if additional_fields:
            for f in additional_fields:
                self.validate_item_field(field=f, version=self.account.version)
                if f.field.is_complex:
                    raise ValueError("find_items() does not support field '%s'. Use fetch() instead" % f.field.name)
        if calendar_view is not None and not isinstance(calendar_view, CalendarView):
            raise ValueError("'calendar_view' %s must be a CalendarView instance" % calendar_view)

        # Build up any restrictions
        if q.is_empty():
            restriction = None
            query_string = None
        elif q.query_string:
            restriction = None
            query_string = Restriction(q, folders=self.folders, applies_to=Restriction.ITEMS)
        else:
            restriction = Restriction(q, folders=self.folders, applies_to=Restriction.ITEMS)
            query_string = None
        log.debug(
            'Finding %s items in folders %s (shape: %s, depth: %s, additional_fields: %s, restriction: %s)',
            self.folders,
            self.account,
            shape,
            depth,
            additional_fields,
            restriction.q if restriction else None,
        )
        yield from FindItem(account=self.account, chunk_size=page_size).call(
            folders=self.folders,
            additional_fields=additional_fields,
            restriction=restriction,
            order_fields=order_fields,
            shape=shape,
            query_string=query_string,
            depth=depth,
            calendar_view=calendar_view,
            max_items=calendar_view.max_items if calendar_view else max_items,
            offset=offset,
        )

    def _get_single_folder(self):
        if len(self.folders) > 1:
            raise ValueError('Syncing folder hierarchy can only be done on a single folder')
        if not self.folders:
            log.debug('Folder list is empty')
            return None
        return self.folders[0]

    def find_people(self, q, shape=ID_ONLY, depth=None, additional_fields=None, order_fields=None,
                    page_size=None, max_items=None, offset=0):
        """Private method to call the FindPeople service.

        :param q: a Q instance containing any restrictions
        :param shape: controls whether to return (id, changekey) tuples or Persona objects. If additional_fields is
          non-null, we always return Persona objects. (Default value = ID_ONLY)
        :param depth: controls whether to return soft-deleted items. (Default value = None)
        :param additional_fields: the extra properties we want on the return objects. Default is no properties.
        :param order_fields: the SortOrder fields, if any (Default value = None)
        :param page_size: the requested number of items per page (Default value = None)
        :param max_items: the max number of items to return (Default value = None)
        :param offset: the offset relative to the first item in the item collection (Default value = 0)

        :return: a generator for the returned personas
        """
        folder = self._get_single_folder()
        if not folder:
            return
        if q.is_never():
            log.debug('Query will never return results')
            return
        if shape not in SHAPE_CHOICES:
            raise ValueError("'shape' %s must be one of %s" % (shape, SHAPE_CHOICES))
        if depth is None:
            depth = self._get_default_item_traversal_depth()
        if depth not in ITEM_TRAVERSAL_CHOICES:
            raise ValueError("'depth' %s must be one of %s" % (depth, ITEM_TRAVERSAL_CHOICES))
        if additional_fields:
            for f in additional_fields:
                Persona.validate_field(field=f, version=self.account.version)
                if f.field.is_complex:
                    raise ValueError("find_people() does not support field '%s'" % f.field.name)

        # Build up any restrictions
        if q.is_empty():
            restriction = None
            query_string = None
        elif q.query_string:
            restriction = None
            query_string = Restriction(q, folders=[folder], applies_to=Restriction.ITEMS)
        else:
            restriction = Restriction(q, folders=[folder], applies_to=Restriction.ITEMS)
            query_string = None
        yield from FindPeople(account=self.account, chunk_size=page_size).call(
                folder=[folder],
                additional_fields=additional_fields,
                restriction=restriction,
                order_fields=order_fields,
                shape=shape,
                query_string=query_string,
                depth=depth,
                max_items=max_items,
                offset=offset,
        )

    def get_folder_fields(self, target_cls, is_complex=None):
        return {
            FieldPath(field=f) for f in target_cls.supported_fields(version=self.account.version)
            if is_complex is None or f.is_complex is is_complex
        }

    def _get_target_cls(self):
        # We may have root folders that don't support the same set of fields as normal folders. If there is a mix of
        # both folder types in self.folders, raise an error so we don't risk losing some fields in the query.
        from .base import Folder
        from .roots import RootOfHierarchy
        has_roots = False
        has_non_roots = False
        for f in self.folders:
            if isinstance(f, RootOfHierarchy):
                if has_non_roots:
                    raise ValueError('Cannot call GetFolder on a mix of folder types: {}'.format(self.folders))
                has_roots = True
            else:
                if has_roots:
                    raise ValueError('Cannot call GetFolder on a mix of folder types: {}'.format(self.folders))
                has_non_roots = True
        return RootOfHierarchy if has_roots else Folder

    def _get_default_item_traversal_depth(self):
        # When searching folders, some folders require 'Shallow' and others 'Associated' traversal depth.
        unique_depths = {f.DEFAULT_ITEM_TRAVERSAL_DEPTH for f in self.folders}
        if len(unique_depths) == 1:
            return unique_depths.pop()
        raise ValueError(
            'Folders in this collection do not have a common DEFAULT_ITEM_TRAVERSAL_DEPTH value. You need to '
            'define an explicit traversal depth with QuerySet.depth() (values: %s)' % unique_depths
        )

    def _get_default_folder_traversal_depth(self):
        # When searching folders, some folders require 'Shallow' and others 'Deep' traversal depth.
        unique_depths = {f.DEFAULT_FOLDER_TRAVERSAL_DEPTH for f in self.folders}
        if len(unique_depths) == 1:
            return unique_depths.pop()
        raise ValueError(
            'Folders in this collection do not have a common DEFAULT_FOLDER_TRAVERSAL_DEPTH value. You need to '
            'define an explicit traversal depth with FolderQuerySet.depth() (values: %s)' % unique_depths
        )

    def resolve(self):
        # Looks up the folders or folder IDs in the collection and returns full Folder instances with all fields set.
        from .base import BaseFolder
        resolveable_folders = []
        for f in self.folders:
            if isinstance(f, BaseFolder) and not f.get_folder_allowed:
                log.debug('GetFolder not allowed on folder %s. Non-complex fields must be fetched with FindFolder', f)
                yield f
            else:
                resolveable_folders.append(f)
        # Fetch all properties for the remaining folders or folder IDs
        additional_fields = self.get_folder_fields(target_cls=self._get_target_cls(), is_complex=None)
        yield from self.__class__(account=self.account, folders=resolveable_folders).get_folders(
                additional_fields=additional_fields
        )

    @require_account
    def find_folders(self, q=None, shape=ID_ONLY, depth=None, additional_fields=None, page_size=None, max_items=None,
                     offset=0):
        # 'depth' controls whether to return direct children or recurse into sub-folders
        from .base import BaseFolder, Folder
        if q is None:
            q = Q()
        if not self.folders:
            log.debug('Folder list is empty')
            return
        if q.is_never():
            log.debug('Query will never return results')
            return
        if q.is_empty():
            restriction = None
        else:
            restriction = Restriction(q, folders=self.folders, applies_to=Restriction.FOLDERS)
        if shape not in SHAPE_CHOICES:
            raise ValueError("'shape' %s must be one of %s" % (shape, SHAPE_CHOICES))
        if depth is None:
            depth = self._get_default_folder_traversal_depth()
        if depth not in FOLDER_TRAVERSAL_CHOICES:
            raise ValueError("'depth' %s must be one of %s" % (depth, FOLDER_TRAVERSAL_CHOICES))
        if additional_fields is None:
            # Default to all non-complex properties. Subfolders will always be of class Folder
            additional_fields = self.get_folder_fields(target_cls=Folder, is_complex=False)
        else:
            for f in additional_fields:
                if f.field.is_complex:
                    raise ValueError("find_folders() does not support field '%s'. Use get_folders()." % f.field.name)

        # Add required fields
        additional_fields.update(
            (FieldPath(field=BaseFolder.get_field_by_fieldname(f)) for f in self.REQUIRED_FOLDER_FIELDS)
        )

        yield from FindFolder(account=self.account, chunk_size=page_size).call(
                folders=self.folders,
                additional_fields=additional_fields,
                restriction=restriction,
                shape=shape,
                depth=depth,
                max_items=max_items,
                offset=offset,
        )

    def get_folders(self, additional_fields=None):
        # Expand folders with their full set of properties
        from .base import BaseFolder
        if not self.folders:
            log.debug('Folder list is empty')
            return
        if additional_fields is None:
            # Default to all complex properties
            additional_fields = self.get_folder_fields(target_cls=self._get_target_cls(), is_complex=True)

        # Add required fields
        additional_fields.update(
            (FieldPath(field=BaseFolder.get_field_by_fieldname(f)) for f in self.REQUIRED_FOLDER_FIELDS)
        )

        yield from GetFolder(account=self.account).call(
                folders=self.folders,
                additional_fields=additional_fields,
                shape=ID_ONLY,
        )

    def subscribe_to_pull(self, event_types=SubscribeToPull.EVENT_TYPES, watermark=None, timeout=60):
        if not self.folders:
            log.debug('Folder list is empty')
            return
        yield from SubscribeToPull(account=self.account).call(
            folders=self.folders, event_types=event_types, watermark=watermark, timeout=timeout,
        )

    def subscribe_to_push(self, callback_url, event_types=SubscribeToPush.EVENT_TYPES, watermark=None,
                          status_frequency=1):
        if not self.folders:
            log.debug('Folder list is empty')
            return
        yield from SubscribeToPush(account=self.account).call(
            folders=self.folders, event_types=event_types, watermark=watermark, status_frequency=status_frequency,
            url=callback_url,
        )

    def subscribe_to_streaming(self, event_types=SubscribeToPush.EVENT_TYPES):
        if not self.folders:
            log.debug('Folder list is empty')
            return
        yield from SubscribeToStreaming(account=self.account).call(folders=self.folders, event_types=event_types)

    def sync_items(self, sync_state=None, only_fields=None, ignore=None, max_changes_returned=None, sync_scope=None):
        folder = self._get_single_folder()
        if not folder:
            return
        if only_fields is None:
            # We didn't restrict list of field paths. Get all fields from the server, including extended properties.
            additional_fields = {FieldPath(field=f) for f in folder.allowed_item_fields(version=self.account.version)}
        else:
            for field in only_fields:
                folder.validate_item_field(field=field, version=self.account.version)
            # Remove ItemId and ChangeKey. We get them unconditionally
            additional_fields = {f for f in folder.normalize_fields(fields=only_fields) if not f.field.is_attribute}

        svc = SyncFolderItems(account=self.account)
        while True:
            yield from svc.call(
                folder=folder,
                shape=ID_ONLY,
                additional_fields=additional_fields,
                sync_state=sync_state,
                ignore=ignore,
                max_changes_returned=max_changes_returned,
                sync_scope=sync_scope,
            )
            if svc.sync_state == sync_state:
                # We sometimes get the same sync_state back, even though includes_last_item_in_range is False. Stop here
                break
            sync_state = svc.sync_state  # Use the new sync state in the next call
            if svc.includes_last_item_in_range:  # Stop when the server says there are no more items
                break
        raise SyncCompleted(sync_state=svc.sync_state)

    def sync_hierarchy(self, sync_state=None, only_fields=None):
        folder = self._get_single_folder()
        if not folder:
            return
        if only_fields is None:
            # We didn't restrict list of field paths. Get all fields from the server, including extended properties.
            additional_fields = {FieldPath(field=f) for f in folder.supported_fields(version=self.account.version)}
        else:
            for f in only_fields:
                folder.validate_field(field=f, version=self.account.version)
            # Remove ItemId and ChangeKey. We get them unconditionally
            additional_fields = {f for f in folder.normalize_fields(fields=only_fields) if not f.field.is_attribute}

        # Add required fields
        additional_fields.update(
            (FieldPath(field=folder.get_field_by_fieldname(f)) for f in self.REQUIRED_FOLDER_FIELDS)
        )

        svc = SyncFolderHierarchy(account=self.account)
        while True:
            yield from svc.call(
                folder=folder,
                shape=ID_ONLY,
                additional_fields=additional_fields,
                sync_state=sync_state,
            )
            if svc.sync_state == sync_state:
                # We sometimes get the same sync_state back, even though includes_last_item_in_range is False. Stop here
                break
            sync_state = svc.sync_state  # Use the new sync state in the next call
            if svc.includes_last_item_in_range:  # Stop when the server says there are no more items
                break
        raise SyncCompleted(sync_state=svc.sync_state)

Ancestors

Class variables

var REQUIRED_FOLDER_FIELDS

Instance variables

var folders
Expand source code
def __get__(self, obj, cls):
    if obj is None:
        return self

    obj_dict = obj.__dict__
    name = self.func.__name__
    with self.lock:
        try:
            # check if the value was computed before the lock was acquired
            return obj_dict[name]

        except KeyError:
            # if not, do the calculation and release the lock
            return obj_dict.setdefault(name, self.func(obj))
var supported_item_models
Expand source code
@property
def supported_item_models(self):
    return tuple(item_model for folder in self.folders for item_model in folder.supported_item_models)

Methods

def all(self)
Expand source code
def all(self):
    return QuerySet(self).all()
def allowed_item_fields(self)
Expand source code
def allowed_item_fields(self):
    # Return non-ID fields of all item classes allowed in this folder type
    fields = set()
    for item_model in self.supported_item_models:
        fields.update(set(item_model.supported_fields(version=self.account.version)))
    return fields
def exclude(self, *args, **kwargs)
Expand source code
def exclude(self, *args, **kwargs):
    return QuerySet(self).exclude(*args, **kwargs)
def filter(self, *args, **kwargs)

Find items in the folder(s).

Non-keyword args may be a list of Q instances.

Optional extra keyword arguments follow a Django-like QuerySet filter syntax (see https://docs.djangoproject.com/en/1.10/ref/models/querysets/#field-lookups).

We don't support '__year' and other date-related lookups. We also don't support '__endswith' or '__iendswith'.

We support the additional '__not' lookup in place of Django's exclude() for simple cases. For more complicated cases you need to create a Q object and use ~Q().

Examples

my_account.inbox.filter(datetime_received__gt=EWSDateTime(2016, 1, 1))
my_account.calendar.filter(start__range=(EWSDateTime(2016, 1, 1), EWSDateTime(2017, 1, 1)))
my_account.tasks.filter(subject='Hi mom')
my_account.tasks.filter(subject__not='Hi mom')
my_account.tasks.filter(subject__contains='Foo')
my_account.tasks.filter(subject__icontains='foo')

'endswith' and 'iendswith' could be emulated by searching with 'contains' or 'icontains' and then post-processing items. Fetch the field in question with additional_fields and remove items where the search string is not a postfix.
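
A hedged sketch of that post-processing approach, assuming an existing Account instance named my_account (the search string is a placeholder):

# EWS has no 'endswith' lookup, so search with 'icontains' and post-process.
candidates = my_account.inbox.filter(subject__icontains='report')
# Keep only items whose subject actually ends with the search string.
items = [item for item in candidates if item.subject and item.subject.lower().endswith('report')]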

Expand source code
def filter(self, *args, **kwargs):
    """Find items in the folder(s).

    Non-keyword args may be a list of Q instances.

    Optional extra keyword arguments follow a Django-like QuerySet filter syntax (see
       https://docs.djangoproject.com/en/1.10/ref/models/querysets/#field-lookups).

    We don't support '__year' and other date-related lookups. We also don't support '__endswith' or '__iendswith'.

    We support the additional '__not' lookup in place of Django's exclude() for simple cases. For more complicated
    cases you need to create a Q object and use ~Q().

    Examples:

        my_account.inbox.filter(datetime_received__gt=EWSDateTime(2016, 1, 1))
        my_account.calendar.filter(start__range=(EWSDateTime(2016, 1, 1), EWSDateTime(2017, 1, 1)))
        my_account.tasks.filter(subject='Hi mom')
        my_account.tasks.filter(subject__not='Hi mom')
        my_account.tasks.filter(subject__contains='Foo')
        my_account.tasks.filter(subject__icontains='foo')

    'endswith' and 'iendswith' could be emulated by searching with 'contains' or 'icontains' and then
    post-processing items. Fetch the field in question with additional_fields and remove items where the search
    string is not a postfix.
    """
    return QuerySet(self).filter(*args, **kwargs)
def find_folders(self, q=None, shape='IdOnly', depth=None, additional_fields=None, page_size=None, max_items=None, offset=0)
Expand source code
@require_account
def find_folders(self, q=None, shape=ID_ONLY, depth=None, additional_fields=None, page_size=None, max_items=None,
                 offset=0):
    # 'depth' controls whether to return direct children or recurse into sub-folders
    from .base import BaseFolder, Folder
    if q is None:
        q = Q()
    if not self.folders:
        log.debug('Folder list is empty')
        return
    if q.is_never():
        log.debug('Query will never return results')
        return
    if q.is_empty():
        restriction = None
    else:
        restriction = Restriction(q, folders=self.folders, applies_to=Restriction.FOLDERS)
    if shape not in SHAPE_CHOICES:
        raise ValueError("'shape' %s must be one of %s" % (shape, SHAPE_CHOICES))
    if depth is None:
        depth = self._get_default_folder_traversal_depth()
    if depth not in FOLDER_TRAVERSAL_CHOICES:
        raise ValueError("'depth' %s must be one of %s" % (depth, FOLDER_TRAVERSAL_CHOICES))
    if additional_fields is None:
        # Default to all non-complex properties. Subfolders will always be of class Folder
        additional_fields = self.get_folder_fields(target_cls=Folder, is_complex=False)
    else:
        for f in additional_fields:
            if f.field.is_complex:
                raise ValueError("find_folders() does not support field '%s'. Use get_folders()." % f.field.name)

    # Add required fields
    additional_fields.update(
        (FieldPath(field=BaseFolder.get_field_by_fieldname(f)) for f in self.REQUIRED_FOLDER_FIELDS)
    )

    yield from FindFolder(account=self.account, chunk_size=page_size).call(
            folders=self.folders,
            additional_fields=additional_fields,
            restriction=restriction,
            shape=shape,
            depth=depth,
            max_items=max_items,
            offset=offset,
    )
def find_items(self, q, shape='IdOnly', depth=None, additional_fields=None, order_fields=None, calendar_view=None, page_size=None, max_items=None, offset=0)

Private method to call the FindItem service.

:param q: a Q instance containing any restrictions
:param shape: controls whether to return (id, changekey) tuples or Item objects. If additional_fields is non-null, we always return Item objects. (Default value = ID_ONLY)
:param depth: controls whether to return soft-deleted items or not. (Default value = None)
:param additional_fields: the extra properties we want on the return objects. Default is no properties. Be aware that complex fields can only be fetched with fetch() (i.e. the GetItem service).
:param order_fields: the SortOrder fields, if any (Default value = None)
:param calendar_view: a CalendarView instance, if any (Default value = None)
:param page_size: the requested number of items per page (Default value = None)
:param max_items: the max number of items to return (Default value = None)
:param offset: the offset relative to the first item in the item collection (Default value = 0)

:return: a generator for the returned item IDs or items

Expand source code
def find_items(self, q, shape=ID_ONLY, depth=None, additional_fields=None, order_fields=None,
               calendar_view=None, page_size=None, max_items=None, offset=0):
    """Private method to call the FindItem service.

    :param q: a Q instance containing any restrictions
    :param shape: controls whether to return (id, changekey) tuples or Item objects. If additional_fields is
      non-null, we always return Item objects. (Default value = ID_ONLY)
    :param depth: controls whether to return soft-deleted items or not. (Default value = None)
    :param additional_fields: the extra properties we want on the return objects. Default is no properties. Be aware
      that complex fields can only be fetched with fetch() (i.e. the GetItem service).
    :param order_fields: the SortOrder fields, if any (Default value = None)
    :param calendar_view: a CalendarView instance, if any (Default value = None)
    :param page_size: the requested number of items per page (Default value = None)
    :param max_items: the max number of items to return (Default value = None)
    :param offset: the offset relative to the first item in the item collection (Default value = 0)

    :return: a generator for the returned item IDs or items
    """
    if not self.folders:
        log.debug('Folder list is empty')
        return
    if q.is_never():
        log.debug('Query will never return results')
        return
    if shape not in SHAPE_CHOICES:
        raise ValueError("'shape' %s must be one of %s" % (shape, SHAPE_CHOICES))
    if depth is None:
        depth = self._get_default_item_traversal_depth()
    if depth not in ITEM_TRAVERSAL_CHOICES:
        raise ValueError("'depth' %s must be one of %s" % (depth, ITEM_TRAVERSAL_CHOICES))
    if additional_fields:
        for f in additional_fields:
            self.validate_item_field(field=f, version=self.account.version)
            if f.field.is_complex:
                raise ValueError("find_items() does not support field '%s'. Use fetch() instead" % f.field.name)
    if calendar_view is not None and not isinstance(calendar_view, CalendarView):
        raise ValueError("'calendar_view' %s must be a CalendarView instance" % calendar_view)

    # Build up any restrictions
    if q.is_empty():
        restriction = None
        query_string = None
    elif q.query_string:
        restriction = None
        query_string = Restriction(q, folders=self.folders, applies_to=Restriction.ITEMS)
    else:
        restriction = Restriction(q, folders=self.folders, applies_to=Restriction.ITEMS)
        query_string = None
    log.debug(
        'Finding %s items in folders %s (shape: %s, depth: %s, additional_fields: %s, restriction: %s)',
        self.folders,
        self.account,
        shape,
        depth,
        additional_fields,
        restriction.q if restriction else None,
    )
    yield from FindItem(account=self.account, chunk_size=page_size).call(
        folders=self.folders,
        additional_fields=additional_fields,
        restriction=restriction,
        order_fields=order_fields,
        shape=shape,
        query_string=query_string,
        depth=depth,
        calendar_view=calendar_view,
        max_items=calendar_view.max_items if calendar_view else max_items,
        offset=offset,
    )
def find_people(self, q, shape='IdOnly', depth=None, additional_fields=None, order_fields=None, page_size=None, max_items=None, offset=0)

Private method to call the FindPeople service.

:param q: a Q instance containing any restrictions
:param shape: controls whether to return (id, changekey) tuples or Persona objects. If additional_fields is non-null, we always return Persona objects. (Default value = ID_ONLY)
:param depth: controls whether to return soft-deleted items or not. (Default value = None)
:param additional_fields: the extra properties we want on the return objects. Default is no properties.
:param order_fields: the SortOrder fields, if any (Default value = None)
:param page_size: the requested number of items per page (Default value = None)
:param max_items: the max number of items to return (Default value = None)
:param offset: the offset relative to the first item in the item collection (Default value = 0)

:return: a generator for the returned personas

Expand source code
def find_people(self, q, shape=ID_ONLY, depth=None, additional_fields=None, order_fields=None,
                page_size=None, max_items=None, offset=0):
    """Private method to call the FindPeople service.

    :param q: a Q instance containing any restrictions
    :param shape: controls whether to return (id, changekey) tuples or Persona objects. If additional_fields is
      non-null, we always return Persona objects. (Default value = ID_ONLY)
    :param depth: controls whether to return soft-deleted items or not. (Default value = None)
    :param additional_fields: the extra properties we want on the return objects. Default is no properties.
    :param order_fields: the SortOrder fields, if any (Default value = None)
    :param page_size: the requested number of items per page (Default value = None)
    :param max_items: the max number of items to return (Default value = None)
    :param offset: the offset relative to the first item in the item collection (Default value = 0)

    :return: a generator for the returned personas
    """
    folder = self._get_single_folder()
    if not folder:
        return
    if q.is_never():
        log.debug('Query will never return results')
        return
    if shape not in SHAPE_CHOICES:
        raise ValueError("'shape' %s must be one of %s" % (shape, SHAPE_CHOICES))
    if depth is None:
        depth = self._get_default_item_traversal_depth()
    if depth not in ITEM_TRAVERSAL_CHOICES:
        raise ValueError("'depth' %s must be one of %s" % (depth, ITEM_TRAVERSAL_CHOICES))
    if additional_fields:
        for f in additional_fields:
            Persona.validate_field(field=f, version=self.account.version)
            if f.field.is_complex:
                raise ValueError("find_people() does not support field '%s'" % f.field.name)

    # Build up any restrictions
    if q.is_empty():
        restriction = None
        query_string = None
    elif q.query_string:
        restriction = None
        query_string = Restriction(q, folders=[folder], applies_to=Restriction.ITEMS)
    else:
        restriction = Restriction(q, folders=[folder], applies_to=Restriction.ITEMS)
        query_string = None
    yield from FindPeople(account=self.account, chunk_size=page_size).call(
            folder=[folder],
            additional_fields=additional_fields,
            restriction=restriction,
            order_fields=order_fields,
            shape=shape,
            query_string=query_string,
            depth=depth,
            max_items=max_items,
            offset=offset,
    )
def get(self, *args, **kwargs)
Expand source code
def get(self, *args, **kwargs):
    return QuerySet(self).get(*args, **kwargs)
def get_folder_fields(self, target_cls, is_complex=None)
Expand source code
def get_folder_fields(self, target_cls, is_complex=None):
    return {
        FieldPath(field=f) for f in target_cls.supported_fields(version=self.account.version)
        if is_complex is None or f.is_complex is is_complex
    }
def get_folders(self, additional_fields=None)
Expand source code
def get_folders(self, additional_fields=None):
    # Expand folders with their full set of properties
    from .base import BaseFolder
    if not self.folders:
        log.debug('Folder list is empty')
        return
    if additional_fields is None:
        # Default to all complex properties
        additional_fields = self.get_folder_fields(target_cls=self._get_target_cls(), is_complex=True)

    # Add required fields
    additional_fields.update(
        (FieldPath(field=BaseFolder.get_field_by_fieldname(f)) for f in self.REQUIRED_FOLDER_FIELDS)
    )

    yield from GetFolder(account=self.account).call(
            folders=self.folders,
            additional_fields=additional_fields,
            shape=ID_ONLY,
    )
def none(self)
Expand source code
def none(self):
    return QuerySet(self).none()
def people(self)
Expand source code
def people(self):
    return QuerySet(self).people()
def resolve(self)
Expand source code
def resolve(self):
    # Looks up the folders or folder IDs in the collection and returns full Folder instances with all fields set.
    from .base import BaseFolder
    resolveable_folders = []
    for f in self.folders:
        if isinstance(f, BaseFolder) and not f.get_folder_allowed:
            log.debug('GetFolder not allowed on folder %s. Non-complex fields must be fetched with FindFolder', f)
            yield f
        else:
            resolveable_folders.append(f)
    # Fetch all properties for the remaining folders or folder IDs
    additional_fields = self.get_folder_fields(target_cls=self._get_target_cls(), is_complex=None)
    yield from self.__class__(account=self.account, folders=resolveable_folders).get_folders(
            additional_fields=additional_fields
    )
def subscribe_to_pull(self, event_types=('CopiedEvent', 'CreatedEvent', 'DeletedEvent', 'ModifiedEvent', 'MovedEvent', 'NewMailEvent', 'FreeBusyChangedEvent'), watermark=None, timeout=60)
Expand source code
def subscribe_to_pull(self, event_types=SubscribeToPull.EVENT_TYPES, watermark=None, timeout=60):
    if not self.folders:
        log.debug('Folder list is empty')
        return
    yield from SubscribeToPull(account=self.account).call(
        folders=self.folders, event_types=event_types, watermark=watermark, timeout=timeout,
    )
def subscribe_to_push(self, callback_url, event_types=('CopiedEvent', 'CreatedEvent', 'DeletedEvent', 'ModifiedEvent', 'MovedEvent', 'NewMailEvent', 'FreeBusyChangedEvent'), watermark=None, status_frequency=1)
Expand source code
def subscribe_to_push(self, callback_url, event_types=SubscribeToPush.EVENT_TYPES, watermark=None,
                      status_frequency=1):
    if not self.folders:
        log.debug('Folder list is empty')
        return
    yield from SubscribeToPush(account=self.account).call(
        folders=self.folders, event_types=event_types, watermark=watermark, status_frequency=status_frequency,
        url=callback_url,
    )
def subscribe_to_streaming(self, event_types=('CopiedEvent', 'CreatedEvent', 'DeletedEvent', 'ModifiedEvent', 'MovedEvent', 'NewMailEvent', 'FreeBusyChangedEvent'))
Expand source code
def subscribe_to_streaming(self, event_types=SubscribeToPush.EVENT_TYPES):
    if not self.folders:
        log.debug('Folder list is empty')
        return
    yield from SubscribeToStreaming(account=self.account).call(folders=self.folders, event_types=event_types)
def sync_hierarchy(self, sync_state=None, only_fields=None)
Expand source code
def sync_hierarchy(self, sync_state=None, only_fields=None):
    folder = self._get_single_folder()
    if not folder:
        return
    if only_fields is None:
        # We didn't restrict list of field paths. Get all fields from the server, including extended properties.
        additional_fields = {FieldPath(field=f) for f in folder.supported_fields(version=self.account.version)}
    else:
        for f in only_fields:
            folder.validate_field(field=f, version=self.account.version)
        # Remove ItemId and ChangeKey. We get them unconditionally
        additional_fields = {f for f in folder.normalize_fields(fields=only_fields) if not f.field.is_attribute}

    # Add required fields
    additional_fields.update(
        (FieldPath(field=folder.get_field_by_fieldname(f)) for f in self.REQUIRED_FOLDER_FIELDS)
    )

    svc = SyncFolderHierarchy(account=self.account)
    while True:
        yield from svc.call(
            folder=folder,
            shape=ID_ONLY,
            additional_fields=additional_fields,
            sync_state=sync_state,
        )
        if svc.sync_state == sync_state:
            # We sometimes get the same sync_state back, even though includes_last_item_in_range is False. Stop here
            break
        sync_state = svc.sync_state  # Use the new sync state in the next call
        if svc.includes_last_item_in_range:  # Stop when the server says there are no more items
            break
    raise SyncCompleted(sync_state=svc.sync_state)
def sync_items(self, sync_state=None, only_fields=None, ignore=None, max_changes_returned=None, sync_scope=None)
Expand source code
def sync_items(self, sync_state=None, only_fields=None, ignore=None, max_changes_returned=None, sync_scope=None):
    folder = self._get_single_folder()
    if not folder:
        return
    if only_fields is None:
        # We didn't restrict list of field paths. Get all fields from the server, including extended properties.
        additional_fields = {FieldPath(field=f) for f in folder.allowed_item_fields(version=self.account.version)}
    else:
        for field in only_fields:
            folder.validate_item_field(field=field, version=self.account.version)
        # Remove ItemId and ChangeKey. We get them unconditionally
        additional_fields = {f for f in folder.normalize_fields(fields=only_fields) if not f.field.is_attribute}

    svc = SyncFolderItems(account=self.account)
    while True:
        yield from svc.call(
            folder=folder,
            shape=ID_ONLY,
            additional_fields=additional_fields,
            sync_state=sync_state,
            ignore=ignore,
            max_changes_returned=max_changes_returned,
            sync_scope=sync_scope,
        )
        if svc.sync_state == sync_state:
            # We sometimes get the same sync_state back, even though includes_last_item_in_range is False. Stop here
            break
        sync_state = svc.sync_state  # Use the new sync state in the next call
        if svc.includes_last_item_in_range:  # Stop when the server says there are no more items
            break
    raise SyncCompleted(sync_state=svc.sync_state)
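
A minimal usage sketch, assuming an existing Account named my_account. The generator signals completion by raising SyncCompleted, which is constructed with the new sync state; the import path of SyncCompleted and the exact shape of the yielded changes are assumptions that may vary between versions:

from exchangelib import FolderCollection
# NOTE: the import path of SyncCompleted is an assumption; adjust to where your version defines it.
from exchangelib.errors import SyncCompleted

collection = FolderCollection(account=my_account, folders=[my_account.inbox])
sync_state = None  # Pass a previously stored state to receive only incremental changes
try:
    for change in collection.sync_items(sync_state=sync_state):
        print(change)  # Typically a (change_type, item) pair
except SyncCompleted as e:
    sync_state = e.sync_state  # Persist this value for the next sync run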
def validate_item_field(self, field, version)
Expand source code
def validate_item_field(self, field, version):
    # For each field, check if the field is valid for any of the item models supported by this folder
    for item_model in self.supported_item_models:
        try:
            item_model.validate_field(field=field, version=version)
            break
        except InvalidField:
            continue
    else:
        raise InvalidField("%r is not a valid field on %s" % (field, self.supported_item_models))
def view(self, start, end, max_items=None, *args, **kwargs)

Implement the CalendarView option to FindItem. The difference between 'filter' and 'view' is that 'filter' only returns the master CalendarItem for recurring items, while 'view' unfolds recurring items and returns all CalendarItem occurrences as one would normally expect when presenting a calendar.

Supports the same semantics as filter, except for 'start' and 'end' keyword attributes which are both required and behave differently than filter. Here, they denote the start and end of the timespan of the view. All items that overlap the timespan are returned (items that end exactly on 'start' are also returned, for some reason).

EWS does not allow combining CalendarView with search restrictions (filter and exclude).

'max_items' defines the maximum number of items returned in this view. Optional.

:param start:
:param end:
:param max_items: (Default value = None)
:return:

Expand source code
def view(self, start, end, max_items=None, *args, **kwargs):
    """Implement the CalendarView option to FindItem. The difference between 'filter' and 'view' is that 'filter'
    only returns the master CalendarItem for recurring items, while 'view' unfolds recurring items and returns all
    CalendarItem occurrences as one would normally expect when presenting a calendar.

    Supports the same semantics as filter, except for 'start' and 'end' keyword attributes which are both required
    and behave differently than filter. Here, they denote the start and end of the timespan of the view. All items
    that overlap the timespan are returned (items that end exactly on 'start' are also returned, for some reason).

    EWS does not allow combining CalendarView with search restrictions (filter and exclude).

    'max_items' defines the maximum number of items returned in this view. Optional.

    :param start:
    :param end:
    :param max_items:  (Default value = None)
    :return:
    """
    qs = QuerySet(self).filter(*args, **kwargs)
    qs.calendar_view = CalendarView(start=start, end=end, max_items=max_items)
    return qs
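
A brief usage sketch of view(), following the same conventions as the filter() examples above (my_account is an existing Account; the dates are placeholders):

# Return every CalendarItem occurrence overlapping January 2021, with recurring
# items unfolded into individual occurrences.
for occurrence in my_account.calendar.view(start=EWSDateTime(2021, 1, 1), end=EWSDateTime(2021, 2, 1)):
    print(occurrence.subject, occurrence.start, occurrence.end)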
class ForwardItem (**kwargs)
Expand source code
class ForwardItem(BaseReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/forwarditem"""

    ELEMENT_NAME = 'ForwardItem'

Ancestors

Class variables

var ELEMENT_NAME

Inherited members

class HTMLBody (...)

Helper to mark the 'body' field as a complex attribute.

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/body

Expand source code
class HTMLBody(Body):
    """Helper to mark the 'body' field as a complex attribute.

    MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/body
    """

    body_type = 'HTML'

Ancestors

Class variables

var body_type

Inherited members

class Identity (primary_smtp_address=None, smtp_address=None, upn=None, sid=None)

Contains information that uniquely identifies an account. Currently only used for SOAP impersonation headers.

:param primary_smtp_address: The primary email address associated with the account (Default value = None)
:param smtp_address: The (non-)primary email address associated with the account (Default value = None)
:param upn: (Default value = None)
:param sid: (Default value = None)
:return:

Expand source code
class Identity:
    """Contains information that uniquely identifies an account. Currently only used for SOAP impersonation headers."""

    def __init__(self, primary_smtp_address=None, smtp_address=None, upn=None, sid=None):
        """

        :param primary_smtp_address: The primary email address associated with the account (Default value = None)
        :param smtp_address: The (non-)primary email address associated with the account (Default value = None)
        :param upn: (Default value = None)
        :param sid: (Default value = None)
        :return:
        """
        self.primary_smtp_address = primary_smtp_address
        self.smtp_address = smtp_address
        self.upn = upn
        self.sid = sid

    def __eq__(self, other):
        for k in self.__dict__:
            if getattr(self, k) != getattr(other, k):
                return False
        return True

    def __hash__(self):
        return hash(repr(self))

    def __repr__(self):
        return self.__class__.__name__ + repr((self.primary_smtp_address, self.smtp_address, self.upn, self.sid))
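
A hedged sketch of how Identity is typically combined with OAuth2Credentials and IMPERSONATION access (all values below are placeholders):

from exchangelib import Account, Configuration, Identity, IMPERSONATION, OAuth2Credentials

credentials = OAuth2Credentials(
    client_id='CLIENT_ID', client_secret='CLIENT_SECRET', tenant_id='TENANT_ID',
    identity=Identity(primary_smtp_address='user@example.com'),  # the account being impersonated
)
config = Configuration(credentials=credentials, server='outlook.office365.com')
account = Account('user@example.com', config=config, autodiscover=False, access_type=IMPERSONATION)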
class ItemAttachment (**kwargs)
Expand source code
class ItemAttachment(Attachment):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/itemattachment"""

    ELEMENT_NAME = 'ItemAttachment'

    _item = ItemField(field_uri='Item')

    def __init__(self, **kwargs):
        kwargs['_item'] = kwargs.pop('item', None)
        super().__init__(**kwargs)

    @property
    def item(self):
        from .folders import BaseFolder
        if self.attachment_id is None:
            return self._item
        if self._item is not None:
            return self._item
        # We have an ID to the data but still haven't called GetAttachment to get the actual data. Do that now.
        if not self.parent_item or not self.parent_item.account:
            raise ValueError('%s must have an account' % self.__class__.__name__)
        additional_fields = {
            FieldPath(field=f) for f in BaseFolder.allowed_item_fields(version=self.parent_item.account.version)
        }
        attachment = GetAttachment(account=self.parent_item.account).get(
            items=[self.attachment_id], include_mime_content=True, body_type=None, filter_html_content=None,
            additional_fields=additional_fields,
        )
        self._item = attachment.item
        return self._item

    @item.setter
    def item(self, value):
        from .items import Item
        if not isinstance(value, Item):
            raise ValueError("'value' %r must be an Item object" % value)
        self._item = value

    @classmethod
    def from_xml(cls, elem, account):
        kwargs = {f.name: f.from_xml(elem=elem, account=account) for f in cls.FIELDS}
        kwargs['item'] = kwargs.pop('_item')
        cls._clear(elem)
        return cls(**kwargs)
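
A minimal sketch of attaching an existing message to a new draft, assuming my_account is an existing Account and msg is a previously fetched Message (the recipient address is a placeholder):

from exchangelib import ItemAttachment, Message

attachment = ItemAttachment(name='Original message', item=msg)
draft = Message(account=my_account, folder=my_account.drafts, subject='FYI',
                to_recipients=['recipient@example.com'], attachments=[attachment])
draft.save()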

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Static methods

def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    kwargs = {f.name: f.from_xml(elem=elem, account=account) for f in cls.FIELDS}
    kwargs['item'] = kwargs.pop('_item')
    cls._clear(elem)
    return cls(**kwargs)

Instance variables

var item
Expand source code
@property
def item(self):
    from .folders import BaseFolder
    if self.attachment_id is None:
        return self._item
    if self._item is not None:
        return self._item
    # We have an ID to the data but still haven't called GetAttachment to get the actual data. Do that now.
    if not self.parent_item or not self.parent_item.account:
        raise ValueError('%s must have an account' % self.__class__.__name__)
    additional_fields = {
        FieldPath(field=f) for f in BaseFolder.allowed_item_fields(version=self.parent_item.account.version)
    }
    attachment = GetAttachment(account=self.parent_item.account).get(
        items=[self.attachment_id], include_mime_content=True, body_type=None, filter_html_content=None,
        additional_fields=additional_fields,
    )
    self._item = attachment.item
    return self._item

Inherited members

class ItemId (*args, **kwargs)

'id' and 'changekey' are UUIDs generated by Exchange.

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/itemid

Expand source code
class ItemId(BaseItemId):
    """'id' and 'changekey' are UUIDs generated by Exchange.

    MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/itemid
    """

    ELEMENT_NAME = 'ItemId'
    ID_ATTR = 'Id'
    CHANGEKEY_ATTR = 'ChangeKey'

    id = IdField(field_uri=ID_ATTR, is_required=True)
    changekey = IdField(field_uri=CHANGEKEY_ATTR, is_required=False)

Ancestors

Subclasses

Class variables

var CHANGEKEY_ATTR
var ELEMENT_NAME
var FIELDS
var ID_ATTR

Instance variables

var changekey
var id

Inherited members

class Mailbox (**kwargs)
Expand source code
class Mailbox(EWSElement):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/mailbox"""

    ELEMENT_NAME = 'Mailbox'
    MAILBOX = 'Mailbox'
    ONE_OFF = 'OneOff'
    MAILBOX_TYPE_CHOICES = {
            Choice(MAILBOX), Choice('PublicDL'), Choice('PrivateDL'), Choice('Contact'), Choice('PublicFolder'),
            Choice('Unknown'), Choice(ONE_OFF), Choice('GroupMailbox', supported_from=EXCHANGE_2013)
        }

    name = TextField(field_uri='Name')
    email_address = EmailAddressField(field_uri='EmailAddress')
    # RoutingType values are not restricted:
    # https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/routingtype-emailaddresstype
    routing_type = TextField(field_uri='RoutingType', default='SMTP')
    mailbox_type = ChoiceField(field_uri='MailboxType', choices=MAILBOX_TYPE_CHOICES, default=MAILBOX)
    item_id = EWSElementField(value_cls=ItemId, is_read_only=True)

    def clean(self, version=None):
        super().clean(version=version)

        if self.mailbox_type != self.ONE_OFF and not self.email_address and not self.item_id:
            # A OneOff Mailbox (a one-off member of a personal distribution list) may lack these fields, but other
            # Mailboxes require at least one. See also "Remarks" section of
            # https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/mailbox
            raise ValueError("Mailbox type %r must have either 'email_address' or 'item_id' set" % self.mailbox_type)

    def __hash__(self):
        # Exchange may add 'mailbox_type' and 'name' on insert. We're satisfied if the item_id or email address matches.
        if self.item_id:
            return hash(self.item_id)
        if self.email_address:
            return hash(self.email_address.lower())
        return super().__hash__()
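
A short sketch: Mailbox instances can be passed wherever a recipient is expected, e.g. in Message.to_recipients (the address is a placeholder and my_account is an existing Account):

from exchangelib import Mailbox, Message

recipient = Mailbox(name='Some Recipient', email_address='recipient@example.com')
m = Message(account=my_account, subject='Hello', to_recipients=[recipient])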

Ancestors

Subclasses

Class variables

var ELEMENT_NAME
var FIELDS
var MAILBOX
var MAILBOX_TYPE_CHOICES
var ONE_OFF

Instance variables

var email_address
var item_id
var mailbox_type
var name
var routing_type

Methods

def clean(self, version=None)
Expand source code
def clean(self, version=None):
    super().clean(version=version)

    if self.mailbox_type != self.ONE_OFF and not self.email_address and not self.item_id:
        # A OneOff Mailbox (a one-off member of a personal distribution list) may lack these fields, but other
        # Mailboxes require at least one. See also "Remarks" section of
        # https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/mailbox
        raise ValueError("Mailbox type %r must have either 'email_address' or 'item_id' set" % self.mailbox_type)

Inherited members

class Message (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/message-ex15websvcsotherref

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs: 'account' is optional but allows calling 'send()' and 'delete()'. 'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.
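
A minimal sketch of composing and sending a message, assuming an existing Account named my_account (the recipient address is a placeholder):

from exchangelib import HTMLBody, Message

m = Message(
    account=my_account,
    folder=my_account.sent,  # keep the sent copy in the Sent folder
    subject='Status update',
    body=HTMLBody('<html><body>All systems are up.</body></html>'),
    to_recipients=['recipient@example.com'],
)
m.send_and_save()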

Expand source code
class Message(Item):
    """MSDN:
    https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/message-ex15websvcsotherref
    """

    ELEMENT_NAME = 'Message'

    sender = MailboxField(field_uri='message:Sender', is_read_only=True, is_read_only_after_send=True)
    to_recipients = MailboxListField(field_uri='message:ToRecipients', is_read_only_after_send=True,
                                     is_searchable=False)
    cc_recipients = MailboxListField(field_uri='message:CcRecipients', is_read_only_after_send=True,
                                     is_searchable=False)
    bcc_recipients = MailboxListField(field_uri='message:BccRecipients', is_read_only_after_send=True,
                                      is_searchable=False)
    is_read_receipt_requested = BooleanField(field_uri='message:IsReadReceiptRequested',
                                             is_required=True, default=False, is_read_only_after_send=True)
    is_delivery_receipt_requested = BooleanField(field_uri='message:IsDeliveryReceiptRequested', is_required=True,
                                                 default=False, is_read_only_after_send=True)
    conversation_index = Base64Field(field_uri='message:ConversationIndex', is_read_only=True)
    conversation_topic = CharField(field_uri='message:ConversationTopic', is_read_only=True)
    # Rename 'From' to 'author'. We can't use fieldname 'from' since it's a Python keyword.
    author = MailboxField(field_uri='message:From', is_read_only_after_send=True)
    message_id = CharField(field_uri='message:InternetMessageId', is_read_only_after_send=True)
    is_read = BooleanField(field_uri='message:IsRead', is_required=True, default=False)
    is_response_requested = BooleanField(field_uri='message:IsResponseRequested', default=False, is_required=True)
    references = TextField(field_uri='message:References')
    reply_to = MailboxListField(field_uri='message:ReplyTo', is_read_only_after_send=True, is_searchable=False)
    received_by = MailboxField(field_uri='message:ReceivedBy', is_read_only=True)
    received_representing = MailboxField(field_uri='message:ReceivedRepresenting', is_read_only=True)
    reminder_message_data = EWSElementField(field_uri='message:ReminderMessageData', value_cls=ReminderMessageData,
                                            supported_from=EXCHANGE_2013_SP1, is_read_only=True)

    @require_account
    def send(self, save_copy=True, copy_to_folder=None, conflict_resolution=AUTO_RESOLVE,
             send_meeting_invitations=SEND_TO_NONE):
        # Only sends a message. The message can either be an existing draft stored in EWS or a new message that does
        # not yet exist in EWS.
        if copy_to_folder and not save_copy:
            raise AttributeError("'save_copy' must be True when 'copy_to_folder' is set")
        if save_copy and not copy_to_folder:
            copy_to_folder = self.account.sent  # 'Sent' is default EWS behaviour
        if self.id:
            SendItem(account=self.account).get(items=[self], saved_item_folder=copy_to_folder)
            # The item will be deleted from the original folder
            self._id = None
            self.folder = copy_to_folder
            return None

        # New message
        if copy_to_folder:
            # This would be better done via send_and_save(), but let's just support it here
            self.folder = copy_to_folder
            return self.send_and_save(conflict_resolution=conflict_resolution,
                                      send_meeting_invitations=send_meeting_invitations)

        if self.account.version.build < EXCHANGE_2013 and self.attachments:
            # At least some versions prior to Exchange 2013 can't send attachments immediately. You need to first save,
            # then attach, then send. This is done in send_and_save(). send() will delete the item again.
            self.send_and_save(conflict_resolution=conflict_resolution,
                               send_meeting_invitations=send_meeting_invitations)
            return None

        self._create(message_disposition=SEND_ONLY, send_meeting_invitations=send_meeting_invitations)
        return None

    def send_and_save(self, update_fields=None, conflict_resolution=AUTO_RESOLVE,
                      send_meeting_invitations=SEND_TO_NONE):
        # Sends Message and saves a copy in the parent folder. Does not return an ItemId.
        if self.id:
            self._update(
                update_fieldnames=update_fields,
                message_disposition=SEND_AND_SAVE_COPY,
                conflict_resolution=conflict_resolution,
                send_meeting_invitations=send_meeting_invitations
            )
        else:
            if self.account.version.build < EXCHANGE_2013 and self.attachments:
                # At least some versions prior to Exchange 2013 can't send-and-save attachments immediately. You need
                # to first save, then attach, then send. This is done in save().
                self.save(update_fields=update_fields, conflict_resolution=conflict_resolution,
                          send_meeting_invitations=send_meeting_invitations)
                self.send(save_copy=False, conflict_resolution=conflict_resolution,
                          send_meeting_invitations=send_meeting_invitations)
            else:
                res = self._create(
                    message_disposition=SEND_AND_SAVE_COPY,
                    send_meeting_invitations=send_meeting_invitations
                )
                if res:
                    raise ValueError('Unexpected response in send-only mode')

    @require_id
    def create_reply(self, subject, body, to_recipients=None, cc_recipients=None, bcc_recipients=None):
        if to_recipients is None:
            if not self.author:
                raise ValueError("'to_recipients' must be set when message has no 'author'")
            to_recipients = [self.author]
        return ReplyToItem(
            account=self.account,
            reference_item_id=ReferenceItemId(id=self.id, changekey=self.changekey),
            subject=subject,
            new_body=body,
            to_recipients=to_recipients,
            cc_recipients=cc_recipients,
            bcc_recipients=bcc_recipients,
        )

    def reply(self, subject, body, to_recipients=None, cc_recipients=None, bcc_recipients=None):
        self.create_reply(
            subject,
            body,
            to_recipients,
            cc_recipients,
            bcc_recipients
        ).send()

    @require_id
    def create_reply_all(self, subject, body):
        to_recipients = list(self.to_recipients) if self.to_recipients else []
        if self.author:
            to_recipients.append(self.author)
        return ReplyAllToItem(
            account=self.account,
            reference_item_id=ReferenceItemId(id=self.id, changekey=self.changekey),
            subject=subject,
            new_body=body,
            to_recipients=to_recipients,
            cc_recipients=self.cc_recipients,
            bcc_recipients=self.bcc_recipients,
        )

    def reply_all(self, subject, body):
        self.create_reply_all(subject, body).send()

    def mark_as_junk(self, is_junk=True, move_item=True):
        """Mark or un-marks items as junk email.

        :param is_junk: If True, the sender will be added from the blocked sender list. Otherwise, the sender will be
        removed.
        :param move_item: If true, the item will be moved to the junk folder.
        :return:
        """
        res = MarkAsJunk(account=self.account).get(
            items=[self], is_junk=is_junk, move_item=move_item, expect_result=move_item
        )
        if res is None:
            return
        self.folder = self.account.junk if is_junk else self.account.inbox
        self.id, self.changekey = res

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Instance variables

var author
var bcc_recipients
var cc_recipients
var conversation_index
var conversation_topic
var is_delivery_receipt_requested
var is_read
var is_read_receipt_requested
var is_response_requested
var message_id
var received_by
var received_representing
var references
var reminder_message_data
var reply_to
var sender
var to_recipients

Methods

def create_reply(self, subject, body, to_recipients=None, cc_recipients=None, bcc_recipients=None)
Expand source code
@require_id
def create_reply(self, subject, body, to_recipients=None, cc_recipients=None, bcc_recipients=None):
    if to_recipients is None:
        if not self.author:
            raise ValueError("'to_recipients' must be set when message has no 'author'")
        to_recipients = [self.author]
    return ReplyToItem(
        account=self.account,
        reference_item_id=ReferenceItemId(id=self.id, changekey=self.changekey),
        subject=subject,
        new_body=body,
        to_recipients=to_recipients,
        cc_recipients=cc_recipients,
        bcc_recipients=bcc_recipients,
    )
def create_reply_all(self, subject, body)
Expand source code
@require_id
def create_reply_all(self, subject, body):
    to_recipients = list(self.to_recipients) if self.to_recipients else []
    if self.author:
        to_recipients.append(self.author)
    return ReplyAllToItem(
        account=self.account,
        reference_item_id=ReferenceItemId(id=self.id, changekey=self.changekey),
        subject=subject,
        new_body=body,
        to_recipients=to_recipients,
        cc_recipients=self.cc_recipients,
        bcc_recipients=self.bcc_recipients,
    )
def mark_as_junk(self, is_junk=True, move_item=True)

Mark or un-mark items as junk email.

:param is_junk: If True, the sender will be added to the blocked sender list. Otherwise, the sender will be removed.
:param move_item: If true, the item will be moved to the junk folder.
:return:

Expand source code
def mark_as_junk(self, is_junk=True, move_item=True):
    """Mark or un-marks items as junk email.

    :param is_junk: If True, the sender will be added from the blocked sender list. Otherwise, the sender will be
    removed.
    :param move_item: If true, the item will be moved to the junk folder.
    :return:
    """
    res = MarkAsJunk(account=self.account).get(
        items=[self], is_junk=is_junk, move_item=move_item, expect_result=move_item
    )
    if res is None:
        return
    self.folder = self.account.junk if is_junk else self.account.inbox
    self.id, self.changekey = res
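
A short usage sketch, assuming msg is a Message previously fetched from a folder:

# Move the message to the Junk folder and add its sender to the blocked-senders list.
msg.mark_as_junk(is_junk=True, move_item=True)

# Undo: remove the sender from the blocked list and move the message back to the inbox.
msg.mark_as_junk(is_junk=False, move_item=True)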
def reply(self, subject, body, to_recipients=None, cc_recipients=None, bcc_recipients=None)
Expand source code
def reply(self, subject, body, to_recipients=None, cc_recipients=None, bcc_recipients=None):
    self.create_reply(
        subject,
        body,
        to_recipients,
        cc_recipients,
        bcc_recipients
    ).send()
def reply_all(self, subject, body)
Expand source code
def reply_all(self, subject, body):
    self.create_reply_all(subject, body).send()
def send(self, save_copy=True, copy_to_folder=None, conflict_resolution='AutoResolve', send_meeting_invitations='SendToNone')
Expand source code
@require_account
def send(self, save_copy=True, copy_to_folder=None, conflict_resolution=AUTO_RESOLVE,
         send_meeting_invitations=SEND_TO_NONE):
    # Only sends a message. The message can either be an existing draft stored in EWS or a new message that does
    # not yet exist in EWS.
    if copy_to_folder and not save_copy:
        raise AttributeError("'save_copy' must be True when 'copy_to_folder' is set")
    if save_copy and not copy_to_folder:
        copy_to_folder = self.account.sent  # 'Sent' is default EWS behaviour
    if self.id:
        SendItem(account=self.account).get(items=[self], saved_item_folder=copy_to_folder)
        # The item will be deleted from the original folder
        self._id = None
        self.folder = copy_to_folder
        return None

    # New message
    if copy_to_folder:
        # This would be better done via send_and_save(), but let's just support it here
        self.folder = copy_to_folder
        return self.send_and_save(conflict_resolution=conflict_resolution,
                                  send_meeting_invitations=send_meeting_invitations)

    if self.account.version.build < EXCHANGE_2013 and self.attachments:
        # At least some versions prior to Exchange 2013 can't send attachments immediately. You need to first save,
        # then attach, then send. This is done in send_and_save(). send() will delete the item again.
        self.send_and_save(conflict_resolution=conflict_resolution,
                           send_meeting_invitations=send_meeting_invitations)
        return None

    self._create(message_disposition=SEND_ONLY, send_meeting_invitations=send_meeting_invitations)
    return None
def send_and_save(self, update_fields=None, conflict_resolution='AutoResolve', send_meeting_invitations='SendToNone')
Expand source code
def send_and_save(self, update_fields=None, conflict_resolution=AUTO_RESOLVE,
                  send_meeting_invitations=SEND_TO_NONE):
    # Sends Message and saves a copy in the parent folder. Does not return an ItemId.
    if self.id:
        self._update(
            update_fieldnames=update_fields,
            message_disposition=SEND_AND_SAVE_COPY,
            conflict_resolution=conflict_resolution,
            send_meeting_invitations=send_meeting_invitations
        )
    else:
        if self.account.version.build < EXCHANGE_2013 and self.attachments:
            # At least some versions prior to Exchange 2013 can't send-and-save attachments immediately. You need
            # to first save, then attach, then send. This is done in save().
            self.save(update_fields=update_fields, conflict_resolution=conflict_resolution,
                      send_meeting_invitations=send_meeting_invitations)
            self.send(save_copy=False, conflict_resolution=conflict_resolution,
                      send_meeting_invitations=send_meeting_invitations)
        else:
            res = self._create(
                message_disposition=SEND_AND_SAVE_COPY,
                send_meeting_invitations=send_meeting_invitations
            )
            if res:
                raise ValueError('Unexpected response in send-only mode')

Inherited members

class NoVerifyHTTPAdapter (pool_connections=10, pool_maxsize=10, max_retries=0, pool_block=False)

An HTTP adapter that ignores TLS validation errors. Use at own risk.

Expand source code
class NoVerifyHTTPAdapter(requests.adapters.HTTPAdapter):
    """An HTTP adapter that ignores TLS validation errors. Use at own risk."""

    def cert_verify(self, conn, url, verify, cert):
        # pylint: disable=unused-argument
        # We're overriding a method so we have to keep the signature
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)

Ancestors

  • requests.adapters.HTTPAdapter
  • requests.adapters.BaseAdapter

Methods

def cert_verify(self, conn, url, verify, cert)

Verify a SSL certificate. This method should not be called from user code, and is only exposed for use when subclassing the :class:HTTPAdapter <requests.adapters.HTTPAdapter>.

:param conn: The urllib3 connection object associated with the cert.
:param url: The requested URL.
:param verify: Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use
:param cert: The SSL certificate to verify.

Expand source code
def cert_verify(self, conn, url, verify, cert):
    # pylint: disable=unused-argument
    # We're overriding a method so we have to keep the signature
    super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
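
The adapter is typically activated by swapping it in as the protocol's HTTP adapter class before any accounts are created; use this only against test servers you trust:

from exchangelib import BaseProtocol, NoVerifyHTTPAdapter

# Make all subsequent connections skip TLS certificate validation.
BaseProtocol.HTTP_ADAPTER_CLS = NoVerifyHTTPAdapter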
class OAuth2AuthorizationCodeCredentials (authorization_code=None, access_token=None, **kwargs)

Login info for OAuth 2.0 authentication using the authorization code grant type. This can be used in one of several ways:

* Given an authorization code, client ID, and client secret, fetch a token ourselves and refresh it as needed if supplied with a refresh token.
* Given an existing access token, refresh token, client ID, and client secret, use the access token until it expires and then refresh it as needed.
* Given only an existing access token, use it until it expires. This can be used to let the calling application refresh tokens itself by subclassing and implementing refresh().

Unlike the base (client credentials) grant, authorization code credentials don't require a Microsoft tenant ID because each access token (and the authorization code used to get the access token) is restricted to a single tenant.

:param client_id: ID of an authorized OAuth application, required for automatic token fetching and refreshing
:param client_secret: Secret associated with the OAuth application
:param tenant_id: Microsoft tenant ID of the account to access
:param identity: An Identity object representing the account that these credentials are connected to.
:param authorization_code: Code obtained when authorizing the application to access an account. In combination with client_id and client_secret, will be used to obtain an access token.
:param access_token: Previously-obtained access token. If a token exists and the application will handle refreshing by itself (or opts not to handle it), this parameter alone is sufficient.

Expand source code
class OAuth2AuthorizationCodeCredentials(OAuth2Credentials):
    """Login info for OAuth 2.0 authentication using the authorization code grant type. This can be used in one of
    several ways:
    * Given an authorization code, client ID, and client secret, fetch a token ourselves and refresh it as needed if
      supplied with a refresh token.
    * Given an existing access token, refresh token, client ID, and client secret, use the access token until it
      expires and then refresh it as needed.
    * Given only an existing access token, use it until it expires. This can be used to let the calling application
      refresh tokens itself by subclassing and implementing refresh().

    Unlike the base (client credentials) grant, authorization code credentials don't require a Microsoft tenant ID
    because each access token (and the authorization code used to get the access token) is restricted to a single
    tenant.
    """

    def __init__(self, authorization_code=None, access_token=None, **kwargs):
        """

        :param client_id: ID of an authorized OAuth application, required for automatic token fetching and refreshing
        :param client_secret: Secret associated with the OAuth application
        :param tenant_id: Microsoft tenant ID of the account to access
        :param identity: An Identity object representing the account that these credentials are connected to.
        :param authorization_code: Code obtained when authorizing the application to access an account. In combination
          with client_id and client_secret, will be used to obtain an access token.
        :param access_token: Previously-obtained access token. If a token exists and the application will handle
          refreshing by itself (or opts not to handle it), this parameter alone is sufficient.
        """
        super().__init__(**kwargs)
        self.authorization_code = authorization_code
        if access_token is not None and not isinstance(access_token, dict):
            raise ValueError("'access_token' must be an OAuth2Token")
        self.access_token = access_token

    def __repr__(self):
        return self.__class__.__name__ + repr(
            (self.client_id, '[client_secret]', '[authorization_code]', '[access_token]')
        )

    def __str__(self):
        client_id = self.client_id
        credential = '[access_token]' if self.access_token is not None else \
            ('[authorization_code]' if self.authorization_code is not None else None)
        description = ' '.join(filter(None, [client_id, credential]))
        return description or '[underspecified credentials]'

Ancestors

Inherited members

class OAuth2Credentials (client_id, client_secret, tenant_id=None, identity=None)

Login info for OAuth 2.0 client credentials authentication, as well as a base for other OAuth 2.0 grant types.

This is primarily useful for in-house applications accessing data from a single Microsoft account. For applications that will access multiple tenants' data, the client credentials flow does not give the application enough information to restrict end users' access to the appropriate account. Use OAuth2AuthorizationCodeCredentials and the associated auth code grant type for multi-tenant applications.

:param client_id: ID of an authorized OAuth application, required for automatic token fetching and refreshing
:param client_secret: Secret associated with the OAuth application
:param tenant_id: Microsoft tenant ID of the account to access
:param identity: An Identity object representing the account that these credentials are connected to.
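
A minimal sketch of the client credentials flow, using placeholder values:

from exchangelib import Account, Configuration, Identity, OAuth2Credentials, IMPERSONATION, OAUTH2

credentials = OAuth2Credentials(
    client_id='MY_CLIENT_ID', client_secret='MY_CLIENT_SECRET', tenant_id='MY_TENANT_ID',
    identity=Identity(primary_smtp_address='user@example.com'),
)
config = Configuration(server='outlook.office365.com', credentials=credentials, auth_type=OAUTH2)
account = Account('user@example.com', config=config, access_type=IMPERSONATION, autodiscover=False)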

Expand source code
class OAuth2Credentials(BaseCredentials):
    """Login info for OAuth 2.0 client credentials authentication, as well as a base for other OAuth 2.0 grant types.

    This is primarily useful for in-house applications accessing data from a single Microsoft account. For applications
    that will access multiple tenants' data, the client credentials flow does not give the application enough
    information to restrict end users' access to the appropriate account. Use OAuth2AuthorizationCodeCredentials and
    the associated auth code grant type for multi-tenant applications.
    """

    def __init__(self, client_id, client_secret, tenant_id=None, identity=None):
        """

        :param client_id: ID of an authorized OAuth application, required for automatic token fetching and refreshing
        :param client_secret: Secret associated with the OAuth application
        :param tenant_id: Microsoft tenant ID of the account to access
        :param identity: An Identity object representing the account that these credentials are connected to.
        """
        super().__init__()
        self.client_id = client_id
        self.client_secret = client_secret
        self.tenant_id = tenant_id
        self.identity = identity
        # When set, access_token is a dict (or an oauthlib.oauth2.OAuth2Token, which is also a dict)
        self.access_token = None

    def refresh(self, session):
        # Creating a new session gets a new access token, so there's no work here to refresh the credentials. This
        # implementation just makes sure we don't raise a NotImplementedError.
        pass

    def on_token_auto_refreshed(self, access_token):
        """Set the access_token. Called after the access token is refreshed (requests-oauthlib can automatically
        refresh tokens if given an OAuth client ID and secret, so this is how our copy of the token stays up-to-date).
        Applications that cache access tokens can override this to store the new token - just remember to call the
        super() method.

        :param access_token: New token obtained by refreshing
        """
        # Ensure we don't update the object in the middle of a new session being created, which could cause a race.
        if not isinstance(access_token, dict):
            raise ValueError("'access_token' must be an OAuth2Token")
        with self.lock:
            log.debug('%s auth token for %s', 'Refreshing' if self.access_token else 'Setting', self.client_id)
            self.access_token = access_token

    def _get_hash_values(self):
        # 'access_token' may be refreshed once in a while. This should not affect the hash signature.
        # 'identity' is just informational and should also not affect the hash signature.
        return (getattr(self, k) for k in self.__dict__ if k not in ('_lock', 'identity', 'access_token'))

    def sig(self):
        # Like hash(self), but pulls in the access token. Protocol.refresh_credentials() uses this to find out
        # if the access_token needs to be refreshed.
        res = []
        for k in self.__dict__:
            if k in ('_lock', 'identity'):
                continue
            if k == 'access_token':
                res.append(self.access_token['access_token'] if self.access_token else None)
                continue
            res.append(getattr(self, k))
        return hash(tuple(res))

    def __repr__(self):
        return self.__class__.__name__ + repr((self.client_id, '********'))

    def __str__(self):
        return self.client_id

Ancestors

Subclasses

Methods

def on_token_auto_refreshed(self, access_token)

Set the access_token. Called after the access token is refreshed (requests-oauthlib can automatically refresh tokens if given an OAuth client ID and secret, so this is how our copy of the token stays up-to-date). Applications that cache access tokens can override this to store the new token - just remember to call the super() method.

:param access_token: New token obtained by refreshing
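
A minimal sketch of a subclass that also persists refreshed tokens; store_token() is a hypothetical helper standing in for whatever storage your application uses:

from exchangelib import OAuth2Credentials

class CachingCredentials(OAuth2Credentials):
    def on_token_auto_refreshed(self, access_token):
        # Keep the in-memory copy up-to-date by calling the super() method
        super().on_token_auto_refreshed(access_token)
        # Hypothetical helper: write the refreshed token to the application's cache
        store_token(access_token)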

Expand source code
def on_token_auto_refreshed(self, access_token):
    """Set the access_token. Called after the access token is refreshed (requests-oauthlib can automatically
    refresh tokens if given an OAuth client ID and secret, so this is how our copy of the token stays up-to-date).
    Applications that cache access tokens can override this to store the new token - just remember to call the
    super() method.

    :param access_token: New token obtained by refreshing
    """
    # Ensure we don't update the object in the middle of a new session being created, which could cause a race.
    if not isinstance(access_token, dict):
        raise ValueError("'access_token' must be an OAuth2Token")
    with self.lock:
        log.debug('%s auth token for %s', 'Refreshing' if self.access_token else 'Setting', self.client_id)
        self.access_token = access_token
def sig(self)
Expand source code
def sig(self):
    # Like hash(self), but pulls in the access token. Protocol.refresh_credentials() uses this to find out
    # if the access_token needs to be refreshed.
    res = []
    for k in self.__dict__:
        if k in ('_lock', 'identity'):
            continue
        if k == 'access_token':
            res.append(self.access_token['access_token'] if self.access_token else None)
            continue
        res.append(getattr(self, k))
    return hash(tuple(res))

Inherited members

class OofSettings (**kwargs)
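
Out of Office (OOF) settings for an account. These are typically read and written via the oof_settings property on an Account instance. A minimal sketch, assuming 'account' is an already-configured Account:

import datetime
from exchangelib import OofSettings, UTC

account.oof_settings = OofSettings(
    state=OofSettings.SCHEDULED,
    external_audience='Known',
    internal_reply='I am out of the office until next week.',
    external_reply='I am currently unavailable.',
    # When state is SCHEDULED, 'start' must be before 'end' and 'end' must be in the future
    start=datetime.datetime.now(tz=UTC) + datetime.timedelta(days=1),
    end=datetime.datetime.now(tz=UTC) + datetime.timedelta(days=8),
)
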
Expand source code
class OofSettings(EWSElement):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/oofsettings"""

    ELEMENT_NAME = 'OofSettings'
    REQUEST_ELEMENT_NAME = 'UserOofSettings'

    ENABLED = 'Enabled'
    SCHEDULED = 'Scheduled'
    DISABLED = 'Disabled'
    STATE_CHOICES = (ENABLED, SCHEDULED, DISABLED)

    state = ChoiceField(field_uri='OofState', is_required=True, choices={Choice(c) for c in STATE_CHOICES})
    external_audience = ChoiceField(field_uri='ExternalAudience',
                                    choices={Choice('None'), Choice('Known'), Choice('All')}, default='All')
    start = DateTimeField(field_uri='StartTime')
    end = DateTimeField(field_uri='EndTime')
    internal_reply = MessageField(field_uri='InternalReply')
    external_reply = MessageField(field_uri='ExternalReply')

    def clean(self, version=None):
        super().clean(version=version)
        if self.state == self.SCHEDULED:
            if not self.start or not self.end:
                raise ValueError("'start' and 'end' must be set when state is '%s'" % self.SCHEDULED)
            if self.start >= self.end:
                raise ValueError("'start' must be before 'end'")
            if self.end < datetime.datetime.now(tz=UTC):
                raise ValueError("'end' must be in the future")
        if self.state != self.DISABLED and (not self.internal_reply or not self.external_reply):
            raise ValueError("'internal_reply' and 'external_reply' must be set when state is not '%s'" % self.DISABLED)

    @classmethod
    def from_xml(cls, elem, account):
        kwargs = {}
        for attr in ('state', 'external_audience', 'internal_reply', 'external_reply'):
            f = cls.get_field_by_fieldname(attr)
            kwargs[attr] = f.from_xml(elem=elem, account=account)
        kwargs.update(OutOfOffice.duration_to_start_end(elem=elem, account=account))
        cls._clear(elem)
        return cls(**kwargs)

    def to_xml(self, version):
        self.clean(version=version)
        elem = create_element('t:%s' % self.REQUEST_ELEMENT_NAME)
        for attr in ('state', 'external_audience'):
            value = getattr(self, attr)
            if value is None:
                continue
            f = self.get_field_by_fieldname(attr)
            set_xml_value(elem, f.to_xml(value, version=version), version=version)
        if self.start or self.end:
            duration = create_element('t:Duration')
            if self.start:
                f = self.get_field_by_fieldname('start')
                set_xml_value(duration, f.to_xml(self.start, version=version), version)
            if self.end:
                f = self.get_field_by_fieldname('end')
                set_xml_value(duration, f.to_xml(self.end, version=version), version)
            elem.append(duration)
        for attr in ('internal_reply', 'external_reply'):
            value = getattr(self, attr)
            if value is None:
                value = ''  # The value can be empty, but the XML element must always be present
            f = self.get_field_by_fieldname(attr)
            set_xml_value(elem, f.to_xml(value, version=version), version)
        return elem

    def __hash__(self):
        # Customize comparison
        if self.state == self.DISABLED:
            # All values except state are ignored by the server
            relevant_attrs = ('state',)
        elif self.state != self.SCHEDULED:
            # 'start' and 'end' values are ignored by the server, and the server always returns today's date
            relevant_attrs = tuple(f.name for f in self.FIELDS if f.name not in ('start', 'end'))
        else:
            relevant_attrs = tuple(f.name for f in self.FIELDS)
        return hash(tuple(getattr(self, attr) for attr in relevant_attrs))

Ancestors

Class variables

var DISABLED
var ELEMENT_NAME
var ENABLED
var FIELDS
var REQUEST_ELEMENT_NAME
var SCHEDULED
var STATE_CHOICES

Static methods

def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    kwargs = {}
    for attr in ('state', 'external_audience', 'internal_reply', 'external_reply'):
        f = cls.get_field_by_fieldname(attr)
        kwargs[attr] = f.from_xml(elem=elem, account=account)
    kwargs.update(OutOfOffice.duration_to_start_end(elem=elem, account=account))
    cls._clear(elem)
    return cls(**kwargs)

Instance variables

var end
var external_audience
var external_reply
var internal_reply
var start
var state

Methods

def clean(self, version=None)
Expand source code
def clean(self, version=None):
    super().clean(version=version)
    if self.state == self.SCHEDULED:
        if not self.start or not self.end:
            raise ValueError("'start' and 'end' must be set when state is '%s'" % self.SCHEDULED)
        if self.start >= self.end:
            raise ValueError("'start' must be before 'end'")
        if self.end < datetime.datetime.now(tz=UTC):
            raise ValueError("'end' must be in the future")
    if self.state != self.DISABLED and (not self.internal_reply or not self.external_reply):
        raise ValueError("'internal_reply' and 'external_reply' must be set when state is not '%s'" % self.DISABLED)
def to_xml(self, version)
Expand source code
def to_xml(self, version):
    self.clean(version=version)
    elem = create_element('t:%s' % self.REQUEST_ELEMENT_NAME)
    for attr in ('state', 'external_audience'):
        value = getattr(self, attr)
        if value is None:
            continue
        f = self.get_field_by_fieldname(attr)
        set_xml_value(elem, f.to_xml(value, version=version), version=version)
    if self.start or self.end:
        duration = create_element('t:Duration')
        if self.start:
            f = self.get_field_by_fieldname('start')
            set_xml_value(duration, f.to_xml(self.start, version=version), version)
        if self.end:
            f = self.get_field_by_fieldname('end')
            set_xml_value(duration, f.to_xml(self.end, version=version), version)
        elem.append(duration)
    for attr in ('internal_reply', 'external_reply'):
        value = getattr(self, attr)
        if value is None:
            value = ''  # The value can be empty, but the XML element must always be present
        f = self.get_field_by_fieldname(attr)
        set_xml_value(elem, f.to_xml(value, version=version), version)
    return elem

Inherited members

class PostItem (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/postitem

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs:
    'account' is optional but allows calling 'send()' and 'delete()'.
    'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.
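
A minimal sketch, assuming 'posts_folder' is a folder obtained from an already-configured account:

from exchangelib import PostItem

item = PostItem(folder=posts_folder, subject='Hello', body='Posted via EWS')
item.save()  # Possible because 'folder' (and thus folder.account) is set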

Expand source code
class PostItem(Item):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/postitem"""

    ELEMENT_NAME = 'PostItem'

    conversation_index = Message.FIELDS['conversation_index']
    conversation_topic = Message.FIELDS['conversation_topic']

    author = Message.FIELDS['author']
    message_id = Message.FIELDS['message_id']
    is_read = Message.FIELDS['is_read']

    posted_time = DateTimeField(field_uri='postitem:PostedTime', is_read_only=True)
    references = TextField(field_uri='message:References')
    sender = MailboxField(field_uri='message:Sender', is_read_only=True, is_read_only_after_send=True)

Ancestors

Class variables

var ELEMENT_NAME
var FIELDS

Instance variables

var author
var conversation_index
var conversation_topic
var is_read
var message_id
var posted_time
var references
var sender

Inherited members

class Q (*args, **kwargs)

A class with an API similar to Django Q objects. Used to implement advanced filtering logic.
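
Q objects can be combined with &, | and ~ and passed to filter() on a folder or queryset. A minimal sketch, assuming 'account' is an already-configured Account instance:

from exchangelib import Q

q = (Q(subject__icontains='invoice') | Q(subject__startswith='Re:')) & ~Q(importance='Low')
for item in account.inbox.filter(q):
    print(item.subject, item.datetime_received)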

Expand source code
class Q:
    """A class with an API similar to Django Q objects. Used to implemnt advanced filtering logic."""

    # Connection types
    AND = 'AND'
    OR = 'OR'
    NOT = 'NOT'
    NEVER = 'NEVER'  # This is not specified by EWS. We use it for queries that will never match, e.g. 'foo__in=()'
    CONN_TYPES = {AND, OR, NOT, NEVER}

    # EWS Operators
    EQ = '=='
    NE = '!='
    GT = '>'
    GTE = '>='
    LT = '<'
    LTE = '<='
    EXACT = 'exact'
    IEXACT = 'iexact'
    CONTAINS = 'contains'
    ICONTAINS = 'icontains'
    STARTSWITH = 'startswith'
    ISTARTSWITH = 'istartswith'
    EXISTS = 'exists'
    OP_TYPES = {EQ, NE, GT, GTE, LT, LTE, EXACT, IEXACT, CONTAINS, ICONTAINS, STARTSWITH, ISTARTSWITH, EXISTS}
    CONTAINS_OPS = {EXACT, IEXACT, CONTAINS, ICONTAINS, STARTSWITH, ISTARTSWITH}

    # Valid lookups
    LOOKUP_RANGE = 'range'
    LOOKUP_IN = 'in'
    LOOKUP_NOT = 'not'
    LOOKUP_GT = 'gt'
    LOOKUP_GTE = 'gte'
    LOOKUP_LT = 'lt'
    LOOKUP_LTE = 'lte'
    LOOKUP_EXACT = 'exact'
    LOOKUP_IEXACT = 'iexact'
    LOOKUP_CONTAINS = 'contains'
    LOOKUP_ICONTAINS = 'icontains'
    LOOKUP_STARTSWITH = 'startswith'
    LOOKUP_ISTARTSWITH = 'istartswith'
    LOOKUP_EXISTS = 'exists'
    LOOKUP_TYPES = {LOOKUP_RANGE, LOOKUP_IN, LOOKUP_NOT, LOOKUP_GT, LOOKUP_GTE, LOOKUP_LT, LOOKUP_LTE, LOOKUP_EXACT,
                    LOOKUP_IEXACT, LOOKUP_CONTAINS, LOOKUP_ICONTAINS, LOOKUP_STARTSWITH, LOOKUP_ISTARTSWITH,
                    LOOKUP_EXISTS}

    __slots__ = 'conn_type', 'field_path', 'op', 'value', 'children', 'query_string'

    def __init__(self, *args, **kwargs):
        self.conn_type = kwargs.pop('conn_type', self.AND)

        self.field_path = None  # Name of the field we want to filter on
        self.op = None
        self.value = None
        self.query_string = None

        # Parsing of args and kwargs may require child elements
        self.children = []

        # Check for query string as the only argument
        if not kwargs and len(args) == 1 and isinstance(args[0], str):
            self.query_string = args[0]
            args = ()

        # Parse args which must now be Q objects
        for q in args:
            if not isinstance(q, self.__class__):
                raise ValueError("Non-keyword arg %r must be a Q instance" % q)
        self.children.extend(args)

        # Parse keyword args and extract the filter
        is_single_kwarg = len(args) == 0 and len(kwargs) == 1
        for key, value in kwargs.items():
            self.children.extend(
                self._get_children_from_kwarg(key=key, value=value, is_single_kwarg=is_single_kwarg)
            )

        # Simplify this object
        self.reduce()

        # Final sanity check
        self._check_integrity()

    def _get_children_from_kwarg(self, key, value, is_single_kwarg=False):
        """Generate Q objects corresponding to a single keyword argument. Make this a leaf if there are no children to
        generate.
        """
        key_parts = key.rsplit('__', 1)
        if len(key_parts) == 2 and key_parts[1] in self.LOOKUP_TYPES:
            # This is a kwarg with a lookup at the end
            field_path, lookup = key_parts
            if lookup == self.LOOKUP_EXISTS:
                # value=True will fall through to further processing
                if not value:
                    return [~self.__class__(**{key: True})]

            if lookup == self.LOOKUP_RANGE:
                # EWS doesn't have a 'range' operator. Emulate 'foo__range=(1, 2)' as 'foo__gte=1 and foo__lte=2'
                # (both values inclusive).
                if len(value) != 2:
                    raise ValueError("Value of lookup '%s' must have exactly 2 elements" % key)
                return [
                    self.__class__(**{'%s__gte' % field_path: value[0]}),
                    self.__class__(**{'%s__lte' % field_path: value[1]}),
                ]

            # Filtering on list types is a bit quirky. The only lookup type I have found to work is:
            #
            #     item:Categories == 'foo' AND item:Categories == 'bar' AND ...
            #
            #     item:Categories == 'foo' OR item:Categories == 'bar' OR ...
            #
            # The former returns items that have all these categories, but maybe also others. The latter returns
            # items that have at least one of these categories. This translates to the 'contains' and 'in' lookups,
            # respectively. Both versions are case-insensitive.
            #
            # Exact matching and case-sensitive or partial-string matching is not possible since that requires the
            # 'Contains' element which only supports matching on string elements, not arrays.
            #
            # Exact matching of categories (i.e. match ['a', 'b'] but not ['a', 'b', 'c']) could be implemented by
            # post-processing items by fetching the categories field unconditionally and removing the items that don't
            # have an exact match.
            if lookup == self.LOOKUP_IN:
                # EWS doesn't have an '__in' operator. Allow '__in' lookups on list and non-list field types,
                # specifying a list value. We'll emulate it as a set of OR'ed exact matches.
                if not is_iterable(value, generators_allowed=True):
                    raise ValueError("Value for lookup %r must be a list" % key)
                children = [self.__class__(**{field_path: v}) for v in value]
                if not children:
                    # This is an '__in' operator with an empty list as the value. We interpret it to mean "is foo
                    # contained in the empty set?" which is always false. Mark this Q object as such.
                    return [self.__class__(conn_type=self.NEVER)]
                return [self.__class__(*children, conn_type=self.OR)]

            if lookup == self.LOOKUP_CONTAINS and is_iterable(value, generators_allowed=True):
                # A '__contains' lookup with a list as the value only makes sense for list fields, since exact match
                # on multiple distinct values will always fail for single-value fields.
                #
                # An empty list as value is allowed. We interpret it to mean "are all values in the empty set contained
                # in foo?" which is always true.
                children = [self.__class__(**{field_path: v}) for v in value]
                return [self.__class__(*children, conn_type=self.AND)]

            try:
                op = self._lookup_to_op(lookup)
            except KeyError:
                raise ValueError("Lookup '%s' is not supported (called as '%s=%r')" % (lookup, key, value))
        else:
            field_path, op = key, self.EQ

        if not is_single_kwarg:
            return [self.__class__(**{key: value})]

        # This is a single-kwarg Q object with a lookup that requires a single value. Make this a leaf
        self.field_path = field_path
        self.op = op
        self.value = value
        return []

    def reduce(self):
        """Simplify this object, if possible."""
        self._reduce_children()
        self._promote()

    def _reduce_children(self):
        """Look at the children of this object and remove unnecessary items."""
        children = self.children
        if any((isinstance(a, self.__class__) and a.is_never()) for a in children):
            # We have at least one 'never' arg
            if self.conn_type == self.AND:
                # Remove all other args since nothing we AND together with a 'never' arg can change the result
                children = [self.__class__(conn_type=self.NEVER)]
            elif self.conn_type == self.OR:
                # Remove all 'never' args because all other args will decide the result. Keep one 'never' arg in case
                # all args are 'never' args.
                children = [a for a in children if not (isinstance(a, self.__class__) and a.is_never())]
                if not children:
                    children = [self.__class__(conn_type=self.NEVER)]
            elif self.conn_type == self.NOT:
                # Let's interpret 'not never' to mean 'always'. Remove all 'never' args
                children = [a for a in children if not (isinstance(a, self.__class__) and a.is_never())]

        # Remove any empty Q elements in args before proceeding
        children = [a for a in children if not (isinstance(a, self.__class__) and a.is_empty())]
        self.children = children

    def _promote(self):
        """When we only have one child and no expression on ourselves, we are a no-op. Flatten by taking over the only
        child.
        """
        if len(self.children) != 1 or self.field_path is not None or self.conn_type == self.NOT:
            return

        q = self.children[0]
        self.conn_type = q.conn_type
        self.field_path = q.field_path
        self.op = q.op
        self.value = q.value
        self.query_string = q.query_string
        self.children = q.children

    def clean(self, version):
        """Do some basic checks on the attributes, using a generic folder. to_xml() does a really good job of
        validating. There's no reason to replicate much of that here.
        """
        from .folders import Folder
        self.to_xml(folders=[Folder()], version=version, applies_to=Restriction.ITEMS)

    @classmethod
    def _lookup_to_op(cls, lookup):
        return {
            cls.LOOKUP_NOT: cls.NE,
            cls.LOOKUP_GT: cls.GT,
            cls.LOOKUP_GTE: cls.GTE,
            cls.LOOKUP_LT: cls.LT,
            cls.LOOKUP_LTE: cls.LTE,
            cls.LOOKUP_EXACT: cls.EXACT,
            cls.LOOKUP_IEXACT: cls.IEXACT,
            cls.LOOKUP_CONTAINS: cls.CONTAINS,
            cls.LOOKUP_ICONTAINS: cls.ICONTAINS,
            cls.LOOKUP_STARTSWITH: cls.STARTSWITH,
            cls.LOOKUP_ISTARTSWITH: cls.ISTARTSWITH,
            cls.LOOKUP_EXISTS: cls.EXISTS,
        }[lookup]

    @classmethod
    def _conn_to_xml(cls, conn_type):
        xml_tag_map = {
            cls.AND: 't:And',
            cls.OR: 't:Or',
            cls.NOT: 't:Not',
        }
        return create_element(xml_tag_map[conn_type])

    @classmethod
    def _op_to_xml(cls, op):
        xml_tag_map = {
            cls.EQ: 't:IsEqualTo',
            cls.NE: 't:IsNotEqualTo',
            cls.GTE: 't:IsGreaterThanOrEqualTo',
            cls.LTE: 't:IsLessThanOrEqualTo',
            cls.LT: 't:IsLessThan',
            cls.GT: 't:IsGreaterThan',
            cls.EXISTS: 't:Exists',
        }
        if op in xml_tag_map:
            return create_element(xml_tag_map[op])
        valid_ops = cls.EXACT, cls.IEXACT, cls.CONTAINS, cls.ICONTAINS, cls.STARTSWITH, cls.ISTARTSWITH
        if op not in valid_ops:
            raise ValueError("'op' %s must be one of %s" % (op, valid_ops))

        # For description of Contains attribute values, see
        #     https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/contains
        #
        # Possible ContainmentMode values:
        #     FullString, Prefixed, Substring, PrefixOnWords, ExactPhrase
        # Django lookups have no equivalent of PrefixOnWords and ExactPhrase (and I'm unsure how they actually
        # work).
        #
        # EWS has no equivalent of '__endswith' or '__iendswith'. That could be emulated using '__contains' and
        # '__icontains' and filtering results afterwards in Python. But it could be inefficient because we might be
        # fetching and discarding a lot of non-matching items, plus we would need to always fetch the field we're
        # matching on, to be able to do the filtering. I think it's better to leave this to the consumer, i.e.:
        #
        # items = [i for i in fld.filter(subject__contains=suffix) if i.subject.endswith(suffix)]
        # items = [i for i in fld.filter(subject__icontains=suffix) if i.subject.lower().endswith(suffix.lower())]
        #
        # Possible ContainmentComparison values (there are more, but the rest are "To be removed"):
        #     Exact, IgnoreCase, IgnoreNonSpacingCharacters, IgnoreCaseAndNonSpacingCharacters
        # I'm unsure about non-spacing characters, but as I read
        #    https://en.wikipedia.org/wiki/Graphic_character#Spacing_and_non-spacing_characters
        # we shouldn't ignore them ('a' would match both 'a' and 'å', the latter having a non-spacing character).
        if op in {cls.EXACT, cls.IEXACT}:
            match_mode = 'FullString'
        elif op in (cls.CONTAINS, cls.ICONTAINS):
            match_mode = 'Substring'
        elif op in (cls.STARTSWITH, cls.ISTARTSWITH):
            match_mode = 'Prefixed'
        else:
            raise ValueError('Unsupported op: %s' % op)
        if op in (cls.IEXACT, cls.ICONTAINS, cls.ISTARTSWITH):
            compare_mode = 'IgnoreCase'
        else:
            compare_mode = 'Exact'
        return create_element(
            't:Contains',
            attrs=OrderedDict([
                ('ContainmentMode', match_mode),
                ('ContainmentComparison', compare_mode),
            ])
        )

    def is_leaf(self):
        return not self.children

    def is_empty(self):
        """Return True if this object is without any restrictions at all."""
        return self.is_leaf() and self.field_path is None and self.query_string is None and self.conn_type != self.NEVER

    def is_never(self):
        """Return True if this object has a restriction that will never match anything."""
        return self.conn_type == self.NEVER

    def expr(self):
        if self.is_empty():
            return None
        if self.is_never():
            return self.NEVER
        if self.query_string:
            return self.query_string
        if self.is_leaf():
            expr = '%s %s %r' % (self.field_path, self.op, self.value)
        else:
            # Sort children by field name so we get stable output (for easier testing). Children should never be empty.
            expr = (' %s ' % (self.AND if self.conn_type == self.NOT else self.conn_type)).join(
                (c.expr() if c.is_leaf() or c.conn_type == self.NOT else '(%s)' % c.expr())
                for c in sorted(self.children, key=lambda i: i.field_path or '')
            )
        if self.conn_type == self.NOT:
            # Add the NOT operator. Put children in parens if there is more than one child.
            if self.is_leaf() or len(self.children) == 1:
                return self.conn_type + ' %s' % expr
            return self.conn_type + ' (%s)' % expr
        return expr

    def to_xml(self, folders, version, applies_to):
        if self.query_string:
            self._check_integrity()
            if version.build < EXCHANGE_2010:
                raise NotImplementedError('QueryString filtering is only supported for Exchange 2010 servers and later')
            elem = create_element('m:QueryString')
            elem.text = self.query_string
            return elem
        # Translate this Q object to a valid Restriction XML tree
        elem = self.xml_elem(folders=folders, version=version, applies_to=applies_to)
        if elem is None:
            return None
        restriction = create_element('m:Restriction')
        restriction.append(elem)
        return restriction

    def _check_integrity(self):
        if self.is_empty():
            return
        if self.conn_type == self.NEVER:
            if any([self.field_path, self.op, self.value, self.children]):
                raise ValueError("'never' queries cannot be combined with other settings")
            return
        if self.query_string:
            if any([self.field_path, self.op, self.value, self.children]):
                raise ValueError('Query strings cannot be combined with other settings')
            return
        if self.conn_type not in self.CONN_TYPES:
            raise ValueError("'conn_type' %s must be one of %s" % (self.conn_type, self.CONN_TYPES))
        if not self.is_leaf():
            for q in self.children:
                if q.query_string and len(self.children) > 1:
                    raise ValueError(
                        'A query string cannot be combined with other restrictions'
                    )
            return
        if not self.field_path:
            raise ValueError("'field_path' must be set")
        if self.op not in self.OP_TYPES:
            raise ValueError("'op' %s must be one of %s" % (self.op, self.OP_TYPES))
        if self.op == self.EXISTS and self.value is not True:
            raise ValueError("'value' must be True when operator is EXISTS")
        if self.value is None:
            raise ValueError('Value for filter on field path "%s" cannot be None' % self.field_path)
        if is_iterable(self.value, generators_allowed=True):
            raise ValueError(
                'Value %r for filter on field path "%s" must be a single value' % (self.value, self.field_path)
            )

    def _validate_field_path(self, field_path, folder, applies_to, version):
        from .indexed_properties import MultiFieldIndexedElement
        if applies_to == Restriction.FOLDERS:
            # This is a restriction on Folder fields
            folder.validate_field(field=field_path.field, version=version)
        else:
            folder.validate_item_field(field=field_path.field, version=version)
        if not field_path.field.is_searchable:
            raise ValueError("EWS does not support filtering on field '%s'" % field_path.field.name)
        if field_path.subfield and not field_path.subfield.is_searchable:
            raise ValueError("EWS does not support filtering on subfield '%s'" % field_path.subfield.name)
        if issubclass(field_path.field.value_cls, MultiFieldIndexedElement) and not field_path.subfield:
            raise ValueError("Field path '%s' must contain a subfield" % self.field_path)

    def _get_field_path(self, folders, applies_to, version):
        # Convert the string field path to a real FieldPath object. The path is validated using the given folders.
        for folder in folders:
            try:
                if applies_to == Restriction.FOLDERS:
                    # This is a restriction on Folder fields
                    field = folder.get_field_by_fieldname(fieldname=self.field_path)
                    field_path = FieldPath(field=field)
                else:
                    field_path = FieldPath.from_string(field_path=self.field_path, folder=folder)
            except ValueError:
                continue
            self._validate_field_path(field_path=field_path, folder=folder, applies_to=applies_to, version=version)
            break
        else:
            raise InvalidField("Unknown field path %r on folders %s" % (self.field_path, folders))
        return field_path

    def _get_clean_value(self, field_path, version):
        if self.op == self.EXISTS:
            return None
        clean_field = field_path.subfield if (field_path.subfield and field_path.label) else field_path.field
        if clean_field.is_list:
            # __contains and __in are implemented as multiple leaves, with one value per leaf. clean() on list fields
            # only works on lists, so clean a one-element list.
            return clean_field.clean(value=[self.value], version=version)[0]
        return clean_field.clean(value=self.value, version=version)

    def xml_elem(self, folders, version, applies_to):
        # Recursively build an XML tree structure of this Q object. If this is an empty leaf (the equivalent of Q()),
        # return None.
        from .indexed_properties import SingleFieldIndexedElement
        # Don't check self.value just yet. We want to return error messages on the field path first, and then the value.
        # This is done in _get_field_path() and _get_clean_value(), respectively.
        self._check_integrity()
        if self.is_empty():
            return None
        if self.is_never():
            raise ValueError("EWS does not support 'never' queries")
        if self.is_leaf():
            elem = self._op_to_xml(self.op)
            field_path = self._get_field_path(folders, applies_to=applies_to, version=version)
            clean_value = self._get_clean_value(field_path=field_path, version=version)
            if issubclass(field_path.field.value_cls, SingleFieldIndexedElement) and not field_path.label:
                # We allow a filter shortcut of e.g. email_addresses__contains=EmailAddress(label='Foo', ...) instead of
                # email_addresses__Foo_email_address=.... Set FieldPath label now so we can generate the field_uri.
                field_path.label = clean_value.label
            elif isinstance(field_path.field, DateTimeBackedDateField):
                # We need to convert to datetime
                clean_value = field_path.field.date_to_datetime(clean_value)
            elem.append(field_path.to_xml())
            constant = create_element('t:Constant')
            if self.op != self.EXISTS:
                # Use .set() to not fill up the create_element() cache with unique values
                constant.set('Value', value_to_xml_text(clean_value))
                if self.op in self.CONTAINS_OPS:
                    elem.append(constant)
                else:
                    uriorconst = create_element('t:FieldURIOrConstant')
                    uriorconst.append(constant)
                    elem.append(uriorconst)
        elif len(self.children) == 1:
            # We have only one child
            elem = self.children[0].xml_elem(folders=folders, version=version, applies_to=applies_to)
        else:
            # We have multiple children. If conn_type is NOT, then group children with AND. We'll add the NOT later
            elem = self._conn_to_xml(self.AND if self.conn_type == self.NOT else self.conn_type)
            # Sort children by field name so we get stable output (for easier testing). Children should never be empty
            for c in sorted(self.children, key=lambda i: i.field_path or ''):
                elem.append(c.xml_elem(folders=folders, version=version, applies_to=applies_to))
        if elem is None:
            return None  # Should not be necessary, but play safe
        if self.conn_type == self.NOT:
            # Encapsulate everything in the NOT element
            not_elem = self._conn_to_xml(self.conn_type)
            not_elem.append(elem)
            return not_elem
        return elem

    def __and__(self, other):
        # & operator. Return a new Q with two children and conn_type AND
        return self.__class__(self, other, conn_type=self.AND)

    def __or__(self, other):
        # | operator. Return a new Q with two children and conn_type OR
        return self.__class__(self, other, conn_type=self.OR)

    def __invert__(self):
        # ~ operator. If op has an inverse, change op. Else return a new Q with conn_type NOT
        if self.conn_type == self.NOT:
            # This is NOT NOT. Change to AND
            new = copy(self)
            new.conn_type = self.AND
            new.reduce()
            return new
        if self.is_leaf():
            inverse_ops = {
                self.EQ: self.NE,
                self.NE: self.EQ,
                self.GT: self.LTE,
                self.GTE: self.LT,
                self.LT: self.GTE,
                self.LTE: self.GT,
            }
            try:
                new = copy(self)
                new.op = inverse_ops[self.op]
                new.reduce()
                return new
            except KeyError:
                pass
        return self.__class__(self, conn_type=self.NOT)

    def __eq__(self, other):
        return repr(self) == repr(other)

    def __hash__(self):
        return hash(repr(self))

    def __str__(self):
        return self.expr() or 'Q()'

    def __repr__(self):
        if self.is_leaf():
            if self.query_string:
                return self.__class__.__name__ + '(%r)' % self.query_string
            if self.is_never():
                return self.__class__.__name__ + '(conn_type=%r)' % (self.conn_type)
            return self.__class__.__name__ + '(%s %s %r)' % (self.field_path, self.op, self.value)
        sorted_children = tuple(sorted(self.children, key=lambda i: i.field_path or ''))
        if self.conn_type == self.NOT or len(self.children) > 1:
            return self.__class__.__name__ + repr((self.conn_type,) + sorted_children)
        return self.__class__.__name__ + repr(sorted_children)

Class variables

var AND
var CONN_TYPES
var CONTAINS
var CONTAINS_OPS
var EQ
var EXACT
var EXISTS
var GT
var GTE
var ICONTAINS
var IEXACT
var ISTARTSWITH
var LOOKUP_CONTAINS
var LOOKUP_EXACT
var LOOKUP_EXISTS
var LOOKUP_GT
var LOOKUP_GTE
var LOOKUP_ICONTAINS
var LOOKUP_IEXACT
var LOOKUP_IN
var LOOKUP_ISTARTSWITH
var LOOKUP_LT
var LOOKUP_LTE
var LOOKUP_NOT
var LOOKUP_RANGE
var LOOKUP_STARTSWITH
var LOOKUP_TYPES
var LT
var LTE
var NE
var NEVER
var NOT
var OP_TYPES
var OR
var STARTSWITH

Instance variables

var children

Child Q objects of this node, if any.

var conn_type

Connection type used to combine child objects: one of AND, OR, NOT or NEVER.

var field_path

Name of the field to filter on, when this object is a leaf.

var op

Comparison operator to apply, when this object is a leaf.

var query_string

Raw EWS QueryString text, set when this object was created from a single query string argument.

var value

Value to filter on, when this object is a leaf.

Methods

def clean(self, version)

Do some basic checks on the attributes, using a generic folder. to_xml() does a really good job of validating. There's no reason to replicate much of that here.

Expand source code
def clean(self, version):
    """Do some basic checks on the attributes, using a generic folder. to_xml() does a really good job of
    validating. There's no reason to replicate much of that here.
    """
    from .folders import Folder
    self.to_xml(folders=[Folder()], version=version, applies_to=Restriction.ITEMS)
def expr(self)
Expand source code
def expr(self):
    if self.is_empty():
        return None
    if self.is_never():
        return self.NEVER
    if self.query_string:
        return self.query_string
    if self.is_leaf():
        expr = '%s %s %r' % (self.field_path, self.op, self.value)
    else:
        # Sort children by field name so we get stable output (for easier testing). Children should never be empty.
        expr = (' %s ' % (self.AND if self.conn_type == self.NOT else self.conn_type)).join(
            (c.expr() if c.is_leaf() or c.conn_type == self.NOT else '(%s)' % c.expr())
            for c in sorted(self.children, key=lambda i: i.field_path or '')
        )
    if self.conn_type == self.NOT:
        # Add the NOT operator. Put children in parens if there is more than one child.
        if self.is_leaf() or len(self.children) == 1:
            return self.conn_type + ' %s' % expr
        return self.conn_type + ' (%s)' % expr
    return expr
def is_empty(self)

Return True if this object is without any restrictions at all.

Expand source code
def is_empty(self):
    """Return True if this object is without any restrictions at all."""
    return self.is_leaf() and self.field_path is None and self.query_string is None and self.conn_type != self.NEVER
def is_leaf(self)
Expand source code
def is_leaf(self):
    return not self.children
def is_never(self)

Return True if this object has a restriction that will never match anything.

Expand source code
def is_never(self):
    """Return True if this object has a restriction that will never match anything."""
    return self.conn_type == self.NEVER
def reduce(self)

Simplify this object, if possible.

Expand source code
def reduce(self):
    """Simplify this object, if possible."""
    self._reduce_children()
    self._promote()
def to_xml(self, folders, version, applies_to)
Expand source code
def to_xml(self, folders, version, applies_to):
    if self.query_string:
        self._check_integrity()
        if version.build < EXCHANGE_2010:
            raise NotImplementedError('QueryString filtering is only supported for Exchange 2010 servers and later')
        elem = create_element('m:QueryString')
        elem.text = self.query_string
        return elem
    # Translate this Q object to a valid Restriction XML tree
    elem = self.xml_elem(folders=folders, version=version, applies_to=applies_to)
    if elem is None:
        return None
    restriction = create_element('m:Restriction')
    restriction.append(elem)
    return restriction
def xml_elem(self, folders, version, applies_to)
Expand source code
def xml_elem(self, folders, version, applies_to):
    # Recursively build an XML tree structure of this Q object. If this is an empty leaf (the equivalent of Q()),
    # return None.
    from .indexed_properties import SingleFieldIndexedElement
    # Don't check self.value just yet. We want to return error messages on the field path first, and then the value.
    # This is done in _get_field_path() and _get_clean_value(), respectively.
    self._check_integrity()
    if self.is_empty():
        return None
    if self.is_never():
        raise ValueError("EWS does not support 'never' queries")
    if self.is_leaf():
        elem = self._op_to_xml(self.op)
        field_path = self._get_field_path(folders, applies_to=applies_to, version=version)
        clean_value = self._get_clean_value(field_path=field_path, version=version)
        if issubclass(field_path.field.value_cls, SingleFieldIndexedElement) and not field_path.label:
            # We allow a filter shortcut of e.g. email_addresses__contains=EmailAddress(label='Foo', ...) instead of
            # email_addresses__Foo_email_address=.... Set FieldPath label now so we can generate the field_uri.
            field_path.label = clean_value.label
        elif isinstance(field_path.field, DateTimeBackedDateField):
            # We need to convert to datetime
            clean_value = field_path.field.date_to_datetime(clean_value)
        elem.append(field_path.to_xml())
        constant = create_element('t:Constant')
        if self.op != self.EXISTS:
            # Use .set() to not fill up the create_element() cache with unique values
            constant.set('Value', value_to_xml_text(clean_value))
            if self.op in self.CONTAINS_OPS:
                elem.append(constant)
            else:
                uriorconst = create_element('t:FieldURIOrConstant')
                uriorconst.append(constant)
                elem.append(uriorconst)
    elif len(self.children) == 1:
        # We have only one child
        elem = self.children[0].xml_elem(folders=folders, version=version, applies_to=applies_to)
    else:
        # We have multiple children. If conn_type is NOT, then group children with AND. We'll add the NOT later
        elem = self._conn_to_xml(self.AND if self.conn_type == self.NOT else self.conn_type)
        # Sort children by field name so we get stable output (for easier testing). Children should never be empty
        for c in sorted(self.children, key=lambda i: i.field_path or ''):
            elem.append(c.xml_elem(folders=folders, version=version, applies_to=applies_to))
    if elem is None:
        return None  # Should not be necessary, but play safe
    if self.conn_type == self.NOT:
        # Encapsulate everything in the NOT element
        not_elem = self._conn_to_xml(self.conn_type)
        not_elem.append(elem)
        return not_elem
    return elem
class ReplyAllToItem (**kwargs)
Expand source code
class ReplyAllToItem(BaseReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/replyalltoitem"""

    ELEMENT_NAME = 'ReplyAllToItem'

Ancestors

Class variables

var ELEMENT_NAME

Inherited members

class ReplyToItem (**kwargs)
Expand source code
class ReplyToItem(BaseReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/replytoitem"""

    ELEMENT_NAME = 'ReplyToItem'

Ancestors

Class variables

var ELEMENT_NAME

Inherited members

class Room (**kwargs)
Expand source code
class Room(Mailbox):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/room"""

    ELEMENT_NAME = 'Room'

    @classmethod
    def from_xml(cls, elem, account):
        id_elem = elem.find('{%s}Id' % TNS)
        item_id_elem = id_elem.find(ItemId.response_tag())
        kwargs = dict(
            name=get_xml_attr(id_elem, '{%s}Name' % TNS),
            email_address=get_xml_attr(id_elem, '{%s}EmailAddress' % TNS),
            mailbox_type=get_xml_attr(id_elem, '{%s}MailboxType' % TNS),
            item_id=ItemId.from_xml(elem=item_id_elem, account=account) if item_id_elem else None,
        )
        cls._clear(elem)
        return cls(**kwargs)

Ancestors

Class variables

var ELEMENT_NAME

Static methods

def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    id_elem = elem.find('{%s}Id' % TNS)
    item_id_elem = id_elem.find(ItemId.response_tag())
    kwargs = dict(
        name=get_xml_attr(id_elem, '{%s}Name' % TNS),
        email_address=get_xml_attr(id_elem, '{%s}EmailAddress' % TNS),
        mailbox_type=get_xml_attr(id_elem, '{%s}MailboxType' % TNS),
        item_id=ItemId.from_xml(elem=item_id_elem, account=account) if item_id_elem else None,
    )
    cls._clear(elem)
    return cls(**kwargs)

Inherited members

class RoomList (**kwargs)
Expand source code
class RoomList(Mailbox):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/roomlist"""

    ELEMENT_NAME = 'RoomList'
    NAMESPACE = MNS

    @classmethod
    def response_tag(cls):
        # In a GetRoomLists response, room lists are delivered as Address elements. See
        # https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/address-emailaddresstype
        return '{%s}Address' % TNS

Ancestors

Class variables

var ELEMENT_NAME
var NAMESPACE

Static methods

def response_tag()
Expand source code
@classmethod
def response_tag(cls):
    # In a GetRoomLists response, room lists are delivered as Address elements. See
    # https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/address-emailaddresstype
    return '{%s}Address' % TNS

Inherited members

class RootOfHierarchy (**kwargs)

Base class for folders that implement the root of a folder hierarchy.
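
For example, account.root is an instance of a RootOfHierarchy subclass representing the account's default folder hierarchy, and it can be used to look up folders. A minimal sketch, assuming 'account' is an already-configured Account instance (folder names depend on the mailbox locale):

from exchangelib.folders import Calendar

calendar = account.root.get_default_folder(Calendar)  # The distinguished calendar folder
inbox = account.root / 'Top of Information Store' / 'Inbox'  # Navigate the hierarchy by name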

Expand source code
class RootOfHierarchy(BaseFolder, metaclass=EWSMeta):
    """Base class for folders that implement the root of a folder hierarchy."""

    # A list of wellknown, or "distinguished", folders that belong in this folder hierarchy. See
    # https://docs.microsoft.com/en-us/dotnet/api/microsoft.exchange.webservices.data.wellknownfoldername
    # and https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/distinguishedfolderid
    # 'RootOfHierarchy' subclasses must not be in this list.
    WELLKNOWN_FOLDERS = []

    _subfolders_lock = Lock()

    # This folder type also has 'folder:PermissionSet' on some server versions, but requesting it sometimes causes
    # 'ErrorAccessDenied', as reported by some users. Ignore it entirely for root folders - its usefulness is
    # deemed minimal at best.
    effective_rights = EffectiveRightsField(field_uri='folder:EffectiveRights', is_read_only=True,
                                            supported_from=EXCHANGE_2007_SP1)

    __slots__ = '_account', '_subfolders'

    # A special folder that acts as the top of a folder hierarchy. Finds and caches subfolders at arbitrary depth.
    def __init__(self, **kwargs):
        self._account = kwargs.pop('account', None)  # A pointer back to the account holding the folder hierarchy
        super().__init__(**kwargs)
        self._subfolders = None  # See self._folders_map()

    @property
    def account(self):
        return self._account

    @property
    def root(self):
        return self

    @property
    def parent(self):
        return None

    @classmethod
    def register(cls, *args, **kwargs):
        if cls is not RootOfHierarchy:
            raise TypeError('For folder roots, custom fields must be registered on the RootOfHierarchy class')
        return super().register(*args, **kwargs)

    @classmethod
    def deregister(cls, *args, **kwargs):
        if cls is not RootOfHierarchy:
            raise TypeError('For folder roots, custom fields must be registered on the RootOfHierarchy class')
        return super().deregister(*args, **kwargs)

    def get_folder(self, folder):
        if not folder.id:
            raise ValueError("'folder' must have an ID")
        return self._folders_map.get(folder.id, None)

    def add_folder(self, folder):
        if not folder.id:
            raise ValueError("'folder' must have an ID")
        self._folders_map[folder.id] = folder

    def update_folder(self, folder):
        if not folder.id:
            raise ValueError("'folder' must have an ID")
        self._folders_map[folder.id] = folder

    def remove_folder(self, folder):
        if not folder.id:
            raise ValueError("'folder' must have an ID")
        try:
            del self._folders_map[folder.id]
        except KeyError:
            pass

    def clear_cache(self):
        with self._subfolders_lock:
            self._subfolders = None

    def get_children(self, folder):
        for f in self._folders_map.values():
            if not f.parent:
                continue
            if f.parent.id == folder.id:
                yield f

    @classmethod
    def get_distinguished(cls, account):
        """Get the distinguished folder for this folder class.

        :param account:
        """
        if not cls.DISTINGUISHED_FOLDER_ID:
            raise ValueError('Class %s must have a DISTINGUISHED_FOLDER_ID value' % cls)
        try:
            return cls.resolve(
                account=account,
                folder=cls(account=account, name=cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
            )
        except MISSING_FOLDER_ERRORS:
            raise ErrorFolderNotFound('Could not find distinguished folder %s' % cls.DISTINGUISHED_FOLDER_ID)

    def get_default_folder(self, folder_cls):
        """Return the distinguished folder instance of type folder_cls belonging to this account. If no distinguished
        folder was found, try as best we can to return the default folder of type 'folder_cls'
        """
        if not folder_cls.DISTINGUISHED_FOLDER_ID:
            raise ValueError("'folder_cls' %s must have a DISTINGUISHED_FOLDER_ID value" % folder_cls)
        # Use cached distinguished folder instance, but only if cache has already been prepped. This is an optimization
        # for accessing e.g. 'account.contacts' without fetching all folders of the account.
        if self._subfolders is not None:
            for f in self._folders_map.values():
                # Require exact class, to not match subclasses, e.g. RecipientCache instead of Contacts
                if f.__class__ == folder_cls and f.is_distinguished:
                    log.debug('Found cached distinguished %s folder', folder_cls)
                    return f
        try:
            log.debug('Requesting distinguished %s folder explicitly', folder_cls)
            return folder_cls.get_distinguished(root=self)
        except ErrorAccessDenied:
            # Maybe we just don't have GetFolder access? Try FindItems instead
            log.debug('Testing default %s folder with FindItem', folder_cls)
            fld = folder_cls(root=self, name=folder_cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
            fld.test_access()
            return self._folders_map.get(fld.id, fld)  # Use cached instance if available
        except MISSING_FOLDER_ERRORS:
            # The Exchange server does not return a distinguished folder of this type
            pass
        raise ErrorFolderNotFound('No usable default %s folders' % folder_cls)

    @property
    def _folders_map(self):
        if self._subfolders is not None:
            return self._subfolders

        with self._subfolders_lock:
            # Map root, and all subfolders of root, at arbitrary depth by folder ID. First get distinguished folders,
            # so we are sure to apply the correct Folder class, then fetch all subfolders of this root.
            folders_map = {self.id: self}
            distinguished_folders = [
                cls(root=self, name=cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
                for cls in self.WELLKNOWN_FOLDERS
                if cls.get_folder_allowed and cls.supports_version(self.account.version)
            ]
            for f in FolderCollection(account=self.account, folders=distinguished_folders).resolve():
                if isinstance(f, MISSING_FOLDER_ERRORS):
                    # This is just a distinguished folder the server does not have
                    continue
                if isinstance(f, ErrorInvalidOperation):
                    # This is probably a distinguished folder the server does not have. We previously tested the exact
                    # error message (f.value), but some Exchange servers return localized error messages, so that's not
                    # possible to do reliably.
                    continue
                if isinstance(f, ErrorAccessDenied):
                    # We may not have GetFolder access, either to this folder or at all
                    continue
                if isinstance(f, Exception):
                    raise f
                folders_map[f.id] = f
            for f in SingleFolderQuerySet(account=self.account, folder=self).depth(
                    self.DEFAULT_FOLDER_TRAVERSAL_DEPTH
            ).all():
                if isinstance(f, ErrorAccessDenied):
                    # We may not have FindFolder access, or GetFolder access, either to this folder or at all
                    continue
                if isinstance(f, Exception):
                    raise f
                if f.id in folders_map:
                    # Already exists. Probably a distinguished folder
                    continue
                folders_map[f.id] = f
            self._subfolders = folders_map
            return folders_map

    @classmethod
    def from_xml(cls, elem, account):
        kwargs = cls._kwargs_from_elem(elem=elem, account=account)
        cls._clear(elem)
        return cls(account=account, **kwargs)

    @classmethod
    def folder_cls_from_folder_name(cls, folder_name, locale):
        """Return the folder class that matches a localized folder name.

        :param folder_name:
        :param locale: a string, e.g. 'da_DK'
        """
        for folder_cls in cls.WELLKNOWN_FOLDERS + NON_DELETABLE_FOLDERS:
            if folder_name.lower() in folder_cls.localized_names(locale):
                return folder_cls
        raise KeyError()

    def __repr__(self):
        # Let's not create an infinite loop when printing self.root
        return self.__class__.__name__ + \
               repr((self.account, '[self]', self.name, self.total_count, self.unread_count, self.child_folder_count,
                     self.folder_class, self.id, self.changekey))
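
A minimal usage sketch (assuming an already-configured Account instance named 'account', and that the Inbox folder class is importable from exchangelib.folders): the root's folder map is built lazily on first access and can be discarded with clear_cache().

from exchangelib.folders import Inbox    # assumed import; Inbox is a well-known folder class

root = account.root                      # the RootOfHierarchy instance for the mailbox
inbox = root.get_default_folder(Inbox)   # uses the cached folder map if prepped, else asks the server
print(inbox.name, inbox.total_count)

root.clear_cache()                       # discard the cached folder map; rebuilt lazily on next access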

Ancestors

Subclasses

Class variables

var FIELDS
var WELLKNOWN_FOLDERS

Static methods

def folder_cls_from_folder_name(folder_name, locale)

Return the folder class that matches a localized folder name.

:param folder_name:
:param locale: a string, e.g. 'da_DK'

Expand source code
@classmethod
def folder_cls_from_folder_name(cls, folder_name, locale):
    """Return the folder class that matches a localized folder name.

    :param folder_name:
    :param locale: a string, e.g. 'da_DK'
    """
    for folder_cls in cls.WELLKNOWN_FOLDERS + NON_DELETABLE_FOLDERS:
        if folder_name.lower() in folder_cls.localized_names(locale):
            return folder_cls
    raise KeyError()
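
Usage sketch (the localized folder name below is illustrative), given a configured Account named 'account':

# Map a localized folder name to its well-known folder class; KeyError is raised if there is no match
folder_cls = account.root.folder_cls_from_folder_name('Indbakke', locale='da_DK')
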
def from_xml(elem, account)
Expand source code
@classmethod
def from_xml(cls, elem, account):
    kwargs = cls._kwargs_from_elem(elem=elem, account=account)
    cls._clear(elem)
    return cls(account=account, **kwargs)
def get_distinguished(account)

Get the distinguished folder for this folder class.

:param account:

Expand source code
@classmethod
def get_distinguished(cls, account):
    """Get the distinguished folder for this folder class.

    :param account:
    """
    if not cls.DISTINGUISHED_FOLDER_ID:
        raise ValueError('Class %s must have a DISTINGUISHED_FOLDER_ID value' % cls)
    try:
        return cls.resolve(
            account=account,
            folder=cls(account=account, name=cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
        )
    except MISSING_FOLDER_ERRORS:
        raise ErrorFolderNotFound('Could not find distinguished folder %s' % cls.DISTINGUISHED_FOLDER_ID)

Instance variables

var account
Expand source code
@property
def account(self):
    return self._account
var effective_rights
var parent
Expand source code
@property
def parent(self):
    return None
var root
Expand source code
@property
def root(self):
    return self

Methods

def add_folder(self, folder)
Expand source code
def add_folder(self, folder):
    if not folder.id:
        raise ValueError("'folder' must have an ID")
    self._folders_map[folder.id] = folder
def clear_cache(self)
Expand source code
def clear_cache(self):
    with self._subfolders_lock:
        self._subfolders = None
def get_children(self, folder)
Expand source code
def get_children(self, folder):
    for f in self._folders_map.values():
        if not f.parent:
            continue
        if f.parent.id == folder.id:
            yield f
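
A short sketch (assuming a configured Account named 'account'): iterate the immediate children of the mailbox root via the cached folder map.

for child in account.root.get_children(account.root):
    print(child.name)
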
def get_default_folder(self, folder_cls)

Return the distinguished folder instance of type folder_cls belonging to this account. If no distinguished folder was found, try as best we can to return the default folder of type 'folder_cls'

Expand source code
def get_default_folder(self, folder_cls):
    """Return the distinguished folder instance of type folder_cls belonging to this account. If no distinguished
    folder was found, try as best we can to return the default folder of type 'folder_cls'
    """
    if not folder_cls.DISTINGUISHED_FOLDER_ID:
        raise ValueError("'folder_cls' %s must have a DISTINGUISHED_FOLDER_ID value" % folder_cls)
    # Use cached distinguished folder instance, but only if cache has already been prepped. This is an optimization
    # for accessing e.g. 'account.contacts' without fetching all folders of the account.
    if self._subfolders is not None:
        for f in self._folders_map.values():
            # Require exact class, to not match subclasses, e.g. RecipientCache instead of Contacts
            if f.__class__ == folder_cls and f.is_distinguished:
                log.debug('Found cached distinguished %s folder', folder_cls)
                return f
    try:
        log.debug('Requesting distinguished %s folder explicitly', folder_cls)
        return folder_cls.get_distinguished(root=self)
    except ErrorAccessDenied:
        # Maybe we just don't have GetFolder access? Try FindItems instead
        log.debug('Testing default %s folder with FindItem', folder_cls)
        fld = folder_cls(root=self, name=folder_cls.DISTINGUISHED_FOLDER_ID, is_distinguished=True)
        fld.test_access()
        return self._folders_map.get(fld.id, fld)  # Use cached instance if available
    except MISSING_FOLDER_ERRORS:
        # The Exchange server does not return a distinguished folder of this type
        pass
    raise ErrorFolderNotFound('No usable default %s folders' % folder_cls)
def get_folder(self, folder)
Expand source code
def get_folder(self, folder):
    if not folder.id:
        raise ValueError("'folder' must have an ID")
    return self._folders_map.get(folder.id, None)
def remove_folder(self, folder)
Expand source code
def remove_folder(self, folder):
    if not folder.id:
        raise ValueError("'folder' must have an ID")
    try:
        del self._folders_map[folder.id]
    except KeyError:
        pass
def update_folder(self, folder)
Expand source code
def update_folder(self, folder):
    if not folder.id:
        raise ValueError("'folder' must have an ID")
    self._folders_map[folder.id] = folder

Inherited members

class TLSClientAuth (pool_connections=10, pool_maxsize=10, max_retries=0, pool_block=False)

An HTTP adapter that implements Certificate Based Authentication (CBA).

Expand source code
class TLSClientAuth(requests.adapters.HTTPAdapter):
    """An HTTP adapter that implements Certificate Based Authentication (CBA)."""

    cert_file = None

    def init_poolmanager(self, *args, **kwargs):
        kwargs['cert_file'] = self.cert_file
        return super().init_poolmanager(*args, **kwargs)

Ancestors

  • requests.adapters.HTTPAdapter
  • requests.adapters.BaseAdapter

Class variables

var cert_file

Methods

def init_poolmanager(self, *args, **kwargs)

Initializes a urllib3 PoolManager.

This method should not be called from user code, and is only exposed for use when subclassing HTTPAdapter (requests.adapters.HTTPAdapter).

:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.

Expand source code
def init_poolmanager(self, *args, **kwargs):
    kwargs['cert_file'] = self.cert_file
    return super().init_poolmanager(*args, **kwargs)
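
Usage sketch for certificate-based authentication (the certificate path is a placeholder):

from exchangelib import BaseProtocol, TLSClientAuth

TLSClientAuth.cert_file = '/path/to/client_cert_and_key.pem'   # placeholder PEM with client cert and key
BaseProtocol.HTTP_ADAPTER_CLS = TLSClientAuth                  # let all EWS connections use this adapter
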
class Task (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/task

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs:
    'account' is optional but allows calling 'send()' and 'delete()'.
    'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.

Expand source code
class Task(Item):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/task"""

    ELEMENT_NAME = 'Task'
    NOT_STARTED = 'NotStarted'
    COMPLETED = 'Completed'

    actual_work = IntegerField(field_uri='task:ActualWork', min=0)
    assigned_time = DateTimeField(field_uri='task:AssignedTime', is_read_only=True)
    billing_information = TextField(field_uri='task:BillingInformation')
    change_count = IntegerField(field_uri='task:ChangeCount', is_read_only=True, min=0)
    companies = TextListField(field_uri='task:Companies')
    # 'complete_date' can be set, but is ignored by the server, which sets it to now()
    complete_date = DateTimeField(field_uri='task:CompleteDate', is_read_only=True)
    contacts = TextListField(field_uri='task:Contacts')
    delegation_state = ChoiceField(field_uri='task:DelegationState', choices={
        Choice('NoMatch'), Choice('OwnNew'), Choice('Owned'), Choice('Accepted'), Choice('Declined'), Choice('Max')
    }, is_read_only=True)
    delegator = CharField(field_uri='task:Delegator', is_read_only=True)
    due_date = DateTimeBackedDateField(field_uri='task:DueDate')
    is_editable = BooleanField(field_uri='task:IsAssignmentEditable', is_read_only=True)
    is_complete = BooleanField(field_uri='task:IsComplete', is_read_only=True)
    is_recurring = BooleanField(field_uri='task:IsRecurring', is_read_only=True)
    is_team_task = BooleanField(field_uri='task:IsTeamTask', is_read_only=True)
    mileage = TextField(field_uri='task:Mileage')
    owner = CharField(field_uri='task:Owner', is_read_only=True)
    percent_complete = DecimalField(field_uri='task:PercentComplete', is_required=True, default=Decimal(0.0),
                                    min=Decimal(0), max=Decimal(100), is_searchable=False)
    recurrence = TaskRecurrenceField(field_uri='task:Recurrence', is_searchable=False)
    start_date = DateTimeBackedDateField(field_uri='task:StartDate')
    status = ChoiceField(field_uri='task:Status', choices={
        Choice(NOT_STARTED), Choice('InProgress'), Choice(COMPLETED), Choice('WaitingOnOthers'), Choice('Deferred')
    }, is_required=True, is_searchable=False, default=NOT_STARTED)
    status_description = CharField(field_uri='task:StatusDescription', is_read_only=True)
    total_work = IntegerField(field_uri='task:TotalWork', min=0)

    def clean(self, version=None):
        super().clean(version=version)
        if self.due_date and self.start_date and self.due_date < self.start_date:
            log.warning("'due_date' must be greater than 'start_date' (%s vs %s). Resetting 'due_date'",
                        self.due_date, self.start_date)
            self.due_date = self.start_date
        if self.complete_date:
            if self.status != self.COMPLETED:
                log.warning("'status' must be '%s' when 'complete_date' is set (%s). Resetting",
                            self.COMPLETED, self.status)
                self.status = self.COMPLETED
            now = datetime.datetime.now(tz=UTC)
            if (self.complete_date - now).total_seconds() > 120:
                # Reset complete_date values that are in the future
                # 'complete_date' can be set automatically by the server. Allow some grace between local and server time
                log.warning("'complete_date' must be in the past (%s vs %s). Resetting", self.complete_date, now)
                self.complete_date = now
            if self.start_date and self.complete_date.date() < self.start_date:
                log.warning("'complete_date' must be greater than 'start_date' (%s vs %s). Resetting",
                            self.complete_date, self.start_date)
                self.complete_date = EWSDateTime.combine(self.start_date, datetime.time(0, 0)).replace(tzinfo=UTC)
        if self.percent_complete is not None:
            if self.status == self.COMPLETED and self.percent_complete != Decimal(100):
                # percent_complete must be 100% if task is complete
                log.warning("'percent_complete' must be 100 when 'status' is '%s' (%s). Resetting",
                            self.COMPLETED, self.percent_complete)
                self.percent_complete = Decimal(100)
            elif self.status == self.NOT_STARTED and self.percent_complete != Decimal(0):
                # percent_complete must be 0% if task is not started
                log.warning("'percent_complete' must be 0 when 'status' is '%s' (%s). Resetting",
                            self.NOT_STARTED, self.percent_complete)
                self.percent_complete = Decimal(0)

    def complete(self):
        # A helper method to mark a task as complete on the server
        self.status = Task.COMPLETED
        self.percent_complete = Decimal(100)
        self.save()
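
Usage sketch (assuming a configured Account named 'account'); field names follow the class definition above:

from decimal import Decimal
from exchangelib import Task

task = Task(folder=account.tasks, subject='Write report', status='InProgress',
            percent_complete=Decimal(50))
task.save()       # 'folder' (or 'account') must be set for save() to work
task.complete()   # sets status=COMPLETED and percent_complete=100, then saves again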

Ancestors

Class variables

var COMPLETED
var ELEMENT_NAME
var FIELDS
var NOT_STARTED

Instance variables

var actual_work
var assigned_time
var billing_information
var change_count
var companies
var complete_date
var contacts
var delegation_state
var delegator
var due_date
var is_complete
var is_editable
var is_recurring
var is_team_task
var mileage
var owner
var percent_complete
var recurrence
var start_date
var status
var status_description
var total_work

Methods

def clean(self, version=None)
Expand source code
def clean(self, version=None):
    super().clean(version=version)
    if self.due_date and self.start_date and self.due_date < self.start_date:
        log.warning("'due_date' must be greater than 'start_date' (%s vs %s). Resetting 'due_date'",
                    self.due_date, self.start_date)
        self.due_date = self.start_date
    if self.complete_date:
        if self.status != self.COMPLETED:
            log.warning("'status' must be '%s' when 'complete_date' is set (%s). Resetting",
                        self.COMPLETED, self.status)
            self.status = self.COMPLETED
        now = datetime.datetime.now(tz=UTC)
        if (self.complete_date - now).total_seconds() > 120:
            # Reset complete_date values that are in the future
            # 'complete_date' can be set automatically by the server. Allow some grace between local and server time
            log.warning("'complete_date' must be in the past (%s vs %s). Resetting", self.complete_date, now)
            self.complete_date = now
        if self.start_date and self.complete_date.date() < self.start_date:
            log.warning("'complete_date' must be greater than 'start_date' (%s vs %s). Resetting",
                        self.complete_date, self.start_date)
            self.complete_date = EWSDateTime.combine(self.start_date, datetime.time(0, 0)).replace(tzinfo=UTC)
    if self.percent_complete is not None:
        if self.status == self.COMPLETED and self.percent_complete != Decimal(100):
            # percent_complete must be 100% if task is complete
            log.warning("'percent_complete' must be 100 when 'status' is '%s' (%s). Resetting",
                        self.COMPLETED, self.percent_complete)
            self.percent_complete = Decimal(100)
        elif self.status == self.NOT_STARTED and self.percent_complete != Decimal(0):
            # percent_complete must be 0% if task is not started
            log.warning("'percent_complete' must be 0 when 'status' is '%s' (%s). Resetting",
                        self.NOT_STARTED, self.percent_complete)
            self.percent_complete = Decimal(0)
def complete(self)
Expand source code
def complete(self):
    # A helper method to mark a task as complete on the server
    self.status = Task.COMPLETED
    self.percent_complete = Decimal(100)
    self.save()

Inherited members

class TentativelyAcceptItem (**kwargs)

MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/tentativelyacceptitem

Pick out optional 'account' and 'folder' kwargs, and pass the rest to the parent class.

:param kwargs:
    'account' is optional but allows calling 'send()' and 'delete()'.
    'folder' is optional but allows calling 'save()'. If 'folder' has an account, and 'account' is not set, we use folder.account.

Expand source code
class TentativelyAcceptItem(BaseMeetingReplyItem):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/tentativelyacceptitem"""

    ELEMENT_NAME = 'TentativelyAcceptItem'

Ancestors

Class variables

var ELEMENT_NAME

Inherited members

class UID (uid)

Helper class to encode Calendar UIDs. See issue #453. Example:

class GlobalObjectId(ExtendedProperty):
    distinguished_property_set_id = 'Meeting'
    property_id = 3
    property_type = 'Binary'

CalendarItem.register('global_object_id', GlobalObjectId)
account.calendar.filter(global_object_id=UID('261cbc18-1f65-5a0a-bd11-23b1e224cc2f'))

Expand source code
class UID(bytes):
    """Helper class to encode Calendar UIDs. See issue #453. Example:

    class GlobalObjectId(ExtendedProperty):
        distinguished_property_set_id = 'Meeting'
        property_id = 3
        property_type = 'Binary'

    CalendarItem.register('global_object_id', GlobalObjectId)
    account.calendar.filter(global_object_id=UID('261cbc18-1f65-5a0a-bd11-23b1e224cc2f'))
    """

    _HEADER = binascii.hexlify(bytearray((
        0x04, 0x00, 0x00, 0x00,
        0x82, 0x00, 0xE0, 0x00,
        0x74, 0xC5, 0xB7, 0x10,
        0x1A, 0x82, 0xE0, 0x08)))

    _EXCEPTION_REPLACEMENT_TIME = binascii.hexlify(bytearray((
        0, 0, 0, 0)))

    _CREATION_TIME = binascii.hexlify(bytearray((
        0, 0, 0, 0,
        0, 0, 0, 0)))

    _RESERVED = binascii.hexlify(bytearray((
        0, 0, 0, 0,
        0, 0, 0, 0)))

    # https://docs.microsoft.com/en-us/openspecs/exchange_server_protocols/ms-oxocal/1d3aac05-a7b9-45cc-a213-47f0a0a2c5c1
    # https://docs.microsoft.com/en-us/openspecs/exchange_server_protocols/ms-asemail/e7424ddc-dd10-431e-a0b7-5c794863370e
    # https://stackoverflow.com/questions/42259122
    # https://stackoverflow.com/questions/33757805

    def __new__(cls, uid):
        payload = binascii.hexlify(bytearray('vCal-Uid\x01\x00\x00\x00{}\x00'.format(uid).encode('ascii')))
        length = binascii.hexlify(bytearray(struct.pack('<I', int(len(payload)/2))))
        encoding = b''.join([
            cls._HEADER, cls._EXCEPTION_REPLACEMENT_TIME, cls._CREATION_TIME, cls._RESERVED, length, payload
        ])
        return super().__new__(cls, codecs.decode(encoding, 'hex'))

    @classmethod
    def to_global_object_id(cls, uid):
        """Converts a UID as returned by EWS to GlobalObjectId format"""
        return binascii.unhexlify(uid)

Ancestors

  • builtins.bytes

Static methods

def to_global_object_id(uid)

Converts a UID as returned by EWS to GlobalObjectId format

Expand source code
@classmethod
def to_global_object_id(cls, uid):
    """Converts a UID as returned by EWS to GlobalObjectId format"""
    return binascii.unhexlify(uid)
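
Round-trip sketch (illustrative values only): UID() produces the binary GlobalObjectId for an iCalendar UID, and to_global_object_id() converts the hex form back to those bytes.

import binascii
from exchangelib import UID

goid = UID('261cbc18-1f65-5a0a-bd11-23b1e224cc2f')   # bytes suitable for a 'Binary' extended property
hex_form = binascii.hexlify(goid)                    # hex representation of the GlobalObjectId
assert UID.to_global_object_id(hex_form) == goid     # to_global_object_id() simply unhexlifies
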
class Version (build, api_version=None)

Holds information about the server version.

Expand source code
class Version:
    """Holds information about the server version."""

    __slots__ = 'build', 'api_version'

    def __init__(self, build, api_version=None):
        if not isinstance(build, (Build, type(None))):
            raise ValueError("'build' must be a Build instance")
        self.build = build
        if api_version is None:
            self.api_version = build.api_version()
        else:
            if not isinstance(api_version, str):
                raise ValueError("'api_version' must be a string")
            self.api_version = api_version

    @property
    def fullname(self):
        return VERSIONS[self.api_version][1]

    @classmethod
    def guess(cls, protocol, api_version_hint=None):
        """Ask the server which version it has. We haven't set up an Account object yet, so we generate requests
        by hand. We only need a response header containing a ServerVersionInfo element.

        To get API version and build numbers from the server, we need to send a valid SOAP request. We can't do that
        without a valid API version. To solve this chicken-and-egg problem, we try all possible API versions that this
        package supports, until we get a valid response.

        :param protocol:
        :param api_version_hint:  (Default value = None)
        """
        from .services import ResolveNames
        # The protocol doesn't have a version yet, so default to latest supported version if we don't have a hint.
        api_version = api_version_hint or API_VERSIONS[0]
        log.debug('Asking server for version info using API version %s', api_version)
        # We don't know the build version yet. Hopefully, the server will report it in the SOAP header. Lots of
        # places expect a version to have a build, so this is a bit dangerous, but passing a fake build around is also
        # dangerous. Make sure the call to ResolveNames does not require a version build.
        protocol.config.version = Version(build=None, api_version=api_version)
        # Use ResolveNames as a minimal request to the server to test if the version is correct. If not, ResolveNames
        # will try to guess the version automatically.
        name = str(protocol.credentials) if protocol.credentials and str(protocol.credentials) else 'DUMMY'
        try:
            list(ResolveNames(protocol=protocol).call(unresolved_entries=[name]))
        except ResponseMessageError as e:
            # We may have survived long enough to get a new version
            if not protocol.config.version.build:
                raise TransportError('No valid version headers found in response (%r)' % e)
        if not protocol.config.version.build:
            raise TransportError('No valid version headers found in response')
        return protocol.version

    @staticmethod
    def _is_invalid_version_string(version):
        # Check if a version string is bogus, e.g. V2_, V2015_ or V2018_
        return re.match(r'V[0-9]{1,4}_.*', version)

    @classmethod
    def from_soap_header(cls, requested_api_version, header):
        info = header.find('{%s}ServerVersionInfo' % TNS)
        if info is None:
            raise TransportError('No ServerVersionInfo in header: %r' % xml_to_str(header))
        try:
            build = Build.from_xml(elem=info)
        except ValueError:
            raise TransportError('Bad ServerVersionInfo in response: %r' % xml_to_str(header))
        # Not all Exchange servers send the Version element
        api_version_from_server = info.get('Version') or build.api_version()
        if api_version_from_server != requested_api_version:
            if cls._is_invalid_version_string(api_version_from_server):
                # For unknown reasons, Office 365 may respond with an API version string that is invalid in a request.
                # Detect these so we can fall back to a valid version string.
                log.debug('API version "%s" worked but server reports version "%s". Using "%s"', requested_api_version,
                          api_version_from_server, requested_api_version)
                api_version_from_server = requested_api_version
            else:
                # Trust API version from server response
                log.debug('API version "%s" worked but server reports version "%s". Using "%s"', requested_api_version,
                          api_version_from_server, api_version_from_server)
        return cls(build=build, api_version=api_version_from_server)

    def __eq__(self, other):
        if self.api_version != other.api_version:
            return False
        if self.build and not other.build:
            return False
        if other.build and not self.build:
            return False
        return self.build == other.build

    def __repr__(self):
        return self.__class__.__name__ + repr((self.build, self.api_version))

    def __str__(self):
        return 'Build=%s, API=%s, Fullname=%s' % (self.build, self.api_version, self.fullname)
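
Usage sketch: pin a known server version in a Configuration so the version guessing described below can be skipped (hostname and credentials are placeholders):

from exchangelib import Build, Configuration, Credentials, Version

version = Version(build=Build(15, 1))   # e.g. Exchange 2016; api_version is derived from the build
config = Configuration(
    server='mail.example.com',                                  # placeholder hostname
    credentials=Credentials('user@example.com', 'topsecret'),   # placeholder credentials
    version=version,
)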

Static methods

def from_soap_header(requested_api_version, header)
Expand source code
@classmethod
def from_soap_header(cls, requested_api_version, header):
    info = header.find('{%s}ServerVersionInfo' % TNS)
    if info is None:
        raise TransportError('No ServerVersionInfo in header: %r' % xml_to_str(header))
    try:
        build = Build.from_xml(elem=info)
    except ValueError:
        raise TransportError('Bad ServerVersionInfo in response: %r' % xml_to_str(header))
    # Not all Exchange servers send the Version element
    api_version_from_server = info.get('Version') or build.api_version()
    if api_version_from_server != requested_api_version:
        if cls._is_invalid_version_string(api_version_from_server):
            # For unknown reasons, Office 365 may respond with an API version string that is invalid in a request.
            # Detect these so we can fall back to a valid version string.
            log.debug('API version "%s" worked but server reports version "%s". Using "%s"', requested_api_version,
                      api_version_from_server, requested_api_version)
            api_version_from_server = requested_api_version
        else:
            # Trust API version from server response
            log.debug('API version "%s" worked but server reports version "%s". Using "%s"', requested_api_version,
                      api_version_from_server, api_version_from_server)
    return cls(build=build, api_version=api_version_from_server)
def guess(protocol, api_version_hint=None)

Ask the server which version it has. We haven't set up an Account object yet, so we generate requests by hand. We only need a response header containing a ServerVersionInfo element.

To get API version and build numbers from the server, we need to send a valid SOAP request. We can't do that without a valid API version. To solve this chicken-and-egg problem, we try all possible API versions that this package supports, until we get a valid response.

:param protocol:
:param api_version_hint: (Default value = None)

Expand source code
@classmethod
def guess(cls, protocol, api_version_hint=None):
    """Ask the server which version it has. We haven't set up an Account object yet, so we generate requests
    by hand. We only need a response header containing a ServerVersionInfo element.

    To get API version and build numbers from the server, we need to send a valid SOAP request. We can't do that
    without a valid API version. To solve this chicken-and-egg problem, we try all possible API versions that this
    package supports, until we get a valid response.

    :param protocol:
    :param api_version_hint:  (Default value = None)
    """
    from .services import ResolveNames
    # The protocol doesn't have a version yet, so default to latest supported version if we don't have a hint.
    api_version = api_version_hint or API_VERSIONS[0]
    log.debug('Asking server for version info using API version %s', api_version)
    # We don't know the build version yet. Hopefully, the server will report it in the SOAP header. Lots of
    # places expect a version to have a build, so this is a bit dangerous, but passing a fake build around is also
    # dangerous. Make sure the call to ResolveNames does not require a version build.
    protocol.config.version = Version(build=None, api_version=api_version)
    # Use ResolveNames as a minimal request to the server to test if the version is correct. If not, ResolveNames
    # will try to guess the version automatically.
    name = str(protocol.credentials) if protocol.credentials and str(protocol.credentials) else 'DUMMY'
    try:
        list(ResolveNames(protocol=protocol).call(unresolved_entries=[name]))
    except ResponseMessageError as e:
        # We may have survived long enough to get a new version
        if not protocol.config.version.build:
            raise TransportError('No valid version headers found in response (%r)' % e)
    if not protocol.config.version.build:
        raise TransportError('No valid version headers found in response')
    return protocol.version
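
Sketch (assuming an already-connected Account named 'account'): ask the server for its version explicitly via the account's protocol.

from exchangelib import Version

version = Version.guess(account.protocol)
print(version.build, version.api_version, version.fullname)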

Instance variables

var api_version

The EWS API version string for this server version, e.g. 'Exchange2016'.

var build

The Build instance describing the server build numbers, or None if unknown.

var fullname
Expand source code
@property
def fullname(self):
    return VERSIONS[self.api_version][1]