diff --git a/docs/enable-gcp-apis.md b/docs/enable-gcp-apis.md index a82cbee..d26f183 100644 --- a/docs/enable-gcp-apis.md +++ b/docs/enable-gcp-apis.md @@ -16,4 +16,19 @@ If the process was successful, you will see the following screen: ![](images/gmail-api-enabled.png) -The Gmail API is enabled. \ No newline at end of file +The Gmail API is enabled. + +## People API (for contacts) + +Navigate to this link: +https://console.cloud.google.com/marketplace/product/google/people.googleapis.com + +Click to "ENABLE" button. + +![](images/people-api-enable.png) + +If the process was successful, you will see the following screen: + +![](images/people-api-enabled.png) + +The People API is enabled. diff --git a/docs/images/people-api-enable.png b/docs/images/people-api-enable.png new file mode 100644 index 0000000..033b19a Binary files /dev/null and b/docs/images/people-api-enable.png differ diff --git a/docs/images/people-api-enabled.png b/docs/images/people-api-enabled.png new file mode 100644 index 0000000..3142c43 Binary files /dev/null and b/docs/images/people-api-enabled.png differ diff --git a/gwbackupy/__main__.py b/gwbackupy/__main__.py index 92f0f43..9ebb69e 100644 --- a/gwbackupy/__main__.py +++ b/gwbackupy/__main__.py @@ -1,4 +1,4 @@ -from gwbackupy import gwbackupy_cli +from gwbackupy.cli import gwbackupy_cli if __name__ == "__main__": gwbackupy_cli.cli_startup() diff --git a/gwbackupy/cli/gmail_cli.py b/gwbackupy/cli/gmail_cli.py new file mode 100644 index 0000000..54d3acb --- /dev/null +++ b/gwbackupy/cli/gmail_cli.py @@ -0,0 +1,71 @@ +def add_cli_args_gmail(service_parser): + gmail_parser = service_parser.add_parser("gmail", help="GMail service commands") + gmail_command_parser = gmail_parser.add_subparsers(dest="command") + + gmail_oauth_init_parser = gmail_command_parser.add_parser( + "access-init", help="Access initialization e.g. 
OAuth authentication" + ) + gmail_oauth_init_parser.add_argument( + "--email", type=str, help="Email account", required=True + ) + gmail_oauth_check_parser = gmail_command_parser.add_parser( + "access-check", help="Check access e.g. OAuth tokens" + ) + gmail_oauth_check_parser.add_argument( + "--email", type=str, help="Email account", required=True + ) + gmail_backup_parser = gmail_command_parser.add_parser("backup", help="Backup gmail") + gmail_backup_parser.add_argument( + "--email", type=str, help="Email of the account", required=True + ) + gmail_backup_parser.add_argument( + "--quick-sync-days", + type=int, + default=None, + help="Quick sync number of days back. (It does not delete messages from local " + "storage.)", + ) + + gmail_restore_parser = gmail_command_parser.add_parser( + "restore", help="Restore gmail" + ) + gmail_restore_parser.add_argument( + "--email", type=str, help="Email from which restore", required=True + ) + gmail_restore_parser.add_argument( + "--to-email", + type=str, + help="Destination email account, if not specified, then --email is used", + ) + gmail_restore_parser.add_argument( + "--add-label", + type=str, + action="append", + help="Add label to restored emails", + default=None, + dest="add_labels", + ) + gmail_restore_parser.add_argument( + "--restore-deleted", + help="Restore deleted emails", + default=False, + action="store_true", + ) + gmail_restore_parser.add_argument( + "--restore-missing", + help="Restore missing emails", + default=False, + action="store_true", + ) + gmail_restore_parser.add_argument( + "--filter-date-from", + type=str, + help="Filter date from (inclusive, format: yyyy-mm-dd or yyyy-mm-dd hh:mm:ss)", + default=None, + ) + gmail_restore_parser.add_argument( + "--filter-date-to", + type=str, + help="Filter date to (exclusive, format: yyyy-mm-dd or yyyy-mm-dd hh:mm:ss)", + default=None, + ) diff --git a/gwbackupy/gwbackupy_cli.py b/gwbackupy/cli/gwbackupy_cli.py similarity index 66% rename from 
gwbackupy/gwbackupy_cli.py rename to gwbackupy/cli/gwbackupy_cli.py index e376128..9c37838 100644 --- a/gwbackupy/gwbackupy_cli.py +++ b/gwbackupy/cli/gwbackupy_cli.py @@ -1,5 +1,6 @@ import argparse import logging +import os import sys import threading @@ -7,18 +8,23 @@ from tzlocal import get_localzone import gwbackupy.global_properties as global_properties +from gwbackupy.cli.gmail_cli import add_cli_args_gmail +from gwbackupy.cli.peoples_cli import add_cli_args_peoples from gwbackupy.filters.gmail_filter import GmailFilter from gwbackupy.gmail import Gmail from gwbackupy.helpers import parse_date +from gwbackupy.people import People from gwbackupy.providers.gapi_gmail_service_wrapper import GapiGmailServiceWrapper +from gwbackupy.providers.gapi_people_service_wrapper import GapiPeopleServiceWrapper from gwbackupy.providers.gapi_service_provider import AccessNotInitializedError from gwbackupy.providers.gmail_service_provider import GmailServiceProvider +from gwbackupy.providers.people_service_provider import PeopleServiceProvider from gwbackupy.storage.file_storage import FileStorage lock = threading.Lock() -def parse_arguments() -> argparse.Namespace: +def parse_arguments(people_cli=None) -> argparse.Namespace: log_levels = { "finest": global_properties.log_finest, "debug": logging.DEBUG, @@ -104,77 +110,9 @@ def parse_arguments() -> argparse.Namespace: help="OAuth redirect host, default is localhost", ) service_parser = parser.add_subparsers(dest="service") - gmail_parser = service_parser.add_parser("gmail", help="GMail service commands") - gmail_command_parser = gmail_parser.add_subparsers(dest="command") + add_cli_args_gmail(service_parser) + add_cli_args_peoples(service_parser) - gmail_oauth_init_parser = gmail_command_parser.add_parser( - "access-init", help="Access initialization e.g. 
OAuth authentication" - ) - gmail_oauth_init_parser.add_argument( - "--email", type=str, help="Email account", required=True - ) - gmail_oauth_check_parser = gmail_command_parser.add_parser( - "access-check", help="Check access e.g. OAuth tokens" - ) - gmail_oauth_check_parser.add_argument( - "--email", type=str, help="Email account", required=True - ) - - gmail_backup_parser = gmail_command_parser.add_parser("backup", help="Backup gmail") - gmail_backup_parser.add_argument( - "--email", type=str, help="Email of the account", required=True - ) - gmail_backup_parser.add_argument( - "--quick-sync-days", - type=int, - default=None, - help="Quick sync number of days back. (It does not delete messages from local " - "storage.)", - ) - - gmail_restore_parser = gmail_command_parser.add_parser( - "restore", help="Restore gmail" - ) - gmail_restore_parser.add_argument( - "--email", type=str, help="Email from which restore", required=True - ) - gmail_restore_parser.add_argument( - "--to-email", - type=str, - help="Destination email account, if not specified, then --email is used", - ) - gmail_restore_parser.add_argument( - "--add-label", - type=str, - action="append", - help="Add label to restored emails", - default=None, - dest="add_labels", - ) - gmail_restore_parser.add_argument( - "--restore-deleted", - help="Restore deleted emails", - default=False, - action="store_true", - ) - gmail_restore_parser.add_argument( - "--restore-missing", - help="Restore missing emails", - default=False, - action="store_true", - ) - gmail_restore_parser.add_argument( - "--filter-date-from", - type=str, - help="Filter date from (inclusive, format: yyyy-mm-dd or yyyy-mm-dd hh:mm:ss)", - default=None, - ) - gmail_restore_parser.add_argument( - "--filter-date-to", - type=str, - help="Filter date to (exclusive, format: yyyy-mm-dd or yyyy-mm-dd hh:mm:ss)", - default=None, - ) if len(sys.argv) == 1 or "--help" in sys.argv: parser.print_help(sys.stderr) sys.exit(1) @@ -204,23 +142,56 @@ def 
parse_arguments() -> argparse.Namespace: def cli_startup(): try: args = parse_arguments() - if args.service == "gmail": - storage = FileStorage(args.workdir + "/" + args.email + "/gmail") - storage_oauth_tokens = FileStorage(args.workdir + "/oauth-tokens") - service_provider = GmailServiceProvider( - credentials_file_path=args.credentials_filepath, - service_account_email=args.service_account_email, - service_account_file_path=args.service_account_key_filepath, - storage=storage_oauth_tokens, - oauth_bind_addr=args.oauth_bind_address, - oauth_port=args.oauth_port, - oauth_redirect_host=args.oauth_redirect_host, + + storage = FileStorage(os.path.join(args.workdir, args.email, args.service)) + storage_oauth_tokens = FileStorage(os.path.join(args.workdir, "oauth-tokens")) + service_provider_args = { + "credentials_file_path": args.credentials_filepath, + "service_account_email": args.service_account_email, + "service_account_file_path": args.service_account_key_filepath, + "storage": storage_oauth_tokens, + "oauth_bind_addr": args.oauth_bind_address, + "oauth_port": args.oauth_port, + "oauth_redirect_host": args.oauth_redirect_host, + } + + if args.service == "peoples": + service_provider = PeopleServiceProvider(**service_provider_args) + service_wrapper = GapiPeopleServiceWrapper( + service_provider=service_provider, + dry_mode=args.dry, + ) + + service = People( + email=args.email, + service_wrapper=service_wrapper, + # batch_size=args.batch_size, + batch_size=1, + storage=storage, + dry_mode=args.dry, ) + if args.command == "access-init": + service_wrapper.get_peoples(args.email) + elif args.command == "access-check": + try: + with service_provider.get_service(args.email, False) as s: + service_wrapper.get_peoples(args.email) + except AccessNotInitializedError: + exit(1) + elif args.command == "backup": + if service.backup(): + exit(0) + else: + exit(1) + else: + exit(1) + elif args.service == "gmail": + service_provider = 
GmailServiceProvider(**service_provider_args) service_wrapper = GapiGmailServiceWrapper( service_provider=service_provider, dry_mode=args.dry, ) - gmail = Gmail( + service = Gmail( email=args.email, service_wrapper=service_wrapper, batch_size=args.batch_size, @@ -236,7 +207,7 @@ def cli_startup(): except AccessNotInitializedError: exit(1) elif args.command == "backup": - if gmail.backup(quick_sync_days=args.quick_sync_days): + if service.backup(quick_sync_days=args.quick_sync_days): exit(0) else: exit(1) @@ -258,7 +229,7 @@ def cli_startup(): dt = parse_date(args.filter_date_to, args.timezone) item_filter.date_to(dt) logging.info(f"Filter options: date to {dt}") - if gmail.restore( + if service.restore( to_email=args.to_email, item_filter=item_filter, restore_deleted=args.restore_deleted, diff --git a/gwbackupy/cli/peoples_cli.py b/gwbackupy/cli/peoples_cli.py new file mode 100644 index 0000000..63c8d11 --- /dev/null +++ b/gwbackupy/cli/peoples_cli.py @@ -0,0 +1,56 @@ +from gwbackupy.helpers import parse_date + + +def add_cli_args_peoples(service_parser): + people_parser = service_parser.add_parser( + "peoples", help="Peoples (contacts) service commands" + ) + people_command_parser = people_parser.add_subparsers(dest="command") + people_oauth_init_parser = people_command_parser.add_parser( + "access-init", help="Access initialization e.g. OAuth authentication" + ) + people_oauth_init_parser.add_argument( + "--email", type=str, help="Email account", required=True + ) + people_oauth_check_parser = people_command_parser.add_parser( + "access-check", help="Check access e.g. 
OAuth tokens" + ) + people_oauth_check_parser.add_argument( + "--email", type=str, help="Email account", required=True + ) + people_backup_parser = people_command_parser.add_parser( + "backup", help="Backup people" + ) + people_backup_parser.add_argument( + "--email", type=str, help="Email account", required=True + ) + people_backup_parser.add_argument( + "--start-date", + type=parse_date, + help="Start date (inclusive)", + required=False, + ) + people_backup_parser.add_argument( + "--end-date", + type=parse_date, + help="End date (exclusive)", + required=False, + ) + people_restore_parser = people_command_parser.add_parser( + "restore", help="Restore people" + ) + people_restore_parser.add_argument( + "--email", type=str, help="Email account", required=True + ) + people_restore_parser.add_argument( + "--restore-deleted", + help="Restore deleted emails", + default=False, + action="store_true", + ) + people_restore_parser.add_argument( + "--restore-missing", + help="Restore missing emails", + default=False, + action="store_true", + ) diff --git a/gwbackupy/gmail.py b/gwbackupy/gmail.py index d739863..e35a756 100644 --- a/gwbackupy/gmail.py +++ b/gwbackupy/gmail.py @@ -57,7 +57,6 @@ def __init__( batch_size = 5 self.batch_size = batch_size self.__lock = threading.RLock() - self.__services = {} self.__error_count = 0 self.__service_wrapper = service_wrapper if labels is None: diff --git a/gwbackupy/helpers.py b/gwbackupy/helpers.py index b4cbd74..525f357 100644 --- a/gwbackupy/helpers.py +++ b/gwbackupy/helpers.py @@ -8,6 +8,7 @@ import logging from json import JSONDecodeError from typing import IO +import hashlib import tzlocal from googleapiclient.errors import HttpError @@ -82,3 +83,9 @@ def is_rate_limit_exceeded(e) -> bool: def random_string(length: int = 8) -> str: return "".join(random.choice(string.ascii_lowercase) for _ in range(length)) + + +def md5hex(data: bytes | str) -> str: + if isinstance(data, str): + data = data.encode("utf-8") + return 
hashlib.md5(data).hexdigest().lower() diff --git a/gwbackupy/people.py b/gwbackupy/people.py new file mode 100644 index 0000000..d0c4719 --- /dev/null +++ b/gwbackupy/people.py @@ -0,0 +1,210 @@ +from __future__ import annotations + +import concurrent.futures +import json +import logging +import threading + +import re + +from gwbackupy import global_properties +from gwbackupy.helpers import md5hex +from gwbackupy.process_helpers import await_all_futures +from gwbackupy.providers.people_service_wrapper_interface import ( + PeopleServiceWrapperInterface, +) +from gwbackupy.storage.storage_interface import StorageInterface, LinkInterface, Data + + +class People: + """People (contacts) service""" + + def __init__( + self, + email: str, + storage: StorageInterface, + service_wrapper: PeopleServiceWrapperInterface, + batch_size: int = 10, + dry_mode: bool = False, + ): + self.dry_mode = dry_mode + self.email = email + self.storage = storage + if batch_size is None or batch_size < 1: + batch_size = 5 + self.batch_size = batch_size + self.__lock = threading.RLock() + self.__error_count = 0 + self.__service_wrapper = service_wrapper + + def backup(self): + logging.info(f"Starting backup for {self.email}") + self.__error_count = 0 + + logging.info("Scanning backup storage...") + stored_data_all = self.storage.find() + logging.info(f"Stored items: {len(stored_data_all)}") + + stored_items: dict[str, dict[str, LinkInterface]] = stored_data_all.find( + f=lambda l: not l.is_special_id() and (l.is_metadata() or l.is_object()), + g=lambda l: [ + l.id(), + "" if l.is_metadata() else l.get_property(LinkInterface.property_etag), + ], + ) + + del stored_data_all + for item_id in list(stored_items.keys()): + link_metadata = stored_items[item_id].get("") + if link_metadata is None: + logging.error(f"{item_id} metadata is not found in locally") + del stored_items[item_id] + elif link_metadata.is_deleted(): + logging.debug(f"{item_id} metadata is already deleted") + del stored_items[item_id] + 
else: + logging.log( + global_properties.log_finest, + f"{item_id} is usable from backup storage", + ) + logging.info(f"Stored peoples: {len(stored_items)}") + + items_from_server = self.__service_wrapper.get_peoples(self.email) + + logging.info("Processing...") + executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.batch_size) + futures = [] + # submit message download jobs + for message_id in items_from_server: + futures.append( + executor.submit( + self.__backup_item, + items_from_server[message_id], + stored_items, + ) + ) + # wait for jobs + if not await_all_futures(futures): + # cancel jobs + executor.shutdown(cancel_futures=True) + logging.warning("Process is killed") + return False + logging.info("Processed") + + if self.__error_count > 0: + # if error then never delete! + logging.error("Backup failed with " + str(self.__error_count) + " errors") + return False + + return True + + def __backup_item( + self, people: dict[str, any], stored_items: dict[str, dict[str, LinkInterface]] + ): + people_id = people.get("resourceName", "UNKNOWN") # for logging + try: + people_id = people["resourceName"] + links: dict[str, LinkInterface] = dict() + latest_meta_link = None + if people_id in stored_items: + latest_meta_link = stored_items[people_id][""] + links = stored_items[people_id] + is_new = latest_meta_link is None + if is_new: + logging.debug(f"{people_id} is new") + + write_meta = True # if any failure then write it force + + # ... 
+ etag = people.get("etag", None) + if not is_new: + etag_currently = latest_meta_link.get_property( + LinkInterface.property_etag + ) + if etag_currently is not None and etag_currently == etag: + write_meta = False + logging.debug(f"{people_id} is not changed, skip put") + + if write_meta: + logging.info(f"{people_id} is changed") + folders = ["people", people_id.split("/")[1][0:3]] + self.__backup_photos(people, people_id, links, folders) + link = ( + self.storage.new_link( + object_id=people_id, + extension="json", + folders=folders, + ) + .set_properties({LinkInterface.property_metadata: True}) + .set_properties({LinkInterface.property_etag: etag}) + ) + if self.__storage_put(link, data=json.dumps(people)): + logging.info(f"{people_id} meta data is saved") + else: + raise Exception("Meta data put failed") + + except Exception as e: + with self.__lock: + self.__error_count += 1 + if str(e) == "SKIP": + return + logging.exception(f"{people_id} {e}") + + def __backup_photos( + self, + people: dict[str, any], + people_id: str, + links: dict[str, LinkInterface], + folders: [str], + ): + logging.debug(f"{people_id} processing photos...") + for photo in people.get("photos", []): + photo_url = photo.get("url", None) + if photo_url is None: + # not found url or default photo + continue + photo_url = re.sub(r"=s100$", "", photo_url) + photo_url_md5 = md5hex(photo_url) + if photo_url_md5 in links: + # already exists + links.pop(photo_url_md5) + continue + descriptor = self.__service_wrapper.get_photo( + email=self.email, uri=photo_url, people_id=people_id + ) + extension = descriptor.mime_type.split("/")[-1] + logging.debug( + f"{people_id} photo download success ({len(descriptor.data)} bytes / {descriptor.mime_type})" + ) + link = ( + self.storage.new_link( + object_id=people_id, + extension=extension, + folders=folders, + ) + .set_properties({LinkInterface.property_object: True}) + .set_properties({LinkInterface.property_etag: md5hex(photo_url)}) + ) + if 
self.__storage_put(link, data=descriptor.data): + logging.info(f"{people_id} photo is saved ({photo_url})") + else: + raise Exception(f"Photo put failed ({link})") + # delete old photos + for photo_url_md5 in links: + if photo_url_md5 == "": + continue + if self.storage.remove(links[photo_url_md5]): + logging.info( + f"{people_id} old photo is deleted ({links[photo_url_md5]})" + ) + else: + raise Exception(f"Photo link delete failed ({links[photo_url_md5]})") + + def __storage_put(self, link: LinkInterface, data: Data) -> bool: + if self.dry_mode: + logging.info(f"DRY MODE storage put: {link}") + return True + return self.storage.put(link, data) + + def restore(self): + pass diff --git a/gwbackupy/providers/gapi_people_service_wrapper.py b/gwbackupy/providers/gapi_people_service_wrapper.py new file mode 100644 index 0000000..dbf901d --- /dev/null +++ b/gwbackupy/providers/gapi_people_service_wrapper.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import logging + +import requests + +from gwbackupy import global_properties +from gwbackupy.providers.people_service_provider import PeopleServiceProvider +from gwbackupy.providers.people_service_wrapper_interface import ( + PeopleServiceWrapperInterface, + PhotoDescriptor, +) + + +class GapiPeopleServiceWrapper(PeopleServiceWrapperInterface): + def __init__( + self, + service_provider: PeopleServiceProvider, + try_count: int = 5, + try_sleep: int = 10, + dry_mode: bool = False, + ): + self.try_count = try_count + self.try_sleep = try_sleep + self.service_provider = service_provider + self.dry_mode = dry_mode + + def get_service_provider(self) -> PeopleServiceProvider: + return self.service_provider + + def get_peoples(self, email: str) -> dict[str, [dict[str, any]]]: + with self.service_provider.get_service(email) as service: + next_page_token = None + page = 1 + items: dict[str, [dict[str, any]]] = dict() + while True: + logging.debug(f"Loading page {page}. 
from server...") + data = ( + service.people() + .connections() + .list( + resourceName="people/me", + pageSize=2000, + pageToken=next_page_token, + personFields="addresses,ageRange,biographies,birthdays,braggingRights,coverPhotos," + "events,genders,imClients,interests,locales,memberships,metadata,names,nicknames," + "emailAddresses,occupations,organizations,phoneNumbers,photos,relations,relationshipInterests," + "relationshipStatuses,residences,skills,taglines,urls,userDefined", + ) + .execute() + ) + next_page_token = data.get("nextPageToken", None) + page_message_count = len(data.get("connections", [])) + logging.debug( + f"Page {page} successfully loaded (connections count: {page_message_count} / next page token: {next_page_token})" + ) + for item in data.get("connections", []): + items[item.get("resourceName")] = item + if next_page_token is None: + break + page += 1 + logging.log(global_properties.log_finest, f"Items: {items}") + return items + + def get_photo(self, email: str, people_id: str, uri: str) -> PhotoDescriptor: + logging.debug(f"{people_id} downloading photo: {uri}") + r = requests.get(uri, stream=True) + if r.status_code != 200: + raise Exception( + f"photo download failed, status code: {r.status_code} ({uri})" + ) + for header in r.headers: + logging.log(global_properties.log_finest, f"{header}: {r.headers[header]}") + photo_bytes = b"" + for chunk in r.iter_content(chunk_size=1024): + photo_bytes += chunk + return PhotoDescriptor( + uri=uri, + data=photo_bytes, + mime_type=r.headers.get("content-type", "image/unknown"), + ) diff --git a/gwbackupy/providers/people_service_provider.py b/gwbackupy/providers/people_service_provider.py new file mode 100644 index 0000000..6208ef9 --- /dev/null +++ b/gwbackupy/providers/people_service_provider.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from gwbackupy.providers.gapi_service_provider import GapiServiceProvider +from gwbackupy.storage.storage_interface import StorageInterface + + +class 
PeopleServiceProvider(GapiServiceProvider): + """Contacts service provider from people/v1 API with full access scope""" + + def __init__(self, **kwargs): + super(PeopleServiceProvider, self).__init__( + "people", "v1", ["https://www.googleapis.com/auth/contacts"], **kwargs + ) diff --git a/gwbackupy/providers/people_service_wrapper_interface.py b/gwbackupy/providers/people_service_wrapper_interface.py new file mode 100644 index 0000000..c3b9279 --- /dev/null +++ b/gwbackupy/providers/people_service_wrapper_interface.py @@ -0,0 +1,16 @@ +from __future__ import annotations + + +class PeopleServiceWrapperInterface: + def get_peoples(self, email: str) -> dict[str, [dict[str, any]]]: + pass + + def get_photo(self, email: str, people_id: str, uri: str) -> PhotoDescriptor: + pass + + +class PhotoDescriptor: + def __init__(self, uri: str, data: bytes, mime_type: str): + self.uri = uri + self.data = data + self.mime_type = mime_type diff --git a/gwbackupy/storage/file_storage.py b/gwbackupy/storage/file_storage.py index 6846e65..c5a80dd 100644 --- a/gwbackupy/storage/file_storage.py +++ b/gwbackupy/storage/file_storage.py @@ -198,16 +198,20 @@ def new_link( object_id: str, extension: str, created_timestamp: int | float | None = None, + folders: list[str] | None = None, ) -> FileLink: link = FileLink() path = self.root - if created_timestamp is not None: - sub_paths = ( + if folders is not None: + folders.insert(0, path) + path = os.path.join(*folders) + elif created_timestamp is not None: + sub_paths: [str] = ( datetime.fromtimestamp(created_timestamp, tz=timezone.utc) .strftime("%Y-%m-%d") .split("-", 1) ) - path += f"/{sub_paths[0]}/{sub_paths[1]}" + path = os.path.join(path, sub_paths[0], sub_paths[1]) link.fill( { "path": path, diff --git a/gwbackupy/storage/storage_interface.py b/gwbackupy/storage/storage_interface.py index 15054c4..03cd18c 100644 --- a/gwbackupy/storage/storage_interface.py +++ b/gwbackupy/storage/storage_interface.py @@ -17,6 +17,9 @@ class 
LinkInterface: property_object = "object" property_mutation = "mutation" property_content_hash = "ch" + """Content hash. Used to check if the content is changed. Calculated by content.""" + property_etag = "etag" + """ETag. Used to check if the content is changed. Calculated by API.""" id_special_prefix = "--gwbackupy-" def id(self) -> str: @@ -140,6 +143,7 @@ def new_link( object_id: str, extension: str, created_timestamp: int | float | None = None, + folders: list[str] | None = None, ) -> LinkInterface: raise NotImplementedError("StorageInterface#new_link") diff --git a/gwbackupy/tests/test_helpers.py b/gwbackupy/tests/test_helpers.py index 1d44979..29bc9b2 100644 --- a/gwbackupy/tests/test_helpers.py +++ b/gwbackupy/tests/test_helpers.py @@ -13,6 +13,7 @@ parse_date, is_rate_limit_exceeded, random_string, + md5hex, ) @@ -94,3 +95,12 @@ def test_is_rate_limit_exceeded(): def test_random_string(): for i in range(32): assert len(random_string(i)) == i + + +def test_md5hex(): + assert md5hex(b"") == "d41d8cd98f00b204e9800998ecf8427e" + assert md5hex(b"abc") == "900150983cd24fb0d6963f7d28e17f72" + assert md5hex(b"message digest") == "f96b697d7cb7938d525a2f31aaf161d0" + assert md5hex("") == "d41d8cd98f00b204e9800998ecf8427e" + assert md5hex("abc") == "900150983cd24fb0d6963f7d28e17f72" + assert md5hex("message digest") == "f96b697d7cb7938d525a2f31aaf161d0" diff --git a/requirements.txt b/requirements.txt index be2ec7b..3e4a8d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,9 @@ -google-api-python-client~=2.71 +google-api-python-client==2.78.0 oauth2client~=4.1 pyopenssl~=23.0 tzlocal~=4.2 pytz~=2022.7 google-auth-httplib2~=0.1.0 -google-auth-oauthlib~=0.8.0 \ No newline at end of file +google-auth-oauthlib==1.0.0 +setuptools==67.3.2 +requests~=2.28.2 \ No newline at end of file diff --git a/setup.py b/setup.py index 778c44c..9f789bd 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ "gwbackupy.storage", "gwbackupy.filters", "gwbackupy.providers", + 
"gwbackupy.cli", ], url="https://github.com/smartondev/gwbackupy", license='BSD 3-Clause "New" or "Revised" License',