Recursive ls seems to work

2019-09-28 23:14:16 +01:00
parent 115b235256
commit e8ffe8cb78
4 changed files with 84 additions and 186 deletions

main.py

@@ -1,79 +1,55 @@
#!/usr/bin/env python
import os
import sys
import json
import argparse
import time
import webbrowser
import logging
import queue
import threading
import signal
from boxsdk import Client
from pathlib import Path
import src.setup # pylint: disable=unused-import
from src.job import Job
from src.setup import setup_logger
from src.auth_helper import init_oauth
from src.const import SETTING_FILE
from src.worker import Worker
from src.token_manager import TokenManager
from src.auth_helper import get_sign_in_url, get_token_from_code
from src.drive_helper import DriveHelper, get_user
from src.job import JobDirectory
def interactive_confirm():
    inp = input("Confirm? (y/N): ")
    if inp.lower() != 'y':
        print('Exiting')
        sys.exit(1)
def main():
    with open('sync_settings.json') as f:
        SETTINGS = json.load(f)
    logging.info('Loaded Settings: %s', SETTINGS)
    setup_logger()
    with open(SETTING_FILE) as f:
        settings = json.load(f)
    parser = argparse.ArgumentParser()
    parser.add_argument("baseItemId", nargs='?', default='', help="base itemId (ABC12345!00001)")
    parser.add_argument("remote", nargs='?', default='', help="remote path to sync")
    parser.add_argument("local", nargs='?', default='', help="local path to sync")
    parser.add_argument("itemId", nargs='?', default=None, help="Item ID to download, use 0 for root")
    parser.add_argument("localDirectory", nargs='?', default='', help="Local path of the item")
    parser.add_argument("-y", "--yes", help="skip confirmation dialogue", action="store_true")
    args = parser.parse_args()
    client = Client(init_oauth())
    q = queue.Queue()
    if args.baseItemId:
        remote = args.remote.rstrip('/')
        local = os.path.expanduser(args.local.rstrip('/'))
        print('baseItemId: [{0}]'.format(args.baseItemId))
        print('driveRoot: [{0}]'.format(args.driveRoot))
        print('Syncing Remote: [{0}]'.format(remote))
        print('With Local: [{0}]'.format(local))
        q.put(JobDirectory(args.baseItemId, remote, local))
    if args.itemId is not None:
        local = Path(args.localDirectory)
        folder = client.folder(args.itemId).get(['name', 'id', 'size', 'modified_at', 'path_collection'])
        q.put(Job(folder, local))
    else:
        print('Not implemented reading from settings yet, using test data')
        local = Path('Temp')
        folder = client.folder('0').get(['name', 'id', 'size', 'modified_at', 'path_collection'])
        q.put(Job(folder, local))
    '''
    for job in SETTINGS.get('jobs', []):
        q.put(JobDirectory(job['itemId'], job['remote'], job['local']))
    print('Processing jobs in setting file')
    if not (args.yes or SETTINGS.get('defaultYes', False)):
        interactive_confirm()
    try:
        token_manager = TokenManager('token.json')
        get_user(token_manager.get_token())  # Check token validity
    except Exception:
        logging.warning('Token not working, logging in')
        sign_in_url, state = get_sign_in_url()
        webbrowser.open(sign_in_url, new=2)
        print('After logging in, please paste the entire callback URL (such as http://localhost:8000/......)')
        callback_url = input('Paste here: ')
        token = get_token_from_code(callback_url, state)
        token_manager = TokenManager('token.json', token)
    logging.info('Token successfully loaded')
    '''
    threads = []
    drive_helper = DriveHelper(token_manager)
    interrupt_flag = {'exit': False}
    worker_object = Worker(q, drive_helper, SETTINGS.get('blacklist', []), interrupt_flag)
    worker_object = Worker(q, client, settings.get('blacklist', []), interrupt_flag)
    thread_count = SETTINGS.get('thread_count', 4)
    thread_count = settings.get('thread_count', 4)
    logging.info('Launching %s threads', thread_count)
    original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
    for _ in range(thread_count):
@@ -109,9 +85,6 @@ def main():
    for f in sorted(worker_object.downloaded):
        print('-', f)
    print(f'ls API call count: {drive_helper.ls_call_counts}')
    print(f'dl API call count: {drive_helper.dl_call_counts}')
if __name__ == '__main__':
    main()
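
The Worker that actually performs the recursive listing lives in src/worker.py and is outside this hunk, so the traversal itself is not visible here. As a rough sketch only, a queue-driven recursive ls over the Box API could look like the following; Job as a namedtuple, recursive_ls, and the chosen field list are illustrative assumptions, not the project's real interfaces.

# Minimal sketch (not the project's real Worker) of a queue-driven recursive
# listing over the Box API. Job here is a hypothetical stand-in for src.job.Job:
# it simply pairs a Box folder with the local path it maps to.
import queue
from collections import namedtuple
from pathlib import Path

from boxsdk import Client

Job = namedtuple('Job', ['folder', 'local'])


def recursive_ls(client: Client, root_folder_id='0'):
    """Breadth-first walk of every item below root_folder_id ('0' is the root)."""
    q = queue.Queue()
    root = client.folder(root_folder_id).get(['name', 'id'])
    q.put(Job(root, Path(root.name)))
    while not q.empty():
        job = q.get()
        # One ls-style API call per folder; request only the fields we need.
        for item in job.folder.get_items(fields=['type', 'id', 'name']):
            if item.type == 'folder':
                # Recurse by enqueueing the sub-folder with an extended local path.
                q.put(Job(client.folder(item.id), job.local / item.name))
            else:
                print(job.local / item.name)

With a single consumer this is just a breadth-first walk; main.py instead shares one queue between thread_count Worker threads so several get_items calls can be in flight at once, and the interrupt_flag dictionary passed to Worker presumably lets Ctrl-C ask those threads to stop.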