fix level and format parameters when more than one is given

Maciej Lebiest 2022-07-27 11:49:07 +02:00
parent ff31611d46
commit 947c25d34d
2 changed files with 32 additions and 32 deletions

View file

@@ -18,8 +18,9 @@ Full example on how to use the script:
 ```bash
 python exporter.py \
     -H https://wiki.example.com \
-    -f pdf,md,plaintext,html \
-    -l pages,chapters,books
+    -f pdf markdown plaintext html \
+    -l pages chapters books \
+    --force-update-files \
     -t ./token.txt \
     -V debug \
     -p ./
@@ -29,25 +30,19 @@ Customization:
 ```text
 options:
   -p PATH, --path PATH  Path where exported files will be placed.
-                        Default: .
   -t TOKEN_FILE, --token-file TOKEN_FILE
                         File containing authorization token in format TOKEN_ID:TOKEN_SECRET
-                        Default: ./token.txt
   -H HOST, --host HOST  Your domain with protocol prefix, example: https://example.com
-                        Default: https://localhost
-  -f FORMATS, --formats FORMATS
-                        Coma separated list of formats to use for export.
-                        Available ones: markdown,plaintext,pdf,html
-                        default: markdown
-  -l LEVEL, --level LEVEL
-                        Coma separated list of levels at which should be export performed.
-                        Available levels: ['pages', 'chapters', 'books']
-                        Default: pages
-  -V LOG_LEVEL, --log-level LOG_LEVEL
-                        Set verbosity level.
-                        Available levels: dict_keys(['debug', 'info', 'warning', 'error'])
-                        Default: info
+  -f {markdown,plaintext,pdf,html} [{markdown,plaintext,pdf,html} ...],
+     --formats {markdown,plaintext,pdf,html} [{markdown,plaintext,pdf,html} ...]
+                        Space separated list of formats to use for export.
+  -l {pages,chapters,books} [{pages,chapters,books} ...], --level {pages,chapters,books} [{pages,chapters,books} ...]
+                        Space separated list of levels at which the export should be performed.
+  --force-update-files  Set this option to skip checking local files timestamps against remote last edit
+                        timestamps. This will cause overwriting local files, even if they seem to be already in
+                        newest version.
+  -V {debug,info,warning,error}, --log-level {debug,info,warning,error}
+                        Set verbosity level.
 ```
 ### TODO:

View file

@@ -37,22 +37,23 @@ parser.add_argument('-t', '--token-file', type=str, default=f'.{os.path.sep}toke
                     help='File containing authorization token in format TOKEN_ID:TOKEN_SECRET')
 parser.add_argument('-H', '--host', type=str, default='https://localhost',
                     help='Your domain with protocol prefix, example: https://example.com')
-parser.add_argument('-f', '--formats', type=str, default='markdown',
-                    help=f'Coma separated list of formats to use for export.', choices=FORMATS.keys())
-parser.add_argument('-l', '--level', type=str, default='pages',
-                    help=f'Coma separated list of levels at which should be export performed. ', choices=LEVELS)
+parser.add_argument('-f', '--formats', type=str, default=['markdown'], nargs="+",
+                    help=f'Space separated list of formats to use for export.', choices=FORMATS.keys())
+parser.add_argument('-l', '--level', type=str, default=['pages'], nargs="+",
+                    help=f'Space separated list of levels at which the export should be performed.', choices=LEVELS)
+parser.add_argument('--force-update-files', action='store_true',
+                    help="Set this option to skip checking local files timestamps against remote last edit timestamps. "
+                         "This will cause overwriting local files, even if they seem to be already in newest version.")
+parser.set_defaults(force_update_files=False)
 parser.add_argument('-V', '--log-level', type=str, default='info',
                     help=f'Set verbosity level. ', choices=LOG_LEVEL.keys())
 args = parser.parse_args()
-if args.log_level not in LOG_LEVEL.keys():
-    error(f"Bad log level {args.log_level}, available levels: {LOG_LEVEL.keys()}")
-    exit(1)
 logging.basicConfig(format='%(levelname)s :: %(message)s', level=LOG_LEVEL.get(args.log_level))
-formats = args.formats.split(',')
+formats: list[str] = args.formats
 for frmt in formats:
     if frmt not in FORMATS.keys():
         error("Unknown format name (NOT file extension), "
@@ -61,7 +62,7 @@ for frmt in formats:
 API_PREFIX: str = f"{args.host.removesuffix(os.path.sep)}/api"
 FS_PATH: str = args.path.removesuffix(os.path.sep)
-LEVEL_CHOICE: list[str] = args.level.split(',')
+LEVEL_CHOICE: list[str] = args.level
 for lvl in LEVEL_CHOICE:
     if lvl not in LEVELS:
         error(f"Level {lvl} is not supported, can be only one of {LEVELS}")
@@ -72,6 +73,7 @@ with open(args.token_file, 'r') as f:
 HEADERS = {'Content-Type': 'application/json; charset=utf-8',
            'Authorization': f"Token {TOKEN}"}
+SKIP_TIMESTAMPS: bool = args.force_update_files
 class Node:
@@ -194,6 +196,10 @@ def api_get_listing(path: str) -> list:
 def check_if_update_needed(file_path: str, document: Node) -> bool:
+    if SKIP_TIMESTAMPS:
+        return True
+    debug(f"Checking for update for file {file_path}")
     if not os.path.exists(file_path):
         debug(f"Document {file_path} is missing on disk, update needed.")
         return True
@@ -205,10 +211,10 @@ def check_if_update_needed(file_path: str, document: Node) -> bool:
     changes: int = document.changed_since(local_last_edit)
     if changes > 0:
-        info(f"Document \"{document.get_name()}\" consists of {changes} outdated documents, update needed.")
+        info(f"Document \"{file_path}\" consists of {changes} outdated documents, update needed.")
         return True
-    debug(f"Document \"{document.get_name()}\" consists of {changes} outdated documents.")
+    debug(f"Document \"{file_path}\" consists of {changes} outdated documents, skipping updating.")
     return False
@@ -218,9 +224,8 @@ def export(documents: list[Node], level: str):
         for frmt in formats:
             path: str = f"{FS_PATH}{os.path.sep}{document.get_path()}{os.path.sep}{document.get_name()}.{FORMATS[frmt]}"
-            debug(f"Checking for update for file {path}")
             if not check_if_update_needed(path, document):
-                debug("Already updated")
                 continue
             data: bytes = api_get_bytes(f'{level}/{document.get_id()}/export/{frmt}')
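
The core of the change is switching `-f/--formats` and `-l/--level` from comma-split strings to argparse lists via `nargs="+"`, so argparse itself validates every value against `choices` and the manual `split(',')` plus ad-hoc checks become unnecessary. Below is a minimal, standalone sketch of that pattern, not the project's actual exporter.py; the FORMATS and LEVELS values here are illustrative placeholders modeled on the names used in the diff.

```python
import argparse

# Illustrative stand-ins for the constants referenced in the diff.
FORMATS = {'markdown': 'md', 'plaintext': 'txt', 'pdf': 'pdf', 'html': 'html'}
LEVELS = ['pages', 'chapters', 'books']

parser = argparse.ArgumentParser()
# nargs="+" makes argparse collect one or more space-separated values into a list,
# and choices= rejects anything outside the allowed set before the script runs.
parser.add_argument('-f', '--formats', type=str, default=['markdown'], nargs='+',
                    choices=FORMATS.keys(),
                    help='Space separated list of formats to use for export.')
parser.add_argument('-l', '--level', type=str, default=['pages'], nargs='+',
                    choices=LEVELS,
                    help='Space separated list of levels at which the export should be performed.')

# Simulated command line; on a real invocation argparse reads sys.argv instead.
args = parser.parse_args(['-f', 'pdf', 'markdown', '-l', 'pages', 'books'])
print(args.formats)  # ['pdf', 'markdown']
print(args.level)    # ['pages', 'books']
```

With this in place, downstream code can consume `args.formats` and `args.level` directly as lists, which is exactly what the `formats: list[str] = args.formats` and `LEVEL_CHOICE: list[str] = args.level` lines in the diff do.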