feat: use asyncio.gather to make parallel HTTP requests

Thibaud Gasser 2020-03-13 00:30:18 +01:00
parent 94e57f51f5
commit 4f06791cad

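The change replaces a sequential await-per-request loop with the standard asyncio fan-out pattern: schedule every coroutine as a task first, then await them all together. A minimal, self-contained sketch of that pattern (the fetch coroutine and its sleep are stand-ins for the Wallabag API calls in the diff below; assumes Python 3.7+ for asyncio.run):

    import asyncio


    async def fetch(i: int) -> int:
        # Stand-in for one HTTP request (e.g. w_api.post_entries in the diff).
        await asyncio.sleep(0.1)
        return i


    async def main():
        # Schedule all coroutines up front, then await them together.
        # asyncio.gather returns results in the order the awaitables were
        # passed, regardless of which request finishes first.
        tasks = [asyncio.ensure_future(fetch(i)) for i in range(4)]
        results = await asyncio.gather(*tasks)
        print(results)  # [0, 1, 2, 3]


    asyncio.run(main())

Because gather preserves argument order, entries_id below still lines up with the order of items in the input file, as it did in the sequential version.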

@@ -35,41 +35,42 @@ async def connect_to_wallabag(
     )


 def read_urls_file(f):
     while True:
         line = f.readline()
         if line:
             yield line
         else:
             break


+async def post_entry(w_api: Wallabag, item: Dict[str, str]):
+    entry = await w_api.post_entries(
+        url=item["url"],
+        archive=item["is_archived"],
+        starred=item["is_starred"],
+        original_url=item["origin_url"],
+        tags=",".join(item["tags"]),
+    )
+    logging.info(f"Entry url {item['url']} posted to wallabag with id {entry['id']}")
+    return entry["id"]
+
+
+async def delete_entry(w_api: Wallabag, item_id: int):
+    entry = await w_api.delete_entries(item_id)
+    logging.info(f"Deleted wallabag entry with id {entry['id']}")
+
+
 async def import_articles(w_api: Wallabag, path: str, limit=0):
-    entries_id = []
+    tasks = []
     with open(path) as f:
         for index, item in enumerate(json.load(f)):
             if limit > 0 and index == limit:
                 break
-            entry = await w_api.post_entries(
-                url=item["url"],
-                archive=item["is_archived"],
-                starred=item["is_starred"],
-                original_url=item["origin_url"],
-                tags=",".join(item["tags"]),
-            )
-            logging.info(
-                f"Entry url {item['url']} posted to wallabag with id {entry['id']}"
-            )
-            entries_id.append(entry["id"])
+            task = asyncio.ensure_future(post_entry(w_api, item))
+            tasks.append(task)
+    entries_id = await asyncio.gather(*tasks)
     return entries_id


 async def delete_all_entries(ids: List[int], w_api: Wallabag):
+    tasks = []
     for id in ids:
-        entry = await w_api.delete_entries(id)
-        logging.info(f"Deleted wallabag entry with id {entry['id']}")
+        tasks.append(delete_entry(w_api, id))
+    await asyncio.gather(*tasks)


 async def main():
@@ -77,7 +78,7 @@ async def main():
     configuration = dict(load_configuration(CONFIG_PATH))
     w_api = await connect_to_wallabag(configuration, session)

-    entries_id = await import_articles(w_api, "articles.json", limit=1)
+    entries_id = await import_articles(w_api, ARTICLES_PATH, limit=4)
     await delete_all_entries(entries_id, w_api)
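One failure-handling behavior worth noting, not addressed by this commit: by default asyncio.gather raises the first exception it encounters, so a single failed post_entry call makes import_articles raise before entries_id is assigned. A hedged sketch of the return_exceptions=True variant, if per-request errors should be collected instead (might_fail is a hypothetical stand-in):

    import asyncio


    async def might_fail(i: int) -> int:
        # Hypothetical request that fails for one input.
        if i == 2:
            raise RuntimeError(f"request {i} failed")
        return i


    async def main():
        # return_exceptions=True places each exception in the result list
        # instead of raising the first one, so the other results survive.
        results = await asyncio.gather(
            *(might_fail(i) for i in range(4)), return_exceptions=True
        )
        print(results)  # [0, 1, RuntimeError('request 2 failed'), 3]


    asyncio.run(main())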