# Load environment variables from a local .env file (python-dotenv).
dotenv.load_dotenv()

# Sesame API endpoint and auth token; None when the variable is unset.
sesame_api_baseurl = os.getenv('SESAME_API_BASEURL')
sesame_api_token = os.getenv('SESAME_API_TOKEN')

# Concurrency limits for the importer:
#  - *_FILES: how many cache files are processed in parallel,
#  - *_ENTRIES: how many per-entry upsert requests run at once.
# Defaults are strings so os.getenv keeps its str -> str contract
# before the explicit int() conversion.
sesame_import_parallels_files = int(os.getenv('SESAME_IMPORT_PARALLELS_FILES', '1'))
sesame_import_parallels_entries = int(os.getenv('SESAME_IMPORT_PARALLELS_ENTRIES', '5'))
1214
1315async def gather_with_concurrency (n , tasks ):
1416 semaphore = asyncio .Semaphore (n )
@@ -50,7 +52,7 @@ async def process_data(data, config, file, session):
5052 with open (f'./data/{ file } ' , 'w' , encoding = 'utf-8' ) as fichier :
5153 json .dump (result , fichier , ensure_ascii = False )
5254 tasks = [send_request (session , f'{ sesame_api_baseurl } /management/identities/upsert' , entry ) for entry in result ]
53- await gather_with_concurrency (25 , tasks )
55+ await gather_with_concurrency (sesame_import_parallels_files , tasks )
5456 print (f"Processed { file } " )
5557
5658async def load_config ():
@@ -69,4 +71,4 @@ async def import_ind():
6971
7072 async with aiohttp .ClientSession () as session :
7173 tasks = [process_data (datas [file ], configs [file ], file , session ) for file in cache_files if file in configs .keys ()]
72- await gather_with_concurrency (10 , tasks )
74+ await gather_with_concurrency (sesame_import_parallels_files , tasks )
0 commit comments