Mirror of https://github.com/Ahp06/SUMO_Emissions.git (synced 2024-11-22 03:26:30 +00:00)

Use paths relative to the simulation dir

Use paths relative to the simulation dir for all
generated files.
This commit is contained in:
Thibaud Gasser 2019-02-06 17:06:48 +01:00
parent f741e76454
commit a9c4baf3a7
2 changed files with 12 additions and 14 deletions

View File

@@ -41,7 +41,7 @@ class RunProcess(multiprocessing.Process):
Run process inheriting from multiprocessing.Process Run process inheriting from multiprocessing.Process
""" """
def __init__(self, data : Data, config : Config, save_logs, csv_export): def __init__(self, data: Data, config: Config, save_logs: bool, csv_export: bool):
""" """
RunProcess constructor RunProcess constructor
:param data: The data instance :param data: The data instance
@@ -62,13 +62,13 @@ class RunProcess(multiprocessing.Process):
now = datetime.datetime.now() now = datetime.datetime.now()
current_date = now.strftime("%Y_%m_%d_%H_%M_%S") current_date = now.strftime("%Y_%m_%d_%H_%M_%S")
logdir = os.path.join(os.path.dirname(__file__), f'{self.data.dir}') logdir = f'{self.data.dir}/logs/'
logging.info(logdir)
if not os.path.exists(logdir): if not os.path.exists(logdir):
os.makedirs(f'logs') os.mkdir(logdir)
conf_name = self.config.config_filename.replace('.json', '') conf_name = self.config.config_filename.replace('.json', '')
log_filename = f'{self.data.dir}/logs/{current_date}.log' log_filename = f'{logdir}/{current_date}.log'
self.logger = logging.getLogger(f'{self.data.dir}_{conf_name}') self.logger = logging.getLogger(f'{self.data.dir}_{conf_name}')
self.logger.setLevel(logging.INFO) self.logger.setLevel(logging.INFO)
@@ -87,14 +87,15 @@ class RunProcess(multiprocessing.Process):
""" """
Export all Emission objects as a CSV file into the csv directory Export all Emission objects as a CSV file into the csv directory
""" """
csv_dir = 'files/csv' csv_dir = f'{self.data.dir}/csv'
if not os.path.exists(csv_dir): if not os.path.exists(csv_dir):
os.mkdir(csv_dir) os.mkdir(csv_dir)
now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
conf_name = self.config.config_filename.replace('.json', '') conf_name = self.config.config_filename.replace('.json', '')
with open(f'files/csv/{self.data.dump_name}_{conf_name}_{now}.csv', 'w') as f: csvfile = os.path.join(csv_dir, f'{self.data.dump_name}_{conf_name}_{now}.csv')
with open(csvfile, 'w') as f:
writer = csv.writer(f) writer = csv.writer(f)
# Write CSV headers # Write CSV headers
writer.writerow(itertools.chain(('Step',), (a.name for a in self.data.grid))) writer.writerow(itertools.chain(('Step',), (a.name for a in self.data.grid)))
@@ -158,7 +159,6 @@ class RunProcess(multiprocessing.Process):
total_emissions.__getattribute__(pollutant)) total_emissions.__getattribute__(pollutant))
self.logger.info(f'-> {pollutant.upper()} reduction = {reduc_percentage} %') self.logger.info(f'-> {pollutant.upper()} reduction = {reduc_percentage} %')
simulation_time = round(time.perf_counter() - start, 2) simulation_time = round(time.perf_counter() - start, 2)
self.logger.info(f'End of the simulation ({simulation_time}s)') self.logger.info(f'End of the simulation ({simulation_time}s)')
@@ -201,7 +201,7 @@ def create_dump(dump_name, simulation_dir, areas_number):
print(f'Data loaded ({loading_time}s)') print(f'Data loaded ({loading_time}s)')
print(f'Dump {dump_name} created') print(f'Dump {dump_name} created')
else: else:
print(f'Dump with name {dump_name} already exist') print(f'Dump with name {dump_name} already exists')
traci.close(False) traci.close(False)
@@ -281,9 +281,7 @@ def main(args):
for config in bundle_files: for config in bundle_files:
files.append(os.path.join(path, config)) files.append(os.path.join(path, config))
for conf in files: # Initialize all process for conf in files: # Initialize all process
config = Config(conf,data) config = Config(conf,data)
p = RunProcess(data, config, args.save, args.csv) p = RunProcess(data, config, args.save, args.csv)
process.append(p) process.append(p)