"""
Created on 19 January 2019

@author: Axel Huynh-Phuc

This module defines the entry point of the application.
"""

import argparse
import csv
import datetime
import itertools
import logging
import multiprocessing
import os
import sys
import time

import jsonpickle

from config import Config
from data import Data
import emissions
from model import Emission

# Init the TraCI API: the SUMO tools directory has to be on sys.path before
# traci can be imported, so the import is done after this check. SUMO_HOME
# usually points to the SUMO install directory (e.g. /usr/share/sumo on a
# typical Linux setup; the exact path depends on your installation).
if 'SUMO_HOME' in os.environ:
    tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
    sys.path.append(tools)
else:
    sys.exit("please declare environment variable 'SUMO_HOME'")

import traci


class RunProcess(multiprocessing.Process):
    """
    Run process inheriting from multiprocessing.Process
    """

    def __init__(self, data: Data, config: Config, save_logs: bool, csv_export: bool):
        """
        RunProcess constructor

        :param data: The Data instance
        :param config: The Config instance
        :param save_logs: If True, save the logs into the logs directory
        :param csv_export: If True, export all emissions data into a CSV file
        """
        multiprocessing.Process.__init__(self)
        self.data = data
        self.config = config
        self.save_logs = save_logs
        self.csv_export = csv_export

    def init_logger(self):
        """
        Init logger properties
        """
        now = datetime.datetime.now()
        current_date = now.strftime("%Y_%m_%d_%H_%M_%S")

        logdir = f'{self.data.dir}/logs/'
        logging.info(logdir)
        if not os.path.exists(logdir):
            os.mkdir(logdir)

        log_filename = os.path.join(logdir, f'{current_date}.log')

        self.logger = logging.getLogger('sumo_logger')
        self.logger.setLevel(logging.INFO)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

        # Log to a file only when requested
        if self.save_logs:
            file_handler = logging.FileHandler(log_filename)
            file_handler.setFormatter(formatter)
            self.logger.addHandler(file_handler)

        # Always log to the console
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)

    def export_data_to_csv(self):
        """
        Export all Emission objects as a CSV file into the csv directory
        """
        csv_dir = f'{self.data.dir}/csv'
        if not os.path.exists(csv_dir):
            os.mkdir(csv_dir)

        now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
        conf_name = self.config.config_filename.replace('.json', '')

        csvfile = os.path.join(csv_dir, f'{self.data.dump_name}_{conf_name}_{now}.csv')
        with open(csvfile, 'w', newline='') as f:
            writer = csv.writer(f)
            # Write the CSV header
            writer.writerow(itertools.chain(('Step',), (a.name for a in self.data.grid)))
            # Write every area's emission value for each step
            for step in range(self.config.n_steps):
                em_for_step = (f'{a.emissions_by_step[step].value():.3f}' for a in self.data.grid)
                writer.writerow(itertools.chain((step,), em_for_step))
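
        # The exported file has one "Step" column plus one column per area and
        # one row per simulation step, e.g. (values and area names illustrative):
        #   Step,Area 0,Area 1
        #   0,0.000,12.345
        #   1,3.210,15.000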

    def run(self):
        """
        Launch a simulation; called when a RunProcess instance is started
        """
        try:
            self.init_logger()
            self.logger.info(f'Running simulation dump "{self.data.dump_name}" with the config "{self.config.config_filename}" ...')

            if self.config.without_actions_mode:
                self.logger.info('Reference simulation')

            traci.start(self.config.sumo_cmd)

            for area in self.data.grid:  # Set acquisition window size
                area.set_window_size(self.config.window_size)
                traci.polygon.add(area.name, area.rectangle.exterior.coords, (255, 0, 0))  # Add polygon for UI

            self.logger.info(f'Loaded simulation file: {self.config._SUMOCFG}')
            self.logger.info('Loading data for the simulation')

            start = time.perf_counter()
            self.logger.info('Simulation started...')
            step = 0
            while step < self.config.n_steps:
                traci.simulationStep()

                vehicles = emissions.get_all_vehicles()
                emissions.get_emissions(self, vehicles, step)
                step += 1

                print(f'step = {step}/{self.config.n_steps}', end='\r')

        finally:
            traci.close(False)

        # Sum the emissions collected by all areas over the whole run
        total_emissions = Emission()
        for area in self.data.grid:
            total_emissions += area.sum_all_emissions()

        self.logger.info(f'Total emissions = {total_emissions.value()} mg')
        for pollutant in ['co2', 'co', 'nox', 'hc', 'pmx']:
            value = getattr(total_emissions, pollutant)
            self.logger.info(f'{pollutant.upper()} = {value} mg')

        simulation_time = round(time.perf_counter() - start, 2)
        self.logger.info(f'End of the simulation ({simulation_time}s)')

        # One step corresponds to one simulated second
        self.logger.info(f'Real-time factor: {self.config.n_steps / simulation_time}')

        if self.csv_export:
            self.export_data_to_csv()
            self.logger.info('Exported data into the csv folder')


def create_dump(dump_name, simulation_dir, areas_number):
    """
    Create a new dump with the chosen dump name and simulation directory

    :param dump_name: The name of the data dump
    :param simulation_dir: The simulation directory
    :param areas_number: The number of areas in the grid
    :return:
    """
    # Locate the .sumocfg file in the simulation directory
    _SUMOCFG = None
    for f in os.listdir(simulation_dir):
        if f.endswith('.sumocfg'):
            _SUMOCFG = os.path.join(simulation_dir, f)
    if _SUMOCFG is None:
        sys.exit(f'No .sumocfg file found in {simulation_dir}')

    sumo_binary = os.path.join(os.environ['SUMO_HOME'], 'bin', 'sumo')
    sumo_cmd = [sumo_binary, "-c", _SUMOCFG]

    traci.start(sumo_cmd)
    if not os.path.isfile(f'{simulation_dir}/dump/{dump_name}.json'):
        start = time.perf_counter()
        data = Data(dump_name, traci.simulation.getNetBoundary(), areas_number, simulation_dir)
        data.init_grid()
        data.add_data_to_areas()
        data.save()

        loading_time = round(time.perf_counter() - start, 2)
        print(f'Data loaded ({loading_time}s)')
        print(f'Dump {dump_name} created')
    else:
        print(f'Dump with name {dump_name} already exists')

    traci.close(False)
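
# Example call (illustrative values): create a 10 x 10 grid dump named "dump1"
#   create_dump('dump1', 'files/simulations/<town>', 10)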


def add_options(parser):
    """
    Add command line options

    :param parser: The command line parser
    :return:
    """
    parser.add_argument("-new_dump", "--new_dump", type=str,
                        help='Load and create a new data dump with the chosen configuration file')
    parser.add_argument("-areas", "--areas", type=int,
                        help='Create a grid with "areas x areas" areas')
    parser.add_argument("-simulation_dir", "--simulation_dir", type=str,
                        help='Choose the simulation directory')
    parser.add_argument("-run", "--run", type=str,
                        help='Run a simulation process with the chosen dump')
    parser.add_argument("-c", "--c", metavar=('config1', 'config2'), nargs='+', type=str,
                        help='Choose one or more configuration files from your working directory')
    parser.add_argument("-c_dir", "--c_dir", type=str,
                        help='Choose a directory which contains your configuration file(s)')
    parser.add_argument("-save", "--save", action="store_true",
                        help='Save the logs into the logs folder')
    parser.add_argument("-csv", "--csv", action="store_true",
                        help="Export all emissions data into a CSV file")


def check_user_entry(args):
    """
    Check the user entry consistency
    """
    if args.new_dump is not None:
        if args.areas is None or args.simulation_dir is None:
            print('The -new_dump argument requires the -areas and -simulation_dir options')
            return False

    if args.run is not None:
        if args.c is None and args.c_dir is None:
            print('The -run argument requires the -c or -c_dir option')
            return False

    return True
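
# For instance (illustrative), a namespace with new_dump set but no areas fails:
#   check_user_entry(argparse.Namespace(new_dump='d', areas=None,
#                                       simulation_dir=None, run=None,
#                                       c=None, c_dir=None))  # -> False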


def main(args):
    """
    The entry point of the application

    :param args: Command line options
    :return:
    """
    parser = argparse.ArgumentParser()
    add_options(parser)
    args = parser.parse_args(args)

    if check_user_entry(args):

        if args.new_dump is not None:
            if (args.simulation_dir is not None) and (args.areas is not None):
                create_dump(args.new_dump, args.simulation_dir, args.areas)

        if args.run is not None:
            dump_path = args.run
            if not os.path.isfile(dump_path):
                sys.exit(f'Dump {dump_path} does not exist')
            with open(dump_path, 'r') as f:
                data = jsonpickle.decode(f.read())

            process = []
            files = []

            if args.c is not None:
                for config in args.c:
                    files.append(config)

            if args.c_dir is not None:
                path = args.c_dir
                bundle_files = [f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))]
                for config in bundle_files:
                    files.append(os.path.join(path, config))

            for conf in files:  # Initialize one process per configuration file
                config = Config(conf, data)
                p = RunProcess(data, config, args.save, args.csv)
                process.append(p)
                p.start()

            for p in process:
                p.join()


if __name__ == '__main__':
    main(sys.argv[1:])