2019-01-22 11:55:06 +00:00
|
|
|
'''
|
|
|
|
Created on 19 janv. 2019
|
|
|
|
|
|
|
|
@author: Axel Huynh-Phuc
|
|
|
|
'''
|
|
|
|
|
|
|
|
"""
|
|
|
|
Init the Traci API
|
|
|
|
"""
|
|
|
|
|
2019-01-19 18:56:59 +00:00
|
|
|
import argparse
|
2019-01-22 11:55:06 +00:00
|
|
|
import csv
|
|
|
|
import datetime
|
|
|
|
import itertools
|
|
|
|
import logging
|
|
|
|
import multiprocessing
|
2019-01-19 18:56:59 +00:00
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
import traci
|
|
|
|
|
|
|
|
import jsonpickle
|
|
|
|
|
|
|
|
from config import Config
|
|
|
|
from data import Data
|
|
|
|
import emissions
|
|
|
|
from model import Emission
|
|
|
|
|
|
|
|
|
2019-01-22 11:55:06 +00:00
|
|
|
# Make SUMO's bundled Python tools importable; abort immediately when the
# SUMO installation cannot be located through the SUMO_HOME variable.
try:
    tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
except KeyError:
    sys.exit("please declare environment variable 'SUMO_HOME'")
else:
    sys.path.append(tools)
|
|
|
|
|
2019-01-22 10:56:44 +00:00
|
|
|
class RunProcess(multiprocessing.Process):
    """
    A worker process which runs one SUMO simulation for a given data dump
    and configuration, collects emissions per step, then logs the totals.
    """

    def __init__(self, data : Data, config : Config, save_logs, csv_export):
        """
        :param data: The Data instance (simulation dump and grid of areas) to run
        :param config: The Config driving this simulation run
        :param save_logs: If truthy, also write logs to a file under files/logs
        :param csv_export: If truthy, export per-step emissions to CSV after the run
        """
        multiprocessing.Process.__init__(self)
        self.data = data
        self.config = config
        self.save_logs = save_logs
        self.csv_export = csv_export

    def init_logger(self):
        """
        Initialize self.logger for this (dump, config) pair.

        Messages always go to the console; when save_logs is set they are
        also written to a timestamped file under files/logs.
        """
        now = datetime.datetime.now()
        current_date = now.strftime("%Y_%m_%d_%H_%M_%S")

        # Bug fix: the directory created must match the directory checked
        # ('files/logs'); the original created 'logs', so the FileHandler
        # below failed with FileNotFoundError on a fresh checkout.
        if not os.path.exists('files/logs'):
            os.makedirs('files/logs')

        log_filename = f'files/logs/{current_date}.log'

        conf_name = self.config.config_filename.replace('.json', '')
        self.logger = logging.getLogger(f'{self.data.dir}_{conf_name}')
        self.logger.setLevel(logging.INFO)

        # Guard against duplicate handlers: init_logger() is invoked both by
        # the parent process (in main) and inside run(), which would otherwise
        # emit every message twice on the same named logger.
        if self.logger.handlers:
            return

        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

        if self.save_logs:
            file_handler = logging.FileHandler(log_filename)
            file_handler.setFormatter(formatter)
            self.logger.addHandler(file_handler)

        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)

    def export_data_to_csv(self):
        """
        Export all Emission objects as a CSV file into the csv directory
        """
        csv_dir = 'files/csv'
        if not os.path.exists(csv_dir):
            os.mkdir(csv_dir)

        now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
        conf_name = self.config.config_filename.replace('.json', '')

        # newline='' is the documented way to open files for the csv module
        # (prevents blank rows on Windows); reuse csv_dir rather than
        # re-hardcoding the path in the filename.
        with open(f'{csv_dir}/{self.data.dump_name}_{conf_name}_{now}.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            # Write CSV headers
            writer.writerow(itertools.chain(('Step',), (a.name for a in self.data.grid)))
            # Write all areas emission value for each step
            for step in range(self.config.n_steps):
                em_for_step = (f'{a.emissions_by_step[step].value():.3f}' for a in self.data.grid)
                writer.writerow(itertools.chain((step,), em_for_step))

    def run(self):
        """
        Run a data set: start SUMO via TraCI, step through the simulation
        collecting emissions, then log the totals and (optionally) export CSV.
        """
        # Assigned before the try block so the finally clause can always
        # compute a duration even if setup fails early (the original raised
        # NameError there, masking the real exception). Reassigned below so
        # the reported time still covers only the simulation itself.
        start = time.perf_counter()
        try:
            self.init_logger()

            self.logger.info(f'Running simulation dump "{self.data.dump_name}" with the config "{self.config.config_filename}" ...')

            traci.start(self.config.sumo_cmd)

            for area in self.data.grid:  # Set acquisition window size
                area.set_window_size(self.config.window_size)
                traci.polygon.add(area.name, area.rectangle.exterior.coords, (255, 0, 0))  # Add polygon for UI

            self.logger.info(f'Loaded simulation file : {self.config._SUMOCFG}')
            self.logger.info('Loading data for the simulation')

            start = time.perf_counter()
            self.logger.info('Simulation started...')
            step = 0
            while step < self.config.n_steps:
                traci.simulationStep()

                vehicles = emissions.get_all_vehicles()
                emissions.get_emissions(self, vehicles, step)
                step += 1

                print(f'step = {step}/{self.config.n_steps}', end='\r')

        finally:
            traci.close(False)

            total_emissions = Emission()
            for area in self.data.grid:
                total_emissions += area.sum_all_emissions()

            self.logger.info(f'Total emissions = {total_emissions.value()} mg')

            if not self.config.without_actions_mode:  # If it's not a simulation without actions
                ref = self.config.get_ref_emissions()
                if not (ref is None):  # If a reference value exist (add yours into config.py)
                    global_diff = (ref.value() - total_emissions.value()) / ref.value()

                    self.logger.info(f'Global reduction percentage of emissions = {global_diff * 100} %')
                    self.logger.info(f'-> CO2 emissions = {emissions.get_reduction_percentage(ref.co2, total_emissions.co2)} %')
                    self.logger.info(f'-> CO emissions = {emissions.get_reduction_percentage(ref.co, total_emissions.co)} %')
                    self.logger.info(f'-> Nox emissions = {emissions.get_reduction_percentage(ref.nox, total_emissions.nox)} %')
                    self.logger.info(f'-> HC emissions = {emissions.get_reduction_percentage(ref.hc, total_emissions.hc)} %')
                    self.logger.info(f'-> PMx emissions = {emissions.get_reduction_percentage(ref.pmx, total_emissions.pmx)} %')

            simulation_time = round(time.perf_counter() - start, 2)
            self.logger.info(f'End of the simulation ({simulation_time}s)')
            # 1 step is equal to one second simulated
            self.logger.info(f'Real-time factor : {self.config.n_steps / simulation_time}')

            if self.csv_export:
                self.export_data_to_csv()
                self.logger.info(f'Exported data into the csv folder')
|
2019-01-22 11:55:06 +00:00
|
|
|
|
|
|
|
|
|
|
|
def add_options(parser):
    """
    Register all supported command line options on the given parser.

    :param parser: The command line parser
    :return:
    """
    # TODO: make -areas & -simulation_dir required when -new_dump is given,
    # and make -c required when -run is given
    parser.add_argument(
        "-new_dump", "--new_dump",
        type=str, required=False,
        help='Load and create a new data dump with the configuration file chosen',
    )
    parser.add_argument(
        "-areas", "--areas",
        type=int, required=False,
        help='Will create a grid with "areas x areas" areas',
    )
    parser.add_argument(
        "-simulation_dir", "--simulation_dir",
        type=str, required=False,
        help='Choose the simulation directory',
    )
    parser.add_argument(
        "-run", "--run",
        type=str,
        help='Run a simulation with the dump chosen',
    )
    parser.add_argument(
        "-c", "--c",
        nargs='+', type=str,
        help='Choose your configuration file from your working directory',
    )
    parser.add_argument(
        "-save", "--save",
        action="store_true",
        help='Save the logs into the logs folder',
    )
    parser.add_argument(
        "-csv", "--csv",
        action="store_true",
        help="Export all data emissions into a CSV file",
    )
|
|
|
|
|
|
|
|
|
|
|
|
def create_dump(dump_name, simulation_dir, areas_number):
    """
    Create a new dump with config file and dump_name chosen

    :param dump_name: The name of the data dump
    :param simulation_dir: The simulation directory
    :param areas_number: The number of areas in grid
    :return:
    """
    sumo_binary = os.path.join(os.environ['SUMO_HOME'], 'bin', 'sumo')
    sumo_cmd = [sumo_binary, "-c", f'files/simulations/{simulation_dir}/osm.sumocfg']

    traci.start(sumo_cmd)
    # try/finally so the TraCI connection is always closed, even when grid
    # initialization or saving the dump raises (the original leaked the
    # connection in that case).
    try:
        if not os.path.isfile(f'files/dump/{dump_name}.json'):
            start = time.perf_counter()
            data = Data(dump_name, traci.simulation.getNetBoundary(), areas_number, simulation_dir)
            data.init_grid()
            data.add_data_to_areas()
            data.save()

            loading_time = round(time.perf_counter() - start, 2)
            print(f'Data loaded ({loading_time}s)')
            print(f'Dump {dump_name} created')
        else:
            print(f'Dump with name {dump_name} already exist')
    finally:
        traci.close(False)
|
|
|
|
|
|
|
|
def main(args):
    """
    The entry point of the application

    :param args: Command line options
    :return:
    """
    parser = argparse.ArgumentParser()
    add_options(parser)
    args = parser.parse_args(args)

    if args.new_dump is not None:
        if (args.simulation_dir is not None) and (args.areas is not None):
            create_dump(args.new_dump, args.simulation_dir, args.areas)
        else:
            # Previously this fell through silently; tell the user why
            # nothing happened (see the TODO in add_options).
            print('-new_dump requires both -simulation_dir and -areas options')

    if args.run is not None:
        dump_path = f'files/dump/{args.run}.json'
        if not os.path.isfile(dump_path):
            # Previously a missing dump was silently ignored.
            print(f'Dump file {dump_path} does not exist')
            return

        with open(dump_path, 'r') as f:
            data = jsonpickle.decode(f.read())

        process = []
        if args.c is not None:
            # Init and start one process per configuration file
            for conf in args.c:
                config = Config()
                config.import_config_file(conf)
                config.init_traci(data.dir)
                config.check_config()

                # Note: no init_logger() here — run() initializes its own
                # logger in the child; doing it in the parent too duplicated
                # handlers and log lines.
                p = RunProcess(data, config, args.save, args.csv)
                process.append(p)
                p.start()

            for p in process:
                p.join()
|
|
|
|
|
2019-01-22 10:56:44 +00:00
|
|
|
|
2019-01-22 11:55:06 +00:00
|
|
|
# Script entry point: forward the command line arguments (minus the program
# name) to main(); does nothing when the module is imported.
if __name__ == '__main__':
    main(sys.argv[1:])
|