mirror of https://github.com/Ahp06/SUMO_Emissions.git synced 2024-11-21 19:16:30 +00:00

Added RunProcess class and added multiprocessing run

Ahp06 2019-01-22 11:56:44 +01:00
parent eded591023
commit ee6ef7a010
6 changed files with 251 additions and 241 deletions
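For orientation: the RunProcess class introduced below follows Python's standard multiprocessing.Process idiom — store the per-run state in __init__, override run(), and drive the processes with start()/join() from the parent. A minimal, self-contained sketch of that idiom (the Worker name and its payload are illustrative only, not part of this commit):

import multiprocessing

class Worker(multiprocessing.Process):
    def __init__(self, label, n_steps):
        multiprocessing.Process.__init__(self)
        self.label = label
        self.n_steps = n_steps

    def run(self):
        # Executed in the child process after start() is called.
        for step in range(self.n_steps):
            pass  # simulation work would go here
        print(f'{self.label}: done after {self.n_steps} steps')

if __name__ == '__main__':
    workers = [Worker('cfg_a', 100), Worker('cfg_b', 100)]
    for w in workers:
        w.start()   # spawn one OS process per worker
    for w in workers:
        w.join()    # wait for all of them to finish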

View File

@@ -69,7 +69,6 @@ class Config:
:return: All properties chosen by the user
"""
return (
f'grid : {self.areas_number}x{self.areas_number}\n'
f'step number = {self.n_steps}\n'
f'window size = {self.window_size}\n'
f'weight routing mode = {self.weight_routing_mode}\n'
@@ -89,36 +88,6 @@ class Config:
sumo_binary = os.path.join(os.environ['SUMO_HOME'], 'bin', self._SUMOCMD)
self.sumo_cmd = [sumo_binary, "-c", self._SUMOCFG]
def init_logger(self, dump_name, save_logs=False):
"""
Init the application logger
:param dump_name: The name of the data dump to use
:param save_logs: If save_logs is True, it will save the logs into the logs directory
:return:
"""
now = datetime.datetime.now()
current_date = now.strftime("%Y_%m_%d_%H_%M_%S")
if not os.path.exists('files/logs'):
os.makedirs('logs')
log_filename = f'files/logs/{dump_name}_{current_date}.log'
logger = logging.getLogger(f'sumo_logger_{dump_name}')
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
if save_logs:
file_handler = logging.FileHandler(log_filename)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
return logger
def get_ref_emissions(self):
"""
:return: The sum of all emissions (in mg) from the reference simulation

View File

@@ -30,10 +30,11 @@ from model import Area, Vehicle, Lane, TrafficLight, Phase, Logic, Emission
class Data:
def __init__(self, map_bounds, areas_number,simulation_dir):
def __init__(self, dump_name, map_bounds, areas_number,simulation_dir):
"""
Data constructor
"""
self.dump_name = dump_name
self.map_bounds = map_bounds
self.areas_number = areas_number
self.dir = simulation_dir
@@ -109,7 +110,7 @@ class Data:
logics.append(Logic(l, phases))
area.add_tl(TrafficLight(tl_id, logics))
def save(self, dump_name):
def save(self):
"""
Save simulation data into a json file
:param dump_name: The name of your data dump
@@ -120,6 +121,6 @@ class Data:
os.mkdir(dump_dir)
s = json.dumps(json.loads(jsonpickle.encode(self)), indent=4) # for pretty JSON
with open(f'{dump_dir}/{dump_name}.json', 'w') as f:
with open(f'{dump_dir}/{self.dump_name}.json', 'w') as f:
f.write(s)
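With this change the dump name is carried by the Data object itself, so save() no longer needs an argument. A rough usage sketch, assuming an open TraCI connection; the dump and simulation directory names are placeholders:

import traci
from data import Data

# assumes traci.start(...) has already been called for the chosen simulation
data = Data('my_dump', traci.simulation.getNetBoundary(), 10, 'my_simulation')
data.init_grid()
data.add_data_to_areas()
data.save()  # now writes files/dump/my_dump.json, derived from data.dump_name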

View File

@@ -4,28 +4,16 @@ Created on 17 oct. 2018
@author: Axel Huynh-Phuc, Thibaud Gasser
"""
"""
This module defines the entry point of the application
"""
import argparse
import csv
import datetime
import itertools
import os
import sys
import time
import traci
from typing import List
import jsonpickle
from parse import search
from shapely.geometry import LineString
import actions
from config import Config
from data import Data
from model import Area, Vehicle, Lane, TrafficLight, Phase, Logic, Emission
from runner import RunProcess
def compute_vehicle_emissions(veh_id):
@@ -57,7 +45,7 @@ def get_all_vehicles() -> List[Vehicle]:
return vehicles
def get_emissions(grid: List[Area], vehicles: List[Vehicle], current_step, config, logger):
def get_emissions(p : RunProcess, vehicles: List[Vehicle], current_step):
"""
For each area retrieves the acquired emissions in the window,
and acts according to the configuration chosen by the user
@@ -68,7 +56,7 @@ def get_emissions(grid: List[Area], vehicles: List[Vehicle], current_step, confi
:param logger: The simulation logger
:return:
"""
for area in grid:
for area in p.data.grid:
total_emissions = Emission()
for vehicle in vehicles:
if vehicle.pos in area:
@@ -76,24 +64,24 @@ def get_emissions(grid: List[Area], vehicles: List[Vehicle], current_step, confi
# Add the total pollutant emissions at the current step into memory
area.emissions_by_step.append(total_emissions)
# If the sum of pollutant emissions (in mg) exceeds the threshold
if area.sum_emissions_into_window(current_step) >= config.emissions_threshold:
if area.sum_emissions_into_window(current_step) >= p.config.emissions_threshold:
if config.limit_speed_mode and not area.limited_speed:
logger.info(f'Action - Decreased max speed into {area.name} by {config.speed_rf * 100}%')
actions.limit_speed_into_area(area, config.speed_rf)
if config.adjust_traffic_light_mode and not area.tls_adjusted:
logger.info(
f'Action - Decreased traffic lights duration by {config.trafficLights_duration_rf * 100}%')
actions.adjust_traffic_light_phase_duration(area, config.trafficLights_duration_rf)
if p.config.limit_speed_mode and not area.limited_speed:
p.logger.info(f'Action - Decreased max speed into {area.name} by {p.config.speed_rf * 100}%')
actions.limit_speed_into_area(area, p.config.speed_rf)
if p.config.adjust_traffic_light_mode and not area.tls_adjusted:
p.logger.info(
f'Action - Decreased traffic lights duration by {p.config.trafficLights_duration_rf * 100}%')
actions.adjust_traffic_light_phase_duration(area, p.config.trafficLights_duration_rf)
if config.lock_area_mode and not area.locked:
if p.config.lock_area_mode and not area.locked:
if actions.count_vehicles_in_area(area):
logger.info(f'Action - {area.name} blocked')
p.logger.info(f'Action - {area.name} blocked')
actions.lock_area(area)
if config.weight_routing_mode and not area.weight_adjusted:
if p.config.weight_routing_mode and not area.weight_adjusted:
actions.adjust_edges_weights(area)
traci.polygon.setFilled(area.name, True)
@@ -112,26 +100,3 @@ def get_reduction_percentage(ref, total):
"""
return (ref - total) / ref * 100
def export_data_to_csv(config, conf, grid, dump_name):
"""
Export all Emission objects as a CSV file into the csv directory
:param config: The simulation configuration
:param grid: The list of areas
:return:
"""
csv_dir = 'files/csv'
if not os.path.exists(csv_dir):
os.mkdir(csv_dir)
now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
with open(f'files/csv/{dump_name}_{conf}_{now}.csv', 'w') as f:
writer = csv.writer(f)
# Write CSV headers
writer.writerow(itertools.chain(('Step',), (a.name for a in grid)))
# Write all areas emission value for each step
for step in range(config.n_steps):
em_for_step = (f'{a.emissions_by_step[step].value():.3f}' for a in grid)
writer.writerow(itertools.chain((step,), em_for_step))

View File

@@ -5,13 +5,13 @@
"n_steps": 200,
"window_size":100,
"without_actions_mode": true,
"without_actions_mode": false,
"limit_speed_mode": true,
"speed_rf": 0.1,
"speed_rf": 1.1,
"adjust_traffic_light_mode": true,
"trafficLights_duration_rf": 0.2,
"trafficLights_duration_rf": 0.3,
"weight_routing_mode": false,

126 sumo_project/main.py Normal file
View File

@@ -0,0 +1,126 @@
'''
Created on 19 Jan. 2019
@author: Axel Huynh-Phuc
'''
import sys
import os
import argparse
import traci
import time
import jsonpickle
from data import Data
from config import Config
from runner import RunProcess
"""
Init the Traci API
"""
if 'SUMO_HOME' in os.environ:
tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
sys.path.append(tools)
else:
sys.exit("please declare environment variable 'SUMO_HOME'")
def add_options(parser):
"""
Add command line options
:param parser: The command line parser
:return:
"""
# TODO: Make -areas & -simulation_dir required when -new_dump is used
# Make -c required when -run is used
parser.add_argument("-new_dump", "--new_dump", type=str,
required=False, help='Load and create a new data dump with the configuration file chosen')
parser.add_argument("-areas", "--areas", type=int, required=False,
help='Will create a grid with "areas x areas" areas')
parser.add_argument("-simulation_dir", "--simulation_dir", type=str, required=False,
help='Choose the simulation directory')
parser.add_argument("-run", "--run", type=str,
help='Run a simulation with the dump chosen')
parser.add_argument("-c", "--c", nargs='+', type=str,
help='Choose your configuration file from your working directory')
parser.add_argument("-save", "--save", action="store_true",
help='Save the logs into the logs folder')
parser.add_argument("-csv", "--csv", action="store_true",
help="Export all data emissions into a CSV file")
def create_dump(dump_name, simulation_dir, areas_number):
"""
Create a new dump with config file and dump_name chosen
:param dump_name: The name of the data dump
:param simulation_dir: The simulation directory
:param areas_number: The number of areas in grid
:return:
"""
sumo_binary = os.path.join(os.environ['SUMO_HOME'], 'bin', 'sumo')
sumo_cmd = [sumo_binary, "-c", f'files/simulations/{simulation_dir}/osm.sumocfg']
traci.start(sumo_cmd)
if not os.path.isfile(f'files/dump/{dump_name}.json'):
start = time.perf_counter()
data = Data(dump_name, traci.simulation.getNetBoundary(), areas_number, simulation_dir)
data.init_grid()
data.add_data_to_areas()
data.save()
loading_time = round(time.perf_counter() - start, 2)
print(f'Data loaded ({loading_time}s)')
print(f'Dump {dump_name} created')
else:
print(f'Dump with name {dump_name} already exists')
traci.close(False)
def main(args):
"""
The entry point of the application
:param args: Command line options
:return:
"""
parser = argparse.ArgumentParser()
add_options(parser)
args = parser.parse_args(args)
if args.new_dump is not None:
if (args.simulation_dir is not None) and (args.areas is not None):
create_dump(args.new_dump, args.simulation_dir, args.areas)
if args.run is not None:
dump_path = f'files/dump/{args.run}.json'
if os.path.isfile(dump_path):
with open(dump_path, 'r') as f:
data = jsonpickle.decode(f.read())
process = []
if args.c is not None:
# Init all process
for conf in args.c:
config = Config()
config.import_config_file(conf)
config.init_traci(data.dir)
config.check_config()
p = RunProcess(data, config,args.save,args.csv)
p.init_logger()
process.append(p)
p.logger.info(f'Running simulation dump "{args.run}" with the config "{conf}" ...')
p.start()
p.join()
if __name__ == '__main__':
main(sys.argv[1:])
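Given the argparse options above, a typical workflow is to build a dump once and then run it against one or more configuration files. A hypothetical invocation, expressed as direct calls to main() so it stays in Python; the dump, directory, and config file names are placeholders:

from main import main

# 1. Build a 10x10 grid dump from a simulation directory (requires SUMO_HOME).
main(['-new_dump', 'my_dump', '-simulation_dir', 'my_simulation', '-areas', '10'])

# 2. Run that dump with two configurations, saving logs and exporting CSV data;
#    one RunProcess is created per file passed to -c.
main(['-run', 'my_dump', '-c', 'config1.json', 'config2.json', '-save', '-csv'])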

View File

@@ -1,181 +1,130 @@
'''
Created on 19 Jan. 2019
@author: Axel Huynh-Phuc
'''
import argparse
import os
import sys
import time
import traci
import logging
import itertools
import csv
import jsonpickle
import multiprocessing
import datetime
from config import Config
from data import Data
import emissions
from model import Emission
"""
Init the Traci API
"""
if 'SUMO_HOME' in os.environ:
tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
sys.path.append(tools)
else:
sys.exit("please declare environment variable 'SUMO_HOME'")
class RunProcess(multiprocessing.Process):
def __init__(self, data : Data, config : Config, save_logs, csv_export):
multiprocessing.Process.__init__(self)
self.data = data
self.config = config
self.save_logs = save_logs
self.csv_export = csv_export
def add_options(parser):
"""
Add command line options
:param parser: The command line parser
:return:
"""
# TODO: Make -areas & -simulation_dir required when -new_dump is used
# Make -c required when -run is used
parser.add_argument("-new_dump", "--new_dump", type=str,
required=False, help='Load and create a new data dump with the configuration file chosen')
parser.add_argument("-areas", "--areas", type=int, required=False,
help='Will create a grid with "areas x areas" areas')
parser.add_argument("-simulation_dir", "--simulation_dir", type=str, required=False,
help='Choose the simulation directory')
parser.add_argument("-run", "--run", type=str,
help='Run a simulation with the dump chosen')
parser.add_argument("-c", "--c", type=str,
help='Choose your configuration file from your working directory')
parser.add_argument("-save", "--save", action="store_true",
help='Save the logs into the logs folder')
parser.add_argument("-csv", "--csv", action="store_true",
help="Export all data emissions into a CSV file")
def init_logger(self):
now = datetime.datetime.now()
current_date = now.strftime("%Y_%m_%d_%H_%M_%S")
def create_dump(dump_name, simulation_dir, areas_number):
"""
Create a new dump with config file and dump_name chosen
:param dump_name: The name of the data dump
:param simulation_dir: The simulation directory
:param areas_number: The number of areas in grid
:return:
"""
sumo_binary = os.path.join(os.environ['SUMO_HOME'], 'bin', 'sumo')
sumo_cmd = [sumo_binary, "-c", f'files/simulations/{simulation_dir}/osm.sumocfg']
traci.start(sumo_cmd)
if not os.path.isfile(f'files/dump/{dump_name}.json'):
start = time.perf_counter()
data = Data(traci.simulation.getNetBoundary(), areas_number, simulation_dir)
data.init_grid()
data.add_data_to_areas()
data.save(dump_name)
if not os.path.exists('files/logs'):
os.makedirs('files/logs')
# log_filename = f'files/logs/{logger_name}_{current_date}.log'
log_filename = f'files/logs/{current_date}.log'
conf_name = self.config.config_filename.replace('.json', '')
self.logger = logging.getLogger(f'{self.data.dir}_{conf_name}')
self.logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
if self.save_logs:
file_handler = logging.FileHandler(log_filename)
file_handler.setFormatter(formatter)
self.logger.addHandler(file_handler)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
self.logger.addHandler(handler)
loading_time = round(time.perf_counter() - start, 2)
print(f'Data loaded ({loading_time}s)')
print(f'Dump {dump_name} created')
else:
print(f'Dump with name {dump_name} already exist')
def export_data_to_csv(self):
"""
Export all Emission objects as a CSV file into the csv directory
"""
csv_dir = 'files/csv'
if not os.path.exists(csv_dir):
os.mkdir(csv_dir)
now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
conf_name = self.config.config_filename.replace('.json', '')
with open(f'files/csv/{self.data.dump_name}_{conf_name}_{now}.csv', 'w') as f:
writer = csv.writer(f)
# Write CSV headers
writer.writerow(itertools.chain(('Step',), (a.name for a in self.data.grid)))
# Write all areas emission value for each step
for step in range(self.config.n_steps):
em_for_step = (f'{a.emissions_by_step[step].value():.3f}' for a in self.data.grid)
writer.writerow(itertools.chain((step,), em_for_step))
traci.close(False)
def run(data : Data, config : Config, logger):
"""
Run a data set
:param data: The data instance
:param config: The config instance
:param logger: The logger instance
"""
try:
traci.start(config.sumo_cmd)
for area in data.grid: # Set acquisition window size
area.set_window_size(config.window_size)
traci.polygon.add(area.name, area.rectangle.exterior.coords, (255, 0, 0)) # Add polygon for UI
logger.info(f'Loaded simulation file : {config._SUMOCFG}')
logger.info('Loading data for the simulation')
start = time.perf_counter()
logger.info('Simulation started...')
step = 0
while step < config.n_steps:
traci.simulationStep()
vehicles = emissions.get_all_vehicles()
emissions.get_emissions(data.grid, vehicles, step, config, logger)
step += 1
print(f'step = {step}/{config.n_steps}', end='\r')
finally:
traci.close(False)
def main(args):
"""
The entry point of the application
:param args: Command line options
:return:
"""
parser = argparse.ArgumentParser()
add_options(parser)
args = parser.parse_args(args)
if args.new_dump is not None:
if (args.simulation_dir is not None) and (args.areas is not None):
create_dump(args.new_dump, args.simulation_dir, args.areas)
if args.run is not None:
dump_path = f'files/dump/{args.run}.json'
if os.path.isfile(dump_path):
with open(dump_path, 'r') as f:
data = jsonpickle.decode(f.read())
def run(self):
"""
Run a data set
"""
try:
self.init_logger()
config = Config()
if args.c is not None:
config.import_config_file(args.c)
config.init_traci(data.dir)
logger = config.init_logger(dump_name=args.run, save_logs=args.save)
config.check_config()
traci.start(self.config.sumo_cmd)
for area in self.data.grid: # Set acquisition window size
area.set_window_size(self.config.window_size)
traci.polygon.add(area.name, area.rectangle.exterior.coords, (255, 0, 0)) # Add polygon for UI
self.logger.info(f'Loaded simulation file : {self.config._SUMOCFG}')
self.logger.info('Loading data for the simulation')
logger.info(f'Running simulation dump {args.run}...')
start = time.perf_counter()
run(data, config, logger)
simulation_time = round(time.perf_counter() - start, 2)
logger.info(f'End of the simulation ({simulation_time}s)')
if args.csv:
emissions.export_data_to_csv(config, data.grid, dump_name=args.run)
logger.info(f'Exported data into the csv folder')
# 1 step is equal to one second simulated
logger.info(f'Real-time factor : {config.n_steps / simulation_time}')
self.logger.info('Simulation started...')
step = 0
while step < self.config.n_steps:
traci.simulationStep()
vehicles = emissions.get_all_vehicles()
emissions.get_emissions(self, vehicles, step)
step += 1
print(f'step = {step}/{self.config.n_steps}', end='\r')
finally:
traci.close(False)
total_emissions = Emission()
for area in data.grid:
for area in self.data.grid:
total_emissions += area.sum_all_emissions()
logger.info(f'Total emissions = {total_emissions.value()} mg')
if not config.without_actions_mode: # If it's not a simulation without actions
ref = config.get_ref_emissions()
self.logger.info(f'Total emissions = {total_emissions.value()} mg')
if not self.config.without_actions_mode: # If it's not a simulation without actions
ref = self.config.get_ref_emissions()
if not (ref is None): # If a reference value exists (add yours in config.py)
global_diff = (ref.value() - total_emissions.value()) / ref.value()
self.logger.info(f'Global reduction percentage of emissions = {global_diff * 100} %')
self.logger.info(f'-> CO2 emissions = {emissions.get_reduction_percentage(ref.co2, total_emissions.co2)} %')
self.logger.info(f'-> CO emissions = {emissions.get_reduction_percentage(ref.co, total_emissions.co)} %')
self.logger.info(f'-> Nox emissions = {emissions.get_reduction_percentage(ref.nox, total_emissions.nox)} %')
self.logger.info(f'-> HC emissions = {emissions.get_reduction_percentage(ref.hc, total_emissions.hc)} %')
self.logger.info(f'-> PMx emissions = {emissions.get_reduction_percentage(ref.pmx, total_emissions.pmx)} %')
simulation_time = round(time.perf_counter() - start, 2)
self.logger.info(f'End of the simulation ({simulation_time}s)')
# 1 step is equal to one second simulated
self.logger.info(f'Real-time factor : {self.config.n_steps / simulation_time}')
logger.info(f'Global reduction percentage of emissions = {global_diff * 100} %')
logger.info(f'-> CO2 emissions = {emissions.get_reduction_percentage(ref.co2, total_emissions.co2)} %')
logger.info(f'-> CO emissions = {emissions.get_reduction_percentage(ref.co, total_emissions.co)} %')
logger.info(f'-> Nox emissions = {emissions.get_reduction_percentage(ref.nox, total_emissions.nox)} %')
logger.info(f'-> HC emissions = {emissions.get_reduction_percentage(ref.hc, total_emissions.hc)} %')
logger.info(f'-> PMx emissions = {emissions.get_reduction_percentage(ref.pmx, total_emissions.pmx)} %')
if __name__ == '__main__':
main(sys.argv[1:])
if self.csv_export:
self.export_data_to_csv()
self.logger.info(f'Exported data into the csv folder')
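One detail worth noting in main.py above: p.start() is immediately followed by p.join() inside the loop over configuration files, so the runs still execute one after another. If genuinely concurrent runs are wanted, the processes could be started first and joined afterwards; a sketch against the same RunProcess API (error handling omitted, and each child process opens its own SUMO/TraCI session via its config):

processes = []
for conf in args.c:
    config = Config()
    config.import_config_file(conf)
    config.init_traci(data.dir)
    config.check_config()
    p = RunProcess(data, config, args.save, args.csv)
    p.init_logger()
    processes.append(p)

for p in processes:
    p.start()  # launch every configuration in its own process
for p in processes:
    p.join()   # then wait for all of them to finish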