User guide

Prerequisites

First, load the package:

 # CRAN limits CPU usage
data.table::setDTthreads(2)
library(antaresEditObject)

You need to set the path to an Antares study in “input” mode:

antaresRead::setSimulationPath(path = "path/to/study", simulation = "input")

Or you can simply create a new study:

createStudy("path/to/study")

Save study

Before modifying your study, you can save it in an archive:

backupStudy(what = "input")

This will create a .tar.gz file in your study folder.
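
To check that the archive has been created, you can list it from R (a minimal sketch; it assumes the simulation options have already been set with setSimulationPath() or createStudy()):

# List the archives present in the study folder
list.files(antaresRead::simOptions()$studyPath, pattern = "\\.tar\\.gz$")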

Create a new area

You can create a new area with:

createArea(name = "myarea")

# The new area should appear here:
antaresRead::getAreas()

You can specify the location of the area on the map, as well as its color.

There are two helper functions for advanced area parameters: filteringOptions() and nodalOptimizationOptions().
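
For example, here is a sketch of an area created with explicit coordinates, color and advanced parameters. The arguments localization, color, filtering and nodalOptimization, and the helpers' argument names, are taken from the package documentation rather than from this guide, so check them against your installed version:

createArea(
  name = "myarea2",
  localization = c(1, 1),
  color = grDevices::rgb(230, 108, 44, maxColorValue = 255),
  filtering = filteringOptions(filtering_synthesis = c("hourly", "daily")),
  nodalOptimization = nodalOptimizationOptions(spread_unsupplied_energy_cost = 10)
)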

Create a new cluster

You can initialize a cluster with some parameters:

createCluster(
  area = "myarea", 
  cluster_name = "myareacluster",
  group = "other",
  unitcount = 1,
  nominalcapacity = 8400,
  `min-down-time` = 0,
  `marginal-cost` = 0.010000,
  `market-bid-cost` = 0.010000
)
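
As with areas, you can check that the cluster has been created, for example with antaresRead::readClusterDesc() (assumed here to list the thermal clusters of the study):

# The new cluster should appear here:
antaresRead::readClusterDesc()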

You can also edit the settings of an existing cluster:

editCluster(
  area = "myarea", 
  cluster_name = "myareacluster", 
  nominalcapacity = 10600.000
)

Create a binding constraint

createBindingConstraint(
  name = "myconstraint", 
  values = matrix(data = c(rep(c(19200, 0, 0), each = 366)), ncol = 3), 
  enabled = FALSE, 
  timeStep = "daily",
  operator = "both",
  coefficients = c("fr%myarea" = 1)
)

Create several Pumped Storage Power plants (PSP)

pspData <- data.frame(
  area = c("a", "b"), 
  installedCapacity = c(800,900)
)

createPSP(
  areasAndCapacities = pspData, 
  efficiency = 0.75
)

Create several Demand Side Response (DSR) units

dsrData <- data.frame(
  area = c("a", "b"),
  unit = c(10,20), 
  nominalCapacity = c(100, 120),
  marginalCost = c(52, 65),
  hour = c(3, 7)
)
  
createDSR(dsrData)

Update general settings

For example, enable year-by-year simulation output and limit the number of Monte-Carlo years to 10:

updateGeneralSettings(year.by.year = TRUE, nbyears = 10)

Remove methods

You can remove areas, links, clusters and binding constraints from the input folder with the remove* functions, e.g.:

removeArea("myarea")
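
The other remove* functions follow the same pattern. A sketch with placeholder names, assuming removeLink(), removeCluster() and removeBindingConstraint() take the arguments shown:

removeLink(from = "area1", to = "area2")
removeCluster(area = "myarea", cluster_name = "myareacluster")
removeBindingConstraint(name = "myconstraint")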

Run Time-Series Generator

First, update the general settings to specify which time series to generate:

updateGeneralSettings(generate = "thermal")

Then run TS-generator:

runTsGenerator(
  path_solver = "C:/path/to/antares-solver.exe", 
  show_output_on_console = TRUE
)

Run an Antares simulation

Launch an Antares simulation from R:

runSimulation(
  name = "myAwesomeSimulation", 
  mode = "economy",
  path_solver = "C:/path/to/antares-solver.exe", 
  show_output_on_console = TRUE
)

Read a time series, update it and write it

To update an existing time series and write it back to the study, you can use the following commands:

# File path of the study (study version >= 820, i.e. v8.2 or later)
my_study <- file.path("", "", "")
opts <- setSimulationPath(my_study, simulation = "input")
opts$timeIdMax <- 8760

# Links, use only one link
my_link <- as.character(getLinks()[1])
ts_input <- readInputTS(linkCapacity = my_link, opts = opts)

# Sort the rows by tsId then timeId before reshaping
data.table::setorderv(ts_input, cols = c("tsId", "timeId"))

# Reshape to wide format: writeInputTS() expects an 8760 x N matrix
metrics <- c("transCapacityDirect", "transCapacityIndirect")
ts_input_reformatted <- data.table::dcast(ts_input,
                                          timeId ~ tsId,
                                          value.var = metrics
                                          )
# Add a value my_param to every entry of the matrix
my_param <- 123
writeInputTS(data = ts_input_reformatted[,2:ncol(ts_input_reformatted)] + my_param,
             type = "tsLink",
             link = my_link,
             overwrite = TRUE,
             opts = opts
             )


# Thermal, use only one area and one cluster
my_area <- "zone"
my_cluster <- "mon_cluster"
ts_input <- readInputTS(thermalAvailabilities = my_area, opts = opts)
ts_input <- ts_input[cluster == paste0(my_area,"_",my_cluster)]

# Sort the rows by tsId then timeId before reshaping
data.table::setorderv(ts_input, cols = c("tsId", "timeId"))

# Reshape to wide format: editCluster() expects an 8760 x N matrix
metrics <- c("ThermalAvailabilities")
ts_input_reformatted <- data.table::dcast(ts_input,
                                          timeId ~ tsId,
                                          value.var = metrics
                                          )

# Add a value my_param to every entry of the matrix
my_param <- 1000
editCluster(area = my_area,
            cluster_name = my_cluster,
            time_series = ts_input_reformatted[,2:ncol(ts_input_reformatted)] + my_param,
            opts = opts
            )


# Run of River, use only one area
my_area <- "zone"
ts_input <- readInputTS(ror = my_area, opts = opts)

# Sort the rows by tsId then timeId before reshaping
data.table::setorderv(ts_input, cols = c("tsId", "timeId"))

# Reshape to wide format: writeInputTS() expects an 8760 x N matrix
metrics <- c("ror")
ts_input_reformatted <- data.table::dcast(ts_input,
                                          timeId ~ tsId,
                                          value.var = metrics
                                          )

# Add a value my_param to every entry of the matrix
my_param <- 1000
writeInputTS(area = my_area,
             type = "hydroROR",
             data = ts_input_reformatted[,2:ncol(ts_input_reformatted)] + my_param,
             overwrite = TRUE,
             opts = opts
             )