Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
126 changes: 107 additions & 19 deletions notebooks/figures.ipynb

Large diffs are not rendered by default.

72 changes: 58 additions & 14 deletions scripts/generate.jl
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@ s = ArgParseSettings()
help = "Output directory path."
arg_type = String
default = "./data"
"--suffix"
help = "Suffix to add to the output file name."
arg_type = String
default = ""
"--n_samples"
help = "Number of samples to generate if `--label_train` is provided the samples will be labeled."
arg_type = Int
Expand All @@ -28,6 +32,9 @@ s = ArgParseSettings()
"--simple"
help = "Simplify the network model."
action = :store_true
"--remove_random_branch"
help = "Remove a random branch from the network in each sample."
action = :store_true
end
args = parse_args(ARGS, s)

Expand Down Expand Up @@ -76,7 +83,7 @@ function label_network(network_data::Dict{String,Any}, load::Dict{String,Any})::
network_data = deepcopy(network_data)
network_data["load"] = load

ipopt_args = Dict("tol" => 1e-8, "print_level" => 1, "sb" => "yes")
ipopt_args = Dict("tol" => 1e-6, "print_level" => 1, "sb" => "yes")
if use_hsl
hsl_args = Dict("linear_solver" => "ma57", "hsllib" => HSL_jll.libhsl_path)
solver = optimizer_with_attributes(Ipopt.Optimizer, merge(ipopt_args, hsl_args)...)
Expand Down Expand Up @@ -165,8 +172,8 @@ function simplify_network(network_data::Dict{String,Any})::Dict{String,Any}
network_data["gen"]["$(i)"]["qmin"] = sum([gen["qmin"] for gen in gens])
network_data["gen"]["$(i)"]["qmax"] = sum([gen["qmax"] for gen in gens])
# assert model is quadratic
if network_data["gen"]["$(i)"]["model"] != 2
throw(ArgumentError("Only the quadratic cost model is supported."))
if network_data["gen"]["$(i)"]["model"] != 2
throw(ArgumentError("Only the quadratic cost model is supported."))
end
# compute average of the cost functions
ncost = maximum([gen["ncost"] for gen in gens])
Expand All @@ -186,35 +193,50 @@ Generate `n_samples` from the power system network. Each sample is labeled with
Samples that did not converge are discarded. Outputs a dictionary with the following keys:
- `load`: an array of (n_samples, n_load, 2) with the active and reactive load at each bus.
- `gen`: an array of (n_samples, n_gen, 2) with the active and reactive power generated at each generator.
- `branch`: an array of (n_samples, n_branch, 4) with the columns [pf, qf, pt, qt].
- `branch_status`: an array of (n_samples, n_branch) with the status of the branches.
- `termination_status`: an array of (n_samples,) with the termination status of the optimization problem.
- `primal_status`: an array of (n_samples,) with the primal status of the optimization problem.
- `dual_status`: an array of (n_samples,) with the dual status of the optimization problem.
- `solve_time`: an array of (n_samples,) with the time taken to solve the optimization problem.
- `objective`: an array of (n_samples,) with the objective value of the optimization problem.
"""
function generate_samples_numpy(network_data, n_samples, min_load=0.9, max_load=1.1)
function generate_samples_numpy(network_data, n_samples, min_load=0.9, max_load=1.1, remove_random_branch=false)
count_atomic = Threads.Atomic{Int}(0)
data = Dict(
"bus" => Array{Float64,3}(undef, 2, length(network_data["bus"]), n_samples),
"load" => Array{Float64,3}(undef, 2, length(network_data["load"]), n_samples),
"gen" => Array{Float64,3}(undef, 2, length(network_data["gen"]), n_samples),
"branch" => Array{Float64,3}(undef, 4, length(network_data["branch"]), n_samples),
"branch_status" => Array{Int}(undef, length(network_data["branch"]), n_samples),
"termination_status" => Array{String}(undef, n_samples),
"primal_status" => Array{String}(undef, n_samples),
"dual_status" => Array{String}(undef, n_samples),
"solve_time" => Array{Float64}(undef, n_samples),
"objective" => Array{Float64}(undef, n_samples),
)
br_removed_index = ""
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove

progress = Progress(n_samples, desc="Generating labeled samples:")
Threads.@threads for i = 1:n_samples
solved = false
while !solved
Threads.atomic_add!(count_atomic, 1)
load = sample_load(network_data, min_load, max_load)
result, solved = label_network(network_data, load)

_network_data = deepcopy(network_data)
load = sample_load(_network_data, min_load, max_load)
if remove_random_branch
branch_to_remove = rand(keys(_network_data["branch"]))
_network_data["branch"][branch_to_remove]["br_status"] = 0
# br_removed_index = parse(Int, branch_to_remove)
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove the commented-out code.

br_removed_index = branch_to_remove
println("testing removal of branch $br_removed_index for feasibility")
end
result, solved = label_network(_network_data, load)
if !solved
# no... this is in NOT solved... missing some logic here
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove

continue
end
println("branch $br_removed_index found to be feasibile")
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove the debug logging.

for j = 1:length(network_data["bus"])
data["bus"][1, j, i] = result["solution"]["bus"]["$(j)"]["vm"]
data["bus"][2, j, i] = result["solution"]["bus"]["$(j)"]["va"]
Expand All @@ -228,22 +250,35 @@ function generate_samples_numpy(network_data, n_samples, min_load=0.9, max_load=
data["gen"][2, j, i] = result["solution"]["gen"]["$(j)"]["qg"]
end
for j = 1:length(network_data["branch"])
data["branch"][1, j, i] = result["solution"]["branch"]["$(j)"]["pf"]
data["branch"][2, j, i] = result["solution"]["branch"]["$(j)"]["qf"]
data["branch"][3, j, i] = result["solution"]["branch"]["$(j)"]["pt"]
data["branch"][4, j, i] = result["solution"]["branch"]["$(j)"]["qt"]
if "$(j)" in keys(result["solution"]["branch"])
data["branch"][1, j, i] = result["solution"]["branch"]["$(j)"]["pf"]
data["branch"][2, j, i] = result["solution"]["branch"]["$(j)"]["qf"]
data["branch"][3, j, i] = result["solution"]["branch"]["$(j)"]["pt"]
data["branch"][4, j, i] = result["solution"]["branch"]["$(j)"]["qt"]
else
data["branch"][1, j, i] = 0
data["branch"][2, j, i] = 0
data["branch"][3, j, i] = 0
data["branch"][4, j, i] = 0
end
# branch status is stored in the network data, not in the result
data["branch_status"][j, i] = _network_data["branch"]["$(j)"]["br_status"]
end
data["termination_status"][i] = string(result["termination_status"])
data["primal_status"][i] = string(result["primal_status"])
data["dual_status"][i] = string(result["dual_status"])
data["solve_time"][i] = result["solve_time"]
data["objective"][i] = result["objective"]

# # maybe need to move this
# # or make a deep copy
# network_data["branch"][branch_to_remove]["br_status"] = 1
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove.

end
next!(progress)
end
count = count_atomic[]
println("Generated $n_samples feasible samples using $count samples. Feasibility ratio: $(n_samples / count).")
return data
return data, br_removed_index
end

function check_assumptions!(network_data)
Expand Down Expand Up @@ -304,6 +339,8 @@ function main()
min_load = args["min_load"]
max_load = args["max_load"]
simple = args["simple"]
suffix = args["suffix"]
remove_random_branch = args["remove_random_branch"]

if max_load < min_load
error("max_load must be greater than min_load")
Expand All @@ -322,15 +359,22 @@ function main()

check_assumptions!(network_data)

# data = generate_samples_numpy(network_data, n_samples, min_load, max_load, remove_random_branch)
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove the debug logic

# br_removed_index = -1 if remove_random_branch=false
data, br_removed_index = generate_samples_numpy(network_data, n_samples, min_load, max_load, remove_random_branch)
if br_removed_index != ""
println("removing branch $br_removed_index from json file")
network_data["branch"][br_removed_index]["br_status"] = 0
end

casename_suffix = simple ? "_simple" : ""
# save network data in JSON format
open(joinpath(out_dir, casename * casename_suffix * ".json"), "w") do f
open(joinpath(out_dir, casename * casename_suffix * suffix * ".json"), "w") do f
JSON.print(f, network_data, 4)
end

data = generate_samples_numpy(network_data, n_samples, min_load, max_load)
# write to h5 file
h5file = joinpath(out_dir, casename * casename_suffix * ".h5")
h5file = joinpath(out_dir, casename * casename_suffix * suffix * ".h5")
h5open(h5file, "w") do file
for (k, v) in data
write(file, k, v)
Expand Down
37 changes: 26 additions & 11 deletions src/opf/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ def build_graph(
graph["bus"].params = bus_params.to_tensor()
graph["gen"].params = gen_params.to_tensor()
gen_index = torch.arange(params.n_gen)

Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should not be included in commit.

# Edges between buses
graph["bus", "branch", "bus"].edge_index = torch.stack(
[params.fr_bus, params.to_bus], dim=0
Expand Down Expand Up @@ -140,6 +141,7 @@ def __init__(
Sg: torch.Tensor,
Sf: torch.Tensor,
St: torch.Tensor,
branch_status: torch.Tensor,
graph: HeteroData,
casefile: str,
dual_graph: bool = False,
Expand All @@ -165,6 +167,7 @@ def __init__(
self.V = V
self.Sf = Sf
self.St = St
self.branch_status = branch_status
self.graph = graph
self.graph.casefile = casefile
self.dual_graph = dual_graph
Expand All @@ -179,12 +182,15 @@ def __getitem__(self, index) -> PowerflowData:
data["bus"].load = self.load[index]
data["bus"].V = self.V[index]
data["gen"].Sg = self.Sg[index]
branch_mask = self.branch_status[index]
if self.dual_graph:
data["branch"].Sf = self.Sf[index]
data["branch"].St = self.St[index]
data["bus", "branch", "bus"].Sf = self.Sf[index, branch_mask]
data["bus", "branch", "bus"].St = self.St[index, branch_mask]
else:
data["bus", "branch", "bus"].Sf = self.Sf[index]
data["bus", "branch", "bus"].St = self.St[index]
data["bus", "branch", "bus"].edge_index = data["bus", "branch", "bus"].edge_index[:, branch_mask[:, 0]]
data["bus", "branch", "bus"].params = data["bus", "branch", "bus"].params[branch_mask[:, 0]]
data["bus", "branch", "bus"].Sf = self.Sf[index, branch_mask]
data["bus", "branch", "bus"].St = self.St[index, branch_mask]

return PowerflowData(
data,
Expand Down Expand Up @@ -238,7 +244,11 @@ def prepare_data(self):

@property
def case_path(self):
return Path(self.data_dir / f"{self.case_name}.json")
# TODO: replace this and add a removed_branch argument
if self.case_name == "case118_ieee":
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this logic necessary? Can't I simply provide case_name case118_ieee_removed_branch?

return Path(self.data_dir / f"{self.case_name}_removed_branch.json")
else:
return Path(self.data_dir / f"{self.case_name}.json")

@property
def dataset_path(self):
Expand Down Expand Up @@ -287,6 +297,10 @@ def setup(self, stage: Optional[str] = None):
branch = torch.from_numpy(f["branch"][:]).float() # type: ignore
Sf = branch[..., :2]
St = branch[..., 2:]
if "branch_status" in f:
branch_status = torch.from_numpy(f["branch_status"][:]).bool()
else:
branch_status = torch.ones_like(Sf, dtype=torch.bool)
self.powerflow_parameters.reference_cost = torch.from_numpy(f["objective"][:]).mean().float() # type: ignore

if self.graph is None:
Expand All @@ -300,6 +314,7 @@ def setup(self, stage: Optional[str] = None):

Sd = torch.zeros((n_samples, n_bus, 2))
Sd[:, self.powerflow_parameters.load_bus_ids, :] = load
# branch_status = self.powerflow_parameters ...??
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

don't commit commented out code


# figure out the number of samples for each set
if data_splits_total := sum(self.data_splits) > 1:
Expand All @@ -309,21 +324,21 @@ def setup(self, stage: Optional[str] = None):
self.data_splits = [x / data_splits_total for x in self.data_splits]

n_train, n_val, n_test = [int(split * n_samples) for split in self.data_splits]
# I am indexing from the end, so that I can change the size of the training dataset
# without changing which samples are used for testing and validation
val_offset = n_samples - n_val - n_test
test_offset = n_samples - n_test
# Create a tuple of variables, each row defining a sample in the dataset
# Will programmatically split them into train, val and test
variables = (Sd, V, Sg, Sf, St)
variables = (Sd, V, Sg, Sf, St, branch_status)

if stage == "fit" or stage is None:
self.train_dataset = OPFDataset(
*(x[:n_train] for x in variables),
graph=self.graph,
casefile=self.powerflow_parameters.casefile,
dual_graph=self.dual_graph,
)
# I am indexing from the end, so that I can change the size of the training dataset
# without changing which samples are used for testing and validation
val_offset = n_samples - n_val - n_test
test_offset = n_samples - n_test

self.val_dataset = OPFDataset(
*(x[val_offset:test_offset] for x in variables),
graph=self.graph,
Expand Down