e2nIEE / pandapipes
Showing 26 of 122 files from the diff.
Other files ignored by Codecov
setup.py has changed.
README.rst has changed.
CHANGELOG.rst has changed.

@@ -5,6 +5,7 @@
Loading
5 5
import matplotlib.pyplot as plt
6 6
import numpy as np
7 7
8 +
8 9
from pandapipes.component_models.abstract_models import BranchWInternalsComponent
9 10
from pandapipes.component_models.auxiliaries.component_toolbox import p_correction_height_air, \
10 11
    vinterp
@@ -17,7 +18,7 @@
Loading
17 18
from pandapipes.constants import NORMAL_TEMPERATURE, NORMAL_PRESSURE
18 19
19 20
from pandapipes.pipeflow_setup import get_net_option, get_fluid, get_lookup
20 -
from pandapipes.internals_toolbox import _sum_by_group
21 +
from pandapipes.internals_toolbox import _sum_by_group, select_from_pit
21 22
from numpy import dtype
22 23
23 24
try:
@@ -235,8 +236,11 @@
Loading
235 236
                idx_active, v_gas_from, v_gas_to, v_gas_mean, normfactor_from, normfactor_to,
236 237
                np.ones_like(idx_active))
237 238
238 -
            res_table["v_from_m_per_s"].values[placement_table] = v_gas_from_sum / internal_pipes
239 -
            res_table["v_to_m_per_s"].values[placement_table] = v_gas_to_sum / internal_pipes
239 +
            v_gas_from_ordered = select_from_pit(from_nodes,from_junction_nodes, v_gas_from)
240 +
            v_gas_to_ordered = select_from_pit(to_nodes,to_junction_nodes, v_gas_to)
241 +
242 +
            res_table["v_from_m_per_s"].values[placement_table] = v_gas_from_ordered
243 +
            res_table["v_to_m_per_s"].values[placement_table] = v_gas_to_ordered
240 244
            res_table["v_mean_m_per_s"].values[placement_table] = v_gas_mean_sum / internal_pipes
241 245
            res_table["normfactor_from"].values[placement_table] = nf_from_sum / internal_pipes
242 246
            res_table["normfactor_to"].values[placement_table] = nf_to_sum / internal_pipes
@@ -259,11 +263,12 @@
Loading
259 263
    @classmethod
260 264
    def get_internal_results(cls, net, pipe):
261 265
        """
266 +
        Retrieve velocity (at to/from node; mean), pressure and temperature of the internal sections of pipes. The pipes have to have at least 2 internal sections.
262 267
263 268
        :param net: The pandapipes network
264 269
        :type net: pandapipesNet
265 -
        :param pipe:
266 -
        :type pipe:
270 +
        :param pipe: indices of pipes to evaluate
271 +
        :type pipe: np.array
267 272
        :return: pipe_results
268 273
        :rtype:
269 274
        """
@@ -274,7 +279,10 @@
Loading
274 279
        pipe_results = dict()
275 280
        pipe_results["PINIT"] = np.zeros((len(p_node_idx), 2), dtype=np.float64)
276 281
        pipe_results["TINIT"] = np.zeros((len(p_node_idx), 2), dtype=np.float64)
277 -
        pipe_results["VINIT"] = np.zeros((len(v_pipe_idx), 2), dtype=np.float64)
282 +
        pipe_results["VINIT_FROM"] = np.zeros((len(v_pipe_idx), 2), dtype=np.float64)
283 +
        pipe_results["VINIT_TO"] = np.zeros((len(v_pipe_idx), 2), dtype=np.float64)
284 +
        pipe_results["VINIT_MEAN"] = np.zeros((len(v_pipe_idx), 2), dtype=np.float64)
285 +
278 286
279 287
        if np.all(internal_sections[pipe] >= 2):
280 288
            fluid = get_fluid(net)
@@ -292,8 +300,7 @@
Loading
292 300
293 301
            selected_indices_p_final = np.logical_or.reduce(selected_indices_p[:])
294 302
            selected_indices_v_final = np.logical_or.reduce(selected_indices_v[:])
295 -
            # a = np.where(int_p_lookup[:,0] ==  True, False)
296 -
            # b = np.where(int_v_lookup[:, 0] == v_pipe_idx, True, False)
303 +
297 304
            p_nodes = int_p_lookup[:, 1][selected_indices_p_final]
298 305
            v_nodes = int_v_lookup[:, 1][selected_indices_v_final]
299 306
@@ -313,17 +320,38 @@
Loading
313 320
                p_mean = np.where(p_from == p_to, p_from,
314 321
                                  2 / 3 * (p_from ** 3 - p_to ** 3) / (p_from ** 2 - p_to ** 2))
315 322
                numerator = NORMAL_PRESSURE * node_pit[v_nodes, TINIT_NODE]
316 -
                normfactor = numerator * fluid.get_property("compressibility", p_mean) \
323 +
                normfactor_mean = numerator * fluid.get_property("compressibility", p_mean) \
317 324
                             / (p_mean * NORMAL_TEMPERATURE)
325 +
                normfactor_from = numerator * fluid.get_property("compressibility", p_from) \
326 +
                                  / (p_from * NORMAL_TEMPERATURE)
327 +
                normfactor_to = numerator * fluid.get_property("compressibility", p_to) \
328 +
                                / (p_to * NORMAL_TEMPERATURE)
329 +
330 +
                v_pipe_data_mean = v_pipe_data * normfactor_mean
331 +
                v_pipe_data_from = v_pipe_data * normfactor_from
332 +
                v_pipe_data_to = v_pipe_data * normfactor_to
333 +
334 +
                pipe_results["VINIT_FROM"][:, 0] = v_pipe_idx
335 +
                pipe_results["VINIT_FROM"][:, 1] = v_pipe_data_from
336 +
                pipe_results["VINIT_TO"][:, 0] = v_pipe_idx
337 +
                pipe_results["VINIT_TO"][:, 1] = v_pipe_data_to
338 +
                pipe_results["VINIT_MEAN"][:, 0] = v_pipe_idx
339 +
                pipe_results["VINIT_MEAN"][:, 1] = v_pipe_data_mean
340 +
            else:
341 +
                pipe_results["VINIT_FROM"][:, 0] = v_pipe_idx
342 +
                pipe_results["VINIT_FROM"][:, 1] = v_pipe_data
343 +
                pipe_results["VINIT_TO"][:, 0] = v_pipe_idx
344 +
                pipe_results["VINIT_TO"][:, 1] = v_pipe_data
345 +
                pipe_results["VINIT_MEAN"][:, 0] = v_pipe_idx
346 +
                pipe_results["VINIT_MEAN"][:, 1] = v_pipe_data
347 +
318 348
319 -
                v_pipe_data = v_pipe_data * normfactor
320 349
321 350
            pipe_results["PINIT"][:, 0] = p_node_idx
322 351
            pipe_results["PINIT"][:, 1] = p_node_data
323 352
            pipe_results["TINIT"][:, 0] = p_node_idx
324 353
            pipe_results["TINIT"][:, 1] = t_node_data
325 -
            pipe_results["VINIT"][:, 0] = v_pipe_idx
326 -
            pipe_results["VINIT"][:, 1] = v_pipe_data
354 +
327 355
        else:
328 356
            logger.warning("For at least one pipe no internal data is available.")
329 357
@@ -394,10 +422,10 @@
Loading
394 422
        :return: No Output.
395 423
        """
396 424
        pipe_p_data_idx = np.where(pipe_results["PINIT"][:, 0] == pipe)
397 -
        pipe_v_data_idx = np.where(pipe_results["VINIT"][:, 0] == pipe)
425 +
        pipe_v_data_idx = np.where(pipe_results["VINIT_MEAN"][:, 0] == pipe)
398 426
        pipe_p_data = pipe_results["PINIT"][pipe_p_data_idx, 1]
399 427
        pipe_t_data = pipe_results["TINIT"][pipe_p_data_idx, 1]
400 -
        pipe_v_data = pipe_results["VINIT"][pipe_v_data_idx, 1]
428 +
        pipe_v_data = pipe_results["VINIT_MEAN"][pipe_v_data_idx, 1]
401 429
        node_pit = net["_pit"]["node"]
402 430
403 431
        junction_idx_lookup = get_lookup(net, "node", "index")[Junction.table_name()]

@@ -0,0 +1,82 @@
Loading
1 +
# -*- coding: utf-8 -*-
2 +
3 +
# Copyright (c) 2016-2020 by University of Kassel and Fraunhofer Institute for Energy Economics
4 +
# and Energy System Technology (IEE), Kassel. All rights reserved.
5 +
6 +
import networkx as nx
7 +
8 +
from pandapower.topology.create_graph import get_edge_table, add_edges, init_par
9 +
10 +
try:
11 +
    import pplog as logging
12 +
except ImportError:
13 +
    import logging
14 +
15 +
INDEX = 0
16 +
F_JUNCTION = 1
17 +
T_JUNCTION = 2
18 +
19 +
WEIGHT = 0
20 +
21 +
logger = logging.getLogger(__name__)
22 +
23 +
24 +
def create_nxgraph(net, include_pipes=True, include_valves=True, include_pumps=True,
25 +
                   nogojunctions=None, notravjunctions=None, multi=True,
26 +
                   include_out_of_service=False):
27 +
28 +
    if multi:
29 +
        mg = nx.MultiGraph()
30 +
    else:
31 +
        mg = nx.Graph()
32 +
33 +
    if hasattr(net, "pipe"):
34 +
        pipe = get_edge_table(net, "pipe", include_pipes)
35 +
        if pipe is not None:
36 +
            indices, parameter, in_service = init_par(pipe)
37 +
            indices[:, F_JUNCTION] = pipe.from_junction.values
38 +
            indices[:, T_JUNCTION] = pipe.to_junction.values
39 +
            parameter[:, WEIGHT] = pipe.length_km.values
40 +
            add_edges(mg, indices, parameter, in_service, net, "pipe")
41 +
42 +
    if hasattr(net, "valve"):
43 +
        valve = get_edge_table(net, "valve", include_valves)
44 +
        if valve is not None:
45 +
            indices, parameter, in_service = init_par(valve)
46 +
            indices[:, F_JUNCTION] = valve.from_junction.values
47 +
            indices[:, T_JUNCTION] = valve.to_junction.values
48 +
            add_edges(mg, indices, parameter, in_service, net, "valve")
49 +
50 +
    if hasattr(net, "pump"):
51 +
        pump = get_edge_table(net, "pump", include_pumps)
52 +
        if pump is not None:
53 +
            indices, parameter, in_service = init_par(pump)
54 +
            indices[:, F_JUNCTION] = pump.from_junction.values
55 +
            indices[:, T_JUNCTION] = pump.to_junction.values
56 +
            add_edges(mg, indices, parameter, in_service, net, "pump")
57 +
58 +
    # add all junctions that were not added when creating branches
59 +
    if len(mg.nodes()) < len(net.junction.index):
60 +
        for b in set(net.junction.index) - set(mg.nodes()):
61 +
            mg.add_node(b)
62 +
63 +
    # remove nogojunctions
64 +
    if nogojunctions is not None:
65 +
        for b in nogojunctions:
66 +
            mg.remove_node(b)
67 +
68 +
    # remove the edges pointing away of notravjunctions
69 +
    if notravjunctions is not None:
70 +
        for b in notravjunctions:
71 +
            for i in list(mg[b].keys()):
72 +
                try:
73 +
                    del mg[b][i]  # networkx versions < 2.0
74 +
                except:
75 +
                    del mg._adj[b][i]  # networkx versions 2.0
76 +
77 +
    # remove out of service junctions
78 +
    if not include_out_of_service:
79 +
        for b in net.junction.index[~net.junction.in_service.values]:
80 +
            mg.remove_node(b)
81 +
82 +
    return mg

@@ -17,6 +17,7 @@
Loading
17 17
from pandapipes.component_models import Junction
18 18
from pandapipes.component_models.abstract_models import NodeComponent, NodeElementComponent, \
19 19
    BranchComponent, BranchWInternalsComponent
20 +
from pandapower.auxiliary import ppException
20 21
21 22
try:
22 23
    import pplog as logging
@@ -113,7 +114,8 @@
Loading
113 114
    # ---------------------------------------------------------------------------------------------
114 115
    niter = 0
115 116
    create_internal_results(net)
116 -
    net["_internal_data"] = dict()
117 +
    if not get_net_option(net, "reuse_internal_data") or "_internal_data" not in net:
118 +
        net["_internal_data"] = dict()
117 119
118 120
    # This branch is used to stop the solver after a specified error tolerance is reached
119 121
    error_v, error_p, residual_norm = [], [], None
@@ -171,7 +173,8 @@
Loading
171 173
        logger.info("tol_v: %s" % get_net_option(net, "tol_v"))
172 174
        net['converged'] = True
173 175
174 -
    net.pop("_internal_data", None)
176 +
    if not get_net_option(net, "reuse_internal_data"):
177 +
        net.pop("_internal_data", None)
175 178
    set_user_pf_options(net, hyd_flag=True)
176 179
177 180
    return niter
@@ -353,3 +356,10 @@
Loading
353 356
        set_net_option(net, "alpha", current_alpha * 10 if current_alpha <= 0.1 else 1.0)
354 357
355 358
    return error_x0_increased, error_x1_increased
359 +
360 +
361 +
class PipeflowNotConverged(ppException):
362 +
    """
363 +
    Exception being raised in case pipeflow did not converge.
364 +
    """
365 +
    pass

@@ -444,11 +444,11 @@
Loading
444 444
            os.path.join(pp_dir, "properties", fluid, prop + ".txt"))
445 445
446 446
    liquids = ["water"]
447 -
    gases = ["air", "lgas", "hgas"]
447 +
    gases = ["air", "lgas", "hgas", "hydrogen"]
448 448
449 449
    if fluid == "natural_gas":
450 450
        logger.error("'natural_gas' is ambigious. Please choose 'hgas' or 'lgas' "
451 -
                     "(high- or low caloric natural gas)")
451 +
                     "(high- or low calorific natural gas)")
452 452
    if fluid not in liquids and fluid not in gases:
453 453
        raise AttributeError("Fluid '%s' not found in the fluid library. It might not be "
454 454
                             "implemented yet." % fluid)
@@ -484,7 +484,7 @@
Loading
484 484
    return fluid
485 485
486 486
487 -
def add_fluid_to_net(net, fluid, overwrite=True):
487 +
def _add_fluid_to_net(net, fluid, overwrite=True):
488 488
    """
489 489
    Adds a fluid to a net. If overwrite is False, a warning is printed and the fluid is not set.
490 490
@@ -501,7 +501,12 @@
Loading
501 501
        fluid_msg = "an existing fluid" if not hasattr(net["fluid"], "name") \
502 502
            else "the fluid %s" % net["fluid"].name
503 503
        logger.warning("The fluid %s would replace %s and thus cannot be created. Try to set "
504 -
                       "overwrite to False" % (fluid.name, fluid_msg))
504 +
                       "overwrite to True" % (fluid.name, fluid_msg))
505 505
        return
506 506
507 +
    if isinstance(fluid, str):
508 +
        logger.warning("Instead of a pandapipes.Fluid, a string ('%s') was passed to the fluid "
509 +
                       "argument. Internally, it will be passed to call_lib(fluid) to get the "
510 +
                       "respective pandapipes.Fluid." %fluid)
511 +
        fluid = call_lib(fluid)
507 512
    net["fluid"] = fluid

@@ -14,7 +14,7 @@
Loading
14 14
15 15
16 16
def build_system_matrix(net, branch_pit, node_pit, heat_mode):
17 -
    """
17 +
    """Builds the system matrix.
18 18
19 19
    :param net: The pandapipes network
20 20
    :type net: pandapipesNet
@@ -29,7 +29,7 @@
Loading
29 29
    """
30 30
    update_option = get_net_option(net, "only_update_hydraulic_matrix")
31 31
    update_only = update_option and "hydraulic_data_sorting" in net["_internal_data"] \
32 -
                  and "hydraulic_matrix" in net["_internal_data"]
32 +
        and "hydraulic_matrix" in net["_internal_data"]
33 33
34 34
    len_b = len(branch_pit)
35 35
    len_n = len(node_pit)

@@ -12,7 +12,7 @@
Loading
12 12
from pandapipes.plotting.generic_geodata import create_generic_coordinates
13 13
from pandapower.plotting import draw_collections
14 14
from itertools import chain
15 -
import numpy as np
15 +
16 16
17 17
try:
18 18
    import pplog as logging
@@ -45,8 +45,7 @@
Loading
45 45
    :type pipe_width: float, default 5.0
46 46
    :param junction_size: Relative size of junctions to plot. The value junction_size is multiplied\
47 47
            with mean_distance_between_buses, which equals the distance between the max geoocord\
48 -
            and the min divided by 200: \n
49 -
            >>> mean_distance_between_buses = sum((net['bus_geodata'].max() - net['bus_geodata'].min()) / 200)
48 +
            and the min divided by 200
50 49
    :type junction_size: float, default 1.0
51 50
    :param ext_grid_size: Relative size of ext_grids to plot. See bus sizes for details. Note: \
52 51
            ext_grids are plottet as rectangles
53 52
imilarity index 100%
54 53
ename from pandapipes/properties/carbondioxid/density.txt
55 54
ename to pandapipes/properties/carbondioxide/density.txt
56 55
imilarity index 100%
57 56
ename from pandapipes/properties/carbondioxid/heat_capacity.txt
58 57
ename to pandapipes/properties/carbondioxide/heat_capacity.txt
59 58
imilarity index 100%
60 59
ename from pandapipes/properties/carbondioxid/molar_mass.txt
61 60
ename to pandapipes/properties/carbondioxide/molar_mass.txt
62 61
imilarity index 100%
63 62
ename from pandapipes/properties/carbondioxid/viscosity.txt
64 63
ename to pandapipes/properties/carbondioxide/viscosity.txt

@@ -7,8 +7,12 @@
Loading
7 7
8 8
import pandas as pd
9 9
from pandapipes.component_models import ExtGrid, Pipe, Sink, Source, Junction
10 -
from pandapower.plotting.generic_geodata import coords_from_igraph
10 +
from pandapower.plotting.generic_geodata import coords_from_igraph, \
11 +
                                                _prepare_geodata_table, \
12 +
                                                _get_element_mask_from_nodes,\
13 +
                                                _igraph_meshed
11 14
15 +
import numpy as np
12 16
try:
13 17
    import igraph
14 18
@@ -24,7 +28,7 @@
Loading
24 28
logger = logging.getLogger(__name__)
25 29
26 30
27 -
def build_igraph_from_ppipes(net):
31 +
def build_igraph_from_ppipes(net, junctions=None):
28 32
    """
29 33
    This function uses the igraph library to create an igraph graph for a given pandapipes network.
30 34
    Pipes and valves are respected.
@@ -32,6 +36,8 @@
Loading
32 36
33 37
    :param net: The pandapipes network
34 38
    :type net: pandapipesNet
39 +
    :param junctions: subset of junctions that should be part of the graph
40 +
    :type junctions: list
35 41
    :return: The returned values are:
36 42
        - g - The igraph graph
37 43
        - meshed - a flag that states whether the graph is meshed
@@ -42,39 +48,47 @@
Loading
42 48
    """
43 49
44 50
    try:
45 -
        import igraph
51 +
        import igraph as ig
46 52
    except (DeprecationWarning, ImportError):
47 -
        raise ImportError("Please install python-igraph")
48 -
    g = igraph.Graph(directed=True)
49 -
    g.add_vertices(net.junction.shape[0])
50 -
    g.vs["label"] = net.junction.index.tolist()
51 -
    pp_junction_mapping = dict(list(zip(net.junction.index,
52 -
                                        list(range(net.junction.index.shape[0])))))
53 -
54 -
    for lix in net.pipe.index:
55 -
        fb, tb = net.pipe.at[lix, "from_junction"], net.pipe.at[lix, "to_junction"]
56 -
        g.add_edge(pp_junction_mapping[fb], pp_junction_mapping[tb],
57 -
                   weight=net.pipe.at[lix, "length_km"])
53 +
        raise ImportError("Please install python-igraph with "
54 +
                          "`pip install python-igraph` or "
55 +
                          "`conda install python-igraph` "
56 +
                          "or from https://www.lfd.uci.edu/~gohlke/pythonlibs")
57 +
    g = ig.Graph(directed=True)
58 +
    junction_index = net.junction.index if junctions is None else np.array(junctions)
59 +
    nr_junctions = len(junction_index)
60 +
    g.add_vertices(nr_junctions)
61 +
    g.vs["label"] = list(junction_index)
62 +
    pp_junction_mapping = dict(list(zip(junction_index, list(range(nr_junctions)))))
63 +
64 +
    mask = _get_element_mask_from_nodes(net, "pipe", ["from_junction", "to_junction"], junctions)
65 +
    for junction in net.pipe[mask].itertuples():
66 +
        g.add_edge(pp_junction_mapping[junction.from_junction],
67 +
                   pp_junction_mapping[junction.to_junction],
68 +
                   weight=junction.length_km)
58 69
59 70
    for comp in net['component_list']:
60 71
        if comp in [Source, Sink, ExtGrid, Pipe, Junction]:
61 72
            continue
62 -
        else:
63 -
            for comp_data in net[comp.table_name()].itertuples():
64 -
                g.add_edge(pp_junction_mapping[comp_data.from_junction],
65 -
                           pp_junction_mapping[comp_data.to_junction], weight=0.001)
73 +
        mask = _get_element_mask_from_nodes(net, comp.table_name(),
74 +
                                            ["from_junction", "to_junction"], 
75 +
                                            junctions)
76 +
        for comp_data in net[comp.table_name()][mask].itertuples():
77 +
            g.add_edge(pp_junction_mapping[comp_data.from_junction],
78 +
                       pp_junction_mapping[comp_data.to_junction],
79 +
                       weight=0.001)
66 80
67 -
    meshed = False
68 -
    for i in range(1, net.junction.shape[0]):
69 -
        if len(g.get_all_shortest_paths(0, i, mode="ALL")) > 1:
70 -
            meshed = True
71 -
            break
72 81
82 +
    meshed = _igraph_meshed(g)
83 +
        
73 84
    roots = [pp_junction_mapping[s] for s in net.ext_grid.junction.values]
74 85
    return g, meshed, roots  # g, (not g.is_dag())
75 86
76 87
77 -
def create_generic_coordinates(net, mg=None, library="igraph"):
88 +
def create_generic_coordinates(net, mg=None, library="igraph",
89 +
                               geodata_table="junction_geodata",
90 +
                               junctions=None,
91 +
                               overwrite=False):
78 92
    """
79 93
    This function will add arbitrary geo-coordinates for all junctions based on an analysis of
80 94
    branches and rings. It will remove out of service junctions/pipes from the net. The coordinates
@@ -95,22 +109,13 @@
Loading
95 109
        The networkx implementation is currently not working, as a proper networkx graph creation\
96 110
        is not yet implemented in pandapipes. **Coming soon!**
97 111
    """
98 -
    if "junction_geodata" in net and net.junction_geodata.shape[0]:
99 -
        logger.warning("Please delete all geodata. This function cannot be used with pre-existing "
100 -
                       "geodata.")
101 -
        return
102 -
    if "junction_geodata" not in net or net.junction_geodata is None:
103 -
        net.junction_geodata = pd.DataFrame(columns=["x", "y"])
104 -
105 -
    gnet = copy.deepcopy(net)
106 -
    gnet.junction = gnet.junction[gnet.junction.in_service]
107 -
    gnet.pipe = gnet.pipe[gnet.pipe.in_service]
112 +
    _prepare_geodata_table(net, geodata_table, overwrite)
108 113
109 114
    if library == "igraph":
110 115
        if not IGRAPH_INSTALLED:
111 116
            raise UserWarning("The library igraph is selected for plotting, but not installed "
112 117
                              "correctly.")
113 -
        graph, meshed, roots = build_igraph_from_ppipes(gnet)
118 +
        graph, meshed, roots = build_igraph_from_ppipes(net)
114 119
        coords = coords_from_igraph(graph, roots, meshed)
115 120
    elif library == "networkx":
116 121
        logger.warning("The networkx implementation is not working currently!")
@@ -123,7 +128,8 @@
Loading
123 128
    else:
124 129
        raise ValueError("Unknown library %s - chose 'igraph' or 'networkx'" % library)
125 130
126 -
    net.junction_geodata.x = coords[1]
127 -
    net.junction_geodata.y = coords[0]
128 -
    net.junction_geodata.index = gnet.junction.index
131 +
    if len(coords):
132 +
        net[geodata_table].x = coords[1]
133 +
        net[geodata_table].y = coords[0]
134 +
        net[geodata_table].index = net.junction.index if junctions is None else junctions
129 135
    return net

@@ -50,4 +50,29 @@
Loading
50 50
    for i in range(len(val)):
51 51
        val[i] = val[i][order]
52 52
53 -
    return _sum_by_group_sorted(indices, *val)

@@ -81,7 +81,7 @@
Loading
81 81
82 82
    @classmethod
83 83
    def get_result_table(cls, net):
84 -
        """
84 +
        """Get results.
85 85
86 86
        :param net: The pandapipes network
87 87
        :type net: pandapipesNet

@@ -41,3 +41,4 @@
Loading
41 41
        "pump": pump_size * mean_distance_between_junctions * 8
42 42
    }
43 43
    return sizes
44 +

@@ -56,6 +56,9 @@
Loading
56 56
    :return:
57 57
    :rtype:
58 58
    """
59 +
    if not int(degree) == degree:
60 +
        raise UserWarning("The polynomial degree has to be an integer, but %s was given. "
61 +
                          "It will be rounded down now." % str(degree))
59 62
    z = np.polyfit(v_values, p_values, degree)
60 63
    reg_par = z
61 64
    return reg_par

@@ -7,14 +7,13 @@
Loading
7 7
from functools import partial
8 8
from inspect import isclass
9 9
10 -
import numpy
11 -
import pandas as pd
12 -
from networkx.readwrite import json_graph
10 +
import pandapower as pp
13 11
from pandapipes.component_models.abstract_models import Component
12 +
from pandapipes.create import create_empty_network as create_fluid_network
14 13
from pandapipes.pandapipes_net import pandapipesNet
14 +
from pandapower.io_utils import pp_hook
15 15
from pandapower.io_utils import with_signature, to_serializable, JSONSerializableClass, \
16 -
    isinstance_partial as ppow_isinstance
17 -
from pandapipes.create import create_empty_network as create_fluid_network
16 +
    isinstance_partial as ppow_isinstance, FromSerializableRegistry, PPJSONDecoder
18 17
19 18
try:
20 19
    import pplog as logging
@@ -45,91 +44,57 @@
Loading
45 44
    return ppow_isinstance(obj, cls)
46 45
47 46
48 -
class PPJSONDecoder(json.JSONDecoder):
49 -
    def __init__(self, **kwargs):
50 -
        net = create_fluid_network(add_stdtypes=False)
51 -
        super_kwargs = {"object_hook": partial(ppipes_hook, net=net)}
52 -
        super_kwargs.update(kwargs)
53 -
        super().__init__(**super_kwargs)
54 -
55 -
56 -
def ppipes_hook(d, net=None):
57 -
    if '_module' in d and '_class' in d:
58 -
        if "_object" in d:
59 -
            obj = d.pop('_object')
60 -
        elif "_state" in d:
61 -
            obj = d['_state']
62 -
            if d['has_net']:
63 -
                obj['net'] = 'net'
64 -
            if '_init' in obj:
65 -
                del obj['_init']
66 -
            return obj  # backwards compatibility
47 +
class FromSerializableRegistryPpipe(FromSerializableRegistry):
48 +
    from_serializable = FromSerializableRegistry.from_serializable
49 +
    class_name = ''
50 +
    module_name = ''
51 +
52 +
    def __init__(self, obj, d, net, ppipes_hook):
53 +
        super().__init__(obj, d, net, ppipes_hook)
54 +
55 +
    @from_serializable.register(class_name='pandapowerNet', module_name='pandapower.auxiliary')
56 +
    def pandapowerNet(self):
57 +
        if isinstance(self.obj, str):  # backwards compatibility
58 +
            from pandapower import from_json_string
59 +
            return from_json_string(self.obj)
67 60
        else:
68 -
            obj = {key: val for key, val in d.items() if key not in ['_module', '_class']}
69 -
        class_name = d.pop('_class')
70 -
        module_name = d.pop('_module')
71 -
72 -
        if class_name == 'Series':
73 -
            return pd.read_json(obj, precise_float=True, **d)
74 -
        elif class_name == "DataFrame":
75 -
            df = pd.read_json(obj, precise_float=True, **d)
76 -
            try:
77 -
                df.set_index(df.index.astype(numpy.int64), inplace=True)
78 -
            except (ValueError, TypeError, AttributeError):
79 -
                logger.debug("failed setting int64 index")
80 -
            # recreate jsoned objects
81 -
            for col in ('object', 'controller'):  # "controller" for backwards compatibility
82 -
                if col in df.columns:
83 -
                    df[col] = df[col].apply(ppipes_hook, args=(net,))
84 -
            return df
85 -
        elif GEOPANDAS_INSTALLED and class_name == 'GeoDataFrame':
86 -
            df = geopandas.GeoDataFrame.from_features(fiona.Collection(obj), crs=d['crs'])
87 -
            if "id" in df:
88 -
                df.set_index(df['id'].values.astype(numpy.int64), inplace=True)
89 -
            # coords column is not handled properly when using from_features
90 -
            if 'coords' in df:
91 -
                # df['coords'] = df.coords.apply(json.loads)
92 -
                valid_coords = ~pd.isnull(df.coords)
93 -
                df.loc[valid_coords, 'coords'] = df.loc[valid_coords, "coords"].apply(json.loads)
94 -
            df = df.reindex(columns=d['columns'])
95 -
            return df
96 -
        elif SHAPELY_INSTALLED and module_name == "shapely":
97 -
            return shapely.geometry.shape(obj)
98 -
        elif class_name == "pandapipesNet":
99 -
            net = create_fluid_network(add_stdtypes=False)
100 -
            net.update(obj)
61 +
            net = pp.create_empty_network()
62 +
            net.update(self.obj)
101 63
            return net
102 -
        elif class_name == "pandapowerNet":
103 -
            if isinstance(obj, str):  # backwards compatibility
104 -
                from pandapower import from_json_string
105 -
                return from_json_string(obj)
106 -
            else:
107 -
                # net = create_empty_network()
108 -
                net.update(obj)
109 -
                return net
110 -
        elif module_name == "networkx":
111 -
            return json_graph.adjacency_graph(obj, attrs={'id': 'json_id', 'key': 'json_key'})
64 +
65 +
    @from_serializable.register(class_name="method")
66 +
    def method(self):
67 +
        module = importlib.import_module(self.module_name)
68 +
        logger.warning('Deserializing of method not tested. This might fail...')
69 +
        func = getattr(module, self.obj)
70 +
        # class_ = getattr(module, obj) # doesn't work
71 +
        return func
72 +
73 +
    @from_serializable.register(class_name='pandapipesNet', module_name='pandapipes.pandapipes_net')
74 +
    def pandapipesNet(self):
75 +
        if isinstance(self.obj, str):  # backwards compatibility
76 +
            from pandapipes import from_json_string
77 +
            return from_json_string(self.obj)
112 78
        else:
113 -
            module = importlib.import_module(module_name)
114 -
            if class_name == "method":
115 -
                logger.warning('Deserializing of method not tested. This might fail...')
116 -
                func = getattr(module, obj)  # doesn't always work
117 -
                return func
118 -
            elif class_name == "function":
119 -
                class_ = getattr(module, obj)  # works
120 -
                return class_
121 -
            class_ = getattr(module, class_name)
122 -
            if isclass(class_) and issubclass(class_, JSONSerializableClass):
123 -
                if isinstance(obj, str):
124 -
                    obj = json.loads(obj, cls=PPJSONDecoder)  # backwards compatibility
125 -
                return class_.from_dict(obj, net)
126 -
            if isclass(class_) and issubclass(class_, Component):
127 -
                return class_
128 -
            else:
129 -
                # for non-pp objects, e.g. tuple
130 -
                return class_(obj, **d)
131 -
    else:
132 -
        return d
79 +
            self.net.update(self.obj)
80 +
            return self.net
81 +
82 +
    @from_serializable.register()
83 +
    def rest(self):
84 +
        module = importlib.import_module(self.module_name)
85 +
        class_ = getattr(module, self.class_name)
86 +
        if isclass(class_) and issubclass(class_, JSONSerializableClass):
87 +
            if isinstance(self.obj, str):
88 +
                self.obj = json.loads(self.obj, cls=PPJSONDecoder,
89 +
                                      object_hook=partial(pp_hook, net=self.net,
90 +
                                                          registry_class=FromSerializableRegistryPpipe))
91 +
                                                          # backwards compatibility
92 +
            return class_.from_dict(self.obj, self.net)
93 +
        if isclass(class_) and issubclass(class_, Component):
94 +
            return class_
95 +
        else:
96 +
            # for non-pp objects, e.g. tuple
97 +
            return class_(self.obj, **self.d)
133 98
134 99
135 100
@to_serializable.register(pandapipesNet)

@@ -55,12 +55,11 @@
Loading
55 55
    >>> print vrange(starts, lengths)
56 56
    """
57 57
    # Repeat start position index length times and concatenate
58 -
    cat_start = np.repeat(starts, lengths)
58 +
    starting_array = np.repeat(starts, lengths)
59 59
    # Create group counter that resets for each start/length
60 -
    cat_counter = np.arange(lengths.sum()) - np.repeat(lengths.cumsum() - lengths, lengths)
60 +
    length_ranges = np.arange(lengths.sum()) - np.repeat(lengths.cumsum() - lengths, lengths)
61 61
    # Add group counter to group specific starts
62 -
    cat_range = cat_start + cat_counter
63 -
    return cat_range
62 +
    return starting_array + length_ranges
64 63
65 64
66 65
def init_results_element(net, element, output, all_float):

@@ -16,9 +16,13 @@
Loading
16 16
    GEOPANDAS_INSTALLED = False
17 17
18 18
from pandapipes.pandapipes_net import pandapipesNet
19 -
from pandapower.io_utils import PPJSONEncoder, PPJSONDecoder, to_dict_with_coord_transform, \
20 -
    get_raw_data_from_pickle, transform_net_with_df_and_geo
21 -
from pandapipes.io.io_utils import ppipes_hook, isinstance_partial
19 +
from pandapower.io_utils import PPJSONEncoder, to_dict_with_coord_transform, \
20 +
    get_raw_data_from_pickle, transform_net_with_df_and_geo, PPJSONDecoder
21 +
from pandapipes.io.io_utils import isinstance_partial, FromSerializableRegistryPpipe
22 +
from pandapower.io_utils import pp_hook
23 +
from pandapipes.create import create_empty_network
24 +
from functools import partial
25 +
from pandapipes.io.convert_format import convert_format
22 26
23 27
24 28
def to_pickle(net, filename):
@@ -94,7 +98,7 @@
Loading
94 98
    return net
95 99
96 100
97 -
def from_json(filename):
101 +
def from_json(filename, convert=True):
98 102
    """
99 103
    Load a pandapipes network from a JSON file or string.
100 104
    The index of the returned network is not necessarily in the same order as the original network.
@@ -111,16 +115,16 @@
Loading
111 115
112 116
    """
113 117
    if hasattr(filename, 'read'):
114 -
        net = json.load(filename, cls=PPJSONDecoder, object_hook=ppipes_hook)
118 +
        json_string = filename.read()
115 119
    elif not os.path.isfile(filename):
116 120
        raise UserWarning("File {} does not exist!!".format(filename))
117 121
    else:
118 122
        with open(filename) as fp:
119 -
            net = json.load(fp, cls=PPJSONDecoder, object_hook=ppipes_hook)
120 -
    return net
123 +
            json_string = fp.read()
124 +
    return from_json_string(json_string, convert=convert)
121 125
122 126
123 -
def from_json_string(json_string):
127 +
def from_json_string(json_string, convert=False):
124 128
    """
125 129
    Load a pandapipes network from a JSON string.
126 130
    The index of the returned network is not necessarily in the same order as the original network.
@@ -136,5 +140,10 @@
Loading
136 140
        >>> net = pandapipes.from_json_string(json_str)
137 141
138 142
    """
139 -
    net = json.loads(json_string, cls=PPJSONDecoder, object_hook=ppipes_hook)
143 +
    net = create_empty_network()
144 +
    net = json.loads(json_string, cls=PPJSONDecoder, object_hook=partial(pp_hook, net=net,
145 +
                                                                         registry_class=FromSerializableRegistryPpipe))
146 +
147 +
    if convert:
148 +
        convert_format(net)
140 149
    return net

@@ -76,14 +76,10 @@
Loading
76 76
        numerator = NORMAL_PRESSURE * pump_pit[:, TINIT]
77 77
        v_mps = pump_pit[:, VINIT]
78 78
        if fluid.is_gas:
79 -
            mask = p_from != p_to
80 -
            p_mean = np.empty_like(p_to)
81 -
            p_mean[~mask] = p_from[~mask]
82 -
            p_mean[mask] = 2 / 3 * (p_from[mask] ** 3 - p_to[mask] ** 3) \
83 -
                           / (p_from[mask] ** 2 - p_to[mask] ** 2)
84 -
            normfactor_mean = numerator * fluid.get_property("compressibility", p_mean) \
85 -
                              / (p_mean * NORMAL_TEMPERATURE)
86 -
            v_mean = v_mps * normfactor_mean
79 +
            # consider volume flow at inlet
80 +
            normfactor_from = numerator * fluid.get_property("compressibility", p_from) \
81 +
                              / (p_from * NORMAL_TEMPERATURE)
82 +
            v_mean = v_mps * normfactor_from
87 83
        else:
88 84
            v_mean = v_mps
89 85
        vol = v_mean * area

@@ -2,4 +2,4 @@
Loading
2 2
# and Energy System Technology (IEE), Kassel. All rights reserved.
3 3
# Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
4 4
5 -
from pandapipes.control.run_control import run_control_ppipe
5 +
from pandapipes.control.run_control import run_control

@@ -2,7 +2,7 @@
Loading
2 2
# and Energy System Technology (IEE), Kassel. All rights reserved.
3 3
# Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
4 4
5 -
__version__ = '0.1.2'
5 +
__version__ = '0.2.0'
6 6
7 7
import pandas as pd
8 8
import os

@@ -0,0 +1,47 @@
Loading
1 +
# -*- coding: utf-8 -*-
2 +
3 +
# Copyright (c) 2016-2020 by University of Kassel and Fraunhofer Institute for Energy Economics
4 +
# and Energy System Technology (IEE), Kassel. All rights reserved.
5 +
6 +
from packaging import version
7 +
8 +
from pandapipes import __version__
9 +
10 +
try:
11 +
    import pplog as logging
12 +
except ImportError:
13 +
    import logging
14 +
15 +
logger = logging.getLogger(__name__)
16 +
17 +
18 +
def convert_format(net):
19 +
    """
20 +
    Converts old nets to new format to ensure consistency. The converted net is returned.
21 +
    """
22 +
    if isinstance(net.version, str) and version.parse(net.version) >= version.parse(__version__):
23 +
        return net
24 +
    _rename_columns(net)
25 +
    _update_initial_run(net)
26 +
    net.version = __version__
27 +
    return net
28 +
29 +
30 +
def _rename_columns(net):
31 +
    if "controller" in net:
32 +
        if ("controller" in net.controller) and ("object" in net.controller):
33 +
            if net['controller'].at[0, 'object'] is None:
34 +
                net['controller'].drop('object', inplace=True, axis=1)
35 +
            else:
36 +
                net['controller'].drop('controller', inplace=True, axis=1)
37 +
        net["controller"].rename(columns={"controller": "object"}, inplace=True)
38 +
39 +
40 +
def _update_initial_run(net):
41 +
    if "controller" in net:
42 +
        for ctrl in net.controller.object.values:
43 +
            if hasattr(ctrl, 'initial_pipeflow'):
44 +
                logger.warning(
45 +
                    "initial_pipeflow is deprecated, but it is still an attribute in your controllers. "
46 +
                    "It will be removed in the future. Please use initial_run instead!")
47 +
                ctrl.initial_run = ctrl.initial_pipeflow

@@ -0,0 +1,2 @@
Loading
1 +
from pandapipes.topology.create_graph import *
2 +
from pandapower.topology.graph_searches import connected_component, connected_components

@@ -2,5 +2,5 @@
Loading
2 2
# and Energy System Technology (IEE), Kassel. All rights reserved.
3 3
# Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
4 4
5 -
from pandapipes.timeseries.run_time_series import run_timeseries_ppipe
6 -
from pandapower.timeseries.output_writer import OutputWriter
5 +
from pandapipes.timeseries.run_time_series import run_timeseries
6 +
from pandapipes.timeseries.run_time_series import init_default_outputwriter

@@ -6,7 +6,8 @@
Loading
6 6
import pandas as pd
7 7
from pandapipes.component_models.auxiliaries.component_toolbox import add_new_component
8 8
from pandapipes.pandapipes_net import pandapipesNet, get_default_pandapipes_structure
9 -
from pandapipes.properties import call_lib, add_fluid_to_net
9 +
from pandapipes.properties import call_lib
10 +
from pandapipes.properties.fluids import _add_fluid_to_net
10 11
from pandapower.auxiliary import get_free_id, _preserve_dtypes
11 12
from pandapipes.properties.fluids import Fluid
12 13
from pandapipes.std_types.std_type import PumpStdType, add_basic_std_types, add_pump_std_type, \
@@ -81,7 +82,7 @@
Loading
81 82
    :type height_m: float, default 0
82 83
    :param name: The name for this junction
83 84
    :type name: string, default None
84 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
85 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
85 86
            highest already existing index is selected.
86 87
    :type index: int, default None
87 88
    :param in_service: True for in_service or False for out of service
@@ -143,7 +144,7 @@
Loading
143 144
    :type scaling: float, default 1
144 145
    :param name: A name tag for this sink
145 146
    :type name: str, default None
146 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
147 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
147 148
            highest already existing index is selected.
148 149
    :type index: int, default None
149 150
    :param in_service: True for in service, False for out of service
@@ -201,7 +202,7 @@
Loading
201 202
    :type scaling: float, default 1
202 203
    :param name: A name tag for this source
203 204
    :type name: str, default None
204 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
205 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
205 206
            highest already existing index is selected.
206 207
    :type index: int, default None
207 208
    :param in_service: True for in service, False for out of service
@@ -263,9 +264,9 @@
Loading
263 264
    :type name: str, default None
264 265
    :param in_service: True for in service, False for out of service
265 266
    :type in_service: bool, default True
266 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
267 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
267 268
            highest already existing index is selected.
268 -
    :param type: The external grid type denotes the values that are fixed at the respective node: \n
269 +
    :param type: The external grid type denotes the values that are fixed at the respective node:\n
269 270
            - "p": The pressure is fixed, the node acts as a slack node for the mass flow.
270 271
            - "t": The temperature is fixed and will not be solved for, but is assumed as the node's mix temperature. Please note that pandapipes cannot check for inconsistencies in the formulation of heat transfer equations yet. \n
271 272
            - "pt": The external grid shows both "p" and "t" behavior.
@@ -310,10 +311,10 @@
Loading
310 311
311 312
    :param net: The net within this heat exchanger should be created
312 313
    :type net: pandapipesNet
313 -
    :param from_junction: ID of the junction on one side which the heat exchanger  will be \
314 +
    :param from_junction: ID of the junction on one side which the heat exchanger will be\
314 315
            connected with
315 316
    :type from_junction: int
316 -
    :param to_junction: ID of the junction on the other side which the heat exchanger  will be \
317 +
    :param to_junction: ID of the junction on the other side which the heat exchanger will be\
317 318
            connected with
318 319
    :type to_junction: int
319 320
    :param diameter_m: The heat exchanger inner diameter in [m]
@@ -324,7 +325,7 @@
Loading
324 325
    :type loss_coefficient: float
325 326
    :param name: The name of the heat exchanger
326 327
    :type name: str, default None
327 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
328 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
328 329
            highest already existing index is selected.
329 330
    :type index: int, default None
330 331
    :param in_service: True for in_service or False for out of service
@@ -380,7 +381,7 @@
Loading
380 381
    :type net: pandapipesNet
381 382
    :param from_junction: ID of the junction on one side which the pipe will be connected to
382 383
    :type from_junction: int
383 -
    :param to_junction: ID of the junction on the other side which the pipe will be connected to
384 +
    :param to_junction: ID of the junction on the other side to which the pipe will be connected
384 385
    :type to_junction: int
385 386
    :param std_type: Name of standard type
386 387
    :type std_type: str
@@ -395,23 +396,23 @@
Loading
395 396
    :type sections: int, default 1
396 397
    :param alpha_w_per_m2k: Heat transfer coefficient in [W/(m^2*K)]
397 398
    :type alpha_w_per_m2k: float, default 0
399 +
    :param text_k: Ambient temperature of pipe in [K]
400 +
    :type text_k: float, default 293
401 +
    :param qext_w: External heat feed-in to the pipe in [W]
402 +
    :type qext_w: float, default 0
398 403
    :param name: A name tag for this pipe
399 404
    :type name: str, default None
400 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
405 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
401 406
            highest already existing index is selected.
402 407
    :type index: int, default None
403 -
    :param geodata: The coordinates of the pipe. The first row should be the coordinates of \
408 +
    :param geodata: The coordinates of the pipe. The first row should be the coordinates of\
404 409
            junction a and the last should be the coordinates of junction b. The points in the\
405 410
            middle represent the bending points of the pipe.
406 -
    :type geodata: array, shape = (,2L), default None
411 +
    :type geodata: array, shape=(,2L), default None
407 412
    :param in_service: True for in service, False for out of service
408 413
    :type in_service: bool, default True
409 414
    :param type: An identifier for special types of pipes (e.g. below or above ground)
410 415
    :type type: str, default "pipe"
411 -
    :param qext_w: External heat feed-in to the pipe in [W]
412 -
    :type qext_w: float, default 0
413 -
    :param text_k: Ambient temperature of pipe in [K]
414 -
    :type text_k: float, default 293
415 416
    :param kwargs: Additional keyword arguments will be added as further columns to the\
416 417
            net["pipe"] table
417 418
    :return: index - The unique ID of the created element
@@ -477,27 +478,31 @@
Loading
477 478
    :type net: pandapipesNet
478 479
    :param from_junction: ID of the junction on one side which the pipe will be connected with
479 480
    :type from_junction: int
480 -
    :param to_junction: ID of the junction on the other side which the pipe will be connected with
481 +
    :param to_junction: ID of the junction on the other side to which the pipe will be connected
481 482
    :type to_junction: int
482 483
    :param length_km: Length of the pipe in [km]
483 484
    :type length_km: float
484 -
    :param diameter_m: The pipe diameter im [m]
485 +
    :param diameter_m: The pipe diameter in [m]
485 486
    :type diameter_m: float
486 487
    :param k_mm: Pipe roughness in [mm]
487 488
    :type k_mm: float, default 1
488 489
    :param loss_coefficient: An additional pressure loss coefficient, introduced by e.g. bends
489 490
    :type loss_coefficient: float, default 0
490 -
    :param alpha_w_per_m2k: Heat transfer coefficient in [W/(m^2*K)]
491 -
    :type alpha_w_per_m2k: float, default 0
492 491
    :param sections: The number of internal pipe sections. Important for gas and temperature\
493 492
            calculations, where variables are dependent on pipe length.
494 493
    :type sections: int, default 1
494 +
    :param alpha_w_per_m2k: Heat transfer coefficient in [W/(m^2*K)]
495 +
    :type alpha_w_per_m2k: float, default 0
496 +
    :param qext_w: external heat feed-in to the pipe in [W]
497 +
    :type qext_w: float, default 0
498 +
    :param text_k: Ambient temperature of pipe in [K]
499 +
    :type text_k: float, default 293
495 500
    :param name: A name tag for this pipe
496 501
    :type name: str, default None
497 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
502 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
498 503
            highest already existing index is selected.
499 504
    :type index: int, default None
500 -
    :param geodata: The coordinates of the pipe. The first row should be the coordinates of \
505 +
    :param geodata: The coordinates of the pipe. The first row should be the coordinates of\
501 506
            junction a and the last should be the coordinates of junction b. The points in the\
502 507
            middle represent the bending points of the pipe
503 508
    :type geodata: array, shape= (,2L), default None
@@ -505,10 +510,6 @@
Loading
505 510
    :type in_service: bool, default True
506 511
    :param type: An identifier for special types of pipes (e.g. below or above ground)
507 512
    :type type: str, default "pipe"
508 -
    :param qext_w: external heat feed-in to the pipe in [W]
509 -
    :type qext_w: float, default 0
510 -
    :param text_k: Ambient temperature of pipe in [K]
511 -
    :type text_k: float, default 293
512 513
    :param kwargs: Additional keyword arguments will be added as further columns to the\
513 514
            net["pipe"] table
514 515
    :return: index - The unique ID of the created element
@@ -578,7 +579,7 @@
Loading
578 579
    :type loss_coefficient: float, default 0
579 580
    :param name: A name tag for this valve
580 581
    :type name: str, default None
581 -
    :param index: Force a specified ID if it is available. If None, the index one higher than the \
582 +
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
582 583
            highest already existing index is selected.
583 584
    :type index: int, default None
584 585
    :param type: An identifier for special types of valves
@@ -633,8 +634,8 @@
Loading
633 634
    :type from_junction: int
634 635
    :param to_junction: ID of the junction on the other side which the pump will be connected with
635 636
    :type to_junction: int
636 -
    :param std_type: There are currently three different std_types. This std_types are P1, P2, P3. \
637 -
            Each of them describes a specific pump behaviour setting volume flow and pressure in \
637 +
    :param std_type: There are currently three different std_types. These std_types are P1, P2, P3.\
638 +
            Each of them describes a specific pump behaviour setting volume flow and pressure in\
638 639
            context.
639 640
    :type std_type: string, default None
640 641
    :param name: A name tag for this pump
@@ -688,9 +689,9 @@
Loading
688 689
    return index
689 690
690 691
691 -
def create_pump_from_parameters(net, from_junction, to_junction, pump_name, pressure_list=None,
692 -
                                flowrate_list=None, regression_degree=None,
693 -
                                regression_parameters=None, name=None, index=None, in_service=True,
692 +
def create_pump_from_parameters(net, from_junction, to_junction, new_std_type_name, pressure_list=None,
693 +
                                flowrate_list=None, reg_polynomial_degree=None,
694 +
                                poly_coefficents=None, name=None, index=None, in_service=True,
694 695
                                type="pump", **kwargs):
695 696
    """
696 697
    Adds one pump in table net["pump"].
@@ -701,29 +702,31 @@
Loading
701 702
    :type from_junction: int
702 703
    :param to_junction: ID of the junction on the other side which the pump will be connected with
703 704
    :type to_junction: int
704 -
    :param pump_name: Set a name for your pump. You will find your definied pump under std_type in\
705 -
            your net. The name will be given under std_type in net.pump.
706 -
    :type pump_name: string
707 -
    :param pressure_list: This list contains measured pressure supporting points required \
708 -
            to define and determine the dependencies of the pump between pressure and volume flow. \
709 -
            The pressure must be given in [bar]. Needs to be defined only if no pump of standard \
705 +
    :param new_std_type_name: Set a name for your pump. You will find your defined pump under
706 +
            std_type in your net. The name will be given under std_type in net.pump.
707 +
    :type new_std_type_name: string
708 +
    :param pressure_list: This list contains measured pressure supporting points required\
709 +
            to define and determine the dependencies of the pump between pressure and volume flow.\
710 +
            The pressure must be given in [bar]. Needs to be defined only if no pump of standard\
710 711
            type is selected.
711 712
    :type pressure_list: list, default None
712 -
    :param flowrate_list: This list contains the corresponding flowrate values to the given \
713 -
            pressure values. Thus the length must be equal to the pressure list. Needs to be \
714 -
            defined only if no pump of standard type is selected. ATTENTION: The flowrate values \
713 +
    :param flowrate_list: This list contains the corresponding flowrate values to the given\
714 +
            pressure values. Thus the length must be equal to the pressure list. Needs to be\
715 +
            defined only if no pump of standard type is selected. ATTENTION: The flowrate values\
715 716
            are given in :math:`[\\frac{m^3}{h}]`.
716 717
    :type flowrate_list: list, default None
717 -
    :param regression_degree: The regression degree must be defined if pressure and flowrate list \
718 -
            are given. It describes the degree of the regression function polynomial describing \
719 -
            the behaviour of the pump.
720 -
    :type regression_degree: int, default None
721 -
    :param regression_parameters: Alternatviely to taking measurement values \
722 -
            also the already calculated regression parameters can be given. It describes the \
723 -
            dependency between pressure and flowrate. ATTENTION: The determined parameteres must \
724 -
            be retrieved by setting flowrate given in :math:`[\\frac{m^3}{h}]` and pressure given \
725 -
            in bar in context.
726 -
    :type regression_parameters: list, default None
718 +
    :param reg_polynomial_degree: The degree of the polynomial fit must be defined if pressure\
719 +
            and flowrate list are given. The fit describes the behaviour of the pump (delta P /\
720 +
            volume flow curve).
721 +
    :type reg_polynomial_degree: int, default None
722 +
    :param poly_coefficents: Alternatively to taking measurement values and degree of polynomial
723 +
            fit, previously calculated regression parameters can also be given directly. It
724 +
            describes the dependency between pressure and flowrate.\
725 +
            ATTENTION: The determined parameters must be retrieved by setting flowrate given\
726 +
            in :math:`[\\frac{m^3}{h}]` and pressure given in bar in context. The first entry in\
727 +
            the list (c[0]) is for the polynomial of highest degree (c[0]*x**n), the last one for
728 +
            c*x**0.
729 +
    :type poly_coefficents: list, default None
727 730
    :param name: A name tag for this pump
728 731
    :type name: str, default None
729 732
    :param index: Force a specified ID if it is available. If None, the index one higher than the\
@@ -740,9 +743,9 @@
Loading
740 743
    :rtype: int
741 744
742 745
    EXAMPLE:
743 -
        >>> create_pump_from_parameters(net, 0, 1, 'pump1', pressure_list=[0,1,2,3], \
744 -
                                        flowrate_list=[0,1,2,3], regression_degree=1)
745 -
        >>> create_pump_from_parameters(net, 0, 1, 'pump2', regression_parameters=[1,0])
746 +
        >>> create_pump_from_parameters(net, 0, 1, 'pump1', pressure_list=[0,1,2,3],\
747 +
                                        flowrate_list=[0,1,2,3], reg_polynomial_degree=1)
748 +
        >>> create_pump_from_parameters(net, 0, 1, 'pump2', poly_coefficents=[1,0])
746 749
747 750
    """
748 751
    add_new_component(net, Pump)
@@ -759,16 +762,16 @@
Loading
759 762
    # store dtypes
760 763
    dtypes = net.pump.dtypes
761 764
762 -
    if pressure_list is not None and flowrate_list is not None and regression_degree is not None:
763 -
        reg_par = regression_function(pressure_list, flowrate_list, regression_degree)
764 -
        pump = PumpStdType(pump_name, reg_par)
765 -
        add_pump_std_type(net, pump_name, pump)
766 -
    elif regression_parameters is not None:
767 -
        pump = PumpStdType(pump_name, regression_parameters)
768 -
        add_pump_std_type(net, pump_name, pump)
765 +
    if pressure_list is not None and flowrate_list is not None and reg_polynomial_degree is not None:
766 +
        reg_par = regression_function(pressure_list, flowrate_list, reg_polynomial_degree)
767 +
        pump = PumpStdType(new_std_type_name, reg_par)
768 +
        add_pump_std_type(net, new_std_type_name, pump)
769 +
    elif poly_coefficents is not None:
770 +
        pump = PumpStdType(new_std_type_name, poly_coefficents)
771 +
        add_pump_std_type(net, new_std_type_name, pump)
769 772
770 773
    v = {"name": name, "from_junction": from_junction, "to_junction": to_junction,
771 -
         "std_type": pump_name, "in_service": bool(in_service), "type": type}
774 +
         "std_type": new_std_type_name, "in_service": bool(in_service), "type": type}
772 775
    v.update(kwargs)
773 776
    # and preserve dtypes
774 777
    for col, val in v.items():
@@ -803,12 +806,14 @@
Loading
803 806
    :type index: int, default None
804 807
    :param in_service: True for in_service or False for out of service
805 808
    :type in_service: bool, default True
806 -
    :param type: The pump type denotes the values that are fixed: \n
809 +
    :param type: The pump type denotes the values that are fixed:\n
807 810
            - "p": The pressure is fixed.
808 -
            - "t": The temperature is fixed and will not be solved. Please note that pandapipes cannot check for inconsistencies in the formulation of heat transfer equations yet.
811 +
            - "t": The temperature is fixed and will not be solved. Please note that pandapipes\
812 +
             cannot check for inconsistencies in the formulation of heat transfer equations yet.
809 813
            - "pt": The pump shows both "p" and "t" behavior.
810 814
    :type type: str, default "pt"
811 -
    :param kwargs: Additional keyword arguments will be added as further columns to the net["circ_pump_pressure"] table
815 +
    :param kwargs: Additional keyword arguments will be added as further columns to the\
816 +
            net["circ_pump_pressure"] table
812 817
    :type kwargs: dict
813 818
    :return: index - The unique ID of the created element
814 819
    :rtype: int
@@ -871,12 +876,14 @@
Loading
871 876
    :type index: int, default None
872 877
    :param in_service: True for in_service or False for out of service
873 878
    :type in_service: bool, default True
874 -
    :param type: The pump type denotes the values that are fixed: \n
879 +
    :param type: The pump type denotes the values that are fixed:\n
875 880
            - "p": The pressure is fixed.
876 -
            - "t": The temperature is fixed and will not be solved. Please note that pandapipes cannot check for inconsistencies in the formulation of heat transfer equations yet.
881 +
            - "t": The temperature is fixed and will not be solved. Please note that pandapipes\
882 +
             cannot check for inconsistencies in the formulation of heat transfer equations yet.
877 883
            - "pt": The pump shows both "p" and "t" behavior.
878 884
    :type type: str, default "pt"
879 -
    :param kwargs: Additional keyword arguments will be added as further columns to the net["circ_pump_mass"] table
885 +
    :param kwargs: Additional keyword arguments will be added as further columns to the\
886 +
            net["circ_pump_mass"] table
880 887
    :type kwargs: dict
881 888
    :return: index - The unique ID of the created element
882 889
    :rtype: int
@@ -916,7 +923,7 @@
Loading
916 923
def create_fluid_from_lib(net, name, overwrite=True):
917 924
    """
918 925
    Creates a fluid from library (if there is an entry) and sets net["fluid"] to this value.
919 -
    Currently existing fluids in the library are: "hgas", "lgas", "water", "air".
926 +
    Currently existing fluids in the library are: "hgas", "lgas", "hydrogen", "water", "air".
920 927
921 928
    :param net: The net for which this fluid should be created
922 929
    :type net: pandapipesNet
@@ -930,4 +937,4 @@
Loading
930 937
        >>> pp.create_fluid_from_lib(net, name="water")
931 938
932 939
    """
933 -
    add_fluid_to_net(net, call_lib(name), overwrite=overwrite)
940 +
    _add_fluid_to_net(net, call_lib(name), overwrite=overwrite)

@@ -44,7 +44,6 @@
Loading
44 44
    """
45 45
46 46
    :param method: Which results should be loaded: prandtl-colebrook or swamee-jain
47 -
48 47
    :type method: str, default "colebrook"
49 48
    :return: net - OpenModelica network converted to a pandapipes network
50 49
    :rtype: pandapipesNet

@@ -4,14 +4,11 @@
Loading
4 4
5 5
import tempfile
6 6
7 -
import numpy as np
8 -
9 -
from pandapower import LoadflowNotConverged, OPFNotConverged
10 -
from pandapower.control.run_control import ControllerNotConverged, get_controller_order
7 +
import pandapipes as ppipes
8 +
from pandapipes.pipeflow import PipeflowNotConverged
11 9
from pandapower.control.util.diagnostic import control_diagnostic
12 10
from pandapower.timeseries.output_writer import OutputWriter
13 -
import pandapipes as ppipe
14 -
from pandapipes.control.run_control import check_for_initial_pipeflow, run_control_ppipe
11 +
from pandapower.timeseries.run_time_series import init_time_series as init_time_series_pp, cleanup, run_loop
15 12
16 13
try:
17 14
    import pplog as logging
@@ -22,372 +19,107 @@
Loading
22 19
logger.setLevel(level=logging.WARNING)
23 20
24 21
25 -
def get_default_output_writer_ppipe(net, timesteps):
22 +
def init_default_outputwriter(net, time_steps, **kwargs):
26 23
    """
27 24
    Creates a default output writer for the time series calculation.
28 25
29 26
    :param net: The pandapipes format network
30 27
    :type net: pandapipesNet
31 -
    :param timesteps: timesteps to calculate as list
28 +
    :param time_steps: Time steps to calculate as list
32 29
    :type timesteps: list
33 -
    :return: output_writer - The default output_writer
34 -
    :rtype: ?
35 -
    """
36 -
37 -
    ow = OutputWriter(net, timesteps, output_path=tempfile.gettempdir(), log_variables=[])
38 -
    ow.log_variable('res_sink', 'mdot_kg_per_s')
39 -
    ow.log_variable('res_source', 'mdot_kg_per_s')
40 -
    ow.log_variable('res_ext_grid', 'mdot_kg_per_s')
41 -
    ow.log_variable('res_pipe', 'v_mean_m_per_s')
42 -
    ow.log_variable('res_junction', 'p_bar')
43 -
    ow.log_variable('res_junction', 't_k')
44 -
    return ow
45 -
46 -
47 -
def init_outputwriter_ppipe(net, time_steps, output_writer=None):
48 -
    """
49 -
    Initilizes output writer. If output_writer is None, default output_writer is created.
50 -
51 -
    :param net: The pandapipes format network
52 -
    :type net: pandapipesNet
53 -
    :param time_steps: timesteps to calculate as list
54 -
    :type time_steps: list
55 -
    :param output_writer: An output_writer
56 -
    :type output_writer: ?
57 -
    :return: output_writer - The initialized output_writer
58 -
    :rtype: ?
59 -
    """
60 -
61 -
    if output_writer is None:
62 -
        output_writer = get_default_output_writer_ppipe(net, time_steps)
63 -
        logger.info("No output writer specified. Using default which writes to: {}"
64 -
                    .format(output_writer.output_path))
65 -
    else:
66 -
        # inits output writer before time series calculation
67 -
        output_writer.time_steps = time_steps
68 -
        output_writer.init_all()
69 -
    return output_writer
70 -
71 -
72 -
def print_progress_bar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='█'):
73 -
    """
74 -
     Call in a loop to create terminal progress bar.
75 -
    the idea was mentioned in :
76 -
    https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
77 -
78 -
    :param iteration: Current iteration
79 -
    :type iteration: int
80 -
    :param total: total iterations
81 -
    :type total: int
82 -
    :param prefix: prefix string
83 -
    :type prefix: str
84 -
    :param suffix: suffix string
85 -
    :type suffix: str
86 -
    :param decimals: positive number of decimals in percent complete
87 -
    :type decimals: int
88 -
    :param length: character length of bar
89 -
    :type length: int
90 -
    :param fill: bar fill character
91 -
    :type fill: str
92 -
    :return: No output.
93 -
    """
94 -
95 -
    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
96 -
    filled_length = int(length * iteration // total)
97 -
    bar = fill * filled_length + '-' * (length - filled_length)
98 -
    # logger.info('\r%s |%s| %s%% %s' % (prefix, bar, percent, suffix))
99 -
    print('\r%s |%s| %s%% %s' % (prefix, bar, percent, suffix), end="")
100 -
    # Print New Line on Complete
101 -
    if iteration == total:
102 -
        print("\n")
103 -
104 -
105 -
def controller_not_converged(net, time_step, ts_variables):
106 -
    """
107 -
    Todo: Fill out parameters.
108 -
109 -
    :param net:
110 -
    :type net:
111 -
    :param time_step:
112 -
    :type time_step:
113 -
    :param ts_variables:
114 -
    :type ts_variables:
115 -
    :return:
116 -
    :rtype:
117 -
    """
118 -
119 -
    logger.error('ControllerNotConverged at time step %s' % time_step)
120 -
    if not ts_variables["continue_on_divergence"]:
121 -
        raise ControllerNotConverged
30 +
    :return: ow - The default output writer
31 +
    :rtype: pandapower.timeseries.output_writer.OutputWriter
32 +
    """
33 +
    output_writer = kwargs.get("output_writer", None)
34 +
    if output_writer is not None:
35 +
        # write the output_writer to net
36 +
        logger.warning("deprecated: output_writer should not be given to run_timeseries(). "
37 +
                       "This overwrites the stored one in net.output_writer.")
38 +
        net.output_writer.iat[0, 0] = output_writer
39 +
    if "output_writer" not in net or net.output_writer.iat[0, 0] is None:
40 +
        ow = OutputWriter(net, time_steps, output_path=tempfile.gettempdir(), log_variables=[])
41 +
        ow.log_variable('res_sink', 'mdot_kg_per_s')
42 +
        ow.log_variable('res_source', 'mdot_kg_per_s')
43 +
        ow.log_variable('res_ext_grid', 'mdot_kg_per_s')
44 +
        ow.log_variable('res_pipe', 'v_mean_m_per_s')
45 +
        ow.log_variable('res_junction', 'p_bar')
46 +
        ow.log_variable('res_junction', 't_k')
47 +
        logger.info("No output writer specified. Using default:")
48 +
        logger.info(ow)
122 49
123 50
124 51
def pf_not_converged(time_step, ts_variables):
125 52
    """
126 -
    Todo: Fill out parameters.
127 53
128 -
    :param time_step: time_step to be calculated
54 +
    :param time_step: Time step to be calculated
129 55
    :type time_step: int
130 -
    :param ts_variables: contains settings for controller and time series simulation. \n
131 -
                                  See init_time_series()
56 +
    :param ts_variables: Contains settings for controller and time series simulation. \n
57 +
                         See init_time_series()
132 58
    :type ts_variables: dict
133 -
    :return:
134 -
    :rtype:
59 +
    :return: No output
135 60
    """
136 61
    logger.error('PipeflowNotConverged at time step %s' % time_step)
137 62
    if not ts_variables["continue_on_divergence"]:
138 -
        raise LoadflowNotConverged
63 +
        raise PipeflowNotConverged
139 64
140 65
141 -
def run_time_step(net, time_step, ts_variables, **kwargs):
66 +
def init_time_series(net, time_steps, continue_on_divergence=False,
67 +
                     verbose=True, **kwargs):
142 68
    """
143 -
    Time Series step function
144 -
    Should be called to run the PANDAPOWER AC power flows based on time series in controllers
145 -
    (or other functions).
146 -
    **NOTE: Description refers to pandapower power flow.
69 +
    Initializes the time series calculation. Creates the dict ts_variables, which includes
70 +
    necessary variables for the time series / control function.
147 71
148 72
    :param net: The pandapipes format network
149 73
    :type net: pandapipesNet
150 -
    :param time_step: time_step to be calculated
151 -
    :type time_step: int
152 -
    :param ts_variables: contains settings for controller and time series simulation. \n
153 -
                                  See init_time_series()
154 -
    :type ts_variables: dict
155 -
    :param kwargs:
156 -
    :return: No output.
157 -
    """
158 -
159 -
    ctrl_converged = True
160 -
    pf_converged = True
161 -
    output_writer = ts_variables["output_writer"]
162 -
    # update time step for output writer
163 -
    output_writer.time_step = time_step
164 -
    # run time step function for each controller
165 -
    for levelorder in ts_variables["controller_order"]:
166 -
        for ctrl in levelorder:
167 -
            ctrl.time_step(time_step)
168 -
169 -
    try:
170 -
        # calls controller init, control steps and run function (runpp usually is called in here)
171 -
        run_control_ppipe(net, ctrl_variables=ts_variables, **kwargs)
172 -
    except ControllerNotConverged:
173 -
        ctrl_converged = False
174 -
        # If controller did not converge do some stuff
175 -
        controller_not_converged(net, time_step, ts_variables)
176 -
    except (LoadflowNotConverged, OPFNotConverged):
177 -
        # If power flow did not converge simulation aborts or continues if continue_on_divergence
178 -
        # is True
179 -
        pf_converged = False
180 -
        pf_not_converged(time_step, ts_variables)
181 -
182 -
    # save
183 -
    output_writer.save_results(time_step, pf_converged=pf_converged, ctrl_converged=ctrl_converged)
184 -
185 -
186 -
def all_controllers_recycleable(net):
187 -
    """
188 -
189 -
    :param net:
190 -
    :type net:
191 -
    :return:
192 -
    :rtype:
193 -
    """
194 -
    # checks if controller are recycleable
195 -
    recycleable = np.alltrue(net["controller"]["recycle"].values)
196 -
    if not recycleable:
197 -
        logger.warning("recycle feature not supported by some controllers in net. I have to "
198 -
                       "deactive recycle")
199 -
    return recycleable
200 -
201 -
202 -
def get_run_function(**kwargs):
203 -
    """
204 -
    Todo: Fill out parameters.
205 -
    checks if "run" is specified in kwargs and calls this function in time series loop.
206 -
    if "recycle" is in kwargs we use the TimeSeriesRunpp class.
207 -
208 -
    :param kwargs:
209 -
    :type kwargs:
210 -
    :return: run - the run function to be called (default is pp.runpp())
211 -
             recycle_class - class to recycle implementation
212 -
    :rtype:
213 -
    """
214 -
215 -
    recycle_class = None
216 -
217 -
    if "run" in kwargs:
218 -
        run = kwargs.pop("run")
219 -
    else:
220 -
        run = ppipe.pipeflow
221 -
    return run, recycle_class
222 -
223 -
224 -
def init_time_steps_ppipe(net, time_steps, **kwargs):
225 -
    """
226 -
    Todo: Fill out parameters.
227 -
228 -
    :param net: The pandapipes format network
229 -
    :type net: pandapipesNet
230 -
    :param time_steps: time_steps to calculate as list
231 -
    :type time_steps:
232 -
    :param kwargs:
233 -
    :type kwargs:
234 -
    :return:
235 -
    :rtype:
236 -
    """
237 -
    # initializes time steps if as a range
238 -
    if not (isinstance(time_steps, list) or isinstance(time_steps, range)):
239 -
        if time_steps is None and ("start_step" in kwargs and "stop_step" in kwargs):
240 -
            logger.warning("start_step and stop_step are depricated. Please use a tuple like "
241 -
                           "time_steps = (start_step, stop_step) instead or a list")
242 -
            time_steps = range(kwargs["start_step"], kwargs["stop_step"] + 1)
243 -
        elif isinstance(time_steps, tuple):
244 -
            time_steps = range(time_steps[0], time_steps[1])
245 -
        else:
246 -
            logger.warning("No time steps to calculate are specified. I'll check the datasource of"
247 -
                           " the first controller for avaiable time steps")
248 -
            max_timestep = net.controller.loc[0].controller.data_source.get_time_steps_len()
249 -
            time_steps = range(max_timestep)
250 -
    return time_steps
251 -
252 -
253 -
def init_time_series_ppipe(net, time_steps, output_writer=None, continue_on_divergence=False,
254 -
                           verbose=True, **kwargs):
255 -
    """
256 -
    Inits the time series calculation.
257 -
    Creates the dict ts_variables, which includes necessary variables for the time series / control
258 -
    function.
259 -
260 -
    :param net: The pandapipes format network
261 -
    :type net: pandapipesNet
262 -
    :param time_steps: time_steps to calculate as list or tuple (start, stop) if None, all time
263 -
                        steps from provided data source are simulated
74 +
    :param time_steps: Time steps to calculate as list or tuple (start, stop). If None, all time
75 +
                       steps from provided data source are simulated.
264 76
    :type time_steps: list or tuple
265 -
    :param output_writer: A predefined output writer. If None the a default one is created with
266 -
                            get_default_output_writer()
267 -
    :type output_writer: ?, default None
268 -
    :param continue_on_divergence: If True time series calculation continues in case of errors.
77 +
    :param continue_on_divergence: If True, time series calculation continues in case of errors.
269 78
    :type continue_on_divergence: bool, default False
270 -
    :param verbose: prints progess bar or logger debug messages
79 +
    :param verbose: Prints progress bar or logger debug messages
271 80
    :type verbose: bool, default True
272 -
    :param kwargs:
273 -
    :type kwargs:
274 -
    :return:
275 -
    :rtype:
81 +
    :param kwargs: Keyword arguments for run_control and runpp
82 +
    :type kwargs: dict
83 +
    :return: ts_variables, kwargs
84 +
    :rtype: dict, dict
276 85
    """
277 86
278 -
    time_steps = init_time_steps_ppipe(net, time_steps, **kwargs)
279 -
280 -
    ts_variables = dict()
87 +
    run = kwargs.get("run", ppipes.pipeflow)
88 +
    init_default_outputwriter(net, time_steps, **kwargs)
281 89
282 -
    output_writer = init_outputwriter_ppipe(net, time_steps, output_writer)
283 -
    level, order = get_controller_order(net)
284 -
    # use faster runpp if timeseries possible
285 -
    run, recycle_class = get_run_function(**kwargs)
90 +
    ts_variables = init_time_series_pp(net, time_steps, continue_on_divergence, verbose, run=run, **kwargs)
286 91
287 -
    # True at default. Initial power flow is calculated before each control step
288 -
    # (some controllers need inits)
289 -
    ts_variables["initial_pipeflow"] = check_for_initial_pipeflow(order)
290 -
    ts_variables["initial_powerflow"] = ts_variables["initial_pipeflow"]
291 -
    # order of controller (controllers are called in a for loop.)
292 -
    ts_variables["controller_order"] = order
293 -
    # run function to be called in run_control - default is pp.runpp, but can be runopf or whatever
294 -
    # you like
295 -
    ts_variables["run"] = run
296 -
    # recycle class function, which stores some NR variables. Only used if recycle == True
297 -
    ts_variables["recycle_class"] = recycle_class
298 -
    # output writer, which logs information during the time series simulation
299 -
    ts_variables["output_writer"] = output_writer
300 -
    # time steps to be calculated (list or range)
301 -
    ts_variables["time_steps"] = time_steps
302 -
    # If True, a diverged power flow is ignored and the next step is calculated
303 -
    ts_variables["continue_on_divergence"] = continue_on_divergence
92 +
    ts_variables["errors"] = (PipeflowNotConverged)
304 93
305 -
    if (logger.level != 10) and verbose:
306 -
        # simple progress bar
307 -
        print_progress_bar(0, len(time_steps), prefix='Progress:', suffix='Complete', length=50)
94 +
    return ts_variables
308 95
309 -
    if "recycle" in kwargs:
310 -
        kwargs.pop("recycle")
311 -
312 -
    return ts_variables, kwargs
313 -
314 -
315 -
def cleanup(ts_variables):
316 -
    """
317 96
318 -
    :param ts_variables:
319 -
    :type ts_variables:
320 -
    :return:
321 -
    :rtype:
97 +
def run_timeseries(net, time_steps=None, continue_on_divergence=False, verbose=True, **kwargs):
322 98
    """
323 -
    if ts_variables["recycle_class"] is not None:
324 -
        ts_variables["recycle_class"].cleanup()
325 -
326 -
327 -
def print_progress(i, time_step, time_steps, verbose, **kwargs):
328 -
    """
329 -
    Todo: Fill out parameters.
330 -
    :param i:
331 -
    :type i:
332 -
    :param time_step:
333 -
    :type time_step:
334 -
    :param time_steps:
335 -
    :type time_steps:
336 -
    :param verbose:
337 -
    :type verbose:
338 -
    :param kwargs:
339 -
    :type kwargs:
340 -
    """
341 -
    # simple status print in each time step.
342 -
    if (logger.level != 10) and verbose:
343 -
        len_timesteps = len(time_steps)
344 -
        print_progress_bar(i + 1, len_timesteps, prefix='Progress:', suffix='Complete', length=50)
345 -
346 -
    # print debug info
347 -
    if logger.level == logging.DEBUG and verbose:
348 -
        logger.debug("run time step %i" % time_step)
99 +
    Time Series main function
349 100
350 -
    # print luigi pipeline progress
351 -
    if "luigi_progress" in kwargs and i % 365 == 0:
352 -
        # print only every 365 time steps
353 -
        message = kwargs["luigi_progress"]["message"]
354 -
        progress = kwargs["luigi_progress"]["progress"]
355 -
        len_timesteps = len(time_steps)
356 -
        message("Progress: %d / %d" % (i, len_timesteps))
357 -
        progress_percentage = int(((i + 1) / len_timesteps) * 100)
358 -
        progress(progress_percentage)
101 +
    Execution of pipe flow calculations for a time series using controllers.
102 +
    Optionally other functions than pipeflow can be called by setting the run function in kwargs.
359 103
104 +
    .. note:: Refers to pandapower power flow.
360 105
361 -
def run_timeseries_ppipe(net, time_steps=None, output_writer=None, continue_on_divergence=False,
362 -
                         verbose=True, **kwargs):
363 -
    """
364 -
    Time Series main function
365 -
    Runs multiple PANDAPOWER AC power flows based on time series in controllers
366 -
    Optionally other functions than the pp power flow can be called by setting the run function in
367 -
    kwargs.
368 -
    **NOTE: refers to pandapower power flow.
369 106
    :param net: The pandapipes format network
370 107
    :type net: pandapipesNet
371 -
    :param time_steps: time_steps to calculate as list or tuple(start, stop) if None, all time steps
372 -
                        from provided data source are simulated
108 +
    :param time_steps: Time steps to calculate as list or tuple(start, stop). If None, all time steps
109 +
                       from provided data source are simulated.
373 110
    :type time_steps: list or tuple, default None
374 -
    :param output_writer: A predefined output writer. If None the a default one is created with
375 -
                            get_default_output_writer()
376 -
    :type output_writer: ?, default None
377 -
    :param continue_on_divergence: If True time series calculation continues in case of errors.
111 +
    :param continue_on_divergence: If True, time series calculation continues in case of errors.
378 112
    :type continue_on_divergence: bool, default False
379 -
    :param verbose: prints progress bar or if logger.level == Debug it prints debug  messages
113 +
    :param verbose: Prints progress bar or if *logger.level == Debug*, it prints debug messages
380 114
    :type verbose: bool, default True
381 115
    :param kwargs: Keyword arguments for run_control and runpp
382 -
    :return: No output.
116 +
    :type kwargs: dict
117 +
    :return: No output
383 118
    """
384 -
    ts_variables, kwargs = init_time_series_ppipe(net, time_steps, output_writer,
385 -
                                                  continue_on_divergence, verbose, **kwargs)
119 +
    ts_variables = init_time_series(net, time_steps, continue_on_divergence, verbose, **kwargs)
386 120
387 121
    control_diagnostic(net)
388 -
    for i, time_step in enumerate(ts_variables["time_steps"]):
389 -
        print_progress(i, time_step, ts_variables["time_steps"], verbose, **kwargs)
390 -
        run_time_step(net, time_step, ts_variables, **kwargs)
122 +
    run_loop(net, ts_variables, **kwargs)
391 123
392 124
    # cleanup functions after the last time step was calculated
393 125
    cleanup(ts_variables)

@@ -2,66 +2,51 @@
Loading
2 2
# and Energy System Technology (IEE), Kassel. All rights reserved.
3 3
# Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
4 4
5 -
from pandapower.control import *
5 +
from pandapower.control import run_control as run_control_pandapower, prepare_run_ctrl as prepare_run_control_pandapower
6 6
import pandapipes as ppipe
7 +
from pandapipes.pipeflow import PipeflowNotConverged
7 8
8 -
9 -
def run_control_ppipe(net, ctrl_variables=None, max_iter=30, continue_on_lf_divergence=False,
9 +
def run_control(net, ctrl_variables=None, max_iter=30, continue_on_lf_divergence=False,
10 10
                      **kwargs):
11 11
    """
12 12
    Function to run a control of the pandapipes network.
13 13
14 14
    :param net: The pandapipes network
15 15
    :type net: pandapipesNet
16 -
    :param ctrl_variables: used control variables. If None, default control variables are used.
17 -
    :type ctrl_variables: ?, default None
18 -
    :param max_iter: maximal amount of iterations
16 +
    :param ctrl_variables: Used control variables. If None, default control variables are used.
17 +
    :type ctrl_variables: dict, default None
18 +
    :param max_iter: Maximal amount of iterations
19 19
    :type max_iter: int, default 30
20 20
    :param continue_on_lf_divergence: ?
21 21
    :type continue_on_lf_divergence: bool, default False
22 -
    :param kwargs: additional key word arguments
23 -
    :return: No Output.
22 +
    :param kwargs: Additional keyword arguments
23 +
    :type kwargs: dict
24 +
    :return: No output
24 25
    """
25 26
    if ctrl_variables is None:
26 -
        ctrl_variables = ctrl_variables_ppipe_default(net)
27 -
    else:
28 -
        ctrl_variables["initial_powerflow"] = ctrl_variables["initial_pipeflow"]
29 -
    run_control(net, ctrl_variables=ctrl_variables, max_iter=max_iter,
30 -
                continue_on_lf_divergence=continue_on_lf_divergence, **kwargs)
27 +
        ctrl_variables = prepare_run_ctrl(net, None)
28 +
31 29
30 +
    run_control_pandapower(net, ctrl_variables=ctrl_variables, max_iter=max_iter,
31 +
                           continue_on_lf_divergence=continue_on_lf_divergence, **kwargs)
32 32
33 -
def ctrl_variables_ppipe_default(net):
33 +
34 +
def prepare_run_ctrl(net, ctrl_variables):
34 35
    """
35 36
    Function that defines default control variables.
36 37
37 38
    :param net: The pandapipes network
38 39
    :type net: pandapipesNet
39 40
    :return: ctrl_variables
40 -
    :rtype: ?
41 +
    :rtype: dict
41 42
    """
42 -
    ctrl_variables = dict()
43 -
    ctrl_variables["level"], ctrl_variables["controller_order"] = get_controller_order(net)
44 -
    ctrl_variables["run"] = ppipe.pipeflow
45 -
    ctrl_variables["initial_pipeflow"] = check_for_initial_pipeflow(
46 -
        ctrl_variables["controller_order"])
47 -
    ctrl_variables["initial_powerflow"] = ctrl_variables["initial_pipeflow"]
43 +
    if ctrl_variables is None:
44 +
        ctrl_variables  = prepare_run_control_pandapower(net, None)
45 +
        ctrl_variables["run"] = ppipe.pipeflow
46 +
47 +
    ctrl_variables["errors"] = (PipeflowNotConverged)
48 +
48 49
    return ctrl_variables
49 50
50 51
51 -
def check_for_initial_pipeflow(controllers):
52 -
    """
53 -
    Function checking if any of the controllers need an initial pipe flow
54 -
    If net has no controllers, an initial pipe flow is done by default.
55 -
    :param controllers:
56 -
    :type controllers:
57 -
    :return:
58 -
    :rtype:
59 -
    """
60 -
    if not len(controllers[0]):
61 -
        return True
62 52
63 -
    for order in controllers:
64 -
        for ctrl in order:
65 -
            if ctrl.initial_pipeflow:
66 -
                return True
67 -
    return False

@@ -25,6 +25,7 @@
Loading
25 25
                   "nonlinear_method": "constant", "p_scale": 1, "mode": "hydraulics",
26 26
                   "ambient_temperature": 293, "check_connectivity": True,
27 27
                   "only_update_hydraulic_matrix": False,
28 +
                   "reuse_internal_data": False,
28 29
                   "quit_on_inconsistency_connectivity": False}
29 30
30 31
@@ -169,8 +170,8 @@
Loading
169 170
    This function sets the "user_pf_options" dictionary for net. These options overrule
170 171
    net._internal_options once they are added to net. These options are used in configuration of
171 172
    load flow calculation.
172 -
    At the same time, user-defined arguments for pandapower.runpp() always have a higher priority.
173 -
    To remove user_pf_options, set "overwrite = True" and provide no additional arguments.
173 +
    At the same time, user-defined arguments for `pandapipes.pipeflow()` always have a higher
174 +
    priority. To remove user_pf_options, set "reset = True" and provide no additional arguments.
174 175
175 176
    :param net: pandapipes network for which to create user options
176 177
    :type net: pandapipesNet
@@ -286,6 +287,8 @@
Loading
286 287
    params.update(local_parameters["kwargs"])
287 288
    net["_options"].update(params)
288 289
    net["_options"]["fluid"] = get_fluid(net).name
290 +
    if not net["_options"]["only_update_hydraulic_matrix"]:
291 +
        net["_options"]["reuse_internal_data"] = False
289 292
290 293
291 294
def create_internal_results(net):

@@ -6,8 +6,8 @@
Loading
6 6
7 7
import numpy as np
8 8
from pandapipes import pp_dir
9 -
from pandapower.io_utils import JSONSerializableClass
10 9
from pandapipes.std_types.std_type_toolbox import get_data, get_p_v_values, regression_function
10 +
from pandapower.io_utils import JSONSerializableClass
11 11
12 12
try:
13 13
    import pplog as logging
@@ -51,15 +51,26 @@
Loading
51 51
52 52
    def get_pressure(self, vdot_m3_per_s):
53 53
        """
54 +
        Calculate the pressure lift based on a polynomial from a regression.
54 55
55 -
        :param vdot_m3_per_s: Volume flow rate of a fluid in [m^3/s]
56 +
        It is ensured that the pressure lift is always >= 0. For reverse flows, bypassing is
57 +
        assumed.
58 +
59 +
        :param vdot_m3_per_s: Volume flow rate of a fluid in [m^3/s]. Abs() will be applied.
56 60
        :type vdot_m3_per_s: float
57 61
        :return: This function returns the corresponding pressure to the given volume flow rate \
58 62
                in [bar]
59 63
        :rtype: float
60 64
        """
61 65
        n = np.arange(len(self.reg_par), 0, -1)
62 -
        p = sum(self.reg_par * (vdot_m3_per_s * 3600) ** (n - 1))
66 +
        # no reverse flow - for vdot < 0, assume bypassing
67 +
        if vdot_m3_per_s < 0:
68 +
            logger.debug("Reverse flow observed in a %s pump. "
69 +
                         "Bypassing without pressure change is assumed" % str(self.name))
70 +
            return 0
71 +
        # no negative pressure lift - bypassing always allowed:
72 +
        # /1 to ensure float format:
73 +
        p = max(0, sum(self.reg_par * (vdot_m3_per_s/1 * 3600) ** (n - 1)))
63 74
        return p
64 75
65 76
    @classmethod
@@ -77,6 +88,11 @@
Loading
77 88
        reg_par = regression_function(p_values, v_values, degree)
78 89
        return cls(name, reg_par)
79 90
91 +
    @classmethod
92 +
    def from_list(cls, name, p_values, v_values, degree):
93 +
        reg_par = regression_function(p_values, v_values, degree)
94 +
        return cls(name, reg_par)
95 +
80 96
81 97
def add_basic_std_types(net):
82 98
    """
Files Coverage
pandapipes 91.04%
Project Totals (62 files) 91.04%
381.1
TRAVIS_PYTHON_VERSION=3.6
TRAVIS_OS_NAME=linux
380.1
TRAVIS_PYTHON_VERSION=3.6
TRAVIS_OS_NAME=linux

No yaml found.

Create your codecov.yml to customize your Codecov experience

Sunburst
The inner-most circle is the entire project, moving away from the center are folders then, finally, a single file. The size and color of each slice is representing the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project. Proceeding with folders and finally individual files. The size and color of each slice is representing the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block is represented by the number of statements and the coverage, respectively.
Loading