e2nIEE / pandapower

@@ -1801,7 +1801,7 @@
Loading
1801 1801
1802 1802
        **cols_to_keep** (list, None) - list of column names which should be kept while replacing
1803 1803
        ext_grids. If None these columns are kept if values exist: "max_p_mw", "min_p_mw",
1804 -
        "max_q_mvar", "min_q_mvar". However cols_to_keep is given, these columns are alway set:
1804 +
        "max_q_mvar", "min_q_mvar". However cols_to_keep is given, these columns are always set:
1805 1805
        "bus", "vm_pu", "p_mw", "name", "in_service", "controllable"
1806 1806
1807 1807
        **add_cols_to_keep** (list, None) - list of column names which should be added to
@@ -1862,13 +1862,14 @@
Loading
1862 1862
1863 1863
    # --- result data
1864 1864
    if net.res_ext_grid.shape[0]:
1865 -
        to_add = net.res_ext_grid.loc[ext_grids]
1866 -
        to_add.index = new_idx
1865 +
        in_res = pd.Series(ext_grids).isin(net["res_ext_grid"].index).values
1866 +
        to_add = net.res_ext_grid.loc[pd.Index(ext_grids)[in_res]]
1867 +
        to_add.index = pd.Index(new_idx)[in_res]
1867 1868
        if version.parse(pd.__version__) < version.parse("0.23"):
1868 1869
            net.res_gen = pd.concat([net.res_gen, to_add])
1869 1870
        else:
1870 1871
            net.res_gen = pd.concat([net.res_gen, to_add], sort=True)
1871 -
        net.res_ext_grid.drop(ext_grids, inplace=True)
1872 +
        net.res_ext_grid.drop(pd.Index(ext_grids)[in_res], inplace=True)
1872 1873
    return new_idx
1873 1874
1874 1875
@@ -1944,13 +1945,14 @@
Loading
1944 1945
1945 1946
    # --- result data
1946 1947
    if net.res_gen.shape[0]:
1947 -
        to_add = net.res_gen.loc[gens]
1948 -
        to_add.index = new_idx
1948 +
        in_res = pd.Series(gens).isin(net["res_gen"].index).values
1949 +
        to_add = net.res_gen.loc[pd.Index(gens)[in_res]]
1950 +
        to_add.index = pd.Index(new_idx)[in_res]
1949 1951
        if version.parse(pd.__version__) < version.parse("0.23"):
1950 1952
            net.res_ext_grid = pd.concat([net.res_ext_grid, to_add])
1951 1953
        else:
1952 1954
            net.res_ext_grid = pd.concat([net.res_ext_grid, to_add], sort=True)
1953 -
        net.res_gen.drop(gens, inplace=True)
1955 +
        net.res_gen.drop(pd.Index(gens)[in_res], inplace=True)
1954 1956
    return new_idx
1955 1957
1956 1958
@@ -2028,13 +2030,14 @@
Loading
2028 2030
2029 2031
    # --- result data
2030 2032
    if net.res_gen.shape[0]:
2031 -
        to_add = net.res_gen.loc[gens]
2032 -
        to_add.index = new_idx
2033 +
        in_res = pd.Series(gens).isin(net["res_gen"].index).values
2034 +
        to_add = net.res_gen.loc[pd.Index(gens)[in_res]]
2035 +
        to_add.index = pd.Index(new_idx)[in_res]
2033 2036
        if version.parse(pd.__version__) < version.parse("0.23"):
2034 2037
            net.res_sgen = pd.concat([net.res_sgen, to_add])
2035 2038
        else:
2036 2039
            net.res_sgen = pd.concat([net.res_sgen, to_add], sort=True)
2037 -
        net.res_gen.drop(gens, inplace=True)
2040 +
        net.res_gen.drop(pd.Index(gens)[in_res], inplace=True)
2038 2041
    return new_idx
2039 2042
2040 2043
@@ -2128,13 +2131,14 @@
Loading
2128 2131
2129 2132
    # --- result data
2130 2133
    if net.res_sgen.shape[0]:
2131 -
        to_add = net.res_sgen.loc[sgens]
2132 -
        to_add.index = new_idx
2134 +
        in_res = pd.Series(sgens).isin(net["res_sgen"].index).values
2135 +
        to_add = net.res_sgen.loc[pd.Index(sgens)[in_res]]
2136 +
        to_add.index = pd.Index(new_idx)[in_res]
2133 2137
        if version.parse(pd.__version__) < version.parse("0.23"):
2134 2138
            net.res_gen = pd.concat([net.res_gen, to_add])
2135 2139
        else:
2136 2140
            net.res_gen = pd.concat([net.res_gen, to_add], sort=True)
2137 -
        net.res_sgen.drop(sgens, inplace=True)
2141 +
        net.res_sgen.drop(pd.Index(sgens)[in_res], inplace=True)
2138 2142
    return new_idx
2139 2143
2140 2144
@@ -2241,13 +2245,14 @@
Loading
2241 2245
2242 2246
    # --- result data
2243 2247
    if net["res_" + old_elm].shape[0]:
2244 -
        to_add = net["res_" + old_elm].loc[old_indices]
2245 -
        to_add.index = new_idx
2248 +
        in_res = pd.Series(old_indices).isin(net["res_" + old_elm].index).values
2249 +
        to_add = net["res_" + old_elm].loc[pd.Index(old_indices)[in_res]]
2250 +
        to_add.index = pd.Index(new_idx)[in_res]
2246 2251
        if version.parse(pd.__version__) < version.parse("0.23"):
2247 2252
            net["res_" + new_elm] = pd.concat([net["res_" + new_elm], to_add])
2248 2253
        else:
2249 2254
            net["res_" + new_elm] = pd.concat([net["res_" + new_elm], to_add], sort=True)
2250 -
        net["res_" + old_elm].drop(old_indices, inplace=True)
2255 +
        net["res_" + old_elm].drop(pd.Index(old_indices)[in_res], inplace=True)
2251 2256
    return new_idx
2252 2257
2253 2258

@@ -19,19 +19,24 @@
Loading
19 19
20 20
def from_mpc(mpc_file, f_hz=50, casename_mpc_file='mpc', validate_conversion=False):
21 21
    """
22 -
    This function converts a matpower case file (.mat) version 2 to a pandapower net.
22 +
    This function converts a matpower case file version 2 to a pandapower net.
23 +
24 +
Note: The input is a .mat file, not an .m script. You need to save the mpc dict variable as a .mat
25 +
    file. If the saved variable of the matlab workspace is not named 'mpc', you can adapt the value
26 +
    of 'casename_mpc_file' as needed.
23 27
24 28
    Note: Python is 0-based while Matlab is 1-based.
25 29
26 30
    INPUT:
27 31
28 -
        **mpc_file** - path to a matpower case file (.mat).
32 +
        **mpc_file** - path to a matpower case file (.mat format not .m script).
29 33
30 34
    OPTIONAL:
31 35
32 36
        **f_hz** (int, 50) - The frequency of the network.
33 37
34 -
        **casename_mpc_file** (str, 'mpc') - The name of the variable in .mat file which contain the matpower case structure, i.e. the arrays "gen", "branch" and "bus".
38 +
        **casename_mpc_file** (str, 'mpc') - The name of the variable in the .mat file which contains
39 +
        the matpower case structure, i.e. the arrays "gen", "branch" and "bus".
35 40
36 41
    OUTPUT:
37 42

@@ -17,8 +17,11 @@
Loading
17 17
18 18
class ConstControl(Controller):
19 19
    """
20 -
    Class representing a generic time series controller for a specified element and variable
21 -
    Control strategy: "No Control" -> just updates timeseries
20 +
    Class representing a generic time series controller for a specified element and variable.
21 +
    Control strategy: "No Control" -> updates values of specified elements according to timeseries input data.
22 +
    If ConstControl is used without timeseries input data, it will reset the controlled values to the initial values,
23 +
    preserving the initial net state.
24 +
    The timeseries values are written to net during time_step before the initial powerflow run and before other controllers' control_step.
22 25
    It is possible to set attributes of objects that are contained in a net table, e.g. attributes of other controllers. This can be helpful
23 26
    e.g. if a voltage setpoint of a transformer tap changer depends on the time step.
24 27
    An attribute of an object in the "object" column of a table (e.g. net.controller["object"] -> net.controller.object.at[0, "vm_set_pu"]
@@ -141,20 +144,19 @@
Loading
141 144
    def time_step(self, net, time):
142 145
        """
143 146
        Get the values of the element from data source
147 +
        Write to pandapower net by calling write_to_net()
148 +
        If ConstControl is used without a data_source, it will reset the controlled values to the initial values,
149 +
        preserving the initial net state.
144 150
        """
145 -
        self.values = self.data_source.get_time_step_value(time_step=time,
146 -
                                                           profile_name=self.profile_name,
147 -
                                                           scale_factor=self.scale_factor)
148 -
        # self.write_to_net()
149 -
150 -
    def initialize_control(self, net):
151 -
        """
152 -
        At the beginning of each run_control call reset applied-flag
153 -
        """
154 -
        #
151 +
        self.applied = False
155 152
        if self.data_source is None:
156 153
            self.values = net[self.element][self.variable].loc[self.element_index]
157 -
        self.applied = False
154 +
        else:
155 +
            self.values = self.data_source.get_time_step_value(time_step=time,
156 +
                                                               profile_name=self.profile_name,
157 +
                                                               scale_factor=self.scale_factor)
158 +
        if self.values is not None:
159 +
            self.write_to_net(net)
158 160
159 161
    def is_converged(self, net):
160 162
        """
@@ -164,10 +166,8 @@
Loading
164 166
165 167
    def control_step(self, net):
166 168
        """
167 -
        Write to pandapower net by calling write_to_net()
169 +
        Set applied to True, which means that the values set in time_step have been included in the load flow calculation.
168 170
        """
169 -
        if self.values is not None:
170 -
            self.write_to_net(net)
171 171
        self.applied = True
172 172
173 173
    def _write_to_single_index(self, net):
Files Coverage
pandapower 87.59%
setup.py 0.00%
Project Totals (168 files) 87.52%

No yaml found.

Create your codecov.yml to customize your Codecov experience

Sunburst
The inner-most circle is the entire project, moving away from the center are folders then, finally, a single file. The size and color of each slice is representing the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project. Proceeding with folders and finally individual files. The size and color of each slice is representing the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block is represented by the number of statements and the coverage, respectively.
Loading