# -*- coding: utf-8 -*-

# Copyright (c) 2016-2021 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import copy
import importlib
import json
import numbers
import os
import pickle
import sys
import types
import weakref
from functools import partial
from inspect import isclass, signature, _findclass
from warnings import warn

import networkx
import numpy
import pandas as pd
from networkx.readwrite import json_graph
from numpy import ndarray, generic, equal, isnan, allclose, any as anynp
from packaging import version
from pandas.testing import assert_series_equal, assert_frame_equal

from pandapower.auxiliary import pandapowerNet
from pandapower.create import create_empty_network

try:
    from functools import singledispatch
except ImportError:
    # Python 2.7
    from singledispatch import singledispatch

try:
    import fiona
    import fiona.crs
    import geopandas

    GEOPANDAS_INSTALLED = True
except ImportError:
    GEOPANDAS_INSTALLED = False

try:
    import shapely.geometry

    SHAPELY_INSTALLED = True
except (ImportError, OSError):
    SHAPELY_INSTALLED = False

try:
    import pplog as logging
except ImportError:
    import logging

logger = logging.getLogger(__name__)


def coords_to_df(value, geotype="line"):
    columns = ["x", "y", "coords"] if geotype == "bus" else ["coords"]
    geo = pd.DataFrame(columns=columns, index=value.index)
    if any(~value.coords.isnull()):
        k = max(len(v) for v in value.coords.values)
        v = numpy.empty((len(value), k * 2))
        v.fill(numpy.nan)
        for i, idx in enumerate(value.index):
            # get coords and convert them to x1, y1, x2, y2...
            coords = value.at[idx, 'coords']
            if coords is None:
                continue
            v[i, :len(coords) * 2] = numpy.array(coords).flatten()
        geo = pd.DataFrame(v, index=value.index)
        geo.columns = ["%s%i" % (w, i) for i in range(k) for w in "xy"]
    if geotype == "bus":
        geo["x"] = value["x"].values
        geo["y"] = value["y"].values
    return geo
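
# Illustrative sketch (not part of the original module): coords_to_df flattens the
# "coords" column into wide x/y columns. For a line_geodata-style frame such as
#     pd.DataFrame({"coords": [[(0.0, 0.0), (1.0, 1.0)]]})
# the result has columns x0, y0, x1, y1 holding 0.0, 0.0, 1.0, 1.0; shorter coordinate
# lists are padded with NaN up to the longest list found in the column.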


def to_dict_of_dfs(net, include_results=False, fallback_to_pickle=True, include_empty_tables=True):
    dodfs = dict()
    dtypes = []
    dodfs["parameters"] = dict()  # pd.DataFrame(columns=["parameter"])
    for item, value in net.items():
        # don't save internal variables and results (if not explicitly specified)
        if item.startswith("_") or (item.startswith("res") and not include_results):
            continue
        elif item == "std_types":
            for t in net.std_types.keys():  # which are ["line", "trafo", "trafo3w"]
                if net.std_types[t]:  # avoid empty excel sheets for std_types if empty
                    dodfs["%s_std_types" % t] = pd.DataFrame(net.std_types[t]).T
            continue
        elif item == "profiles":
            for t in net.profiles.keys():  # which could be e.g. "sgen", "gen", "load", ...
                if net.profiles[t].shape[0]:  # avoid empty excel sheets for profiles if empty
                    dodfs["%s_profiles" % t] = pd.DataFrame(net.profiles[t])
            continue
        elif item == "user_pf_options":
            if len(value) > 0:
                dodfs["user_pf_options"] = pd.DataFrame(value, index=[0])
            continue
        elif isinstance(value, (int, float, bool, str)):
            # attributes of primitive types are just stored in a DataFrame "parameters"
            dodfs["parameters"][item] = net[item]
            continue
        elif not isinstance(value, pd.DataFrame):
            logger.warning("Could not serialize net.%s" % item)
            continue

        # value is a pandas DataFrame
        if not include_empty_tables and value.empty:
            continue

        if item == "bus_geodata":
            geo = coords_to_df(value, geotype="bus")
            if GEOPANDAS_INSTALLED and isinstance(value, geopandas.GeoDataFrame):
                geo["geometry"] = [s.to_wkt() for s in net.bus_geodata.geometry.values]
            dodfs[item] = geo
        elif item == "line_geodata":
            geo = coords_to_df(value, geotype="line")
            if GEOPANDAS_INSTALLED and isinstance(value, geopandas.GeoDataFrame):
                geo["geometry"] = [s.to_wkt() for s in net.line_geodata.geometry.values]
            dodfs[item] = geo
        else:
            dodfs[item] = value
        # save dtypes
        for column, dtype in value.dtypes.iteritems():
            dtypes.append((item, column, str(dtype)))
    dodfs["dtypes"] = pd.DataFrame(dtypes, columns=["element", "column", "dtype"])
    dodfs["parameters"] = pd.DataFrame(dodfs["parameters"], index=[0])
    return dodfs


def dicts_to_pandas(json_dict):
    warn("This function is deprecated and will be removed in a future release.\r\n"
         "Please resave your grid using the current pandapower version.", DeprecationWarning)
    pd_dict = dict()
    for k in sorted(json_dict.keys()):
        if isinstance(json_dict[k], dict):
            pd_dict[k] = pd.DataFrame.from_dict(json_dict[k], orient="columns")
            if pd_dict[k].shape[0] == 0:  # skip empty dataframes
                continue
            if pd_dict[k].index[0].isdigit():
                pd_dict[k].set_index(pd_dict[k].index.astype(numpy.int64), inplace=True)
        else:
            raise UserWarning("The network is an old version or corrupt. "
                              "Try to use the old load function")
    return pd_dict


def df_to_coords(net, item, table):
    # converts a wide-format geodata dataframe back to coords in net
    num_points = len(table.columns) // 2
    net[item] = pd.DataFrame(index=table.index, columns=net[item].columns)
    if item == "bus_geodata":
        num_points -= 1
        net[item].loc[:, ['x', 'y']] = table.loc[:, ['x', 'y']]

    for i in table.index:
        coords = table.loc[i]
        # for i, coords in table.iterrows():
        coord = [(coords["x%u" % nr], coords["y%u" % nr]) for nr in range(num_points)
                 if pd.notnull(coords["x%u" % nr])]
        if len(coord):
            net[item].loc[i, "coords"] = coord


def from_dict_of_dfs(dodfs):
    net = create_empty_network()
    for c in dodfs["parameters"].columns:
        net[c] = dodfs["parameters"].at[0, c]
    for item, table in dodfs.items():
        if item in ("parameters", "dtypes"):
            continue
        elif item in ["line_geodata", "bus_geodata"]:
            df_to_coords(net, item, table)
        elif item.endswith("_std_types"):
            net["std_types"][item[:-10]] = table.T.to_dict()
            continue  # don't go into try..except
        elif item.endswith("_profiles"):
            if "profiles" not in net.keys():
                net["profiles"] = dict()
            net["profiles"][item[:-9]] = table
            continue  # don't go into try..except
        elif item == "user_pf_options":
            net['user_pf_options'] = {c: v for c, v in zip(table.columns, table.values[0])}
            continue  # don't go into try..except
        else:
            net[item] = table
        # set the index to be Int64Index
        try:
            net[item].set_index(net[item].index.astype(numpy.int64), inplace=True)
        except TypeError:
            # TypeError: if not int64 index (e.g. str)
            pass
    restore_all_dtypes(net, dodfs["dtypes"])
    return net
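
# Usage sketch (illustrative, not part of the original API): to_dict_of_dfs and
# from_dict_of_dfs round-trip a net through the dict-of-DataFrames representation that
# the Excel import/export builds on. `example_simple` is assumed to exist in
# pandapower.networks.
#     import pandapower.networks as nw
#     net = nw.example_simple()
#     dodfs = to_dict_of_dfs(net, include_results=False)
#     net2 = from_dict_of_dfs(dodfs)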


def restore_all_dtypes(net, dtypes):
    for _, v in dtypes.iterrows():
        try:
            if v["dtype"] == "object":
                c = net[v.element][v.column]
                net[v.element][v.column] = numpy.where(c.isnull(), None, c)
                # net[v.element][v.column] = net[v.element][v.column].fillna(value=None)
            net[v.element][v.column] = net[v.element][v.column].astype(v["dtype"])
        except KeyError:
            pass


def to_dict_with_coord_transform(net, point_geo_columns, line_geo_columns):
    save_net = dict()
    for key, item in net.items():
        if hasattr(item, "columns") and "geometry" in item.columns:
            # we convert shapely objects to primitive data types on a deepcopy
            item = copy.deepcopy(item)
            if key in point_geo_columns and not isinstance(item.geometry.values[0], tuple):
                item["geometry"] = item.geometry.apply(lambda x: (x.x, x.y))
            elif key in line_geo_columns and not isinstance(item.geometry.values[0], list):
                item["geometry"] = item.geometry.apply(lambda x: list(x.coords))

        save_net[key] = {"DF": item.to_dict("split"),
                         "dtypes": {col: dt for col, dt in zip(item.columns, item.dtypes)}} \
            if isinstance(item, pd.DataFrame) else item
    return save_net


def get_raw_data_from_pickle(filename):
    def read(f):
        if sys.version_info >= (3, 0):
            return pickle.load(f, encoding='latin1')
        else:
            return pickle.load(f)

    if hasattr(filename, 'read'):
        net = read(filename)
    elif not os.path.isfile(filename):
        raise UserWarning("File %s does not exist!" % filename)
    else:
        with open(filename, "rb") as f:
            net = read(f)
    return net


def transform_net_with_df_and_geo(net, point_geo_columns, line_geo_columns):
    try:
        epsg = net.gis_epsg_code
    except AttributeError:
        epsg = None

    for key, item in net.items():
        if isinstance(item, dict) and "DF" in item:
            df_dict = item["DF"]
            if "columns" in df_dict:
                # make sure the index is Int64Index
                try:
                    df_index = pd.Int64Index(df_dict['index'])
                except TypeError:
                    df_index = df_dict['index']
                if GEOPANDAS_INSTALLED and "geometry" in df_dict["columns"] \
                        and epsg is not None:
                    # convert primitive data types to shapely objects
                    if key in point_geo_columns:
                        data = {"x": [row[0] for row in df_dict["data"]],
                                "y": [row[1] for row in df_dict["data"]]}
                        geo = [shapely.geometry.Point(row[2][0], row[2][1]) for row in df_dict["data"]]
                    elif key in line_geo_columns:
                        data = {"coords": [row[0] for row in df_dict["data"]]}
                        geo = [shapely.geometry.LineString(row[1]) for row in df_dict["data"]]

                    net[key] = geopandas.GeoDataFrame(data, crs=fiona.crs.from_epsg(epsg),
                                                      geometry=geo, index=df_index)
                else:
                    net[key] = pd.DataFrame(columns=df_dict["columns"], index=df_index,
                                            data=df_dict["data"])
            else:
                net[key] = pd.DataFrame.from_dict(df_dict)
                if "columns" in item:
                    if version.parse(pd.__version__) < version.parse("0.21"):
                        net[key] = net[key].reindex_axis(item["columns"], axis=1)
                    else:
                        net[key] = net[key].reindex(item["columns"], axis=1)

            if "dtypes" in item:
                if "columns" in df_dict and "geometry" in df_dict["columns"]:
                    pass
                else:
                    try:
                        # only works with pandas 0.19 or newer
                        net[key] = net[key].astype(item["dtypes"])
                    except:
                        # works with pandas <0.19
                        for column in net[key].columns:
                            net[key][column] = net[key][column].astype(item["dtypes"][column])


def isinstance_partial(obj, cls):
    # route pandapowerNet and tuple through the encoder's default() method, so that
    # to_serializable is used for them instead of the standard dict/list encoding
    if isinstance(obj, (pandapowerNet, tuple)):
        return False
    return isinstance(obj, cls)


class PPJSONEncoder(json.JSONEncoder):
    def __init__(self, isinstance_func=isinstance_partial, **kwargs):
        super(PPJSONEncoder, self).__init__(**kwargs)
        self.isinstance_func = isinstance_func

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = json.encoder.encode_basestring_ascii
        else:
            _encoder = json.encoder.encode_basestring

        def floatstr(o, allow_nan=self.allow_nan, _repr=float.__repr__, _inf=json.encoder.INFINITY,
                     _neginf=-json.encoder.INFINITY):
            # Check for specials.  Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on the
            # internals.

            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " + repr(o))

            return text

        _iterencode = json.encoder._make_iterencode(
            markers, self.default, _encoder, self.indent, floatstr,
            self.key_separator, self.item_separator, self.sort_keys,
            self.skipkeys, _one_shot, isinstance=self.isinstance_func)
        return _iterencode(o, 0)

    def default(self, o):
        try:
            s = to_serializable(o)
        except TypeError:
            # Let the base class default method raise the TypeError
            return json.JSONEncoder.default(self, o)
        else:
            return s


class FromSerializable:
    def __init__(self):
        self.class_name = 'class_name'
        self.module_name = 'module_name'
        self.registry = {}

    def __get__(self, instance, owner):
        if instance is None:
            return self
        class_module = getattr(instance, self.class_name), getattr(instance, self.module_name)
        if class_module not in self.registry:
            _class = (class_module[0], '')
            _module = ('', class_module[1])
            if (_class in self.registry) and (_module in self.registry):
                logger.error('the saved object %s is ambiguous. There are at least two possibilities'
                             ' to decode the object' % class_module)
            elif _class in self.registry:
                class_module = _class
            elif _module in self.registry:
                class_module = _module
            else:
                class_module = ('', '')
        method = self.registry[class_module]
        return method.__get__(instance, owner)

    def register(self, class_name='', module_name=''):
        def decorator(method):
            self.registry[(class_name, module_name)] = method
            return method

        return decorator
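
# Note (added for clarity, not in the original): FromSerializable is a descriptor that
# dispatches FromSerializableRegistry.from_serializable on the (class_name, module_name)
# pair stored on the registry instance. A registration such as
#     @from_serializable.register(class_name='DataFrame', module_name='pandas.core.frame')
# matches the exact pair first; failing that, a class-only or module-only registration is
# tried, and the bare @from_serializable.register() fallback (the `rest` method below)
# handles everything else.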


class FromSerializableRegistry():
    from_serializable = FromSerializable()
    class_name = ''
    module_name = ''

    def __init__(self, obj, d, pp_hook_funct):
        self.obj = obj
        self.d = d
        self.pp_hook = pp_hook_funct

    @from_serializable.register(class_name='Series', module_name='pandas.core.series')
    def Series(self):
        return pd.read_json(self.obj, precise_float=True, **self.d)

    @from_serializable.register(class_name='DataFrame', module_name='pandas.core.frame')
    def DataFrame(self):
        df = pd.read_json(self.obj, precise_float=True, convert_axes=False, **self.d)
        try:
            df.set_index(df.index.astype(numpy.int64), inplace=True)
        except (ValueError, TypeError, AttributeError):
            logger.debug("failed setting int64 index")
        # recreate jsoned objects
        for col in ('object', 'controller'):  # "controller" for backwards compatibility
            if (col in df.columns):
                df[col] = df[col].apply(self.pp_hook)
        return df

    @from_serializable.register(class_name='pandapowerNet', module_name='pandapower.auxiliary')
    def pandapowerNet(self):
        if isinstance(self.obj, str):  # backwards compatibility
            from pandapower import from_json_string
            return from_json_string(self.obj)
        else:
            net = create_empty_network()
            net.update(self.obj)
            return net

    @from_serializable.register(class_name="MultiGraph", module_name="networkx")
    def networkx(self):
        return json_graph.adjacency_graph(self.obj, attrs={'id': 'json_id', 'key': 'json_key'})

    @from_serializable.register(class_name="method")
    def method(self):
        logger.warning('deserializing of method not implemented')
        # class_ = getattr(module, obj) # doesn't work
        return self.obj

    @from_serializable.register(class_name='function')
    def function(self):
        module = importlib.import_module(self.module_name)
        if not hasattr(module, self.obj):  # in case a function is a lambda or is not defined
            raise UserWarning('Could not find the definition of the function %s in the module %s' %
                              (self.obj, module.__name__))
        class_ = getattr(module, self.obj)  # works
        return class_

    @from_serializable.register()
    def rest(self):
        module = importlib.import_module(self.module_name)
        class_ = getattr(module, self.class_name)
        if isclass(class_) and issubclass(class_, JSONSerializableClass):
            if isinstance(self.obj, str):
                self.obj = json.loads(self.obj, cls=PPJSONDecoder,
                                      object_hook=partial(pp_hook,
                                                          registry_class=FromSerializableRegistry))
                # backwards compatibility
            if "net" in self.obj:
                del self.obj["net"]
            return class_.from_dict(self.obj)
        else:
            # for non-pp objects, e.g. tuple
            try:
                return class_(self.obj, **self.d)
            except ValueError:
                data = json.loads(self.obj)
                df = pd.DataFrame(columns=self.d["columns"])
                for d in data["features"]:
                    idx = int(d["id"])
                    for prop, val in d["properties"].items():
                        df.at[idx, prop] = val
                    # for geom, val in d["geometry"].items():
                    #     df.at[idx, geom] = val
                return df

    if GEOPANDAS_INSTALLED:
        @from_serializable.register(class_name='GeoDataFrame', module_name='geopandas.geodataframe')
        def GeoDataFrame(self):
            df = geopandas.GeoDataFrame.from_features(fiona.Collection(self.obj), crs=self.d['crs'])
            if "id" in df:
                df.set_index(df['id'].values.astype(numpy.int64), inplace=True)
            else:
                df.set_index(df.index.values.astype(numpy.int64), inplace=True)
            # coords column is not handled properly when using from_features
            if 'coords' in df:
                # df['coords'] = df.coords.apply(json.loads)
                valid_coords = ~pd.isnull(df.coords)
                df.loc[valid_coords, 'coords'] = df.loc[valid_coords, "coords"].apply(json.loads)
            df = df.reindex(columns=self.d['columns'])
            df = df.astype(self.d['dtype'])
            return df

    if SHAPELY_INSTALLED:
        @from_serializable.register(module_name='shapely')
        def shapely(self):
            return shapely.geometry.shape(self.obj)


class PPJSONDecoder(json.JSONDecoder):
    def __init__(self, **kwargs):
        # net = pandapowerNet.__new__(pandapowerNet)
#        net = create_empty_network()
        super_kwargs = {"object_hook": partial(pp_hook, registry_class=FromSerializableRegistry)}
        super_kwargs.update(kwargs)
        super().__init__(**super_kwargs)


def pp_hook(d, registry_class=FromSerializableRegistry):
    try:
        if '_module' in d and '_class' in d:
            if "_object" in d:
                obj = d.pop('_object')
            elif "_state" in d:
                obj = d['_state']
                if '_init' in obj:
                    del obj['_init']
                return obj  # backwards compatibility
            else:
                # obj = {"_init": d, "_state": dict()}  # backwards compatibility
                obj = {key: val for key, val in d.items() if key not in ['_module', '_class']}
            fs = registry_class(obj, d, pp_hook)
            fs.class_name = d.pop('_class', '')
            fs.module_name = d.pop('_module', '')
            return fs.from_serializable()
        else:
            return d
    except TypeError:
        logger.debug('Loading your grid raised a TypeError. %s raised this exception' % d)
        return d
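
# Usage sketch (illustrative, not part of the original API): PPJSONEncoder and
# PPJSONDecoder plug into the standard json module, with pp_hook rebuilding DataFrames,
# pandapowerNet objects etc. from the {"_module", "_class", "_object"} wrappers written
# by to_serializable. `example_simple` is assumed to exist in pandapower.networks.
#     import json
#     import pandapower.networks as nw
#     net = nw.example_simple()
#     s = json.dumps(net, cls=PPJSONEncoder)
#     net2 = json.loads(s, cls=PPJSONDecoder)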


def encrypt_string(s, key, compress=True):
    from cryptography.fernet import Fernet
    import hashlib
    import base64
    key_base = hashlib.sha256(key.encode())
    key = base64.urlsafe_b64encode(key_base.digest())
    cipher_suite = Fernet(key)

    s = s.encode()
    if compress:
        import zlib
        s = zlib.compress(s)
    s = cipher_suite.encrypt(s)
    s = s.decode()
    return s


def decrypt_string(s, key):
    from cryptography.fernet import Fernet
    import hashlib
    import base64
    key_base = hashlib.sha256(key.encode())
    key = base64.urlsafe_b64encode(key_base.digest())
    cipher_suite = Fernet(key)

    s = s.encode()
    s = cipher_suite.decrypt(s)
    try:
        import zlib
        s = zlib.decompress(s)
    except:
        pass
    s = s.decode()
    return s
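
# Usage sketch (illustrative, not part of the original API): both helpers derive the
# Fernet key from a SHA-256 hash of the passphrase, so an encrypted string can be
# recovered with the same passphrase.
#     token = encrypt_string("some json payload", key="my secret", compress=True)
#     assert decrypt_string(token, key="my secret") == "some json payload"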


class JSONSerializableClass(object):
    json_excludes = ["self", "__class__"]

    def __init__(self, **kwargs):
        pass

    def to_json(self):
        """
        Each controller should have this method implemented. The resulting json string should be
        readable by the controller's from_json function and by the function add_ctrl_from_json in
        control_handler.
        """
        return json.dumps(self.to_dict(), cls=PPJSONEncoder)

    def to_dict(self):
        def consider_callable(value):
            if callable(value) and value.__class__ in (types.MethodType, types.FunctionType):
                if value.__class__ == types.MethodType and _findclass(value) is not None:
                    return with_signature(value, value.__name__, obj_module=_findclass(value))
                return with_signature(value, value.__name__)
            return value

        d = {key: consider_callable(val) for key, val in self.__dict__.items()
             if key not in self.json_excludes}
        return d

    def add_to_net(self, net, element, index, column="object", overwrite=False):
        if element not in net:
            net[element] = pd.DataFrame(columns=[column])
        if index in net[element].index.values:
            obj = net[element].object.at[index]
            if overwrite or not isinstance(obj, JSONSerializableClass):
                logger.info("Updating %s with index %s" % (element, index))
            else:
                raise UserWarning("%s with index %s already exists" % (element, index))
        net[element].at[index, column] = self

    def equals(self, other):

        class UnequalityFound(Exception):
            pass

        def check_equality(obj1, obj2):
            if isinstance(obj1, (ndarray, generic)) or isinstance(obj2, (ndarray, generic)):
                unequal = True
                if equal(obj1, obj2):
                    unequal = False
                elif anynp(isnan(obj1)):
                    if allclose(obj1, obj2, atol=0, rtol=0, equal_nan=True):
                        unequal = False
                if unequal:
                    raise UnequalityFound
            elif not isinstance(obj2, type(obj1)):
                raise UnequalityFound
            elif isinstance(obj1, pandapowerNet):
                pass
            elif isinstance(obj1, pd.DataFrame):
                if len(obj1) > 0:
                    try:
                        assert_frame_equal(obj1, obj2)
                    except:
                        raise UnequalityFound
            elif isinstance(obj2, pd.Series):
                if len(obj1) > 0:
                    try:
                        assert_series_equal(obj1, obj2)
                    except:
                        raise UnequalityFound
            elif isinstance(obj1, dict):
                check_dictionary_equality(obj1, obj2)
            elif obj1 != obj1 and obj2 != obj2:
                pass
            elif callable(obj1):
                check_callable_equality(obj1, obj2)
            elif obj1 != obj2:
                try:
                    if not (isnan(obj1) and isnan(obj2)):
                        raise UnequalityFound
                except:
                    raise UnequalityFound

        def check_dictionary_equality(obj1, obj2):
            if set(obj1.keys()) != set(obj2.keys()):
                raise UnequalityFound
            for key in obj1.keys():
                if key != "_init":
                    check_equality(obj1[key], obj2[key])

        def check_callable_equality(obj1, obj2):
            if isinstance(obj1, weakref.ref) and isinstance(obj2, weakref.ref):
                return
            if str(obj1) != str(obj2):
                raise UnequalityFound

        if isinstance(other, self.__class__):
            try:
                check_equality(self.__dict__, other.__dict__)
                return True
            except UnequalityFound:
                return False
        else:
            return False

    @classmethod
    def from_dict(cls, d):
        obj = JSONSerializableClass.__new__(cls)
        obj.__dict__.update(d)
        return obj

    @classmethod
    def from_json(cls, json_string):
        d = json.loads(json_string, cls=PPJSONDecoder)
        return cls.from_dict(d)
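
# Usage sketch (illustrative; `MyController` and its `gain` attribute are made up):
# subclasses round-trip through to_json/from_json, which serialize __dict__ (minus
# json_excludes) and rebuild the instance via from_dict without calling __init__.
#     class MyController(JSONSerializableClass):
#         def __init__(self, gain=1.0):
#             super().__init__()
#             self.gain = gain
#
#     ctrl = MyController(gain=2.5)
#     ctrl2 = MyController.from_json(ctrl.to_json())
#     assert ctrl.equals(ctrl2)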


def with_signature(obj, val, obj_module=None, obj_class=None):
    if obj_module is None:
        obj_module = obj.__module__.__str__()
    if obj_class is None:
        obj_class = obj.__class__.__name__
    d = {'_module': obj_module, '_class': obj_class, '_object': val}
    if hasattr(obj, 'dtype'):
        d.update({'dtype': str(obj.dtype)})
    return d
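
# Illustrative note (not in the original): with_signature produces the wrapper dict that
# pp_hook later dispatches on. For a numpy array of dtype int64, something like
#     with_signature(numpy.array([1, 2]), [1, 2], obj_module='numpy', obj_class='array')
# returns {'_module': 'numpy', '_class': 'array', '_object': [1, 2], 'dtype': 'int64'}.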


@singledispatch
def to_serializable(obj):
    logger.debug('standard case')
    return str(obj)


@to_serializable.register(pandapowerNet)
def json_pandapowernet(obj):
    net_dict = {k: item for k, item in obj.items() if not k.startswith("_")}
    d = with_signature(obj, net_dict)
    return d


@to_serializable.register(pd.DataFrame)
def json_dataframe(obj):
    logger.debug('DataFrame')
    orient = "split"
    json_string = obj.to_json(orient=orient, default_handler=to_serializable, double_precision=15)
    d = with_signature(obj, json_string)
    d['orient'] = orient
    if len(obj.columns) > 0 and isinstance(obj.columns[0], str):
        d['dtype'] = obj.dtypes.astype('str').to_dict()
    return d


if GEOPANDAS_INSTALLED:
    @to_serializable.register(geopandas.GeoDataFrame)
    def json_geodataframe(obj):
        logger.debug('GeoDataFrame')
        d = with_signature(obj, obj.to_json())
        d.update({'dtype': obj.dtypes.astype('str').to_dict(),
                  'crs': obj.crs, 'columns': obj.columns})
        return d


@to_serializable.register(pd.Series)
def json_series(obj):
    logger.debug('Series')
    d = with_signature(obj, obj.to_json(orient='split', default_handler=to_serializable,
                                        double_precision=15))
    d.update({'dtype': str(obj.dtypes), 'orient': 'split', 'typ': 'series'})
    return d


@to_serializable.register(numpy.ndarray)
def json_array(obj):
    logger.debug("ndarray")
    d = with_signature(obj, list(obj), obj_module='numpy', obj_class='array')
    return d


@to_serializable.register(numpy.integer)
def json_npint(obj):
    logger.debug("integer")
    return int(obj)


@to_serializable.register(numpy.floating)
def json_npfloat(obj):
    logger.debug("floating")
    return float(obj)


@to_serializable.register(numbers.Number)
def json_num(obj):
    logger.debug("numbers.Number")
    return str(obj)


@to_serializable.register(complex)
def json_complex(obj):
    logger.debug("complex")
    d = with_signature(obj, str(obj), obj_module='builtins', obj_class='complex')
    d.pop('dtype')
    return d


@to_serializable.register(pd.Index)
def json_pdindex(obj):
    logger.debug("pd.Index")
    return with_signature(obj, list(obj), obj_module='pandas')


@to_serializable.register(bool)
def json_bool(obj):
    logger.debug("bool")
    return "true" if obj else "false"


@to_serializable.register(tuple)
def json_tuple(obj):
    logger.debug("tuple")
    d = with_signature(obj, list(obj), obj_module='builtins', obj_class='tuple')
    return d


@to_serializable.register(set)
def json_set(obj):
    logger.debug("set")
    d = with_signature(obj, list(obj), obj_module='builtins', obj_class='set')
    return d


@to_serializable.register(frozenset)
def json_frozenset(obj):
    logger.debug("frozenset")
    d = with_signature(obj, list(obj), obj_module='builtins', obj_class='frozenset')
    return d


@to_serializable.register(networkx.Graph)
def json_networkx(obj):
    logger.debug("nx graph")
    json_string = json_graph.adjacency_data(obj, attrs={'id': 'json_id', 'key': 'json_key'})
    d = with_signature(obj, json_string, obj_module="networkx")
    return d


@to_serializable.register(JSONSerializableClass)
def controller_to_serializable(obj):
    logger.debug('JSONSerializableClass')
    d = with_signature(obj, obj.to_json())
    return d


def mkdirs_if_not_existent(dir_to_create):
    already_exist = os.path.isdir(dir_to_create)
    os.makedirs(dir_to_create, exist_ok=True)
    return not already_exist


if SHAPELY_INSTALLED:
    @to_serializable.register(shapely.geometry.LineString)
    def json_linestring(obj):
        logger.debug("shapely linestring")
        json_string = shapely.geometry.mapping(obj)
        d = with_signature(obj, json_string, obj_module="shapely")
        return d


    @to_serializable.register(shapely.geometry.Point)
    def json_point(obj):
        logger.debug("shapely Point")
        json_string = shapely.geometry.mapping(obj)
        d = with_signature(obj, json_string, obj_module="shapely")
        return d


    @to_serializable.register(shapely.geometry.Polygon)
    def json_polygon(obj):
        logger.debug("shapely Polygon")
        json_string = shapely.geometry.mapping(obj)
        d = with_signature(obj, json_string, obj_module="shapely")
        return d

if __name__ == '__main__':
    import pandapower as pp
    net = pp.from_json(r'edis_zone_3_6.json')
