# -*- coding: utf-8 -*-

# Copyright (c) 2016-2021 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import copy
import importlib
import json
import numbers
import os
import pickle
import sys
import types
import weakref
from functools import partial
from inspect import isclass, signature, _findclass
from warnings import warn

import networkx
import numpy
import pandas as pd
from networkx.readwrite import json_graph
from numpy import ndarray, generic, equal, isnan, allclose, any as anynp
from packaging import version
from pandas.testing import assert_series_equal, assert_frame_equal

from pandapower.auxiliary import pandapowerNet
from pandapower.create import create_empty_network

try:
    from functools import singledispatch
except ImportError:
    # Python 2.7
    from singledispatch import singledispatch

try:
    import fiona
    import fiona.crs
    import geopandas

    GEOPANDAS_INSTALLED = True
except ImportError:
    GEOPANDAS_INSTALLED = False

try:
    import shapely.geometry

    SHAPELY_INSTALLED = True
except (ImportError, OSError):
    SHAPELY_INSTALLED = False

try:
    import pplog as logging
except ImportError:
    import logging

logger = logging.getLogger(__name__)


def coords_to_df(value, geotype="line"):
    columns = ["x", "y", "coords"] if geotype == "bus" else ["coords"]
    geo = pd.DataFrame(columns=columns, index=value.index)
    if any(~value.coords.isnull()):
        k = max(len(v) for v in value.coords.values)
        v = numpy.empty((len(value), k * 2))
        v.fill(numpy.nan)
        for i, idx in enumerate(value.index):
            # get coords and convert them to x1, y1, x2, y2...
            coords = value.at[idx, 'coords']
            if coords is None:
                continue
            v[i, :len(coords) * 2] = numpy.array(coords).flatten()
        geo = pd.DataFrame(v, index=value.index)
        geo.columns = ["%s%i" % (w, i) for i in range(k) for w in "xy"]
    if geotype == "bus":
        geo["x"] = value["x"].values
        geo["y"] = value["y"].values
    return geo
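

# Illustrative usage sketch (editorial addition, not part of the original module):
# coords_to_df() flattens the variable-length "coords" lists of a geodata table into fixed
# x0/y0, x1/y1, ... columns so they fit a flat sheet; df_to_coords() further below is the
# inverse. The toy frame stands in for net.line_geodata and is purely hypothetical.
def _example_coords_to_df():
    line_geodata = pd.DataFrame({"coords": [[(0.0, 0.0), (1.0, 2.0)],
                                            [(1.0, 2.0), (3.0, 4.0), (5.0, 6.0)]]})
    return coords_to_df(line_geodata, geotype="line")  # columns x0, y0, ..., x2, y2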


def to_dict_of_dfs(net, include_results=False, fallback_to_pickle=True, include_empty_tables=True):
    dodfs = dict()
    dtypes = []
    dodfs["parameters"] = dict()  # pd.DataFrame(columns=["parameter"])
    for item, value in net.items():
        # don't save internal variables and results (if not explicitly specified)
        if item.startswith("_") or (item.startswith("res") and not include_results):
            continue
        elif item == "std_types":
            for t in net.std_types.keys():  # which are ["line", "trafo", "trafo3w"]
                if net.std_types[t]:  # avoid empty excel sheets for std_types if empty
                    dodfs["%s_std_types" % t] = pd.DataFrame(net.std_types[t]).T
            continue
        elif item == "profiles":
            for t in net.profiles.keys():  # which could be e.g. "sgen", "gen", "load", ...
                if net.profiles[t].shape[0]:  # avoid empty excel sheets for profiles if empty
                    dodfs["%s_profiles" % t] = pd.DataFrame(net.profiles[t])
            continue
        elif item == "user_pf_options":
            if len(value) > 0:
                dodfs["user_pf_options"] = pd.DataFrame(value, index=[0])
            continue
        elif isinstance(value, (int, float, bool, str)):
            # attributes of primitive types are just stored in a DataFrame "parameters"
            dodfs["parameters"][item] = net[item]
            continue
        elif not isinstance(value, pd.DataFrame):
            logger.warning("Could not serialize net.%s" % item)
            continue

        # value is a pandas DataFrame
        if not include_empty_tables and value.empty:
            continue

        if item == "bus_geodata":
            geo = coords_to_df(value, geotype="bus")
            if GEOPANDAS_INSTALLED and isinstance(value, geopandas.GeoDataFrame):
                geo["geometry"] = [s.to_wkt() for s in net.bus_geodata.geometry.values]
            dodfs[item] = geo
        elif item == "line_geodata":
            geo = coords_to_df(value, geotype="line")
            if GEOPANDAS_INSTALLED and isinstance(value, geopandas.GeoDataFrame):
                geo["geometry"] = [s.to_wkt() for s in net.line_geodata.geometry.values]
            dodfs[item] = geo
        else:
            dodfs[item] = value
        # save dtypes
        for column, dtype in value.dtypes.iteritems():
            dtypes.append((item, column, str(dtype)))
    dodfs["dtypes"] = pd.DataFrame(dtypes, columns=["element", "column", "dtype"])
    dodfs["parameters"] = pd.DataFrame(dodfs["parameters"], index=[0])
    return dodfs
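

# Illustrative usage sketch (editorial addition, not part of the original module): round-trip a
# network through the dict-of-DataFrames representation that the Excel and SQL writers build on.
# example_simple() is one of the example grids shipped with pandapower; the helper name is
# hypothetical.
def _example_dict_of_dfs_roundtrip():
    from pandapower.networks import example_simple
    net = example_simple()
    dodfs = to_dict_of_dfs(net, include_results=False)  # {"bus": DataFrame, "line": DataFrame, ...}
    return from_dict_of_dfs(dodfs)  # rebuilds a pandapowerNet from the plain tables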


def dicts_to_pandas(json_dict):
    warn("This function is deprecated and will be removed in a future release.\r\n"
         "Please resave your grid using the current pandapower version.", DeprecationWarning)
    pd_dict = dict()
    for k in sorted(json_dict.keys()):
        if isinstance(json_dict[k], dict):
            pd_dict[k] = pd.DataFrame.from_dict(json_dict[k], orient="columns")
            if pd_dict[k].shape[0] == 0:  # skip empty dataframes
                continue
            if pd_dict[k].index[0].isdigit():
                pd_dict[k].set_index(pd_dict[k].index.astype(numpy.int64), inplace=True)
        else:
            raise UserWarning("The network is an old version or corrupt. "
                              "Try to use the old load function")
    return pd_dict


def df_to_coords(net, item, table):
    # converts dataframe to coords in net
    num_points = len(table.columns) // 2
    net[item] = pd.DataFrame(index=table.index, columns=net[item].columns)
    if item == "bus_geodata":
        num_points -= 1
        net[item].loc[:, ['x', 'y']] = table.loc[:, ['x', 'y']]

    for i in table.index:
        coords = table.loc[i]
        # for i, coords in table.iterrows():
        coord = [(coords["x%u" % nr], coords["y%u" % nr]) for nr in range(num_points)
                 if pd.notnull(coords["x%u" % nr])]
        if len(coord):
            net[item].loc[i, "coords"] = coord


def from_dict_of_dfs(dodfs):
    net = create_empty_network()
    for c in dodfs["parameters"].columns:
        net[c] = dodfs["parameters"].at[0, c]
    for item, table in dodfs.items():
        if item in ("parameters", "dtypes"):
            continue
        elif item in ["line_geodata", "bus_geodata"]:
            df_to_coords(net, item, table)
        elif item.endswith("_std_types"):
            net["std_types"][item[:-10]] = table.T.to_dict()
            continue  # don't go into try..except
        elif item.endswith("_profiles"):
            if "profiles" not in net.keys():
                net["profiles"] = dict()
            net["profiles"][item[:-9]] = table
            continue  # don't go into try..except
        elif item == "user_pf_options":
            net['user_pf_options'] = {c: v for c, v in zip(table.columns, table.values[0])}
            continue  # don't go into try..except
        else:
            net[item] = table
        # set the index to be Int64Index
        try:
            net[item].set_index(net[item].index.astype(numpy.int64), inplace=True)
        except TypeError:
            # TypeError: if not int64 index (e.g. str)
            pass
    restore_all_dtypes(net, dodfs["dtypes"])
    return net


def restore_all_dtypes(net, dtypes):
    for _, v in dtypes.iterrows():
        try:
            if v["dtype"] == "object":
                c = net[v.element][v.column]
                net[v.element][v.column] = numpy.where(c.isnull(), None, c)
                # net[v.element][v.column] = net[v.element][v.column].fillna(value=None)
            net[v.element][v.column] = net[v.element][v.column].astype(v["dtype"])
        except KeyError:
            pass


def to_dict_with_coord_transform(net, point_geo_columns, line_geo_columns):
    save_net = dict()
    for key, item in net.items():
        if hasattr(item, "columns") and "geometry" in item.columns:
            # we convert shapely-objects to primitive data-types on a deepcopy
            item = copy.deepcopy(item)
            if key in point_geo_columns and not isinstance(item.geometry.values[0], tuple):
                item["geometry"] = item.geometry.apply(lambda x: (x.x, x.y))
            elif key in line_geo_columns and not isinstance(item.geometry.values[0], list):
                item["geometry"] = item.geometry.apply(lambda x: list(x.coords))

        save_net[key] = {"DF": item.to_dict("split"),
                         "dtypes": {col: dt for col, dt in zip(item.columns, item.dtypes)}} \
            if isinstance(item, pd.DataFrame) else item
    return save_net


def get_raw_data_from_pickle(filename):
    def read(f):
        if sys.version_info >= (3, 0):
            return pickle.load(f, encoding='latin1')
        else:
            return pickle.load(f)

    if hasattr(filename, 'read'):
        net = read(filename)
    elif not os.path.isfile(filename):
        raise UserWarning("File %s does not exist!!" % filename)
    else:
        with open(filename, "rb") as f:
            net = read(f)
    return net


def transform_net_with_df_and_geo(net, point_geo_columns, line_geo_columns):
    try:
        epsg = net.gis_epsg_code
    except AttributeError:
        epsg = None

    for key, item in net.items():
        if isinstance(item, dict) and "DF" in item:
            df_dict = item["DF"]
            if "columns" in df_dict:
                # make sure the index is Int64Index
                try:
                    df_index = pd.Int64Index(df_dict['index'])
                except TypeError:
                    df_index = df_dict['index']
                if GEOPANDAS_INSTALLED and "geometry" in df_dict["columns"] \
                        and epsg is not None:
                    # convert primitive data-types to shapely-objects
                    if key in point_geo_columns:
                        data = {"x": [row[0] for row in df_dict["data"]],
                                "y": [row[1] for row in df_dict["data"]]}
                        geo = [shapely.geometry.Point(row[2][0], row[2][1]) for row in df_dict["data"]]
                    elif key in line_geo_columns:
                        data = {"coords": [row[0] for row in df_dict["data"]]}
                        geo = [shapely.geometry.LineString(row[1]) for row in df_dict["data"]]

                    net[key] = geopandas.GeoDataFrame(data, crs=f"epsg:{epsg}", geometry=geo, index=df_index)
                else:
                    net[key] = pd.DataFrame(columns=df_dict["columns"], index=df_index,
                                            data=df_dict["data"])
            else:
                net[key] = pd.DataFrame.from_dict(df_dict)
                if "columns" in item:
                    if version.parse(pd.__version__) < version.parse("0.21"):
                        net[key] = net[key].reindex_axis(item["columns"], axis=1)
                    else:
                        net[key] = net[key].reindex(item["columns"], axis=1)

            if "dtypes" in item:
                if "columns" in df_dict and "geometry" in df_dict["columns"]:
                    pass
                else:
                    try:
                        # only works with pandas 0.19 or newer
                        net[key] = net[key].astype(item["dtypes"])
                    except:
                        # works with pandas <0.19
                        for column in net[key].columns:
                            net[key][column] = net[key][column].astype(item["dtypes"][column])


def isinstance_partial(obj, cls):
    if isinstance(obj, (pandapowerNet, tuple)):
        return False
    return isinstance(obj, cls)
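

# Editorial note (not part of the original module): isinstance_partial() is handed to
# json.encoder._make_iterencode() in PPJSONEncoder.iterencode() below. Reporting False for
# pandapowerNet and tuple keeps the encoder from serializing them as plain dicts/lists, so they
# fall through to PPJSONEncoder.default() and receive a "_module"/"_class" signature via
# to_serializable() instead.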


class PPJSONEncoder(json.JSONEncoder):
    def __init__(self, isinstance_func=isinstance_partial, **kwargs):
        super(PPJSONEncoder, self).__init__(**kwargs)
        self.isinstance_func = isinstance_func

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = json.encoder.encode_basestring_ascii
        else:
            _encoder = json.encoder.encode_basestring

        def floatstr(o, allow_nan=self.allow_nan, _repr=float.__repr__, _inf=json.encoder.INFINITY,
                     _neginf=-json.encoder.INFINITY):
            # Check for specials.  Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on the
            # internals.

            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " + repr(o))

            return text

        _iterencode = json.encoder._make_iterencode(
            markers, self.default, _encoder, self.indent, floatstr,
            self.key_separator, self.item_separator, self.sort_keys,
            self.skipkeys, _one_shot, isinstance=self.isinstance_func)
        return _iterencode(o, 0)

    def default(self, o):
        try:
            s = to_serializable(o)
        except TypeError:
            # Let the base class default method raise the TypeError
            return json.JSONEncoder.default(self, o)
        else:
            return s
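

# Illustrative usage sketch (editorial addition, not part of the original module): PPJSONEncoder
# plugs into the standard json API, so anything with a to_serializable() registration (DataFrames,
# numpy arrays, networkx graphs, ...) can be dumped and restored again with PPJSONDecoder defined
# further down. The helper and variable names are hypothetical.
def _example_json_roundtrip():
    df = pd.DataFrame({"p_mw": [0.1, 0.2]})
    json_string = json.dumps(df, cls=PPJSONEncoder)  # stores "_module"/"_class" metadata
    return json.loads(json_string, cls=PPJSONDecoder)  # pp_hook rebuilds the DataFrame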


class FromSerializable:
    def __init__(self):
        self.class_name = 'class_name'
        self.module_name = 'module_name'
        self.registry = {}

    def __get__(self, instance, owner):
        if instance is None:
            return self
        class_module = getattr(instance, self.class_name), getattr(instance, self.module_name)
        if class_module not in self.registry:
            _class = (class_module[0], '')
            _module = ('', class_module[1])
            if (_class in self.registry) and (_module in self.registry):
                logger.error('the saved object %s is ambiguous. There are at least two possibilities'
                             ' to decode the object' % class_module)
            elif _class in self.registry:
                class_module = _class
            elif _module in self.registry:
                class_module = _module
            else:
                class_module = ('', '')
        method = self.registry[class_module]
        return method.__get__(instance, owner)

    def register(self, class_name='', module_name=''):
        def decorator(method):
            self.registry[(class_name, module_name)] = method
            return method

        return decorator


class FromSerializableRegistry():
    from_serializable = FromSerializable()
    class_name = ''
    module_name = ''

    def __init__(self, obj, d, pp_hook_funct):
        self.obj = obj
        self.d = d
        self.pp_hook = pp_hook_funct

    @from_serializable.register(class_name='Series', module_name='pandas.core.series')
    def Series(self):
        return pd.read_json(self.obj, precise_float=True, **self.d)

    @from_serializable.register(class_name='DataFrame', module_name='pandas.core.frame')
    def DataFrame(self):
        df = pd.read_json(self.obj, precise_float=True, convert_axes=False, **self.d)
        try:
            df.set_index(df.index.astype(numpy.int64), inplace=True)
        except (ValueError, TypeError, AttributeError):
            logger.debug("failed setting int64 index")
        # recreate jsoned objects
        for col in ('object', 'controller'):  # "controller" for backwards compatibility
            if (col in df.columns):
                df[col] = df[col].apply(self.pp_hook)
        return df

    @from_serializable.register(class_name='pandapowerNet', module_name='pandapower.auxiliary')
    def pandapowerNet(self):
        if isinstance(self.obj, str):  # backwards compatibility
            from pandapower import from_json_string
            return from_json_string(self.obj)
        else:
            net = create_empty_network()
            net.update(self.obj)
            return net

    @from_serializable.register(class_name="MultiGraph", module_name="networkx")
    def networkx(self):
        return json_graph.adjacency_graph(self.obj, attrs={'id': 'json_id', 'key': 'json_key'})

    @from_serializable.register(class_name="method")
    def method(self):
        logger.warning('deserializing of method not implemented')
        # class_ = getattr(module, obj) # doesn't work
        return self.obj

    @from_serializable.register(class_name='function')
    def function(self):
        module = importlib.import_module(self.module_name)
        if not hasattr(module, self.obj):  # in case a function is a lambda or is not defined
            raise UserWarning('Could not find the definition of the function %s in the module %s' %
                              (self.obj, module.__name__))
        class_ = getattr(module, self.obj)  # works
        return class_

    @from_serializable.register()
    def rest(self):
        module = importlib.import_module(self.module_name)
        class_ = getattr(module, self.class_name)
        if isclass(class_) and issubclass(class_, JSONSerializableClass):
            if isinstance(self.obj, str):
                self.obj = json.loads(self.obj, cls=PPJSONDecoder,
                                      object_hook=partial(pp_hook,
                                                          registry_class=FromSerializableRegistry))
                # backwards compatibility
            if "net" in self.obj:
                del self.obj["net"]
            return class_.from_dict(self.obj)
        else:
            # for non-pp objects, e.g. tuple
            try:
                return class_(self.obj, **self.d)
            except ValueError:
                data = json.loads(self.obj)
                df = pd.DataFrame(columns=self.d["columns"])
                for d in data["features"]:
                    idx = int(d["id"])
                    for prop, val in d["properties"].items():
                        df.at[idx, prop] = val
                    # for geom, val in d["geometry"].items():
                    #     df.at[idx, geom] = val
                return df

    if GEOPANDAS_INSTALLED:
        @from_serializable.register(class_name='GeoDataFrame', module_name='geopandas.geodataframe')
        def GeoDataFrame(self):
            df = geopandas.GeoDataFrame.from_features(fiona.Collection(self.obj), crs=self.d['crs'])
            if "id" in df:
                df.set_index(df['id'].values.astype(numpy.int64), inplace=True)
            else:
                df.set_index(df.index.values.astype(numpy.int64), inplace=True)
            # coords column is not handled properly when using from_features
            if 'coords' in df:
                # df['coords'] = df.coords.apply(json.loads)
                valid_coords = ~pd.isnull(df.coords)
                df.loc[valid_coords, 'coords'] = df.loc[valid_coords, "coords"].apply(json.loads)
            df = df.reindex(columns=self.d['columns'])
            df = df.astype(self.d['dtype'])
            return df

    if SHAPELY_INSTALLED:
        @from_serializable.register(module_name='shapely')
        def shapely(self):
            return shapely.geometry.shape(self.obj)


class PPJSONDecoder(json.JSONDecoder):
    def __init__(self, **kwargs):
        # net = pandapowerNet.__new__(pandapowerNet)
#        net = create_empty_network()
        super_kwargs = {"object_hook": partial(pp_hook, registry_class=FromSerializableRegistry)}
        super_kwargs.update(kwargs)
        super().__init__(**super_kwargs)


def pp_hook(d, registry_class=FromSerializableRegistry):
    try:
        if '_module' in d and '_class' in d:
            if "_object" in d:
                obj = d.pop('_object')
            elif "_state" in d:
                obj = d['_state']
                if '_init' in obj:
                    del obj['_init']
                return obj  # backwards compatibility
            else:
                # obj = {"_init": d, "_state": dict()}  # backwards compatibility
                obj = {key: val for key, val in d.items() if key not in ['_module', '_class']}
            fs = registry_class(obj, d, pp_hook)
            fs.class_name = d.pop('_class', '')
            fs.module_name = d.pop('_module', '')
            return fs.from_serializable()
        else:
            return d
    except TypeError:
        logger.debug('Loading your grid raised a TypeError. %s raised this exception' % d)
        return d
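

# Illustrative usage sketch (editorial addition, not part of the original module): pp_hook()
# receives every decoded JSON object; dicts carrying the "_module"/"_class" signature written by
# with_signature() are dispatched through FromSerializableRegistry, everything else passes
# through unchanged.
def _example_pp_hook():
    d = {"_module": "builtins", "_class": "set", "_object": [1, 2, 3]}
    return pp_hook(d)  # -> {1, 2, 3}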


def encrypt_string(s, key, compress=True):
    from cryptography.fernet import Fernet
    import hashlib
    import base64
    key_base = hashlib.sha256(key.encode())
    key = base64.urlsafe_b64encode(key_base.digest())
    cipher_suite = Fernet(key)

    s = s.encode()
    if compress:
        import zlib
        s = zlib.compress(s)
    s = cipher_suite.encrypt(s)
    s = s.decode()
    return s


def decrypt_string(s, key):
    from cryptography.fernet import Fernet
    import hashlib
    import base64
    key_base = hashlib.sha256(key.encode())
    key = base64.urlsafe_b64encode(key_base.digest())
    cipher_suite = Fernet(key)

    s = s.encode()
    s = cipher_suite.decrypt(s)
    try:
        import zlib
        s = zlib.decompress(s)
    except:
        pass
    s = s.decode()
    return s
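

# Illustrative usage sketch (editorial addition, not part of the original module):
# encrypt_string()/decrypt_string() are symmetric helpers built on cryptography's Fernet; an
# arbitrary passphrase works because it is hashed to a 32-byte key first. Requires the optional
# "cryptography" dependency; the names below are hypothetical.
def _example_encryption_roundtrip():
    secret = encrypt_string('{"bus": {}}', key="my passphrase")
    return decrypt_string(secret, key="my passphrase")  # -> '{"bus": {}}'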


class JSONSerializableClass(object):
    json_excludes = ["self", "__class__"]

    def __init__(self, **kwargs):
        pass

    def to_json(self):
        """
        Each controller should have this method implemented. The resulting json string should be
        readable by the controller's from_json function and by the function add_ctrl_from_json in
        control_handler.
        """
        return json.dumps(self.to_dict(), cls=PPJSONEncoder)

    def to_dict(self):
        def consider_callable(value):
            if callable(value) and value.__class__ in (types.MethodType, types.FunctionType):
                if value.__class__ == types.MethodType and _findclass(value) is not None:
                    return with_signature(value, value.__name__, obj_module=_findclass(value))
                return with_signature(value, value.__name__)
            return value

        d = {key: consider_callable(val) for key, val in self.__dict__.items()
             if key not in self.json_excludes}
        return d

    def add_to_net(self, net, element, index, column="object", overwrite=False):
        if element not in net:
            net[element] = pd.DataFrame(columns=[column])
        if index in net[element].index.values:
            obj = net[element].object.at[index]
            if overwrite or not isinstance(obj, JSONSerializableClass):
                logger.info("Updating %s with index %s" % (element, index))
            else:
                raise UserWarning("%s with index %s already exists" % (element, index))
        net[element].at[index, column] = self

    def equals(self, other):

        class UnequalityFound(Exception):
            pass

        def check_equality(obj1, obj2):
            if isinstance(obj1, (ndarray, generic)) or isinstance(obj2, (ndarray, generic)):
                unequal = True
                if equal(obj1, obj2):
                    unequal = False
                elif anynp(isnan(obj1)):
                    if allclose(obj1, obj2, atol=0, rtol=0, equal_nan=True):
                        unequal = False
                if unequal:
                    raise UnequalityFound
            elif not isinstance(obj2, type(obj1)):
                raise UnequalityFound
            elif isinstance(obj1, pandapowerNet):
                pass
            elif isinstance(obj1, pd.DataFrame):
                if len(obj1) > 0:
                    try:
                        assert_frame_equal(obj1, obj2)
                    except:
                        raise UnequalityFound
            elif isinstance(obj2, pd.Series):
                if len(obj1) > 0:
                    try:
                        assert_series_equal(obj1, obj2)
                    except:
                        raise UnequalityFound
            elif isinstance(obj1, dict):
                check_dictionary_equality(obj1, obj2)
            elif obj1 != obj1 and obj2 != obj2:
                pass
            elif callable(obj1):
                check_callable_equality(obj1, obj2)
            elif obj1 != obj2:
                try:
                    if not (isnan(obj1) and isnan(obj2)):
                        raise UnequalityFound
                except:
                    raise UnequalityFound

        def check_dictionary_equality(obj1, obj2):
            if set(obj1.keys()) != set(obj2.keys()):
                raise UnequalityFound
            for key in obj1.keys():
                if key != "_init":
                    check_equality(obj1[key], obj2[key])

        def check_callable_equality(obj1, obj2):
            if isinstance(obj1, weakref.ref) and isinstance(obj2, weakref.ref):
                return
            if str(obj1) != str(obj2):
                raise UnequalityFound

        if isinstance(other, self.__class__):
            try:
                check_equality(self.__dict__, other.__dict__)
                return True
            except UnequalityFound:
                return False
        else:
            return False

    @classmethod
    def from_dict(cls, d):
        obj = JSONSerializableClass.__new__(cls)
        obj.__dict__.update(d)
        return obj

    @classmethod
    def from_json(cls, json_string):
        d = json.loads(json_string, cls=PPJSONDecoder)
        return cls.from_dict(d)
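

# Illustrative usage sketch (editorial addition, not part of the original module): a minimal
# JSONSerializableClass subclass. to_dict()/from_dict() operate on __dict__, so plain attributes
# survive the JSON round trip without extra code. Class and attribute names are hypothetical.
class _ExampleController(JSONSerializableClass):
    def __init__(self, element_index, scaling=1.0):
        super().__init__()
        self.element_index = element_index
        self.scaling = scaling

# e.g. _ExampleController([0, 1]).to_json() yields a string that
# _ExampleController.from_json() turns back into an equivalent object.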


def with_signature(obj, val, obj_module=None, obj_class=None):
    if obj_module is None:
        obj_module = obj.__module__.__str__()
    if obj_class is None:
        obj_class = obj.__class__.__name__
    d = {'_module': obj_module, '_class': obj_class, '_object': val}
    if hasattr(obj, 'dtype'):
        d.update({'dtype': str(obj.dtype)})
    return d
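

# Editorial note (not part of the original module): with_signature() wraps a serialized payload
# together with enough type information for pp_hook() to rebuild it, e.g.
#     with_signature((1, 2), [1, 2], obj_module='builtins', obj_class='tuple')
#     -> {'_module': 'builtins', '_class': 'tuple', '_object': [1, 2]}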


@singledispatch
def to_serializable(obj):
    logger.debug('standard case')
    return str(obj)


@to_serializable.register(pandapowerNet)
def json_pandapowernet(obj):
    net_dict = {k: item for k, item in obj.items() if not k.startswith("_")}
    d = with_signature(obj, net_dict)
    return d


@to_serializable.register(pd.DataFrame)
def json_dataframe(obj):
    logger.debug('DataFrame')
    orient = "split"
    json_string = obj.to_json(orient=orient, default_handler=to_serializable, double_precision=15)
    d = with_signature(obj, json_string)
    d['orient'] = orient
    if len(obj.columns) > 0 and isinstance(obj.columns[0], str):
        d['dtype'] = obj.dtypes.astype('str').to_dict()
    return d


if GEOPANDAS_INSTALLED:
    @to_serializable.register(geopandas.GeoDataFrame)
    def json_geodataframe(obj):
        logger.debug('GeoDataFrame')
        d = with_signature(obj, obj.to_json())
        d.update({'dtype': obj.dtypes.astype('str').to_dict(),
                  'crs': obj.crs, 'columns': obj.columns})
        return d


@to_serializable.register(pd.Series)
def json_series(obj):
    logger.debug('Series')
    d = with_signature(obj, obj.to_json(orient='split', default_handler=to_serializable,
                                        double_precision=15))
    d.update({'dtype': str(obj.dtypes), 'orient': 'split', 'typ': 'series'})
    return d


@to_serializable.register(numpy.ndarray)
def json_array(obj):
    logger.debug("ndarray")
    d = with_signature(obj, list(obj), obj_module='numpy', obj_class='array')
    return d


@to_serializable.register(numpy.integer)
def json_npint(obj):
    logger.debug("integer")
    return int(obj)


@to_serializable.register(numpy.floating)
def json_npfloat(obj):
    logger.debug("floating")
    return float(obj)


@to_serializable.register(numbers.Number)
def json_num(obj):
    logger.debug("numbers.Number")
    return str(obj)


@to_serializable.register(complex)
def json_complex(obj):
    logger.debug("complex")
    d = with_signature(obj, str(obj), obj_module='builtins', obj_class='complex')
    d.pop('dtype')
    return d


@to_serializable.register(pd.Index)
def json_pdindex(obj):
    logger.debug("pd.Index")
    return with_signature(obj, list(obj), obj_module='pandas')


@to_serializable.register(bool)
def json_bool(obj):
    logger.debug("bool")
    return "true" if obj else "false"


@to_serializable.register(tuple)
def json_tuple(obj):
    logger.debug("tuple")
    d = with_signature(obj, list(obj), obj_module='builtins', obj_class='tuple')
    return d


@to_serializable.register(set)
def json_set(obj):
    logger.debug("set")
    d = with_signature(obj, list(obj), obj_module='builtins', obj_class='set')
    return d


@to_serializable.register(frozenset)
def json_frozenset(obj):
    logger.debug("frozenset")
    d = with_signature(obj, list(obj), obj_module='builtins', obj_class='frozenset')
    return d


@to_serializable.register(networkx.Graph)
def json_networkx(obj):
    logger.debug("nx graph")
    json_string = json_graph.adjacency_data(obj, attrs={'id': 'json_id', 'key': 'json_key'})
    d = with_signature(obj, json_string, obj_module="networkx")
    return d


@to_serializable.register(JSONSerializableClass)
def controller_to_serializable(obj):
    logger.debug('JSONSerializableClass')
    d = with_signature(obj, obj.to_json())
    return d


def mkdirs_if_not_existent(dir_to_create):
    already_exist = os.path.isdir(dir_to_create)
    os.makedirs(dir_to_create, exist_ok=True)
    return not already_exist


if SHAPELY_INSTALLED:
    @to_serializable.register(shapely.geometry.LineString)
    def json_linestring(obj):
        logger.debug("shapely linestring")
        json_string = shapely.geometry.mapping(obj)
        d = with_signature(obj, json_string, obj_module="shapely")
        return d


    @to_serializable.register(shapely.geometry.Point)
    def json_point(obj):
        logger.debug("shapely Point")
        json_string = shapely.geometry.mapping(obj)
        d = with_signature(obj, json_string, obj_module="shapely")
        return d


    @to_serializable.register(shapely.geometry.Polygon)
    def json_polygon(obj):
        logger.debug("shapely Polygon")
        json_string = shapely.geometry.mapping(obj)
        d = with_signature(obj, json_string, obj_module="shapely")
        return d


if __name__ == '__main__':
    import pandapower as pp
    net = pp.from_json(r'edis_zone_3_6.json')
