Showing 22 of 27 files from the diff.

@@ -10,8 +10,7 @@
 
 
 class BaseAdapter(abc.ABC):
-    """A BaseAdapter for wrapping compute engines
-    """
+    """A BaseAdapter for wrapping compute engines"""
 
     def __init__(
         self,
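
This one-line collapse recurs throughout the diff, and it is not purely cosmetic: moving the closing quotes onto the same line changes the docstring value itself. A minimal sketch of the difference:

def before():
    """A BaseAdapter for wrapping compute engines
    """

def after():
    """A BaseAdapter for wrapping compute engines"""

print(repr(before.__doc__))  # 'A BaseAdapter for wrapping compute engines\n    '
print(repr(after.__doc__))   # 'A BaseAdapter for wrapping compute engines'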

@@ -27,7 +27,7 @@
 
 class BaseResultORM(Base):
     """
-        Abstract Base class for ResultORMs and ProcedureORMs
+    Abstract Base class for ResultORMs and ProcedureORMs
     """
 
     __tablename__ = "base_result"
@@ -118,7 +118,7 @@
 
 class ResultORM(BaseResultORM):
     """
-        Hold the result of an atomic single calculation
+    Hold the result of an atomic single calculation
     """
 
     __tablename__ = "result"
@@ -176,7 +176,7 @@
 
 class ProcedureMixin:
     """
-        A procedure mixin to be used by specific procedure types
+    A procedure mixin to be used by specific procedure types
     """
 
     program = Column(String(100), nullable=False)
@@ -211,7 +211,7 @@
 
 class OptimizationProcedureORM(ProcedureMixin, BaseResultORM):
     """
-        An Optimization  procedure
+    An Optimization  procedure
     """
 
     __tablename__ = "optimization_procedure"
@@ -422,7 +422,7 @@
 
 class TorsionDriveProcedureORM(ProcedureMixin, BaseResultORM):
     """
-        A torsion drive  procedure
+    A torsion drive  procedure
     """
 
     __tablename__ = "torsiondrive_procedure"

@@ -135,9 +135,7 @@
     _required_auth = "read"
 
     def get(self):
-        """
-
-        """
+        """"""
 
         self.logger.info("GET: Information")
 

@@ -23,8 +23,7 @@
 
 
 class ParslAdapter(BaseAdapter):
-    """An Adapter for Parsl.
-    """
+    """An Adapter for Parsl."""
 
     def __init__(self, client: Any, logger: Optional[logging.Logger] = None, **kwargs):
         BaseAdapter.__init__(self, client, logger, **kwargs)

@@ -274,9 +274,7 @@
         id: QueryObjectId = Field(None, description="Id of the Key/Value Storage object to get.")
 
     meta: EmptyMeta = Field({}, description=common_docs[EmptyMeta])
-    data: Data = Field(
-        ..., description="Data of the KV Get field: consists of Id of the Key/Value object to fetch."
-    )
+    data: Data = Field(..., description="Data of the KV Get field: consists of Id of the Key/Value object to fetch.")
 
 
 class KVStoreGETResponse(ProtoModel):

@@ -1337,7 +1337,12 @@
             prog_default_kw = self.data.default_keywords.get(program, None)
             for kwalias, kwid in kwaliases.items():
                 data.append(
-                    {"program": program, "keywords": kwalias, "id": kwid, "default": prog_default_kw == kwalias,}
+                    {
+                        "program": program,
+                        "keywords": kwalias,
+                        "id": kwid,
+                        "default": prog_default_kw == kwalias,
+                    }
                 )
         return pd.DataFrame(data).set_index("program")
 
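
The exploded dict with a trailing comma is characteristic of black, which appears to be the formatter applied across this diff. For reference, a small self-contained rerun of the frame this hunk builds, one row per keyword alias, indexed by program; the values are illustrative, not data from a server:

import pandas as pd

data = [
    {"program": "psi4", "keywords": "default", "id": "1", "default": True},
    {"program": "psi4", "keywords": "tight", "id": "2", "default": False},
]
print(pd.DataFrame(data).set_index("program"))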

@@ -32,11 +32,11 @@
         A logger for use by the service
     service_input
         The service to be initialized.
-	tag : Optional
+    tag : Optional
         Optional tag to user with the service. Defaults to None
     priority :
         The priority of the service.
-		
+
     Returns
     -------
     Service

@@ -16,10 +16,10 @@
 
 class CollectionORM(Base):
     """
-        A base collection class of precomuted workflows such as datasets, ..
+    A base collection class of precomuted workflows such as datasets, ..
 
-        This is a dynamic document, so it will accept any number of
-        extra fields (expandable and uncontrolled schema)
+    This is a dynamic document, so it will accept any number of
+    extra fields (expandable and uncontrolled schema)
     """
 
     __tablename__ = "collection"
@@ -63,7 +63,7 @@
 
 class DatasetMixin:
     """
-        Mixin class for common Dataset attributes.
+    Mixin class for common Dataset attributes.
     """
 
     default_benchmark = Column(String)
@@ -80,7 +80,7 @@
 
 class ContributedValuesORM(Base):
     """One group of a contibuted values per dataset
-    Each dataset can have multiple rows in this table """
+    Each dataset can have multiple rows in this table"""
 
     __tablename__ = "contributed_values"
 
@@ -118,7 +118,7 @@
 
 class DatasetORM(CollectionORM, DatasetMixin):
     """
-        The Dataset class for homogeneous computations on many molecules.
+    The Dataset class for homogeneous computations on many molecules.
     """
 
     __tablename__ = "dataset"
@@ -230,7 +230,7 @@
 
 class ReactionDatasetORM(CollectionORM, DatasetMixin):
     """
-        Reaction Dataset
+    Reaction Dataset
     """
 
     __tablename__ = "reaction_dataset"

@@ -495,8 +495,7 @@
     ## Updates
 
     def update_services(self) -> int:
-        """Runs through all active services and examines their current status.
-        """
+        """Runs through all active services and examines their current status."""
 
         # Grab current services
         current_services = self.storage.get_services(status="RUNNING")["data"]

@@ -20,8 +20,7 @@
 
 
 class ExecutorAdapter(BaseAdapter):
-    """A Queue Adapter for Python Executors
-    """
+    """A Queue Adapter for Python Executors"""
 
     def __repr__(self):
 
@@ -66,8 +65,7 @@
 
 
 class DaskAdapter(ExecutorAdapter):
-    """A Queue Adapter for Dask
-    """
+    """A Queue Adapter for Dask"""
 
     def __repr__(self):
 

@@ -179,7 +179,7 @@
     Expands an n-dimensional key/value grid.
 
     Example
-	-------
+    -------
     >>> expand_ndimensional_grid((3, 3), {(1, 1)}, set())
     [((1, 1), (0, 1)), ((1, 1), (2, 1)), ((1, 1), (1, 0)), ((1, 1), (1, 2))]
     """

@@ -301,8 +301,7 @@
 
     @validator("method")
     def check_method(cls, v):
-        """Methods should have a lower string to match the database.
-        """
+        """Methods should have a lower string to match the database."""
         return v.lower()
 
     @validator("basis")

@@ -230,9 +230,9 @@
         force : bool, optional
             Data is typically cached, forces a new query if True
 
-       Returns
-       -------
-       DataFrame
+        Returns
+        -------
+        DataFrame
            A DataFrame of values with columns corresponding to methods and rows corresponding to reaction entries.
            Contributed (native=False) columns are marked with "(contributed)" and may include units in square brackets
            if their units differ in dimensionality from the ReactionDataset's default units.

@@ -49,7 +49,7 @@
 
 def provenance_stamp(routine):
     """Return dictionary satisfying QCSchema,
-   generating routine's name is passed in through `routine`.
+    generating routine's name is passed in through `routine`.
 
-   """
+    """
     return {"creator": "QCFractal", "version": get_information("version"), "routine": routine}

@@ -296,7 +296,7 @@
 
     def _get_all_results(self, optimization_ids: List[Union[int, str]] = None):
         """Returns all the results objects (trajectory) of each optmization
-        Returns list(list) """
+        Returns list(list)"""
 
         if optimization_ids is None:
             self._raise_missing_attribute("all_results", "List of optimizations ids")

@@ -164,8 +164,7 @@
 
 @contextmanager
 def preserve_cwd():
-    """Always returns to CWD on exit
-    """
+    """Always returns to CWD on exit"""
     cwd = os.getcwd()
     try:
         yield cwd
@@ -366,8 +365,7 @@
 
 
 def reset_server_database(server):
-    """Resets the server database for testing.
-    """
+    """Resets the server database for testing."""
     if "QCFRACTAL_RESET_TESTING_DB" in os.environ:
         server.storage._clear_db(server.storage._project_name)
 
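
The first hunk cuts off inside preserve_cwd's try block; a complete sketch of the pattern, assuming the restore happens in a finally clause as the docstring promises:

import os
from contextlib import contextmanager

@contextmanager
def preserve_cwd():
    cwd = os.getcwd()
    try:
        yield cwd
    finally:
        os.chdir(cwd)  # assumed: restore on exit, even if the body raised

with preserve_cwd():
    os.chdir("/tmp")  # any directory change here is undone on exit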

@@ -108,7 +108,7 @@
 
 class MoleculeORM(Base):
     """
-        The molecule DB collection is managed by pymongo, so far
+    The molecule DB collection is managed by pymongo, so far
     """
 
     __tablename__ = "molecule"
@@ -180,7 +180,7 @@
 
 class KeywordsORM(Base):
     """
-        KeywordsORM are unique for a specific program and name
+    KeywordsORM are unique for a specific program and name
     """
 
     __tablename__ = "keywords"
@@ -203,8 +203,8 @@
 class TaskQueueORM(Base):
     """A queue of tasks corresponding to a procedure
 
-       Notes: don't sort query results without having the index sorted
-              will impact the performance
+    Notes: don't sort query results without having the index sorted
+           will impact the performance
     """
 
     __tablename__ = "task_queue"
@@ -320,8 +320,7 @@
 
 
 class QueueManagerORM(Base):
-    """
-    """
+    """"""
 
     __tablename__ = "queue_manager"
 

@@ -154,7 +154,7 @@
 
 class SQLAlchemySocket:
     """
-        SQLAlcehmy QCDB wrapper class.
+    SQLAlcehmy QCDB wrapper class.
     """
 
     def __init__(
@@ -319,8 +319,8 @@
 
     def get_limit(self, limit: Optional[int]) -> int:
         """Get the allowed limit on results to return in queries based on the
-         given `limit`. If this number is greater than the
-         SQLAlchemySocket.max_limit then the max_limit will be returned instead.
+        given `limit`. If this number is greater than the
+        SQLAlchemySocket.max_limit then the max_limit will be returned instead.
         """
 
         return limit if limit is not None and limit < self._max_limit else self._max_limit
@@ -1183,18 +1183,24 @@
             existing_results = {}
 
             for cond in conds:
-                doc = session.query(
-                    ResultORM.program,
-                    ResultORM.driver,
-                    ResultORM.method,
-                    ResultORM.basis,
-                    ResultORM.keywords,
-                    ResultORM.molecule,
-                    ResultORM.id
-                ).filter(cond).one_or_none()
+                doc = (
+                    session.query(
+                        ResultORM.program,
+                        ResultORM.driver,
+                        ResultORM.method,
+                        ResultORM.basis,
+                        ResultORM.keywords,
+                        ResultORM.molecule,
+                        ResultORM.id,
+                    )
+                    .filter(cond)
+                    .one_or_none()
+                )
 
                 if doc is not None:
-                    existing_results[(doc.program, doc.driver, doc.method, doc.basis, doc.keywords, str(doc.molecule))] = doc
+                    existing_results[
+                        (doc.program, doc.driver, doc.method, doc.basis, doc.keywords, str(doc.molecule))
+                    ] = doc
 
             # Loop over all (input) records, keeping track each record's index in the list
             for i, result in enumerate(record_list):
@@ -1365,7 +1371,7 @@
             id or a list of ids of tasks
         manager_id: str or List[str]
             id or a list of ids of queue_mangers
-        status : bool, optional 
+        status : bool, optional
             The status of the result: 'COMPLETE', 'INCOMPLETE', or 'ERROR'
            Default is 'COMPLETE'
         include : Optional[List[str]], optional
@@ -1380,7 +1386,7 @@
         skip : int, optional
             skip the first 'skip' results. Used to paginate
             Default is 0
-        return_json : bool, optional 
+        return_json : bool, optional
            Return the results as a list of json inseated of objects
            default is True
        with_ids : bool, optional
@@ -1991,13 +1997,13 @@
 
     def services_completed(self, records_list: List["BaseService"]) -> int:
         """
-        Delete the services which are completed from the database. 
-        
+        Delete the services which are completed from the database.
+
         Parameters
         ----------
         records_list : List["BaseService"]
             List of Service objects which are completed.
-        
+
         Returns
        -------
         int
@@ -2214,7 +2220,7 @@
             skip the first 'skip' results. Used to paginate, default is 0
         return_json : bool, optional
             Return the results as a list of json inseated of objects, deafult is True
-        with_ids : bool, optional 
+        with_ids : bool, optional
             Include the ids in the returned objects/dicts, default is True
 
         Returns
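
get_limit's clamping logic, shown in the second hunk above, is compact enough to restate standalone; MAX_LIMIT is an illustrative stand-in for the instance attribute _max_limit:

from typing import Optional

MAX_LIMIT = 1000  # illustrative stand-in for SQLAlchemySocket._max_limit

def get_limit(limit: Optional[int]) -> int:
    # None, or anything at or above the cap, falls back to the cap.
    return limit if limit is not None and limit < MAX_LIMIT else MAX_LIMIT

assert get_limit(None) == 1000
assert get_limit(50) == 50
assert get_limit(10**6) == 1000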

@@ -21,8 +21,7 @@
     _required_auth = "compute"
 
     def post(self):
-        """Posts new tasks to the task queue.
-        """
+        """Posts new tasks to the task queue."""
 
         body_model, response_model = rest_model("task_queue", "post")
         body = self.parse_bodymodel(body_model)
@@ -45,8 +44,7 @@
         self.write(response)
 
     def get(self):
-        """Gets task information from the task queue
-        """
+        """Gets task information from the task queue"""
 
         body_model, response_model = rest_model("task_queue", "get")
         body = self.parse_bodymodel(body_model)
@@ -58,8 +56,7 @@
         self.write(response)
 
     def put(self):
-        """Modifies tasks in the task queue
-        """
+        """Modifies tasks in the task queue"""
 
         body_model, response_model = rest_model("task_queue", "put")
         body = self.parse_bodymodel(body_model)
@@ -87,8 +84,7 @@
     _required_auth = "compute"
 
     def post(self):
-        """Posts new services to the service queue.
-        """
+        """Posts new services to the service queue."""
 
         body_model, response_model = rest_model("service_queue", "post")
         body = self.parse_bodymodel(body_model)
@@ -120,8 +116,7 @@
         self.write(response)
 
     def get(self):
-        """Gets information about services from the service queue.
-        """
+        """Gets information about services from the service queue."""
 
         body_model, response_model = rest_model("service_queue", "get")
         body = self.parse_bodymodel(body_model)
@@ -133,8 +128,7 @@
         self.write(response)
 
     def put(self):
-        """Modifies services in the service queue
-        """
+        """Modifies services in the service queue"""
 
         body_model, response_model = rest_model("service_queue", "put")
         body = self.parse_bodymodel(body_model)
@@ -241,8 +235,7 @@
         return len(completed), len(error_data)
 
     def get(self):
-        """Pulls new tasks from the task queue
-        """
+        """Pulls new tasks from the task queue"""
 
        body_model, response_model = rest_model("queue_manager", "get")
         body = self.parse_bodymodel(body_model)
@@ -274,8 +267,7 @@
         self.storage.manager_update(name, submitted=len(new_tasks), **body.meta.dict())
 
     def post(self):
-        """Posts complete tasks to the task queue
-        """
+        """Posts complete tasks to the task queue"""
 
         body_model, response_model = rest_model("queue_manager", "post")
         body = self.parse_bodymodel(body_model)
@@ -357,8 +349,7 @@
     _required_auth = "admin"
 
     def get(self):
-        """Gets manager information from the task queue
-        """
+        """Gets manager information from the task queue"""
 
         body_model, response_model = rest_model("manager", "get")
         body = self.parse_bodymodel(body_model)
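
Every handler in this file follows the same shape: resolve a (body model, response model) pair from rest_model, parse the request body against the first, and write a reply built from the second. A framework-free sketch of that shape; the registry and storage call here are invented for illustration, only rest_model, parse_bodymodel, and write appear in the diff:

from dataclasses import dataclass

@dataclass
class TaskGETBody:
    id: str

@dataclass
class TaskGETResponse:
    data: list

REST_MODELS = {("task_queue", "get"): (TaskGETBody, TaskGETResponse)}  # hypothetical registry

def rest_model(name, verb):
    return REST_MODELS[(name, verb)]

def handle_get(raw_body):
    body_model, response_model = rest_model("task_queue", "get")
    body = body_model(**raw_body)                    # parse_bodymodel analogue
    tasks = [{"id": body.id, "status": "COMPLETE"}]  # stand-in for a storage query
    return response_model(data=tasks)                # what self.write would serialize

print(handle_get({"id": "42"}))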

@@ -98,15 +98,15 @@
         (According to pydantic docs, validators are run in the order of field definition)
         """
         if isinstance(data, dict):
-            if values['compression'] != CompressionEnum.none:
+            if values["compression"] != CompressionEnum.none:
                 raise ValueError("Compression is set, but input is a dictionary")
-            if values['compression_level'] != 0:
+            if values["compression_level"] != 0:
                 raise ValueError("Compression level is set, but input is a dictionary")
             return json.dumps(data).encode()
         elif isinstance(data, str):
-            if values['compression'] != CompressionEnum.none:
+            if values["compression"] != CompressionEnum.none:
                 raise ValueError("Compression is set, but input is a string")
-            if values['compression_level'] != 0:
+            if values["compression_level"] != 0:
                 raise ValueError("Compression level is set, but input is a string")
             return data.encode()
         else:
@@ -135,13 +135,18 @@
             return compression_level
 
     @classmethod
-    def compress(cls, input_str: str, compression_type: CompressionEnum = CompressionEnum.none, compression_level: Optional[int] = None):
-        '''Compresses a string given a compression scheme and level
+    def compress(
+        cls,
+        input_str: str,
+        compression_type: CompressionEnum = CompressionEnum.none,
+        compression_level: Optional[int] = None,
+    ):
+        """Compresses a string given a compression scheme and level
 
         Returns an object of type `cls`
 
         If compression_level is None, but a compression_type is specified, an appropriate default level is chosen
-        '''
+        """
 
         data = input_str.encode()
 
@@ -165,7 +170,7 @@
         # By default, use level = 1 for larger files (>15MB or so)
         elif compression_type is CompressionEnum.lzma:
             if compression_level is None:
-                if len(data) > 15*1048576:
+                if len(data) > 15 * 1048576:
                     compression_level = 1
                 else:
                     compression_level = 6
@@ -176,7 +181,6 @@
 
         return cls(data=data, compression=compression_type, compression_level=compression_level)
 
-
     def get_string(self):
         """
         Returns the string representing the output
@@ -193,7 +197,6 @@
             # Shouldn't ever happen, unless we change CompressionEnum but not the rest of this function
             raise TypeError("Unknown compression type??")
 
-
     def get_json(self):
         """
         Returns a dict if the data stored is a JSON string
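
The size cutoff in the lzma branch is easy to check in isolation. A sketch of the default-level choice, mirroring the ~15 MiB threshold above; the helper name is illustrative:

import lzma

def pick_lzma_preset(data: bytes) -> int:
    # Preset 1 for payloads over ~15 MiB (favor speed), 6 otherwise
    # (favor ratio), mirroring the defaults in the diff above.
    return 1 if len(data) > 15 * 1048576 else 6

payload = b"some long output text" * 1000
compressed = lzma.compress(payload, preset=pick_lzma_preset(payload))
print(len(payload), "->", len(compressed))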

@@ -60,8 +60,8 @@
Loading
60 60
61 61
class SingleResultTasks(BaseTasks):
62 62
    """A task generator for a single Result.
63 -
     Unique by: driver, method, basis, option (the name in the options table),
64 -
     and program.
63 +
    Unique by: driver, method, basis, option (the name in the options table),
64 +
    and program.
65 65
    """
66 66
67 67
    def verify_input(self, data):

@@ -13,8 +13,7 @@
 
 
 def import_module(module, package=None):
-    """Protected import of a module
-    """
+    """Protected import of a module"""
     try:
         ret = importlib.import_module(module, package=package)
     except ModuleNotFoundError:
@@ -25,8 +24,7 @@
 
 
 def read_config_file(fname):
-    """Reads a JSON or YAML file.
-    """
+    """Reads a JSON or YAML file."""
     if fname.endswith(".yaml") or fname.endswith(".yml"):
         try:
             rfunc = partial(yaml.load, Loader=yaml.FullLoader)
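
read_config_file dispatches on the file extension; a minimal runnable sketch of that dispatch, assuming PyYAML is installed and omitting the real function's fallback and error handling:

import json
from functools import partial

import yaml

def read_config_text(fname: str, text: str):
    # YAML for .yaml/.yml, JSON otherwise -- the same split as above.
    if fname.endswith((".yaml", ".yml")):
        rfunc = partial(yaml.load, Loader=yaml.FullLoader)
    else:
        rfunc = json.loads
    return rfunc(text)

print(read_config_text("server.yaml", "port: 7777"))     # {'port': 7777}
print(read_config_text("server.json", '{"port": 7777}'))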
Files                        Coverage
qcfractal                    88.13%
Project Totals (67 files)    88.13%
coverage:
  ignore:
    - */tests/*
    - qcfractal/dashboard/* # early state
    - qcfractal/alembic/* # difficult to test
    - qcfractal/_version.py
    - setup.py
  status:
    patch: false
    project:
      default:
        threshold: 80%
comment:
  layout: "header"
  require_changes: false
  branches: null
  behavior: once
  flags: null
  paths: null