#1572 Feature/memoize key

Merged · Arnault Wirg. Pseudo commit used to compare: c4e24fb...b6823ff.

Missing base report.

Unable to compare commits because the base of the pull request did not upload a coverage report.

Changes were found between c4e24fb...b6823ff (pseudo...base) that prevent comparing this pull request.


@@ -2,7 +2,7 @@
 
 import spacy
 
-from snorkel.types import FieldMap
+from snorkel.types import FieldMap, HashingFunction
 
 from .core import BasePreprocessor, Preprocessor
 
@@ -40,6 +40,8 @@
         Preprocessors to run before this preprocessor is executed
     memoize
         Memoize preprocessor outputs?
+    memoize_key
+        Hashing function to handle the memoization (default to snorkel.map.core.get_hashable)
     gpu
         Prefer Spacy GPU processing?
     """
@@ -52,6 +54,7 @@
         disable: Optional[List[str]] = None,
         pre: Optional[List[BasePreprocessor]] = None,
         memoize: bool = False,
+        memoize_key: Optional[HashingFunction] = None,
         gpu: bool = False,
     ) -> None:
         name = type(self).__name__
@@ -61,6 +64,7 @@
             mapped_field_names=dict(doc=doc_field),
             pre=pre,
             memoize=memoize,
+            memoize_key=memoize_key,
         )
         self.gpu = gpu
         if self.gpu:
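
The memoize_key argument threaded through above controls how memoized inputs are keyed: with memoize=True, the hashing function (defaulting to snorkel.map.core.get_hashable) is applied to the data point to produce the cache key. A minimal usage sketch, assuming the usual text_field/doc_field constructor arguments and an illustrative key function that keys the cache on the text alone:

```python
from snorkel.preprocess.nlp import SpacyPreprocessor

# Sketch only: "text" and "doc" are illustrative field names, and the lambda
# is an assumed custom key. The key function receives the data point and must
# return a hashable value; leaving memoize_key as None falls back to
# snorkel.map.core.get_hashable.
spacy_preprocessor = SpacyPreprocessor(
    text_field="text",
    doc_field="doc",
    memoize=True,
    memoize_key=lambda x: x.text,
)
```

Keying the cache on the text alone means two data points with identical text reuse one cached spaCy parse even if their other fields differ.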

@@ -49,6 +49,8 @@
         See https://spacy.io/usage/processing-pipelines#disabling
     memoize
         Memoize preprocessor outputs?
+    memoize_key
+        Hashing function to handle the memoization (default to snorkel.map.core.get_hashable)
 
     Raises
     ------
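
This hunk documents the same memoize_key option for the NLP labeling function helpers. Assuming the argument is exposed through the nlp_labeling_function decorator in the same way, a hedged sketch:

```python
from snorkel.labeling.lf.nlp import nlp_labeling_function

ABSTAIN, MENTIONS_PERSON = -1, 1

# Sketch only: the label values and keying on x.text are illustrative, and
# memoize_key is assumed to be forwarded by the decorator. x.doc is the spaCy
# Doc attached by the built-in NLP preprocessing.
@nlp_labeling_function(memoize=True, memoize_key=lambda x: x.text)
def lf_mentions_person(x):
    return MENTIONS_PERSON if any(ent.label_ == "PERSON" for ent in x.doc.ents) else ABSTAIN
```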

@@ -25,7 +25,7 @@
 
 
 class LambdaPreprocessor(LambdaMapper):
-    """Convenience class for definining preprocessors from functions.
+    """Convenience class for defining preprocessors from functions.
 
     See ``snorkel.map.core.LambdaMapper`` for details.
     """

@@ -90,7 +90,7 @@
         Name of the LF
     resources
         Labeling resources passed in to ``f`` via ``kwargs``
-    preprocessors
+    pre
         Preprocessors to run on data points before LF execution
 
     Examples
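
The corrected name is pre: the list of preprocessors run on each data point before the labeling function body executes. A hedged sketch of passing pre through the labeling_function decorator (field names and label values are illustrative):

```python
from snorkel.labeling import labeling_function
from snorkel.preprocess.nlp import SpacyPreprocessor

ABSTAIN, MENTIONS_NUMBER = -1, 1

# Sketch only: the preprocessor runs first and attaches x.doc, which the
# labeling function then reads.
spacy_pre = SpacyPreprocessor(text_field="text", doc_field="doc", memoize=True)

@labeling_function(pre=[spacy_pre])
def lf_mentions_number(x):
    return MENTIONS_NUMBER if any(tok.like_num for tok in x.doc) else ABSTAIN
```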

@@ -179,7 +179,7 @@
         # Set to training mode
         model.train()
 
-        logging.info(f"Start training...")
+        logging.info("Start training...")
 
         self.metrics: Dict[str, float] = dict()
         self._reset_losses()
@@ -372,7 +372,7 @@
         if self.config.lr_scheduler_config.warmup_steps:
             warmup_steps = self.config.lr_scheduler_config.warmup_steps
             if warmup_steps < 0:
-                raise ValueError(f"warmup_steps much greater or equal than 0.")
+                raise ValueError("warmup_steps much greater or equal than 0.")
             warmup_unit = self.config.lr_scheduler_config.warmup_unit
             if warmup_unit == "epochs":
                 self.warmup_steps = int(warmup_steps * self.n_batches_per_epoch)
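
Apart from dropping the unnecessary f-string prefixes, the surrounding logic is worth restating: warmup_steps comes from lr_scheduler_config, must be non-negative, and is rescaled from epochs to optimizer steps when warmup_unit is "epochs". A plain-Python restatement with illustrative numbers:

```python
# Restates the conversion in the hunk above; the concrete values are made up.
warmup_steps = 2            # e.g. two epochs of warmup
warmup_unit = "epochs"
n_batches_per_epoch = 250   # illustrative value

if warmup_steps < 0:
    raise ValueError("warmup_steps must be greater than or equal to 0.")
if warmup_unit == "epochs":
    warmup_steps = int(warmup_steps * n_batches_per_epoch)

print(warmup_steps)  # 500 optimizer steps of warmup
```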

(Six additional file diffs in this pull request were not loaded by the viewer and are not shown.)

Unable to process changes.

No base report to compare against.

Files                        Coverage
snorkel                      97.19%
Project Totals (68 files)    97.19%