diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml new file mode 100644 index 0000000..b596fc6 --- /dev/null +++ b/.github/workflows/integration.yml @@ -0,0 +1,52 @@ +name: integration + +on: [push, pull_request] + +jobs: + + checks: + runs-on: ubuntu-latest + strategy: + max-parallel: 8 + matrix: + check: [bluecheck, doc8, docs, flake8, isortcheck, mypy, pylint, rstcheck] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + - name: Install dependencies + run: | + pip install --upgrade pip + pip install tox + - name: Run checks with tox + run: | + tox -e ${{ matrix.check }} + + tests: + needs: checks + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 8 + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.8, 3.9, '3.10', 3.11] + + steps: + - name: Set up Python ${{ matrix.python-version }} x64 + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + + - uses: actions/checkout@v3 + + - name: Install tox + run: | + pip install --upgrade pip + pip install tox + + - name: Test with tox + run: tox -e py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..33b3a8f --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,30 @@ +name: release + +on: + push: + tags: + - v* + +jobs: + + upload: + runs-on: ubuntu-latest + permissions: + id-token: write + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install build + run: pip install build + + - name: Create build + run: python -m build + + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.gitignore b/.gitignore index c496721..67157b8 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ # test files/directories /.cache/ -.coverage +.coverage* .pytest_cache/ /.tox/ diff --git a/.pylintrc b/.pylintrc index 3314897..dc1490a 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,24 +1,77 @@ -[MASTER] +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) 
extension-pkg-whitelist= -# Add files or directories to the blacklist. They should be base names, not -# paths. +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. ignore=CVS -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single @@ -26,15 +79,25 @@ jobs=1 # complex, nested conditions. limit-inference-results=100 -# List of plugins (as comma separated values of python modules names) to load, +# List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes -# Specify a configuration file. -#rcfile= +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.11 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. @@ -44,323 +107,8 @@ suggestion-mode=yes # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). 
You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, - no-else-return, - no-member, - useless-object-inheritance, - inconsistent-return-statements, - ungrouped-imports, - not-callable, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. 
-output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=6 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. 
-missing-member-hint=yes
-
-# The minimum edit distance a name should have in order to be considered a
-# similar match for a missing member name.
-missing-member-hint-distance=1
-
-# The total number of similar names that should be taken in consideration when
-# showing a hint for a missing member.
-missing-member-max-choices=1
-
-
-[VARIABLES]
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid defining new builtins when possible.
-additional-builtins=
-
-# Tells whether unused global variables should be treated as a violation.
-allow-global-unused-variables=yes
-
-# List of strings which can identify a callback function by name. A callback
-# name must start or end with one of those strings.
-callbacks=cb_,
-          _cb
-
-# A regular expression matching the name of dummy variables (i.e. expected to
-# not be used).
-dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore.
-ignored-argument-names=_.*|^ignored_|^unused_
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# List of qualified module names which can have objects that can redefine
-# builtins.
-redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-# Maximum number of characters on a single line.
-max-line-length=100
-
-# Maximum number of lines in a module.
-max-module-lines=2500
-
-# List of optional constructs for which whitespace checking is disabled. `dict-
-# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
-# `trailing-comma` allows a space between comma and closing bracket: (a, ).
-# `empty-line` allows space-only lines.
-no-space-check=trailing-comma,
-               dict-separator
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[SIMILARITIES]
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-# Ignore imports when computing similarities.
-ignore-imports=no
-
-# Minimum lines number of a similarity.
-min-similarity-lines=9
+# In verbose mode, extra non-checker-related info will be displayed.
+#verbose=
 
 
 [BASIC]
 
@@ -369,13 +117,15 @@ min-similarity-lines=9
 # Naming style matching correct argument names.
 argument-naming-style=snake_case
 
 # Regular expression matching correct argument names. Overrides argument-
-# naming-style.
+# naming-style. If left empty, argument names will be checked with the set
+# naming style.
 #argument-rgx=
 
 # Naming style matching correct attribute names.
 attr-naming-style=snake_case
 
 # Regular expression matching correct attribute names. Overrides attr-naming-
+# style. If left empty, attribute names will be checked with the set naming
 # style.
 #attr-rgx=
 
@@ -387,24 +137,38 @@ bad-names=foo,
           tutu,
           tata
 
+# Bad variable names regexes, separated by a comma.
If names match any regex, +# they will always be refused +bad-names-rgxs= + # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. #class-attribute-rgx= +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- -# style. +# style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming # style. #const-rgx= @@ -416,7 +180,8 @@ docstring-min-length=-1 function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- -# naming-style. +# naming-style. If left empty, function names will be checked with the set +# naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. @@ -427,6 +192,10 @@ good-names=i, Run, _ +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + # Include a hint for the correct naming format with invalid-name. include-naming-hint=no @@ -434,21 +203,22 @@ include-naming-hint=no inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- -# style. +# style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- -# style. +# style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when @@ -464,86 +234,63 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- -# naming-style. +# naming-style. If left empty, variable names will be checked with the set +# naming style. 
 #variable-rgx=
 
 
-[STRING]
-
-# This flag controls whether the implicit-str-concat-in-sequence should
-# generate a warning on implicit string concatenation in sequences defined over
-# several lines.
-check-str-concat-over-line-jumps=no
-
-
-[IMPORTS]
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Deprecated modules which should not be used, separated by a comma.
-deprecated-modules=optparse,tkinter.tix
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled).
-ext-import-graph=
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled).
-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled).
-int-import-graph=
-
-# Force import order to recognize a module as part of the standard
-# compatibility libraries.
-known-standard-library=
-
-# Force import order to recognize a module as part of a third party library.
-known-third-party=enchant
-
-
 [CLASSES]
 
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
 # List of method names used to declare (i.e. assign) instance attributes.
 defining-attr-methods=__init__,
                       __new__,
-                      setUp
+                      setUp,
+                      asyncSetUp,
+                      __post_init__
 
 # List of member names, which should be excluded from the protected access
 # warning.
-exclude-protected=_asdict,
-                  _fields,
-                  _replace,
-                  _source,
-                  _make
+exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
 
 # List of valid names for the first argument in a class method.
 valid-classmethod-first-arg=cls
 
 # List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=cls
+valid-metaclass-classmethod-first-arg=mcs
 
 
 [DESIGN]
 
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
 # Maximum number of arguments for function / method.
 max-args=8
 
 # Maximum number of attributes for a class (see R0902).
-max-attributes=7
+max-attributes=8
 
-# Maximum number of boolean expressions in an if statement.
+# Maximum number of boolean expressions in an if statement (see R0916).
 max-bool-expr=5
 
 # Maximum number of branch for function / method body.
@@ -570,7 +317,320 @@ min-public-methods=2
 
 [EXCEPTIONS]
 
-# Exceptions that will emit a warning when being caught. Defaults to
-# "BaseException, Exception".
-overgeneral-exceptions=BaseException,
-                       Exception
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions=builtins.BaseException,builtins.Exception
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=2500 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + consider-using-f-string, + no-member, + no-else-return, + no-else-raise, + inconsistent-return-statements + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 
'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=20 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work.. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. 
+check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. 
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 5e5c760..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-sudo: false
-language: python
-install: python -m pip install tox
-script: python -m tox -e py
-matrix:
-  include:
-    - python: 3.5
-      env: TOXENV=py35
-    - python: 3.6
-      env: TOXENV=py36
-    - python: 3.7
-      dist: xenial
-      env: TOXENV=py37
-    - python: 3.8
-      dist: xenial
-      env: TOXENV=py38
-    - python: 3.8
-      dist: xenial
-      env: TOXENV=pylint
diff --git a/LICENSE b/LICENSE
index 3259b98..bb4cfb7 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright 2016-2019 Grant Jenks
+Copyright 2016-2022 Grant Jenks
 
 Licensed under the Apache License, Version 2.0 (the "License"); you may not use
 this file except in compliance with the License. You may obtain a copy of the
diff --git a/README.rst b/README.rst
index 57b7a2e..04abdc0 100644
--- a/README.rst
+++ b/README.rst
@@ -4,7 +4,7 @@
 `DiskCache`_ is an Apache2 licensed disk and file backed cache library, written
 in pure-Python, and compatible with Django.
 
-The cloud-based computing of 2019 puts a premium on memory. Gigabytes of empty
+The cloud-based computing of 2023 puts a premium on memory. Gigabytes of empty
 space is left on disks as processes vie for memory. Among these processes is
 Memcached (and sometimes Redis) which is used as a cache. Wouldn't it be nice
 to leverage empty disk space for caching?
@@ -77,16 +77,16 @@ Features
 - Thread-safe and process-safe
 - Supports multiple eviction policies (LRU and LFU included)
 - Keys support "tag" metadata and eviction
-- Developed on Python 3.7
-- Tested on CPython 2.7, 3.4, 3.5, 3.6, 3.7 and PyPy
+- Developed on Python 3.10
+- Tested on CPython 3.8, 3.9, 3.10, 3.11
 - Tested on Linux, Mac OS X, and Windows
-- Tested using Travis CI and AppVeyor CI
+- Tested using GitHub Actions
 
-.. image:: https://api.travis-ci.org/grantjenks/python-diskcache.svg?branch=master
-   :target: http://www.grantjenks.com/docs/diskcache/
+.. image:: https://github.com/grantjenks/python-diskcache/workflows/integration/badge.svg
+   :target: https://github.com/grantjenks/python-diskcache/actions?query=workflow%3Aintegration
 
-.. image:: https://ci.appveyor.com/api/projects/status/github/grantjenks/python-diskcache?branch=master&svg=true
-   :target: http://www.grantjenks.com/docs/diskcache/
+.. image:: https://github.com/grantjenks/python-diskcache/workflows/release/badge.svg
+   :target: https://github.com/grantjenks/python-diskcache/actions?query=workflow%3Arelease
 
 Quickstart
 ----------
@@ -99,7 +99,7 @@ You can access documentation in the interpreter with Python's built-in help
 function::
 
     >>> import diskcache
-    >>> help(diskcache)
+    >>> help(diskcache)  # doctest: +SKIP
 
 The core of `DiskCache`_ is three data types intended for caching. `Cache`_
 objects manage a SQLite database and filesystem directory to store key and
@@ -107,26 +107,26 @@ value pairs. `FanoutCache`_ provides a sharding layer to utilize multiple
 caches and `DjangoCache`_ integrates that with `Django`_::
 
     >>> from diskcache import Cache, FanoutCache, DjangoCache
-    >>> help(Cache)
-    >>> help(FanoutCache)
-    >>> help(DjangoCache)
+    >>> help(Cache)  # doctest: +SKIP
+    >>> help(FanoutCache)  # doctest: +SKIP
+    >>> help(DjangoCache)  # doctest: +SKIP
 
 Built atop the caching data types, are `Deque`_ and `Index`_ which work as a
 cross-process, persistent replacements for Python's ``collections.deque`` and
 ``dict``.
These implement the sequence and mapping container base classes:: >>> from diskcache import Deque, Index - >>> help(Deque) - >>> help(Index) + >>> help(Deque) # doctest: +SKIP + >>> help(Index) # doctest: +SKIP Finally, a number of `recipes`_ for cross-process synchronization are provided using an underlying cache. Features like memoization with cache stampede prevention, cross-process locking, and cross-process throttling are available:: >>> from diskcache import memoize_stampede, Lock, throttle - >>> help(memoize_stampede) - >>> help(Lock) - >>> help(throttle) + >>> help(memoize_stampede) # doctest: +SKIP + >>> help(Lock) # doctest: +SKIP + >>> help(throttle) # doctest: +SKIP Python's docstrings are a quick way to get started but not intended as a replacement for the `DiskCache Tutorial`_ and `DiskCache API Reference`_. @@ -185,7 +185,7 @@ other projects are shown in the tables below. access. Keys are arbitrary strings, values arbitrary pickle-able objects. * `pickleDB`_ is a lightweight and simple key-value store. It is built upon Python's simplejson module and was inspired by Redis. It is licensed with the - BSD three-caluse license. + BSD three-clause license. .. _`dbm`: https://docs.python.org/3/library/dbm.html .. _`shelve`: https://docs.python.org/3/library/shelve.html @@ -314,7 +314,6 @@ Object Relational Mappings (ORM) .. _`Django ORM`: https://docs.djangoproject.com/en/dev/topics/db/ .. _`SQLAlchemy`: https://www.sqlalchemy.org/ .. _`Peewee`: http://docs.peewee-orm.com/ -.. _`dataset`: https://dataset.readthedocs.io/ .. _`SQLObject`: http://sqlobject.org/ .. _`Pony ORM`: https://ponyorm.com/ @@ -388,7 +387,7 @@ Reference License ------- -Copyright 2016-2019 Grant Jenks +Copyright 2016-2023 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 1f52a08..0000000 --- a/appveyor.yml +++ /dev/null @@ -1,22 +0,0 @@ -environment: - - matrix: - - - PYTHON: "C:\\Python35" - - PYTHON: "C:\\Python36" - - PYTHON: "C:\\Python37" - - PYTHON: "C:\\Python38" - - PYTHON: "C:\\Python35-x64" - - PYTHON: "C:\\Python36-x64" - - PYTHON: "C:\\Python37-x64" - - PYTHON: "C:\\Python38-x64" - -install: - - - "%PYTHON%\\python.exe -m pip install tox" - -build: off - -test_script: - - - "%PYTHON%\\python.exe -m tox -e py" diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 192524e..7757d66 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -3,15 +3,31 @@ ======================= The :doc:`tutorial` provides a helpful walkthrough of most methods. 
- """ -from .core import Cache, Disk, EmptyDirWarning, JSONDisk, UnknownFileWarning, Timeout -from .core import DEFAULT_SETTINGS, ENOVAL, EVICTION_POLICY, UNKNOWN +from .core import ( + DEFAULT_SETTINGS, + ENOVAL, + EVICTION_POLICY, + UNKNOWN, + Cache, + Disk, + EmptyDirWarning, + JSONDisk, + Timeout, + UnknownFileWarning, +) from .fanout import FanoutCache from .persistent import Deque, Index -from .recipes import Averager, BoundedSemaphore, Lock, RLock -from .recipes import barrier, memoize_stampede, throttle +from .recipes import ( + Averager, + BoundedSemaphore, + Lock, + RLock, + barrier, + memoize_stampede, + throttle, +) __all__ = [ 'Averager', @@ -37,15 +53,16 @@ ] try: - from .djangocache import DjangoCache # pylint: disable=wrong-import-position + from .djangocache import DjangoCache # noqa + __all__.append('DjangoCache') -except Exception: # pylint: disable=broad-except +except Exception: # pylint: disable=broad-except # pragma: no cover # Django not installed or not setup so ignore. pass __title__ = 'diskcache' -__version__ = '4.1.0' -__build__ = 0x040100 +__version__ = '5.6.3' +__build__ = 0x050603 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2016-2018 Grant Jenks' +__copyright__ = 'Copyright 2016-2023 Grant Jenks' diff --git a/diskcache/cli.py b/diskcache/cli.py index 44bffeb..6a39f60 100644 --- a/diskcache/cli.py +++ b/diskcache/cli.py @@ -1 +1 @@ -"Command line interface to disk cache." +"""Command line interface to disk cache.""" diff --git a/diskcache/core.py b/diskcache/core.py index 0c8fd2c..0e912b0 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -1,5 +1,4 @@ """Core disk and file backed cache API. - """ import codecs @@ -10,67 +9,32 @@ import json import os import os.path as op +import pickle import pickletools import sqlite3 import struct -import sys import tempfile import threading import time import warnings import zlib -############################################################################ -# BEGIN Python 2/3 Shims -############################################################################ - -if sys.hexversion < 0x03000000: - import cPickle as pickle # pylint: disable=import-error - # ISSUE #25 Fix for http://bugs.python.org/issue10211 - from cStringIO import StringIO as BytesIO # pylint: disable=import-error - from thread import get_ident # pylint: disable=import-error,no-name-in-module - TextType = unicode # pylint: disable=invalid-name,undefined-variable - BytesType = str - INT_TYPES = int, long # pylint: disable=undefined-variable - range = xrange # pylint: disable=redefined-builtin,invalid-name,undefined-variable - io_open = io.open # pylint: disable=invalid-name -else: - import pickle - from io import BytesIO # pylint: disable=ungrouped-imports - from threading import get_ident - TextType = str - BytesType = bytes - INT_TYPES = (int,) - io_open = open # pylint: disable=invalid-name def full_name(func): - "Return full name of `func` by adding the module and function name." - try: - # The __qualname__ attribute is only available in Python 3.3 and later. - # GrantJ 2019-03-29 Remove after support for Python 2 is dropped. - name = func.__qualname__ - except AttributeError: - name = func.__name__ - return func.__module__ + '.' 
+ name - -############################################################################ -# END Python 2/3 Shims -############################################################################ - -try: - WindowsError -except NameError: - class WindowsError(Exception): - "Windows error place-holder on platforms without support." + """Return full name of `func` by adding the module and function name.""" + return func.__module__ + '.' + func.__qualname__ + class Constant(tuple): - "Pretty display of immutable constant." + """Pretty display of immutable constant.""" + def __new__(cls, name): return tuple.__new__(cls, (name,)) def __repr__(self): return '%s' % self[0] + DBNAME = 'cache.db' ENOVAL = Constant('ENOVAL') UNKNOWN = Constant('UNKNOWN') @@ -82,27 +46,31 @@ def __repr__(self): MODE_PICKLE = 4 DEFAULT_SETTINGS = { - u'statistics': 0, # False - u'tag_index': 0, # False - u'eviction_policy': u'least-recently-stored', - u'size_limit': 2 ** 30, # 1gb - u'cull_limit': 10, - u'sqlite_auto_vacuum': 1, # FULL - u'sqlite_cache_size': 2 ** 13, # 8,192 pages - u'sqlite_journal_mode': u'wal', - u'sqlite_mmap_size': 2 ** 26, # 64mb - u'sqlite_synchronous': 1, # NORMAL - u'disk_min_file_size': 2 ** 15, # 32kb - u'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, + 'statistics': 0, # False + 'tag_index': 0, # False + 'eviction_policy': 'least-recently-stored', + 'size_limit': 2**30, # 1gb + 'cull_limit': 10, + 'sqlite_query_only': 0, + 'sqlite_auto_vacuum': 1, # FULL + 'sqlite_cache_size': 2**13, # 8,192 pages + 'sqlite_journal_mode': 'wal', + 'sqlite_mmap_size': 2**26, # 64mb + 'sqlite_synchronous': 1, # NORMAL + 'disk_min_file_size': 2**15, # 32kb + 'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, } METADATA = { - u'count': 0, - u'size': 0, - u'hits': 0, - u'misses': 0, + 'count': 0, + 'size': 0, + 'hits': 0, + 'misses': 0, } +# these cannot be verified for a read_only database +UNVERIFIABLE_SETTINGS = ['sqlite_query_only'] + EVICTION_POLICY = { 'none': { 'init': None, @@ -136,8 +104,9 @@ def __repr__(self): } -class Disk(object): - "Cache key and value serialization for SQLite database and files." +class Disk: + """Cache key and value serialization for SQLite database and files.""" + def __init__(self, directory, min_file_size=0, pickle_protocol=0): """Initialize disk instance. @@ -150,7 +119,6 @@ def __init__(self, directory, min_file_size=0, pickle_protocol=0): self.min_file_size = min_file_size self.pickle_protocol = pickle_protocol - def hash(self, key): """Compute portable hash for `key`. @@ -164,15 +132,14 @@ def hash(self, key): if type_disk_key is sqlite3.Binary: return zlib.adler32(disk_key) & mask - elif type_disk_key is TextType: - return zlib.adler32(disk_key.encode('utf-8')) & mask # pylint: disable=no-member - elif type_disk_key in INT_TYPES: + elif type_disk_key is str: + return zlib.adler32(disk_key.encode('utf-8')) & mask # noqa + elif type_disk_key is int: return disk_key % mask else: assert type_disk_key is float return zlib.adler32(struct.pack('!d', disk_key)) & mask - def put(self, key): """Convert `key` to fields key and raw for Cache table. 
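
For orientation on the hunks above and below: ``Disk.put`` stores bytes, str, bounded int, and float keys directly in SQLite, while every other key type is pickled and run through ``pickletools.optimize``. A minimal doctest-style sketch of the observable behavior through the public ``Cache`` API (illustrative only; it assumes a stock diskcache 5.x install and is not code from this patch)::

    >>> from diskcache import Cache
    >>> cache = Cache()                   # temporary directory is created
    >>> cache[b'key'] = 'bytes keys are stored raw'
    >>> cache[3.14] = 'float keys are stored raw'
    >>> cache[('a', 1)] = 'tuple keys are pickled'
    >>> cache[('a', 1)]
    'tuple keys are pickled'
    >>> cache.close()

Raw storage keeps key lookups index-friendly; the pickle fallback keeps arbitrary hashable keys usable at a small serialization cost.
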
@@ -180,22 +147,25 @@ def put(self, key): :return: (database key, raw boolean) pair """ - # pylint: disable=bad-continuation,unidiomatic-typecheck + # pylint: disable=unidiomatic-typecheck type_key = type(key) - if type_key is BytesType: + if type_key is bytes: return sqlite3.Binary(key), True - elif ((type_key is TextType) - or (type_key in INT_TYPES - and -9223372036854775808 <= key <= 9223372036854775807) - or (type_key is float)): + elif ( + (type_key is str) + or ( + type_key is int + and -9223372036854775808 <= key <= 9223372036854775807 + ) + or (type_key is float) + ): return key, True else: data = pickle.dumps(key, protocol=self.pickle_protocol) result = pickletools.optimize(data) return sqlite3.Binary(result), False - def get(self, key, raw): """Convert fields `key` and `raw` from Cache table to key. @@ -204,12 +174,11 @@ def get(self, key, raw): :return: corresponding Python key """ - # pylint: disable=no-self-use,unidiomatic-typecheck + # pylint: disable=unidiomatic-typecheck if raw: - return BytesType(key) if type(key) is sqlite3.Binary else key + return bytes(key) if type(key) is sqlite3.Binary else key else: - return pickle.load(BytesIO(key)) - + return pickle.load(io.BytesIO(key)) def store(self, value, read, key=UNKNOWN): """Convert `value` to fields size, mode, filename, and value for Cache @@ -225,39 +194,32 @@ def store(self, value, read, key=UNKNOWN): type_value = type(value) min_file_size = self.min_file_size - if ((type_value is TextType and len(value) < min_file_size) - or (type_value in INT_TYPES - and -9223372036854775808 <= value <= 9223372036854775807) - or (type_value is float)): + if ( + (type_value is str and len(value) < min_file_size) + or ( + type_value is int + and -9223372036854775808 <= value <= 9223372036854775807 + ) + or (type_value is float) + ): return 0, MODE_RAW, None, value - elif type_value is BytesType: + elif type_value is bytes: if len(value) < min_file_size: return 0, MODE_RAW, None, sqlite3.Binary(value) else: filename, full_path = self.filename(key, value) - - with open(full_path, 'wb') as writer: - writer.write(value) - + self._write(full_path, io.BytesIO(value), 'xb') return len(value), MODE_BINARY, filename, None - elif type_value is TextType: + elif type_value is str: filename, full_path = self.filename(key, value) - - with io_open(full_path, 'w', encoding='UTF-8') as writer: - writer.write(value) - + self._write(full_path, io.StringIO(value), 'x', 'UTF-8') size = op.getsize(full_path) return size, MODE_TEXT, filename, None elif read: - size = 0 - reader = ft.partial(value.read, 2 ** 22) + reader = ft.partial(value.read, 2**22) filename, full_path = self.filename(key, value) - - with open(full_path, 'wb') as writer: - for chunk in iter(reader, b''): - size += len(chunk) - writer.write(chunk) - + iterator = iter(reader, b'') + size = self._write(full_path, iterator, 'xb') return size, MODE_BINARY, filename, None else: result = pickle.dumps(value, protocol=self.pickle_protocol) @@ -266,12 +228,32 @@ def store(self, value, read, key=UNKNOWN): return 0, MODE_PICKLE, None, sqlite3.Binary(result) else: filename, full_path = self.filename(key, value) + self._write(full_path, io.BytesIO(result), 'xb') + return len(result), MODE_PICKLE, filename, None - with open(full_path, 'wb') as writer: - writer.write(result) + def _write(self, full_path, iterator, mode, encoding=None): + full_dir, _ = op.split(full_path) - return len(result), MODE_PICKLE, filename, None + for count in range(1, 11): + with cl.suppress(OSError): + os.makedirs(full_dir) + + 
try: + # Another cache may have deleted the directory before + # the file could be opened. + writer = open(full_path, mode, encoding=encoding) + except OSError: + if count == 10: + # Give up after 10 tries to open the file. + raise + continue + with writer: + size = 0 + for chunk in iterator: + size += len(chunk) + writer.write(chunk) + return size def fetch(self, mode, filename, value, read): """Convert fields `mode`, `filename`, and `value` from Cache table to @@ -282,11 +264,12 @@ def fetch(self, mode, filename, value, read): :param value: database value :param bool read: when True, return an open file handle :return: corresponding Python value + :raises: IOError if the value cannot be read """ - # pylint: disable=no-self-use,unidiomatic-typecheck + # pylint: disable=unidiomatic-typecheck,consider-using-with if mode == MODE_RAW: - return BytesType(value) if type(value) is sqlite3.Binary else value + return bytes(value) if type(value) is sqlite3.Binary else value elif mode == MODE_BINARY: if read: return open(op.join(self._directory, filename), 'rb') @@ -295,15 +278,14 @@ def fetch(self, mode, filename, value, read): return reader.read() elif mode == MODE_TEXT: full_path = op.join(self._directory, filename) - with io_open(full_path, 'r', encoding='UTF-8') as reader: + with open(full_path, 'r', encoding='UTF-8') as reader: return reader.read() elif mode == MODE_PICKLE: if value is None: with open(op.join(self._directory, filename), 'rb') as reader: return pickle.load(reader) else: - return pickle.load(BytesIO(value)) - + return pickle.load(io.BytesIO(value)) def filename(self, key=UNKNOWN, value=UNKNOWN): """Return filename and full-path tuple for file storage. @@ -327,43 +309,35 @@ def filename(self, key=UNKNOWN, value=UNKNOWN): hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') sub_dir = op.join(hex_name[:2], hex_name[2:4]) name = hex_name[4:] + '.val' - directory = op.join(self._directory, sub_dir) - - try: - os.makedirs(directory) - except OSError as error: - if error.errno != errno.EEXIST: - raise - filename = op.join(sub_dir, name) full_path = op.join(self._directory, filename) return filename, full_path + def remove(self, file_path): + """Remove a file given by `file_path`. - def remove(self, filename): - """Remove a file given by `filename`. + This method is cross-thread and cross-process safe. If an OSError + occurs, it is suppressed. - This method is cross-thread and cross-process safe. If an "error no - entry" occurs, it is suppressed. - - :param str filename: relative path to file + :param str file_path: relative path to file """ - full_path = op.join(self._directory, filename) + full_path = op.join(self._directory, file_path) + full_dir, _ = op.split(full_path) - try: + # Suppress OSError that may occur if two caches attempt to delete the + # same file or directory at the same time. + + with cl.suppress(OSError): os.remove(full_path) - except WindowsError: - pass - except OSError as error: - if error.errno != errno.ENOENT: - # ENOENT may occur if two caches attempt to delete the same - # file at the same time. - raise + + with cl.suppress(OSError): + os.removedirs(full_dir) class JSONDisk(Disk): - "Cache key and value using JSON serialization with zlib compression." + """Cache key and value using JSON serialization with zlib compression.""" + def __init__(self, directory, compress_level=1, **kwargs): """Initialize JSON disk instance. 
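
``JSONDisk`` swaps the pickle fallback for JSON plus zlib compression, which keeps stored values readable from other runtimes at the cost of accepting JSON-serializable data only. A usage sketch (illustrative only; it relies on the ``disk_``-prefix keyword forwarding visible later in this patch, where ``disk_compress_level`` becomes the ``compress_level`` argument here)::

    >>> from diskcache import Cache, JSONDisk
    >>> with Cache(disk=JSONDisk, disk_compress_level=6) as cache:
    ...     cache['config'] = {'retries': 3, 'hosts': ['a', 'b']}
    ...     cache['config'] == {'retries': 3, 'hosts': ['a', 'b']}
    True
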
@@ -378,60 +352,62 @@ def __init__(self, directory, compress_level=1, **kwargs): """ self.compress_level = compress_level - super(JSONDisk, self).__init__(directory, **kwargs) - + super().__init__(directory, **kwargs) def put(self, key): json_bytes = json.dumps(key).encode('utf-8') data = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).put(data) - + return super().put(data) def get(self, key, raw): - data = super(JSONDisk, self).get(key, raw) + data = super().get(key, raw) return json.loads(zlib.decompress(data).decode('utf-8')) - def store(self, value, read, key=UNKNOWN): if not read: json_bytes = json.dumps(value).encode('utf-8') value = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).store(value, read, key=key) - + return super().store(value, read, key=key) def fetch(self, mode, filename, value, read): - data = super(JSONDisk, self).fetch(mode, filename, value, read) + data = super().fetch(mode, filename, value, read) if not read: data = json.loads(zlib.decompress(data).decode('utf-8')) return data class Timeout(Exception): - "Database timeout expired." + """Database timeout expired.""" + + +class ReadOnlyError(Exception): + """Cache mutation in read-only mode.""" class UnknownFileWarning(UserWarning): - "Warning used by Cache.check for unknown files." + """Warning used by Cache.check for unknown files.""" class EmptyDirWarning(UserWarning): - "Warning used by Cache.check for empty directories." + """Warning used by Cache.check for empty directories.""" -def args_to_key(base, args, kwargs, typed): +def args_to_key(base, args, kwargs, typed, ignore): """Create cache key out of function arguments. :param tuple base: base of key :param tuple args: function arguments :param dict kwargs: function keyword arguments :param bool typed: include types in cache key + :param set ignore: positional or keyword args to ignore :return: cache key tuple """ - key = base + args + args = tuple(arg for index, arg in enumerate(args) if index not in ignore) + key = base + args + (None,) if kwargs: - key += (ENOVAL,) + kwargs = {key: val for key, val in kwargs.items() if key not in ignore} sorted_items = sorted(kwargs.items()) for item in sorted_items: @@ -446,9 +422,9 @@ def args_to_key(base, args, kwargs, typed): return key -class Cache(object): - "Disk and file backed cache." - # pylint: disable=bad-continuation +class Cache: + """Disk and file backed cache.""" + def __init__(self, directory=None, timeout=60, disk=Disk, **settings): """Initialize cache instance. @@ -461,10 +437,11 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): try: assert issubclass(disk, Disk) except (TypeError, AssertionError): - raise ValueError('disk must subclass diskcache.Disk') + raise ValueError('disk must subclass diskcache.Disk') from None if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') + directory = str(directory) directory = op.expanduser(directory) directory = op.expandvars(directory) @@ -481,17 +458,25 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): raise EnvironmentError( error.errno, 'Cache directory "%s" does not exist' - ' and could not be created' % self._directory - ) + ' and could not be created' % self._directory, + ) from None + + # The SQLite query_only pragma is a special case. Before the cache + # connection is opened, get the setting from keyword arguments. 
+ + self.sqlite_query_only = settings.get( + 'sqlite_query_only', + DEFAULT_SETTINGS['sqlite_query_only'], + ) sql = self._sql_retry # Setup Settings table. try: - current_settings = dict(sql( - 'SELECT key, value FROM Settings' - ).fetchall()) + current_settings = dict( + sql('SELECT key, value FROM Settings').fetchall() + ) except sqlite3.OperationalError: current_settings = {} @@ -508,7 +493,8 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): if key.startswith('sqlite_'): self.reset(key, value, update=False) - sql('CREATE TABLE IF NOT EXISTS Settings (' + sql( + 'CREATE TABLE IF NOT EXISTS Settings (' ' key TEXT NOT NULL UNIQUE,' ' value)' ) @@ -516,28 +502,20 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): # Setup Disk object (must happen after settings initialized). kwargs = { - key[5:]: value for key, value in sets.items() + key[5:]: value + for key, value in sets.items() if key.startswith('disk_') } self._disk = disk(directory, **kwargs) - # Set cached attributes: updates settings and sets pragmas. + self._synch_settings(sets, sql) - for key, value in sets.items(): - query = 'INSERT OR REPLACE INTO Settings VALUES (?, ?)' - sql(query, (key, value)) - self.reset(key, value) - - for key, value in METADATA.items(): - query = 'INSERT OR IGNORE INTO Settings VALUES (?, ?)' - sql(query, (key, value)) - self.reset(key) - - (self._page_size,), = sql('PRAGMA page_size').fetchall() + ((self._page_size,),) = sql('PRAGMA page_size').fetchall() # Setup Cache table. - sql('CREATE TABLE IF NOT EXISTS Cache (' + sql( + 'CREATE TABLE IF NOT EXISTS Cache (' ' rowid INTEGER PRIMARY KEY,' ' key BLOB,' ' raw INTEGER,' @@ -552,11 +530,13 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): ' value BLOB)' ) - sql('CREATE UNIQUE INDEX IF NOT EXISTS Cache_key_raw ON' + sql( + 'CREATE UNIQUE INDEX IF NOT EXISTS Cache_key_raw ON' ' Cache(key, raw)' ) - sql('CREATE INDEX IF NOT EXISTS Cache_expire_time ON' + sql( + 'CREATE INDEX IF NOT EXISTS Cache_expire_time ON' ' Cache (expire_time)' ) @@ -567,32 +547,37 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): # Use triggers to keep Metadata updated. 
- sql('CREATE TRIGGER IF NOT EXISTS Settings_count_insert' + sql( + 'CREATE TRIGGER IF NOT EXISTS Settings_count_insert' ' AFTER INSERT ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value + 1' ' WHERE key = "count"; END' ) - sql('CREATE TRIGGER IF NOT EXISTS Settings_count_delete' + sql( + 'CREATE TRIGGER IF NOT EXISTS Settings_count_delete' ' AFTER DELETE ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value - 1' ' WHERE key = "count"; END' ) - sql('CREATE TRIGGER IF NOT EXISTS Settings_size_insert' + sql( + 'CREATE TRIGGER IF NOT EXISTS Settings_size_insert' ' AFTER INSERT ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value + NEW.size' ' WHERE key = "size"; END' ) - sql('CREATE TRIGGER IF NOT EXISTS Settings_size_update' + sql( + 'CREATE TRIGGER IF NOT EXISTS Settings_size_update' ' AFTER UPDATE ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings' ' SET value = value + NEW.size - OLD.size' ' WHERE key = "size"; END' ) - sql('CREATE TRIGGER IF NOT EXISTS Settings_size_delete' + sql( + 'CREATE TRIGGER IF NOT EXISTS Settings_size_delete' ' AFTER DELETE ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value - OLD.size' ' WHERE key = "size"; END' @@ -611,25 +596,42 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): self._timeout = timeout self._sql # pylint: disable=pointless-statement + def _synch_settings(self, sets, sql): + # Set cached attributes: updates settings and sets pragmas. + + for key, value in sets.items(): + if self.sqlite_query_only: + query = 'SELECT value FROM Settings WHERE key = ?' + db_value = sql(query, (key,)).fetchall() + assert len(db_value) == 1 + if key not in UNVERIFIABLE_SETTINGS: + assert value == db_value[0][0] + else: + query = 'INSERT OR REPLACE INTO Settings VALUES (?, ?)' + sql(query, (key, value)) + self.reset(key, value, update=not self.sqlite_query_only) + + for key, value in METADATA.items(): + if not self.sqlite_query_only: + query = 'INSERT OR IGNORE INTO Settings VALUES (?, ?)' + sql(query, (key, value)) + self.reset(key, update=not self.sqlite_query_only) @property def directory(self): """Cache directory.""" return self._directory - @property def timeout(self): """SQLite connection timeout value in seconds.""" return self._timeout - @property def disk(self): """Disk used for serialization.""" return self._disk - @property def _con(self): # Check process ID to support process forking. If the process @@ -656,6 +658,8 @@ def _con(self): # Settings table may not exist so catch and ignore the # OperationalError that may occur. + sqlite_query_only = self.sqlite_query_only + try: select = 'SELECT key, value FROM Settings' settings = con.execute(select).fetchall() @@ -666,14 +670,18 @@ def _con(self): if key.startswith('sqlite_'): self.reset(key, value, update=False) - return con + # The settings read from the database always contain sqlite_query_only=0. + # So the above loop will have overwritten self.sqlite_query_only + # Here we restore it to the value we had before + if sqlite_query_only: + self.reset('sqlite_query_only', 1, update=False) + return con @property def _sql(self): return self._con.execute - @property def _sql_retry(self): sql = self._sql @@ -701,7 +709,6 @@ def _execute_with_retry(statement, *args, **kwargs): return _execute_with_retry - @cl.contextmanager def transact(self, retry=False): """Context manager to perform a transaction by locking the cache. 
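Taken together, the pieces above sketch a read-only mode: _synch_settings verifies settings against the database instead of writing them, _con re-asserts the pragma after reloading settings, and _transact (below) raises the new ReadOnlyError before any write begins. A hedged usage sketch, assuming the directory already holds a cache created with compatible settings; the path is hypothetical, and ReadOnlyError is imported from diskcache.core where this patch defines it:

    from diskcache import Cache
    from diskcache.core import ReadOnlyError

    writer = Cache('/tmp/demo-cache')
    writer.set('alpha', 1)

    # Second handle with the SQLite query_only pragma enabled.
    reader = Cache('/tmp/demo-cache', sqlite_query_only=True)
    assert reader.get('alpha') == 1

    try:
        reader.set('beta', 2)  # set() enters _transact(), which raises
    except ReadOnlyError:
        print('mutation rejected in read-only mode')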
@@ -733,13 +740,15 @@ def transact(self, retry=False): with self._transact(retry=retry): yield - @cl.contextmanager def _transact(self, retry=False, filename=None): + if self.sqlite_query_only: + raise ReadOnlyError + sql = self._sql filenames = [] _disk_remove = self._disk.remove - tid = get_ident() + tid = threading.get_ident() txn_id = self._txn_id if tid == txn_id: @@ -756,7 +765,7 @@ def _transact(self, retry=False, filename=None): continue if filename is not None: _disk_remove(filename) - raise Timeout + raise Timeout from None try: yield sql, filenames.append @@ -775,7 +784,6 @@ def _transact(self, retry=False, filename=None): if name is not None: _disk_remove(name) - def set(self, key, value, expire=None, read=False, tag=None, retry=False): """Set `key` and `value` item in cache. @@ -831,7 +839,7 @@ def set(self, key, value, expire=None, read=False, tag=None, retry=False): ).fetchall() if rows: - (rowid, old_filename), = rows + ((rowid, old_filename),) = rows cleanup(old_filename) self._row_update(rowid, now, columns) else: @@ -841,7 +849,6 @@ def set(self, key, value, expire=None, read=False, tag=None, retry=False): return True - def __setitem__(self, key, value): """Set corresponding `value` for `key` in cache. @@ -853,11 +860,11 @@ def __setitem__(self, key, value): """ self.set(key, value, retry=True) - def _row_update(self, rowid, now, columns): sql = self._sql expire_time, tag, size, mode, filename, value = columns - sql('UPDATE Cache SET' + sql( + 'UPDATE Cache SET' ' store_time = ?,' ' expire_time = ?,' ' access_time = ?,' @@ -867,11 +874,12 @@ def _row_update(self, rowid, now, columns): ' mode = ?,' ' filename = ?,' ' value = ?' - ' WHERE rowid = ?', ( - now, # store_time + ' WHERE rowid = ?', + ( + now, # store_time expire_time, - now, # access_time - 0, # access_count + now, # access_time + 0, # access_count tag, size, mode, @@ -881,20 +889,21 @@ def _row_update(self, rowid, now, columns): ), ) - def _row_insert(self, key, raw, now, columns): sql = self._sql expire_time, tag, size, mode, filename, value = columns - sql('INSERT INTO Cache(' + sql( + 'INSERT INTO Cache(' ' key, raw, store_time, expire_time, access_time,' ' access_count, tag, size, mode, filename, value' - ') VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', ( + ') VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + ( key, raw, - now, # store_time + now, # store_time expire_time, - now, # access_time - 0, # access_count + now, # access_time + 0, # access_count tag, size, mode, @@ -903,7 +912,6 @@ def _row_insert(self, key, raw, now, columns): ), ) - def _cull(self, now, sql, cleanup, limit=None): cull_limit = self.cull_limit if limit is None else limit @@ -922,13 +930,12 @@ def _cull(self, now, sql, cleanup, limit=None): rows = sql(select_expired, (now, cull_limit)).fetchall() if rows: - delete_expired = ( - 'DELETE FROM Cache WHERE rowid IN (%s)' - % (select_expired_template % 'rowid') + delete_expired = 'DELETE FROM Cache WHERE rowid IN (%s)' % ( + select_expired_template % 'rowid' ) sql(delete_expired, (now, cull_limit)) - for filename, in rows: + for (filename,) in rows: cleanup(filename) cull_limit -= len(rows) @@ -947,16 +954,14 @@ def _cull(self, now, sql, cleanup, limit=None): rows = sql(select_filename, (cull_limit,)).fetchall() if rows: - delete = ( - 'DELETE FROM Cache WHERE rowid IN (%s)' - % (select_policy.format(fields='rowid', now=now)) + delete = 'DELETE FROM Cache WHERE rowid IN (%s)' % ( + select_policy.format(fields='rowid', now=now) ) sql(delete, (cull_limit,)) - for filename, in rows: + for 
(filename,) in rows: cleanup(filename) - def touch(self, key, expire=None, retry=False): """Touch `key` in cache and update `expire` time. @@ -983,17 +988,17 @@ def touch(self, key, expire=None, retry=False): ).fetchall() if rows: - (rowid, old_expire_time), = rows + ((rowid, old_expire_time),) = rows if old_expire_time is None or old_expire_time > now: - sql('UPDATE Cache SET expire_time = ? WHERE rowid = ?', + sql( + 'UPDATE Cache SET expire_time = ? WHERE rowid = ?', (expire_time, rowid), ) return True return False - def add(self, key, value, expire=None, read=False, tag=None, retry=False): """Add `key` and `value` item to cache. @@ -1033,7 +1038,7 @@ def add(self, key, value, expire=None, read=False, tag=None, retry=False): ).fetchall() if rows: - (rowid, old_filename, old_expire_time), = rows + ((rowid, old_filename, old_expire_time),) = rows if old_expire_time is None or old_expire_time > now: cleanup(filename) @@ -1048,7 +1053,6 @@ def add(self, key, value, expire=None, read=False, tag=None, retry=False): return True - def incr(self, key, delta=1, default=0, retry=False): """Increment value by delta for item with key. @@ -1089,19 +1093,23 @@ def incr(self, key, delta=1, default=0, retry=False): raise KeyError(key) value = default + delta - columns = (None, None) + self._disk.store(value, False, key=key) + columns = (None, None) + self._disk.store( + value, False, key=key + ) self._row_insert(db_key, raw, now, columns) self._cull(now, sql, cleanup) return value - (rowid, expire_time, filename, value), = rows + ((rowid, expire_time, filename, value),) = rows if expire_time is not None and expire_time < now: if default is None: raise KeyError(key) value = default + delta - columns = (None, None) + self._disk.store(value, False, key=key) + columns = (None, None) + self._disk.store( + value, False, key=key + ) self._row_update(rowid, now, columns) self._cull(now, sql, cleanup) cleanup(filename) @@ -1120,7 +1128,6 @@ def incr(self, key, delta=1, default=0, retry=False): return value - def decr(self, key, delta=1, default=0, retry=False): """Decrement value by delta for item with key. @@ -1151,9 +1158,15 @@ def decr(self, key, delta=1, default=0, retry=False): """ return self.incr(key, -delta, default, retry) - - def get(self, key, default=None, read=False, expire_time=False, tag=False, - retry=False): + def get( + self, + key, + default=None, + read=False, + expire_time=False, + tag=False, + retry=False, + ): """Retrieve value from cache. If `key` is missing, return `default`. Raises :exc:`Timeout` error when database timeout occurs and `retry` is @@ -1192,7 +1205,7 @@ def get(self, key, default=None, read=False, expire_time=False, tag=False, if not rows: return default - (rowid, db_expire_time, db_tag, mode, filename, db_value), = rows + ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows try: value = self._disk.fetch(mode, filename, db_value, read) @@ -1216,19 +1229,17 @@ def get(self, key, default=None, read=False, expire_time=False, tag=False, sql(cache_miss) return default - (rowid, db_expire_time, db_tag, - mode, filename, db_value), = rows + ( + (rowid, db_expire_time, db_tag, mode, filename, db_value), + ) = rows # noqa: E127 try: value = self._disk.fetch(mode, filename, db_value, read) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - if self.statistics: - sql(cache_miss) - return default - else: - raise + except IOError: + # Key was deleted before we could retrieve result. 
+ if self.statistics: + sql(cache_miss) + return default if self.statistics: sql(cache_hit) @@ -1248,7 +1259,6 @@ def get(self, key, default=None, read=False, expire_time=False, tag=False, else: return value - def __getitem__(self, key): """Return corresponding value for `key` from cache. @@ -1262,7 +1272,6 @@ def __getitem__(self, key): raise KeyError(key) return value - def read(self, key, retry=False): """Return file handle value corresponding to `key` from cache. @@ -1281,7 +1290,6 @@ def read(self, key, retry=False): raise KeyError(key) return handle - def __contains__(self, key): """Return `True` if `key` matching item is found in cache. @@ -1301,8 +1309,9 @@ def __contains__(self, key): return bool(rows) - - def pop(self, key, default=None, expire_time=False, tag=False, retry=False): + def pop( + self, key, default=None, expire_time=False, tag=False, retry=False + ): # noqa: E501 """Remove corresponding item for `key` from cache and return value. If `key` is missing, return `default`. @@ -1340,18 +1349,15 @@ def pop(self, key, default=None, expire_time=False, tag=False, retry=False): if not rows: return default - (rowid, db_expire_time, db_tag, mode, filename, db_value), = rows + ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows sql('DELETE FROM Cache WHERE rowid = ?', (rowid,)) try: value = self._disk.fetch(mode, filename, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - return default - else: - raise + except IOError: + # Key was deleted before we could retrieve result. + return default finally: if filename is not None: self._disk.remove(filename) @@ -1365,7 +1371,6 @@ def pop(self, key, default=None, expire_time=False, tag=False, retry=False): else: return value - def __delitem__(self, key, retry=True): """Delete corresponding item for `key` from cache. @@ -1391,13 +1396,12 @@ def __delitem__(self, key, retry=True): if not rows: raise KeyError(key) - (rowid, filename), = rows + ((rowid, filename),) = rows sql('DELETE FROM Cache WHERE rowid = ?', (rowid,)) cleanup(filename) return True - def delete(self, key, retry=False): """Delete corresponding item for `key` from cache. @@ -1412,14 +1416,22 @@ def delete(self, key, retry=False): :raises Timeout: if database timeout occurs """ + # pylint: disable=unnecessary-dunder-call try: return self.__delitem__(key, retry=retry) except KeyError: return False - - def push(self, value, prefix=None, side='back', expire=None, read=False, - tag=None, retry=False): + def push( + self, + value, + prefix=None, + side='back', + expire=None, + read=False, + tag=None, + retry=False, + ): """Push `value` onto `side` of queue identified by `prefix` in cache. When prefix is None, integer keys are used. Otherwise, string keys are @@ -1485,10 +1497,10 @@ def push(self, value, prefix=None, side='back', expire=None, read=False, rows = sql(select, (min_key, max_key, raw)).fetchall() if rows: - (key,), = rows + ((key,),) = rows if prefix is not None: - num = int(key[(key.rfind('-') + 1):]) + num = int(key[(key.rfind('-') + 1) :]) else: num = key @@ -1510,9 +1522,15 @@ def push(self, value, prefix=None, side='back', expire=None, read=False, return db_key - - def pull(self, prefix=None, default=(None, None), side='front', - expire_time=False, tag=False, retry=False): + def pull( + self, + prefix=None, + default=(None, None), + side='front', + expire_time=False, + tag=False, + retry=False, + ): """Pull key and value item pair from `side` of queue in cache. 
When prefix is None, integer keys are used. Otherwise, string keys are @@ -1593,8 +1611,9 @@ def pull(self, prefix=None, default=(None, None), side='front', if not rows: return default - (rowid, key, db_expire, db_tag, mode, name, - db_value), = rows + ( + (rowid, key, db_expire, db_tag, mode, name, db_value), + ) = rows sql('DELETE FROM Cache WHERE rowid = ?', (rowid,)) @@ -1605,12 +1624,9 @@ def pull(self, prefix=None, default=(None, None), side='front', try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - else: - raise + except IOError: + # Key was deleted before we could retrieve result. + continue finally: if name is not None: self._disk.remove(name) @@ -1625,9 +1641,15 @@ def pull(self, prefix=None, default=(None, None), side='front', else: return key, value - - def peek(self, prefix=None, default=(None, None), side='front', - expire_time=False, tag=False, retry=False): + def peek( + self, + prefix=None, + default=(None, None), + side='front', + expire_time=False, + tag=False, + retry=False, + ): """Peek at key and value item pair from `side` of queue in cache. When prefix is None, integer keys are used. Otherwise, string keys are @@ -1704,8 +1726,9 @@ def peek(self, prefix=None, default=(None, None), side='front', if not rows: return default - (rowid, key, db_expire, db_tag, mode, name, - db_value), = rows + ( + (rowid, key, db_expire, db_tag, mode, name, db_value), + ) = rows if db_expire is not None and db_expire < time.time(): sql('DELETE FROM Cache WHERE rowid = ?', (rowid,)) @@ -1715,15 +1738,9 @@ def peek(self, prefix=None, default=(None, None), side='front', try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - else: - raise - finally: - if name is not None: - self._disk.remove(name) + except IOError: + # Key was deleted before we could retrieve result. + continue break if expire_time and tag: @@ -1735,7 +1752,6 @@ def peek(self, prefix=None, default=(None, None), side='front', else: return key, value - def peekitem(self, last=True, expire_time=False, tag=False, retry=False): """Peek at key and value item pair in cache based on iteration order. @@ -1777,8 +1793,18 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): if not rows: raise KeyError('dictionary is empty') - (rowid, db_key, raw, db_expire, db_tag, mode, name, - db_value), = rows + ( + ( + rowid, + db_key, + raw, + db_expire, + db_tag, + mode, + name, + db_value, + ), + ) = rows if db_expire is not None and db_expire < time.time(): sql('DELETE FROM Cache WHERE rowid = ?', (rowid,)) @@ -1790,12 +1816,9 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - else: - raise + except IOError: + # Key was deleted before we could retrieve result. + continue break if expire_time and tag: @@ -1807,8 +1830,9 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): else: return key, value - - def memoize(self, name=None, typed=False, expire=None, tag=None): + def memoize( + self, name=None, typed=False, expire=None, tag=None, ignore=() + ): """Memoizing cache decorator. 
Decorator to wrap callable with memoizing function using cache. @@ -1867,6 +1891,7 @@ def memoize(self, name=None, typed=False, expire=None, tag=None): :param float expire: seconds until arguments expire (default None, no expiry) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -1875,12 +1900,12 @@ def memoize(self, name=None, typed=False, expire=None, tag=None): raise TypeError('name cannot be callable') def decorator(func): - "Decorator created by memoize() for callable `func`." + """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @ft.wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, default=ENOVAL, retry=True) @@ -1892,15 +1917,14 @@ def wrapper(*args, **kwargs): return result def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." - return args_to_key(base, args, kwargs, typed) + """Make key for cache given function arguments.""" + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper return decorator - def check(self, fix=False, retry=False): """Check database and file system consistency. @@ -1929,8 +1953,8 @@ def check(self, fix=False, retry=False): rows = sql('PRAGMA integrity_check').fetchall() - if len(rows) != 1 or rows[0][0] != u'ok': - for message, in rows: + if len(rows) != 1 or rows[0][0] != 'ok': + for (message,) in rows: warnings.warn(message) if fix: @@ -1961,7 +1985,8 @@ def check(self, fix=False, retry=False): warnings.warn(message % args) if fix: - sql('UPDATE Cache SET size = ?' + sql( + 'UPDATE Cache SET size = ?' ' WHERE rowid = ?', (real_size, rowid), ) @@ -2002,14 +2027,15 @@ def check(self, fix=False, retry=False): # Check Settings.count against count of Cache rows. self.reset('count') - (count,), = sql('SELECT COUNT(key) FROM Cache').fetchall() + ((count,),) = sql('SELECT COUNT(key) FROM Cache').fetchall() if self.count != count: message = 'Settings.count != COUNT(Cache.key); %d != %d' warnings.warn(message % (self.count, count)) if fix: - sql('UPDATE Settings SET value = ? WHERE key = ?', + sql( + 'UPDATE Settings SET value = ? WHERE key = ?', (count, 'count'), ) @@ -2017,20 +2043,20 @@ def check(self, fix=False, retry=False): self.reset('size') select_size = 'SELECT COALESCE(SUM(size), 0) FROM Cache' - (size,), = sql(select_size).fetchall() + ((size,),) = sql(select_size).fetchall() if self.size != size: message = 'Settings.size != SUM(Cache.size); %d != %d' warnings.warn(message % (self.size, size)) if fix: - sql('UPDATE Settings SET value = ? WHERE key =?', + sql( + 'UPDATE Settings SET value = ? WHERE key =?', (size, 'size'), ) return warns - def create_tag_index(self): """Create tag index on cache database. @@ -2039,10 +2065,12 @@ def create_tag_index(self): :raises Timeout: if database timeout occurs """ - sql = self._sql - sql('CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)') - self.reset('tag_index', 1) - + if self.tag_index == 0: + sql = self._sql + sql( + 'CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)' + ) + self.reset('tag_index', 1) def drop_tag_index(self): """Drop tag index on cache database. 
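The new ignore parameter threads through to args_to_key, which drops the listed positional indexes and keyword names before building the cache key. That makes it possible to memoize functions whose arguments include unhashable or non-deterministic values such as connections. A sketch with hypothetical function and method names:

    from diskcache import Cache

    cache = Cache()

    @cache.memoize(ignore={0, 'session'})
    def fetch_report(connection, report_id, session=None):
        # The key is derived from report_id alone: positional index 0
        # and the 'session' keyword are filtered out by args_to_key().
        return connection.get_report(report_id, session=session)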
@@ -2050,10 +2078,10 @@ def drop_tag_index(self): :raises Timeout: if database timeout occurs """ - sql = self._sql - sql('DROP INDEX IF EXISTS Cache_tag_rowid') - self.reset('tag_index', 0) - + if self.tag_index == 1: + sql = self._sql + sql('DROP INDEX IF EXISTS Cache_tag_rowid') + self.reset('tag_index', 0) def evict(self, tag, retry=False): """Remove items with matching `tag` from cache. @@ -2082,7 +2110,6 @@ def evict(self, tag, retry=False): args = [tag, 0, 100] return self._select_delete(select, args, arg_index=1, retry=retry) - def expire(self, now=None, retry=False): """Remove expired items from cache. @@ -2110,7 +2137,6 @@ def expire(self, now=None, retry=False): args = [0, now or time.time(), 100] return self._select_delete(select, args, row_index=1, retry=retry) - def cull(self, retry=False): """Cull items from cache until volume is less than size limit. @@ -2140,7 +2166,7 @@ def cull(self, retry=False): select_policy = EVICTION_POLICY[self.eviction_policy]['cull'] if select_policy is None: - return + return 0 select_filename = select_policy.format(fields='filename', now=now) @@ -2159,14 +2185,13 @@ def cull(self, retry=False): ) sql(delete, (10,)) - for filename, in rows: + for (filename,) in rows: cleanup(filename) except Timeout: - raise Timeout(count) + raise Timeout(count) from None return count - def clear(self, retry=False): """Remove all items from cache. @@ -2193,9 +2218,9 @@ def clear(self, retry=False): args = [0, 100] return self._select_delete(select, args, retry=retry) - - def _select_delete(self, select, args, row_index=0, arg_index=0, - retry=False): + def _select_delete( + self, select, args, row_index=0, arg_index=0, retry=False + ): count = 0 delete = 'DELETE FROM Cache WHERE rowid IN (%s)' @@ -2215,11 +2240,10 @@ def _select_delete(self, select, args, row_index=0, arg_index=0, cleanup(row[-1]) except Timeout: - raise Timeout(count) + raise Timeout(count) from None return count - def iterkeys(self, reverse=False): """Iterate Cache keys in database sort order. @@ -2263,7 +2287,7 @@ def iterkeys(self, reverse=False): row = sql(select).fetchall() if row: - (key, raw), = row + ((key, raw),) = row else: return @@ -2278,11 +2302,10 @@ def iterkeys(self, reverse=False): for key, raw in rows: yield _disk_get(key, raw) - def _iter(self, ascending=True): sql = self._sql rows = sql('SELECT MAX(rowid) FROM Cache').fetchall() - (max_rowid,), = rows + ((max_rowid,),) = rows yield # Signal ready. if max_rowid is None: @@ -2312,21 +2335,18 @@ def _iter(self, ascending=True): for rowid, key, raw in rows: yield _disk_get(key, raw) - def __iter__(self): - "Iterate keys in cache including expired items." + """Iterate keys in cache including expired items.""" iterator = self._iter() next(iterator) return iterator - def __reversed__(self): - "Reverse iterate keys in cache including expired items." + """Reverse iterate keys in cache including expired items.""" iterator = self._iter(ascending=False) next(iterator) return iterator - def stats(self, enable=True, reset=False): """Return cache statistics hits and misses. @@ -2346,22 +2366,18 @@ def stats(self, enable=True, reset=False): return result - def volume(self): """Return estimated total size of cache on disk. :return: size in bytes """ - (page_count,), = self._sql('PRAGMA page_count').fetchall() + ((page_count,),) = self._sql('PRAGMA page_count').fetchall() total_size = self._page_size * page_count + self.reset('size') return total_size - def close(self): - """Close database connection. 
- - """ + """Close database connection.""" con = getattr(self._local, 'con', None) if con is None: @@ -2374,30 +2390,25 @@ def close(self): except AttributeError: pass - def __enter__(self): # Create connection in thread. - connection = self._con # pylint: disable=unused-variable + # pylint: disable=unused-variable + connection = self._con # noqa return self - def __exit__(self, *exception): self.close() - def __len__(self): - "Count of items in cache including expired items." + """Count of items in cache including expired items.""" return self.reset('count') - def __getstate__(self): return (self.directory, self.timeout, type(self.disk)) - def __setstate__(self, state): self.__init__(*state) - def reset(self, key, value=ENOVAL, update=True): """Reset `key` and `value` item from Settings table. @@ -2431,7 +2442,7 @@ def reset(self, key, value=ENOVAL, update=True): if value is ENOVAL: select = 'SELECT value FROM Settings WHERE key = ?' - (value,), = sql_retry(select, (key,)).fetchall() + ((value,),) = sql_retry(select, (key,)).fetchall() setattr(self, key, value) return value @@ -2459,7 +2470,9 @@ def reset(self, key, value=ENOVAL, update=True): while True: try: try: - (old_value,), = sql('PRAGMA %s' % (pragma)).fetchall() + ((old_value,),) = sql( + 'PRAGMA %s' % (pragma) + ).fetchall() update = old_value != value except ValueError: update = True diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index 997b852..5dc8ce2 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -1,11 +1,12 @@ -"Django-compatible disk and file backed cache." +"""Django-compatible disk and file backed cache.""" from functools import wraps + from django.core.cache.backends.base import BaseCache try: from django.core.cache.backends.base import DEFAULT_TIMEOUT -except ImportError: +except ImportError: # pragma: no cover # For older versions of Django simply use 300 seconds. DEFAULT_TIMEOUT = 300 @@ -14,7 +15,8 @@ class DjangoCache(BaseCache): - "Django-compatible disk and file backed cache." + """Django-compatible disk and file backed cache.""" + def __init__(self, directory, params): """Initialize DjangoCache instance. @@ -22,19 +24,17 @@ def __init__(self, directory, params): :param dict params: cache parameters """ - super(DjangoCache, self).__init__(params) + super().__init__(params) shards = params.get('SHARDS', 8) timeout = params.get('DATABASE_TIMEOUT', 0.010) options = params.get('OPTIONS', {}) self._cache = FanoutCache(directory, shards, timeout, **options) - @property def directory(self): """Cache directory.""" return self._cache.directory - def cache(self, name): """Return Cache with given `name` in subdirectory. @@ -44,16 +44,15 @@ def cache(self, name): """ return self._cache.cache(name) - - def deque(self, name): + def deque(self, name, maxlen=None): """Return Deque with given `name` in subdirectory. :param str name: subdirectory name for Deque + :param maxlen: max length (default None, no max) :return: Deque with given name """ - return self._cache.deque(name) - + return self._cache.deque(name, maxlen=maxlen) def index(self, name): """Return Index with given `name` in subdirectory. @@ -64,9 +63,16 @@ def index(self, name): """ return self._cache.index(name) - - def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, - read=False, tag=None, retry=True): + def add( + self, + key, + value, + timeout=DEFAULT_TIMEOUT, + version=None, + read=False, + tag=None, + retry=True, + ): """Set a value in the cache if the key does not already exist. 
If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. @@ -89,9 +95,16 @@ def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, timeout = self.get_backend_timeout(timeout=timeout) return self._cache.add(key, value, timeout, read, tag, retry) - - def get(self, key, default=None, version=None, read=False, - expire_time=False, tag=False, retry=False): + def get( + self, + key, + default=None, + version=None, + read=False, + expire_time=False, + tag=False, + retry=False, + ): """Fetch a given key from the cache. If the key does not exist, return default, which itself defaults to None. @@ -111,7 +124,6 @@ def get(self, key, default=None, version=None, read=False, key = self.make_key(key, version=version) return self._cache.get(key, default, read, expire_time, tag, retry) - def read(self, key, version=None): """Return file handle corresponding to `key` from Cache. @@ -124,9 +136,16 @@ def read(self, key, version=None): key = self.make_key(key, version=version) return self._cache.read(key) - - def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, - read=False, tag=None, retry=True): + def set( + self, + key, + value, + timeout=DEFAULT_TIMEOUT, + version=None, + read=False, + tag=None, + retry=True, + ): """Set a value in the cache. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. @@ -146,7 +165,6 @@ def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, timeout = self.get_backend_timeout(timeout=timeout) return self._cache.set(key, value, timeout, read, tag, retry) - def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, retry=True): """Touch a key in the cache. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. @@ -164,9 +182,15 @@ def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, retry=True): timeout = self.get_backend_timeout(timeout=timeout) return self._cache.touch(key, timeout, retry) - - def pop(self, key, default=None, version=None, expire_time=False, - tag=False, retry=True): + def pop( + self, + key, + default=None, + version=None, + expire_time=False, + tag=False, + retry=True, + ): """Remove corresponding item for `key` from cache and return value. If `key` is missing, return `default`. @@ -186,7 +210,6 @@ def pop(self, key, default=None, version=None, expire_time=False, key = self.make_key(key, version=version) return self._cache.pop(key, default, expire_time, tag, retry) - def delete(self, key, version=None, retry=True): """Delete a key from the cache, failing silently. @@ -198,8 +221,7 @@ def delete(self, key, version=None, retry=True): """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) - self._cache.delete(key, retry) - + return self._cache.delete(key, retry) def incr(self, key, delta=1, version=None, default=None, retry=True): """Increment value by delta for item with key. @@ -228,8 +250,7 @@ def incr(self, key, delta=1, version=None, default=None, retry=True): try: return self._cache.incr(key, delta, default, retry) except KeyError: - raise ValueError("Key '%s' not found" % key) - + raise ValueError("Key '%s' not found" % key) from None def decr(self, key, delta=1, version=None, default=None, retry=True): """Decrement value by delta for item with key. 
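For orientation, the SHARDS, DATABASE_TIMEOUT, and OPTIONS parameters read in __init__ above come from Django's CACHES setting. A typical configuration might look like the following sketch (the path and size limit are illustrative):

    # settings.py
    CACHES = {
        'default': {
            'BACKEND': 'diskcache.DjangoCache',
            'LOCATION': '/var/tmp/django-disk-cache',
            'TIMEOUT': 300,                      # default Django expiry
            'SHARDS': 8,
            'DATABASE_TIMEOUT': 0.010,           # 10 ms SQLite timeout
            'OPTIONS': {'size_limit': 2 ** 30},  # passed to FanoutCache
        },
    }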
@@ -259,7 +280,6 @@ def decr(self, key, delta=1, version=None, default=None, retry=True): # pylint: disable=arguments-differ return self.incr(key, -delta, version, default, retry) - def has_key(self, key, version=None): """Returns True if the key is in the cache and has not expired. @@ -271,7 +291,6 @@ def has_key(self, key, version=None): key = self.make_key(key, version=version) return key in self._cache - def expire(self): """Remove expired items from cache. @@ -280,7 +299,6 @@ def expire(self): """ return self._cache.expire() - def stats(self, enable=True, reset=False): """Return cache statistics hits and misses. @@ -291,7 +309,6 @@ def stats(self, enable=True, reset=False): """ return self._cache.stats(enable=enable, reset=reset) - def create_tag_index(self): """Create tag index on cache database. @@ -302,7 +319,6 @@ def create_tag_index(self): """ self._cache.create_tag_index() - def drop_tag_index(self): """Drop tag index on cache database. @@ -311,7 +327,6 @@ def drop_tag_index(self): """ self._cache.drop_tag_index() - def evict(self, tag): """Remove items with matching `tag` from cache. @@ -321,7 +336,6 @@ def evict(self, tag): """ return self._cache.evict(tag) - def cull(self): """Cull items from cache until volume is less than size limit. @@ -330,18 +344,15 @@ def cull(self): """ return self._cache.cull() - def clear(self): - "Remove *all* values from the cache at once." + """Remove *all* values from the cache at once.""" return self._cache.clear() - def close(self, **kwargs): - "Close the cache connection." + """Close the cache connection.""" # pylint: disable=unused-argument self._cache.close() - def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): """Return seconds to expiration. @@ -356,9 +367,15 @@ def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): timeout = -1 return None if timeout is None else timeout - - def memoize(self, name=None, timeout=DEFAULT_TIMEOUT, version=None, - typed=False, tag=None): + def memoize( + self, + name=None, + timeout=DEFAULT_TIMEOUT, + version=None, + typed=False, + tag=None, + ignore=(), + ): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. @@ -392,6 +409,7 @@ def memoize(self, name=None, timeout=DEFAULT_TIMEOUT, version=None, :param int version: key version number (default None, cache parameter) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -400,12 +418,12 @@ def memoize(self, name=None, timeout=DEFAULT_TIMEOUT, version=None, raise TypeError('name cannot be callable') def decorator(func): - "Decorator created by memoize() for callable `func`." + """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, ENOVAL, version, retry=True) @@ -418,14 +436,19 @@ def wrapper(*args, **kwargs): ) if valid_timeout: self.set( - key, result, timeout, version, tag=tag, retry=True, + key, + result, + timeout, + version, + tag=tag, + retry=True, ) return result def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." 
- return args_to_key(base, args, kwargs, typed) + """Make key for cache given function arguments.""" + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/diskcache/fanout.py b/diskcache/fanout.py index 8a0a722..9822ee4 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -1,32 +1,24 @@ -"Fanout cache automatically shards keys and values." +"""Fanout cache automatically shards keys and values.""" +import contextlib as cl +import functools import itertools as it import operator import os.path as op import sqlite3 -import sys import tempfile import time -from .core import ENOVAL, DEFAULT_SETTINGS, Cache, Disk, Timeout +from .core import DEFAULT_SETTINGS, ENOVAL, Cache, Disk, Timeout from .persistent import Deque, Index -############################################################################ -# BEGIN Python 2/3 Shims -############################################################################ -if sys.hexversion >= 0x03000000: - from functools import reduce +class FanoutCache: + """Cache that shards keys and values.""" -############################################################################ -# END Python 2/3 Shims -############################################################################ - - -class FanoutCache(object): - "Cache that shards keys and values." - def __init__(self, directory=None, shards=8, timeout=0.010, disk=Disk, - **settings): + def __init__( + self, directory=None, shards=8, timeout=0.010, disk=Disk, **settings + ): """Initialize cache instance. :param str directory: cache directory @@ -38,6 +30,7 @@ def __init__(self, directory=None, shards=8, timeout=0.010, disk=Disk, """ if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') + directory = str(directory) directory = op.expanduser(directory) directory = op.expandvars(directory) @@ -46,13 +39,14 @@ def __init__(self, directory=None, shards=8, timeout=0.010, disk=Disk, self._count = shards self._directory = directory + self._disk = disk self._shards = tuple( Cache( directory=op.join(directory, '%03d' % num), timeout=timeout, disk=disk, size_limit=size_limit, - **settings + **settings, ) for num in range(shards) ) @@ -61,16 +55,49 @@ def __init__(self, directory=None, shards=8, timeout=0.010, disk=Disk, self._deques = {} self._indexes = {} - @property def directory(self): """Cache directory.""" return self._directory - def __getattr__(self, name): + safe_names = {'timeout', 'disk'} + valid_name = name in DEFAULT_SETTINGS or name in safe_names + assert valid_name, 'cannot access {} in cache shard'.format(name) return getattr(self._shards[0], name) + @cl.contextmanager + def transact(self, retry=True): + """Context manager to perform a transaction by locking the cache. + + While the cache is locked, no other write operation is permitted. + Transactions should therefore be as short as possible. Read and write + operations performed in a transaction are atomic. Read operations may + occur concurrent to a transaction. + + Transactions may be nested and may not be shared between threads. + + Blocks until transactions are held on all cache shards by retrying as + necessary. + + >>> cache = FanoutCache() + >>> with cache.transact(): # Atomically increment two keys. + ... _ = cache.incr('total', 123.4) + ... _ = cache.incr('count', 1) + >>> with cache.transact(): # Atomically calculate average. + ... 
average = cache['total'] / cache['count'] + >>> average + 123.4 + + :return: context manager for use in `with` statement + + """ + assert retry, 'retry must be True in FanoutCache' + with cl.ExitStack() as stack: + for shard in self._shards: + shard_transaction = shard.transact(retry=True) + stack.enter_context(shard_transaction) + yield def set(self, key, value, expire=None, read=False, tag=None, retry=False): """Set `key` and `value` item in cache. @@ -98,7 +125,6 @@ def set(self, key, value, expire=None, read=False, tag=None, retry=False): except Timeout: return False - def __setitem__(self, key, value): """Set `key` and `value` item in cache. @@ -112,7 +138,6 @@ def __setitem__(self, key, value): shard = self._shards[index] shard[key] = value - def touch(self, key, expire=None, retry=False): """Touch `key` in cache and update `expire` time. @@ -133,7 +158,6 @@ def touch(self, key, expire=None, retry=False): except Timeout: return False - def add(self, key, value, expire=None, read=False, tag=None, retry=False): """Add `key` and `value` item to cache. @@ -165,7 +189,6 @@ def add(self, key, value, expire=None, read=False, tag=None, retry=False): except Timeout: return False - def incr(self, key, delta=1, default=0, retry=False): """Increment value by delta for item with key. @@ -197,7 +220,6 @@ def incr(self, key, delta=1, default=0, retry=False): except Timeout: return None - def decr(self, key, delta=1, default=0, retry=False): """Decrement value by delta for item with key. @@ -232,9 +254,15 @@ def decr(self, key, delta=1, default=0, retry=False): except Timeout: return None - - def get(self, key, default=None, read=False, expire_time=False, tag=False, - retry=False): + def get( + self, + key, + default=None, + read=False, + expire_time=False, + tag=False, + retry=False, + ): """Retrieve value from cache. If `key` is missing, return `default`. If database timeout occurs then returns `default` unless `retry` is set @@ -258,7 +286,6 @@ def get(self, key, default=None, read=False, expire_time=False, tag=False, except (Timeout, sqlite3.OperationalError): return default - def __getitem__(self, key): """Return corresponding value for `key` from cache. @@ -273,7 +300,6 @@ def __getitem__(self, key): shard = self._shards[index] return shard[key] - def read(self, key): """Return file handle corresponding to `key` from cache. @@ -287,7 +313,6 @@ def read(self, key): raise KeyError(key) return handle - def __contains__(self, key): """Return `True` if `key` matching item is found in cache. @@ -299,8 +324,9 @@ def __contains__(self, key): shard = self._shards[index] return key in shard - - def pop(self, key, default=None, expire_time=False, tag=False, retry=False): + def pop( + self, key, default=None, expire_time=False, tag=False, retry=False + ): # noqa: E501 """Remove corresponding item for `key` from cache and return value. If `key` is missing, return `default`. @@ -326,7 +352,6 @@ def pop(self, key, default=None, expire_time=False, tag=False, retry=False): except Timeout: return default - def delete(self, key, retry=False): """Delete corresponding item for `key` from cache. @@ -347,7 +372,6 @@ def delete(self, key, retry=False): except Timeout: return False - def __delitem__(self, key): """Delete corresponding item for `key` from cache. @@ -361,7 +385,6 @@ def __delitem__(self, key): shard = self._shards[index] del shard[key] - def check(self, fix=False, retry=False): """Check database and file system consistency. 
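The shard-spanning transact above acquires a transaction on every shard before yielding; contextlib.ExitStack then releases them in reverse order whether the body completes or raises. The same pattern in isolation, as a minimal sketch with generic names rather than the library's API:

    import contextlib

    @contextlib.contextmanager
    def transact_all(shards):
        # Enter each shard's transaction; ExitStack unwinds all of them
        # in reverse order on exit, including on exception.
        with contextlib.ExitStack() as stack:
            for shard in shards:
                stack.enter_context(shard.transact(retry=True))
            yield

Because every shard stays locked for the duration, operations touching keys on different shards become atomic, at the cost of serializing all writers for the length of the transaction.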
@@ -383,8 +406,7 @@ def check(self, fix=False, retry=False): """ warnings = (shard.check(fix, retry) for shard in self._shards) - return reduce(operator.iadd, warnings, []) - + return functools.reduce(operator.iadd, warnings, []) def expire(self, retry=False): """Remove expired items from cache. @@ -398,7 +420,6 @@ def expire(self, retry=False): """ return self._remove('expire', args=(time.time(),), retry=retry) - def create_tag_index(self): """Create tag index on cache database. @@ -410,7 +431,6 @@ def create_tag_index(self): for shard in self._shards: shard.create_tag_index() - def drop_tag_index(self): """Drop tag index on cache database. @@ -420,7 +440,6 @@ def drop_tag_index(self): for shard in self._shards: shard.drop_tag_index() - def evict(self, tag, retry=False): """Remove items with matching `tag` from cache. @@ -434,7 +453,6 @@ def evict(self, tag, retry=False): """ return self._remove('evict', args=(tag,), retry=retry) - def cull(self, retry=False): """Cull items from cache until volume is less than size limit. @@ -447,7 +465,6 @@ def cull(self, retry=False): """ return self._remove('cull', retry=retry) - def clear(self, retry=False): """Remove all items from cache. @@ -460,7 +477,6 @@ def clear(self, retry=False): """ return self._remove('clear', retry=retry) - def _remove(self, name, args=(), retry=False): total = 0 for shard in self._shards: @@ -475,7 +491,6 @@ def _remove(self, name, args=(), retry=False): break return total - def stats(self, enable=True, reset=False): """Return cache statistics hits and misses. @@ -489,7 +504,6 @@ def stats(self, enable=True, reset=False): total_misses = sum(misses for _, misses in results) return total_hits, total_misses - def volume(self): """Return estimated total size of cache on disk. @@ -498,49 +512,40 @@ def volume(self): """ return sum(shard.volume() for shard in self._shards) - def close(self): - "Close database connection." + """Close database connection.""" for shard in self._shards: shard.close() self._caches.clear() self._deques.clear() self._indexes.clear() - def __enter__(self): return self - def __exit__(self, *exception): self.close() - def __getstate__(self): return (self._directory, self._count, self.timeout, type(self.disk)) - def __setstate__(self, state): self.__init__(*state) - def __iter__(self): - "Iterate keys in cache including expired items." + """Iterate keys in cache including expired items.""" iterators = (iter(shard) for shard in self._shards) return it.chain.from_iterable(iterators) - def __reversed__(self): - "Reverse iterate keys in cache including expired items." + """Reverse iterate keys in cache including expired items.""" iterators = (reversed(shard) for shard in reversed(self._shards)) return it.chain.from_iterable(iterators) - def __len__(self): - "Count of items in cache including expired items." + """Count of items in cache including expired items.""" return sum(len(shard) for shard in self._shards) - def reset(self, key, value=ENOVAL): """Reset `key` and `value` item from Settings table. @@ -569,10 +574,11 @@ def reset(self, key, value=ENOVAL): break return result - - def cache(self, name): + def cache(self, name, timeout=60, disk=None, **settings): """Return Cache with given `name` in subdirectory. + If disk is none (default), uses the fanout cache disk. 
+ + >>> fanout_cache = FanoutCache() >>> cache = fanout_cache.cache('test') >>> cache.set('abc', 123) @@ -585,6 +591,9 @@ True >>> cache.get('abc') 123 >>> len(cache) 1 :param str name: subdirectory name for Cache + :param float timeout: SQLite connection timeout + :param disk: Disk type or subclass for serialization + :param settings: any of DEFAULT_SETTINGS :return: Cache with given name """ @@ -595,12 +604,16 @@ except KeyError: parts = name.split('/') directory = op.join(self._directory, 'cache', *parts) - temp = Cache(directory=directory) + temp = Cache( + directory=directory, + timeout=timeout, + disk=self._disk if disk is None else disk, + **settings, + ) _caches[name] = temp return temp - - def deque(self, name): + def deque(self, name, maxlen=None): """Return Deque with given `name` in subdirectory. >>> cache = FanoutCache() >>> deque = cache.deque('test') >>> deque.append(123) >>> deque.popleft() 123 @@ -614,6 +627,7 @@ 1 :param str name: subdirectory name for Deque + :param maxlen: max length (default None, no max) :return: Deque with given name """ @@ -624,10 +638,14 @@ except KeyError: parts = name.split('/') directory = op.join(self._directory, 'deque', *parts) - temp = Deque(directory=directory) - _deques[name] = temp - return temp - + cache = Cache( + directory=directory, + disk=self._disk, + eviction_policy='none', + ) + deque = Deque.fromcache(cache, maxlen=maxlen) + _deques[name] = deque + return deque def index(self, name): """Return Index with given `name` in subdirectory. @@ -656,22 +674,14 @@ except KeyError: parts = name.split('/') directory = op.join(self._directory, 'index', *parts) - temp = Index(directory) - _indexes[name] = temp - return temp - - -############################################################################ -# BEGIN Python 2/3 Shims -############################################################################ + cache = Cache( + directory=directory, + disk=self._disk, + eviction_policy='none', + ) + index = Index.fromcache(cache) + _indexes[name] = index + return index -if sys.hexversion < 0x03000000: - import types - memoize_func = Cache.__dict__['memoize'] # pylint: disable=invalid-name - FanoutCache.memoize = types.MethodType(memoize_func, None, FanoutCache) -else: - FanoutCache.memoize = Cache.memoize -############################################################################ -# END Python 2/3 Shims -############################################################################ +FanoutCache.memoize = Cache.memoize # type: ignore diff --git a/diskcache/persistent.py b/diskcache/persistent.py index 9de5835..522bb74 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -1,40 +1,26 @@ """Persistent Data Types - """ import operator as op -import sys - from collections import OrderedDict +from collections.abc import ( + ItemsView, + KeysView, + MutableMapping, + Sequence, + ValuesView, +) from contextlib import contextmanager from shutil import rmtree -from .core import BytesType, Cache, ENOVAL, TextType - -############################################################################ -# BEGIN Python 2/3 Shims -############################################################################ - -try: - from collections.abc import MutableMapping, Sequence - from collections.abc import KeysView, ValuesView, ItemsView -except ImportError: - from collections import MutableMapping, Sequence - from collections import KeysView, ValuesView, ItemsView - -if sys.hexversion < 0x03000000: - from itertools import izip as zip # pylint:
disable=redefined-builtin,no-name-in-module,ungrouped-imports - range = xrange # pylint: disable=redefined-builtin,invalid-name,undefined-variable - -############################################################################ -# END Python 2/3 Shims -############################################################################ +from .core import ENOVAL, Cache def _make_compare(seq_op, doc): - "Make compare method with Sequence semantics." + """Make compare method with Sequence semantics.""" + def compare(self, that): - "Compare method for deque and sequence." + """Compare method for deque and sequence.""" if not isinstance(that, Sequence): return NotImplemented @@ -88,7 +74,8 @@ class Deque(Sequence): [3, 2, 1, 0, 0, -1, -2, -3] """ - def __init__(self, iterable=(), directory=None): + + def __init__(self, iterable=(), directory=None, maxlen=None): """Initialize deque instance. If directory is None then temporary directory created. The directory @@ -99,12 +86,11 @@ def __init__(self, iterable=(), directory=None): """ self._cache = Cache(directory, eviction_policy='none') - with self.transact(): - self.extend(iterable) - + self._maxlen = float('inf') if maxlen is None else maxlen + self._extend(iterable) @classmethod - def fromcache(cls, cache, iterable=()): + def fromcache(cls, cache, iterable=(), maxlen=None): """Initialize deque using `cache`. >>> cache = Cache() @@ -126,21 +112,44 @@ def fromcache(cls, cache, iterable=()): # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache - self.extend(iterable) + self._maxlen = float('inf') if maxlen is None else maxlen + self._extend(iterable) return self - @property def cache(self): - "Cache used by deque." + """Cache used by deque.""" return self._cache - @property def directory(self): - "Directory path where deque is stored." + """Directory path where deque is stored.""" return self._cache.directory + @property + def maxlen(self): + """Max length of the deque.""" + return self._maxlen + + @maxlen.setter + def maxlen(self, value): + """Set max length of the deque. + + Pops items from left while length greater than max. 
+ + >>> deque = Deque() + >>> deque.extendleft('abcde') + >>> deque.maxlen = 3 + >>> list(deque) + ['c', 'd', 'e'] + + :param value: max length + + """ + self._maxlen = value + with self._cache.transact(retry=True): + while len(self._cache) > self._maxlen: + self._popleft() def _index(self, index, func): len_self = len(self) @@ -172,7 +181,6 @@ def _index(self, index, func): raise IndexError('deque index out of range') - def __getitem__(self, index): """deque.__getitem__(index) <==> deque[index] @@ -195,7 +203,6 @@ def __getitem__(self, index): """ return self._index(index, self._cache.__getitem__) - def __setitem__(self, index, value): """deque.__setitem__(index, value) <==> deque[index] = value @@ -214,9 +221,11 @@ def __setitem__(self, index, value): :raises IndexError: if index out of range """ - set_value = lambda key: self._cache.__setitem__(key, value) - self._index(index, set_value) + def _set_value(key): + return self._cache.__setitem__(key, value) + + self._index(index, _set_value) def __delitem__(self, index): """deque.__delitem__(index) <==> del deque[index] @@ -237,7 +246,6 @@ def __delitem__(self, index): """ self._index(index, self._cache.__delitem__) - def __repr__(self): """deque.__repr__() <==> repr(deque) @@ -247,7 +255,6 @@ def __repr__(self): name = type(self).__name__ return '{0}(directory={1!r})'.format(name, self.directory) - __eq__ = _make_compare(op.eq, 'equal to') __ne__ = _make_compare(op.ne, 'not equal to') __lt__ = _make_compare(op.lt, 'less than') @@ -255,7 +262,6 @@ def __repr__(self): __le__ = _make_compare(op.le, 'less than or equal to') __ge__ = _make_compare(op.ge, 'greater than or equal to') - def __iadd__(self, iterable): """deque.__iadd__(iterable) <==> deque += iterable @@ -265,10 +271,9 @@ def __iadd__(self, iterable): :return: deque with added items """ - self.extend(iterable) + self._extend(iterable) return self - def __iter__(self): """deque.__iter__() <==> iter(deque) @@ -283,7 +288,6 @@ def __iter__(self): except KeyError: pass - def __len__(self): """deque.__len__() <==> len(deque) @@ -292,7 +296,6 @@ def __len__(self): """ return len(self._cache) - def __reversed__(self): """deque.__reversed__() <==> reversed(deque) @@ -315,14 +318,12 @@ def __reversed__(self): except KeyError: pass - def __getstate__(self): - return self.directory - + return self.directory, self.maxlen def __setstate__(self, state): - self.__init__(directory=state) - + directory, maxlen = state + self.__init__(directory=directory, maxlen=maxlen) def append(self, value): """Add `value` to back of deque. @@ -337,8 +338,12 @@ def append(self, value): :param value: value to add to back of deque """ - self._cache.push(value, retry=True) + with self._cache.transact(retry=True): + self._cache.push(value, retry=True) + if len(self._cache) > self._maxlen: + self._popleft() + _append = append def appendleft(self, value): """Add `value` to front of deque. @@ -353,8 +358,12 @@ def appendleft(self, value): :param value: value to add to front of deque """ - self._cache.push(value, side='front', retry=True) + with self._cache.transact(retry=True): + self._cache.push(value, side='front', retry=True) + if len(self._cache) > self._maxlen: + self._pop() + _appendleft = appendleft def clear(self): """Remove all elements from deque. 
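With maxlen in place, append and appendleft above evict from the opposite end once the bound is exceeded, mirroring collections.deque. A small sketch of the expected behaviour (no directory given, so a temporary one is created):

    from diskcache import Deque

    deque = Deque(maxlen=3)
    deque.extend([1, 2, 3, 4, 5])   # each append may pop from the left
    assert list(deque) == [3, 4, 5]

    deque.appendleft(0)             # over maxlen: pops from the right
    assert list(deque) == [0, 3, 4]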
@@ -369,6 +378,12 @@ def clear(self): """ self._cache.clear(retry=True) + _clear = clear + + def copy(self): + """Copy deque with same directory and max length.""" + TypeSelf = type(self) + return TypeSelf(directory=self.directory, maxlen=self.maxlen) def count(self, value): """Return number of occurrences of `value` in deque. @@ -388,7 +403,6 @@ def count(self, value): """ return sum(1 for item in self if value == item) - def extend(self, iterable): """Extend back side of deque with values from `iterable`. @@ -396,8 +410,9 @@ def extend(self, iterable): """ for value in iterable: - self.append(value) + self._append(value) + _extend = extend def extendleft(self, iterable): """Extend front side of deque with value from `iterable`. @@ -411,8 +426,7 @@ def extendleft(self, iterable): """ for value in iterable: - self.appendleft(value) - + self._appendleft(value) def peek(self): """Peek at value at back of deque. @@ -440,7 +454,6 @@ def peek(self): raise IndexError('peek from an empty deque') return value - def peekleft(self): """Peek at value at front of deque. @@ -467,7 +480,6 @@ def peekleft(self): raise IndexError('peek from an empty deque') return value - def pop(self): """Remove and return value at back of deque. @@ -494,6 +506,7 @@ def pop(self): raise IndexError('pop from an empty deque') return value + _pop = pop def popleft(self): """Remove and return value at front of deque. @@ -519,6 +532,7 @@ def popleft(self): raise IndexError('pop from an empty deque') return value + _popleft = popleft def remove(self, value): """Remove first occurrence of `value` in deque. @@ -557,7 +571,6 @@ def remove(self, value): raise ValueError('deque.remove(value): value not in deque') - def reverse(self): """Reverse deque in place. @@ -568,19 +581,20 @@ def reverse(self): ['c', 'b', 'a'] """ + # pylint: disable=protected-access # GrantJ 2019-03-22 Consider using an algorithm that swaps the values # at two keys. Like self._cache.swap(key1, key2, retry=True) The swap # method would exchange the values at two given keys. Then, using a - # forward iterator and a reverse iterator, the reversis method could + # forward iterator and a reverse iterator, the reverse method could # avoid making copies of the values. temp = Deque(iterable=reversed(self)) - self.clear() - self.extend(temp) + self._clear() + self._extend(temp) directory = temp.directory + temp._cache.close() del temp rmtree(directory) - def rotate(self, steps=1): """Rotate deque right by `steps`. @@ -612,26 +626,24 @@ def rotate(self, steps=1): for _ in range(steps): try: - value = self.pop() + value = self._pop() except IndexError: return else: - self.appendleft(value) + self._appendleft(value) else: steps *= -1 steps %= len_self for _ in range(steps): try: - value = self.popleft() + value = self._popleft() except IndexError: return else: - self.append(value) - - - __hash__ = None + self._append(value) + __hash__ = None # type: ignore @contextmanager def transact(self): @@ -682,6 +694,7 @@ class Index(MutableMapping): ('c', 3) """ + def __init__(self, *args, **kwargs): """Initialize index in directory and update items. 
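Index.__init__, continued in the next hunk, treats a leading bytes or str positional argument as the directory and passes the remaining arguments to MutableMapping.update via the new _update alias. A brief sketch with a hypothetical path:

    from diskcache import Index

    # The directory is optional; omit it and a temporary one is created.
    index = Index('/tmp/demo-index', {'a': 1}, b=2)
    index['c'] = 3
    assert len(index) == 3
    assert list(index) == ['a', 'b', 'c']  # insertion-ordered iteration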
@@ -699,7 +712,7 @@ def __init__(self, *args, **kwargs): 4 """ - if args and isinstance(args[0], (BytesType, TextType)): + if args and isinstance(args[0], (bytes, str)): directory = args[0] args = args[1:] else: @@ -707,8 +720,9 @@ def __init__(self, *args, **kwargs): args = args[1:] directory = None self._cache = Cache(directory, eviction_policy='none') - self.update(*args, **kwargs) + self._update(*args, **kwargs) + _update = MutableMapping.update @classmethod def fromcache(cls, cache, *args, **kwargs): @@ -734,22 +748,19 @@ def fromcache(cls, cache, *args, **kwargs): # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache - self.update(*args, **kwargs) + self._update(*args, **kwargs) return self - @property def cache(self): - "Cache used by index." + """Cache used by index.""" return self._cache - @property def directory(self): - "Directory path where items are stored." + """Directory path where items are stored.""" return self._cache.directory - def __getitem__(self, key): """index.__getitem__(key) <==> index[key] @@ -773,7 +784,6 @@ def __getitem__(self, key): """ return self._cache[key] - def __setitem__(self, key, value): """index.__setitem__(key, value) <==> index[key] = value @@ -791,7 +801,6 @@ def __setitem__(self, key, value): """ self._cache[key] = value - def __delitem__(self, key): """index.__delitem__(key) <==> del index[key] @@ -814,7 +823,6 @@ def __delitem__(self, key): """ del self._cache[key] - def setdefault(self, key, default=None): """Set and get value for `key` in index using `default`. @@ -839,7 +847,6 @@ def setdefault(self, key, default=None): except KeyError: _cache.add(key, default, retry=True) - def peekitem(self, last=True): """Peek at key and value item pair in index based on iteration order. @@ -858,7 +865,6 @@ def peekitem(self, last=True): """ return self._cache.peekitem(last, retry=True) - def pop(self, key, default=ENOVAL): """Remove corresponding item for `key` from index and return value. @@ -889,7 +895,6 @@ def pop(self, key, default=ENOVAL): raise KeyError(key) return value - def popitem(self, last=True): """Remove and return item pair. @@ -915,7 +920,7 @@ def popitem(self, last=True): :raises KeyError: if index is empty """ - # pylint: disable=arguments-differ + # pylint: disable=arguments-differ,unbalanced-tuple-unpacking _cache = self._cache with _cache.transact(retry=True): @@ -924,7 +929,6 @@ def popitem(self, last=True): return key, value - def push(self, value, prefix=None, side='back'): """Push `value` onto `side` of queue in index identified by `prefix`. @@ -956,7 +960,6 @@ def push(self, value, prefix=None, side='back'): """ return self._cache.push(value, prefix, side, retry=True) - def pull(self, prefix=None, default=(None, None), side='front'): """Pull key and value item pair from `side` of queue in index. @@ -997,7 +1000,6 @@ def pull(self, prefix=None, default=(None, None), side='front'): """ return self._cache.pull(prefix, default, side, retry=True) - def clear(self): """Remove all items from index. 
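The `push` and `pull` methods above expose the cache's queue primitives through the index: keys are generated under a prefix, so queued items cannot collide with ordinary mapping keys. A hedged sketch of round-tripping a value (the directory and prefix names are arbitrary):

    import diskcache

    index = diskcache.Index('/tmp/demo-index')

    # Queue semantics: push appends at the back, pull removes from the front.
    key = index.push('first job', prefix='job')   # key like 'job-500000000000000'
    index.push('second job', prefix='job')

    pulled_key, value = index.pull(prefix='job')
    assert value == 'first job'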
@@ -1011,7 +1013,6 @@ def clear(self): """ self._cache.clear(retry=True) - def __iter__(self): """index.__iter__() <==> iter(index) @@ -1020,7 +1021,6 @@ def __iter__(self): """ return iter(self._cache) - def __reversed__(self): """index.__reversed__() <==> reversed(index) @@ -1037,7 +1037,6 @@ def __reversed__(self): """ return reversed(self._cache) - def __len__(self): """index.__len__() <==> len(index) @@ -1046,210 +1045,56 @@ def __len__(self): """ return len(self._cache) + def keys(self): + """Set-like object providing a view of index keys. - if sys.hexversion < 0x03000000: - def keys(self): - """List of index keys. - - >>> index = Index() - >>> index.update([('a', 1), ('b', 2), ('c', 3)]) - >>> index.keys() - ['a', 'b', 'c'] - - :return: list of keys - - """ - return list(self._cache) - - - def values(self): - """List of index values. - - >>> index = Index() - >>> index.update([('a', 1), ('b', 2), ('c', 3)]) - >>> index.values() - [1, 2, 3] - - :return: list of values - - """ - return list(self.itervalues()) - - - def items(self): - """List of index items. - - >>> index = Index() - >>> index.update([('a', 1), ('b', 2), ('c', 3)]) - >>> index.items() - [('a', 1), ('b', 2), ('c', 3)] - - :return: list of items - - """ - return list(self.iteritems()) - - - def iterkeys(self): - """Iterator of index keys. - - >>> index = Index() - >>> index.update([('a', 1), ('b', 2), ('c', 3)]) - >>> list(index.iterkeys()) - ['a', 'b', 'c'] - - :return: iterator of keys - - """ - return iter(self._cache) - - - def itervalues(self): - """Iterator of index values. - - >>> index = Index() - >>> index.update([('a', 1), ('b', 2), ('c', 3)]) - >>> list(index.itervalues()) - [1, 2, 3] - - :return: iterator of values - - """ - _cache = self._cache - - for key in _cache: - while True: - try: - yield _cache[key] - except KeyError: - pass - break - - - def iteritems(self): - """Iterator of index items. - - >>> index = Index() - >>> index.update([('a', 1), ('b', 2), ('c', 3)]) - >>> list(index.iteritems()) - [('a', 1), ('b', 2), ('c', 3)] - - :return: iterator of items - - """ - _cache = self._cache - - for key in _cache: - while True: - try: - yield key, _cache[key] - except KeyError: - pass - break - - - def viewkeys(self): - """Set-like object providing a view of index keys. - - >>> index = Index() - >>> index.update({'a': 1, 'b': 2, 'c': 3}) - >>> keys_view = index.viewkeys() - >>> 'b' in keys_view - True - - :return: keys view - - """ - return KeysView(self) - - - def viewvalues(self): - """Set-like object providing a view of index values. - - >>> index = Index() - >>> index.update({'a': 1, 'b': 2, 'c': 3}) - >>> values_view = index.viewvalues() - >>> 2 in values_view - True - - :return: values view - - """ - return ValuesView(self) - - - def viewitems(self): - """Set-like object providing a view of index items. - - >>> index = Index() - >>> index.update({'a': 1, 'b': 2, 'c': 3}) - >>> items_view = index.viewitems() - >>> ('b', 2) in items_view - True - - :return: items view - - """ - return ItemsView(self) - - - else: - def keys(self): - """Set-like object providing a view of index keys. - - >>> index = Index() - >>> index.update({'a': 1, 'b': 2, 'c': 3}) - >>> keys_view = index.keys() - >>> 'b' in keys_view - True - - :return: keys view - - """ - return KeysView(self) - - - def values(self): - """Set-like object providing a view of index values. 
+ >>> index = Index() + >>> index.update({'a': 1, 'b': 2, 'c': 3}) + >>> keys_view = index.keys() + >>> 'b' in keys_view + True - >>> index = Index() - >>> index.update({'a': 1, 'b': 2, 'c': 3}) - >>> values_view = index.values() - >>> 2 in values_view - True + :return: keys view - :return: values view + """ + return KeysView(self) - """ - return ValuesView(self) + def values(self): + """Set-like object providing a view of index values. + >>> index = Index() + >>> index.update({'a': 1, 'b': 2, 'c': 3}) + >>> values_view = index.values() + >>> 2 in values_view + True - def items(self): - """Set-like object providing a view of index items. + :return: values view - >>> index = Index() - >>> index.update({'a': 1, 'b': 2, 'c': 3}) - >>> items_view = index.items() - >>> ('b', 2) in items_view - True + """ + return ValuesView(self) - :return: items view + def items(self): + """Set-like object providing a view of index items. - """ - return ItemsView(self) + >>> index = Index() + >>> index.update({'a': 1, 'b': 2, 'c': 3}) + >>> items_view = index.items() + >>> ('b', 2) in items_view + True + :return: items view - __hash__ = None + """ + return ItemsView(self) + __hash__ = None # type: ignore def __getstate__(self): return self.directory - def __setstate__(self, state): self.__init__(state) - def __eq__(self, other): """index.__eq__(other) <==> index == other @@ -1283,7 +1128,6 @@ def __eq__(self, other): else: return all(self[key] == other.get(key, ENOVAL) for key in self) - def __ne__(self, other): """index.__ne__(other) <==> index != other @@ -1307,8 +1151,7 @@ def __ne__(self, other): """ return not self == other - - def memoize(self, name=None, typed=False): + def memoize(self, name=None, typed=False, ignore=()): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. @@ -1359,11 +1202,11 @@ def memoize(self, name=None, typed=False): :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ - return self._cache.memoize(name, typed) - + return self._cache.memoize(name, typed, ignore=ignore) @contextmanager def transact(self): @@ -1392,7 +1235,6 @@ def transact(self): with self._cache.transact(retry=True): yield - def __repr__(self): """index.__repr__() <==> repr(index) diff --git a/diskcache/recipes.py b/diskcache/recipes.py index fb64250..babb68f 100644 --- a/diskcache/recipes.py +++ b/diskcache/recipes.py @@ -1,37 +1,25 @@ """Disk Cache Recipes - """ import functools import math import os import random -import sys import threading import time from .core import ENOVAL, args_to_key, full_name -############################################################################ -# BEGIN Python 2/3 Shims -############################################################################ - -if sys.hexversion < 0x03000000: - from thread import get_ident # pylint: disable=import-error -else: - from threading import get_ident - -############################################################################ -# END Python 2/3 Shims -############################################################################ - -class Averager(object): +class Averager: """Recipe for calculating a running average. Sometimes known as "online statistics," the running average maintains the total and count. The average can then be calculated at any time. + Assumes the key will not be evicted. 
Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.FanoutCache() >>> ave = Averager(cache, 'latency') @@ -46,6 +34,7 @@ class Averager(object): None """ + def __init__(self, cache, key, expire=None, tag=None): self._cache = cache self._key = key @@ -53,29 +42,35 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def add(self, value): - "Add `value` to average." + """Add `value` to average.""" with self._cache.transact(retry=True): total, count = self._cache.get(self._key, default=(0.0, 0)) total += value count += 1 self._cache.set( - self._key, (total, count), expire=self._expire, tag=self._tag, + self._key, + (total, count), + expire=self._expire, + tag=self._tag, ) def get(self): - "Get current average or return `None` if count equals zero." + """Get current average or return `None` if count equals zero.""" total, count = self._cache.get(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count def pop(self): - "Return current average and delete key." + """Return current average and delete key.""" total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count -class Lock(object): +class Lock: """Recipe for cross-process and cross-thread lock. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> lock = Lock(cache, 'report-123') @@ -85,6 +80,7 @@ class Lock(object): ... pass """ + def __init__(self, cache, key, expire=None, tag=None): self._cache = cache self._key = key @@ -92,19 +88,27 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire lock using spin-lock algorithm." + """Acquire lock using spin-lock algorithm.""" while True: added = self._cache.add( - self._key, None, expire=self._expire, tag=self._tag, retry=True, + self._key, + None, + expire=self._expire, + tag=self._tag, + retry=True, ) if added: break time.sleep(0.001) def release(self): - "Release lock by deleting key." + """Release lock by deleting key.""" self._cache.delete(self._key, retry=True) + def locked(self): + """Return true if the lock is acquired.""" + return self._key in self._cache + def __enter__(self): self.acquire() @@ -112,9 +116,12 @@ def __exit__(self, *exc_info): self.release() -class RLock(object): +class RLock: """Recipe for cross-process and cross-thread re-entrant lock. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> rlock = RLock(cache, 'user-123') @@ -130,6 +137,7 @@ class RLock(object): AssertionError: cannot release un-acquired lock """ + def __init__(self, cache, key, expire=None, tag=None): self._cache = cache self._key = key @@ -137,9 +145,9 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire lock by incrementing count using spin-lock algorithm." 
+ """Acquire lock by incrementing count using spin-lock algorithm.""" pid = os.getpid() - tid = get_ident() + tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) while True: @@ -147,16 +155,18 @@ def acquire(self): value, count = self._cache.get(self._key, default=(None, 0)) if pid_tid == value or count == 0: self._cache.set( - self._key, (pid_tid, count + 1), - expire=self._expire, tag=self._tag, + self._key, + (pid_tid, count + 1), + expire=self._expire, + tag=self._tag, ) return time.sleep(0.001) def release(self): - "Release lock by decrementing count." + """Release lock by decrementing count.""" pid = os.getpid() - tid = get_ident() + tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) with self._cache.transact(retry=True): @@ -164,8 +174,10 @@ def release(self): is_owned = pid_tid == value and count > 0 assert is_owned, 'cannot release un-acquired lock' self._cache.set( - self._key, (value, count - 1), - expire=self._expire, tag=self._tag, + self._key, + (value, count - 1), + expire=self._expire, + tag=self._tag, ) def __enter__(self): @@ -175,9 +187,12 @@ def __exit__(self, *exc_info): self.release() -class BoundedSemaphore(object): +class BoundedSemaphore: """Recipe for cross-process and cross-thread bounded semaphore. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2) @@ -193,6 +208,7 @@ class BoundedSemaphore(object): AssertionError: cannot release un-acquired semaphore """ + def __init__(self, cache, key, value=1, expire=None, tag=None): self._cache = cache self._key = key @@ -201,26 +217,31 @@ def __init__(self, cache, key, value=1, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire semaphore by decrementing value using spin-lock algorithm." + """Acquire semaphore by decrementing value using spin-lock algorithm.""" while True: with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) if value > 0: self._cache.set( - self._key, value - 1, - expire=self._expire, tag=self._tag, + self._key, + value - 1, + expire=self._expire, + tag=self._tag, ) return time.sleep(0.001) def release(self): - "Release semaphore by incrementing value." + """Release semaphore by incrementing value.""" with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) assert self._value > value, 'cannot release un-acquired semaphore' value += 1 self._cache.set( - self._key, value, expire=self._expire, tag=self._tag, + self._key, + value, + expire=self._expire, + tag=self._tag, ) def __enter__(self): @@ -230,10 +251,21 @@ def __exit__(self, *exc_info): self.release() -def throttle(cache, count, seconds, name=None, expire=None, tag=None, - time_func=time.time, sleep_func=time.sleep): +def throttle( + cache, + count, + seconds, + name=None, + expire=None, + tag=None, + time_func=time.time, + sleep_func=time.sleep, +): """Decorator to throttle calls to function. + Assumes keys will not be evicted. Set the eviction policy to 'none' on the + cache to guarantee the keys are not evicted. 
+ >>> import diskcache, time >>> cache = diskcache.Cache() >>> count = 0 @@ -248,6 +280,7 @@ def throttle(cache, count, seconds, name=None, expire=None, tag=None, True """ + def decorator(func): rate = count / float(seconds) key = full_name(func) if name is None else name @@ -287,6 +320,9 @@ def barrier(cache, lock_factory, name=None, expire=None, tag=None): Supports different kinds of locks: Lock, RLock, BoundedSemaphore. + Assumes keys will not be evicted. Set the eviction policy to 'none' on the + cache to guarantee the keys are not evicted. + >>> import diskcache, time >>> cache = diskcache.Cache() >>> @barrier(cache, Lock) @@ -304,6 +340,7 @@ def barrier(cache, lock_factory, name=None, expire=None, tag=None): >>> pool.terminate() """ + def decorator(func): key = full_name(func) if name is None else name lock = lock_factory(cache, key, expire=expire, tag=tag) @@ -318,7 +355,9 @@ def wrapper(*args, **kwargs): return decorator -def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): +def memoize_stampede( + cache, expire, name=None, typed=False, tag=None, beta=1, ignore=() +): """Memoizing cache decorator with cache stampede protection. Cache stampedes are a type of system overload that can occur when parallel @@ -371,16 +410,17 @@ def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ # Caution: Nearly identical code exists in Cache.memoize def decorator(func): - "Decorator created by memoize call for callable." + """Decorator created by memoize call for callable.""" base = (full_name(func),) if name is None else (name,) def timer(*args, **kwargs): - "Time execution of `func` and return result and time delta." + """Time execution of `func` and return result and time delta.""" start = time.time() result = func(*args, **kwargs) delta = time.time() - start @@ -388,10 +428,13 @@ def timer(*args, **kwargs): @functools.wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) pair, expire_time = cache.get( - key, default=ENOVAL, expire_time=True, retry=True, + key, + default=ENOVAL, + expire_time=True, + retry=True, ) if pair is not ENOVAL: @@ -406,7 +449,10 @@ def wrapper(*args, **kwargs): thread_key = key + (ENOVAL,) thread_added = cache.add( - thread_key, None, expire=delta, retry=True, + thread_key, + None, + expire=delta, + retry=True, ) if thread_added: @@ -415,8 +461,13 @@ def recompute(): with cache: pair = timer(*args, **kwargs) cache.set( - key, pair, expire=expire, tag=tag, retry=True, + key, + pair, + expire=expire, + tag=tag, + retry=True, ) + thread = threading.Thread(target=recompute) thread.daemon = True thread.start() @@ -428,8 +479,8 @@ def recompute(): return pair[0] def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." 
- return args_to_key(base, args, kwargs, typed) + """Make key for cache given function arguments.""" + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/docs/Makefile b/docs/Makefile index e3bd50b..d4bb2cb 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,216 +1,20 @@ -# Makefile for Sphinx documentation +# Minimal makefile for Sphinx documentation # -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . BUILDDIR = _build -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help +# Put it first so that "make" without argument is like "make help". help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " applehelp to make an Apple Help Book" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - @echo " coverage to run coverage check of the documentation (if enabled)" - -.PHONY: clean -clean: - rm -rf $(BUILDDIR)/* - -.PHONY: html -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -.PHONY: dirhtml -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -.PHONY: singlehtml -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml." - -.PHONY: pickle -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -.PHONY: json -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -.PHONY: htmlhelp -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -.PHONY: qthelp -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/DiskCache.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/DiskCache.qhc" - -.PHONY: applehelp -applehelp: - $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp - @echo - @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." - @echo "N.B. You won't be able to view it unless you put it in" \ - "~/Library/Documentation/Help or install it in your application" \ - "bundle." - -.PHONY: devhelp -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/DiskCache" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/DiskCache" - @echo "# devhelp" - -.PHONY: epub -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -.PHONY: latex -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -.PHONY: latexpdf -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -.PHONY: latexpdfja -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -.PHONY: text -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -.PHONY: man -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -.PHONY: texinfo -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -.PHONY: info -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 
- -.PHONY: gettext -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -.PHONY: changes -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -.PHONY: linkcheck -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -.PHONY: doctest -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -.PHONY: coverage -coverage: - $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage - @echo "Testing of coverage in the sources finished, look at the " \ - "results in $(BUILDDIR)/coverage/python.txt." + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -.PHONY: xml -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." +.PHONY: help Makefile -.PHONY: pseudoxml -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py index 683dd3e..92bf3ec 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,31 +1,33 @@ -# -*- coding: utf-8 -*- +# Configuration file for the Sphinx documentation builder. # -# DiskCache documentation build configuration file, created by -# sphinx-quickstart on Wed Feb 10 20:20:15 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html -import sys -import os +# -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. + +import os +import sys sys.path.insert(0, os.path.abspath('..')) + import diskcache -from diskcache import __version__ -# -- General configuration ------------------------------------------------ -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# -- Project information ----------------------------------------------------- + +project = 'DiskCache' +copyright = '2023, Grant Jenks' +author = 'Grant Jenks' + +# The full version, including alpha/beta/rc tags +release = diskcache.__version__ + + +# -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -38,77 +40,13 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'DiskCache' -copyright = u'2019, Grant Jenks' -author = u'Grant Jenks' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = __version__ -# The full version, including alpha/beta/rc tags. -release = version - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] -# -- Options for HTML output ---------------------------------------------- +# -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. @@ -121,7 +59,6 @@ 'logo': 'gj-logo.png', 'logo_name': True, 'logo_text_align': 'center', - 'travis_button': True, 'analytics_id': 'UA-19364636-2', 'show_powered_by': False, 'show_related': True, @@ -130,43 +67,11 @@ 'github_type': 'star', } -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. 
-#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - # Custom sidebar templates, maps document names to template names. html_sidebars = { '**': [ @@ -178,134 +83,5 @@ ] } -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'DiskCacheDoc' - def setup(app): - app.add_stylesheet('custom.css') - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - (master_doc, 'DiskCache.tex', u'DiskCache Documentation', - u'Grant Jenks', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'diskcache', u'DiskCache Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'DiskCache', u'DiskCache Documentation', - author, 'DiskCache', 'Disk and file backed cache.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False + app.add_css_file('custom.css') diff --git a/docs/development.rst b/docs/development.rst index 6d16d44..828c3be 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -67,12 +67,10 @@ Testing :doc:`DiskCache ` currently tests against five versions of Python: -* CPython 2.7 -* CPython 3.4 * CPython 3.5 * CPython 3.6 * CPython 3.7 -* PyPy2 +* CPython 3.8 Testing uses `tox `_. If you don't want to install all the development requirements, then, after downloading, you can diff --git a/docs/make.bat b/docs/make.bat index e1a063b..2119f51 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -1,62 +1,18 @@ @ECHO OFF +pushd %~dp0 + REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) +set SOURCEDIR=. set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) if "%1" == "" goto help -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. 
gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. coverage to run coverage check of the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 1>NUL 2>NUL -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul +%SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx @@ -69,195 +25,11 @@ if errorlevel 9009 ( exit /b 1 ) -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\DiskCache.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\DiskCache.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
- goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. - goto end -) +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end +popd diff --git a/docs/tutorial.rst b/docs/tutorial.rst index b29322c..2eb454d 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -535,7 +535,7 @@ they are guaranteed to be stored in files. The full path is available on the file handle in the `name` attribute. Remember to also include the `Content-Type` header if known. -.. _`Django documentation on caching`: https://docs.djangoproject.com/en/1.9/topics/cache/#the-low-level-cache-api +.. _`Django documentation on caching`: https://docs.djangoproject.com/en/3.2/topics/cache/#the-low-level-cache-api Deque ----- @@ -565,12 +565,15 @@ access and editing at both front and back sides. :class:`Deque 4 >>> other.popleft() 'foo' + >>> thing = Deque('abcde', maxlen=3) + >>> list(thing) + ['c', 'd', 'e'] :class:`Deque ` objects provide an efficient and safe means of cross-thread and cross-process communication. :class:`Deque ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. 
The deque -uses a fixed amout of memory regardless of the size or number of items stored +uses a fixed amount of memory regardless of the size or number of items stored inside it. Index @@ -603,7 +606,7 @@ interface. :class:`Index ` objects inherit all the benefits of cross-thread and cross-process communication. :class:`Index ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The index -uses a fixed amout of memory regardless of the size or number of items stored +uses a fixed amount of memory regardless of the size or number of items stored inside it. .. _tutorial-transactions: @@ -818,30 +821,28 @@ example below uses compressed JSON, available for convenience as .. code-block:: python - import json, zlib - class JSONDisk(diskcache.Disk): def __init__(self, directory, compress_level=1, **kwargs): self.compress_level = compress_level - super(JSONDisk, self).__init__(directory, **kwargs) + super().__init__(directory, **kwargs) def put(self, key): json_bytes = json.dumps(key).encode('utf-8') data = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).put(data) + return super().put(data) def get(self, key, raw): - data = super(JSONDisk, self).get(key, raw) + data = super().get(key, raw) return json.loads(zlib.decompress(data).decode('utf-8')) - def store(self, value, read): + def store(self, value, read, key=UNKNOWN): if not read: json_bytes = json.dumps(value).encode('utf-8') value = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).store(value, read) + return super().store(value, read, key=key) def fetch(self, mode, filename, value, read): - data = super(JSONDisk, self).fetch(mode, filename, value, read) + data = super().fetch(mode, filename, value, read) if not read: data = json.loads(zlib.decompress(data).decode('utf-8')) return data @@ -863,8 +864,17 @@ protocol`_ is not used. Neither the `__hash__` nor `__eq__` methods are used for lookups. Instead lookups depend on the serialization method defined by :class:`Disk ` objects. For strings, bytes, integers, and floats, equality matches Python's definition. But large integers and all other -types will be converted to bytes using pickling and the bytes representation -will define equality. +types will be converted to bytes and the bytes representation will define +equality. + +The default :class:`diskcache.Disk` serialization uses pickling for both keys +and values. Unfortunately, pickling produces inconsistencies sometimes when +applied to container data types like tuples. Two equal tuples may serialize to +different bytes objects using pickle. The likelihood of differences is reduced +by using `pickletools.optimize` but still inconsistencies occur (`#54`_). The +inconsistent serialized pickle values is particularly problematic when applied +to the key in the cache. Consider using an alternative Disk type, like +:class:`JSONDisk `, for consistent serialization of keys. SQLite is used to synchronize database access between threads and processes and as such inherits all SQLite caveats. Most notably SQLite is `not recommended`_ @@ -892,6 +902,13 @@ thread-pool executor asynchronously. For example:: asyncio.run(set_async('test-key', 'test-value')) +The cache :meth:`volume ` is based on the size of the +database that stores metadata and the size of the values stored in files. It +does not account the size of directories themselves or other filesystem +metadata. 
If directory count or size is a concern then consider implementing an +alternative :class:`Disk `. + +.. _`#54`: https://github.com/grantjenks/python-diskcache/issues/54 .. _`hash protocol`: https://docs.python.org/library/functions.html#hash .. _`not recommended`: https://www.sqlite.org/faq.html#q5 .. _`performs poorly`: https://www.pythonanywhere.com/forums/topic/1847/ diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..053b283 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,4 @@ +[mypy] + +[mypy-django.*] +ignore_missing_imports = True diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..6149361 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,23 @@ +-e . +blue +coverage +django==4.2.* +django_redis +doc8 +flake8 +ipython +jedi +pickleDB +pylibmc +pylint +pytest +pytest-cov +pytest-django +pytest-env +pytest-xdist +rstcheck +sphinx +sqlitedict +tox +twine +wheel diff --git a/requirements.txt b/requirements.txt index 6c1a277..d6e1198 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1 @@ -coverage -django==2.2.* -django_redis -doc8 -gj -nose -pylibmc -pylint -pytest -pytest-cov -pytest-django -pytest-env -pytest-xdist -rstcheck -sphinx -tox -twine -wheel +-e . diff --git a/setup.py b/setup.py index 90dc280..841dfb9 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,3 @@ -from io import open from setuptools import setup from setuptools.command.test import test as TestCommand @@ -10,8 +9,10 @@ def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True + def run_tests(self): import tox + errno = tox.cmdline(self.test_args) exit(errno) @@ -27,10 +28,17 @@ def run_tests(self): author='Grant Jenks', author_email='contact@grantjenks.com', url='http://www.grantjenks.com/docs/diskcache/', + project_urls={ + 'Documentation': 'http://www.grantjenks.com/docs/diskcache/', + 'Funding': 'https://gum.co/diskcache', + 'Source': 'https://github.com/grantjenks/python-diskcache', + 'Tracker': 'https://github.com/grantjenks/python-diskcache/issues', + }, license='Apache 2.0', packages=['diskcache'], tests_require=['tox'], cmdclass={'test': Tox}, + python_requires='>=3', install_requires=[], classifiers=( 'Development Status :: 5 - Production/Stable', @@ -38,14 +46,12 @@ def run_tests(self): 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', ), ) diff --git a/tests/benchmark_core.py b/tests/benchmark_core.py index 1811de0..7d64595 100644 --- a/tests/benchmark_core.py +++ b/tests/benchmark_core.py @@ -3,26 +3,17 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_core.py -p 1 > tests/timings_core_p1.txt $ python tests/benchmark_core.py -p 8 > tests/timings_core_p8.txt - """ -from __future__ import print_function - import collections as co import multiprocessing as mp import os +import pickle import random import shutil -import sys import time import warnings -if sys.hexversion < 0x03000000: - range = xrange - 
import cPickle as pickle -else: - import pickle - from utils import display PROCS = 8 @@ -37,21 +28,32 @@ # Disk Cache Benchmarks ############################################################################### -import diskcache +import diskcache # noqa -caches.append(('diskcache.Cache', diskcache.Cache, ('tmp',), {},)) -caches.append(( - 'diskcache.FanoutCache(shards=4, timeout=1.0)', - diskcache.FanoutCache, - ('tmp',), - {'shards': 4, 'timeout': 1.0} -)) -caches.append(( - 'diskcache.FanoutCache(shards=8, timeout=0.010)', - diskcache.FanoutCache, - ('tmp',), - {'shards': 8, 'timeout': 0.010} -)) +caches.append( + ( + 'diskcache.Cache', + diskcache.Cache, + ('tmp',), + {}, + ) +) +caches.append( + ( + 'diskcache.FanoutCache(shards=4, timeout=1.0)', + diskcache.FanoutCache, + ('tmp',), + {'shards': 4, 'timeout': 1.0}, + ) +) +caches.append( + ( + 'diskcache.FanoutCache(shards=8, timeout=0.010)', + diskcache.FanoutCache, + ('tmp',), + {'shards': 8, 'timeout': 0.010}, + ) +) ############################################################################### @@ -61,12 +63,17 @@ try: import pylibmc - caches.append(( - 'pylibmc.Client', - pylibmc.Client, - (['127.0.0.1'],), - {'binary': True, 'behaviors': {'tcp_nodelay': True, 'ketama': True}}, - )) + caches.append( + ( + 'pylibmc.Client', + pylibmc.Client, + (['127.0.0.1'],), + { + 'binary': True, + 'behaviors': {'tcp_nodelay': True, 'ketama': True}, + }, + ) + ) except ImportError: warnings.warn('skipping pylibmc') @@ -78,12 +85,14 @@ try: import redis - caches.append(( - 'redis.StrictRedis', - redis.StrictRedis, - (), - {'host': 'localhost', 'port': 6379, 'db': 0}, - )) + caches.append( + ( + 'redis.StrictRedis', + redis.StrictRedis, + (), + {'host': 'localhost', 'port': 6379, 'db': 0}, + ) + ) except ImportError: warnings.warn('skipping redis') @@ -91,7 +100,7 @@ def worker(num, kind, args, kwargs): random.seed(num) - time.sleep(0.01) # Let other processes start. + time.sleep(0.01) # Let other processes start. 
obj = kind(*args, **kwargs) @@ -112,13 +121,13 @@ def worker(num, kind, args, kwargs): start = time.time() result = obj.set(key, value) end = time.time() - miss = result == False + miss = result is False action = 'set' else: start = time.time() result = obj.delete(key) end = time.time() - miss = result == False + miss = result is False action = 'delete' if count > WARMUP: @@ -143,7 +152,7 @@ def dispatch(): try: obj.close() - except: + except Exception: pass processes = [ @@ -180,19 +189,31 @@ def dispatch(): formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( - '-p', '--processes', type=int, default=PROCS, + '-p', + '--processes', + type=int, + default=PROCS, help='Number of processes to start', ) parser.add_argument( - '-n', '--operations', type=float, default=OPS, + '-n', + '--operations', + type=float, + default=OPS, help='Number of operations to perform', ) parser.add_argument( - '-r', '--range', type=int, default=RANGE, + '-r', + '--range', + type=int, + default=RANGE, help='Range of keys', ) parser.add_argument( - '-w', '--warmup', type=float, default=WARMUP, + '-w', + '--warmup', + type=float, + default=WARMUP, help='Number of warmup operations before timings', ) diff --git a/tests/benchmark_djangocache.py b/tests/benchmark_djangocache.py index 898188f..61a80bf 100644 --- a/tests/benchmark_djangocache.py +++ b/tests/benchmark_djangocache.py @@ -2,26 +2,15 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_djangocache.py > tests/timings_djangocache.txt - - """ -from __future__ import print_function - import collections as co import multiprocessing as mp import os +import pickle import random import shutil -import sys import time -import warnings - -if sys.hexversion < 0x03000000: - range = xrange - import cPickle as pickle -else: - import pickle from utils import display @@ -34,6 +23,7 @@ def setup(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings_benchmark') import django + django.setup() @@ -48,7 +38,7 @@ def worker(num, name): timings = co.defaultdict(list) - time.sleep(0.01) # Let other processes start. + time.sleep(0.01) # Let other processes start. 
for count in range(OPS): key = str(random.randrange(RANGE)).encode('utf-8') @@ -65,13 +55,13 @@ def worker(num, name): start = time.time() result = obj.set(key, value) end = time.time() - miss = result == False + miss = result is False action = 'set' else: start = time.time() result = obj.delete(key) end = time.time() - miss = result == False + miss = result is False action = 'delete' if count > WARMUP: @@ -97,14 +87,14 @@ def prepare(name): try: obj.close() - except: + except Exception: pass def dispatch(): setup() - from django.core.cache import caches + from django.core.cache import caches # noqa for name in ['locmem', 'memcached', 'redis', 'diskcache', 'filebased']: shutil.rmtree('tmp', ignore_errors=True) @@ -147,19 +137,31 @@ def dispatch(): formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( - '-p', '--processes', type=int, default=PROCS, + '-p', + '--processes', + type=int, + default=PROCS, help='Number of processes to start', ) parser.add_argument( - '-n', '--operations', type=float, default=OPS, + '-n', + '--operations', + type=float, + default=OPS, help='Number of operations to perform', ) parser.add_argument( - '-r', '--range', type=int, default=RANGE, + '-r', + '--range', + type=int, + default=RANGE, help='Range of keys', ) parser.add_argument( - '-w', '--warmup', type=float, default=WARMUP, + '-w', + '--warmup', + type=float, + default=WARMUP, help='Number of warmup operations before timings', ) diff --git a/tests/benchmark_glob.py b/tests/benchmark_glob.py index 0402ef8..7da5fd3 100644 --- a/tests/benchmark_glob.py +++ b/tests/benchmark_glob.py @@ -1,6 +1,4 @@ -"Benchmark glob.glob1 as used by django.core.cache.backends.filebased." - -from __future__ import print_function +"""Benchmark glob.glob1 as used by django.core.cache.backends.filebased.""" import os import os.path as op @@ -24,15 +22,13 @@ print(template % ('Count', 'Time')) print(' '.join(['=' * size] * len(cols))) -for count in [10 ** exp for exp in range(6)]: +for count in [10**exp for exp in range(6)]: for value in range(count): with open(op.join('tmp', '%s.tmp' % value), 'wb') as writer: pass - + delta = timeit.timeit( - stmt="glob.glob1('tmp', '*.tmp')", - setup='import glob', - number=100 + stmt="glob.glob1('tmp', '*.tmp')", setup='import glob', number=100 ) print(template % (count, secs(delta))) diff --git a/tests/benchmark_incr.py b/tests/benchmark_incr.py index 4a01628..4f758aa 100644 --- a/tests/benchmark_incr.py +++ b/tests/benchmark_incr.py @@ -1,9 +1,6 @@ """Benchmark cache.incr method. - """ -from __future__ import print_function - import json import multiprocessing as mp import shutil @@ -18,7 +15,7 @@ def worker(num): - "Rapidly increment key and time operation." + """Rapidly increment key and time operation.""" time.sleep(0.1) # Let other workers start. cache = dc.Cache('tmp') @@ -35,7 +32,7 @@ def worker(num): def main(): - "Run workers and print percentile results." + """Run workers and print percentile results.""" shutil.rmtree('tmp', ignore_errors=True) processes = [ diff --git a/tests/benchmark_kv_store.py b/tests/benchmark_kv_store.py index d1459e7..7015470 100644 --- a/tests/benchmark_kv_store.py +++ b/tests/benchmark_kv_store.py @@ -1,48 +1,77 @@ -import dbm +"""Benchmarking Key-Value Stores + +$ python -m IPython tests/benchmark_kv_store.py +""" + +from IPython import get_ipython + import diskcache -import pickledb -import shelve -import sqlitedict -import timeit + +ipython = get_ipython() +assert ipython is not None, 'No IPython! Run with $ ipython ...' 
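# Aside: ipython.magic('timeit ...') is IPython's legacy programmatic entry
# point and is deprecated in newer IPython releases in favor of
# run_line_magic, which takes the magic name and its argument line
# separately, e.g.:
#
#     ipython.run_line_magic('timeit', "-n 100 -r 7 dc['key'] = value")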
value = 'value' print('diskcache set') dc = diskcache.FanoutCache('/tmp/diskcache') -%timeit -n 100 -r 7 dc['key'] = value +ipython.magic("timeit -n 100 -r 7 dc['key'] = value") print('diskcache get') -%timeit -n 100 -r 7 dc['key'] +ipython.magic("timeit -n 100 -r 7 dc['key']") print('diskcache set/delete') -%timeit -n 100 -r 7 dc['key'] = value; del dc['key'] - -print('dbm set') -d = dbm.open('/tmp/dbm', 'c') -%timeit -n 100 -r 7 d['key'] = value; d.sync() -print('dbm get') -%timeit -n 100 -r 7 d['key'] -print('dbm set/delete') -%timeit -n 100 -r 7 d['key'] = value; del d['key']; d.sync() - -print('shelve set') -s = shelve.open('/tmp/shelve') -%timeit -n 100 -r 7 s['key'] = value; s.sync() -print('shelve get') -%timeit -n 100 -r 7 s['key'] -print('shelve set/delete') -%timeit -n 100 -r 7 s['key'] = value; del s['key']; s.sync() - -print('sqlitedict set') -sd = sqlitedict.SqliteDict('/tmp/sqlitedict', autocommit=True) -%timeit -n 100 -r 7 sd['key'] = value -print('sqlitedict get') -%timeit -n 100 -r 7 sd['key'] -print('sqlitedict set/delete') -%timeit -n 100 -r 7 sd['key'] = value; del sd['key'] - -print('pickledb set') -p = pickledb.load('/tmp/pickledb', True) -%timeit -n 100 -r 7 p['key'] = value -print('pickledb get') -%timeit -n 100 -r 7 p = pickledb.load('/tmp/pickledb', True); p['key'] -print('pickledb set/delete') -%timeit -n 100 -r 7 p['key'] = value; del p['key'] +ipython.magic("timeit -n 100 -r 7 dc['key'] = value; del dc['key']") + +try: + import dbm.gnu # Only trust GNU DBM +except ImportError: + print('Error: Cannot import dbm.gnu') + print('Error: Skipping import shelve') +else: + print('dbm set') + d = dbm.gnu.open('/tmp/dbm', 'c') + ipython.magic("timeit -n 100 -r 7 d['key'] = value; d.sync()") + print('dbm get') + ipython.magic("timeit -n 100 -r 7 d['key']") + print('dbm set/delete') + ipython.magic( + "timeit -n 100 -r 7 d['key'] = value; d.sync(); del d['key']; d.sync()" + ) + + import shelve + + print('shelve set') + s = shelve.open('/tmp/shelve') + ipython.magic("timeit -n 100 -r 7 s['key'] = value; s.sync()") + print('shelve get') + ipython.magic("timeit -n 100 -r 7 s['key']") + print('shelve set/delete') + ipython.magic( + "timeit -n 100 -r 7 s['key'] = value; s.sync(); del s['key']; s.sync()" + ) + +try: + import sqlitedict +except ImportError: + print('Error: Cannot import sqlitedict') +else: + print('sqlitedict set') + sd = sqlitedict.SqliteDict('/tmp/sqlitedict', autocommit=True) + ipython.magic("timeit -n 100 -r 7 sd['key'] = value") + print('sqlitedict get') + ipython.magic("timeit -n 100 -r 7 sd['key']") + print('sqlitedict set/delete') + ipython.magic("timeit -n 100 -r 7 sd['key'] = value; del sd['key']") + +try: + import pickledb +except ImportError: + print('Error: Cannot import pickledb') +else: + print('pickledb set') + p = pickledb.load('/tmp/pickledb', True) + ipython.magic("timeit -n 100 -r 7 p['key'] = value") + print('pickledb get') + ipython.magic( + "timeit -n 100 -r 7 p = pickledb.load('/tmp/pickledb', True); p['key']" + ) + print('pickledb set/delete') + ipython.magic("timeit -n 100 -r 7 p['key'] = value; del p['key']") diff --git a/tests/issue_109.py b/tests/issue_109.py index a650b4c..a649c58 100644 --- a/tests/issue_109.py +++ b/tests/issue_109.py @@ -1,13 +1,14 @@ """Benchmark for Issue #109 - """ import time + import diskcache as dc def main(): import argparse + parser = argparse.ArgumentParser() parser.add_argument('--cache-dir', default='/tmp/test') parser.add_argument('--iterations', type=int, default=100) @@ -31,7 +32,7 @@ def main(): 
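# Aside: the slice below discards the warmup samples (the first two
# iterations per value) before the remaining delays are sorted and scaled to
# microseconds, so percentiles can be read off the list by index. An
# illustrative helper, not part of this file:
#
#     def percentile(sorted_values, fraction):
#         index = int(len(sorted_values) * fraction)
#         return sorted_values[min(index, len(sorted_values) - 1)]
#
#     # p99 = percentile(delays, 0.99)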
delays.append(diff) # Discard warmup delays, first two iterations. - del delays[:(len(values) * 2)] + del delays[: (len(values) * 2)] # Convert seconds to microseconds. delays = sorted(delay * 1e6 for delay in delays) diff --git a/tests/issue_85.py b/tests/issue_85.py index a52de97..cb8789b 100644 --- a/tests/issue_85.py +++ b/tests/issue_85.py @@ -2,11 +2,9 @@ $ export PYTHONPATH=`pwd` $ python tests/issue_85.py - """ import collections -import django import os import random import shutil @@ -14,6 +12,8 @@ import threading import time +import django + def remove_cache_dir(): print('REMOVING CACHE DIRECTORY') @@ -26,6 +26,7 @@ def init_django(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings') django.setup() from django.core.cache import cache + shard = cache._cache._shards[0] @@ -107,7 +108,7 @@ def run(statements): shard._sql(statement) if index == 0: values.append(('BEGIN', ident)) - except sqlite3.OperationalError as exc: + except sqlite3.OperationalError: values.append(('ERROR', ident)) diff --git a/tests/models.py b/tests/models.py index 349fd87..a546822 100644 --- a/tests/models.py +++ b/tests/models.py @@ -10,4 +10,6 @@ def expensive_calculation(): class Poll(models.Model): question = models.CharField(max_length=200) answer = models.CharField(max_length=200) - pub_date = models.DateTimeField('date published', default=expensive_calculation) + pub_date = models.DateTimeField( + 'date published', default=expensive_calculation + ) diff --git a/tests/plot.py b/tests/plot.py index d8d1be0..fcac0bc 100644 --- a/tests/plot.py +++ b/tests/plot.py @@ -2,18 +2,18 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/plot.py --show tests/timings_core_p1.txt - """ import argparse import collections as co -import matplotlib.pyplot as plt import re import sys +import matplotlib.pyplot as plt + def parse_timing(timing, limit): - "Parse timing." + """Parse timing.""" if timing.endswith('ms'): value = float(timing[:-2]) * 1e-3 elif timing.endswith('us'): @@ -25,12 +25,12 @@ def parse_timing(timing, limit): def parse_row(row, line): - "Parse row." + """Parse row.""" return [val.strip() for val in row.match(line).groups()] def parse_data(infile): - "Parse data from `infile`." + """Parse data from `infile`.""" blocks = re.compile(' '.join(['=' * 9] * 8)) dashes = re.compile('^-{79}$') title = re.compile('^Timings for (.*)$') @@ -47,7 +47,7 @@ def parse_data(infile): if blocks.match(line): try: name = title.match(lines[index + 1]).group(1) - except: + except Exception: index += 1 continue @@ -82,7 +82,7 @@ def parse_data(infile): def make_plot(data, action, save=False, show=False, limit=0.005): - "Make plot." 
+ """Make plot.""" fig, ax = plt.subplots(figsize=(8, 10)) colors = ['#ff7f00', '#377eb8', '#4daf4a', '#984ea3', '#e41a1c'] width = 0.15 @@ -93,12 +93,17 @@ def make_plot(data, action, save=False, show=False, limit=0.005): bars = [] for pos, (name, color) in enumerate(zip(names, colors)): - bars.append(ax.bar( - [val + pos * width for val in index], - [parse_timing(data[name][action][tick], limit) for tick in ticks], - width, - color=color, - )) + bars.append( + ax.bar( + [val + pos * width for val in index], + [ + parse_timing(data[name][action][tick], limit) + for tick in ticks + ], + width, + color=color, + ) + ) ax.set_ylabel('Time (microseconds)') ax.set_title('"%s" Time vs Percentile' % action) @@ -106,12 +111,14 @@ def make_plot(data, action, save=False, show=False, limit=0.005): ax.set_xticklabels(ticks) box = ax.get_position() - ax.set_position([box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8]) + ax.set_position( + [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8] + ) ax.legend( [bar[0] for bar in bars], names, loc='lower center', - bbox_to_anchor=(0.5, -0.25) + bbox_to_anchor=(0.5, -0.25), ) if show: diff --git a/tests/plot_early_recompute.py b/tests/plot_early_recompute.py index bbebc1a..1508c45 100644 --- a/tests/plot_early_recompute.py +++ b/tests/plot_early_recompute.py @@ -1,14 +1,14 @@ """Early Recomputation Measurements - """ -import diskcache as dc import functools as ft import multiprocessing.pool import shutil import threading import time +import diskcache as dc + def make_timer(times): """Make a decorator which accumulates (start, end) in `times` for function @@ -16,15 +16,18 @@ def make_timer(times): """ lock = threading.Lock() + def timer(func): @ft.wraps(func) def wrapper(*args, **kwargs): start = time.time() - result = func(*args, **kwargs) + func(*args, **kwargs) pair = start, time.time() with lock: times.append(pair) + return wrapper + return timer @@ -33,9 +36,11 @@ def make_worker(times, delay=0.2): `delay` seconds. """ + @make_timer(times) def worker(): time.sleep(delay) + return worker @@ -44,24 +49,27 @@ def make_repeater(func, total=10, delay=0.01): repeatedly until `total` seconds have elapsed. """ + def repeat(num): start = time.time() while time.time() - start < total: func() time.sleep(delay) + return repeat def frange(start, stop, step=1e-3): - "Generator for floating point values from `start` to `stop` by `step`." + """Generator for floating point values from `start` to `stop` by `step`.""" while start < stop: yield start start += step def plot(option, filename, cache_times, worker_times): - "Plot concurrent workers and latency." + """Plot concurrent workers and latency.""" import matplotlib.pyplot as plt + fig, (workers, latency) = plt.subplots(2, sharex=True) fig.suptitle(option) diff --git a/tests/settings.py b/tests/settings.py index 1a2f569..04aee85 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -25,7 +25,7 @@ # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True -ALLOWED_HOSTS = [u'testserver'] +ALLOWED_HOSTS = ['testserver'] # Application definition diff --git a/tests/settings_benchmark.py b/tests/settings_benchmark.py index 5b614a5..c734e68 100644 --- a/tests/settings_benchmark.py +++ b/tests/settings_benchmark.py @@ -1,9 +1,9 @@ -from .settings import * +from .settings import * # noqa CACHES = { 'default': { 'BACKEND': 'diskcache.DjangoCache', - 'LOCATION': CACHE_DIR, + 'LOCATION': CACHE_DIR, # noqa }, 'memcached': { 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', @@ -14,7 +14,7 @@ 'LOCATION': 'redis://127.0.0.1:6379/1', 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - } + }, }, 'filebased': { 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', @@ -22,7 +22,7 @@ 'OPTIONS': { 'CULL_FREQUENCY': 10, 'MAX_ENTRIES': 1000, - } + }, }, 'locmem': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', @@ -30,7 +30,7 @@ 'OPTIONS': { 'CULL_FREQUENCY': 10, 'MAX_ENTRIES': 1000, - } + }, }, 'diskcache': { 'BACKEND': 'diskcache.DjangoCache', diff --git a/tests/stress_test_core.py b/tests/stress_test_core.py index bce0d16..2b2578b 100644 --- a/tests/stress_test_core.py +++ b/tests/stress_test_core.py @@ -1,28 +1,17 @@ -"Stress test diskcache.core.Cache." - -from __future__ import print_function +"""Stress test diskcache.core.Cache.""" import collections as co -from diskcache import Cache, UnknownFileWarning, EmptyDirWarning, Timeout import multiprocessing as mp import os +import pickle +import queue import random import shutil -import sys import threading import time import warnings -try: - import Queue -except ImportError: - import queue as Queue - -if sys.hexversion < 0x03000000: - range = xrange - import cPickle as pickle -else: - import pickle +from diskcache import Cache, EmptyDirWarning, Timeout, UnknownFileWarning from .utils import display @@ -44,13 +33,15 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)).encode('utf-8') + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) + ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size @@ -60,7 +51,14 @@ def make_float(): def make_object(): return (make_float(),) * random.randint(1, 20) - funcs = [make_int, make_long, make_unicode, make_bytes, make_float, make_object] + funcs = [ + make_int, + make_long, + make_unicode, + make_bytes, + make_float, + make_object, + ] while True: func = random.choice(funcs) @@ -77,14 +75,16 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)) - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)).encode('utf-8') - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) + ).encode('utf-8') + size = random.randint(1, int(2**16 / 13)) return word * size def make_float(): @@ -93,7 +93,14 @@ def make_float(): def 
make_object(): return [make_float()] * random.randint(1, int(2e3)) - funcs = [make_int, make_long, make_unicode, make_bytes, make_float, make_object] + funcs = [ + make_int, + make_long, + make_unicode, + make_bytes, + make_float, + make_object, + ] while True: func = random.choice(funcs) @@ -145,7 +152,12 @@ def worker(queue, eviction_policy, processes, threads): stop = time.time() - if action == 'get' and processes == 1 and threads == 1 and EXPIRE is None: + if ( + action == 'get' + and processes == 1 + and threads == 1 + and EXPIRE is None + ): assert result == value if index > WARMUP: @@ -163,11 +175,13 @@ def dispatch(num, eviction_policy, processes, threads): with open('input-%s.pkl' % num, 'rb') as reader: process_queue = pickle.load(reader) - thread_queues = [Queue.Queue() for _ in range(threads)] + thread_queues = [queue.Queue() for _ in range(threads)] subthreads = [ threading.Thread( - target=worker, args=(thread_queue, eviction_policy, processes, threads) - ) for thread_queue in thread_queues + target=worker, + args=(thread_queue, eviction_policy, processes, threads), + ) + for thread_queue in thread_queues ] for index, triplet in enumerate(process_queue): @@ -177,7 +191,7 @@ def dispatch(num, eviction_policy, processes, threads): for thread_queue in thread_queues: thread_queue.put(None) - start = time.time() + # start = time.time() for thread in subthreads: thread.start() @@ -185,7 +199,7 @@ def dispatch(num, eviction_policy, processes, threads): for thread in subthreads: thread.join() - stop = time.time() + # stop = time.time() timings = co.defaultdict(list) @@ -212,9 +226,13 @@ def percentile(sequence, percent): return values[pos] -def stress_test(create=True, delete=True, - eviction_policy=u'least-recently-stored', - processes=1, threads=1): +def stress_test( + create=True, + delete=True, + eviction_policy='least-recently-stored', + processes=1, + threads=1, +): shutil.rmtree('tmp', ignore_errors=True) if processes == 1: @@ -270,22 +288,22 @@ def stress_test(create=True, delete=True, def stress_test_lru(): - "Stress test least-recently-used eviction policy." - stress_test(eviction_policy=u'least-recently-used') + """Stress test least-recently-used eviction policy.""" + stress_test(eviction_policy='least-recently-used') def stress_test_lfu(): - "Stress test least-frequently-used eviction policy." - stress_test(eviction_policy=u'least-frequently-used') + """Stress test least-frequently-used eviction policy.""" + stress_test(eviction_policy='least-frequently-used') def stress_test_none(): - "Stress test 'none' eviction policy." - stress_test(eviction_policy=u'none') + """Stress test 'none' eviction policy.""" + stress_test(eviction_policy='none') def stress_test_mp(): - "Stress test multiple threads and processes." 
+ """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) @@ -296,52 +314,85 @@ def stress_test_mp(): formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( - '-n', '--operations', type=float, default=OPERATIONS, + '-n', + '--operations', + type=float, + default=OPERATIONS, help='Number of operations to perform', ) parser.add_argument( - '-g', '--get-average', type=float, default=GET_AVERAGE, + '-g', + '--get-average', + type=float, + default=GET_AVERAGE, help='Expected value of exponential variate used for GET count', ) parser.add_argument( - '-k', '--key-count', type=float, default=KEY_COUNT, - help='Number of unique keys' + '-k', + '--key-count', + type=float, + default=KEY_COUNT, + help='Number of unique keys', ) parser.add_argument( - '-d', '--del-chance', type=float, default=DEL_CHANCE, + '-d', + '--del-chance', + type=float, + default=DEL_CHANCE, help='Likelihood of a key deletion', ) parser.add_argument( - '-w', '--warmup', type=float, default=WARMUP, + '-w', + '--warmup', + type=float, + default=WARMUP, help='Number of warmup operations before timings', ) parser.add_argument( - '-e', '--expire', type=float, default=EXPIRE, + '-e', + '--expire', + type=float, + default=EXPIRE, help='Number of seconds before key expires', ) parser.add_argument( - '-t', '--threads', type=int, default=1, + '-t', + '--threads', + type=int, + default=1, help='Number of threads to start in each process', ) parser.add_argument( - '-p', '--processes', type=int, default=1, + '-p', + '--processes', + type=int, + default=1, help='Number of processes to start', ) parser.add_argument( - '-s', '--seed', type=int, default=0, + '-s', + '--seed', + type=int, + default=0, help='Random seed', ) parser.add_argument( - '--no-create', action='store_false', dest='create', + '--no-create', + action='store_false', + dest='create', help='Do not create operations data', ) parser.add_argument( - '--no-delete', action='store_false', dest='delete', + '--no-delete', + action='store_false', + dest='delete', help='Do not delete operations data', ) parser.add_argument( - '-v', '--eviction-policy', type=unicode, - default=u'least-recently-stored', + '-v', + '--eviction-policy', + type=str, + default='least-recently-stored', ) args = parser.parse_args() diff --git a/tests/stress_test_deque.py b/tests/stress_test_deque.py index cf48812..845b2c2 100644 --- a/tests/stress_test_deque.py +++ b/tests/stress_test_deque.py @@ -1,10 +1,7 @@ """Stress test diskcache.persistent.Deque.""" -from __future__ import print_function - import collections as co import functools as ft -import itertools as it import random import diskcache as dc diff --git a/tests/stress_test_deque_mp.py b/tests/stress_test_deque_mp.py index 4624d71..f3b8a48 100644 --- a/tests/stress_test_deque_mp.py +++ b/tests/stress_test_deque_mp.py @@ -1,11 +1,7 @@ """Stress test diskcache.persistent.Deque.""" -from __future__ import print_function - -import functools as ft import itertools as it import multiprocessing as mp -import os import random import time @@ -110,12 +106,6 @@ def stress(seed, deque): def test(status=False): - if os.environ.get('TRAVIS') == 'true': - return - - if os.environ.get('APPVEYOR') == 'True': - return - random.seed(SEED) deque = dc.Deque(range(SIZE)) processes = [] diff --git a/tests/stress_test_fanout.py b/tests/stress_test_fanout.py index 080b8d8..e78dda5 100644 --- a/tests/stress_test_fanout.py +++ b/tests/stress_test_fanout.py @@ -1,28 +1,16 @@ -"Stress test diskcache.core.Cache." 
+"""Stress test diskcache.core.Cache.""" -from __future__ import print_function - -import collections as co -from diskcache import FanoutCache, UnknownFileWarning, EmptyDirWarning import multiprocessing as mp import os +import pickle +import queue import random import shutil -import sys import threading import time import warnings -try: - import Queue -except ImportError: - import queue as Queue - -if sys.hexversion < 0x03000000: - range = xrange - import cPickle as pickle -else: - import pickle +from diskcache import EmptyDirWarning, FanoutCache, UnknownFileWarning from .utils import display @@ -44,13 +32,15 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)).encode('utf-8') + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) + ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size @@ -60,7 +50,14 @@ def make_float(): def make_object(): return (make_float(),) * random.randint(1, 20) - funcs = [make_int, make_long, make_unicode, make_bytes, make_float, make_object] + funcs = [ + make_int, + make_long, + make_unicode, + make_bytes, + make_float, + make_object, + ] while True: func = random.choice(funcs) @@ -77,14 +74,16 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)) - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size)).encode('utf-8') - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) + ).encode('utf-8') + size = random.randint(1, int(2**16 / 13)) return word * size def make_float(): @@ -93,7 +92,14 @@ def make_float(): def make_object(): return [make_float()] * random.randint(1, int(2e3)) - funcs = [make_int, make_long, make_unicode, make_bytes, make_float, make_object] + funcs = [ + make_int, + make_long, + make_unicode, + make_bytes, + make_float, + make_object, + ] while True: func = random.choice(funcs) @@ -140,7 +146,12 @@ def worker(queue, eviction_policy, processes, threads): stop = time.time() - if action == 'get' and processes == 1 and threads == 1 and EXPIRE is None: + if ( + action == 'get' + and processes == 1 + and threads == 1 + and EXPIRE is None + ): assert result == value if index > WARMUP: @@ -155,11 +166,13 @@ def dispatch(num, eviction_policy, processes, threads): with open('input-%s.pkl' % num, 'rb') as reader: process_queue = pickle.load(reader) - thread_queues = [Queue.Queue() for _ in range(threads)] + thread_queues = [queue.Queue() for _ in range(threads)] subthreads = [ threading.Thread( - target=worker, args=(thread_queue, eviction_policy, processes, threads) - ) for thread_queue in thread_queues + target=worker, + args=(thread_queue, eviction_policy, processes, threads), + ) + for thread_queue in thread_queues ] for index, triplet in enumerate(process_queue): @@ -204,9 +217,13 @@ def percentile(sequence, percent): return values[pos] -def 
stress_test(create=True, delete=True, - eviction_policy=u'least-recently-stored', - processes=1, threads=1): +def stress_test( + create=True, + delete=True, + eviction_policy='least-recently-stored', + processes=1, + threads=1, +): shutil.rmtree('tmp', ignore_errors=True) if processes == 1: @@ -262,22 +279,22 @@ def stress_test(create=True, delete=True, def stress_test_lru(): - "Stress test least-recently-used eviction policy." - stress_test(eviction_policy=u'least-recently-used') + """Stress test least-recently-used eviction policy.""" + stress_test(eviction_policy='least-recently-used') def stress_test_lfu(): - "Stress test least-frequently-used eviction policy." - stress_test(eviction_policy=u'least-frequently-used') + """Stress test least-frequently-used eviction policy.""" + stress_test(eviction_policy='least-frequently-used') def stress_test_none(): - "Stress test 'none' eviction policy." - stress_test(eviction_policy=u'none') + """Stress test 'none' eviction policy.""" + stress_test(eviction_policy='none') def stress_test_mp(): - "Stress test multiple threads and processes." + """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) @@ -288,52 +305,85 @@ def stress_test_mp(): formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( - '-n', '--operations', type=float, default=OPERATIONS, + '-n', + '--operations', + type=float, + default=OPERATIONS, help='Number of operations to perform', ) parser.add_argument( - '-g', '--get-average', type=float, default=GET_AVERAGE, + '-g', + '--get-average', + type=float, + default=GET_AVERAGE, help='Expected value of exponential variate used for GET count', ) parser.add_argument( - '-k', '--key-count', type=float, default=KEY_COUNT, - help='Number of unique keys' + '-k', + '--key-count', + type=float, + default=KEY_COUNT, + help='Number of unique keys', ) parser.add_argument( - '-d', '--del-chance', type=float, default=DEL_CHANCE, + '-d', + '--del-chance', + type=float, + default=DEL_CHANCE, help='Likelihood of a key deletion', ) parser.add_argument( - '-w', '--warmup', type=float, default=WARMUP, + '-w', + '--warmup', + type=float, + default=WARMUP, help='Number of warmup operations before timings', ) parser.add_argument( - '-e', '--expire', type=float, default=EXPIRE, + '-e', + '--expire', + type=float, + default=EXPIRE, help='Number of seconds before key expires', ) parser.add_argument( - '-t', '--threads', type=int, default=1, + '-t', + '--threads', + type=int, + default=1, help='Number of threads to start in each process', ) parser.add_argument( - '-p', '--processes', type=int, default=1, + '-p', + '--processes', + type=int, + default=1, help='Number of processes to start', ) parser.add_argument( - '-s', '--seed', type=int, default=0, + '-s', + '--seed', + type=int, + default=0, help='Random seed', ) parser.add_argument( - '--no-create', action='store_false', dest='create', + '--no-create', + action='store_false', + dest='create', help='Do not create operations data', ) parser.add_argument( - '--no-delete', action='store_false', dest='delete', + '--no-delete', + action='store_false', + dest='delete', help='Do not delete operations data', ) parser.add_argument( - '-v', '--eviction-policy', type=unicode, - default=u'least-recently-stored', + '-v', + '--eviction-policy', + type=str, + default='least-recently-stored', ) args = parser.parse_args() diff --git a/tests/stress_test_index.py b/tests/stress_test_index.py index 2846d9c..e7ba3f6 100644 --- a/tests/stress_test_index.py +++ 
b/tests/stress_test_index.py @@ -1,7 +1,5 @@ """Stress test diskcache.persistent.Index.""" -from __future__ import print_function - import collections as co import itertools as it import random diff --git a/tests/stress_test_index_mp.py b/tests/stress_test_index_mp.py index b3ed813..06ed102 100644 --- a/tests/stress_test_index_mp.py +++ b/tests/stress_test_index_mp.py @@ -1,10 +1,7 @@ """Stress test diskcache.persistent.Index.""" -from __future__ import print_function - import itertools as it import multiprocessing as mp -import os import random import time @@ -96,12 +93,6 @@ def stress(seed, index): def test(status=False): - if os.environ.get('TRAVIS') == 'true': - return - - if os.environ.get('APPVEYOR') == 'True': - return - random.seed(SEED) index = dc.Index(enumerate(range(KEYS))) processes = [] diff --git a/tests/test_core.py b/tests/test_core.py index 7c38874..788afef 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,34 +1,27 @@ -"Test diskcache.core.Cache." +"""Test diskcache.core.Cache.""" -import collections as co import errno -import functools as ft import hashlib import io import os import os.path as op +import pathlib import pickle -import pytest -import random import shutil import sqlite3 import subprocess as sp -import sys import tempfile import threading import time -import unittest import warnings - from unittest import mock -import diskcache +import pytest + import diskcache as dc pytestmark = pytest.mark.filterwarnings('ignore', category=dc.EmptyDirWarning) -if sys.hexversion < 0x03000000: - range = xrange @pytest.fixture def cache(): @@ -45,12 +38,19 @@ def test_init(cache): cache.close() +def test_init_path(cache): + path = pathlib.Path(cache.directory) + other = dc.Cache(path) + other.close() + assert cache.directory == other.directory + + def test_init_disk(): - with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2 ** 20) as cache: + with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2**20) as cache: key = (None, 0, 'abc') cache[key] = 0 cache.check() - assert cache.disk_min_file_size == 2 ** 20 + assert cache.disk_min_file_size == 2**20 assert cache.disk_pickle_protocol == 1 shutil.rmtree(cache.directory, ignore_errors=True) @@ -67,15 +67,15 @@ def test_disk_reset(): assert cache._disk.min_file_size == 0 assert cache._disk.pickle_protocol == 0 - cache.reset('disk_min_file_size', 2 ** 10) + cache.reset('disk_min_file_size', 2**10) cache.reset('disk_pickle_protocol', 2) cache[1] = value cache.check() - assert cache.disk_min_file_size == 2 ** 10 + assert cache.disk_min_file_size == 2**10 assert cache.disk_pickle_protocol == 2 - assert cache._disk.min_file_size == 2 ** 10 + assert cache._disk.min_file_size == 2**10 assert cache._disk.pickle_protocol == 2 shutil.rmtree(cache.directory, ignore_errors=True) @@ -97,10 +97,15 @@ def test_custom_disk(): for value in values: assert cache[value] == value + for key, value in zip(cache, values): + assert key == value + + test_memoize_iter(cache) + shutil.rmtree(cache.directory, ignore_errors=True) -class SHA256FilenameDisk(diskcache.Disk): +class SHA256FilenameDisk(dc.Disk): def filename(self, key=dc.UNKNOWN, value=dc.UNKNOWN): filename = hashlib.sha256(key).hexdigest()[:32] full_path = op.join(self._directory, filename) @@ -133,7 +138,7 @@ def test_init_makedirs(): with pytest.raises(EnvironmentError): try: with mock.patch('os.makedirs', makedirs): - cache = dc.Cache(cache_dir) + dc.Cache(cache_dir) except EnvironmentError: shutil.rmtree(cache_dir, ignore_errors=True) raise @@ -153,7 +158,7 @@ def 
test_pragma_error(cache): cursor.fetchall = fetchall fetchall.side_effect = [sqlite3.OperationalError] * 60000 - size = 2 ** 28 + size = 2**28 with mock.patch('time.sleep', lambda num: 0): with mock.patch.object(cache, '_local', local): @@ -165,6 +170,7 @@ def test_close_error(cache): class LocalTest(object): def __init__(self): self._calls = 0 + def __getattr__(self, name): if self._calls: raise AttributeError @@ -179,15 +185,15 @@ def __getattr__(self, name): def test_getsetdel(cache): values = [ (None, False), - ((None,) * 2 ** 20, False), + ((None,) * 2**20, False), (1234, False), - (2 ** 512, False), + (2**512, False), (56.78, False), - (u'hello', False), - (u'hello' * 2 ** 20, False), + ('hello', False), + ('hello' * 2**20, False), (b'world', False), - (b'world' * 2 ** 20, False), - (io.BytesIO(b'world' * 2 ** 20), True), + (b'world' * 2**20, False), + (io.BytesIO(b'world' * 2**20), True), ] for key, (value, file_like) in enumerate(values): @@ -231,7 +237,7 @@ def test_get_keyerror4(cache): func = mock.Mock(side_effect=IOError(errno.ENOENT, '')) cache.reset('statistics', True) - cache[0] = b'abcd' * 2 ** 20 + cache[0] = b'abcd' * 2**20 with mock.patch('diskcache.core.open', func): with pytest.raises((IOError, KeyError, OSError)): @@ -239,19 +245,19 @@ def test_get_keyerror4(cache): def test_read(cache): - cache.set(0, b'abcd' * 2 ** 20) + cache.set(0, b'abcd' * 2**20) with cache.read(0) as reader: assert reader is not None def test_read_keyerror(cache): with pytest.raises(KeyError): - with cache.read(0) as reader: + with cache.read(0): pass def test_set_twice(cache): - large_value = b'abcd' * 2 ** 20 + large_value = b'abcd' * 2**20 cache[0] = 0 cache[0] = 1 @@ -285,7 +291,7 @@ def test_set_timeout(cache): with pytest.raises(dc.Timeout): try: with mock.patch.object(cache, '_local', local): - cache.set('a', 'b' * 2 ** 20) + cache.set('a', 'b' * 2**20) finally: cache.check() @@ -301,11 +307,12 @@ def test_get(cache): assert cache.get(2, {}) == {} assert cache.get(0, expire_time=True, tag=True) == (None, None, None) - assert cache.set(0, 0, expire=None, tag=u'number') + assert cache.set(0, 0, expire=None, tag='number') assert cache.get(0, expire_time=True) == (0, None) - assert cache.get(0, tag=True) == (0, u'number') - assert cache.get(0, expire_time=True, tag=True) == (0, None, u'number') + assert cache.get(0, tag=True) == (0, 'number') + assert cache.get(0, expire_time=True, tag=True) == (0, None, 'number') + def test_get_expired_fast_path(cache): assert cache.set(0, 0, expire=0.001) @@ -339,26 +346,6 @@ def test_get_expired_slow_path(cache): assert cache.get(0) is None -def test_get_ioerror_slow_path(cache): - cache.reset('eviction_policy', 'least-recently-used') - cache.set(0, 0) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.get(0) - - def test_pop(cache): assert cache.incr('alpha') == 1 assert cache.pop('alpha') == 1 @@ -380,8 +367,8 @@ def test_pop(cache): assert cache.set('delta', 210) assert cache.pop('delta', expire_time=True) == (210, None) - assert cache.set('epsilon', '0' * 2 ** 20) - assert cache.pop('epsilon') == '0' * 2 ** 20 + assert cache.set('epsilon', '0' * 2**20) + assert cache.pop('epsilon') == '0' * 2**20 def test_pop_ioerror(cache): @@ -402,25 +389,6 @@ def test_pop_ioerror(cache): assert cache.pop(0) 
is None -def test_pop_ioerror_eacces(cache): - assert cache.set(0, 0) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.pop(0) - - def test_delete(cache): cache[0] = 0 assert cache.delete(0) @@ -466,11 +434,11 @@ def test_stats(cache): def test_path(cache): - cache[0] = u'abc' - large_value = b'abc' * 2 ** 20 + cache[0] = 'abc' + large_value = b'abc' * 2**20 cache[1] = large_value - assert cache.get(0, read=True) == u'abc' + assert cache.get(0, read=True) == 'abc' with cache.get(1, read=True) as reader: assert reader.name is not None @@ -505,7 +473,7 @@ def test_expire_rows(cache): def test_least_recently_stored(cache): - cache.reset('eviction_policy', u'least-recently-stored') + cache.reset('eviction_policy', 'least-recently-stored') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 2) @@ -540,7 +508,7 @@ def test_least_recently_stored(cache): def test_least_recently_used(cache): - cache.reset('eviction_policy', u'least-recently-used') + cache.reset('eviction_policy', 'least-recently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) @@ -570,7 +538,7 @@ def test_least_recently_used(cache): def test_least_frequently_used(cache): - cache.reset('eviction_policy', u'least-frequently-used') + cache.reset('eviction_policy', 'least-frequently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) @@ -597,32 +565,9 @@ def test_least_frequently_used(cache): assert len(cache.check()) == 0 -def test_filename_error(cache): - func = mock.Mock(side_effect=OSError(errno.EACCES)) - - with mock.patch('os.makedirs', func): - with pytest.raises(OSError): - cache._disk.filename() - - -def test_remove_error(cache): - func = mock.Mock(side_effect=OSError(errno.EACCES)) - - try: - with mock.patch('os.remove', func): - cache._disk.remove('ab/cd/efg.val') - except OSError: - pass - else: - if os.name == 'nt': - pass # File delete errors ignored on Windows. - else: - raise Exception('test_remove_error failed') - - def test_check(cache): - blob = b'a' * 2 ** 20 - keys = (0, 1, 1234, 56.78, u'hello', b'world', None) + blob = b'a' * 2**20 + keys = (0, 1, 1234, 56.78, 'hello', b'world', None) for key in keys: cache[key] = blob @@ -646,7 +591,7 @@ def test_check(cache): cache.check() cache.check(fix=True) - assert len(cache.check()) == 0 # Should display no warnings. + assert len(cache.check()) == 0 # Should display no warnings. def test_integrity_check(cache): @@ -657,7 +602,7 @@ def test_integrity_check(cache): with io.open(op.join(cache.directory, 'cache.db'), 'r+b') as writer: writer.seek(52) - writer.write(b'\x00\x01') # Should be 0, change it. + writer.write(b'\x00\x01') # Should be 0, change it. 
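# Aside: offset 52 in the 100-byte SQLite database header is a 4-byte
# big-endian field, the "largest root b-tree page" number, which must be
# zero unless auto-vacuum is enabled; poking bytes into it as above is a
# cheap way to make the file fail SQLite's validation. Reading the field
# back with only the standard library (illustrative sketch):
#
#     import struct
#
#     with open('cache.db', 'rb') as reader:
#         header = reader.read(100)
#     (largest_root,) = struct.unpack('>I', header[52:56])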
cache = dc.Cache(cache.directory) @@ -725,12 +670,12 @@ def test_clear_timeout(cache): def test_tag(cache): - assert cache.set(0, None, tag=u'zero') + assert cache.set(0, None, tag='zero') assert cache.set(1, None, tag=1234) assert cache.set(2, None, tag=5.67) assert cache.set(3, None, tag=b'three') - assert cache.get(0, tag=True) == (None, u'zero') + assert cache.get(0, tag=True) == (None, 'zero') assert cache.get(1, tag=True) == (None, 1234) assert cache.get(2, tag=True) == (None, 5.67) assert cache.get(3, tag=True) == (None, b'three') @@ -738,11 +683,11 @@ def test_tag(cache): def test_with(cache): with dc.Cache(cache.directory) as tmp: - tmp[u'a'] = 0 - tmp[u'b'] = 1 + tmp['a'] = 0 + tmp['b'] = 1 - assert cache[u'a'] == 0 - assert cache[u'b'] == 1 + assert cache['a'] == 0 + assert cache['b'] == 1 def test_contains(cache): @@ -771,7 +716,7 @@ def test_add(cache): def test_add_large_value(cache): - value = b'abcd' * 2 ** 20 + value = b'abcd' * 2**20 assert cache.add(b'test-key', value) assert cache.get(b'test-key') == value assert not cache.add(b'test-key', value * 2) @@ -982,7 +927,7 @@ def test_push_peek_expire(cache): def test_push_pull_large_value(cache): - value = b'test' * (2 ** 20) + value = b'test' * (2**20) cache.push(value) assert cache.pull() == (500000000000000, value) assert len(cache) == 0 @@ -990,7 +935,7 @@ def test_push_pull_large_value(cache): def test_push_peek_large_value(cache): - value = b'test' * (2 ** 20) + value = b'test' * (2**20) cache.push(value) assert cache.peek() == (500000000000000, value) assert len(cache) == 1 @@ -1034,44 +979,6 @@ def test_peek_ioerror(cache): assert value == 0 -def test_pull_ioerror_eacces(cache): - assert cache.push(0) == 500000000000000 - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.pull() - - -def test_peek_ioerror_eacces(cache): - assert cache.push(0) == 500000000000000 - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.peek() - - def test_peekitem_extras(cache): with pytest.raises(KeyError): cache.peekitem() @@ -1123,27 +1030,6 @@ def test_peekitem_ioerror(cache): assert value == 2 -def test_peekitem_ioerror_eacces(cache): - assert cache.set('a', 0) - assert cache.set('b', 1) - assert cache.set('c', 2) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.peekitem() - - def test_iterkeys(cache): assert list(cache.iterkeys()) == [] @@ -1266,8 +1152,8 @@ def test_cull_timeout(cache): def test_key_roundtrip(cache): - key_part_0 = u"part0" - key_part_1 = u"part1" + key_part_0 = 'part0' + key_part_1 = 'part1' to_test = [ (key_part_0, key_part_1), [key_part_0, key_part_1], @@ -1285,6 +1171,7 @@ def test_key_roundtrip(cache): def test_constant(): import diskcache.core + assert repr(diskcache.core.ENOVAL) == 'ENOVAL' @@ -1466,6 +1353,55 @@ def fibrec(num): assert 
misses2 == misses1 -if __name__ == '__main__': - import nose - nose.runmodule() +def test_memoize_kwargs(cache): + @cache.memoize(typed=True) + def foo(*args, **kwargs): + return args, kwargs + + assert foo(1, 2, 3, a=4, b=5) == ((1, 2, 3), {'a': 4, 'b': 5}) + + +def test_cleanup_dirs(cache): + value = b'\0' * 2**20 + start_count = len(os.listdir(cache.directory)) + for i in range(10): + cache[i] = value + set_count = len(os.listdir(cache.directory)) + assert set_count > start_count + for i in range(10): + del cache[i] + del_count = len(os.listdir(cache.directory)) + assert start_count == del_count + + +def test_disk_write_os_error(cache): + func = mock.Mock(side_effect=[OSError] * 10) + with mock.patch('diskcache.core.open', func): + with pytest.raises(OSError): + cache[0] = '\0' * 2**20 + + +def test_memoize_ignore(cache): + @cache.memoize(ignore={1, 'arg1'}) + def test(*args, **kwargs): + return args, kwargs + + cache.stats(enable=True) + assert test('a', 'b', 'c', arg0='d', arg1='e', arg2='f') + assert test('a', 'w', 'c', arg0='d', arg1='x', arg2='f') + assert test('a', 'y', 'c', arg0='d', arg1='z', arg2='f') + assert cache.stats() == (2, 1) + + +def test_memoize_iter(cache): + @cache.memoize() + def test(*args, **kwargs): + return sum(args) + sum(kwargs.values()) + + cache.clear() + assert test(1, 2, 3) + assert test(a=1, b=2, c=3) + assert test(-1, 0, 1, a=1, b=2, c=3) + assert len(cache) == 3 + for key in cache: + assert cache[key] == 6 diff --git a/tests/test_deque.py b/tests/test_deque.py index ddf2338..f997a86 100644 --- a/tests/test_deque.py +++ b/tests/test_deque.py @@ -1,12 +1,12 @@ -"Test diskcache.persistent.Deque." +"""Test diskcache.persistent.Deque.""" -import functools as ft import pickle -import pytest import shutil - +import tempfile from unittest import mock +import pytest + import diskcache as dc from diskcache.core import ENOVAL @@ -26,7 +26,7 @@ def deque(): def test_init(): - directory = '/tmp/diskcache/deque' + directory = tempfile.mkdtemp() sequence = list('abcde') deque = dc.Deque(sequence, None) @@ -77,6 +77,20 @@ def test_getsetdel(deque): assert len(deque) == 0 +def test_append(deque): + deque.maxlen = 3 + for item in 'abcde': + deque.append(item) + assert deque == 'cde' + + +def test_appendleft(deque): + deque.maxlen = 3 + for item in 'abcde': + deque.appendleft(item) + assert deque == 'edc' + + def test_index_positive(deque): cache = mock.MagicMock() cache.__len__.return_value = 3 @@ -131,9 +145,12 @@ def test_state(deque): sequence = list('abcde') deque.extend(sequence) assert deque == sequence + deque.maxlen = 3 + assert list(deque) == sequence[-3:] state = pickle.dumps(deque) values = pickle.loads(state) - assert values == sequence + assert values == sequence[-3:] + assert values.maxlen == 3 def test_compare(deque): @@ -156,11 +173,19 @@ def test_indexerror(deque): def test_repr(): - directory = '/tmp/diskcache/deque' + directory = tempfile.mkdtemp() deque = dc.Deque(directory=directory) assert repr(deque) == 'Deque(directory=%r)' % directory +def test_copy(deque): + sequence = list('abcde') + deque.extend(sequence) + temp = deque.copy() + assert deque == sequence + assert temp == sequence + + def test_count(deque): deque += 'abbcccddddeeeee' @@ -279,5 +304,11 @@ def test_rotate_indexerror_negative(deque): deque.rotate(-1) -def test_repr(deque): - assert repr(deque).startswith('Deque(') +def test_peek(deque): + value = b'x' * 100_000 + deque.append(value) + assert len(deque) == 1 + assert deque.peek() == value + assert len(deque) == 1 + assert 
deque.peek() == value
+    assert len(deque) == 1
diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py
index 84d2f95..734ba1b 100644
--- a/tests/test_djangocache.py
+++ b/tests/test_djangocache.py
@@ -1,50 +1,24 @@
 # Most of this file was copied from:
-# https://raw.githubusercontent.com/django/django/stable/2.2.x/tests/cache/tests.py
+# https://raw.githubusercontent.com/django/django/stable/3.2.x/tests/cache/tests.py

 # Unit tests for cache framework
 # Uses whatever cache backend is set in the test settings file.
-import copy
-import io
 import os
 import pickle
-import re
 import shutil
 import tempfile
-import threading
 import time
-import unittest
-import warnings
-
 from unittest import mock

 from django.conf import settings
-from django.core import management, signals
-from django.core.cache import (
-    DEFAULT_CACHE_ALIAS, CacheKeyWarning, InvalidCacheKey, cache, caches,
-)
-from django.core.cache.utils import make_template_fragment_key
-from django.db import close_old_connections, connection, connections
-from django.http import (
-    HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse,
-)
+from django.core.cache import CacheKeyWarning, InvalidCacheBackendError, cache, caches
+from django.http import HttpResponse
 from django.middleware.cache import (
-    CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
-)
-from django.middleware.csrf import CsrfViewMiddleware
-from django.template import engines
-from django.template.context_processors import csrf
-from django.template.response import TemplateResponse
-from django.test import (
-    RequestFactory, SimpleTestCase, TestCase, TransactionTestCase,
-    override_settings,
+    FetchFromCacheMiddleware,
+    UpdateCacheMiddleware,
 )
+from django.test import RequestFactory, TestCase, override_settings
 from django.test.signals import setting_changed
-from django.utils import timezone, translation
-from django.utils.cache import (
-    get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers,
-)
-from django.utils.encoding import force_text
-from django.views.decorators.cache import cache_page

 ################################################################################
 # Setup Django for models import.
@@ -53,6 +27,7 @@
 os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')

 import django
+
 django.setup()

 from .models import Poll, expensive_calculation
@@ -73,6 +48,10 @@ def __getstate__(self):
         raise pickle.PickleError()


+def empty_response(request):
+    return HttpResponse()
+
+
 KEY_ERRORS_WITH_MEMCACHED_MSG = (
     'Cache key contains characters that will cause errors if used with '
     'memcached: %r'
 )


 class UnpicklableType(object):
     # Unpicklable using the default pickling protocol on Python 2.
- __slots__ = 'a', + __slots__ = ('a',) def custom_key_func(key, key_prefix, version): - "A customized cache key function" + """A customized cache key function""" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) @@ -110,7 +89,9 @@ def caches_setting_for_tests(base=None, exclude=None, **params): # params -> _caches_setting_base -> base base = base or {} exclude = exclude or set() - setting = {k: base.copy() for k in _caches_setting_base if k not in exclude} + setting = { + k: base.copy() for k in _caches_setting_base if k not in exclude + } for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) @@ -121,27 +102,39 @@ class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() + # RemovedInDjango41Warning: python-memcached doesn't support .get() with + # default. + supports_get_with_default = True + + # Some clients raise custom exceptions when .incr() or .decr() are called + # with a non-integer value. + incr_decr_type_error = TypeError + def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works - cache.set("key", "value") - self.assertEqual(cache.get("key"), "value") + cache.set('key', 'value') + self.assertEqual(cache.get('key'), 'value') + + def test_default_used_when_none_is_set(self): + """If None is cached, get() returns it instead of the default.""" + cache.set('key_default_none', None) + self.assertIsNone(cache.get('key_default_none', default='default')) def test_add(self): # A key can be added to a cache - cache.add("addkey1", "value") - result = cache.add("addkey1", "newvalue") - self.assertFalse(result) - self.assertEqual(cache.get("addkey1"), "value") + self.assertIs(cache.add('addkey1', 'value'), True) + self.assertIs(cache.add('addkey1', 'newvalue'), False) + self.assertEqual(cache.get('addkey1'), 'value') def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache - self.assertFalse(caches['prefix'].has_key('somekey')) + self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') @@ -150,37 +143,56 @@ def test_prefix(self): def test_non_existent(self): """Nonexistent cache keys return as None/default.""" - self.assertIsNone(cache.get("does_not_exist")) - self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") + self.assertIsNone(cache.get('does_not_exist')) + self.assertEqual(cache.get('does_not_exist', 'bang!'), 'bang!') def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) - self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) + self.assertEqual( + cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'} + ) self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) - self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'}) + self.assertEqual( + cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'} + ) + cache.set_many({'x': None, 'y': 1}) + self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) - self.assertEqual(cache.get("key1"), "spam") - cache.delete("key1") - self.assertIsNone(cache.get("key1")) - self.assertEqual(cache.get("key2"), "eggs") + self.assertEqual(cache.get('key1'), 'spam') + self.assertIs(cache.delete('key1'), 
True) + self.assertIsNone(cache.get('key1')) + self.assertEqual(cache.get('key2'), 'eggs') + + def test_delete_nonexistent(self): + self.assertIs(cache.delete('nonexistent_key'), False) def test_has_key(self): # The cache can be inspected for cache keys - cache.set("hello1", "goodbye1") - self.assertTrue(cache.has_key("hello1")) - self.assertFalse(cache.has_key("goodbye1")) - cache.set("no_expiry", "here", None) - self.assertTrue(cache.has_key("no_expiry")) + cache.set('hello1', 'goodbye1') + self.assertIs(cache.has_key('hello1'), True) + self.assertIs(cache.has_key('goodbye1'), False) + cache.set('no_expiry', 'here', None) + self.assertIs(cache.has_key('no_expiry'), True) + cache.set('null', None) + self.assertIs( + cache.has_key('null'), + True if self.supports_get_with_default else False, + ) def test_in(self): # The in operator can be used to inspect cache contents - cache.set("hello2", "goodbye2") - self.assertIn("hello2", cache) - self.assertNotIn("goodbye2", cache) + cache.set('hello2', 'goodbye2') + self.assertIn('hello2', cache) + self.assertNotIn('goodbye2', cache) + cache.set('null', None) + if self.supports_get_with_default: + self.assertIn('null', cache) + else: + self.assertNotIn('null', cache) def test_incr(self): # Cache values can be incremented @@ -192,6 +204,9 @@ def test_incr(self): self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') + cache.set('null', None) + with self.assertRaises(self.incr_decr_type_error): + cache.incr('null') def test_decr(self): # Cache values can be decremented @@ -203,6 +218,9 @@ def test_decr(self): self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') + cache.set('null', None) + with self.assertRaises(self.incr_decr_type_error): + cache.decr('null') def test_close(self): self.assertTrue(hasattr(cache, 'close')) @@ -219,14 +237,14 @@ def test_data_types(self): 'function': f, 'class': C, } - cache.set("stuff", stuff) - self.assertEqual(cache.get("stuff"), stuff) + cache.set('stuff', stuff) + self.assertEqual(cache.get('stuff'), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() - my_poll = Poll.objects.create(question="Well?") + my_poll = Poll.objects.create(question='Well?') self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) @@ -239,7 +257,7 @@ def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() - Poll.objects.create(question="What?") + Poll.objects.create(question='What?') self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) @@ -252,7 +270,7 @@ def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() - Poll.objects.create(question="What?") + Poll.objects.create(question='What?') self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) @@ -261,7 +279,9 @@ def test_cache_read_for_model_instance_with_deferred(self): runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # 
We only want the default expensive calculation run on creation and set - self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) + self.assertEqual( + expensive_calculation.num_runs, runs_before_cache_read + ) def test_expiration(self): # Cache values can be set to expire @@ -270,26 +290,25 @@ def test_expiration(self): cache.set('expire3', 'very quickly', 1) time.sleep(2) - self.assertIsNone(cache.get("expire1")) + self.assertIsNone(cache.get('expire1')) - cache.add("expire2", "newvalue") - self.assertEqual(cache.get("expire2"), "newvalue") - self.assertFalse(cache.has_key("expire3")) + self.assertIs(cache.add('expire2', 'newvalue'), True) + self.assertEqual(cache.get('expire2'), 'newvalue') + self.assertIs(cache.has_key('expire3'), False) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) - self.assertTrue(cache.has_key('expire1')) + self.assertIs(cache.has_key('expire1'), True) time.sleep(3) - self.assertFalse(cache.has_key('expire1')) - + self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) - self.assertTrue(cache.has_key('expire1')) + self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) @@ -299,7 +318,7 @@ def test_unicode(self): 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', - 'ascii2': {'x': 1} + 'ascii2': {'x': 1}, } # Test `set` for (key, value) in stuff.items(): @@ -310,13 +329,13 @@ def test_unicode(self): # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): - cache.delete(key) - cache.add(key, value) + self.assertIs(cache.delete(key), True) + self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): - cache.delete(key) + self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): @@ -325,6 +344,7 @@ def test_unicode(self): def test_binary_string(self): # Binary strings should be cacheable from zlib import compress, decompress + value = 'value_to_be_compressed' compressed_value = compress(value.encode()) @@ -335,7 +355,7 @@ def test_binary_string(self): self.assertEqual(value, decompress(compressed_result).decode()) # Test add - cache.add('binary1-add', compressed_value) + self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) @@ -348,9 +368,9 @@ def test_binary_string(self): def test_set_many(self): # Multiple keys can be set using set_many - cache.set_many({"key1": "spam", "key2": "eggs"}) - self.assertEqual(cache.get("key1"), "spam") - self.assertEqual(cache.get("key2"), "eggs") + cache.set_many({'key1': 'spam', 'key2': 'eggs'}) + self.assertEqual(cache.get('key1'), 'spam') + self.assertEqual(cache.get('key2'), 'eggs') def test_set_many_returns_empty_list_on_success(self): """set_many() returns an empty list when all keys are inserted.""" @@ -359,39 +379,42 @@ def test_set_many_returns_empty_list_on_success(self): def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter - cache.set_many({"key1": "spam", "key2": "eggs"}, 1) 
+ cache.set_many({'key1': 'spam', 'key2': 'eggs'}, 1) time.sleep(2) - self.assertIsNone(cache.get("key1")) - self.assertIsNone(cache.get("key2")) + self.assertIsNone(cache.get('key1')) + self.assertIsNone(cache.get('key2')) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'}) - cache.delete_many(["key1", "key2"]) - self.assertIsNone(cache.get("key1")) - self.assertIsNone(cache.get("key2")) - self.assertEqual(cache.get("key3"), "ham") + cache.delete_many(['key1', 'key2']) + self.assertIsNone(cache.get('key1')) + self.assertIsNone(cache.get('key2')) + self.assertEqual(cache.get('key3'), 'ham') def test_clear(self): # The cache can be emptied using clear cache.set_many({'key1': 'spam', 'key2': 'eggs'}) cache.clear() - self.assertIsNone(cache.get("key1")) - self.assertIsNone(cache.get("key2")) + self.assertIsNone(cache.get('key1')) + self.assertIsNone(cache.get('key2')) def test_long_timeout(self): """ - Followe memcached's convention where a timeout greater than 30 days is + Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). """ cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') - cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) + self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') - cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) + cache.set_many( + {'key3': 'sausage', 'key4': 'lobster bisque'}, + 60 * 60 * 24 * 30 + 1, + ) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') @@ -402,10 +425,9 @@ def test_forever_timeout(self): cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') - cache.add('key2', 'ham', None) + self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') - added = cache.add('key1', 'new eggs', None) - self.assertIs(added, False) + self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) @@ -413,7 +435,7 @@ def test_forever_timeout(self): self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) - cache.touch('key5', timeout=None) + self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') @@ -424,7 +446,7 @@ def test_zero_timeout(self): cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) - cache.add('key2', 'ham', 0) + self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) @@ -432,15 +454,20 @@ def test_zero_timeout(self): self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) - cache.touch('key5', timeout=0) + self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. 
- cache.set("key1", "spam", 100.2) - self.assertEqual(cache.get("key1"), "spam") + cache.set('key1', 'spam', 100.2) + self.assertEqual(cache.get('key1'), 'spam') + + def _perform_cull_test(self, cull_cache_name, initial_count, final_count): + try: + cull_cache = caches[cull_cache_name] + except InvalidCacheBackendError: + self.skipTest("Culling isn't implemented.") - def _perform_cull_test(self, cull_cache, initial_count, final_count): # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): @@ -453,10 +480,24 @@ def _perform_cull_test(self, cull_cache, initial_count, final_count): self.assertEqual(count, final_count) def test_cull(self): - self._perform_cull_test(caches['cull'], 50, 29) + self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): - self._perform_cull_test(caches['zero_cull'], 50, 19) + self._perform_cull_test('zero_cull', 50, 19) + + def test_cull_delete_when_store_empty(self): + try: + cull_cache = caches['cull'] + except InvalidCacheBackendError: + self.skipTest("Culling isn't implemented.") + old_max_entries = cull_cache._max_entries + # Force _cull to delete on first cached record. + cull_cache._max_entries = -1 + try: + cull_cache.set('force_cull_delete', 'value', 1000) + self.assertIs(cull_cache.has_key('force_cull_delete'), True) + finally: + cull_cache._max_entries = old_max_entries def _perform_invalid_key_test(self, key, expected_warning): """ @@ -473,17 +514,33 @@ def func(key, *args): old_func = cache.key_func cache.key_func = func + tests = [ + ('add', [key, 1]), + ('get', [key]), + ('set', [key, 1]), + ('incr', [key]), + ('decr', [key]), + ('touch', [key]), + ('delete', [key]), + ('get_many', [[key, 'b']]), + ('set_many', [{key: 1, 'b': 2}]), + ('delete_many', [{key: 1, 'b': 2}]), + ] try: - with self.assertWarns(CacheKeyWarning) as cm: - cache.set(key, 'value') - self.assertEqual(str(cm.warning), expected_warning) + for operation, args in tests: + with self.subTest(operation=operation): + with self.assertWarns(CacheKeyWarning) as cm: + getattr(cache, operation)(*args) + self.assertEqual(str(cm.warning), expected_warning) finally: cache.key_func = old_func def test_invalid_key_characters(self): # memcached doesn't allow whitespace or control characters in keys. key = 'key with spaces and 清' - self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key) + self._perform_invalid_key_test( + key, KEY_ERRORS_WITH_MEMCACHED_MSG % key + ) def test_invalid_key_length(self): # memcached limits key length to 250. 
@@ -538,41 +595,41 @@ def test_cache_versioning_get_set(self): def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 - cache.add('answer1', 42, version=2) + self.assertIs(cache.add('answer1', 42, version=2), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) - cache.add('answer1', 37, version=2) + self.assertIs(cache.add('answer1', 37, version=2), False) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) - cache.add('answer1', 37, version=1) + self.assertIs(cache.add('answer1', 37, version=1), True) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 - caches['v2'].add('answer2', 42) + self.assertIs(caches['v2'].add('answer2', 42), True) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) - caches['v2'].add('answer2', 37) + self.assertIs(caches['v2'].add('answer2', 37), False) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) - caches['v2'].add('answer2', 37, version=1) + self.assertIs(caches['v2'].add('answer2', 37, version=1), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 - caches['v2'].add('answer3', 42, version=1) + self.assertIs(caches['v2'].add('answer3', 42, version=1), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) - caches['v2'].add('answer3', 37, version=1) + self.assertIs(caches['v2'].add('answer3', 37, version=1), False) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) - caches['v2'].add('answer3', 37) + self.assertIs(caches['v2'].add('answer3', 37), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) @@ -580,116 +637,158 @@ def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key - self.assertTrue(cache.has_key('answer1')) - self.assertTrue(cache.has_key('answer1', version=1)) - self.assertFalse(cache.has_key('answer1', version=2)) + self.assertIs(cache.has_key('answer1'), True) + self.assertIs(cache.has_key('answer1', version=1), True) + self.assertIs(cache.has_key('answer1', version=2), False) - self.assertFalse(caches['v2'].has_key('answer1')) - self.assertTrue(caches['v2'].has_key('answer1', version=1)) - self.assertFalse(caches['v2'].has_key('answer1', version=2)) + self.assertIs(caches['v2'].has_key('answer1'), False) + self.assertIs(caches['v2'].has_key('answer1', version=1), True) + self.assertIs(caches['v2'].has_key('answer1', version=2), False) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) - cache.delete('answer1') + self.assertIs(cache.delete('answer1'), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) - cache.delete('answer2', version=2) + self.assertIs(cache.delete('answer2', version=2), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) - 
caches['v2'].delete('answer3') + self.assertIs(caches['v2'].delete('answer3'), True) self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) - caches['v2'].delete('answer4', version=1) + self.assertIs(caches['v2'].delete('answer4', version=1), True) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) - cache.incr('answer1') + self.assertEqual(cache.incr('answer1'), 38) self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) - cache.decr('answer1') + self.assertEqual(cache.decr('answer1'), 37) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) - cache.incr('answer2', version=2) + self.assertEqual(cache.incr('answer2', version=2), 43) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) - cache.decr('answer2', version=2) + self.assertEqual(cache.decr('answer2', version=2), 42) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) - caches['v2'].incr('answer3') + self.assertEqual(caches['v2'].incr('answer3'), 43) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) - caches['v2'].decr('answer3') + self.assertEqual(caches['v2'].decr('answer3'), 42) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) - caches['v2'].incr('answer4', version=1) + self.assertEqual(caches['v2'].incr('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) - caches['v2'].decr('answer4', version=1) + self.assertEqual(caches['v2'].decr('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) - self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) - self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) + self.assertEqual( + cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42} + ) + self.assertEqual( + cache.get_many(['ford1', 'arthur1'], version=1), + {'ford1': 37, 'arthur1': 42}, + ) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) - self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) - self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {}) + self.assertEqual( + caches['v2'].get_many(['ford1', 'arthur1'], version=1), + {'ford1': 37, 'arthur1': 42}, + ) + self.assertEqual( + caches['v2'].get_many(['ford1', 'arthur1'], version=2), {} + ) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) 
self.assertEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) - self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) + self.assertEqual( + cache.get_many(['ford2', 'arthur2'], version=2), + {'ford2': 37, 'arthur2': 42}, + ) - self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) - self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) - self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) + self.assertEqual( + caches['v2'].get_many(['ford2', 'arthur2']), + {'ford2': 37, 'arthur2': 42}, + ) + self.assertEqual( + caches['v2'].get_many(['ford2', 'arthur2'], version=1), {} + ) + self.assertEqual( + caches['v2'].get_many(['ford2', 'arthur2'], version=2), + {'ford2': 37, 'arthur2': 42}, + ) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) - self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) + self.assertEqual( + cache.get_many(['ford3', 'arthur3'], version=2), + {'ford3': 37, 'arthur3': 42}, + ) - self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) - self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) - self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) + self.assertEqual( + caches['v2'].get_many(['ford3', 'arthur3']), + {'ford3': 37, 'arthur3': 42}, + ) + self.assertEqual( + caches['v2'].get_many(['ford3', 'arthur3'], version=1), {} + ) + self.assertEqual( + caches['v2'].get_many(['ford3', 'arthur3'], version=2), + {'ford3': 37, 'arthur3': 42}, + ) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) - self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) - self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) + self.assertEqual( + cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42} + ) + self.assertEqual( + cache.get_many(['ford4', 'arthur4'], version=1), + {'ford4': 37, 'arthur4': 42}, + ) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) - self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) - self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) + self.assertEqual( + caches['v2'].get_many(['ford4', 'arthur4'], version=1), + {'ford4': 37, 'arthur4': 42}, + ) + self.assertEqual( + caches['v2'].get_many(['ford4', 'arthur4'], version=2), {} + ) def test_incr_version(self): cache.set('answer', 42, version=2) @@ -719,6 +818,13 @@ def test_incr_version(self): with self.assertRaises(ValueError): cache.incr_version('does_not_exist') + cache.set('null', None) + if self.supports_get_with_default: + self.assertEqual(cache.incr_version('null'), 2) + else: + with self.assertRaises(self.incr_decr_type_error): + cache.incr_version('null') + def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) @@ -743,6 +849,13 @@ def test_decr_version(self): with self.assertRaises(ValueError): 
cache.decr_version('does_not_exist', version=2) + cache.set('null', None, version=2) + if self.supports_get_with_default: + self.assertEqual(cache.decr_version('null', version=2), 1) + else: + with self.assertRaises(self.incr_decr_type_error): + cache.decr_version('null', version=2) + def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) @@ -756,30 +869,31 @@ def test_custom_key_func(self): self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpicklable_object(self): - update_middleware = UpdateCacheMiddleware() - update_middleware.cache = cache - - fetch_middleware = FetchFromCacheMiddleware() - fetch_middleware.cache = cache + fetch_middleware = FetchFromCacheMiddleware(empty_response) request = self.factory.get('/cache/test') request._cache_update_cache = True - get_cache_data = FetchFromCacheMiddleware().process_request(request) + get_cache_data = FetchFromCacheMiddleware( + empty_response + ).process_request(request) self.assertIsNone(get_cache_data) - response = HttpResponse() content = 'Testing cookie serialization.' - response.content = content - response.set_cookie('foo', 'bar') - update_middleware.process_response(request, response) + def get_response(req): + response = HttpResponse(content) + response.set_cookie('foo', 'bar') + return response + + update_middleware = UpdateCacheMiddleware(get_response) + response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) - update_middleware.process_response(request, get_cache_data) + UpdateCacheMiddleware(lambda req: get_cache_data)(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) @@ -798,7 +912,12 @@ def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) - self.assertEqual(cache.get_or_set('null', None), None) + self.assertIsNone(cache.get_or_set('null', None)) + if self.supports_get_with_default: + # Previous get_or_set() stores None in the cache. + self.assertIsNone(cache.get('null', 'default')) + else: + self.assertEqual(cache.get('null', 'default'), 'default') def test_get_or_set_callable(self): def my_callable(): @@ -807,14 +926,16 @@ def my_callable(): self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') - def test_get_or_set_callable_returning_none(self): - self.assertIsNone(cache.get_or_set('mykey', lambda: None)) - # Previous get_or_set() doesn't store None in the cache. - self.assertEqual(cache.get('mykey', 'default'), 'default') + self.assertIsNone(cache.get_or_set('null', lambda: None)) + if self.supports_get_with_default: + # Previous get_or_set() stores None in the cache. 
+ self.assertIsNone(cache.get('null', 'default')) + else: + self.assertEqual(cache.get('null', 'default'), 'default') def test_get_or_set_version(self): msg = "get_or_set() missing 1 required positional argument: 'default'" - cache.get_or_set('brian', 1979, version=2) + self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian') with self.assertRaisesMessage(TypeError, msg): @@ -825,7 +946,9 @@ def test_get_or_set_version(self): self.assertIsNone(cache.get('brian', version=3)) def test_get_or_set_racing(self): - with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add: + with mock.patch( + '%s.%s' % (settings.CACHES['default']['BACKEND'], 'add') + ) as cache_add: # Simulate cache.add() failing to add a value. In that case, the # default value should be returned. cache_add.return_value = False @@ -833,7 +956,6 @@ def test_get_or_set_racing(self): class PicklingSideEffect: - def __init__(self, cache): self.cache = cache self.locked = False @@ -843,11 +965,14 @@ def __getstate__(self): return {} -@override_settings(CACHES=caches_setting_for_tests( - BACKEND='diskcache.DjangoCache', -)) +@override_settings( + CACHES=caches_setting_for_tests( + BACKEND='diskcache.DjangoCache', + ) +) class DiskCacheTests(BaseCacheTests, TestCase): - "Specific test cases for diskcache.DjangoCache." + """Specific test cases for diskcache.DjangoCache.""" + def setUp(self): super().setUp() self.dirname = tempfile.mkdtemp() @@ -868,14 +993,23 @@ def test_ignores_non_cache_files(self): with open(fname, 'w'): os.utime(fname, None) cache.clear() - self.assertTrue(os.path.exists(fname), - 'Expected cache.clear to ignore non cache files') + self.assertTrue( + os.path.exists(fname), + 'Expected cache.clear to ignore non cache files', + ) os.remove(fname) + def test_creates_cache_dir_if_nonexistent(self): + os.rmdir(self.dirname) + cache.set('foo', 'bar') + self.assertTrue(os.path.exists(self.dirname)) + def test_clear_does_not_remove_cache_dir(self): cache.clear() - self.assertTrue(os.path.exists(self.dirname), - 'Expected cache.clear to keep the cache dir') + self.assertTrue( + os.path.exists(self.dirname), + 'Expected cache.clear to keep the cache dir', + ) def test_cache_write_unpicklable_type(self): # This fails if not using the highest pickling protocol on Python 2. @@ -897,7 +1031,7 @@ def test_directory(self): self.assertTrue('tmp' in cache.directory) def test_read(self): - value = b'abcd' * 2 ** 20 + value = b'abcd' * 2**20 result = cache.set(b'test-key', value) self.assertTrue(result) @@ -942,22 +1076,11 @@ def test_pop(self): self.assertEqual(cache.pop(0, default=1), 1) self.assertEqual(cache.pop(1, expire_time=True), (1, None)) self.assertEqual(cache.pop(2, tag=True), (2, None)) - self.assertEqual(cache.pop(3, expire_time=True, tag=True), (3, None, None)) + self.assertEqual( + cache.pop(3, expire_time=True, tag=True), (3, None, None) + ) self.assertEqual(cache.pop(4, retry=False), 4) - def test_pickle(self): - letters = 'abcde' - cache.clear() - - for num, val in enumerate(letters): - cache.set(val, num) - - data = pickle.dumps(cache) - other = pickle.loads(data) - - for key in letters: - self.assertEqual(other.get(key), cache.get(key)) - def test_cache(self): subcache = cache.cache('test') directory = os.path.join(cache.directory, 'cache', 'test') @@ -975,6 +1098,7 @@ def test_index(self): def test_memoize(self): with self.assertRaises(TypeError): + @cache.memoize # <-- Missing parens! 
def test(): pass diff --git a/tests/test_doctest.py b/tests/test_doctest.py index 70fa61c..822d8db 100644 --- a/tests/test_doctest.py +++ b/tests/test_doctest.py @@ -1,6 +1,4 @@ import doctest -import shutil -import sys import diskcache.core import diskcache.djangocache diff --git a/tests/test_fanout.py b/tests/test_fanout.py index f5d9b70..af221b6 100644 --- a/tests/test_fanout.py +++ b/tests/test_fanout.py @@ -1,36 +1,27 @@ -"Test diskcache.fanout.FanoutCache." - -from __future__ import print_function +"""Test diskcache.fanout.FanoutCache.""" import collections as co -import errno -import functools as ft import hashlib import io import os import os.path as op +import pathlib import pickle -import pytest -import random import shutil -import sqlite3 import subprocess as sp -import sys import tempfile import threading import time import warnings - from unittest import mock +import pytest + import diskcache as dc warnings.simplefilter('error') warnings.simplefilter('ignore', category=dc.EmptyDirWarning) -if sys.hexversion < 0x03000000: - range = xrange - @pytest.fixture def cache(): @@ -44,7 +35,7 @@ def test_init(cache): del default_settings['size_limit'] for key, value in default_settings.items(): assert getattr(cache, key) == value - assert cache.size_limit == 2 ** 27 + assert cache.size_limit == 2**27 cache.check() @@ -54,6 +45,13 @@ def test_init(cache): cache.check() +def test_init_path(cache): + path = pathlib.Path(cache.directory) + other = dc.FanoutCache(path) + other.close() + assert cache.directory == other.directory + + def test_set_get_delete(cache): for value in range(100): cache.set(value, value) @@ -72,7 +70,7 @@ def test_set_get_delete(cache): for value in range(100): assert cache.delete(value) - assert cache.delete(100) == False + assert cache.delete(100) is False cache.check() @@ -239,15 +237,15 @@ def test_incr_concurrent(): def test_getsetdel(cache): values = [ (None, False), - ((None,) * 2 ** 10, False), + ((None,) * 2**10, False), (1234, False), - (2 ** 512, False), + (2**512, False), (56.78, False), - (u'hello', False), - (u'hello' * 2 ** 10, False), + ('hello', False), + ('hello' * 2**10, False), (b'world', False), - (b'world' * 2 ** 10, False), - (io.BytesIO(b'world' * 2 ** 10), True), + (b'world' * 2**10, False), + (io.BytesIO(b'world' * 2**10), True), ] for key, (value, file_like) in enumerate(values): @@ -351,14 +349,14 @@ def test_tag_index(cache): def test_read(cache): - cache.set(0, b'abcd' * 2 ** 20) + cache.set(0, b'abcd' * 2**20) with cache.read(0) as reader: assert reader is not None def test_read_keyerror(cache): with pytest.raises(KeyError): - with cache.read(0) as reader: + with cache.read(0): pass @@ -685,8 +683,3 @@ def test_custom_filename_disk(): assert content == str(count) * int(1e5) shutil.rmtree(cache.directory, ignore_errors=True) - - -if __name__ == '__main__': - import nose - nose.runmodule() diff --git a/tests/test_index.py b/tests/test_index.py index ef2a0d0..742daf3 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -1,12 +1,10 @@ -"Test diskcache.persistent.Index." 
+"""Test diskcache.persistent.Index.""" -import functools as ft import pickle -import pytest import shutil -import sys +import tempfile -from unittest import mock +import pytest import diskcache as dc @@ -26,7 +24,7 @@ def index(): def test_init(): - directory = '/tmp/diskcache/index' + directory = tempfile.mkdtemp() mapping = {'a': 5, 'b': 4, 'c': 3, 'd': 2, 'e': 1} index = dc.Index(None, mapping) diff --git a/tests/test_query_only.py b/tests/test_query_only.py new file mode 100644 index 0000000..83bcb92 --- /dev/null +++ b/tests/test_query_only.py @@ -0,0 +1,64 @@ +import contextlib +import os +import shutil +import sqlite3 +import stat +import tempfile + +import pytest + +import diskcache as dc +from diskcache.core import DBNAME, ReadOnlyError + + +@pytest.fixture +def cache_directory(): + with contextlib.nullcontext( + tempfile.mkdtemp(prefix='diskcache-') + ) as directory: + yield directory + shutil.rmtree(directory, ignore_errors=True) + + +def test_cannot_create(cache_directory): + with pytest.raises(sqlite3.OperationalError): + dc.Cache(directory=cache_directory, sqlite_query_only=True) + + +def test_can_read_only(cache_directory): + key = 'some' + obj1 = [5, 6, 7] + + # create the cache, must be in read write mode + rw = dc.Cache(directory=cache_directory) + rw[key] = obj1 + rw = None + + # make the file RO + os.chmod(os.path.join(cache_directory, DBNAME), stat.S_IREAD) + + # with sqlite_query_only=True we can read the DB + ro = dc.Cache(directory=cache_directory, sqlite_query_only=True) + obj2 = ro[key] + ro = None + + assert obj2 == obj1 + + # default cache cannot read a ro file + with pytest.raises(sqlite3.OperationalError): + dc.Cache(directory=cache_directory) + + +def test_cannot_update(cache_directory): + # create the cache, must be in read write mode + rw = dc.Cache(directory=cache_directory) + rw['key'] = 'old' + rw = None + + # re-open ro: cannot update + ro = dc.Cache(directory=cache_directory, sqlite_query_only=True) + with pytest.raises(ReadOnlyError): + ro['key'] = 'new' + + with pytest.raises(ReadOnlyError): + ro.clear() diff --git a/tests/test_recipes.py b/tests/test_recipes.py index b26a239..ae74459 100644 --- a/tests/test_recipes.py +++ b/tests/test_recipes.py @@ -1,12 +1,12 @@ -"Test diskcache.recipes." 
+"""Test diskcache.recipes.""" -import diskcache as dc -import pytest import shutil import threading import time -from unittest import mock +import pytest + +import diskcache as dc @pytest.fixture @@ -28,14 +28,37 @@ def test_averager(cache): assert nums.pop() == 9.5 +def test_lock(cache): + state = {'num': 0} + lock = dc.Lock(cache, 'demo') + + def worker(): + state['num'] += 1 + with lock: + assert lock.locked() + state['num'] += 1 + time.sleep(0.1) + + with lock: + thread = threading.Thread(target=worker) + thread.start() + time.sleep(0.1) + assert state['num'] == 1 + thread.join() + assert state['num'] == 2 + + def test_rlock(cache): state = {'num': 0} rlock = dc.RLock(cache, 'demo') + def worker(): state['num'] += 1 with rlock: - state['num'] += 1 - time.sleep(0.1) + with rlock: + state['num'] += 1 + time.sleep(0.1) + with rlock: thread = threading.Thread(target=worker) thread.start() @@ -48,11 +71,13 @@ def worker(): def test_semaphore(cache): state = {'num': 0} semaphore = dc.BoundedSemaphore(cache, 'demo', value=3) + def worker(): state['num'] += 1 with semaphore: state['num'] += 1 time.sleep(0.1) + semaphore.acquire() semaphore.acquire() with semaphore: @@ -68,11 +93,13 @@ def worker(): def test_memoize_stampede(cache): state = {'num': 0} + @dc.memoize_stampede(cache, 0.1) def worker(num): time.sleep(0.01) state['num'] += 1 return num + start = time.time() while (time.time() - start) < 1: worker(100) diff --git a/tests/utils.py b/tests/utils.py index f2370da..38e5d33 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import os import subprocess as sp @@ -20,7 +18,6 @@ def percentile(sequence, percent): def secs(value): units = ['s ', 'ms', 'us', 'ns'] - pos = 0 if value is None: return ' 0.000ns' @@ -35,7 +32,7 @@ def secs(value): def run(*args): - "Run command, print output, and return output." + """Run command, print output, and return output.""" print('utils$', *args) result = sp.check_output(args) print(result) @@ -43,7 +40,7 @@ def run(*args): def mount_ramdisk(size, path): - "Mount RAM disk at `path` with `size` in bytes." + """Mount RAM disk at `path` with `size` in bytes.""" sectors = size / 512 os.makedirs(path) @@ -56,30 +53,12 @@ def mount_ramdisk(size, path): def unmount_ramdisk(dev_path, path): - "Unmount RAM disk with `dev_path` and `path`." 
+ """Unmount RAM disk with `dev_path` and `path`.""" run('umount', path) run('diskutil', 'eject', dev_path) run('rm', '-r', path) -def retry(sql, query): - pause = 0.001 - error = sqlite3.OperationalError - - for _ in range(int(LIMITS[u'timeout'] / pause)): - try: - sql(query).fetchone() - except sqlite3.OperationalError as exc: - error = exc - time.sleep(pause) - else: - break - else: - raise error - - del error - - def display(name, timings): cols = ('Action', 'Count', 'Miss', 'Median', 'P90', 'P99', 'Max', 'Total') template = ' '.join(['%9s'] * len(cols)) @@ -98,16 +77,19 @@ def display(name, timings): len_total += len(values) sum_total += sum(values) - print(template % ( - action, - len(values), - len(timings.get(action + '-miss', [])), - secs(percentile(values, 0.5)), - secs(percentile(values, 0.9)), - secs(percentile(values, 0.99)), - secs(percentile(values, 1.0)), - secs(sum(values)), - )) + print( + template + % ( + action, + len(values), + len(timings.get(action + '-miss', [])), + secs(percentile(values, 0.5)), + secs(percentile(values, 0.9)), + secs(percentile(values, 0.99)), + secs(percentile(values, 1.0)), + secs(sum(values)), + ) + ) totals = ('Total', len_total, '', '', '', '', '', secs(sum_total)) print(template % totals) diff --git a/tox.ini b/tox.ini index b2b6d45..e7217a7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,37 +1,101 @@ [tox] -envlist=py35,py36,py37,py38,pylint +envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py38,py39,py310,py311 skip_missing_interpreters=True [testenv] +commands=pytest deps= - django==2.2.* + django==4.2.* pytest + pytest-cov pytest-django pytest-xdist -commands=python -m pytest -setenv = +setenv= DJANGO_SETTINGS_MODULE=tests.settings PYTHONPATH={toxinidir} +[testenv:blue] +commands=blue {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests +deps=blue + +[testenv:bluecheck] +commands=blue --check {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests +deps=blue + +[testenv:doc8] +commands=doc8 docs --ignore-path docs/_build +deps=doc8 + +[testenv:docs] +allowlist_externals=make +changedir=docs +commands=make html +deps= + django==4.2.* + sphinx + +[testenv:flake8] +commands=flake8 {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests +deps=flake8 + +[testenv:isort] +commands=isort {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests +deps=isort + +[testenv:isortcheck] +commands=isort --check {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests +deps=isort + +[testenv:mypy] +commands=mypy {toxinidir}/diskcache +deps=mypy + +[testenv:pylint] +commands=pylint {toxinidir}/diskcache +deps= + django==4.2.* + pylint + +[testenv:rstcheck] +commands=rstcheck {toxinidir}/README.rst +deps=rstcheck + +[testenv:uploaddocs] +allowlist_externals=rsync +changedir=docs +commands= + rsync --rsync-path 'sudo -u herokuish rsync' -azP --stats --delete \ + _build/html/ \ + grantjenks:/srv/www/grantjenks.com/public/docs/diskcache/ + +[isort] +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 0 +use_parentheses = True +ensure_newline_before_comments = True +line_length = 79 + [pytest] addopts= -n auto + --cov-branch + --cov-fail-under=98 + --cov-report=term-missing + --cov=diskcache + --doctest-glob="*.rst" + --ignore docs/case-study-web-crawler.rst + --ignore docs/sf-python-2017-meetup-talk.rst --ignore tests/benchmark_core.py --ignore tests/benchmark_djangocache.py --ignore tests/benchmark_glob.py --ignore tests/issue_85.py --ignore tests/plot.py -norecursedirs=site-packages -testpaths=docs 
diskcache tests -env = - DJANGO_SETTINGS_MODULE=tests.settings - PYTHONPATH={PWD}:{PWD}/tests - -[testenv:pylint] -deps= - django==2.2.* - pylint -commands=pylint diskcache [doc8] -ignore=D000 +# ignore=D000 + +[flake8] +exclude=tests/test_djangocache.py +extend-ignore=E203 +max-line-length=120
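
A usage sketch for the query-only mode exercised by tests/test_query_only.py
above. This is a minimal illustration, not part of the patch, and it assumes
only what those tests assert: dc.Cache accepts a sqlite_query_only keyword,
diskcache.core exposes DBNAME and ReadOnlyError, and the database must first
be created by a regular read-write instance.

    import os
    import stat
    import tempfile

    import diskcache as dc
    from diskcache.core import DBNAME, ReadOnlyError

    directory = tempfile.mkdtemp(prefix='diskcache-')

    # The first open must be read-write so the SQLite schema can be created.
    with dc.Cache(directory=directory) as cache:
        cache['answer'] = 42

    # Optionally mark the database file read-only at the filesystem level.
    os.chmod(os.path.join(directory, DBNAME), stat.S_IREAD)

    # A query-only cache reads existing entries but rejects every write.
    ro = dc.Cache(directory=directory, sqlite_query_only=True)
    assert ro['answer'] == 42
    try:
        ro['answer'] = 43
    except ReadOnlyError:
        pass  # Writes raise ReadOnlyError in query-only mode.
    ro.close()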