Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Applying snake_case module name standards (PART 3) #3611

Merged
merged 7 commits on May 11, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ nupic
│   ├── sdr_classifier_factory.py [OK]
│   ├── backtracking_tm.py [TODO]
│   ├── backtracking_tm_cpp.py [TODO]
│   ├── BacktrackingTM_shim.py [TODO]
│   ├── backtracking_tm_shim.py [TODO]
│   ├── connections.py [TODO]
│   ├── fdrutilities.py [TODO]
│   ├── monitor_mixin
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from nupic.data import FunctionSource
from nupic.frameworks.prediction.callbacks import displaySPCoincidences, printSPCoincidences
from nupic.data.dictutils import DictObj
from nupic.data.dict_utils import DictObj


nCoincidences = 30
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
# This must be less than float32 size since storage is float32 size
DUTY_CYCLE_UPDATE_INTERVAL = numpy.finfo(numpy.float32).max / (2 ** 20)

g_debugPrefix = "CLAClassifier"
g_debugPrefix = "cla_classifier"


def _pFormatArray(array_, fmt="%.2f"):
Expand Down Expand Up @@ -249,8 +249,8 @@ class CLAClassifier(object):
"activationPattern") and information from the sensor and encoders (the
"classification") describing the input to the system at that time step.

When learning, for every bit in activation pattern, it records a history of
the classification each time that bit was active. The history is weighted so
When learning, for every bit in activation pattern, it records a history of
the classification each time that bit was active. The history is weighted so
that more recent activity has a bigger impact than older activity. The alpha
parameter controls this weighting.

Expand Down Expand Up @@ -382,7 +382,7 @@ def compute(self, recordNum, patternNZ, classification, learn, infer):
self._patternNZHistory.append((self._learnIteration, patternNZ))

# To allow multi-class classification, we need to be able to run learning
# without inference being on. So initialize retval outside
# without inference being on. So initialize retval outside
# of the inference block.
retval = None

Expand All @@ -392,7 +392,7 @@ def compute(self, recordNum, patternNZ, classification, learn, infer):
# votes
if infer:
retval = self.infer(patternNZ, classification)

# ------------------------------------------------------------------------
# Learning:
# For each active bit in the activationPattern, store the classification
Expand Down Expand Up @@ -468,13 +468,13 @@ def compute(self, recordNum, patternNZ, classification, learn, infer):
print

return retval


def infer(self, patternNZ, classification):
"""
Return the inference value from one input sample. The actual
learning happens in compute(). The method customCompute() is here to
maintain backward compatibility.
Return the inference value from one input sample. The actual
learning happens in compute(). The method customCompute() is here to
maintain backward compatibility.

Parameters:
--------------------------------------------------------------------
Expand All @@ -493,7 +493,7 @@ def infer(self, patternNZ, classification):
1 : [0.1, 0.3, 0.2, 0.7]
4 : [0.2, 0.4, 0.3, 0.5]}
"""

# Return value dict. For buckets which we don't have an actual value
# for yet, just plug in any valid actual value. It doesn't matter what
# we use because that bucket won't have non-zero likelihood anyways.
Expand Down Expand Up @@ -540,9 +540,9 @@ def infer(self, patternNZ, classification):
sumVotes /= sumVotes.size

retval[nSteps] = sumVotes

return retval


def __getstate__(self):
return self.__dict__
Expand Down
2 changes: 1 addition & 1 deletion src/nupic/algorithms/cla_classifier_diff.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
import cPickle as pickle
import numbers

from nupic.algorithms.CLAClassifier import CLAClassifier
from nupic.algorithms.cla_classifier import CLAClassifier
from nupic.bindings.algorithms import FastCLAClassifier


Expand Down
2 changes: 1 addition & 1 deletion src/nupic/algorithms/cla_classifier_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@

"""Module providing a factory for instantiating a CLA classifier."""

from nupic.algorithms.CLAClassifier import CLAClassifier
from nupic.algorithms.cla_classifier import CLAClassifier
from nupic.algorithms.cla_classifier_diff import CLAClassifierDiff
from nupic.bindings.algorithms import FastCLAClassifier
from nupic.support.configuration import Configuration
Expand Down
2 changes: 1 addition & 1 deletion src/nupic/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,4 @@

SENTINEL_VALUE_FOR_MISSING_DATA = None

from functionsource import FunctionSource
from function_source import FunctionSource
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ def test():
import sys

schemaDict = {
"description":"JSON schema for jsonhelpers.py test code",
"description":"JSON schema for json_helpers.py test code",
"type":"object",
"additionalProperties":False,
"properties":{
Expand Down
6 changes: 3 additions & 3 deletions src/nupic/data/stream_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from nupic.data.aggregator import Aggregator
from nupic.data.field_meta import FieldMetaInfo, FieldMetaType, FieldMetaSpecial
from nupic.data.file_record_stream import FileRecordStream
from nupic.data import jsonhelpers
from nupic.data import json_helpers
from nupic.data.record_stream import RecordStreamIface
from nupic.frameworks.opf import jsonschema
import nupic.support
Expand Down Expand Up @@ -137,8 +137,8 @@ def __init__(self, streamDef, bookmark=None, saveOutput=False,

loggerPrefix = 'com.numenta.nupic.data.StreamReader'
self._logger = logging.getLogger(loggerPrefix)
jsonhelpers.validate(streamDef,
schemaPath=pkg_resources.resource_filename(
json_helpers.validate(streamDef,
schemaPath=pkg_resources.resource_filename(
jsonschema.__name__, "stream_def.json"))
assert len(streamDef['streams']) == 1, "Only 1 source stream is supported"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
import uuid

from nupic.support.decorators import logExceptions #, logEntryExit
from nupic.database.Connection import ConnectionFactory
from nupic.database.connection import ConnectionFactory
from nupic.support.configuration import Configuration
from nupic.support import pymysqlhelpers

Expand Down
File renamed without changes.
2 changes: 1 addition & 1 deletion src/nupic/frameworks/opf/exp_description_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
import os
import imp

from nupic.data.dictutils import rUpdate
from nupic.data.dict_utils import rUpdate


# This file contains utility functions that are used
Expand Down
6 changes: 3 additions & 3 deletions src/nupic/frameworks/opf/experiment_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
import random
import numpy

from nupic.data import jsonhelpers
from nupic.data import json_helpers
from nupic.frameworks.opf import opf_basic_environment, opf_helpers
from nupic.frameworks.opf.exp_description_api import OpfEnvironment
from nupic.frameworks.opf.model_factory import ModelFactory
Expand Down Expand Up @@ -373,8 +373,8 @@ def _runExperimentImpl(options, model=None):
is provided to aid with debugging) or None, if none was
created.
"""
jsonhelpers.validate(options.privateOptions,
schemaDict=g_parsedPrivateCommandLineOptionsSchema)
json_helpers.validate(options.privateOptions,
schemaDict=g_parsedPrivateCommandLineOptionsSchema)

# Load the experiment's description.py module
experimentDir = options.experimentDir
Expand Down
2 changes: 1 addition & 1 deletion src/nupic/frameworks/opf/opf_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import re
from collections import namedtuple

import nupic.data.jsonhelpers as jsonhelpers
import nupic.data.json_helpers as jsonhelpers
from nupic.support.enum import Enum


Expand Down
4 changes: 2 additions & 2 deletions src/nupic/regions/KNNClassifierRegion.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
"""
import numpy
from nupic.bindings.regions.PyRegion import PyRegion
from nupic.algorithms import KNNClassifier
from nupic.algorithms import knn_classifier
from nupic.bindings.math import Random


Expand Down Expand Up @@ -574,7 +574,7 @@ def _initEphemerals(self):
self._protoScores = None
self._categoryDistances = None

self._knn = KNNClassifier.KNNClassifier(**self.knnParams)
self._knn = knn_classifier.KNNClassifier(**self.knnParams)

for x in ('_partitions', '_useAuxiliary', '_doSphering',
'_scanInfo', '_protoScores'):
Expand Down
10 changes: 5 additions & 5 deletions src/nupic/regions/TMRegion.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from nupic.bindings.regions.PyRegion import PyRegion

from nupic.algorithms import (anomaly, backtracking_tm, backtracking_tm_cpp,
BacktrackingTM_shim)
backtracking_tm_shim)
from nupic.support import getArgumentDescriptions

gDefaultTemporalImp = 'py'
Expand All @@ -40,13 +40,13 @@ def _getTPClass(temporalImp):
elif temporalImp == 'cpp':
return backtracking_tm_cpp.BacktrackingTMCPP
elif temporalImp == 'tm_py':
return BacktrackingTM_shim.TMShim
return backtracking_tm_shim.TMShim
elif temporalImp == 'tm_cpp':
return BacktrackingTM_shim.TMCPPShim
return backtracking_tm_shim.TMCPPShim
elif temporalImp == 'tm_py_fast':
return BacktrackingTM_shim.FastTMShim
return backtracking_tm_shim.FastTMShim
elif temporalImp == 'monitored_tm_py':
return BacktrackingTM_shim.MonitoredTMShim
return backtracking_tm_shim.MonitoredTMShim
else:
raise RuntimeError("Invalid temporalImp '%s'. Legal values are: 'py', "
"'cpp', 'tm_py', 'monitored_tm_py'" % (temporalImp))
Expand Down
2 changes: 1 addition & 1 deletion src/nupic/regions/TestRegion.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@

from nupic.bindings.regions.PyRegion import PyRegion

from nupic.data.dictutils import DictObj
from nupic.data.dict_utils import DictObj



Expand Down
2 changes: 1 addition & 1 deletion src/nupic/swarming/HypersearchV2.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
from nupic.swarming.utils import sortedJSONDumpS, rApply, rCopy
from nupic.swarming.utils import clippedObj
from nupic.swarming.utils import (runModelGivenBaseAndParams, runDummyModel)
from nupic.database.ClientJobsDAO import (
from nupic.database.client_jobs_dao import (
ClientJobsDAO, InvalidConnectionException)
from nupic.swarming.exp_generator.ExpGenerator import expGenerator

Expand Down
28 changes: 14 additions & 14 deletions src/nupic/swarming/HypersearchWorker.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
from nupic.swarming.hypersearch.ExtendedLogger import ExtendedLogger
from nupic.swarming.hypersearch.errorcodes import ErrorCodes
from nupic.swarming.utils import clippedObj, validate
from nupic.database.ClientJobsDAO import ClientJobsDAO
from nupic.database.client_jobs_dao import ClientJobsDAO
from HypersearchV2 import HypersearchV2


Expand Down Expand Up @@ -267,7 +267,7 @@ def run(self):
wID = options.workerID
else:
wID = self._workerID

buildID = Configuration.get('nupic.software.buildNumber', 'N/A')
logPrefix = '<BUILDID=%s, WORKER=HW, WRKID=%s, JOBID=%s> ' % \
(buildID, wID, options.jobID)
Expand Down Expand Up @@ -332,7 +332,7 @@ def run(self):
# changed and new models, and sends those to the Hypersearch
# implementation's self._hs.recordModelProgress() method.
self._processUpdatedModels(cjDAO)

# --------------------------------------------------------------------
# Create a new batch of models
(exit, newModels) = self._hs.createModels(numModels = batchSize)
Expand All @@ -344,13 +344,13 @@ def run(self):
# orphan if it detects one.
if len(newModels) == 0:
continue

# Try and insert one that we will run
for (modelParams, modelParamsHash, particleHash) in newModels:
jsonModelParams = json.dumps(modelParams)
(modelID, ours) = cjDAO.modelInsertAndStart(options.jobID,
jsonModelParams, modelParamsHash, particleHash)

# Some other worker is already running it, tell the Hypersearch object
# so that it doesn't try and insert it again
if not ours:
Expand All @@ -359,16 +359,16 @@ def run(self):
results = mResult.results
if results is not None:
results = json.loads(results)

modelParams = json.loads(mParamsAndHash.params)
particleHash = cjDAO.modelsGetFields(modelID,
particleHash = cjDAO.modelsGetFields(modelID,
['engParticleHash'])[0]
particleInst = "%s.%s" % (
modelParams['particleState']['id'],
modelParams['particleState']['genIdx'])
self.logger.info("Adding model %d to our internal DB " \
"because modelInsertAndStart() failed to insert it: " \
"paramsHash=%s, particleHash=%s, particleId='%s'", modelID,
"paramsHash=%s, particleHash=%s, particleId='%s'", modelID,
mParamsAndHash.engParamsHash.encode('hex'),
particleHash.encode('hex'), particleInst)
self._hs.recordModelProgress(modelID = modelID,
Expand All @@ -382,14 +382,14 @@ def run(self):
else:
modelIDToRun = modelID
break

else:
# A specific modelID was passed on the command line
modelIDToRun = int(options.modelID)
mParamsAndHash = cjDAO.modelsGetParams([modelIDToRun])[0]
modelParams = json.loads(mParamsAndHash.params)
modelParamsHash = mParamsAndHash.engParamsHash

# Make us the worker
cjDAO.modelSetFields(modelIDToRun,
dict(engWorkerConnId=self._workerID))
Expand All @@ -413,12 +413,12 @@ def run(self):
if not success:
raise RuntimeError("Unexpected failure to change paramsHash and "
"particleHash of orphaned model")

(modelIDToRun, ours) = cjDAO.modelInsertAndStart(options.jobID,
mParamsAndHash.params, modelParamsHash)



# ^^^ end while modelIDToRun ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

# ---------------------------------------------------------------
Expand Down Expand Up @@ -597,7 +597,7 @@ def main(argv):
buildID = Configuration.get('nupic.software.buildNumber', 'N/A')
logPrefix = '<BUILDID=%s, WORKER=HS, WRKID=N/A, JOBID=N/A> ' % buildID
ExtendedLogger.setLogPrefix(logPrefix)

try:
main(sys.argv)
except:
Expand Down
2 changes: 1 addition & 1 deletion src/nupic/swarming/ModelRunner.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
from nupic.swarming.hypersearch import regression
from nupic.swarming.hypersearch.errorcodes import ErrorCodes

from nupic.database.ClientJobsDAO import ClientJobsDAO
from nupic.database.client_jobs_dao import ClientJobsDAO
from nupic.frameworks.opf import opf_helpers
from nupic.frameworks.opf.model_factory import ModelFactory
from nupic.frameworks.opf.opf_basic_environment import BasicPredictionLogger
Expand Down
2 changes: 1 addition & 1 deletion src/nupic/swarming/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import tempfile

from nupic.frameworks.opf import opf_helpers
from nupic.database.ClientJobsDAO import ClientJobsDAO
from nupic.database.client_jobs_dao import ClientJobsDAO
from nupic.support.configuration import Configuration


Expand Down
4 changes: 2 additions & 2 deletions src/nupic/swarming/permutations_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@

from nupic.swarming.hypersearch import object_json as json

import nupic.database.ClientJobsDAO as cjdao
import nupic.database.client_jobs_dao as cjdao
from nupic.swarming import HypersearchWorker
from nupic.swarming.HypersearchV2 import HypersearchV2
from nupic.swarming.exp_generator.ExpGenerator import expGenerator
Expand Down Expand Up @@ -1423,7 +1423,7 @@ class JobStatus(object):
""" @private
Our Nupic Job Info abstraction class"""

# Job Status values (per ClientJobsDAO.py):
# Job Status values (per client_jobs_dao.py):
__nupicJobStatus_NotStarted = cjdao.ClientJobsDAO.STATUS_NOTSTARTED
__nupicJobStatus_Starting = cjdao.ClientJobsDAO.STATUS_STARTING
__nupicJobStatus_running = cjdao.ClientJobsDAO.STATUS_RUNNING
Expand Down
Loading