Skip to content

Commit

Permalink
Merge branch 'staging_0_1_1' into issue_98
Browse files Browse the repository at this point in the history
  • Loading branch information
maaquib authored May 2, 2020
2 parents de9184d + 286a284 commit 9784ed2
Show file tree
Hide file tree
Showing 20 changed files with 260 additions and 388 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,9 @@ public void pollBatch(String threadId, long waitTime, Map<String, Job> jobsRepo)
}
logger.trace("sending jobs, size: {}", jobsRepo.size());
} finally {
lock.unlock();
if (lock.isHeldByCurrentThread()) {
lock.unlock();
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,8 @@ public void run() {
req = null;
}
} catch (InterruptedException e) {
if (state == WorkerState.WORKER_SCALED_DOWN) {
logger.debug("System state is : " + state);
if (state == WorkerState.WORKER_SCALED_DOWN || state == WorkerState.WORKER_STOPPED) {
logger.debug("Shutting down the thread .. Scaling down.");
} else {
logger.debug(
Expand Down
11 changes: 7 additions & 4 deletions torchserve_sanity.sh
Original file line number Diff line number Diff line change
Expand Up @@ -67,14 +67,15 @@ cleanup()
rm -rf logs
}


pip install mock pytest pylint pytest-mock pytest-cov
# set pylint to version 2.4.4 because of following bug with pylint 2.5 released on 27th April 2020
# https://github.com/PyCQA/pylint/issues/3524
pip install mock pytest pylint==2.4.4 pytest-mock pytest-cov

cd frontend

if ./gradlew clean build;
then
echo "Frontend build suite execution successfully"
echo "Frontend build suite execution successful"
else
echo "Frontend build suite execution failed!!! Check logs for more details"
exit 1
Expand All @@ -83,12 +84,14 @@ fi
cd ..
if python -m pytest --cov-report html:htmlcov --cov=ts/ ts/tests/unit_tests/;
then
echo "Backend test suite execution successfully"
echo "Backend test suite execution successful"
else
echo "Backend test suite execution failed!!! Check logs for more details"
exit 1
fi

pylint -rn --rcfile=./ts/tests/pylintrc ts/.

pip uninstall --yes torchserve
pip uninstall --yes torch-model-archiver

Expand Down
68 changes: 5 additions & 63 deletions ts/model_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

from ts.metrics.metrics_store import MetricsStore
from ts.service import Service
from .utils.util import list_classes_from_module


class ModelLoaderFactory(object):
Expand All @@ -23,14 +24,8 @@ class ModelLoaderFactory(object):
"""

@staticmethod
def get_model_loader(model_dir):
manifest_file = os.path.join(model_dir, "MAR-INF/MANIFEST.json")
if os.path.exists(manifest_file):
return TsModelLoader()
elif os.path.exists(os.path.join(model_dir, "MANIFEST.json")):
return LegacyModelLoader()
else:
return TsModelLoader()
def get_model_loader():
return TsModelLoader()


class ModelLoader(object):
Expand Down Expand Up @@ -88,7 +83,8 @@ def load(self, model_name, model_dir, handler, gpu_id, batch_size):
module_name = module_name[:-3]
module_name = module_name.split("/")[-1]
module = importlib.import_module(module_name)
except Exception as e:
# pylint: disable=unused-variable
except ImportError as e:
module_name = ".{0}".format(handler)
module = importlib.import_module(module_name, 'ts.torch_handler')
function_name = None
Expand All @@ -105,7 +101,6 @@ def load(self, model_name, model_dir, handler, gpu_id, batch_size):
# initialize model at load time
entry_point(None, service.context)
else:
from .utils.util import list_classes_from_module
model_class_definitions = list_classes_from_module(module)
if len(model_class_definitions) != 1:
raise ValueError("Expected only one class in custom service code or a function entry point {}".format(
Expand Down Expand Up @@ -133,56 +128,3 @@ def load(self, model_name, model_dir, handler, gpu_id, batch_size):
pass

return service

# TODO(maintainer): remove LegacyModelLoader once legacy MANIFEST.json models are no longer supported.
class LegacyModelLoader(ModelLoader):
    """
    Loader for legacy (pre-MAR) model archives identified by a top-level
    ``MANIFEST.json`` file (the class docstring says 0.4 while ``load`` says
    0.3 — NOTE(review): confirm which legacy format version this targets).
    """

    def load(self, model_name, model_dir, handler, gpu_id, batch_size):
        """
        Load a legacy TorchServe model from the extracted model directory.

        :param model_name: name the model is registered under
        :param model_dir: directory containing MANIFEST.json and the handler file
        :param handler: handler module name; ".py" is appended if missing
        :param gpu_id: GPU device id handed to the service/handler
        :param batch_size: batch size recorded on the created Service
        :return: a fully initialized :class:`Service` wrapping the handler
        :raises ValueError: if the handler module cannot be loaded from file
        """
        manifest_file = os.path.join(model_dir, "MANIFEST.json")

        # The manifest is optional for legacy models; missing file -> None.
        manifest = None
        if os.path.isfile(manifest_file):
            with open(manifest_file) as f:
                manifest = json.load(f)
        if not handler.endswith(".py"):
            handler = handler + ".py"

        service_file = os.path.join(model_dir, handler)
        name = os.path.splitext(os.path.basename(service_file))[0]
        if sys.version_info[0] > 2:
            # Python 3: load the handler module directly from its file path.
            from importlib import util

            spec = util.spec_from_file_location(name, service_file)
            module = util.module_from_spec(spec)
            spec.loader.exec_module(module)
        else:
            # Python 2 fallback: imp.load_source is the legacy equivalent.
            import imp
            module = imp.load_source(name, service_file)

        if module is None:
            raise ValueError("Unable to load module {}".format(service_file))

        # Imported lazily so the legacy path costs nothing on the modern path.
        from ts.model_service.model_service import SingleNodeService
        from .utils.util import list_classes_from_module
        # Assumes exactly one SingleNodeService subclass exists in the module;
        # only the first match is used — TODO confirm multiple-class behavior.
        model_class_definitions = list_classes_from_module(module, SingleNodeService)
        module_class = model_class_definitions[0]

        # `module` is rebound from the raw module to the instantiated handler.
        module = module_class(model_name, model_dir, manifest, gpu_id)
        service = Service(model_name, model_dir, manifest, module.handle, gpu_id, batch_size)

        # Legacy handlers are initialized eagerly at load time.
        module.initialize(service.context)

        return service
19 changes: 8 additions & 11 deletions ts/model_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,16 +28,14 @@ def start():

# pylint: disable=too-many-nested-blocks
if args.version:
print("TorchServe Version is {}".format(__version__));
print("TorchServe Version is {}".format(__version__))
return
if args.stop:
if pid is None:
print("TorchServe is not currently running.")
else:
try:
parent = psutil.Process(pid)
for child in parent.children(recursive=True):
child.terminate()
parent.terminate()
print("TorchServe has stopped.")
except (OSError, psutil.Error):
Expand All @@ -48,7 +46,7 @@ def start():
try:
psutil.Process(pid)
print("TorchServe is already running, please use torchserve --stop to stop TorchServe.")
exit(1)
sys.exit(1)
except psutil.Error:
print("Removing orphan pid file.")
os.remove(pid_file)
Expand All @@ -62,15 +60,15 @@ def start():
log_config = os.path.realpath(args.log_config)
if not os.path.isfile(log_config):
print("--log-config file not found: {}".format(log_config))
exit(1)
sys.exit(1)

cmd.append("-Dlog4j.configuration=file://{}".format(log_config))

tmp_dir = os.environ.get("TEMP")
if tmp_dir:
if not os.path.isdir(tmp_dir):
print("Invalid temp directory: {}, please check TEMP environment variable.".format(tmp_dir))
exit(1)
sys.exit(1)

cmd.append("-Djava.io.tmpdir={}".format(tmp_dir))

Expand All @@ -79,7 +77,7 @@ def start():
if ts_config:
if not os.path.isfile(ts_config):
print("--ts-config file not found: {}".format(ts_config))
exit(1)
sys.exit(1)
ts_conf_file = ts_config

class_path = \
Expand Down Expand Up @@ -118,13 +116,13 @@ def start():
if args.model_store:
if not os.path.isdir(args.model_store):
print("--model-store directory not found: {}".format(args.model_store))
exit(1)
sys.exit(1)

cmd.append("-s")
cmd.append(args.model_store)
else:
print("Missing mandatory parameter --model-store")
exit(1)
sys.exit(1)

if args.no_config_snapshots:
cmd.append("-ncs")
Expand All @@ -137,8 +135,7 @@ def start():
for model_url in args.models:
if not pattern.match(model_url) and model_url != "ALL":
print("--model-store is required to load model locally.")
exit(1)

sys.exit(1)

try:
process = subprocess.Popen(cmd)
Expand Down
4 changes: 2 additions & 2 deletions ts/model_service_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def load_model(load_model_request):
if "gpu" in load_model_request:
gpu = int(load_model_request["gpu"])

model_loader = ModelLoaderFactory.get_model_loader(model_dir)
model_loader = ModelLoaderFactory.get_model_loader()
service = model_loader.load(model_name, model_dir, handler, gpu, batch_size)

logging.debug("Model %s loaded.", model_name)
Expand Down Expand Up @@ -169,4 +169,4 @@ def run_server(self):
if sock_type == 'unix' and os.path.exists(socket_name):
os.remove(socket_name)

exit(1)
sys.exit(1)
1 change: 0 additions & 1 deletion ts/protocol/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +0,0 @@

3 changes: 2 additions & 1 deletion ts/protocol/otf_message_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import json
import logging
import struct
import sys
import os

from builtins import bytearray
Expand Down Expand Up @@ -152,7 +153,7 @@ def _retrieve_buffer(conn, length):
pkt = conn.recv(length)
if len(pkt) == 0:
logging.info("Frontend disconnected.")
exit(0)
sys.exit(0)

data += pkt
length -= len(pkt)
Expand Down
32 changes: 5 additions & 27 deletions ts/tests/unit_tests/test_model_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
import mock
import pytest

from ts.model_loader import LegacyModelLoader
from ts.model_loader import TsModelLoader
from ts.model_loader import ModelLoaderFactory
from ts.model_service.model_service import SingleNodeService
Expand All @@ -20,16 +19,8 @@
# noinspection PyClassHasNoInit
# @pytest.mark.skip(reason="Disabling it currently until the PR #467 gets merged")
class TestModelFactory:

def test_model_loader_factory_legacy(self):
model_loader = ModelLoaderFactory.get_model_loader(
os.path.abspath('ts/tests/unit_tests/model_service/dummy_model'))

assert isinstance(model_loader, LegacyModelLoader)

def test_model_loader_factory(self):
model_loader = ModelLoaderFactory.get_model_loader(
os.path.abspath('ts/tests/unit_tests/test_utils/'))
model_loader = ModelLoaderFactory.get_model_loader()

assert isinstance(model_loader, TsModelLoader)

Expand Down Expand Up @@ -71,25 +62,12 @@ def patches(self, mocker):
)
return patches

def test_load_model_legacy(self, patches):
patches.mock_open.side_effect = [mock.mock_open(read_data=self.mock_manifest).return_value]
patches.open_signature.side_effect = [mock.mock_open(read_data='{}').return_value]
patches.is_file.return_value = True
patches.os_path.side_effect = [False, True]
sys.path.append(self.model_dir)
handler = 'dummy_model_service'
model_loader = ModelLoaderFactory.get_model_loader(self.model_dir)
assert isinstance(model_loader, LegacyModelLoader)
service = model_loader.load(self.model_name, self.model_dir, handler, 0, 1)

assert inspect.ismethod(service._entry_point)

def test_load_class_model(self, patches):
patches.mock_open.side_effect = [mock.mock_open(read_data=self.mock_manifest).return_value]
sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
patches.os_path.return_value = True
handler = 'dummy_class_model_service'
model_loader = ModelLoaderFactory.get_model_loader(os.path.abspath('ts/unit_tests/test_utils/'))
model_loader = ModelLoaderFactory.get_model_loader()
service = model_loader.load(self.model_name, self.model_dir, handler, 0, 1)

assert inspect.ismethod(service._entry_point)
Expand All @@ -99,7 +77,7 @@ def test_load_func_model(self, patches):
sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
patches.os_path.return_value = True
handler = 'dummy_func_model_service:infer'
model_loader = ModelLoaderFactory.get_model_loader(os.path.abspath('ts/unit_tests/test_utils/'))
model_loader = ModelLoaderFactory.get_model_loader()
service = model_loader.load(self.model_name, self.model_dir, handler, 0, 1)

assert isinstance(service._entry_point, types.FunctionType)
Expand All @@ -110,7 +88,7 @@ def test_load_func_model_with_error(self, patches):
sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
patches.os_path.return_value = True
handler = 'dummy_func_model_service:wrong'
model_loader = ModelLoaderFactory.get_model_loader(os.path.abspath('ts/unit_tests/test_utils/'))
model_loader = ModelLoaderFactory.get_model_loader()
with pytest.raises(ValueError, match=r"Expected only one class .*"):
model_loader.load(self.model_name, self.model_dir, handler, 0, 1)

Expand All @@ -120,6 +98,6 @@ def test_load_model_with_error(self, patches):
sys.path.append(os.path.abspath('ts/tests/unit_tests/test_utils/'))
patches.os_path.return_value = True
handler = 'dummy_func_model_service'
model_loader = ModelLoaderFactory.get_model_loader(os.path.abspath('ts/unit_tests/test_utils/'))
model_loader = ModelLoaderFactory.get_model_loader()
with pytest.raises(ValueError, match=r"Expected only one class .*"):
model_loader.load(self.model_name, self.model_dir, handler, 0, 1)
Loading

0 comments on commit 9784ed2

Please sign in to comment.