Skip to content

Commit

Permalink
Fixed #30: os.path.exists('...') evaluated as True and errored out.
Browse files Browse the repository at this point in the history
  • Loading branch information
meyersbs committed Nov 8, 2017
1 parent 9d6a236 commit fd211e4
Show file tree
Hide file tree
Showing 9 changed files with 11 additions and 11 deletions.
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

setup(
name='SPLAT-library',
version='0.4.3',
version='0.4.4',
description='Speech Processing & Linguistic Analysis Tool',
long_description="SPLAT is a command-line application designed to make it easy for linguists (both computer-oriented and non-computer-oriented) to use the Natural Language Tool Kit (NLTK) for analyzing virtually any text file.\n\nSPLAT is designed to help you gather linguistic features from text files and it is assumed that most input files will not be already annotated. In order for SPLAT to function properly, you should ensure that the input files that you provide do not contain any annotations. Because there are so many variations of linguistic annotation schemes, it would simply be impossible to account for all of them in the initial parsing of input files; it is easier for you to remove any existing annotations than it is for me to do so.",
url='http://splat-library.org',
Expand All @@ -23,7 +23,7 @@
'splat.taggers',
'splat.tokenizers'
],
download_url='https://github.com/meyersbs/SPLAT/archive/v0.4.3.tar.gz',
download_url='https://github.com/meyersbs/SPLAT/archive/v0.4.4.tar.gz',
requires=['matplotlib', 'nltk'],
classifiers=[
'Development Status :: 3 - Alpha',
Expand Down
2 changes: 1 addition & 1 deletion splat/SPLAT.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def __init__(self, text):
"""
Creates a SPLAT Object.
"""
if os.path.exists(text):
if os.path.isfile(text):
temp_text = ""
temp_utts = []
self.__name = text
Expand Down
2 changes: 1 addition & 1 deletion splat/sentenizers/NLTKSentenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def sentenize(self, text):
"""
sentences = ""
if type(text) == str:
if os.path.exists(text):
if os.path.isfile(text):
sentences = " ".join(self.__sentenize_file(self, text))
else:
sentences = text
Expand Down
2 changes: 1 addition & 1 deletion splat/sentenizers/Sentenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def sentenize(self, text):
"""
sentences = []
if type(text) == str:
if os.path.exists(text):
if os.path.isfile(text):
sentences = self.__sentenize_file(self, text)
else:
sentences = self.__sentenize_string(self, text)
Expand Down
4 changes: 2 additions & 2 deletions splat/splat
Original file line number Diff line number Diff line change
Expand Up @@ -208,10 +208,10 @@ def run_command(args):

def load_splat(args):
global my_splat
if os.path.exists(args[-1] + ".splat"):
if os.path.isfile(args[-1] + ".splat"):
with open(args[-1] + ".splat", 'r') as f:
my_splat.load(f)
elif os.path.exists(args[-1][0:20] + ".splat"):
elif os.path.isfile(args[-1][0:20] + ".splat"):
with open(args[-1][0:20] + ".splat", 'r') as f:
my_splat.load(f)
else:
Expand Down
2 changes: 1 addition & 1 deletion splat/tokenizers/NLTKCleanTokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class NLTKCleanTokenizer(Tokenizer):
def tokenize(self, text):
raw_tokens = []
if type(text) == str:
if os.path.exists(text):
if os.path.isfile(text):
raw_tokens = self.__tokenize_file(text)
else:
raw_tokens = text
Expand Down
2 changes: 1 addition & 1 deletion splat/tokenizers/NLTKPunctTokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def tokenize(self, text):
raw_text = ""
raw_tokens = []
if type(text) == str:
if os.path.exists(text):
if os.path.isfile(text):
raw_text = " ".join(self.__tokenize_file(text))
else:
raw_text = text
Expand Down
2 changes: 1 addition & 1 deletion splat/tokenizers/NLTKRawTokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def tokenize(self, text):
raw_text = ""
raw_tokens = []
if type(text) == str:
if os.path.exists(text):
if os.path.isfile(text):
raw_text = " ".join(self.__tokenize_file(text))
else:
raw_text = text
Expand Down
2 changes: 1 addition & 1 deletion splat/tokenizers/Tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def __tokenize_file(text):
def tokenize(self, text):
raw_tokens = []
if type(text) == str:
if os.path.exists(text):
if os.path.isfile(text):
raw_tokens = self.__tokenize_file(text)
else:
raw_tokens = self.__tokenize_string(text)
Expand Down

0 comments on commit fd211e4

Please sign in to comment.