Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

New methods: get and refresh #1

Open
wants to merge 13 commits into
base: master
Choose a base branch
from
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,9 @@ nosetests.xml
.mr.developer.cfg
.project
.pydevproject

# vim
.*.swp
.*.swo
.swp
.swo
1 change: 1 addition & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ ESTester tests
--------------

In order to run ESTester tests, make sure you have ElasticSearch installed locally and it is up ::

make setup
make test

Expand Down
120 changes: 111 additions & 9 deletions estester/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,32 @@ def _post_teardown(self):
if self.reset_index:
self.delete_index()

def refresh_index(self, index=None):
    """
    Triggers ElasticSearch's _refresh endpoint for <index>. When <index>
    is None, the refresh is applied to every index on the host.

    Refreshing makes every operation performed since the previous
    refresh visible to subsequent searches.

    Raises ElasticSearchException when the response status is not 200/201.
    Returns the parsed JSON response body.
    """
    if index is None:
        refresh_url = "{0}_refresh".format(self.host)
    else:
        refresh_url = "{0}{1}/_refresh".format(self.host, index)
    response = requests.post(refresh_url, proxies=self.proxies)
    if response.status_code in (200, 201):
        return json.loads(response.text)
    raise ElasticSearchException(response.text)

def refresh(self):
    """
    Refreshes this test case's default index (self.index) by delegating
    to refresh_index.

    Makes every operation performed since the previous refresh visible
    to subsequent searches.
    """
    default_index = self.index
    return self.refresh_index(default_index)

def create_index(self):
"""
Use the following class attributes:
Expand Down Expand Up @@ -157,7 +183,10 @@ def load_fixtures(self):
proxies=self.proxies)
if not response.status_code in [200, 201]:
raise ElasticSearchException(response.text)
time.sleep(self.timeout)
if self.timeout is None:
self.refresh()
else:
time.sleep(self.timeout)
# http://0.0.0.0:9200/sample.test/_search

def delete_index(self):
Expand Down Expand Up @@ -193,6 +222,17 @@ def tokenize(self, text, analyzer):
proxies=self.proxies)
return json.loads(response.text)

def get(self, doc_type, doc_id):
    """
    Fetches a single document from the test case's default index.

    doc_type: document type (mapping) the document belongs to
    doc_id: identifier of the document

    Returns the parsed JSON response from ElasticSearch.
    Raises ElasticSearchException when the response status is not 200/201.
    """
    index = urllib.quote_plus(self.index)
    doc_type = urllib.quote_plus(doc_type)
    doc_id = urllib.quote_plus(doc_id)
    url = "{0}{1}/{2}/{3}".format(self.host, index, doc_type, doc_id)
    # Pass proxies so this call honors the same proxy configuration as
    # every other HTTP request issued by this class.
    response = requests.get(url, proxies=self.proxies)
    if response.status_code not in [200, 201]:
        raise ElasticSearchException(response.text)
    else:
        return json.loads(response.text)


class MultipleIndexesQueryTestCase(ElasticSearchQueryTestCase):
"""
Expand All @@ -214,8 +254,9 @@ class MultipleIndexesQueryTestCase(ElasticSearchQueryTestCase):
Are replaced by:
data = {
"index.name": {
"mappings": {}
"settings": {}
"mappings": {},
"settings": {},
"aliases": [],
"fixtures": []
}
}
Expand All @@ -237,8 +278,11 @@ def _pre_setup(self):
settings = index.get("settings", {})
mappings = index.get("mappings", {})
fixtures = index.get("fixtures", {})
aliases = index.get("aliases", [])
self.create_index(index_name, settings, mappings)
self.load_fixtures(index_name, fixtures)
if aliases:
self.create_aliases(index_name, aliases)

def _post_teardown(self):
"""
Expand All @@ -253,6 +297,30 @@ def _post_teardown(self):
if self.reset_index:
self.delete_index(index_name)

def create_aliases(self, index, aliases):
    """
    Creates <aliases> (a list of alias names) for the index identified
    by <index>, using ElasticSearch's _aliases endpoint.

    Raises ElasticSearchException when the response status is not 200/201.
    Returns the parsed JSON response body.
    """
    # One "add" action per requested alias, all sent in a single call.
    actions = [
        {"add": {"index": index, "alias": alias_name}}
        for alias_name in aliases
    ]
    payload = {"actions": actions}
    url = "{0}/_aliases".format(self.host)
    response = requests.post(url, proxies=self.proxies,
                             data=json.dumps(payload))
    if response.status_code not in [200, 201]:
        raise ElasticSearchException(response.text)
    return json.loads(response.text)

def create_index(self, index_name="", settings="", mappings=""):
"""
Use the following class attributes:
Expand All @@ -270,13 +338,33 @@ def create_index(self, index_name="", settings="", mappings=""):
index = index_name or self.index
url = url.format(self.host, index)
data = {}
if self.mappings:
data["mappings"] = mappings or self.mappings
if self.settings:
data["settings"] = settings or self.settings
mappings = mappings or self.mappings
if mappings:
data["mappings"] = mappings
settings = settings or self.settings
if settings:
data["settings"] = settings
json_data = json.dumps(data)
response = requests.put(url, proxies=self.proxies, data=json_data)

def get_aliases(self, index):
    """
    Returns the list of alias names registered for <index>. An empty
    list is returned when the index is missing or has no aliases.

    Raises ElasticSearchException on any other non-200/201 response.
    """
    url = "{0}{1}/_aliases".format(self.host, index)
    response = requests.get(url, proxies=self.proxies)
    status = response.status_code
    # Elasticsearch 0.90 used to return 404 when there were no aliases
    if status == 404:
        return []
    if status not in [200, 201]:
        raise ElasticSearchException(response.text)
    aliases = json.loads(response.text)
    if index not in aliases:
        return []
    return aliases[index]['aliases'].keys()

def load_fixtures(self, index_name="", fixtures=""):
"""
Use the following class attributes:
Expand Down Expand Up @@ -319,7 +407,10 @@ def load_fixtures(self, index_name="", fixtures=""):
proxies=self.proxies)
if not response.status_code in [200, 201]:
raise ElasticSearchException(response.text)
time.sleep(self.timeout)
if self.timeout is None:
self.refresh_index(index_name)
else:
time.sleep(self.timeout)
# http://0.0.0.0:9200/sample.test/_search

def delete_index(self, index_name=""):
Expand All @@ -328,7 +419,7 @@ def delete_index(self, index_name=""):
index: name of the index to be deleted
"""
index = index_name or self.index
url = "{0}{1}/".format(self.host, self.index)
url = "{0}{1}/".format(self.host, index)
requests.delete(url, proxies=self.proxies)

def search(self, query=None):
Expand All @@ -354,3 +445,14 @@ def search_in_index(self, index, query=None):
data=json.dumps(query),
proxies=self.proxies)
return json.loads(response.text)

def get(self, index, doc_type, doc_id):
    """
    Fetches a single document from <index>.

    index: name of the index that holds the document
    doc_type: document type (mapping) the document belongs to
    doc_id: identifier of the document

    Returns the parsed JSON response from ElasticSearch.
    Raises ElasticSearchException when the response status is not 200/201.
    """
    index = urllib.quote_plus(index)
    doc_type = urllib.quote_plus(doc_type)
    doc_id = urllib.quote_plus(doc_id)
    url = "{0}{1}/{2}/{3}".format(self.host, index, doc_type, doc_id)
    # Pass proxies so this call honors the same proxy configuration as
    # every other HTTP request issued by this class.
    response = requests.get(url, proxies=self.proxies)
    if response.status_code not in [200, 201]:
        raise ElasticSearchException(response.text)
    else:
        return json.loads(response.text)
134 changes: 134 additions & 0 deletions tests/test_aliases.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
import json
from operator import itemgetter
from mock import patch
from estester import MultipleIndexesQueryTestCase, ElasticSearchException


class AliasMultipleIndexesTestCase(MultipleIndexesQueryTestCase):
    """
    Integration tests for index aliases: two indexes share the "band"
    alias, and "thepolice" additionally has an exclusive
    "single-man-band" alias. Requires a local ElasticSearch instance,
    as with the other ESTester tests.
    """

    # Fixture layout: index name -> aliases + documents to load.
    data = {
        "beatles": {
            "aliases": [
                "band"
            ],
            "fixtures": [
                {
                    "type": "member",
                    "id": "lenon",
                    "body": {
                        "name": "John Lenon",
                        "role": "singer"
                    }
                },
                {
                    "type": "member",
                    "id": "mccartney",
                    "body": {
                        "name": "Paul McCartney",
                        "role": "guitar"
                    }
                },
                {
                    "type": "member",
                    "id": "harrison",
                    "body": {
                        "name": "George Harrison",
                        "role": "bass"
                    }
                }
            ]
        },
        "thepolice": {
            "aliases": [
                "band",
                "single-man-band"
            ],
            "fixtures": [
                {
                    "type": "member",
                    "id": "sting",
                    "body": {
                        "name": "Gordon Matthew Thomas Sumner",
                        "role": "singer"
                    }
                }
            ]
        }
    }
    # timeout None makes the base class call refresh instead of
    # sleeping after loading fixtures, keeping the suite fast.
    timeout = None

    def test_finds_aliases_for_all_indices(self):
        # Alias order is not guaranteed, so compare as a set.
        self.assertEqual(set(self.get_aliases("thepolice")),
                         {"band", "single-man-band"})
        self.assertEqual(self.get_aliases("beatles"), ["band"])

    def test_create_alias_for_an_index(self):
        # Adding an alias must preserve the ones created during setup.
        self.create_aliases('thepolice', ['stingband'])
        self.assertEqual(set(self.get_aliases('thepolice')),
                         {'band', 'single-man-band', 'stingband'})

    def test_get_aliases_on_missing_index_must_return_empty(self):
        self.assertEqual(self.get_aliases('hoodoogurus'), [])

    def test_get_aliases_on_index_with_no_aliases_must_return_empty(self):
        self.create_index('metallica')
        self.assertEqual(self.get_aliases('metallica'), [])

    # I could not make elasticsearch's _aliases to return an error
    @patch('requests.get')
    def test_failure_in_getting_alias(self, get):
        # Mock a 400 response to exercise the error path in get_aliases.
        attrs = {
            "text": 'Error',
            "status_code": 400
        }
        get.return_value.configure_mock(**attrs)
        with self.assertRaises(ElasticSearchException) as cm:
            self.get_aliases('hoodoogurus')
        self.assertEqual(cm.exception.message, 'Error')

    def test_creating_alias_for_missing_index_must_fail(self):
        with self.assertRaises(ElasticSearchException) as cm:
            self.create_aliases('hoodoogurus', ['stingband'])
        expected = {
            "error": "IndexMissingException[[hoodoogurus] missing]",
            "status": 404
        }
        self.assertDictEqual(json.loads(cm.exception.message), expected)

    def test_search_alias_for_sting(self):
        # Searching through an alias must hit the aliased index.
        query = {
            "query": {
                "match": {
                    "name": "Gordon"
                }
            }
        }
        response = self.search_in_index("single-man-band", query)
        self.assertEqual(response["hits"]["total"], 1)
        self.assertEqual(response['hits']['hits'][0]['_id'], 'sting')

    def test_single_index_alias_must_return_only_one_singer(self):
        # The exclusive alias must not leak results from "beatles".
        query = {
            "query": {
                "match": {
                    "role": "singer"
                }
            }
        }
        response = self.search_in_index('single-man-band', query)
        self.assertEqual(response["hits"]["total"], 1)
        self.assertEqual(response["hits"]["hits"][0]['_id'], 'sting')

    def test_search_bands_for_singer(self):
        # The shared "band" alias spans both indexes, so one singer
        # from each is expected.
        query = {
            "query": {
                "match": {
                    "role": "singer"
                }
            }
        }
        response = self.search_in_index('band', query)
        self.assertEqual(response["hits"]["total"], 2)
        ids = map(itemgetter('_id'), response["hits"]["hits"])
        self.assertIn('sting', ids)
        self.assertIn('lenon', ids)
Loading