Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,15 @@
<version>${scala.plugin.version}</version>
<configuration>
<launchers>
<launcher>
<id>clouseau</id>
<mainClass>com.cloudant.clouseau.Main</mainClass>
<jvmArgs>
<jvmArg>[email protected]</jvmArg>
<jvmArg>-Dclouseau.cookie=monster</jvmArg>
<jvmArg>-Dclouseau.dir=${basedir}/target/clouseau</jvmArg>
</jvmArgs>
</launcher>
<launcher>
<id>clouseau1</id>
<mainClass>com.cloudant.clouseau.Main</mainClass>
Expand Down Expand Up @@ -353,6 +362,26 @@
</execution>
</executions>
</plugin>
<!-- Code Coverage report generation -->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.7.9</version>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>generate-code-coverage-report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<extensions>
<extension>
Expand Down
51 changes: 51 additions & 0 deletions test/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
# Locust Test

Test `Clouseau` using [Locust](https://github.com/locustio/locust) and [Faker](https://github.com/joke2k/faker).

## Configuration options

Locust configuration options.

Command line | Description
--- | ---
--headless | Disable the web interface, and start the test
--only-summary | Only print the summary stats
--host | Host to load test
-u | Peak number of concurrent Locust users
-r | Rate to spawn users at (users per second)
-t | Stop after the specified amount of time
--docs-number | The number of generated documents (default: 100000)

```
locust -f locustfile.py --headless --only-summary --docs-number 10 -u 1 -r 1 -t 10
```

## Basic Usage

Run `CouchDB` and `Clouseau` in different terminals, and then run the locust test:

```
# Open 4 different terminals and run the command:
./dev/run --admin=adm:pass
mvn scala:run -Dlauncher=clouseau1
mvn scala:run -Dlauncher=clouseau2
mvn scala:run -Dlauncher=clouseau3
```

### Install dependencies:

```
./run install
```

### Run the locust tests:

```
./run locust
```

### Cleanup

```
./run clean
```
57 changes: 57 additions & 0 deletions test/data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import math
import json
from datetime import date
from random import choice
from faker import Faker


def write_to_files(data, filename, files_number=0):
    """Write *data* to one or more JSON files.

    When ``files_number`` is non-zero, *data* is split into chunks of at
    most ``files_number`` items and each chunk is written to a numbered
    file derived from *filename* (e.g. ``data0.json``, ``data1.json``).
    Otherwise the whole payload goes into a single file named *filename*.

    Args:
        data: JSON-serializable payload (list of docs or a dict).
        filename: target file name; the chunk index is inserted just
            before the ``.json`` suffix when chunking.
        files_number: maximum number of items per output file; 0 means
            "write everything to one file".
    """
    if files_number:
        chunks = [data[files_number * i:files_number * (i + 1)]
                  for i in range(math.ceil(len(data) / files_number))]
        # Insert the chunk index before the '.json' extension. The
        # original code used find()'s -1 directly when the suffix was
        # missing, which silently mangled the name; fall back to
        # appending the index at the end instead.
        idx = filename.find('.json')
        if idx == -1:
            idx = len(filename)
        for i, chunk in enumerate(chunks):
            with open(filename[:idx] + str(i) + filename[idx:], 'w') as outfile:
                json.dump(chunk, outfile)
    else:
        with open(filename, 'w') as outfile:
            json.dump(data, outfile)


def gen_data(n=10, files_number=10, latmin=0, latmax=50, lonmin=0, lonmax=50):
    """Generate *n* fake person documents and write them to JSON files.

    Writes the documents (chunked by *files_number*) to ``data<i>.json``
    and a summary of expected query counts to ``analysis.json``, which
    the locust test reads to verify search results.

    Args:
        n: number of documents to generate.
        files_number: max documents per output file (see write_to_files).
        latmin/latmax: inclusive latitude range counted under 'lat'.
        lonmin/lonmax: exclusive longitude range counted under 'lon'.
    """
    fake = Faker()
    fields = ['married', 'ethnicity', 'gender']
    data = []
    # Pre-seed every counter key so consumers of analysis.json never hit
    # a missing key when zero documents match a range (the original code
    # only created 'lat'/'lon'/'geo' lazily, which made
    # self.data["lat"] in locustfile.py raise KeyError in that case).
    counter = {'total_rows': n, 'lat': 0, 'lon': 0, 'geo': 0}
    for field in fields:
        counter[field] = {}

    for i in range(n):
        doc = {'_id': str(i)}
        doc['gender'] = choice(['M', 'F'])
        doc['name'] = fake.name_male() if doc['gender'] == 'M' else fake.name_female()
        doc['date_of_birth'] = fake.iso8601()
        doc['age'] = date.today().year - int(doc['date_of_birth'][:4])
        doc['married'] = 'False' if doc['age'] < 22 else choice(['True', 'False'])
        doc['ethnicity'] = choice(['White', 'Black', 'Asian', 'Hispanic', 'non-Hispanic'])
        full_address = fake.address()
        # Faker US addresses end with "..., ST 12345"; city/area/zip are
        # sliced by fixed offsets from the end — assumes the default
        # Faker locale format (TODO confirm for other locales).
        doc['address'] = {'full_address': full_address}
        doc['address']['city'] = full_address[full_address.find('\n') + 1: -10]
        doc['address']['area'] = full_address[-8:-6]
        doc['address']['zip'] = full_address[-5:]
        doc['lat'] = float(fake.latitude())
        doc['lon'] = float(fake.longitude())
        data.append(doc)

        for field in fields:
            counter[field][doc[field]] = counter[field].get(doc[field], 0) + 1

        # lat bounds are inclusive, lon bounds exclusive — deliberately
        # mirrors the [0 TO 50] vs {0 TO 50} range queries in the test.
        in_lat = latmin <= doc['lat'] <= latmax
        in_lon = lonmin < doc['lon'] < lonmax
        if in_lat:
            counter['lat'] += 1
        if in_lon:
            counter['lon'] += 1
        if in_lat and in_lon:
            counter['geo'] += 1

    write_to_files(data, 'data.json', files_number)
    write_to_files(counter, 'analysis.json')
24 changes: 24 additions & 0 deletions test/data_partition.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import json
from random import *
from faker import Faker


def write_to_files(data, filename):
    """Serialize *data* as JSON into the file named *filename*."""
    with open(filename, 'w') as fh:
        json.dump(data, fh)


def gen_data(n=10, id='sensor-0'):
    """Generate *n* fake sensor-reading documents for partition *id*
    and write them to ``<id>.json``.

    NOTE: the parameter name ``id`` shadows the builtin, but it is part
    of the public keyword interface and is kept for compatibility.
    """
    fake = Faker()
    docs = []
    for _ in range(n):
        doc = {
            '_id': id + ':sensor-reading-' + fake.uuid4(),
            'sensor_id': id,
            'location': [float(fake.latitude()), float(fake.longitude())],
            'field_name': fake.sentence(nb_words=5, variable_nb_words=False),
            'readings': [[fake.iso8601(), round(uniform(0.1, 0.12), 2)]
                         for _ in range(randint(1, 5))],
        }
        docs.append(doc)
    write_to_files(docs, id + '.json')
195 changes: 195 additions & 0 deletions test/locustfile.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,195 @@
import math
import json
import time
import requests
from locust import events, HttpUser, constant, task, tag

import data
from logger import logger

# CouchDB endpoints with inline basic-auth credentials — matches the
# `./dev/run --admin=adm:pass` invocation documented in test/README.md.
URL = "http://adm:pass@localhost:5984"
DB = "http://adm:pass@localhost:5984/demo"
# Shared requests session for setup/teardown calls made outside of
# Locust's own instrumented HTTP client (not counted in locust stats).
SESSION = requests.session()
# Rolling list of perf_counter timestamps; each setup phase appends one
# and logs the delta against the previous entry.
timer = [time.perf_counter()]


def create_database():
    """Drop the demo database if it already exists, then (re)create it."""
    already_exists = SESSION.get(DB).status_code == 200
    if already_exists:
        SESSION.delete(DB)
    SESSION.put(DB)


def insert_docs(docs_number, files_number):
    """Bulk-load the generated documents into the demo database.

    Reads the ceil(docs_number / files_number) chunk files produced by
    data.gen_data (``data0.json``, ``data1.json``, ...) and posts each
    as a single ``_bulk_docs`` request.
    """
    total_files = math.ceil(docs_number / files_number)
    for idx in range(total_files):
        with open("data" + str(idx) + ".json") as json_file:
            payload = {"docs": json.load(json_file)}
        SESSION.post(DB + "/_bulk_docs", json=payload,
                     headers={"Content-Type": "application/json"})


def create_indexes():
    """Create the search design document with two Clouseau indexes.

    ``search_index`` indexes gender/age/married/ethnicity;
    ``geo_index`` indexes city/lat/lon for the range queries.

    BUG FIX: the original repeated the ethnicity index() line twice
    (an exact duplicate), indexing the field twice per document; the
    duplicate is removed.
    """
    design_docs = {
        "_id": "_design/search",
        "indexes": {
            "search_index": {
                "index": "function(doc) {if(doc.gender) {index(\"gender\", doc.gender, {\"store\": true} );};"
                         "if(doc.age) {index(\"age\", doc.age, {\"store\": true} );};"
                         "if(doc.married) {index(\"married\", doc.married, {\"store\": true} );};"
                         "if(doc.ethnicity) {index(\"ethnicity\", doc.ethnicity, {\"store\": true} );}}"
            },
            "geo_index": {
                "index": "function(doc) {"
                         "if(doc.address.city) {"
                         "index(\"city\", doc.address.city, {\"store\": true} );"
                         "index(\"lat\", doc.lat, {\"store\": true} );"
                         "index(\"lon\", doc.lon, {\"store\": true} );}}"
            }
        }
    }
    SESSION.put(f"{DB}/_design/search", data=json.dumps(design_docs))


def get_result(condition, response, func_name):
    """Mark a Locust catch_response block as passed or failed.

    Args:
        condition: truthy when the response met the expectation.
        response: the Locust catch_response context object.
        func_name: task name used to label the failure message.
    """
    # An explicit if/else replaces the original conditional expression,
    # which was evaluated purely for its side effects (unidiomatic).
    if condition:
        response.success()
    else:
        response.failure(func_name + " FAILED.")


@events.init_command_line_parser.add_listener
def _(parser):
    """Register custom CLI options that size the generated data set."""
    parser.add_argument(
        "--docs-number", type=int, env_var="LOCUST_DOCS_NUMBER",
        default=100_000,
        help="How many documents do you want to generate")
    parser.add_argument(
        "--files-number", type=int, env_var="LOCUST_FILES_NUMBER",
        default=5000,
        help="How many documents are stored in each JSON file")


@events.test_start.add_listener
def _(environment, **kw):
    """Generate the test corpus once, before any locust user starts."""
    opts = environment.parsed_options
    data.gen_data(opts.docs_number, opts.files_number)
    timer.append(time.perf_counter())
    logger.critical(f"1. Generate documents ---- TIME: {timer[-1] - timer[-2]}")


class LoadTest(HttpUser):
    """Locust user that provisions the demo database once, then queries
    the Clouseau search endpoints and checks row counts against the
    expected values recorded in ``analysis.json`` by ``data.gen_data``.
    """

    host = URL
    wait_time = constant(1)  # 1 s between tasks per user

    def on_start(self):
        """Create the database, bulk-load documents, create indexes, and
        read the pre-computed expected counts into ``self.data``."""
        self.client.get("/", name=self.on_start.__name__)
        create_database()
        timer.append(time.perf_counter())
        logger.debug(f"2. Create Database ---- TIME: {timer[-1] - timer[-2]}")
        insert_docs(self.environment.parsed_options.docs_number, self.environment.parsed_options.files_number)
        timer.append(time.perf_counter())
        logger.info(f"2. Insert docs ---- TIME: {timer[-1] - timer[-2]}")
        create_indexes()
        timer.append(time.perf_counter())
        logger.info(f"2. Create design docs ---- TIME: {timer[-1] - timer[-2]}")
        logger.critical("3. Start testing ... ")
        with open("analysis.json") as json_file:
            # Expected counts keyed by field name (see data.gen_data).
            self.data = json.load(json_file)

    @tag("search")
    @task
    def search_all_docs(self):
        """`*:*` must return every generated document."""
        with self.client.get("/demo/_design/search/_search/search_index?query=*:*",
                             catch_response=True, name="Search All Docs") as response:
            get_result(
                response.status_code == 200 and response.json()["total_rows"] == self.data["total_rows"],
                response, self.search_all_docs.__name__)

    @tag("search")
    @task
    def search_gender_is_male(self):
        """`gender:m` must match the generated male count (the standard
        analyzer lowercases, so 'm' matches the stored 'M')."""
        with self.client.get("/demo/_design/search/_search/search_index?query=gender:m",
                             catch_response=True, name="Search Gender is Male") as response:
            get_result(
                response.status_code == 200 and response.json()["total_rows"] == self.data["gender"]["M"],
                response, self.search_gender_is_male.__name__)

    @tag("search")
    @task
    def search_gender_is_male_with_limit_2(self):
        """`limit=2` must cap the returned rows at exactly two."""
        with self.client.get("/demo/_design/search/_search/search_index?query=gender:m&limit=2",
                             catch_response=True, name="Search Gender Male with Limit 2") as response:
            get_result(
                response.status_code == 200 and len(response.json()["rows"]) == 2,
                response, self.search_gender_is_male_with_limit_2.__name__)

    @tag("search")
    @task
    def search_gender_is_female_and_sort_by_age(self):
        """Sorted query: check the total and, when at least two rows
        exist, that the first two are in ascending sort order."""
        with self.client.get("/demo/_design/search/_search/search_index?query=gender:f&sort=\"age\"",
                             catch_response=True, name="Search Gender is Female AND Sort by age") as response:
            result = response.json()
            if self.data["gender"]["F"] >= 2:
                # Only the first pair is checked, not the whole ordering.
                conditions = result["total_rows"] == self.data["gender"]["F"] and \
                    result["rows"][0]["order"][0] <= result["rows"][1]["order"][0]
            else:
                conditions = result["total_rows"] == self.data["gender"]["F"]
            get_result(conditions, response, self.search_gender_is_female_and_sort_by_age.__name__)

    @tag("search")
    @task
    def search_married_people_age_should_greater_than_21(self):
        """Every married hit must be older than 21 (gen_data never marks
        anyone under 22 as married).

        BUG FIX: the original called response.success() unconditionally
        after the loop, overriding any failure recorded inside it, so
        this task could never fail. Return early on the first bad row.
        """
        with self.client.get(
                "/demo/_design/search/_search/search_index?query=married:true",
                catch_response=True, name="Search married people age > 21") as response:
            result = response.json()
            for row in result["rows"]:
                if row["fields"]["age"] <= 21:
                    response.failure(self.search_married_people_age_should_greater_than_21.__name__)
                    return
            response.success()

    @tag("search")
    @task
    def search_ethnicity_white_or_asian(self):
        """OR query total must equal the sum of the two ethnicity counts."""
        with self.client.get(
                "/demo/_design/search/_search/search_index?query=ethnicity:White OR ethnicity:Asian",
                catch_response=True, name="Search ethnicity White OR Asian") as response:
            result = response.json()
            get_result(
                response.status_code == 200 and
                result["total_rows"] == self.data["ethnicity"]["White"] + self.data["ethnicity"]["Asian"],
                response, self.search_ethnicity_white_or_asian.__name__)

    @tag("geo")
    @task
    def search_lat_within_range_0_to_50_include(self):
        """Inclusive range `[0 TO 50]` matches gen_data's `<=` count."""
        with self.client.get(
                "/demo/_design/search/_search/geo_index?query=lat:[0+TO+50]",
                catch_response=True, name="Search latitude within [0, 50]") as response:
            result = response.json()
            get_result(
                response.status_code == 200 and
                result["total_rows"] == self.data["lat"],
                response, self.search_lat_within_range_0_to_50_include.__name__)

    @tag("geo")
    @task
    def search_lon_within_range_0_to_50_exclude(self):
        """Exclusive range `{0 TO 50}` matches gen_data's `<` count."""
        with self.client.get(
                "/demo/_design/search/_search/geo_index?query=lon:{0+TO+50}",
                catch_response=True, name="Search longitude within {0, 50}") as response:
            result = response.json()
            get_result(
                response.status_code == 200 and
                result["total_rows"] == self.data["lon"],
                response, self.search_lon_within_range_0_to_50_exclude.__name__)

    @tag("geo")
    @task
    def search_geo_within_range_0_to_50(self):
        """Conjunction of the two range queries must match the 'geo' count."""
        with self.client.get(
                "/demo/_design/search/_search/geo_index?query=lat:[0+TO+50] AND lon:{0+TO+50}",
                catch_response=True, name="Search geo within [0, 50] (AND)") as response:
            result = response.json()
            get_result(
                response.status_code == 200 and
                result["total_rows"] == self.data["geo"],
                response, self.search_geo_within_range_0_to_50.__name__)

    def on_stop(self):
        """Final liveness ping, log total teardown time, drop the database."""
        self.client.get("/", name=self.on_stop.__name__)
        timer.append(time.perf_counter())
        logger.debug(f"4. Delete database, and shut down the locust ---- TIME: {timer[-1] - timer[-2]}")
        SESSION.delete(DB)
Loading