
Commit 705a57d

Uprev to new pydantic and black formatting (#275)
* Uprev to new pydantic and black formatting
* Removing travis
* Change badge
* Remove travis
1 parent 62cb2e5 commit 705a57d

32 files changed (+952 −1238 lines)

.travis.yml

-36
This file was deleted.

Makefile

+6
@@ -4,6 +4,12 @@
 install:
 	pip install -r requirements.txt
 
+.PHONY: format
+format:
+	isort -rc -w 120 tcsocket
+	isort -rc -w 120 tests
+	black -S -l 120 --target-version py38 tcsocket tests
+
 .PHONY: isort
 isort:
 	isort -rc -w 120 tcsocket
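The new `format` target is what produced most of the whitespace-only churn in the Python files below. As a rough, hypothetical illustration of the two black flags it uses (nothing here is taken from the repo): `-S` (`--skip-string-normalization`) leaves existing string quotes alone, and `-l 120` only wraps lines longer than 120 characters.

# Hypothetical snippet, not from this repo: after `black -S -l 120`, the single quotes
# below are preserved (-S) and the call stays on one line because it is under 120 chars (-l 120).
def describe(name='socket-server', url='https://github.com/tutorcruncher/socket-server'):
    return '{} lives at {}'.format(name, url)


print(describe())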

README.md

+1-1
@@ -1,7 +1,7 @@
 socket-server
 =============
 
-[![Build Status](https://travis-ci.org/tutorcruncher/socket-server.svg?branch=master)](https://travis-ci.org/tutorcruncher/socket-server)
+[![Build Status](https://github.com/tutorcruncher/socket-server/workflows/CI/badge.svg)
 [![codecov](https://codecov.io/gh/tutorcruncher/socket-server/branch/master/graph/badge.svg)](https://codecov.io/gh/tutorcruncher/socket-server)
 
 Backend application for [TutorCruncher's](https://tutorcruncher.com) web integration.

setup.cfg

+8-2
@@ -5,8 +5,7 @@ addopts = --isort --aiohttp-loop uvloop --aiohttp-fast --tb=native
 [flake8]
 max-line-length = 120
 max-complexity = 12
-# remove E252 once https://github.com/PyCQA/pycodestyle/issues/753 is fixed
-ignore = W504, E252
+ignore = E203, W503, W504
 
 [coverage:run]
 source = tcsocket
@@ -23,3 +22,10 @@ exclude_lines =
     raise AssertionError
     raise NotImplementedError
     raise NotImplemented
+
+[isort]
+line_length = 120
+multi_line_output=3
+include_trailing_comma=True
+force_grid_wrap=0
+combine_as_imports=True
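The new `[isort]` block mirrors black's output style: `multi_line_output=3` is the "vertical hanging indent" mode and `include_trailing_comma=True` adds a comma after the last name, which is exactly the shape the reworked import in `tcsocket/app/main.py` below takes. A minimal sketch of that layout, using a standard-library import purely for illustration (isort only wraps once a line exceeds `line_length = 120`):

# Vertical hanging indent (multi_line_output=3) with a trailing comma; this is how isort
# wraps an over-long import under the settings above. The stdlib module is just an example.
from concurrent.futures import (
    ALL_COMPLETED,
    FIRST_COMPLETED,
    ProcessPoolExecutor,
    ThreadPoolExecutor,
    as_completed,
)

print(ALL_COMPLETED, FIRST_COMPLETED, ProcessPoolExecutor, ThreadPoolExecutor, as_completed)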

tcsocket/app/geo.py

+11-4
@@ -39,8 +39,9 @@ async def geocode(request):
     loc_data = await redis.get(loc_key)
     if loc_data:
         result = json.loads(loc_data)
-        logger.info('cached geocode result "%s|%s" > "%s"', location_str, region,
-                    result.get('error') or result['pretty'])
+        logger.info(
+            'cached geocode result "%s|%s" > "%s"', location_str, region, result.get('error') or result['pretty']
+        )
         return result
 
     ip_key = 'geoip:' + ip_address
@@ -76,6 +77,12 @@ async def geocode(request):
     else:
         result = {'error': 'no_results'}
     await redis.setex(loc_key, NINETY_DAYS, json.dumps(result).encode())
-    logger.info('new geocode result "%s|%s" > "%s" (%d from "%s")',
-                location_str, region, result.get('error') or result['pretty'], geo_attempts, ip_address)
+    logger.info(
+        'new geocode result "%s|%s" > "%s" (%d from "%s")',
+        location_str,
+        region,
+        result.get('error') or result['pretty'],
+        geo_attempts,
+        ip_address,
+    )
     return result
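Both hunks sit inside the same redis-backed cache logic: a cached result is returned straight from `loc_key`, otherwise the freshly geocoded result is stored with `setex` for `NINETY_DAYS`. A minimal sketch of that cache-aside pattern, with the helper name and arguments invented for illustration (only the `get`/`setex` calls mirror the code above):

import json

NINETY_DAYS = 90 * 24 * 3600  # assumption: the constant is defined elsewhere in geo.py


async def cached_geocode(redis, loc_key, geocode_upstream):
    # hypothetical helper: return the cached JSON blob if present,
    # otherwise geocode and cache the result for ninety days
    cached = await redis.get(loc_key)
    if cached:
        return json.loads(cached)
    result = await geocode_upstream()
    await redis.setex(loc_key, NINETY_DAYS, json.dumps(result).encode())
    return result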

tcsocket/app/logs.py

+7-24
@@ -3,7 +3,7 @@
 import os
 
 
-def setup_logging(verbose: bool=False):
+def setup_logging(verbose: bool = False):
     """
     setup logging config for socket by updating the arq logging config
     """
@@ -15,38 +15,21 @@ def setup_logging(verbose: bool=False):
     config = {
         'version': 1,
         'disable_existing_loggers': False,
-        'formatters': {
-            'socket.default': {
-                'format': '%(levelname)s %(name)s %(message)s',
-            },
-        },
+        'formatters': {'socket.default': {'format': '%(levelname)s %(name)s %(message)s'}},
         'handlers': {
-            'socket.default': {
-                'level': log_level,
-                'class': 'logging.StreamHandler',
-                'formatter': 'socket.default',
-            },
+            'socket.default': {'level': log_level, 'class': 'logging.StreamHandler', 'formatter': 'socket.default'},
             'sentry': {
                 'level': 'WARNING',
                 'class': 'raven.handlers.logging.SentryHandler',
                 'dsn': raven_dsn,
                 'release': os.getenv('COMMIT', None),
-                'name': os.getenv('SERVER_NAME', '-')
+                'name': os.getenv('SERVER_NAME', '-'),
             },
         },
         'loggers': {
-            'socket': {
-                'handlers': ['socket.default', 'sentry'],
-                'level': log_level,
-            },
-            'gunicorn.error': {
-                'handlers': ['sentry'],
-                'level': 'ERROR',
-            },
-            'arq': {
-                'handlers': ['socket.default', 'sentry'],
-                'level': log_level,
-            },
+            'socket': {'handlers': ['socket.default', 'sentry'], 'level': log_level},
+            'gunicorn.error': {'handlers': ['sentry'], 'level': 'ERROR'},
+            'arq': {'handlers': ['socket.default', 'sentry'], 'level': log_level},
         },
     }
     logging.config.dictConfig(config)
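The collapsed one-line dicts are purely formatting; the config is still fed to `logging.config.dictConfig`. A standalone sketch of the same shape, with the Sentry/raven handler left out and a fixed level so it runs anywhere (names mirror the diff, but this is an illustration, not the module itself):

import logging
import logging.config

config = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {'socket.default': {'format': '%(levelname)s %(name)s %(message)s'}},
    'handlers': {'socket.default': {'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'socket.default'}},
    'loggers': {'socket': {'handlers': ['socket.default'], 'level': 'INFO'}},
}
logging.config.dictConfig(config)

# child loggers such as 'socket.geo' propagate up to the 'socket' logger configured above
logging.getLogger('socket.geo').info('geocode cache warmed')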

tcsocket/app/main.py

+13-8
@@ -9,8 +9,14 @@
 from .middleware import middleware
 from .settings import THIS_DIR, Settings
 from .views import favicon, index, labels_list, qual_level_list, robots_txt, subject_list
-from .views.appointments import (appointment_list, appointment_webhook, appointment_webhook_delete, book_appointment,
-                                 check_client, service_list)
+from .views.appointments import (
+    appointment_list,
+    appointment_webhook,
+    appointment_webhook_delete,
+    book_appointment,
+    check_client,
+    service_list,
+)
 from .views.company import company_create, company_list, company_options, company_update
 from .views.contractor import contractor_get, contractor_list, contractor_set
 from .views.enquiry import clear_enquiry, enquiry
@@ -20,9 +26,7 @@ async def startup(app: web.Application):
     settings: Settings = app['settings']
     redis = await create_pool(settings.redis_settings)
     app.update(
-        pg_engine=await create_engine(settings.pg_dsn),
-        redis=redis,
-        session=ClientSession(),
+        pg_engine=await create_engine(settings.pg_dsn), redis=redis, session=ClientSession(),
     )
 
 
@@ -48,8 +52,9 @@ def setup_routes(app):
     app.router.add_post(r'/{company}/webhook/contractor', contractor_set, name='webhook-contractor')
     app.router.add_post(r'/{company}/webhook/clear-enquiry', clear_enquiry, name='webhook-clear-enquiry')
     app.router.add_post(r'/{company}/webhook/appointments/{id:\d+}', appointment_webhook, name='webhook-appointment')
-    app.router.add_delete(r'/{company}/webhook/appointments/{id:\d+}', appointment_webhook_delete,
-                          name='webhook-appointment-delete')
+    app.router.add_delete(
+        r'/{company}/webhook/appointments/{id:\d+}', appointment_webhook_delete, name='webhook-appointment-delete'
+    )
 
     app.router.add_get(r'/{company}/contractors', contractor_list, name='contractor-list')
     app.router.add_get(r'/{company}/contractors/{id:\d+}', contractor_get, name='contractor-get')
@@ -64,7 +69,7 @@ def setup_routes(app):
     app.router.add_post(r'/{company}/book-appointment', book_appointment, name='book-appointment')
 
 
-def create_app(loop, *, settings: Settings=None):
+def create_app(loop, *, settings: Settings = None):
     app = web.Application(middlewares=middleware)
     settings = settings or Settings()
     app['settings'] = settings
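For context on how `create_app` is consumed, a hypothetical local entrypoint (nothing like this is added by the commit), assuming `create_app` returns the configured `web.Application` and that `Settings()` pulls its values from the environment:

import asyncio

from aiohttp import web

from tcsocket.app.main import create_app
from tcsocket.app.settings import Settings

# assumption: create_app takes the loop positionally (as in its signature above) and returns the app
app = create_app(asyncio.get_event_loop(), settings=Settings())
web.run_app(app, port=8000)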

tcsocket/app/management.py

+25-26
@@ -34,12 +34,7 @@
 
 def lenient_connection(settings: Settings, retries=5):
     try:
-        return psycopg2.connect(
-            password=settings.pg_password,
-            host=settings.pg_host,
-            port=settings.pg_port,
-            user=settings.pg_user,
-        )
+        return psycopg2.connect(password=settings.pg_password, dsn=settings.pg_dsn,)
     except psycopg2.Error as e:
         if retries <= 0:
             raise
@@ -73,14 +68,14 @@ def populate_db(engine):
     """
 
 
-def prepare_database(delete_existing: Union[bool, callable]) -> bool:
+def prepare_database(delete_existing: Union[bool, callable], settings: Settings = None) -> bool:
     """
     (Re)create a fresh database and run migrations.
 
     :param delete_existing: whether or not to drop an existing database if it exists
     :return: whether or not a database as (re)created
     """
-    settings = Settings()
+    settings = settings or Settings()
 
     with psycopg2_cursor(settings) as cur:
         cur.execute('SELECT EXISTS (SELECT datname FROM pg_catalog.pg_database WHERE datname=%s)', (settings.pg_name,))
@@ -96,13 +91,12 @@ def prepare_database(delete_existing: Union[bool, callable]) -> bool:
             else:
                 print(f'dropping existing connections to "{settings.pg_name}"...')
                 cur.execute(DROP_CONNECTIONS, (settings.pg_name,))
-                print(f'dropping database "{settings.pg_name}" as it already exists...')
-                cur.execute(f'DROP DATABASE {settings.pg_name}')
-        else:
-            print(f'database "{settings.pg_name}" does not yet exist')
 
-        print(f'creating database "{settings.pg_name}"...')
-        cur.execute(f'CREATE DATABASE {settings.pg_name}')
+                logger.debug('dropping and re-creating the schema...')
+                cur.execute('drop schema public cascade;\ncreate schema public;')
+        else:
+            print(f'database "{settings.pg_name}" does not yet exist, creating')
+            cur.execute(f'CREATE DATABASE {settings.pg_name}')
 
     engine = create_engine(settings.pg_dsn)
     print('creating tables from model definition...')
@@ -122,9 +116,11 @@ def patch(func):
 
 def run_patch(live, patch_name):
     if patch_name is None:
-        print('available patches:\n{}'.format(
-            '\n'.join(' {}: {}'.format(p.__name__, p.__doc__.strip('\n ')) for p in patches)
-        ))
+        print(
+            'available patches:\n{}'.format(
+                '\n'.join(' {}: {}'.format(p.__name__, p.__doc__.strip('\n ')) for p in patches)
+            )
+        )
         return
     patch_lookup = {p.__name__: p for p in patches}
     try:
@@ -168,8 +164,11 @@ def print_tables(conn):
         'float8': 'FLOAT',
     }
     for table_name, *_ in result:
-        r = conn.execute("SELECT column_name, udt_name, character_maximum_length, is_nullable, column_default "
-                         "FROM information_schema.columns WHERE table_name=%s", table_name)
+        r = conn.execute(
+            "SELECT column_name, udt_name, character_maximum_length, is_nullable, column_default "
+            "FROM information_schema.columns WHERE table_name=%s",
+            table_name,
+        )
         fields = []
         for name, col_type, max_chars, nullable, dft in r:
            col_type = type_lookup.get(col_type, col_type.upper())
@@ -204,11 +203,13 @@ def add_labels(conn):
     add labels field to contractors
     """
     conn.execute('ALTER TABLE contractors ADD labels VARCHAR(255)[]')
-    conn.execute("""
+    conn.execute(
+        """
     CREATE INDEX ix_contractors_labels
     ON contractors
     USING btree (labels);
-    """)
+    """
+    )
 
 
 @patch
@@ -220,11 +221,9 @@ def add_domains_options(conn):
     conn.execute('ALTER TABLE companies ADD options JSONB')
     updated = 0
     for id, domain in conn.execute('SELECT id, domain FROM companies WHERE domain IS NOT NULL'):
-        conn.execute((
-            update(sa_companies)
-            .values({'domains': [domain, 'www.' + domain]})
-            .where(sa_companies.c.id == id)
-        ))
+        conn.execute(
+            (update(sa_companies).values({'domains': [domain, 'www.' + domain]}).where(sa_companies.c.id == id))
+        )
         updated += 1
     print(f'domains updated for {updated} companies')
     conn.execute('ALTER TABLE companies DROP COLUMN domain')
