Skip to content

Commit 5b99ce4

Browse files
authored
Fixes truncate method and adds some tooling (#1)
* Fixes truncate method by rewriting 3 SQL statements into one so it can work also in AUTOCOMMIT mode. * Adds Makefile for easy tooling * Adds a script that will copy the setup.cfg version to the module
2 parents 18a00ba + 86dc6b0 commit 5b99ce4

File tree

10 files changed

+180
-46
lines changed

10 files changed

+180
-46
lines changed

Diff for: MANIFEST.in

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
include src/asyncpg_lostream/__version__

Diff for: Makefile

+93
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
# Copyright 2022 Red Hat, Inc.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
# Makefile for standardized tasks.
16+
17+
help:
18+
@echo "Targets for asyncpg-lostream development:"
19+
@echo " start_db : Start up the test database container (uses port 5432)"
20+
@echo " stop_db : Stop the test database container (destroys the container volume also)"
21+
@echo " verify_db : Verify that the database in the container is accepting connections"
22+
@echo " run_tests : Execute 'verify_db', then execute pytest"
23+
@echo " clean : Execute 'stop_db' then remove build output disk files"
24+
@echo " test : Execute 'clean', 'start_db', 'run_tests', 'stop_db'"
25+
@echo " sync_version : Sync the module version file to the version in setup.cfg"
26+
@echo " build : Execute 'clean', 'test', 'sync_version' then execute the build"
27+
@echo " verify_dist_dir : Ensure that the './dist' dir exists"
28+
@echo " verify_dist_contents : Ensure that the './dist' dir is not empty"
29+
@echo " verify_artifacts : Execute 'verify_dist_dir', 'verify_dist_contents'"
30+
@echo " twine_check : Validate that the artifacts can be successfully uploaded to PyPI"
31+
@echo
32+
33+
all: clean
34+
@make build
35+
36+
verify_db:
37+
@printf "Checking availability of test database: "
38+
@while ! docker exec apl-postgres pg_isready -qd apl_postgres; do printf "."; sleep 1; done
39+
@echo " PostgreSQL database is available."
40+
41+
start_db:
42+
@echo "Starting up test database..."
43+
@docker-compose -f ./docker/docker-compose.yml up -d postgres
44+
45+
stop_db:
46+
@echo "Shutting down test database..."
47+
@docker-compose -f ./docker/docker-compose.yml down -v
48+
49+
clean: stop_db
50+
@echo "Cleaning up any former builds..."
51+
@rm -rf ./dist
52+
@rm -rf ./src/asyncpg_lostream.egg-info/
53+
54+
run_tests: verify_db
55+
@echo "Running tests..."
56+
@python -m pytest
57+
58+
test: clean start_db run_tests
59+
@make stop_db
60+
61+
sync_version:
62+
@echo "Syncing version from setup.cfg..."
63+
@./scripts/sync_version.py
64+
65+
build: clean test sync_version
66+
@echo "Building targets..."
67+
@python -m build
68+
69+
verify_dist_dir:
70+
@if [[ ! -d ./dist ]]; then echo "The 'dist' directory does not exist."; false; else true; fi
71+
72+
verify_dist_contents:
73+
@if [[ ! "$(shell ls -A ./dist)" ]]; then echo "The 'dist' directory is empty."; false; else true; fi
74+
75+
verify_artifacts: verify_dist_dir
76+
@make verify_dist_contents
77+
78+
twine_check: verify_artifacts
79+
@echo "Making sure targets will successfully upload."
80+
@python -m twine check --strict ./dist/*
81+
82+
83+
84+
# ==============================================================================
85+
# ==============================================================================
86+
# For the code owner(s) only!!
87+
deploy_testpypi: twine_check
88+
@echo "Deploying to testpypi..."
89+
@python -m twine upload --verbose --repository testpypi dist/*
90+
91+
deploy_pypi: twine_check
92+
@echo "Deploying to pypi..."
93+
@python -m twine upload --verbose --repository pypi dist/*

Diff for: README.md

+11-5
Original file line numberDiff line numberDiff line change
@@ -95,16 +95,22 @@ async with PGLargeObject(async_connection, existing_lob_oid, "r") as pgl:
9595

9696
### Development
9797

98-
Start a local PostgreSQL container using `docker-compose -f ./docker/docker-compose.yml up -d`
98+
`make`, `docker-compose` and `docker` are required for development.
9999

100-
After making changes, create your unit tests in the `asyncpg-lostream/tests` directory.
100+
To list the make targets, use `make help`
101+
102+
To start a local PostgreSQL 13 container: `make start_db`
101103

102-
Test your changes with the command `python -m pytest`
104+
To shutdown the local PostgreSQL 13 container: `make stop_db`
103105

104-
Shutdown the local PostgreSQL container `docker-compose -f ./docker/docker-compose.yml down -v`
106+
After making changes, create your unit tests in the `asyncpg-lostream/tests` directory.
107+
108+
Test your changes with the command `make test`
105109

106110
## Packaging
107111

108-
Build the package using `python -m build`. This will put built packages into the `dist/` directory.
112+
If you intend to make a release, change the version in the `setup.cfg` file. This value will be copied to the module's `__version__` file.
113+
114+
Build the package using `make build`. This will run the tests and then build the artifacts. These will be put into the `dist/` directory.
109115
110116
For instructions on upload to PyPI, see the [Packaging Python Projects Documentation](https://packaging.python.org/en/latest/tutorials/packaging-projects/#uploading-the-distribution-archives)

Diff for: requirements.txt

+4
Original file line numberDiff line numberDiff line change
@@ -9,3 +9,7 @@ pytest
99
pytest-asyncio
1010
twine
1111
build
12+
wheel
13+
configparser
14+
packaging
15+

Diff for: scripts/sync_version.py

+40
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
#!/usr/bin/env python3
2+
# Copyright 2022 Red Hat, Inc.
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License");
5+
# you may not use this file except in compliance with the License.
6+
# You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
import configparser
17+
import pathlib
18+
19+
from packaging import version
20+
21+
MY_DIR = pathlib.Path(__file__).parent
22+
BASE_DIR = MY_DIR.parent
23+
SETUP_CFG = BASE_DIR / "setup.cfg"
24+
ASYNCPG_LOSTREAM = BASE_DIR / "src" / "asyncpg_lostream" / "__version__"
25+
26+
27+
setup_config = configparser.ConfigParser()
28+
setup_config.read(SETUP_CFG)
29+
if not ASYNCPG_LOSTREAM.exists():
30+
mod_version = "0.0.0"
31+
else:
32+
mod_version = open(ASYNCPG_LOSTREAM, "rt").read().strip()
33+
34+
cfg_ver = version.Version(setup_config["metadata"]["version"])
35+
mod_ver = version.Version(mod_version)
36+
37+
if cfg_ver != mod_ver:
38+
print(f"Copying setup.cfg version {cfg_ver} to __version__ file")
39+
with open(ASYNCPG_LOSTREAM, "wt") as ver_file:
40+
ver_file.write(setup_config["metadata"]["version"])

Diff for: setup.cfg

+4-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
[metadata]
22
name = asyncpg-lostream
3-
version = 0.1.1
3+
# Changing the version here will change the value in the module version
4+
# if you use the Makefile to build the artifacts.
5+
version = 0.1.2b1
46
author = Red Hat, Inc.
57
author_email = [email protected]
68
maintainer = HAP
@@ -40,6 +42,7 @@ package_dir =
4042
python_requires = >=3.9
4143
install_requires =
4244
sqlalchemy[asyncio] >= 1.4
45+
asyncpg
4346

4447
[options.packages.find]
4548
where = src

Diff for: src/asyncpg_lostream/__init__.py

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
import pathlib
2+
3+
MY_DIR = pathlib.Path(__file__).parent
4+
5+
VERSION = open(MY_DIR / "__version__", "rt").read().strip()

Diff for: src/asyncpg_lostream/__version__

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
0.1.2b1

Diff for: src/asyncpg_lostream/lostream.py

+21-38
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from __future__ import annotations
1616

1717
import logging
18-
from typing import AsyncIterator, List, Optional, Tuple, Union
18+
from typing import AsyncIterator, List, Optional, Tuple
1919

2020
from sqlalchemy import column, func, select, text
2121
from sqlalchemy.dialects.postgresql import ARRAY, OID, array
@@ -35,6 +35,7 @@
3535

3636
class PGLargeObjectError(Exception):
3737
"""Base PGLargeObject Exception"""
38+
3839
pass
3940

4041

@@ -100,9 +101,7 @@ def __init__(
100101
):
101102
self.session = session
102103
if chunk_size <= 0:
103-
raise ValueError(
104-
"chunk_size must be an integer greater than zero."
105-
)
104+
raise ValueError("chunk_size must be an integer greater than zero.")
106105
self.chunk_size = chunk_size
107106
self.oid = oid
108107
self.closed = False
@@ -130,9 +129,7 @@ async def __anext__(self) -> bytes:
130129
def resolve_mode(mode: str) -> Tuple[int, bool]:
131130
"""Translates string mode to the proper binary setting."""
132131
if mode not in VALID_MODES:
133-
raise ValueError(
134-
f"Mode {mode} must be one of {sorted(VALID_MODES)}"
135-
)
132+
raise ValueError(f"Mode {mode} must be one of {sorted(VALID_MODES)}")
136133

137134
append = mode.startswith("a")
138135
imode = 0
@@ -147,23 +144,16 @@ async def create_large_object(session: AsyncSession) -> int:
147144
sql = select(func.lo_create(0).label("loid"))
148145
oid = await session.scalar(sql)
149146
if oid == 0:
150-
raise PGLargeObjectNotCreated(
151-
"Requested large object was not created."
152-
)
147+
raise PGLargeObjectNotCreated("Requested large object was not created.")
148+
LOG.debug(f"Created large object with oid = {oid}")
153149
return oid
154150

155151
@staticmethod
156-
async def verify_large_object(
157-
session: AsyncSession, oid: int
158-
) -> Tuple[bool, int]:
152+
async def verify_large_object(session: AsyncSession, oid: int) -> Tuple[bool, int]:
159153
"""Verify that a large object exists. Returns oid and size."""
160154
sel_cols = [
161155
column("oid"),
162-
sum_(
163-
func.length(
164-
coalesce(text("data"), func.convert_to("", "utf-8"))
165-
)
166-
).label("data_length"),
156+
sum_(func.length(coalesce(text("data"), func.convert_to("", "utf-8")))).label("data_length"),
167157
]
168158
sql = (
169159
select(sel_cols)
@@ -182,15 +172,14 @@ async def verify_large_object(
182172
return rec
183173

184174
@staticmethod
185-
async def delete_large_object(
186-
session: AsyncSession, oids: List[int]
187-
) -> None:
175+
async def delete_large_object(session: AsyncSession, oids: List[int]) -> None:
188176
"""Deletes large objects. This will deallocate the large object oids specified."""
189177
if oids and all(o > 0 for o in oids):
190178
sql = select(func.lo_unlink(text("oid"))).select_from(
191179
func.unnest(array(oids, type_=ARRAY(OID))).alias("oid")
192180
)
193181
await session.execute(sql)
182+
LOG.debug(f"Deleted large objects with these OIDs: {oids}")
194183

195184
def closed_check(self):
196185
"""Verify that an object interface is closed."""
@@ -202,9 +191,7 @@ async def read(self, chunk_size: Optional[int] = None) -> bytes:
202191
self.closed_check()
203192

204193
if self.imode == INV_WRITE:
205-
raise PGLargeObjectUnsupportedOp(
206-
"Large Object class instance is set for write only"
207-
)
194+
raise PGLargeObjectUnsupportedOp("Large Object class instance is set for write only")
208195

209196
_chunk_size = chunk_size or self.chunk_size
210197

@@ -222,7 +209,7 @@ def _get_buffer_chunk(self, buffer: bytes, chunk_size: int) -> bytes:
222209
start = 0
223210
chunk = None
224211
while True:
225-
chunk = buffer[start:(start + chunk_size)]
212+
chunk = buffer[start : (start + chunk_size)] # noqa: E203
226213
if not chunk:
227214
break
228215
start += chunk_size
@@ -233,9 +220,7 @@ async def write(self, buff: bytes, chunk_size: Optional[int] = None) -> int:
233220
self.closed_check()
234221

235222
if self.imode == INV_READ:
236-
raise PGLargeObjectUnsupportedOp(
237-
"Large Object class instance is set for read only"
238-
)
223+
raise PGLargeObjectUnsupportedOp("Large Object class instance is set for read only")
239224

240225
if not buff:
241226
return 0
@@ -260,13 +245,13 @@ async def truncate(self) -> None:
260245
if self.imode == INV_READ:
261246
raise PGLargeObjectUnsupportedOp("not writeable")
262247

263-
open_sql = select(func.lo_open(self.oid, self.imode))
264-
fd = await self.session.scalar(open_sql)
265-
266-
trunc_sql = select(func.lo_truncate(fd, self.pos))
267-
await self.session.execute(trunc_sql)
268-
269-
close_sql = select(func.lo_close(fd))
248+
open_cte = select(func.lo_open(self.oid, self.imode).label("descriptor")).cte("fd")
249+
trunc_cte = (
250+
select(func.lo_truncate(open_cte.c.descriptor, self.pos).label("res"), open_cte.c.descriptor)
251+
.select_from(open_cte)
252+
.cte("trnc")
253+
)
254+
close_sql = select(func.lo_close(trunc_cte.c.descriptor)).select_from(trunc_cte)
270255
await self.session.execute(close_sql)
271256

272257
self.length = self.pos
@@ -288,9 +273,7 @@ async def open(self) -> None:
288273
self.length = length
289274
else:
290275
if not self.imode & INV_WRITE:
291-
raise PGLargeObjectNotFound(
292-
f"Large object with oid {self.oid} does not exist."
293-
)
276+
raise PGLargeObjectNotFound(f"Large object with oid {self.oid} does not exist.")
294277
else:
295278
self.oid = await self.create_large_object(self.session)
296279
self.length = self.pos = 0

Diff for: tests/integration/conftest.py

-2
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,6 @@
2323
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncSession, create_async_engine
2424
from sqlalchemy.orm import sessionmaker
2525

26-
# from .utils.db import create_database, drop_database
27-
2826

2927
class Settings:
3028
database_url = "postgresql+asyncpg://postgres:secret@localhost:5432/postgres"

0 commit comments

Comments
 (0)