
Commit bf942a9

Fully removed all async dependencies
1 parent b64c771 commit bf942a9


15 files changed, +14 -294 lines changed

.gitmodules

-3
@@ -1,6 +1,3 @@
-[submodule "async"]
-	path = gitdb/ext/async
-	url = https://github.com/gitpython-developers/async.git
 [submodule "smmap"]
 	path = gitdb/ext/smmap
 	url = https://github.com/Byron/smmap.git

doc/source/changes.rst

+7
@@ -2,6 +2,13 @@
 Changelog
 #########
 
+*****
+0.6.0
+*****
+
+* Added support for python 3.X
+* Removed all `async` dependencies and all `*_async` versions of methods with it.
+
 *****
 0.5.4
 *****

gitdb/__init__.py

+2 -2
@@ -10,7 +10,7 @@
 #{ Initialization
 def _init_externals():
     """Initialize external projects by putting them into the path"""
-    for module in ('async', 'smmap'):
+    for module in ('smmap',):
         sys.path.append(os.path.join(os.path.dirname(__file__), 'ext', module))
 
 try:
@@ -27,7 +27,7 @@ def _init_externals():
 __author__ = "Sebastian Thiel"
 __contact__ = "[email protected]"
 __homepage__ = "https://github.com/gitpython-developers/gitdb"
-version_info = (0, 5, 5)
+version_info = (0, 6, 0)
 __version__ = '.'.join(str(i) for i in version_info)
 
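The version bump to (0, 6, 0) gives downstream code a clean way to tell whether the `*_async` methods are still available. A minimal sketch, assuming only the module-level `version_info` tuple shown above; the constant name is illustrative:

    import gitdb

    # gitdb 0.6.0 removes the async dependency and every *_async method,
    # so branch once on the version tuple instead of probing each method.
    HAS_ASYNC_API = gitdb.version_info < (0, 6, 0)

Code that must support both lines can keep a single loop-based path (see the sketch after the gitdb/db/base.py diff below) and only fall back to the reader-based API when HAS_ASYNC_API is true.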

gitdb/db/base.py

-55
@@ -4,7 +4,6 @@
 # the New BSD License: http://www.opensource.org/licenses/bsd-license.php
 """Contains implementations of database retrieveing objects"""
 from gitdb.util import (
-    pool,
     join,
     LazyMixin,
     hex_to_bin
@@ -15,10 +14,6 @@
     AmbiguousObjectName
 )
 
-from async import (
-    ChannelThreadTask
-)
-
 from itertools import chain
 from functools import reduce
 
@@ -41,47 +36,18 @@ def has_object(self, sha):
         binary sha is contained in the database"""
         raise NotImplementedError("To be implemented in subclass")
 
-    def has_object_async(self, reader):
-        """Return a reader yielding information about the membership of objects
-        as identified by shas
-        :param reader: Reader yielding 20 byte shas.
-        :return: async.Reader yielding tuples of (sha, bool) pairs which indicate
-            whether the given sha exists in the database or not"""
-        task = ChannelThreadTask(reader, str(self.has_object_async), lambda sha: (sha, self.has_object(sha)))
-        return pool.add_task(task)
-
     def info(self, sha):
         """ :return: OInfo instance
         :param sha: bytes binary sha
         :raise BadObject:"""
         raise NotImplementedError("To be implemented in subclass")
 
-    def info_async(self, reader):
-        """Retrieve information of a multitude of objects asynchronously
-        :param reader: Channel yielding the sha's of the objects of interest
-        :return: async.Reader yielding OInfo|InvalidOInfo, in any order"""
-        task = ChannelThreadTask(reader, str(self.info_async), self.info)
-        return pool.add_task(task)
-
     def stream(self, sha):
         """:return: OStream instance
         :param sha: 20 bytes binary sha
         :raise BadObject:"""
         raise NotImplementedError("To be implemented in subclass")
 
-    def stream_async(self, reader):
-        """Retrieve the OStream of multiple objects
-        :param reader: see ``info``
-        :param max_threads: see ``ObjectDBW.store``
-        :return: async.Reader yielding OStream|InvalidOStream instances in any order
-
-        **Note:** depending on the system configuration, it might not be possible to
-        read all OStreams at once. Instead, read them individually using reader.read(x)
-        where x is small enough."""
-        # base implementation just uses the stream method repeatedly
-        task = ChannelThreadTask(reader, str(self.stream_async), self.stream)
-        return pool.add_task(task)
-
     def size(self):
         """:return: amount of objects in this database"""
         raise NotImplementedError()
@@ -129,27 +95,6 @@ def store(self, istream):
         :raise IOError: if data could not be written"""
         raise NotImplementedError("To be implemented in subclass")
 
-    def store_async(self, reader):
-        """
-        Create multiple new objects in the database asynchronously. The method will
-        return right away, returning an output channel which receives the results as
-        they are computed.
-
-        :return: Channel yielding your IStream which served as input, in any order.
-            The IStreams sha will be set to the sha it received during the process,
-            or its error attribute will be set to the exception informing about the error.
-
-        :param reader: async.Reader yielding IStream instances.
-            The same instances will be used in the output channel as were received
-            in by the Reader.
-
-        **Note:** As some ODB implementations implement this operation atomic, they might
-            abort the whole operation if one item could not be processed. Hence check how
-            many items have actually been produced."""
-        # base implementation uses store to perform the work
-        task = ChannelThreadTask(reader, str(self.store_async), self.store)
-        return pool.add_task(task)
-
     #} END edit interface
 
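Each removed `*_async` method was only a thin wrapper that pushed the matching synchronous call through a ChannelThreadTask on the shared pool. With the wrappers gone, callers iterate over the synchronous interface themselves. A minimal sketch of equivalent plain loops, assuming `db` is any object database exposing the remaining has_object/info/stream methods and `shas` is an iterable of 20-byte binary shas (helper names are illustrative):

    def membership(db, shas):
        # Replaces has_object_async(): one (sha, bool) pair per input sha.
        for sha in shas:
            yield (sha, db.has_object(sha))

    def infos(db, shas):
        # Replaces info_async(): yields an OInfo per sha; BadObject now
        # propagates as an exception instead of arriving as an InvalidOInfo.
        for sha in shas:
            yield db.info(sha)

    def streams(db, shas):
        # Replaces stream_async(): one open OStream at a time, which also
        # sidesteps the handle-pressure caveat from the old docstring.
        for sha in shas:
            yield db.stream(sha)

Unlike the async readers, these generators yield results in input order, and writes go through db.store(istream) one item at a time.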

gitdb/db/mem.py

+1 -7
@@ -32,10 +32,7 @@ class MemoryDB(ObjectDBR, ObjectDBW):
     """A memory database stores everything to memory, providing fast IO and object
     retrieval. It should be used to buffer results and obtain SHAs before writing
     it to the actual physical storage, as it allows to query whether object already
-    exists in the target storage before introducing actual IO
-
-    **Note:** memory is currently not threadsafe, hence the async methods cannot be used
-    for storing"""
+    exists in the target storage before introducing actual IO"""
 
     def __init__(self):
         super(MemoryDB, self).__init__()
@@ -62,9 +59,6 @@ def store(self, istream):
 
         return istream
 
-    def store_async(self, reader):
-        raise UnsupportedOperation("MemoryDBs cannot currently be used for async write access")
-
     def has_object(self, sha):
        return sha in self._cache
 
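With the thread-safety caveat and store_async gone, MemoryDB is exactly what the remaining docstring describes: a synchronous in-memory buffer for obtaining SHAs before touching physical storage. A small usage sketch, assuming IStream takes (type, size, stream) as in the removed test helper; the import paths and the b"blob" type token are guesses, not verified against this commit:

    from io import BytesIO

    from gitdb.db.mem import MemoryDB
    from gitdb import IStream   # assumed export; signature as used in the removed tests

    mdb = MemoryDB()
    data = b"hello gitdb"

    # store() computes the sha and sets binsha on the IStream it was given
    istream = mdb.store(IStream(b"blob", len(data), BytesIO(data)))   # b"blob" assumed type token

    assert mdb.has_object(istream.binsha)
    assert mdb.stream(istream.binsha).read() == data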

gitdb/db/pack.py

-4
@@ -125,10 +125,6 @@ def store(self, istream):
         inefficient"""
         raise UnsupportedOperation()
 
-    def store_async(self, reader):
-        # TODO: add ObjectDBRW before implementing this
-        raise NotImplementedError()
-
     #} END object db write
 

gitdb/ext/async

-1
This file was deleted.

gitdb/test/__init__.py

-12
@@ -2,15 +2,3 @@
 #
 # This module is part of GitDB and is released under
 # the New BSD License: http://www.opensource.org/licenses/bsd-license.php
-
-import gitdb.util
-
-#{ Initialization
-def _init_pool():
-    """Assure the pool is actually threaded"""
-    size = 2
-    print("Setting ThreadPool to %i" % size)
-    gitdb.util.pool.set_size(size)
-
-
-#} END initialization

gitdb/test/db/lib.py

-83
@@ -10,8 +10,6 @@
     TestBase
 )
 
-
-
 from gitdb.stream import (
     Sha1Writer,
     ZippedStoreShaWriter
@@ -28,8 +26,6 @@
 from gitdb.utils.encoding import force_bytes
 from gitdb.utils.compat import xrange
 
-from async import IteratorReader
-
 from io import BytesIO
 
 from struct import pack
@@ -132,82 +128,3 @@ def _assert_object_writing(self, db):
         # END for each data set
         # END for each dry_run mode
 
-    def _assert_object_writing_async(self, db):
-        """Test generic object writing using asynchronous access"""
-        ni = 5000
-        def istream_generator(offset=0, ni=ni):
-            for data_src in xrange(ni):
-                data = bytes(data_src + offset)
-                yield IStream(str_blob_type, len(data), BytesIO(data))
-            # END for each item
-        # END generator utility
-
-        # for now, we are very trusty here as we expect it to work if it worked
-        # in the single-stream case
-
-        # write objects
-        reader = IteratorReader(istream_generator())
-        istream_reader = db.store_async(reader)
-        istreams = istream_reader.read()  # read all
-        assert istream_reader.task().error() is None
-        assert len(istreams) == ni
-
-        for stream in istreams:
-            assert stream.error is None
-            assert len(stream.binsha) == 20
-            assert isinstance(stream, IStream)
-        # END assert each stream
-
-        # test has-object-async - we must have all previously added ones
-        reader = IteratorReader( istream.binsha for istream in istreams )
-        hasobject_reader = db.has_object_async(reader)
-        count = 0
-        for sha, has_object in hasobject_reader:
-            assert has_object
-            count += 1
-        # END for each sha
-        assert count == ni
-
-        # read the objects we have just written
-        reader = IteratorReader( istream.binsha for istream in istreams )
-        ostream_reader = db.stream_async(reader)
-
-        # read items individually to prevent hitting possible sys-limits
-        count = 0
-        for ostream in ostream_reader:
-            assert isinstance(ostream, OStream)
-            count += 1
-        # END for each ostream
-        assert ostream_reader.task().error() is None
-        assert count == ni
-
-        # get info about our items
-        reader = IteratorReader( istream.binsha for istream in istreams )
-        info_reader = db.info_async(reader)
-
-        count = 0
-        for oinfo in info_reader:
-            assert isinstance(oinfo, OInfo)
-            count += 1
-        # END for each oinfo instance
-        assert count == ni
-
-
-        # combined read-write using a converter
-        # add 2500 items, and obtain their output streams
-        nni = 2500
-        reader = IteratorReader(istream_generator(offset=ni, ni=nni))
-        istream_to_sha = lambda istreams: [ istream.binsha for istream in istreams ]
-
-        istream_reader = db.store_async(reader)
-        istream_reader.set_post_cb(istream_to_sha)
-
-        ostream_reader = db.stream_async(istream_reader)
-
-        count = 0
-        # read it individually, otherwise we might run into the ulimit
-        for ostream in ostream_reader:
-            assert isinstance(ostream, OStream)
-            count += 1
-        # END for each ostream
-        assert count == nni
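The deleted helper exercised bulk writes and reads through the async readers; the same coverage can be reproduced with plain loops over the synchronous API. A sketch of a hypothetical replacement helper (its name, blob count and exact assertions are illustrative; it reuses the names the removed code relied on, which are assumed to remain importable in this module: IStream, str_blob_type, OInfo, OStream, xrange, force_bytes, BytesIO):

    def _assert_object_writing_sync(self, db, ni=1000):
        # Store ni small blobs synchronously, keeping the returned IStreams
        # whose binsha attribute store() fills in.
        istreams = []
        for i in xrange(ni):
            data = force_bytes(str(i))
            istreams.append(db.store(IStream(str_blob_type, len(data), BytesIO(data))))
        # END for each blob

        # Verify membership, info and stream access for every stored object.
        for istream in istreams:
            assert len(istream.binsha) == 20
            assert db.has_object(istream.binsha)
            assert isinstance(db.info(istream.binsha), OInfo)
            assert isinstance(db.stream(istream.binsha), OStream)
        # END for each stored stream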

gitdb/test/db/test_git.py

-1
@@ -48,4 +48,3 @@ def test_writing(self, path):
 
         # its possible to write objects
         self._assert_object_writing(gdb)
-        self._assert_object_writing_async(gdb)

gitdb/test/db/test_loose.py

-1
@@ -18,7 +18,6 @@ def test_basics(self, path):
 
         # write data
         self._assert_object_writing(ldb)
-        self._assert_object_writing_async(ldb)
 
         # verify sha iteration and size
         shas = list(ldb.sha_iter())

0 commit comments
