Skip to content

Commit

Permalink
Merge pull request cloudwu#221 from cloudwu/dev
Browse files Browse the repository at this point in the history
release 0.9.3
  • Loading branch information
cloudwu committed Jan 5, 2015
2 parents f15cd1d + f65ecdf commit 4a2b253
Show file tree
Hide file tree
Showing 10 changed files with 151 additions and 26 deletions.
7 changes: 7 additions & 0 deletions HISTORY.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
v0.9.3 (2015-1-5)
-----------
* Add : mongo createIndex
* Update : sproto
* bugfix : sharedata check dirty flag when len/pairs metamethod
* bugfix : multicast

v0.9.2 (2014-12-8)
-----------
* Simplify the message queue
Expand Down
13 changes: 8 additions & 5 deletions lualib-src/lua-multicast.c
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ mc_unpacklocal(lua_State *L) {
if (sz != sizeof(*pack)) {
return luaL_error(L, "Invalid multicast package size %d", sz);
}
lua_settop(L, 1);
lua_pushlightuserdata(L, *pack);
lua_pushlightuserdata(L, (*pack)->data);
lua_pushunsigned(L, (*pack)->size);
return 3;
Expand All @@ -92,6 +92,8 @@ mc_unpacklocal(lua_State *L) {
/*
lightuserdata struct mc_package **
integer reference
return mc_package *
*/
static int
mc_bindrefer(lua_State *L) {
Expand All @@ -102,16 +104,17 @@ mc_bindrefer(lua_State *L) {
}
(*pack)->reference = ref;

return 0;
lua_pushlightuserdata(L, *pack);

return 1;
}

/*
lightuserdata struct mc_package **
lightuserdata struct mc_package *
*/
static int
mc_closelocal(lua_State *L) {
struct mc_package **ptr = lua_touserdata(L,1);
struct mc_package *pack = *ptr;
struct mc_package *pack = lua_touserdata(L,1);

int ref = __sync_sub_and_fetch(&pack->reference, 1);
if (ref <= 0) {
Expand Down
28 changes: 21 additions & 7 deletions lualib/mongo.lua
Original file line number Diff line number Diff line change
Expand Up @@ -284,25 +284,39 @@ end

-- collection:createIndex({username = 1}, {unique = true})
function mongo_collection:createIndex(keys, option)
local name
for k, v in pairs(keys) do
assert(v == 1)
name = (name == nil) and k or (name .. "_" .. k)
local name = option.name
option.name = nil

if not name then
for k, v in pairs(keys) do
name = (name == nil) and k or (name .. "_" .. k)
name = name .. "_" .. v
end
end


local doc = {};
doc.name = name
doc.key = keys
for k, v in pairs(option) do
if v then
doc[k] = true
end
doc[k] = v
end
return self.database:runCommand("createIndexes", self.name, "indexes", {doc})
end

mongo_collection.ensureIndex = mongo_collection.createIndex;


--- Drop this collection from its database.
-- Forwards the MongoDB "drop" command and returns whatever runCommand yields.
function mongo_collection:drop()
	local db = self.database
	return db:runCommand("drop", self.name)
end

-- collection:dropIndex("age_1")
-- collection:dropIndex("*")
--- Drop a named index from this collection.
-- Per the MongoDB dropIndexes command, "*" drops every index.
-- @param indexName index name (e.g. "age_1") or "*"
function mongo_collection:dropIndex(indexName)
	local db = self.database
	return db:runCommand("dropIndexes", self.name, "index", indexName)
end

-- collection:findAndModify({query = {name = "userid"}, update = {["$inc"] = {nextid = 1}}, })
-- keys, value type
-- query, table
Expand Down
14 changes: 10 additions & 4 deletions lualib/sharedata/corelib.lua
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ local function genkey(self)
return key
end

function meta:__index(key)
local function getcobj(self)
local obj = self.__obj
if isdirty(obj) then
local newobj, newtbl = needupdate(self.__gcobj)
Expand All @@ -67,6 +67,11 @@ function meta:__index(key)
obj = self.__obj
end
end
return obj
end

function meta:__index(key)
local obj = getcobj(self)
local v = index(obj, key)
if type(v) == "userdata" then
local r = setmetatable({
Expand All @@ -83,11 +88,11 @@ function meta:__index(key)
end

function meta:__len()
return len(self.__obj)
return len(getcobj(self))
end

local function conf_ipairs(self, index)
local obj = self.__obj
local obj = getcobj(self)
index = index + 1
local value = rawget(self, index)
if value then
Expand All @@ -109,7 +114,8 @@ function meta:__pairs()
end

function conf.next(obj, key)
local nextkey = core.nextkey(obj.__obj, key)
local cobj = getcobj(obj)
local nextkey = core.nextkey(cobj, key)
if nextkey then
return nextkey, obj[nextkey]
end
Expand Down
2 changes: 1 addition & 1 deletion lualib/socket.lua
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ socket_message[1] = function(id, size, data)
end
else
if s.buffer_limit and sz > s.buffer_limit then
skynet.error(string.format("socket buffer overlow: fd=%d size=%d", id , sz))
skynet.error(string.format("socket buffer overflow: fd=%d size=%d", id , sz))
driver.clear(s.buffer,buffer_pool)
driver.close(id)
return
Expand Down
5 changes: 4 additions & 1 deletion lualib/sprotoparser.lua
Original file line number Diff line number Diff line change
Expand Up @@ -313,9 +313,12 @@ local function packgroup(t,p)
local tt, tp
local alltypes = {}
for name in pairs(t) do
alltypes[name] = #alltypes
table.insert(alltypes, name)
end
table.sort(alltypes) -- make result stable
for idx, name in ipairs(alltypes) do
alltypes[name] = idx - 1
end
tt = {}
for _,name in ipairs(alltypes) do
table.insert(tt, packtype(name, t[name], alltypes))
Expand Down
6 changes: 4 additions & 2 deletions service/multicastd.lua
Original file line number Diff line number Diff line change
Expand Up @@ -68,18 +68,20 @@ end
local function publish(c , source, pack, size)
local group = channel[c]
if group == nil then
-- dead channel, delete the pack
mc.bind(pack, 1)
-- dead channel, delete the pack. mc.bind returns the pointer in pack
local pack = mc.bind(pack, 1)
mc.close(pack)
return
end
mc.bind(pack, channel_n[c])
local msg = skynet.tostring(pack, size)
for k in pairs(group) do
-- the msg is a pointer to the real message, publish pointer in local is ok.
skynet.redirect(k, source, "multicast", c , msg)
end
local remote = channel_remote[c]
if remote then
-- remote publish should unpack the pack, because we should not publish the pointer out.
local _, msg, sz = mc.unpack(pack, size)
local msg = skynet.tostring(msg,sz)
for node in pairs(remote) do
Expand Down
2 changes: 0 additions & 2 deletions skynet-src/skynet_mq.c
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,6 @@ static struct global_queue *Q = NULL;
#define LOCK(q) while (__sync_lock_test_and_set(&(q)->lock,1)) {}
#define UNLOCK(q) __sync_lock_release(&(q)->lock);

#define GP(p) ((p) % MAX_GLOBAL_MQ)

void
skynet_globalmq_push(struct message_queue * queue) {
struct global_queue *q= Q;
Expand Down
90 changes: 90 additions & 0 deletions test/testmongodb.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
local skynet = require "skynet"
local mongo = require "mongo"
local bson = require "bson"

local host, db_name = ...

--- Without any unique index, inserting the same document twice must
-- succeed both times (n == 1 on each insert).
function test_insert_without_index()
	local c = mongo.client({host = host})
	local coll = c[db_name].testdb

	-- start from a clean collection: wipe indexes, then the data
	coll:dropIndex("*")
	coll:drop()

	for _ = 1, 2 do
		local ret = coll:safe_insert({test_key = 1})
		assert(ret and ret.n == 1)
	end
end

--- With a unique index on test_key, the second identical insert must be
-- rejected by the server (reported as n == 0).
function test_insert_with_index()
	local c = mongo.client({host = host})
	local coll = c[db_name].testdb

	-- start from a clean collection
	coll:dropIndex("*")
	coll:drop()

	coll:ensureIndex({test_key = 1}, {unique = true, name = "test_key_index"})

	local first = coll:safe_insert({test_key = 1})
	assert(first and first.n == 1)

	local second = coll:safe_insert({test_key = 1})
	assert(second and second.n == 0)
end

--- Insert a document, find it, delete it, then verify it is gone.
function test_find_and_remove()
	local c = mongo.client({host = host})
	local coll = c[db_name].testdb

	-- start from a clean collection
	coll:dropIndex("*")
	coll:drop()

	coll:ensureIndex({test_key = 1}, {unique = true, name = "test_key_index"})

	local inserted = coll:safe_insert({test_key = 1})
	assert(inserted and inserted.n == 1)

	local found = coll:findOne({test_key = 1})
	assert(found and found.test_key == 1)

	coll:delete({test_key = 1})

	-- after deletion the lookup must come back empty
	assert(coll:findOne({test_key = 1}) == nil)
end


--- TTL-index smoke test: documents covered by an expireAfterSeconds index
-- should eventually be reaped by the server. Polls up to 1000 times before
-- declaring failure (the TTL monitor runs on the server's own schedule).
function test_expire_index()
	local c = mongo.client({host = host})
	local coll = c[db_name].testdb

	-- start from a clean collection
	coll:dropIndex("*")
	coll:drop()

	coll:ensureIndex({test_key = 1}, {unique = true, name = "test_key_index", expireAfterSeconds = 1, })
	coll:ensureIndex({test_date = 1}, {expireAfterSeconds = 1, })

	local inserted = coll:safe_insert({test_key = 1, test_date = bson.date(os.time())})
	assert(inserted and inserted.n == 1)

	local found = coll:findOne({test_key = 1})
	assert(found and found.test_key == 1)

	-- poll: brief sleep between lookups; return as soon as the doc is gone
	local attempts = 0
	repeat
		skynet.sleep(11)
		attempts = attempts + 1
		if coll:findOne({test_key = 1}) == nil then
			return
		end
	until attempts >= 1000

	assert(false, "test expire index failed")
end

-- Service entry point: run every mongodb test case in order, then report.
skynet.start(function()
	local cases = {
		test_insert_without_index,
		test_insert_with_index,
		test_find_and_remove,
		test_expire_index,
	}
	for _, case in ipairs(cases) do
		case()
	end

	print("mongodb test finish.")
end)
10 changes: 6 additions & 4 deletions test/testmysql.lua
Original file line number Diff line number Diff line change
Expand Up @@ -63,14 +63,14 @@ local function test3( db)
local res = db:query("select * from cats order by id asc")
print ( "test3 loop times=" ,i,"\n","query result=",dump( res ) )
res = db:query("select * from cats order by id asc")
print ( "test3 loop times=" ,i,"\n","query result=",dump( res ) )
print ( "test3 loop times=" ,i,"\n","query result=",dump( res ) )
skynet.sleep(1000)
i=i+1
end
end
skynet.start(function()

local db=mysql.connect{
local db=mysql.connect{
host="127.0.0.1",
port=3306,
database="skynet",
Expand All @@ -83,8 +83,10 @@ skynet.start(function()
end
print("testmysql success to connect to mysql server")

db:query("set names utf8")

local res = db:query("drop table if exists cats")
res = db:query("create table cats "
res = db:query("create table cats "
.."(id serial primary key, ".. "name varchar(5))")
print( dump( res ) )

Expand Down Expand Up @@ -112,7 +114,7 @@ skynet.start(function()
while true do
local res = db:query("select * from cats order by id asc")
print ( "test1 loop times=" ,i,"\n","query result=",dump( res ) )

res = db:query("select * from cats order by id asc")
print ( "test1 loop times=" ,i,"\n","query result=",dump( res ) )

Expand Down

0 comments on commit 4a2b253

Please sign in to comment.