diff --git a/alembictests_origin.ini b/alembictests_origin.ini new file mode 100644 index 000000000..468c5d47b --- /dev/null +++ b/alembictests_origin.ini @@ -0,0 +1,154 @@ +### +# app configuration +# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/environment.html +### + +[alembic] +# path to migration scripts +script_location = alembic +sqlalchemy.url = postgresql+psycopg2://postgres@/lingvodoc_testing + + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +[app:main] +use = egg:lingvodoc +secret = 'secret string' + +pyramid.reload_templates = true +pyramid.debug_authorization = false +pyramid.debug_notfound = false +pyramid.debug_routematch = false +pyramid.default_locale_name = en +pyramid.includes = + pyramid_debugtoolbar + pyramid_tm + +sqlalchemy.url = postgresql+psycopg2://postgres@/lingvodoc_testing + +# By default, the toolbar only appears for clients from IP addresses +# '127.0.0.1' and '::1'. +# debugtoolbar.hosts = 127.0.0.1 ::1 + +### +# wsgi server configuration +### + +[server:main] +;use = egg:waitress#main +use = egg:gunicorn#main +workers = 3 +timeout = 3000 +proc_name = lingvodoc +bind = "0.0.0.0:6543" +host = 0.0.0.0 +port = 6543 +max_request_body_size = 2147483648 + +### +# logging configuration +# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/logging.html +### + +[loggers] +keys = root, lingvodoc, sqlalchemy + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = ERROR +handlers = console + +[logger_lingvodoc] +level = ERROR +handlers = +qualname = lingvodoc + +[logger_sqlalchemy] +level = ERROR +handlers = +qualname = sqlalchemy.engine +# "level = INFO" logs SQL queries. +# "level = DEBUG" logs SQL queries and results. +# "level = WARN" logs neither. (Recommended for production systems.) + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s + +[app:accounts] +use = egg:lingvodoc +# no emphasis or special symbols should be used. 
+administrator_login = admin +administrator_password = password + +[backend:storage] +# disk or openstack +type = disk +path = /tmp/ +prefix = http://localhost:6543/ +static_route = objects/ +#authurl = http://10.10.10.121:5000/v2.0 +#store = http://adelaide.intra.ispras.ru/horizon/project/containers +#user = admin +#key = tester +#auth_version = 2.0 +#tenant_name = admin + +[uwsgi] +socket = 0.0.0.0:6543 +protocol = http +master = true + +processes = 4 + +harakiri = 60 +harakiri-verbose = true +limit-post = 0 +post-buffering = 8192 + +listen = 256 + +max-requests = 1000 + +reload-on-as = 128 +reload-on-rss = 96 +no-orphans = true + +log-slow = true + +virtualenv = /Users/al/environments/lingvodocenv/ + +callable = lingvodoc diff --git a/lingvodoc/__init__.py b/lingvodoc/__init__.py index d768ab861..9ab3f27bb 100755 --- a/lingvodoc/__init__.py +++ b/lingvodoc/__init__.py @@ -37,15 +37,15 @@ def configure_routes(config): # web-view #GET # login page - config.add_route(name='login', pattern='/login') + config.add_route(name='login', pattern='/login') # tested # API #POST # this is the same operation as login - but params are sent via json - config.add_route(name='signin', pattern='/signin') # 100% ready + config.add_route(name='signin', pattern='/signin') config.add_route(name='cheatlogin', pattern='/cheatlogin') # web-view #POST - config.add_route(name='logout', pattern='/logout') + config.add_route(name='logout', pattern='/logout') # tested # web-view #GET config.add_route(name='dashboard', pattern='/dashboard') @@ -62,24 +62,24 @@ def configure_routes(config): # API #GET && PUT && DELETE # Gets/puts info about language config.add_route(name='language', pattern='/language/{client_id}/{object_id}', - factory='lingvodoc.models.LanguageAcl') # 100% ready + factory='lingvodoc.models.LanguageAcl') # tested # API #POST # Convert blob - config.add_route(name='convert', pattern='/convert/blob') # 100% ready + config.add_route(name='convert', pattern='/convert/blob') # TODO: test # API #POST # Convert markup - config.add_route(name='convert_markup', pattern='/convert/markup') # 100% ready + config.add_route(name='convert_markup', pattern='/convert/markup') # TODO: test # API #POST # Create language config.add_route(name='create_language', pattern='/language', - factory='lingvodoc.models.LanguageAcl') # 100% ready + factory='lingvodoc.models.LanguageAcl') # tested # API #GET # view languages list - config.add_route(name='get_languages', pattern='/languages') + config.add_route(name='get_languages', pattern='/languages') # tested # API #GET # Dictionaries list. The following filters should be supported: @@ -89,18 +89,19 @@ def configure_routes(config): # c) Organization participated # d) By language group (all the languages that point to the given one). Need to build a lang tree here. # e) Maps location (coordinates) +- radius in kilometers - config.add_route('dictionaries', 'dictionaries') # 100% ready + config.add_route('dictionaries', 'dictionaries') # TODO: test - - config.add_route(name='published_dictionaries', pattern='/published_dictionaries') + config.add_route(name='published_dictionaries', pattern='/published_dictionaries') # TODO: test # API #GET # Perspective list # 1. 
Filter by: # a) template (param is_template=true/false) # b) state (param state=) - config.add_route('all_perspectives', '/perspectives') - config.add_route('users', '/users') + config.add_route('all_perspectives', '/perspectives') # TODO: test + + # API #GET + config.add_route('users', '/users') # tested # web-view config.add_route(name='new_dictionary', pattern='/create_dictionary') @@ -111,56 +112,52 @@ def configure_routes(config): # API #POST # Creating dictionary config.add_route(name='create_dictionary', pattern='/dictionary', - factory='lingvodoc.models.DictionaryAcl') # 100% ready + factory='lingvodoc.models.DictionaryAcl') # tested # API #GET && PUT && DELETE # Gets/puts info about dictionary (name/additional authors/etc) config.add_route(name='dictionary', pattern='/dictionary/{client_id}/{object_id}', - factory='lingvodoc.models.DictionaryAcl') # 100% ready + factory='lingvodoc.models.DictionaryAcl') # tested config.add_route(name='dictionary_copy', pattern='/dictionary/{client_id}/{object_id}/copy', - factory='lingvodoc.models.DictionaryAcl') - + factory='lingvodoc.models.DictionaryAcl') # not done yet config.add_route(name='dictionary_info', pattern='/dictionary/{client_id}/{object_id}/info', - factory='lingvodoc.models.DictionaryAcl') - + factory='lingvodoc.models.DictionaryAcl') # TODO: test # API #DELETE config.add_route(name='dictionary_delete', pattern='/dictionary/{client_id}/{object_id}/delete', - factory='lingvodoc.models.AdminAcl') - + factory='lingvodoc.models.AdminAcl') # TODO: ?test? impossible with current test paradigm # web-view config.add_route(name='organizations', pattern='/organizations') - # API #GET - config.add_route(name='organization_list', pattern='/organization_list') + config.add_route(name='organization_list', pattern='/organization_list') # TODO: test # API #POST # Creating organization config.add_route(name='create_organization', pattern='/organization', - factory='lingvodoc.models.OrganizationAcl') # 100% ready + factory='lingvodoc.models.OrganizationAcl') # TODO: test # API #GET && PUT && DELETE # Gets/puts info about organization config.add_route(name='organization', pattern='/organization/{organization_id}', - factory='lingvodoc.models.OrganizationAcl') + factory='lingvodoc.models.OrganizationAcl') # TODO: test # API #GET && POST && DELETE # Gets, creates and deletes roles related to dictionary (for now: who can create and modify perspectives) # Request format: {[user id: , role_name: ]}. Get request is empty and returns list of roles. config.add_route(name='dictionary_roles', pattern='/dictionary/{client_id}/{object_id}/roles', - factory='lingvodoc.models.DictionaryRolesAcl') # 100% ready + factory='lingvodoc.models.DictionaryRolesAcl') # tested (except delete?) # API #GET && PUT # Change visibility state for dictionary. States are: 'frozen', 'WiP', 'published', 'merging' config.add_route(name='dictionary_status', pattern='/dictionary/{client_id}/{object_id}/state', - factory='lingvodoc.models.DictionaryAcl') # 100% ready + factory='lingvodoc.models.DictionaryAcl') # tested # API #GET && PUT && DELETE # Gets/puts info about perspective. 
@@ -168,20 +165,20 @@ def configure_routes(config): config.add_route(name='perspective', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' 'perspective/{perspective_client_id}/{perspective_id}', - factory='lingvodoc.models.PerspectiveAcl') + factory='lingvodoc.models.PerspectiveAcl') # tested config.add_route(name='perspective_outside', pattern='perspective/{perspective_client_id}/{perspective_id}', - factory='lingvodoc.models.PerspectiveAcl') + factory='lingvodoc.models.PerspectiveAcl') # tested # API #POST # creates hash in metadata on objects config.add_route(name='perspective_hash', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' 'perspective/{perspective_client_id}/{perspective_id}/hash', - factory='lingvodoc.models.PerspectiveAcl') + factory='lingvodoc.models.PerspectiveAcl') # TODO: ?test? only was used one time config.add_route(name='dangerous_perspectives_hash', pattern='/perspectives/hash', - factory='lingvodoc.models.AdminAcl') + factory='lingvodoc.models.AdminAcl') # TODO: ?test? same as above # API #GET && PUT && DELETE # {:{"type"", "content":},} # for geo: {"location":{"type":"location", "content":{"lat":, "lng":}}} @@ -196,34 +193,33 @@ def configure_routes(config): config.add_route(name='perspective_meta', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' 'perspective/{perspective_client_id}/{perspective_id}/meta', - factory='lingvodoc.models.PerspectiveAcl') + factory='lingvodoc.models.PerspectiveAcl') # tested config.add_route(name='perspective_tree', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' 'perspective/{perspective_client_id}/{perspective_id}/tree', - factory='lingvodoc.models.PerspectiveAcl') + factory='lingvodoc.models.PerspectiveAcl') # tested (?) config.add_route(name='perspective_outside_tree', pattern='perspective/{perspective_client_id}/{perspective_id}/tree', - factory='lingvodoc.models.PerspectiveAcl') # 100% ready - + factory='lingvodoc.models.PerspectiveAcl') # tested (?) config.add_route(name='perspective_info', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' 'perspective/{perspective_client_id}/{perspective_id}/info', - factory='lingvodoc.models.PerspectiveAcl') + factory='lingvodoc.models.PerspectiveAcl') # TODO: test # API #POST # Creating perspective config.add_route(name='create_perspective', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' 'perspective', - factory='lingvodoc.models.PerspectiveCreateAcl') # 100% ready + factory='lingvodoc.models.PerspectiveCreateAcl') # tested # API #GET # list perspectives config.add_route(name='perspectives', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}/' - 'perspectives') + 'perspectives') # TODO: test # API #GET && POST && DELETE # Gets, creates and deletes roles related to dictionary (for now: who can create entities, view entities, create @@ -233,14 +229,14 @@ def configure_routes(config): config.add_route(name='perspective_roles', pattern='/dictionary/{client_id}/{object_id}/' 'perspective/{perspective_client_id}/{perspective_id}/roles', - factory='lingvodoc.models.PerspectiveRolesAcl') # 100% ready + factory='lingvodoc.models.PerspectiveRolesAcl') # TODO: test # API #GET && PUT # Get or change visibility state for perspective. 
States are: 'frozen', 'WiP', 'published' config.add_route(name='perspective_status', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/state', - factory='lingvodoc.models.PerspectiveAcl') # 100% ready + factory='lingvodoc.models.PerspectiveAcl') # TODO: test # API #GET && POST && DELETE # Configuring columns in perspective table. @@ -266,19 +262,19 @@ def configure_routes(config): config.add_route(name='perspective_fields', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/fields', - factory='lingvodoc.models.PerspectiveAcl') # 100% ready + factory='lingvodoc.models.PerspectiveAcl') # tested (except delete, because it not used anywhere(?)) # API #POST # should be done with standard form enctype="multipart/form-data" config.add_route(name="upload_user_blob", - pattern="/blob") + pattern="/blob") # TODO: test # seems to be redundant # not anymore # API #GET # no params, returns file config.add_route(name="get_user_blob", - pattern="/blobs/{client_id}/{object_id}") + pattern="/blobs/{client_id}/{object_id}") # TODO: test # API #GET # no params, lists only own blobs config.add_route(name="list_user_blobs", @@ -289,7 +285,7 @@ def configure_routes(config): # [{'entity_type': '', 'parent_object_id': , 'parent_client_id': , # 'content': <'content'>, 'locale_id': }] config.add_route(name='get_level_one_entity', pattern='/leveloneentity/{client_id}/{object_id}', - factory='lingvodoc.models.PerspectiveEntityOneAcl') # ready 100% + factory='lingvodoc.models.PerspectiveEntityOneAcl') config.add_route(name='get_level_one_entity_indict', pattern='/dictionary/' '{dictionary_client_id}/{dictionary_object_id}' '/perspective/' @@ -298,10 +294,10 @@ def configure_routes(config): '{lexical_entry_client_id}/{lexical_entry_object_id}/' 'leveloneentity/' '{client_id}/{object_id}', - factory='lingvodoc.models.PerspectiveEntityOneAcl') # ready 100% + factory='lingvodoc.models.PerspectiveEntityOneAcl') # tested (no del) config.add_route(name='get_level_two_entity', pattern='/leveltwoentity/{client_id}/{object_id}', - factory='lingvodoc.models.PerspectiveEntityTwoAcl') # ready, not tested + factory='lingvodoc.models.PerspectiveEntityTwoAcl') # TODO: test config.add_route(name='get_level_two_entity_indict', pattern='/dictionary/' '{dictionary_client_id}/{dictionary_object_id}' '/perspective/' @@ -312,38 +308,40 @@ def configure_routes(config): '{leveloneentity_client_id}/{leveloneentity_object_id}/' 'leveltwoentity/' '{client_id}/{object_id}', - factory='lingvodoc.models.PerspectiveEntityTwoAcl') # ready, not tested + factory='lingvodoc.models.PerspectiveEntityTwoAcl') # tested (no del) # API #GET && DELETE # {entity_type: , content: , connections: [{object_id: , client_id: } - config.add_route(name='get_group_entity', pattern='/group_entity/{client_id}/{object_id}') # ready, not tested + config.add_route(name='get_group_entity', pattern='/group_entity/{client_id}/{object_id}') # in testing TODO: fix + # tags are different there and in connected words # API #GET # GET parameter: entity_type = (e.g: "etymology") - config.add_route(name='get_connected_words', pattern='/lexical_entry/{client_id}/{object_id}/connected') + config.add_route(name='get_connected_words', pattern='/lexical_entry/{client_id}/{object_id}/connected') # TODO: + # same as above config.add_route(name='get_connected_words_indict', pattern='/dictionary/' '{dictionary_client_id}/{dictionary_object_id}' 
'/perspective/' '{perspective_client_id}/{perspective_id}/' 'lexical_entry/' '{client_id}/{object_id}/' - 'connected') + 'connected') # TODO: test # API #POST (TODO: change to PATCH method later) # {entity_type: , content: , connections: [{object_id: , client_id: } - config.add_route(name='add_group_entity', pattern='/group_entity') # ready, not tested + config.add_route(name='add_group_entity', pattern='/group_entity') # tested config.add_route(name='add_group_indict', pattern='/dictionary/' '{dictionary_client_id}/{dictionary_object_id}' '/perspective/' '{perspective_client_id}/{perspective_id}/' 'lexical_entry/' - 'connect') # ready, not tested + 'connect') # TODO: test # API #GET # like # perspective_client_id # perspective_object_id - config.add_route(name='basic_search', pattern='/basic_search') + config.add_route(name='basic_search', pattern='/basic_search') # TODO: test # API #POST # {"searchstrings":[{"searchstring":, "entity_type":, "search_by_or":true/false}, @@ -354,15 +352,15 @@ def configure_routes(config): # "adopted_type":, # "count":True/False, # "with_etimology":True/False} - config.add_route(name='advanced_search', pattern='/advanced_search') + config.add_route(name='advanced_search', pattern='/advanced_search') # TODO: test # API #GET # like - config.add_route(name='entity_metadata_search', pattern='/meta_search') + config.add_route(name='entity_metadata_search', pattern='/meta_search') # TODO: test # API #GET # like - config.add_route(name='basic_search_old', pattern='/basic_search_old') + config.add_route(name='basic_search_old', pattern='/basic_search_old') # TODO: remove # API #POST @@ -371,12 +369,12 @@ def configure_routes(config): config.add_route(name='create_lexical_entry', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/' 'lexical_entry', - factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # ready, tested + factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # tested config.add_route(name='create_lexical_entry_bulk', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/' 'lexical_entries', - factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # ready + factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # tested # API #POST # {'entity_type': , 'content': , 'locale_id': , 'metadata': } @@ -385,11 +383,11 @@ def configure_routes(config): '/perspective/{perspective_client_id}/{perspective_id}/' 'lexical_entry/{lexical_entry_client_id}/' '{lexical_entry_object_id}/leveloneentity', - factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # ready, tested + factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # tested config.add_route(name='create_entities_bulk', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/entities', - factory='lingvodoc.models.LexicalEntriesEntitiesAcl') + factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # TODO: test # API #POST # {'entity_type': , 'content': , 'locale_id': , 'metadata': } @@ -399,47 +397,49 @@ def configure_routes(config): 'lexical_entry/{lexical_entry_client_id}/' '{lexical_entry_object_id}/leveloneentity/{level_one_client_id}/' '{level_one_object_id}/leveltwoentity', - factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # ready, not tested + factory='lingvodoc.models.LexicalEntriesEntitiesAcl') # tested # API #GET # params: start_from=M, count=N, sort_by= 
config.add_route(name='lexical_entries_all', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/all', - factory='lingvodoc.models.PerspectiveLexicalViewAcl') # filter not ready + factory='lingvodoc.models.PerspectiveLexicalViewAcl') # TODO: test config.add_route(name='lexical_entries_all_count', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/all_count', - factory='lingvodoc.models.PerspectiveLexicalViewAcl') + factory='lingvodoc.models.PerspectiveLexicalViewAcl') # tested config.add_route(name='lexical_entries_published', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/published', - factory='lingvodoc.models.PerspectivePublishAcl') # filter not ready + factory='lingvodoc.models.PerspectivePublishAcl') # TODO: test config.add_route(name='lexical_entries_published_count', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/published_count', - factory='lingvodoc.models.PerspectivePublishAcl') # filter not ready + factory='lingvodoc.models.PerspectivePublishAcl') # made only return list of ids, because another route fol full info exist # API #GET, DELETE # all children config.add_route(name='lexical_entry', pattern='/lexical_entry/{client_id}/{object_id}', - factory='lingvodoc.models.LexicalViewAcl') # ready, not tested + factory='lingvodoc.models.LexicalViewAcl') # TODO: test config.add_route(name='lexical_entry_in_perspective', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/' 'lexical_entry/{client_id}/{object_id}', - factory='lingvodoc.models.LexicalViewAcl') + factory='lingvodoc.models.LexicalViewAcl') # TODO: test # API #PATCH # Publishers view: this can approve word versions. # [{"type": , "client_id": , "object_id": , "enabled": }, ] config.add_route(name='approve_entity', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/approve', - factory='lingvodoc.models.PerspectivePublishAcl') # ready, not tested + factory='lingvodoc.models.PerspectivePublishAcl') # TODO: test + # todo: DANGEROUS! Anyone can approve, if the have their own dictionary and know ids of entity, they want to approve + # todo: fix this config.add_route(name='approve_all', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/approve_all', - factory='lingvodoc.models.PerspectivePublishAcl') # ready, not tested + factory='lingvodoc.models.PerspectivePublishAcl') # TODO: test config.add_route(name='approve_all_outer', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' '/perspective/{perspective_client_id}/{perspective_id}/approve_all_outer', - factory='lingvodoc.models.PerspectivePublishAcl') # ready, not tested + factory='lingvodoc.models.PerspectivePublishAcl') # TODO: test # web-view config.add_route(name='edit_dictionary', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' @@ -489,7 +489,7 @@ def configure_routes(config): # {"dictionary_client_id": , "dictionary_object_id": } # ] # Returns new dictionary client and object ids. 
- config.add_route(name='merge_dictionaries', pattern='/merge/dictionaries') # not tested + config.add_route(name='merge_dictionaries', pattern='/merge/dictionaries') # TODO: test # API #POST # { @@ -510,7 +510,7 @@ def configure_routes(config): # ] # } # Returns new perspective object and client ids. - config.add_route(name='merge_perspectives', pattern='/merge/perspectives') + config.add_route(name='merge_perspectives', pattern='/merge/perspectives') # TODO: test # API #POST # {'entity_type_primary':, @@ -520,9 +520,9 @@ def configure_routes(config): # 'client_id' : , # 'object_id' : # } - config.add_route(name='merge_suggestions', pattern='/merge/suggestions') + config.add_route(name='merge_suggestions', pattern='/merge/suggestions') # TODO: test - config.add_route(name='merge_suggestions_old', pattern='/merge/suggestionsold/' + config.add_route(name='merge_suggestions_old', pattern='/merge/suggestionsold/' # should be removed? '{dictionary_client_id_1}/{dictionary_object_id_1}/' '{perspective_client_id_1}/{perspective_object_id_1}/' '{dictionary_client_id_2}/{dictionary_object_id_2}/' @@ -533,7 +533,7 @@ def configure_routes(config): # API #PATCH # {'client_id':, 'object_id':, 'real_delete':true/false} id's of entry where moving to config.add_route(name='move_lexical_entry', pattern='/lexical_entry/{client_id}/{object_id}/move', - factory='lingvodoc.models.LexicalViewAcl') + factory='lingvodoc.models.LexicalViewAcl') # TODO: test # {real_delete':true/false, # 'move_list':[{'client_id': , @@ -541,7 +541,7 @@ def configure_routes(config): # 'lexical_entries': [{'client_id': , 'object_id': }, ]} # ,] # } - config.add_route(name='move_lexical_entry_bulk', pattern='/move/lexical_entries') + config.add_route(name='move_lexical_entry_bulk', pattern='/move/lexical_entries') # TODO: test # web-view config.add_route(name='merge_master', pattern='/dashboard/merge') @@ -549,16 +549,16 @@ def configure_routes(config): # API #GET # Response example: # [{"id": , "login": , "name": , "intl_name": , "userpic": }, ] - config.add_route(name='dictionary_authors', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}') # 0% ready + config.add_route(name='dictionary_authors', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}') # TODO: do it config.add_route(name='perspective_authors', pattern='/dictionary/{dictionary_client_id}/{dictionary_object_id}' - '/perspective/{perspective_client_id}/{perspective_id}') # 0% ready + '/perspective/{perspective_client_id}/{perspective_id}') # TODO: just do it # API #GET # params: # object_type = # client_id = # object_id = - config.add_route(name='get_object_info', pattern='/info') # 0% ready + config.add_route(name='get_object_info', pattern='/info') # TODO:IT # API #GET # This methods gets info about user by his client_id @@ -569,12 +569,11 @@ def configure_routes(config): # {"client_id": OR "user_id":, # "new_password": , "old_password":, # "name":, "birthday":, "email":, "about":} - config.add_route(name='get_user_info', pattern='/user') # ready, not tested + config.add_route(name='get_user_info', pattern='/user') # tested # API #GET # Returns translations for a list of words for current or default or fallback locale # ["translation_string", "translation_string2", ...] 
- config.add_route(name='get_translations', pattern='/translations') # ready, not tested - + config.add_route(name='get_translations', pattern='/translations') # TODO: completely redo translation part # web-view #GET config.add_route(name='blob_upload', pattern='/blob_upload') @@ -582,8 +581,8 @@ def configure_routes(config): # API #POST # params: # {"blob_client_id": , "blob_object_id": , "parent_client_id": , "parent_object_id": } - config.add_route(name='convert_dictionary', pattern='/convert') - config.add_route(name='convert_dictionary_check', pattern='/convert_check') + config.add_route(name='convert_dictionary', pattern='/convert') # TODO: test + config.add_route(name='convert_dictionary_check', pattern='/convert_check') # TODO: test def main(global_config, **settings): @@ -614,8 +613,6 @@ def main(global_config, **settings): configure_routes(config) config.add_route('testing', '/testing') # config.add_route('example', 'some/route/{object_id}/{client_id}/of/perspective', factory = 'lingvodoc.models.DictAcl') - - # config.add_route('home', '/') # config.add_route('login', 'login') # config.add_route('logout', 'logout') diff --git a/lingvodoc/models.py b/lingvodoc/models.py index 7dbaac236..2ea9c83b3 100755 --- a/lingvodoc/models.py +++ b/lingvodoc/models.py @@ -212,7 +212,7 @@ class CompositeIdMixin(object): """ #object_id = Column(BigInteger, primary_key=True) object_id = Column(SLBigInteger(), primary_key=True, autoincrement=True) - client_id = Column(BigInteger, primary_key=True) + client_id = Column(BigInteger, primary_key=True) # SLBigInteger() ? class CompositeKeysHelper(object): @@ -847,7 +847,6 @@ def acl_by_groups(object_id, client_id, subject): base_group = group.parent if group.subject_override: group_name = base_group.action + ":" + base_group.subject + ":" + str(group.subject_override) - group_name = base_group.action + ":" + base_group.subject + ":" + str(group.subject_override) else: group_name = base_group.action + ":" + base_group.subject \ + ":" + str(group.subject_client_id) + ":" + str(group.subject_object_id) diff --git a/lingvodoc/scripts/approve.py b/lingvodoc/scripts/approve.py index 2d0069bc7..8555bd6bc 100644 --- a/lingvodoc/scripts/approve.py +++ b/lingvodoc/scripts/approve.py @@ -64,7 +64,7 @@ def approve(session, server_url,dictionary_client_id, dictionary_object_id, clie return {'time': time() - start} -def approve_all_outer(login, password_hash,dictionary_client_id, dictionary_object_id, +def approve_all_outer(login, password_hash, dictionary_client_id, dictionary_object_id, perspective_client_id, perspective_object_id, server_url="http://localhost:6543/"): log = logging.getLogger(__name__) log.debug("Starting convert_one") diff --git a/lingvodoc/views.py b/lingvodoc/views.py index 9b8143f47..5ff453087 100755 --- a/lingvodoc/views.py +++ b/lingvodoc/views.py @@ -113,7 +113,7 @@ def __str__(self): @view_config(route_name='entity_metadata_search', renderer='json', request_method='GET') -def entity_metadata_search(request): +def entity_metadata_search(request): # TODO: test # TODO: add same check for permission as in basic_search searchstring = request.params.get('searchstring') if type(searchstring) != str: @@ -143,7 +143,7 @@ def entity_metadata_search(request): @view_config(route_name='basic_search_old', renderer='json', request_method='GET') -def basic_search_old(request): +def basic_search_old(request): # TODO: test? 
should not be used anywhere searchstring = request.params.get('leveloneentity') results_cursor = DBSession.query(LevelOneEntity).filter(LevelOneEntity.content.like('%'+searchstring+'%')).all() results = [] @@ -172,7 +172,7 @@ def basic_search_old(request): @view_config(route_name='basic_search', renderer='json', request_method='GET') -def basic_search(request): +def basic_search(request): # TODO: test can_add_tags = request.params.get('can_add_tags') searchstring = request.params.get('leveloneentity') perspective_client_id = request.params.get('perspective_client_id') @@ -239,7 +239,7 @@ def basic_search(request): @view_config(route_name='advanced_search', renderer='json', request_method='POST') -def advanced_search(request): +def advanced_search(request): # TODO: test req = request.json perspectives = req.get('perspectives') searchstrings = req.get('searchstrings') or [] @@ -267,9 +267,9 @@ def advanced_search(request): request.response.status = HTTPBadRequest.code return {'error': 'No search'} length_search = [len(o['searchstring']) for o in searchstrings if len(o['searchstring']) >= 1] - if length_search == []: - request.response.status = HTTPBadRequest.code - return {'error': 'search is too short'} + # if length_search == []: + # request.response.status = HTTPBadRequest.code + # return {'error': 'search is too short'} if adopted is not None: if adopted: sub = results_cursor.subquery() @@ -347,6 +347,7 @@ def advanced_search(request): if searchstring: if len(searchstring) >= 1: searchstring = searchstring.split(' ') + print(searchstring) if entity_type: if search_by_or: new_results_cursor = DBSession.query(sublexes)\ @@ -359,6 +360,8 @@ def advanced_search(request): LevelOneEntity.marked_for_deletion == False, or_(*[LevelOneEntity.content.like('%'+name+'%') for name in searchstring]), LevelOneEntity.entity_type == entity_type).union_all(new_results_cursor) + for e in new_results_cursor: + print(e) # .filter(or_(*[MyTable.my_column.like(name) for name in foo])) else: new_results_cursor = DBSession.query(sublexes)\ @@ -371,6 +374,8 @@ def advanced_search(request): LevelOneEntity.marked_for_deletion == False, and_(*[LevelOneEntity.content.like('%'+name+'%') for name in searchstring]), LevelOneEntity.entity_type == entity_type).union_all(new_results_cursor) + for e in new_results_cursor: + print(e) else: if search_by_or: new_results_cursor = DBSession.query(sublexes)\ @@ -382,6 +387,13 @@ def advanced_search(request): .filter(PublishLevelOneEntity.marked_for_deletion == False, LevelOneEntity.marked_for_deletion == False, or_(*[LevelOneEntity.content.like('%'+name+'%') for name in searchstring])).union_all(new_results_cursor) + for e in new_results_cursor: + print(e) + what = DBSession.query(LevelOneEntity)\ + .filter( + or_(*[LevelOneEntity.content.like('%'+name+'%') for name in searchstring])) + for e in what: + print(e.client_id, e.object_id) # .filter(or_(*[MyTable.my_column.like(name) for name in foo])) else: new_results_cursor = DBSession.query(sublexes)\ @@ -393,6 +405,8 @@ def advanced_search(request): .filter(PublishLevelOneEntity.marked_for_deletion == False, LevelOneEntity.marked_for_deletion == False, and_(*[LevelOneEntity.content.like('%'+name+'%') for name in searchstring])).union_all(new_results_cursor) + for e in new_results_cursor: + print(e) results_cursor = new_results_cursor results_cursor = results_cursor.group_by(LexicalEntry) # return {'result': str(results_cursor.statement)} @@ -425,7 +439,7 @@ def advanced_search(request): 
@view_config(route_name='convert_dictionary_check', renderer='json', request_method='POST') -def convert_dictionary_check(request): +def convert_dictionary_check(request): # TODO: test import sqlite3 req = request.json_body @@ -460,7 +474,7 @@ def convert_dictionary_check(request): @view_config(route_name='convert_dictionary', renderer='json', request_method='POST') -def convert_dictionary(request): +def convert_dictionary(request): # TODO: test req = request.json_body client_id = req['blob_client_id'] @@ -501,7 +515,7 @@ def convert_dictionary(request): @view_config(route_name='language', renderer='json', request_method='GET') -def view_language(request): +def view_language(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -548,7 +562,7 @@ def language_info(lang, request): @view_config(route_name='get_languages', renderer='json', request_method='GET') -def view_languages_list(request): +def view_languages_list(request): # tested & in docs response = dict() langs = [] languages = DBSession.query(Language).filter_by(parent = None).all() @@ -562,7 +576,7 @@ def view_languages_list(request): @view_config(route_name='language', renderer='json', request_method='PUT') -def edit_language(request): +def edit_language(request): # tested & in docs try: response = dict() client_id = request.matchdict.get('client_id') @@ -590,7 +604,7 @@ def edit_language(request): @view_config(route_name='language', renderer='json', request_method='DELETE', permission='delete') -def delete_language(request): +def delete_language(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -605,7 +619,7 @@ def delete_language(request): @view_config(route_name='create_language', renderer='json', request_method='POST', permission='create') -def create_language(request): +def create_language(request): # tested & in docs try: variables = {'auth': request.authenticated_userid} @@ -660,7 +674,7 @@ def create_language(request): @view_config(route_name='dictionary', renderer='json', request_method='GET') # Authors -- names of users, who can edit? 
-def view_dictionary(request): +def view_dictionary(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -683,7 +697,7 @@ def view_dictionary(request): @view_config(route_name='dictionary', renderer='json', request_method='PUT', permission='edit') -def edit_dictionary(request): +def edit_dictionary(request): # tested & in docs try: response = dict() client_id = request.matchdict.get('client_id') @@ -719,7 +733,7 @@ def edit_dictionary(request): @view_config(route_name='dictionary', renderer='json', request_method='DELETE', permission='delete') -def delete_dictionary(request): +def delete_dictionary(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -734,7 +748,7 @@ def delete_dictionary(request): @view_config(route_name='create_dictionary', renderer='json', request_method='POST', permission='create') -def create_dictionary(request): +def create_dictionary(request): # tested & in docs try: variables = {'auth': request.authenticated_userid} @@ -788,7 +802,7 @@ def create_dictionary(request): @view_config(route_name='dictionary_status', renderer='json', request_method='GET') -def view_dictionary_status(request): +def view_dictionary_status(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -803,7 +817,7 @@ def view_dictionary_status(request): @view_config(route_name='dictionary_status', renderer='json', request_method='PUT', permission='edit') -def edit_dictionary_status(request): +def edit_dictionary_status(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -826,7 +840,7 @@ def edit_dictionary_status(request): @view_config(route_name='dictionary_copy', renderer='json', request_method='POST', permission='edit') -def copy_dictionary(request): +def copy_dictionary(request): # TODO: test. or not. was this ever finished? response = dict() parent_client_id = request.matchdict.get('client_id') parent_object_id = request.matchdict.get('object_id') @@ -1067,7 +1081,7 @@ def copy_dictionary(request): @view_config(route_name='dictionary_delete', renderer='json', request_method='DELETE', permission='delete') -def real_delete_dictionary(request): +def real_delete_dictionary(request): # TODO: test. how? 
response = dict() parent_client_id = request.matchdict.get('client_id') parent_object_id = request.matchdict.get('object_id') @@ -1162,8 +1176,7 @@ def view_perspective_from_object(request, perspective): @view_config(route_name='perspective', renderer='json', request_method='GET') @view_config(route_name='perspective_outside', renderer='json', request_method='GET') -def view_perspective(request): - # response = dict() +def view_perspective(request): # tested & in docs client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') parent_client_id = request.matchdict.get('dictionary_client_id') @@ -1176,49 +1189,6 @@ def view_perspective(request): return {'error': str("No such dictionary in the system")} perspective = DBSession.query(DictionaryPerspective).filter_by(client_id=client_id, object_id=object_id).first() - # if perspective: - # if not perspective.marked_for_deletion: - # if parent: - # if perspective.parent != parent: - # request.response.status = HTTPNotFound.code - # return {'error': str("No such pair of dictionary/perspective in the system")} - # response['parent_client_id'] = perspective.parent_client_id - # response['parent_object_id'] = perspective.parent_object_id - # response['client_id'] = perspective.client_id - # response['object_id'] = perspective.object_id - # translation_string = perspective.get_translation(request) - # response['translation_string'] = translation_string['translation_string'] - # response['translation'] = translation_string['translation'] - # response['status'] = perspective.state - # response['marked_for_deletion'] = perspective.marked_for_deletion - # response['is_template'] = perspective.is_template - # response['additional_metadata'] = perspective.additional_metadata - # if perspective.additional_metadata: - # meta = json.loads(perspective.additional_metadata) - # if 'location' in meta: - # response['location'] = meta['location'] - # if 'info' in meta: - # response['info'] = meta['info'] - # remove_list = [] - # info_list = response['info']['content'] - # for info in info_list: - # content = info['info']['content'] - # path = request.route_url('get_user_blob', - # client_id=content['client_id'], - # object_id=content['object_id']) - # subreq = Request.blank(path) - # subreq.method = 'GET' - # subreq.headers = request.headers - # resp = request.invoke_subrequest(subreq) - # if 'error' not in resp.json: - # info['info']['content'] = resp.json - # else: - # if info not in remove_list: - # remove_list.append(info) - # for info in remove_list: - # info_list.remove(info) - # request.response.status = HTTPOk.code - # return response response = view_perspective_from_object(request, perspective) if 'error' in response: request.response.status = HTTPNotFound.code @@ -1227,10 +1197,9 @@ def view_perspective(request): return response - @view_config(route_name='perspective_tree', renderer='json', request_method='GET') @view_config(route_name='perspective_outside_tree', renderer='json', request_method='GET') -def view_perspective_tree(request): +def view_perspective_tree(request): # tested & in docs response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1296,7 +1265,7 @@ def view_perspective_tree(request): @view_config(route_name='perspective_meta', renderer='json', request_method='PUT', permission='edit') -def edit_perspective_meta(request): +def edit_perspective_meta(request): # tested & in docs response = dict() client_id = 
request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1331,7 +1300,7 @@ def edit_perspective_meta(request): @view_config(route_name='perspective_meta', renderer='json', request_method='DELETE', permission='edit') -def delete_perspective_meta(request): +def delete_perspective_meta(request): # tested & in docs response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1366,7 +1335,7 @@ def delete_perspective_meta(request): @view_config(route_name='perspective_meta', renderer='json', request_method='GET') -def view_perspective_meta(request): +def view_perspective_meta(request): # tested & in docs response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1392,9 +1361,8 @@ def view_perspective_meta(request): return {'error': str("No such perspective in the system")} - @view_config(route_name='perspective', renderer='json', request_method='PUT', permission='edit') -def edit_perspective(request): +def edit_perspective(request): # tested & in docs try: response = dict() client_id = request.matchdict.get('perspective_client_id') @@ -1438,7 +1406,7 @@ def edit_perspective(request): @view_config(route_name='perspective', renderer='json', request_method='DELETE', permission='delete') -def delete_perspective(request): +def delete_perspective(request): # tested & in docs response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1463,7 +1431,7 @@ def delete_perspective(request): @view_config(route_name='perspectives', renderer='json', request_method='GET') -def view_perspectives(request): +def view_perspectives(request): # tested & in docs response = dict() parent_client_id = request.matchdict.get('dictionary_client_id') parent_object_id = request.matchdict.get('dictionary_object_id') @@ -1490,7 +1458,7 @@ def view_perspectives(request): @view_config(route_name = 'create_perspective', renderer = 'json', request_method = 'POST', permission='create') -def create_perspective(request): +def create_perspective(request): # tested & in docs try: variables = {'auth': authenticated_userid(request)} parent_client_id = request.matchdict.get('dictionary_client_id') @@ -1566,7 +1534,7 @@ def create_perspective(request): @view_config(route_name = 'perspective_status', renderer = 'json', request_method = 'GET') -def view_perspective_status(request): +def view_perspective_status(request): # tested & in docs response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1591,7 +1559,7 @@ def view_perspective_status(request): @view_config(route_name = 'perspective_status', renderer = 'json', request_method = 'PUT', permission='edit') -def edit_perspective_status(request): +def edit_perspective_status(request): # tested & in docs response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1621,7 +1589,7 @@ def edit_perspective_status(request): @view_config(route_name = 'dictionary_roles', renderer = 'json', request_method = 'GET', permission='view') -def view_dictionary_roles(request): +def view_dictionary_roles(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -1654,7 +1622,7 @@ def view_dictionary_roles(request): 
@view_config(route_name = 'dictionary_roles', renderer = 'json', request_method = 'POST', permission='create') -def edit_dictionary_roles(request): +def edit_dictionary_roles(request): # tested & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -1749,7 +1717,7 @@ def edit_dictionary_roles(request): @view_config(route_name = 'dictionary_roles', renderer = 'json', request_method = 'DELETE', permission='delete') -def delete_dictionary_roles(request): +def delete_dictionary_roles(request): # & in docs response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -1843,7 +1811,7 @@ def delete_dictionary_roles(request): @view_config(route_name = 'perspective_roles', renderer = 'json', request_method = 'GET', permission='view') -def view_perspective_roles(request): +def view_perspective_roles(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1883,7 +1851,7 @@ def view_perspective_roles(request): @view_config(route_name = 'perspective_roles', renderer = 'json', request_method = 'POST', permission='create') -def edit_perspective_roles(request): +def edit_perspective_roles(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -1985,7 +1953,7 @@ def edit_perspective_roles(request): @view_config(route_name = 'perspective_roles', renderer = 'json', request_method = 'DELETE', permission='delete') -def delete_perspective_roles(request): +def delete_perspective_roles(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -2086,7 +2054,7 @@ def delete_perspective_roles(request): @view_config(route_name='signin', renderer='json', request_method='POST') -def signin(request): +def signin(request): # TODO: find out if it used anywhere. And the get rid of it next = request.params.get('next') or request.route_url('home') req = request.json_body login = req['login'] @@ -2263,7 +2231,7 @@ def group_by_organizations(dicts, request): @view_config(route_name = 'published_dictionaries', renderer = 'json', request_method='POST') -def published_dictionaries_list(request): +def published_dictionaries_list(request): # tested. # TODO: test with org req = request.json_body response = dict() group_by_org = None @@ -2324,7 +2292,7 @@ def published_dictionaries_list(request): @view_config(route_name = 'dictionaries', renderer = 'json', request_method='POST') -def dictionaries_list(request): +def dictionaries_list(request): # TODO: test req = request.json_body response = dict() user_created = None @@ -2456,7 +2424,8 @@ def dictionaries_list(request): dicts = prevdicts else: dicts = DBSession.query(Dictionary).filter(sqlalchemy.sql.false()) - # TODO: fix + # TODO: fix. 
+ # TODO: start writing todos with more information dictionaries = list() # dictionaries = [{'object_id':o.object_id,'client_id':o.client_id, 'translation': o.get_translation(request)['translation'],'translation_string': o.get_translation(request)['translation_string'], 'status':o.state,'parent_client_id':o.parent_client_id,'parent_object_id':o.parent_object_id} for o in dicts] dicts = dicts.order_by("client_id", "object_id") @@ -2478,7 +2447,7 @@ def dictionaries_list(request): @view_config(route_name='all_perspectives', renderer = 'json', request_method='GET') -def perspectives_list(request): +def perspectives_list(request): # tested response = dict() is_template = None try: @@ -2522,8 +2491,9 @@ def perspectives_list(request): return response + @view_config(route_name='users', renderer='json', request_method='GET') -def users_list(request): +def users_list(request): # tested response = dict() search = None try: @@ -2550,7 +2520,7 @@ def users_list(request): @view_config(route_name='perspective_fields', renderer='json', request_method='GET') -def view_perspective_fields(request): +def view_perspective_fields(request): # tested response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -2581,9 +2551,9 @@ def view_perspective_fields(request): ent_type = field2.get_entity_type(request) data2['entity_type'] = ent_type['translation_string'] data2['entity_type_translation'] = ent_type['translation'] - data_type = field2.get_data_type(request) - data2['data_type'] = data_type['translation_string'] - data2['data_type_translation'] = data_type['translation'] + data_type2 = field2.get_data_type(request) + data2['data_type'] = data_type2['translation_string'] + data2['data_type_translation'] = data_type2['translation'] data2['status'] = field2.state data2['position'] = field2.position data2['level'] = field2.level @@ -2607,7 +2577,7 @@ def view_perspective_fields(request): @view_config(route_name='perspective_fields', renderer = 'json', request_method='DELETE', permission='edit') -def delete_perspective_fields(request): +def delete_perspective_fields(request): # tested response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -2631,7 +2601,7 @@ def delete_perspective_fields(request): @view_config(route_name='perspective_fields', renderer='json', request_method='POST', permission='edit') -def create_perspective_fields(request): +def create_perspective_fields(request): # tested # TODO: stop recreating fields. 
Needs to be done both there and in web try: variables = {'auth': authenticated_userid(request)} @@ -2690,7 +2660,7 @@ def create_perspective_fields(request): field2.position = subentry['position'] translation = subentry['data_type'] if 'data_type_translation' in subentry: - translation = entry['data_type_translation'] + translation = subentry['data_type_translation'] field2.set_data_type(request, translation, subentry['data_type']) translation = subentry['entity_type'] if 'entity_type_translation' in subentry: @@ -2780,8 +2750,9 @@ def create_object(request, content, obj, data_type, filename, json_input=True): filename)) return real_location, url + @view_config(route_name='upload_user_blob', renderer='json', request_method='POST') -def upload_user_blob(request): +def upload_user_blob(request): # TODO: test variables = {'auth': authenticated_userid(request)} response = dict() filename = request.POST['blob'].filename @@ -2817,7 +2788,7 @@ class Object(object): @view_config(route_name='get_user_blob', renderer='json', request_method='GET') -def get_user_blob(request): +def get_user_blob(request): # TODO: test variables = {'auth': authenticated_userid(request)} response = dict() client_id = request.matchdict.get('client_id') @@ -2844,7 +2815,7 @@ def get_user_blob(request): @view_config(route_name='list_user_blobs', renderer='json', request_method='GET') -def list_user_blobs(request): +def list_user_blobs(request): # TODO: test variables = {'auth': authenticated_userid(request)} # user_client_ids = [cl_id.id for cl_id in DBSession.query(Client).filter_by(id=variables['auth']).all()] # user_blobs = DBSession.query(UserBlobs).filter_by(client_id.in_(user_client_ids)).all() @@ -2857,7 +2828,7 @@ def list_user_blobs(request): @view_config(route_name='create_level_one_entity', renderer='json', request_method='POST', permission='create') -def create_l1_entity(request): +def create_l1_entity(request): # tested try: variables = {'auth': authenticated_userid(request)} response = dict() @@ -2927,7 +2898,7 @@ def create_l1_entity(request): @view_config(route_name='create_entities_bulk', renderer='json', request_method='POST', permission='create') -def create_entities_bulk(request): +def create_entities_bulk(request): # TODO: test try: variables = {'auth': authenticated_userid(request)} response = dict() @@ -3014,7 +2985,7 @@ def create_entities_bulk(request): @view_config(route_name='get_level_one_entity_indict', renderer='json', request_method='GET', permission='view') @view_config(route_name='get_level_one_entity', renderer='json', request_method='GET', permission='view') -def view_l1_entity(request): +def view_l1_entity(request): # tested response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3031,7 +3002,7 @@ def view_l1_entity(request): @view_config(route_name='get_level_one_entity_indict', renderer='json', request_method='DELETE', permission='delete') @view_config(route_name='get_level_one_entity', renderer='json', request_method='DELETE', permission='delete') -def delete_l1_entity(request): +def delete_l1_entity(request): # TODO: test response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3049,14 +3020,14 @@ def delete_l1_entity(request): @view_config(route_name='get_level_two_entity_indict', renderer='json', request_method='GET', permission='view') @view_config(route_name='get_level_two_entity', renderer='json', request_method='GET', permission='view') -def 
view_l2_entity(request): +def view_l2_entity(request): # tested response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') entity = DBSession.query(LevelTwoEntity).filter_by(client_id=client_id, object_id=object_id).first() if entity: if not entity.marked_for_deletion: - + # TODO: use track response['entity_type'] = entity.entity_type response['parent_client_id'] = entity.parent_client_id response['parent_object_id'] = entity.parent_object_id @@ -3070,7 +3041,7 @@ def view_l2_entity(request): @view_config(route_name='get_level_two_entity_indict', renderer='json', request_method='DELETE', permission='delete') @view_config(route_name='get_level_two_entity', renderer='json', request_method='DELETE', permission='delete') -def delete_l2_entity(request): +def delete_l2_entity(request): # TODO: test response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3087,7 +3058,7 @@ def delete_l2_entity(request): @view_config(route_name='create_level_two_entity', renderer='json', request_method='POST', permission='create') -def create_l2_entity(request): +def create_l2_entity(request): # tested try: variables = {'auth': authenticated_userid(request)} @@ -3159,7 +3130,7 @@ def create_l2_entity(request): @view_config(route_name='get_group_entity', renderer='json', request_method='GET') -def view_group_entity(request): +def view_group_entity(request): # TODO: test response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3186,7 +3157,7 @@ def view_group_entity(request): @view_config(route_name='get_connected_words', renderer='json', request_method='GET') @view_config(route_name='get_connected_words_indict', renderer='json', request_method='GET') -def view_connected_words(request): +def view_connected_words(request): # tested, found some shit(tags here are not the same, as in view_group_entity) # TODO: fix response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3258,7 +3229,7 @@ def view_connected_words(request): @view_config(route_name='get_group_entity', renderer='json', request_method='DELETE', permission='delete') -def delete_group_entity(request): +def delete_group_entity(request): # TODO: test response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3275,7 +3246,7 @@ def delete_group_entity(request): @view_config(route_name='add_group_indict', renderer='json', request_method='POST') # TODO: check for permission @view_config(route_name='add_group_entity', renderer='json', request_method='POST') -def create_group_entity(request): +def create_group_entity(request): # tested try: variables = {'auth': authenticated_userid(request)} response = dict() @@ -3290,7 +3261,6 @@ def create_group_entity(request): tags = [] if 'tag' in req: tags += [req['tag']] - for par in req['connections']: parent = DBSession.query(LexicalEntry).\ filter_by(client_id=par['client_id'], object_id=par['object_id']).first() @@ -3334,7 +3304,7 @@ def create_group_entity(request): @view_config(route_name='create_lexical_entry', renderer='json', request_method='POST', permission='create') -def create_lexical_entry(request): +def create_lexical_entry(request): # tested try: dictionary_client_id = request.matchdict.get('dictionary_client_id') dictionary_object_id = request.matchdict.get('dictionary_object_id') @@ -3375,8 +3345,6 @@ def 
create_lexical_entry(request): request.response.status = HTTPConflict.code return {'error': str(e)} - - # @view_config(route_name='lexical_entry_in_perspective', renderer='json', request_method='DELETE', permission='delete')#, permission='view') # @view_config(route_name='lexical_entry', renderer='json', request_method='DELETE', permission='delete') # def delete_lexical_entry(request): @@ -3394,14 +3362,14 @@ def create_lexical_entry(request): @view_config(route_name='create_lexical_entry_bulk', renderer='json', request_method='POST', permission='create') -def create_lexical_entry_bulk(request): +def create_lexical_entry_bulk(request): # TODO: test try: dictionary_client_id = request.matchdict.get('dictionary_client_id') dictionary_object_id = request.matchdict.get('dictionary_object_id') perspective_client_id = request.matchdict.get('perspective_client_id') perspective_id = request.matchdict.get('perspective_id') - count = request.json_body.get('count') + count = request.json_body.get('count') or 0 variables = {'auth': request.authenticated_userid} @@ -3445,7 +3413,7 @@ def create_lexical_entry_bulk(request): @view_config(route_name='lexical_entries_all', renderer='json', request_method='GET', permission='view') -def lexical_entries_all(request): +def lexical_entries_all(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -3484,7 +3452,7 @@ def lexical_entries_all(request): @view_config(route_name='lexical_entries_all_count', renderer='json', request_method='GET', permission='view') -def lexical_entries_all_count(request): +def lexical_entries_all_count(request): # tested response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -3505,7 +3473,7 @@ def lexical_entries_all_count(request): @view_config(route_name='lexical_entries_published', renderer='json', request_method='GET', permission='view') -def lexical_entries_published(request): +def lexical_entries_published(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -3551,7 +3519,7 @@ def lexical_entries_published(request): @view_config(route_name='lexical_entries_published_count', renderer='json', request_method='GET', permission='view') -def lexical_entries_published_count(request): +def lexical_entries_published_count(request): # tested todo:? 
client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -3577,7 +3545,7 @@ def lexical_entries_published_count(request): @view_config(route_name='lexical_entry_in_perspective', renderer='json', request_method='GET', permission='view') @view_config(route_name='lexical_entry', renderer='json', request_method='GET', permission='view') -def view_lexical_entry(request): +def view_lexical_entry(request): # TODO: test response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -3610,7 +3578,7 @@ def view_lexical_entry(request): @view_config(route_name='get_user_info', renderer='json', request_method='GET') -def get_user_info(request): +def get_user_info(request): # tested response = dict() client_id = None try: @@ -3677,7 +3645,7 @@ def get_user_info(request): @view_config(route_name='get_user_info', renderer='json', request_method='PUT') -def edit_user_info(request): +def edit_user_info(request): # TODO: test from passlib.hash import bcrypt response = dict() @@ -3713,6 +3681,7 @@ def edit_user_info(request): return {'error': str("No such user in the system")} new_password = req.get('new_password') old_password = req.get('old_password') + if new_password: if not old_password: request.response.status = HTTPBadRequest.code @@ -3762,7 +3731,7 @@ def edit_user_info(request): @view_config(route_name='approve_all', renderer='json', request_method='PATCH', permission='create') -def approve_all(request): +def approve_all(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -3810,7 +3779,7 @@ def approve_all(request): @view_config(route_name='approve_all_outer', renderer='json', request_method='PATCH', permission='create') -def approve_outer(request): # TODO: create test. +def approve_outer(request): # TODO: create test. from .scripts.approve import approve_all_outer client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -3840,7 +3809,7 @@ def approve_outer(request): # TODO: create test. 
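# A possible test sketch for the password-change branch of edit_user_info (the PUT view on
# the get_user_info route) marked "# TODO: test" above. Assumptions not confirmed by this
# diff: the route answers at '/user' for PUT as it does for GET in the tests below, it acts
# on the currently authenticated user when no id is supplied, and the signup_common helper
# from tests/tests.py is available. Treat this as a sketch, not the project's actual test.
from pyramid.httpexceptions import HTTPBadRequest, HTTPOk

def edit_user_password_sketch(self):
    # fresh user; signup_common signs up with password 'pass' and leaves the prev_log user logged in
    signup_common(self, 'pwd_user', 'pwd_user')
    # a new_password without the old one is rejected (the view sets HTTPBadRequest.code)
    response = self.app.put_json('/user', params={'new_password': 'new_pass'},
                                 status=HTTPBadRequest.code)
    self.assertEqual(response.status_int, HTTPBadRequest.code)
    # with both passwords supplied the change should go through
    response = self.app.put_json('/user', params={'old_password': 'pass',
                                                  'new_password': 'new_pass'})
    self.assertEqual(response.status_int, HTTPOk.code)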
@view_config(route_name='approve_entity', renderer='json', request_method='PATCH', permission='create') -def approve_entity(request): +def approve_entity(request): # TODO: test try: if type(request.json_body) == str: req = json.loads(request.json_body) @@ -3910,7 +3879,7 @@ def approve_entity(request): @view_config(route_name='approve_entity', renderer='json', request_method='DELETE', permission='delete') -def disapprove_entity(request): +def disapprove_entity(request): # TODO: test try: req = request.json_body variables = {'auth': request.authenticated_userid} @@ -3964,7 +3933,7 @@ def disapprove_entity(request): @view_config(route_name='get_translations', renderer='json', request_method='GET') -def get_translations(request): +def get_translations(request): # TODO: test from .models import find_by_translation_string, find_locale_id req = request.json_body response = [] @@ -3974,12 +3943,8 @@ def get_translations(request): return response - - - - @view_config(route_name='merge_dictionaries', renderer='json', request_method='POST') -def merge_dictionaries(request): +def merge_dictionaries(request): # TODO: test try: req = request.json_body variables = {'auth': request.authenticated_userid} @@ -4107,7 +4072,7 @@ def merge_dictionaries(request): @view_config(route_name='merge_perspectives', renderer='json', request_method='POST') -def merge_perspectives_api(request): +def merge_perspectives_api(request): # TODO: test try: req = request.json_body variables = {'auth': request.authenticated_userid} @@ -4269,7 +4234,7 @@ def merge_perspectives_api(request): @view_config(route_name='move_lexical_entry', renderer='json', request_method='PATCH', permission='create') -def move_lexical_entry(request): +def move_lexical_entry(request): # TODO: test req = request.json_body variables = {'auth': request.authenticated_userid} client = DBSession.query(Client).filter_by(id=variables['auth']).first() @@ -4339,7 +4304,7 @@ def move_lexical_entry(request): @view_config(route_name='move_lexical_entry_bulk', renderer='json', request_method='PATCH') -def move_lexical_entry_bulk(request): +def move_lexical_entry_bulk(request): # TODO: test req = request.json_body real_delete = req.get('real_delete') # With great power comes great responsibility # Maybe there needs to be check for permission of some sort (can really delete only when updating dictionary) @@ -4428,7 +4393,7 @@ def move_lexical_entry_bulk(request): @view_config(route_name='organization_list', renderer='json', request_method='GET') -def view_organization_list(request): +def view_organization_list(request): # TODO: test response = dict() organizations = [] for organization in DBSession.query(Organization).filter_by(marked_for_deletion=False).all(): @@ -4448,7 +4413,7 @@ def view_organization_list(request): @view_config(route_name='organization', renderer='json', request_method='GET') -def view_organization(request): +def view_organization(request): # TODO: test response = dict() organization_id = request.matchdict.get('organization_id') organization = DBSession.query(Organization).filter_by(id=organization_id).first() @@ -4467,7 +4432,7 @@ def view_organization(request): @view_config(route_name='organization', renderer='json', request_method='PUT', permission='edit') -def edit_organization(request): +def edit_organization(request): # TODO: test try: response = dict() organization_id = request.matchdict.get('organization_id') @@ -4522,7 +4487,7 @@ def edit_organization(request): @view_config(route_name='organization', renderer='json', 
request_method='DELETE', permission='delete') -def delete_organization(request): +def delete_organization(request): # TODO: test response = dict() organization_id = request.matchdict.get('organization_id') organization = DBSession.query(Organization).filter_by(id=organization_id).first() @@ -4596,7 +4561,7 @@ def cache_clients(): @view_config(route_name='perspective_info', renderer='json', request_method='GET', permission='view') -def perspective_info(request): +def perspective_info(request): # TODO: test response = dict() client_id = request.matchdict.get('perspective_client_id') object_id = request.matchdict.get('perspective_id') @@ -4661,7 +4626,7 @@ def perspective_info(request): @view_config(route_name='dictionary_info', renderer='json', request_method='GET', permission='view') -def dictionary_info(request): +def dictionary_info(request): # TODO: test response = dict() client_id = request.matchdict.get('client_id') object_id = request.matchdict.get('object_id') @@ -4689,7 +4654,7 @@ def dictionary_info(request): @view_config(route_name='create_organization', renderer='json', request_method='POST', permission='create') -def create_organization(request): +def create_organization(request): # TODO: test try: variables = {'auth': request.authenticated_userid} @@ -4747,7 +4712,7 @@ def create_organization(request): @view_config(route_name='dangerous_perspectives_hash', renderer='json', request_method='PUT', permission='edit') -def dangerous_perspectives_hash(request): +def dangerous_perspectives_hash(request): # TODO: test? response = dict() perspectives = DBSession.query(DictionaryPerspective) for perspective in perspectives: @@ -4766,7 +4731,7 @@ def dangerous_perspectives_hash(request): @view_config(route_name='perspective_hash', renderer='json', request_method='PUT', permission='edit') -def edit_perspective_hash(request): +def edit_perspective_hash(request): # TODO: test? 
import requests try: response = dict() @@ -4914,7 +4879,7 @@ def convert(request): # TODO: test when convert in blobs will be needed @view_config(route_name='convert_markup', renderer='json', request_method='POST') -def convert_markup(request): +def convert_markup(request): # TODO: test import requests from .scripts.convert_rules import praat_to_elan try: @@ -5005,7 +4970,7 @@ def login_get(request): @view_config(route_name='login', request_method='POST') -def login_post(request): +def login_post(request): # tested next = request.params.get('next') or request.route_url('home') login = request.POST.get('login', '') password = request.POST.get('password', '') @@ -5028,8 +4993,9 @@ def login_post(request): return HTTPFound(location=next, headers=response.headers) return HTTPUnauthorized(location=request.route_url('login')) + @view_config(route_name='cheatlogin', request_method='POST') -def login_cheat(request): +def login_cheat(request): # TODO: test next = request.params.get('next') or request.route_url('dashboard') login = request.json_body.get('login', '') passwordhash = request.json_body.get('passwordhash', '') @@ -5054,8 +5020,9 @@ def login_cheat(request): log.debug("Login unsuccessful for " + login) return HTTPUnauthorized(location=request.route_url('login')) + @view_config(route_name='logout', renderer='json') -def logout_any(request): +def logout_any(request): # tested next = request.params.get('next') or request.route_url('login') headers = forget(request) return HTTPFound(location=next, headers=headers) @@ -5066,8 +5033,9 @@ def signup_get(request): variables = {'auth': authenticated_userid(request)} return render_to_response('templates/signup.pt', variables, request=request) + @view_config(route_name='signup', renderer='json', request_method='POST') -def signup_post(request): +def signup_post(request): # tested try: login = request.POST.getone('login') name = request.POST.getone('name') @@ -5293,7 +5261,7 @@ def remove_deleted(lst): @view_config(route_name='merge_suggestions', renderer='json', request_method='POST') -def merge_suggestions(request): +def merge_suggestions(request): # TODO: test req = request.json entity_type_primary = req.get('entity_type_primary') or 'Transcription' entity_type_secondary = req.get('entity_type_secondary') or 'Translation' diff --git a/tests/tests.py b/tests/tests.py index 1df56fa6e..31e4dbedb 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -20,64 +20,84 @@ ) from subprocess import PIPE, Popen -inifile = 'andrey.ini' -alembicini = 'alembictests.ini' -dbname = 'postgresql+psycopg2://postgres@/lingvodoc_testing' # TODO: read from alembicini +from configparser import ConfigParser +alembicini = 'alembictests.ini' +parser = ConfigParser() +parser.read('../' + alembicini) +alembic_conf = dict() +for k, v in parser.items('alembic'): + alembic_conf[k] = v +dbname = alembic_conf['sqlalchemy.url'] from lingvodoc.scripts.initializedb import data_init -print_deb = False -def new_dict(d, key_set): +def debug_print(debug_flag, mssg): + if debug_flag: + for entry in mssg: + print(entry) + +# from copy import deepcopy + + +def new_dict(d, key_set, stop_words=list(), debug_flag=False): new_d = dict() + empty_lst = [None, {}, [], ()] + empty_lst += [str(o) for o in empty_lst] for key in d: - el = d[key] - empty_lst = [None, {}, [], ()] - empty_lst += [str(o) for o in empty_lst] - if el not in empty_lst: - new_d[key] = el - key_set.add(key) + if key not in stop_words: + # debug_print(debug_flag, ['key', key]) + el = d[key] + if el not in empty_lst: + new_d[key] = 
el # deepcopy(el) + key_set.add(key) return new_d -def is_equal(el1, el2): +def is_equal(el1, el2, stop_words=list(), set_like=False, debug_flag=False): t1, t2 = type(el1), type(el2) - # print('elems', el1, el2) if t1 != t2: - # print('type false') + debug_print(debug_flag, ['diff types', t1, t2]) return False if t1 == dict: - if not dict_diff(el1,el2): - # print('dict false') + if not dict_diff(el1,el2, stop_words, set_like, debug_flag): + debug_print(debug_flag, ['diff dicts', el1, el2]) return False elif t1 == list: - if not list_diff(el1,el2): + if not list_diff(el1,el2, stop_words, set_like, debug_flag): + debug_print(debug_flag, ['diff lists', el1, el2, 'setlike: %s' % set_like]) return False elif el1 != el2: - # print('simple false') + debug_print(debug_flag, ['diff elements', el1, el2]) return False return True -def list_diff(l1, l2): - for i in range(len(l1)): - if not is_equal(l1[i], l2[i]): - # print('list false') - return False +def list_diff(l1, l2, stop_words=list(), set_like=False, debug_flag=False): + if not set_like: + for i in range(len(l1)): + if not is_equal(l1[i], l2[i], stop_words, set_like, debug_flag): + debug_print(debug_flag, ['diff lists']) + return False + else: + for el1 in l1: + no_same_el = True + for el2 in l2: + if is_equal(el1, el2, stop_words, set_like): + no_same_el = False + if no_same_el: + return False return True -def dict_diff(d1,d2): +def dict_diff(d1, d2, stop_words=list(), set_like=False, debug_flag=False): keyset = set() - nd1 = new_dict(d1, keyset) - nd2 = new_dict(d2, keyset) - # print(nd1, nd2) - # print(keyset) + nd1 = new_dict(d1, keyset, stop_words, debug_flag) + nd2 = new_dict(d2, keyset, stop_words, debug_flag) + debug_print(debug_flag, ['keyset:', keyset, 'new dicts:', nd1, nd2]) for key in keyset: el1, el2 = nd1.get(key), nd2.get(key) - # print('key', key) - if not is_equal(el1, el2): - # print('elems in dict false') + if not is_equal(el1, el2, stop_words, set_like, debug_flag): return False return True @@ -89,7 +109,7 @@ def commonSetUp(self): from pyramid import paster from sqlalchemy import create_engine engine = create_engine(dbname) - myapp = paster.get_app(inifile) + myapp = paster.get_app('../' + alembicini) self.app = webtest.TestApp(myapp) DBSession.remove() DBSession.configure(bind=engine) @@ -100,7 +120,7 @@ def commonSetUp(self): my_env = os.environ proc = Popen(args, cwd=pathdir, env=my_env) proc.communicate() - accounts = get_appsettings(inifile, 'accounts') + accounts = get_appsettings('../' + alembicini, 'accounts') data_init(transaction.manager, accounts) @@ -125,40 +145,96 @@ def setUp(self): def tearDown(self): commonTearDown(self) - def assertDictEqual(self, d1, d2, msg=None): - self.assertEqual(dict_diff(d1, d2), True, msg) + def assertDictEqual(self, d1, d2, msg=None, stop_words=list(), set_like=False, debug_flag=False): + self.assertEqual(dict_diff(d1, d2, stop_words=stop_words, set_like=set_like, debug_flag=debug_flag), True, msg) + + def assertListEqual(self, l1, l2, msg=None, stop_words=list(), set_like=False, debug_flag=False): + self.assertEqual(list_diff(l1, l2, stop_words=stop_words, set_like=set_like, debug_flag=debug_flag), True, msg) - def assertListEqual(self, l1, l2, msg=None): - self.assertEqual(list_diff(l1, l2), True, msg) -def login_common(self): - response = self.app.post('/signup', params={'login': 'test', +def signup_common(self, username='test', prev_log = 'test'): + email = username + '@test.com' + response = self.app.post('/signup', params={'login': username, 'password': 'pass', 'name': 
'test', - 'email': 'test@test.com', + 'email': email, 'day': '1', 'month': '1', 'year': '1970'}) + + self.assertEqual(response.status_int, HTTPFound.code) + response = self.app.post('/logout') + self.assertEqual(response.status_int, HTTPFound.code) + response = self.app.post('/login', params={'login': username, + 'password': 'pass'}) self.assertEqual(response.status_int, HTTPFound.code) - response = self.app.post('/login', params={'login': 'test', + response = self.app.get('/user') + correct_answer = {'intl_name': username, 'login': username, 'organizations': [], + 'is_active': 'True', 'name': 'test', 'email': email, + 'default_locale_id': None, 'birthday': '1970-01-01', 'about': None} + + self.assertDictEqual(response.json, correct_answer, stop_words=['id', 'signup_date']) + id = response.json['id'] + + response = self.app.post('/logout') + self.assertEqual(response.status_int, HTTPFound.code) + response = self.app.post('/login', params={'login': prev_log, 'password': 'pass'}) + self.assertEqual(response.status_int, HTTPFound.code) + return id -def create_language(self, translation_string): - response = self.app.post_json('/language', params={'translation_string': translation_string}) +def login_common(self, username='test'): + response = self.app.post('/login', params={'login': username, + 'password': 'pass'}) + self.assertEqual(response.status_int, HTTPFound.code) + + +def create_language(self, translation_string, par_ids={'client_id': None, 'object_id': None}): + + response = self.app.post_json('/language', params={'translation_string': translation_string, + 'parent_client_id': par_ids['client_id'], + 'parent_object_id': par_ids['object_id']}) self.assertEqual(response.status_int, HTTPOk.code) ids = response.json response = self.app.get('/language/%s/%s' % (ids['client_id'], ids['object_id'])) correct_answer = {'client_id': ids['client_id'], 'object_id': ids['object_id'], 'locale_exist': False, 'translation': translation_string, - 'parent_client_id': None, 'translation_string': translation_string, 'parent_object_id': None} + 'parent_client_id': par_ids['client_id'], + 'translation_string': translation_string, + 'parent_object_id': par_ids['object_id']} self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, correct_answer) return ids -def create_dictionary(self, translation_string, par_ids): +def dictionary_change_state(self, dict_ids, state): + response = self.app.put_json('/dictionary/%s/%s/state' % (dict_ids['client_id'], dict_ids['object_id']), + params={'status':state}) + self.assertEqual(response.status_int, HTTPOk.code) + response = self.app.get('/dictionary/%s/%s/state' % (dict_ids['client_id'], dict_ids['object_id'])) + self.assertEqual(response.status_int, HTTPOk.code) + self.assertEqual(response.status_int, HTTPOk.code) + self.assertDictEqual(response.json, {'status': state}) + + +def perspective_change_state(self, dict_ids, persp_ids, state): + response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s/state' + % (dict_ids['client_id'],dict_ids['object_id'], + persp_ids['client_id'], persp_ids['object_id']), + params={'status':state}) + + self.assertEqual(response.status_int, HTTPOk.code) + + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/state' % (dict_ids['client_id'],dict_ids['object_id'], + persp_ids['client_id'], persp_ids['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + self.assertDictEqual(response.json, {'status': state}) + + +def create_dictionary(self, translation_string, par_ids, 
state=None): response = self.app.post_json('/dictionary', params={'translation_string': translation_string, 'parent_client_id': par_ids['client_id'], 'parent_object_id': par_ids['object_id']}) @@ -175,10 +251,12 @@ def create_dictionary(self, translation_string, par_ids): self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, correct_answer) + if state: + dictionary_change_state(self,ids,state) return ids -def create_perspective(self, translation_string, par_ids): +def create_perspective(self, translation_string, par_ids, state=None): response = self.app.post_json('/dictionary/%s/%s/perspective' % (par_ids['client_id'],par_ids['object_id']), params={'translation_string': translation_string}) self.assertEqual(response.status_int, HTTPOk.code) @@ -200,9 +278,61 @@ def create_perspective(self, translation_string, par_ids): response = self.app.get('/perspective/%s/%s' % (ids['client_id'], ids['object_id'])) self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, correct_answer) + if state: + perspective_change_state(self,par_ids,ids,state) return ids +def add_l1e(self, dict_ids, persp_ids, lex_ids, content='content', entity_type='Word', data_type='text'): + + response = self.app.post_json('/dictionary/%s/%s/perspective/%s/%s/lexical_entry/%s/%s/leveloneentity' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'], + lex_ids['client_id'], + lex_ids['object_id']), + params={'entity_type':entity_type, + 'data_type':data_type, + 'content': content, + 'locale_id': 1}) + self.assertEqual(response.status_int, HTTPOk.code) + l1e_ids = response.json + self.assertDictEqual(response.json, {}, stop_words=['object_id', 'client_id']) + + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/lexical_entry/%s/%s/leveloneentity/%s/%s' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'], + lex_ids['client_id'], + lex_ids['object_id'], + l1e_ids['client_id'], + l1e_ids['object_id'])) + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = {'client_id': l1e_ids['client_id'], + 'parent_client_id': lex_ids['client_id'], + 'parent_object_id': lex_ids['object_id'], + 'object_id': l1e_ids['object_id'], + 'entity_type': entity_type, + 'level': 'leveloneentity', + 'marked_for_deletion': False, + 'locale_id': 1, + 'content': content} + self.assertDictEqual(response.json, correct_answer) + return l1e_ids + + +def add_grouping(self, first_lex, second_lex, tag=None): + params = {'connections': [first_lex, second_lex], 'entity_type':'Etymology'} + if tag: + params['tag'] = tag + response = self.app.post_json('/group_entity', params=params) + self.assertEqual(response.status_int, HTTPOk.code) + self.assertDictEqual(response.json,{}) + + + class TestSignUp(MyTestCase): def one_big_test(self): @@ -211,6 +341,7 @@ def one_big_test(self): status=HTTPForbidden.code) self.assertEqual(response.status_int, HTTPForbidden.code) # test signup & login + user_id = signup_common(self) login_common(self) # test creating language lang_name = 'test_lang' @@ -236,21 +367,27 @@ def one_big_test(self): {'translation': 'German language', 'client_id': 1, 'translation_string': 'German language', 'object_id': 5, 'locale_exist': True}, - {'translation': lang_name, 'client_id': 3, + {'translation': lang_name, 'client_id': par_ids['client_id'], 'translation_string': lang_name, - 'object_id': 1, 'locale_exist': False}]} - self.assertDictEqual(response.json, 
correct_answer) + 'object_id': par_ids['object_id'], 'locale_exist': False}]} + self.assertDictEqual(response.json, correct_answer, stop_words=['client_id', 'object_id']) + firstlang = response.json['languages'][0] + firstlangids = {'client_id': firstlang['client_id'], 'object_id': firstlang['object_id']} # test all params when editing language response = self.app.put_json('/language/%s/%s' % (par_ids['client_id'], par_ids['object_id']), params={'translation':'new_translation', - 'parent_client_id':1, - 'parent_object_id':1}) + 'parent_client_id':firstlangids['client_id'], + 'parent_object_id':firstlangids['object_id']}) self.assertEqual(response.status_int, HTTPOk.code) response = self.app.get('/language/%s/%s' % (par_ids['client_id'], par_ids['object_id'])) - correct_answer = {'client_id': par_ids['client_id'], 'object_id': par_ids['object_id'], - 'locale_exist': False, 'translation': 'new_translation', - 'parent_client_id': 1, 'translation_string': lang_name, 'parent_object_id': 1} + correct_answer = {'client_id': par_ids['client_id'], + 'object_id': par_ids['object_id'], + 'locale_exist': False, + 'translation': 'new_translation', + 'parent_client_id': firstlangids['client_id'], + 'translation_string': lang_name, + 'parent_object_id': firstlangids['object_id']} self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, correct_answer) # test all params when creating language @@ -274,29 +411,33 @@ def one_big_test(self): # test edit dictionary response = self.app.put_json('/dictionary/%s/%s' % (dict_ids['client_id'], dict_ids['object_id']), params={'translation':'new_translation', - 'parent_client_id':1, - 'parent_object_id':1}) + 'parent_client_id':firstlangids['client_id'], + 'parent_object_id':firstlangids['object_id']}) self.assertEqual(response.status_int, HTTPOk.code) response = self.app.get('/dictionary/%s/%s' % (dict_ids['client_id'], dict_ids['object_id'])) - correct_answer = {'client_id': dict_ids['client_id'], 'object_id': dict_ids['object_id'], + correct_answer = {'client_id': dict_ids['client_id'], + 'object_id': dict_ids['object_id'], 'additional_metadata': '[]', 'translation': 'new_translation', - 'parent_client_id': 1, 'translation_string': dict_name, 'parent_object_id': 1, + 'parent_client_id': firstlangids['client_id'], + 'translation_string': dict_name, + 'parent_object_id': firstlangids['object_id'], 'status': 'WiP'} self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, correct_answer) - response = self.app.put_json('/dictionary/%s/%s' % (dict_ids['client_id'], dict_ids['object_id']), params={'translation':'new_translation', 'parent_client_id':par_ids['client_id'], 'parent_object_id':par_ids['object_id']}) self.assertEqual(response.status_int, HTTPOk.code) response = self.app.get('/dictionary/%s/%s' % (dict_ids['client_id'], dict_ids['object_id'])) - correct_answer = {'client_id': dict_ids['client_id'], 'object_id': dict_ids['object_id'], + correct_answer = {'client_id': dict_ids['client_id'], + 'object_id': dict_ids['object_id'], 'additional_metadata': '[]', 'translation': 'new_translation', - 'parent_client_id': par_ids['client_id'], 'translation_string': dict_name, + 'parent_client_id': par_ids['client_id'], + 'translation_string': dict_name, 'parent_object_id': par_ids['object_id'], 'status': 'WiP'} self.assertEqual(response.status_int, HTTPOk.code) @@ -307,20 +448,18 @@ def one_big_test(self): self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, {'status': 'WiP'}) # 
test edit dictionary state - response = self.app.put_json('/dictionary/%s/%s/state' % (dict_ids['client_id'], dict_ids['object_id']), - params={'status':'test state'}) - self.assertEqual(response.status_int, HTTPOk.code) - self.assertDictEqual(response.json, {'status': 'test state'}) - + dictionary_change_state(self, dict_ids, 'test state') persp_name = 'test_persp' # test creating perspective persp_ids = create_perspective(self, persp_name, dict_ids) # test perspective edit - response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s' % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), + response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], persp_ids['object_id']), params={'translation':'new_translation', - 'parent_client_id':1, - 'parent_object_id':1, + 'parent_client_id': firstlangids['client_id'], + 'parent_object_id': firstlangids['object_id'], 'is_template': True}) self.assertEqual(response.status_int, HTTPOk.code) response = self.app.get('/dictionary/%s/%s/perspective/%s/%s' % (dict_ids['client_id'], dict_ids['object_id'], @@ -328,81 +467,74 @@ def one_big_test(self): correct_answer = {'client_id': persp_ids['client_id'], 'object_id': persp_ids['object_id'], 'additional__metadata': '[]', 'translation': 'new_translation', - 'parent_client_id': 1, 'translation_string': persp_name, - 'parent_object_id': 1, + 'parent_client_id': firstlangids['client_id'], 'translation_string': persp_name, + 'parent_object_id': firstlangids['object_id'], 'is_template': True, 'marked_for_deletion': False, 'status': 'WiP'} self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, correct_answer) # return old parent to perspective - response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s' % (1,1, - persp_ids['client_id'], persp_ids['object_id']), + response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s' + % (firstlangids['client_id'], + firstlangids['object_id'], + persp_ids['client_id'], + persp_ids['object_id']), params={'parent_client_id': dict_ids['client_id'], 'parent_object_id': dict_ids['object_id'], 'is_template': True}) self.assertEqual(response.status_int, HTTPOk.code) # test view perspective state - response = self.app.get('/dictionary/%s/%s' - '/perspective/%s/%s/state' % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id'])) + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/state' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, {'status': 'WiP'}) # test edit perspective state - response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s/state' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), - params={'status':'test state'}) - - self.assertEqual(response.status_int, HTTPOk.code) - - response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/state' % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id'])) - - self.assertEqual(response.status_int, HTTPOk.code) - self.assertDictEqual(response.json, {'status': 'test state'}) - + perspective_change_state(self, dict_ids, persp_ids, 'test state') # test view perspective tree - response = self.app.get('/dictionary/%s/%s' - '/perspective/%s/%s/tree' % 
(dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id'])) + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/tree' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) self.assertEqual(response.status_int, HTTPOk.code) - # correct_answer = [ - # {'translation': persp_name, 'client_id': persp_ids['client_id'], - # 'type': 'perspective', 'object_id': persp_ids['object_id'], - # 'is_template': True, 'status': 'test state', - # 'translation_string': 'new_translation', - # 'parent_client_id': dict_ids['client_id'], 'parent_object_id': dict_ids['object_id'], - # 'marked_for_deletion': False}, - # {'client_id': dict_ids['client_id'], 'translation': 'new_translation', - # 'type': 'dictionary', 'object_id': dict_ids['object_id'], 'status': 'test state', - # 'translation_string': dict_name, 'additional_metadata': None, - # 'parent_client_id': par_ids['client_id'], 'parent_object_id': par_ids['object_id']}, - # {'client_id': par_ids['client_id'], 'translation': 'new_translation', 'locale_exist': False, - # 'object_id': par_ids['object_id'], 'type': 'language', 'translation_string': lang_name, - # 'parent_client_id': 1, 'parent_object_id': 1}, - # {'parent_object_id': None, 'parent_client_id': None, - # 'object_id': 1, 'translation_string': 'Russian language', - # 'client_id': 1, 'locale_exist': True, 'translation': 'Russian language', 'type': 'language'}] #TODO: check diff - - correct_answer = [{'parent_object_id': dict_ids['object_id'], 'parent_client_id': dict_ids['client_id'], - 'object_id': persp_ids['object_id'], 'client_id': persp_ids['client_id'], + + correct_answer = [{'parent_object_id': dict_ids['object_id'], + 'parent_client_id': dict_ids['client_id'], + 'object_id': persp_ids['object_id'], + 'client_id': persp_ids['client_id'], 'translation_string': persp_name, - 'is_template': True, 'status': 'test state', + 'is_template': True, + 'status': 'test state', 'marked_for_deletion': False, - 'translation': 'new_translation', 'type': 'perspective'}, - {'additional_metadata': None, 'parent_object_id': par_ids['object_id'], - 'parent_client_id': par_ids['client_id'], 'client_id': dict_ids['client_id'], + 'translation': 'new_translation', + 'type': 'perspective'}, + {'additional_metadata': None, + 'parent_object_id': par_ids['object_id'], + 'parent_client_id': par_ids['client_id'], + 'client_id': dict_ids['client_id'], 'translation_string': dict_name, - 'object_id': dict_ids['object_id'], 'status': 'test state', - 'translation': 'new_translation', 'type': 'dictionary'}, - {'parent_object_id': 1, 'parent_client_id': 1, - 'locale_exist': False, 'translation_string': lang_name, - 'object_id': par_ids['object_id'], 'client_id': par_ids['client_id'], + 'object_id': dict_ids['object_id'], + 'status': 'test state', + 'translation': 'new_translation', + 'type': 'dictionary'}, + {'parent_object_id': firstlangids['object_id'], + 'parent_client_id': firstlangids['client_id'], + 'locale_exist': False, + 'translation_string': lang_name, + 'object_id': par_ids['object_id'], + 'client_id': par_ids['client_id'], 'translation': 'new_translation', 'type': 'language'}, - {'parent_object_id': None, 'parent_client_id': None, - 'locale_exist': True, 'translation_string': 'Russian language', - 'object_id': 1, 'client_id': 1, 'translation': 'Russian language', + {'parent_object_id': None, + 'parent_client_id': None, + 'locale_exist': True, + 'translation_string': 'Russian language', + 'object_id': 
firstlangids['object_id'], + 'client_id': firstlangids['client_id'], + 'translation': 'Russian language', 'type': 'language'}] first_answ = response.json self.assertListEqual(first_answ, correct_answer) @@ -413,49 +545,337 @@ def one_big_test(self): # testing perspective meta metadict = {'a':'b', 'c':{'d':'e'}} response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s/meta' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id']), params = metadict) self.assertEqual(response.status_int, HTTPOk.code) response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/meta' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), - ) + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, metadict) - metaupd = {'a':{'f':'g'}, 'h':'i', 'j':['k','l', {'m':'n', 'o':'p'}]} + + metaupd = {'a': {'f': 'g'}, 'h': 'i', 'j': ['k', 'l', {'m': 'n', 'o': 'p'}]} response = self.app.put_json('/dictionary/%s/%s/perspective/%s/%s/meta' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), - params = metaupd) + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id']), + params=metaupd) self.assertEqual(response.status_int, HTTPOk.code) metadict.update(metaupd) response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/meta' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), - ) + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, metadict) metadel = ['j', 'c'] response = self.app.delete_json('/dictionary/%s/%s/perspective/%s/%s/meta' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), - params = metadel - ) + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id']), + params=metadel) self.assertEqual(response.status_int, HTTPOk.code) for key in metadel: del metadict[key] response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/meta' - % (dict_ids['client_id'],dict_ids['object_id'], - persp_ids['client_id'], persp_ids['object_id']), - ) + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) self.assertEqual(response.status_int, HTTPOk.code) self.assertDictEqual(response.json, metadict) + # test roles + response = self.app.get('/dictionary/%s/%s/roles' + % (dict_ids['client_id'], + dict_ids['object_id'])) + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = {'roles_users': + {'Can resign users from perspective editors': [user_id], + 'Can create perspectives': [user_id], + 'Can merge dictionaries and perspectives': [user_id], + 'Can delete dictionary': [user_id], + 'Can create dictionary roles and assign collaborators': [user_id], + 'Can get dictionary role list': [user_id], + 'Can edit dictionary options': [user_id]}, + 'roles_organizations': + {'Can resign users from perspective editors': [], + 'Can create perspectives': [], + 'Can merge dictionaries and perspectives': [], + 'Can delete dictionary': [], + 'Can create dictionary roles and 
assign collaborators': [], + 'Can get dictionary role list': [], + 'Can edit dictionary options': []}} + self.assertDictEqual(response.json, correct_answer) + + # TODO: create two new users; share some (but not all) roles with the first; log in as that user and share roles with the second. + # TODO: the user should be able to share only the roles that were shared with them, and not the others. Then log back in as the original user. + + user_id2 = signup_common(self, 'test2') + user_id3 = signup_common(self, 'test3') + params = {'roles_users': + {'Can resign users from perspective editors': [user_id2], + 'Can create perspectives': [user_id2], + 'Can merge dictionaries and perspectives': [user_id3], + 'Can create dictionary roles and assign collaborators': [user_id2], + 'Can edit dictionary options': [user_id3]}} + response = self.app.get('/dictionary/%s/%s/roles' % (dict_ids['client_id'], + dict_ids['object_id']), params=params) + print(response.json) + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = {'roles_users': + {'Can resign users from perspective editors': [user_id, user_id2], + 'Can create perspectives': [user_id, user_id2], + 'Can merge dictionaries and perspectives': [user_id, user_id3], + 'Can delete dictionary': [user_id], + 'Can create dictionary roles and assign collaborators': [user_id, user_id2], + 'Can get dictionary role list': [user_id], + 'Can edit dictionary options': [user_id, user_id3]}, + 'roles_organizations': + {'Can resign users from perspective editors': [], + 'Can create perspectives': [], + 'Can merge dictionaries and perspectives': [], + 'Can delete dictionary': [], + 'Can create dictionary roles and assign collaborators': [], + 'Can get dictionary role list': [], + 'Can edit dictionary options': []}} + self.assertDictEqual(response.json, correct_answer) + login_common(self, 'test2') + + login_common(self, 'test') + response = self.app.get('/users') + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = {'users': [{'login': 'admin', 'name': 'Администратор', 'id': 1, + 'intl_name': 'System Administrator'}, + {'login': 'test', 'name': 'test', 'id': 2, 'intl_name': 'test'}, + {'login': 'test2', 'name': 'test', 'id': 3, 'intl_name': 'test2'}, + {'login': 'test3', 'name': 'test', 'id': 4, 'intl_name': 'test3'}]} + self.assertDictEqual(response.json, correct_answer) + + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/fields' + % (1, 1, 1, 1)) # TODO: remove the hard-coded ids; use the dictionaries list, probably. 
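# Until the idea in the TODO above (or the alternative noted just below) is implemented,
# the hard-coded template ids could at least be confined to a single place by factoring the
# fields copy that follows into a helper next to create_perspective() and friends. This is
# only a sketch: copy_template_fields is a hypothetical name, and (1, 1, 1, 1) is the same
# assumption the test already makes about where the template perspective lives.
def copy_template_fields(self, dict_ids, persp_ids, template_ids=(1, 1, 1, 1)):
    # read the field layout of the (assumed) template perspective
    response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/fields' % template_ids)
    self.assertEqual(response.status_int, HTTPOk.code)
    fields = response.json
    # recreate the same fields on the perspective under test and hand them back for later checks
    response = self.app.post_json('/dictionary/%s/%s/perspective/%s/%s/fields'
                                  % (dict_ids['client_id'], dict_ids['object_id'],
                                     persp_ids['client_id'], persp_ids['object_id']),
                                  params=fields)
    self.assertEqual(response.status_int, HTTPOk.code)
    return fields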
+ # todo: Or just create new db with new object_ids and find needed pairs of ids + + self.assertEqual(response.status_int, HTTPOk.code) + fields = response.json + response = self.app.post_json('/dictionary/%s/%s/perspective/%s/%s/fields' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id']), + params=fields) + + self.assertEqual(response.status_int, HTTPOk.code) + + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/fields' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + self.assertDictEqual(response.json, fields, stop_words=['client_id', 'object_id']) + + response = self.app.post_json('/dictionary/%s/%s/perspective/%s/%s/lexical_entry'% + (dict_ids['client_id'], dict_ids['object_id'], + persp_ids['client_id'], persp_ids['object_id']), params={}) + self.assertEqual(response.status_int, HTTPOk.code) + lex_ids = response.json + + response = self.app.post_json('/dictionary/%s/%s/perspective/%s/%s/lexical_entries'% + (dict_ids['client_id'], dict_ids['object_id'], + persp_ids['client_id'], persp_ids['object_id']), params={'count': 41}) + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = [{} for o in range(41)] + self.assertListEqual(response.json, correct_answer, stop_words=['object_id', 'client_id']) + lexes_ids = response.json + l1e_ids = add_l1e(self,dict_ids,persp_ids,lex_ids, content='testing level one entity') + + response = self.app.post_json('/dictionary/%s/%s/perspective/%s/%s/lexical_entry/%s/%s/leveloneentity' + '/%s/%s/leveltwoentity' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'], + lex_ids['client_id'], + lex_ids['object_id'], + l1e_ids['client_id'], + l1e_ids['object_id']), + params={'entity_type':'Word', + 'data_type':'text', + 'content': 'testing level two entity', + 'locale_id': 1}) + self.assertEqual(response.status_int, HTTPOk.code) + l2e_ids = response.json + self.assertDictEqual(response.json, {}, stop_words=['object_id', 'client_id']) + + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/lexical_entry/%s/%s/leveloneentity' + '/%s/%s/leveltwoentity/%s/%s' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'], + lex_ids['client_id'], + lex_ids['object_id'], + l1e_ids['client_id'], + l1e_ids['object_id'], + l2e_ids['client_id'], + l2e_ids['object_id'])) + self.assertEqual(response.status_int, HTTPOk.code) + # correct_answer = {'client_id': l2e_ids['client_id'], # TODO: uncomment when fixed in refactoring + # 'parent_client_id': l1e_ids['client_id'], + # 'parent_object_id': l1e_ids['object_id'], + # 'object_id': l2e_ids['object_id'], + # 'entity_type': 'Word', + # 'level': 'leveltwoentity', + # 'marked_for_deletion': False, + # 'locale_id': 1, + # 'content': 'testing level two entity'} + correct_answer = {'parent_client_id': l1e_ids['client_id'], + 'parent_object_id': l1e_ids['object_id'], + 'entity_type': 'Word', + 'locale_id': 1, + 'content': 'testing level two entity'} + self.assertDictEqual(response.json, correct_answer) + grouping_lexes = lexes_ids[:6:] + lexes_ids = lexes_ids[6::] + grouping_contents = list() + counter = 0 + for iter_lex_ids in grouping_lexes: + content = 'grouping word ' + str(counter) + grouping_contents.append(content) + add_l1e(self, dict_ids, persp_ids, iter_lex_ids, content=content) + counter += 1 + + add_grouping(self, grouping_lexes[0], 
grouping_lexes[1]) + correct_answer = {'words': [{'lexical_entry': {'object_id': 2, 'contains': [{'additional_metadata': None, 'object_id': 2, 'parent_client_id': 11, 'published': False, 'locale_id': 1, 'level': 'leveloneentity', 'content': "grouping word {'object_id': 2, 'client_id': 11}", 'contains': None, 'parent_object_id': 2, 'client_id': 11, 'entity_type': 'Word', 'marked_for_deletion': False}, {'additional_metadata': None, 'object_id': 1, 'parent_client_id': 11, 'published': False, 'locale_id': None, 'level': 'groupingentity', 'content': 'Wed Feb 10 13:50:08 2016MNAZGRV22A', 'contains': None, 'parent_object_id': 2, 'client_id': 11, 'entity_type': 'Etymology', 'marked_for_deletion': False}], 'published': False, 'client_id': 11, 'level': 'lexicalentry', 'came_from': None, 'marked_for_deletion': False, 'parent_client_id': 5, 'parent_object_id': 1}}, {'lexical_entry': {'object_id': 3, 'contains': [{'additional_metadata': None, 'object_id': 3, 'parent_client_id': 11, 'published': False, 'locale_id': 1, 'level': 'leveloneentity', 'content': "grouping word {'object_id': 3, 'client_id': 11}", 'contains': None, 'parent_object_id': 3, 'client_id': 11, 'entity_type': 'Word', 'marked_for_deletion': False}, {'additional_metadata': None, 'object_id': 2, 'parent_client_id': 11, 'published': False, 'locale_id': None, 'level': 'groupingentity', 'content': 'Wed Feb 10 13:50:08 2016MNAZGRV22A', 'contains': None, 'parent_object_id': 3, 'client_id': 11, 'entity_type': 'Etymology', 'marked_for_deletion': False}], 'published': False, 'client_id': 11, 'level': 'lexicalentry', 'came_from': None, 'marked_for_deletion': False, 'parent_client_id': 5, 'parent_object_id': 1}}]} + + response = self.app.get('/lexical_entry/%s/%s/connected' + % ( + grouping_lexes[0]['client_id'], + grouping_lexes[0]['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + self.assertDictEqual(response.json, correct_answer, stop_words=['content', 'client_id', 'object_id', 'parent_client_id', 'parent_object_id'], set_like=True) # TODO: do not ignore everything. 
Some other equality check needs to be done + + add_grouping(self, grouping_lexes[2], grouping_lexes[3]) + correct_answer = {'words': [{'lexical_entry': {'contains': [{'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'contains': None, 'object_id': 4, 'content': "grouping word {'object_id': 4, 'client_id': 11}", 'entity_type': 'Word', 'parent_object_id': 4, 'client_id': 11, 'level': 'leveloneentity', 'published': False, 'locale_id': 1}, {'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'contains': None, 'object_id': 3, 'content': 'Wed Feb 10 15:36:45 2016ZV06I8ZRYW', 'entity_type': 'Etymology', 'parent_object_id': 4, 'client_id': 11, 'level': 'groupingentity', 'published': False, 'locale_id': None}], 'parent_client_id': 5, 'parent_object_id': 1, 'client_id': 11, 'level': 'lexicalentry', 'object_id': 4, 'published': False, 'marked_for_deletion': False, 'came_from': None}}, {'lexical_entry': {'contains': [{'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'contains': None, 'object_id': 5, 'content': "grouping word {'object_id': 5, 'client_id': 11}", 'entity_type': 'Word', 'parent_object_id': 5, 'client_id': 11, 'level': 'leveloneentity', 'published': False, 'locale_id': 1}, {'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'contains': None, 'object_id': 4, 'content': 'Wed Feb 10 15:36:45 2016ZV06I8ZRYW', 'entity_type': 'Etymology', 'parent_object_id': 5, 'client_id': 11, 'level': 'groupingentity', 'published': False, 'locale_id': None}], 'parent_client_id': 5, 'parent_object_id': 1, 'client_id': 11, 'level': 'lexicalentry', 'object_id': 5, 'published': False, 'marked_for_deletion': False, 'came_from': None}}]} + + response = self.app.get('/lexical_entry/%s/%s/connected' + % ( + grouping_lexes[2]['client_id'], + grouping_lexes[2]['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + # print(response.json) + self.assertDictEqual(response.json, correct_answer, stop_words=['content', 'client_id', 'object_id', 'parent_client_id', 'parent_object_id'], set_like=True) + + add_grouping(self, grouping_lexes[3], grouping_lexes[4]) + correct_answer = {'words': [{'lexical_entry': {'published': False, 'came_from': None, 'object_id': 4, 'client_id': 11, 'level': 'lexicalentry', 'marked_for_deletion': False, 'parent_object_id': 1, 'parent_client_id': 5, 'contains': [{'entity_type': 'Word', 'parent_object_id': 4, 'content': "grouping word {'client_id': 11, 'object_id': 4}", 'published': False, 'locale_id': 1, 'object_id': 4, 'client_id': 11, 'level': 'leveloneentity', 'marked_for_deletion': False, 'additional_metadata': None, 'parent_client_id': 11, 'contains': None}, {'entity_type': 'Etymology', 'parent_object_id': 4, 'content': 'Wed Feb 10 15:49:45 20166TGFY3S1LX', 'published': False, 'locale_id': None, 'object_id': 3, 'client_id': 11, 'level': 'groupingentity', 'marked_for_deletion': False, 'additional_metadata': None, 'parent_client_id': 11, 'contains': None}]}}, {'lexical_entry': {'published': False, 'came_from': None, 'object_id': 5, 'client_id': 11, 'level': 'lexicalentry', 'marked_for_deletion': False, 'parent_object_id': 1, 'parent_client_id': 5, 'contains': [{'entity_type': 'Word', 'parent_object_id': 5, 'content': "grouping word {'client_id': 11, 'object_id': 5}", 'published': False, 'locale_id': 1, 'object_id': 5, 'client_id': 11, 'level': 'leveloneentity', 'marked_for_deletion': False, 'additional_metadata': None, 'parent_client_id': 11, 
'contains': None}, {'entity_type': 'Etymology', 'parent_object_id': 5, 'content': 'Wed Feb 10 15:49:45 20166TGFY3S1LX', 'published': False, 'locale_id': None, 'object_id': 4, 'client_id': 11, 'level': 'groupingentity', 'marked_for_deletion': False, 'additional_metadata': None, 'parent_client_id': 11, 'contains': None}]}}, {'lexical_entry': {'published': False, 'came_from': None, 'object_id': 6, 'client_id': 11, 'level': 'lexicalentry', 'marked_for_deletion': False, 'parent_object_id': 1, 'parent_client_id': 5, 'contains': [{'entity_type': 'Word', 'parent_object_id': 6, 'content': "grouping word {'client_id': 11, 'object_id': 6}", 'published': False, 'locale_id': 1, 'object_id': 6, 'client_id': 11, 'level': 'leveloneentity', 'marked_for_deletion': False, 'additional_metadata': None, 'parent_client_id': 11, 'contains': None}, {'entity_type': 'Etymology', 'parent_object_id': 6, 'content': 'Wed Feb 10 15:49:45 20166TGFY3S1LX', 'published': False, 'locale_id': None, 'object_id': 5, 'client_id': 11, 'level': 'groupingentity', 'marked_for_deletion': False, 'additional_metadata': None, 'parent_client_id': 11, 'contains': None}]}}]} + + response = self.app.get('/lexical_entry/%s/%s/connected' + % ( + grouping_lexes[2]['client_id'], + grouping_lexes[2]['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + # print(response.json) + self.assertDictEqual(response.json, correct_answer, stop_words=['content', 'client_id', 'object_id', 'parent_client_id', 'parent_object_id'], set_like=True) + + add_grouping(self, grouping_lexes[1], grouping_lexes[4]) + correct_answer = {'words': [{'lexical_entry': {'level': 'lexicalentry', 'came_from': None, 'parent_client_id': 5, 'published': False, 'object_id': 2, 'contains': [{'level': 'leveloneentity', 'entity_type': 'Word', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 2, 'published': False, 'content': "grouping word {'object_id': 2, 'client_id': 11}", 'object_id': 2, 'contains': None, 'locale_id': 1}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 2, 'published': False, 'content': 'Wed Feb 10 15:54:08 2016KPBGYM63DB', 'object_id': 1, 'contains': None, 'locale_id': None}], 'marked_for_deletion': False, 'client_id': 11, 'parent_object_id': 1}}, {'lexical_entry': {'level': 'lexicalentry', 'came_from': None, 'parent_client_id': 5, 'published': False, 'object_id': 3, 'contains': [{'level': 'leveloneentity', 'entity_type': 'Word', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 3, 'published': False, 'content': "grouping word {'object_id': 3, 'client_id': 11}", 'object_id': 3, 'contains': None, 'locale_id': 1}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 3, 'published': False, 'content': 'Wed Feb 10 15:54:08 20167VF6IEU0O7', 'object_id': 6, 'contains': None, 'locale_id': None}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 3, 'published': False, 'content': 'Wed Feb 10 15:54:08 2016KPBGYM63DB', 'object_id': 2, 'contains': None, 'locale_id': None}], 'marked_for_deletion': False, 'client_id': 11, 'parent_object_id': 1}}, 
{'lexical_entry': {'level': 'lexicalentry', 'came_from': None, 'parent_client_id': 5, 'published': False, 'object_id': 6, 'contains': [{'level': 'leveloneentity', 'entity_type': 'Word', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 6, 'published': False, 'content': "grouping word {'object_id': 6, 'client_id': 11}", 'object_id': 6, 'contains': None, 'locale_id': 1}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 6, 'published': False, 'content': 'Wed Feb 10 15:54:08 2016KPBGYM63DB', 'object_id': 7, 'contains': None, 'locale_id': None}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 6, 'published': False, 'content': 'Wed Feb 10 15:54:08 20167VF6IEU0O7', 'object_id': 5, 'contains': None, 'locale_id': None}], 'marked_for_deletion': False, 'client_id': 11, 'parent_object_id': 1}}, {'lexical_entry': {'level': 'lexicalentry', 'came_from': None, 'parent_client_id': 5, 'published': False, 'object_id': 4, 'contains': [{'level': 'leveloneentity', 'entity_type': 'Word', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 4, 'published': False, 'content': "grouping word {'object_id': 4, 'client_id': 11}", 'object_id': 4, 'contains': None, 'locale_id': 1}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 4, 'published': False, 'content': 'Wed Feb 10 15:54:08 20167VF6IEU0O7', 'object_id': 3, 'contains': None, 'locale_id': None}], 'marked_for_deletion': False, 'client_id': 11, 'parent_object_id': 1}}, {'lexical_entry': {'level': 'lexicalentry', 'came_from': None, 'parent_client_id': 5, 'published': False, 'object_id': 5, 'contains': [{'level': 'leveloneentity', 'entity_type': 'Word', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 5, 'published': False, 'content': "grouping word {'object_id': 5, 'client_id': 11}", 'object_id': 5, 'contains': None, 'locale_id': 1}, {'level': 'groupingentity', 'entity_type': 'Etymology', 'parent_client_id': 11, 'marked_for_deletion': False, 'additional_metadata': None, 'client_id': 11, 'parent_object_id': 5, 'published': False, 'content': 'Wed Feb 10 15:54:08 20167VF6IEU0O7', 'object_id': 4, 'contains': None, 'locale_id': None}], 'marked_for_deletion': False, 'client_id': 11, 'parent_object_id': 1}}]} + + response = self.app.get('/lexical_entry/%s/%s/connected' + % (grouping_lexes[0]['client_id'], + grouping_lexes[0]['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + # print(response.json) + self.assertDictEqual(response.json, correct_answer, stop_words=['content', 'client_id', 'object_id', 'parent_client_id', 'parent_object_id'], set_like=True) + some_lex = correct_answer['words'][0] + print('some lex:', some_lex) + ge_ids = None + ge = None + tag = None + for entry in some_lex['lexical_entry']['contains']: + if 'word' not in entry['content']: + ge_ids = {'client_id': entry['client_id'],'object_id': entry['object_id']} + ge = entry + tag = entry['content'] + if not ge_ids: + self.assertEqual('Error:', 'No tag') + combined_words = list() + for lex in 
correct_answer['words']: + for entry in lex['lexical_entry']['contains']: + if entry['content'] == tag: + combined_words.append({'client_id': lex['lexical_entry']['client_id'], + 'object_id': lex['lexical_entry']['object_id']}) + + response = self.app.get('/group_entity/%s/%s' % (ge_ids['client_id'], ge_ids['object_id'])) + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = {'entity_type': ge['entity_type'], 'tag': tag, 'connections':combined_words} + # TODO: find what's wrong with tags + # print(response.json) + # print(correct_answer) + # + # print('wtf?') + # print(ge) + # self.assertDictEqual(response.json, correct_answer, set_like=True) + + # ____ + # ____ + + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/all_count' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) + correct_answer = {'count': 42} + self.assertEqual(response.status_int, HTTPOk.code) + self.assertDictEqual(response.json, correct_answer) + response = self.app.get('/dictionary/%s/%s/perspective/%s/%s/all' + % (dict_ids['client_id'], + dict_ids['object_id'], + persp_ids['client_id'], + persp_ids['object_id'])) + + self.assertEqual(response.status_int, HTTPOk.code) + # self.assertDictEqual(response.json, correct_answer) # TODO: change correct answer and uncomment + # print(response.json) + + + + # response = self.app.post_json('/dictionary/%s/%s/roles' % (dict_ids['client_id'],dict_ids['object_id'])) + # self.assertEqual(response.status_int, HTTPOk.code) + # correct_answer = {'roles_users': + # {'Can resign users from perspective editors': [user_id], + # 'Can create perspectives': [user_id], + # 'Can merge dictionaries and perspectives': [user_id], + # 'Can delete dictionary': [user_id], + # 'Can create dictionary roles and assign collaborators': [user_id], + # 'Can get dictionary role list': [user_id], + # 'Can edit dictionary options': [user_id]}, + # 'roles_organizations': + # {'Can resign users from perspective editors': [], + # 'Can create perspectives': [], + # 'Can merge dictionaries and perspectives': [], + # 'Can delete dictionary': [], + # 'Can create dictionary roles and assign collaborators': [], + # 'Can get dictionary role list': [], + # 'Can edit dictionary options': []}} + # self.assertDictEqual(response.json, correct_answer) # _________________________________________________________________________ # _________________________________________________________________________ - # Tests on deleting part + # Tests on delete # _________________________________________________________________________ lang_ids = create_language(self, 'test_lang_del') @@ -485,7 +905,7 @@ def one_big_test(self): self.assertEqual(response.status_int, HTTPNotFound.code) # _________________________________________________________________________ - # test logout (TODO: add tests on protections here) + # test logout create_language(self, 'test_logout') response = self.app.post('/logout') @@ -494,6 +914,45 @@ def one_big_test(self): status=HTTPForbidden.code) self.assertEqual(response.status_int, HTTPForbidden.code) + def test_dict_lang_tree(self): + id = signup_common(self) + login_common(self) + root_ids = create_language(self,'Корень') + first_child = create_language(self,'Первый ребенок', root_ids) + second_child = create_language(self,'Второй ребенок', root_ids) + dict_root = create_dictionary(self, 'Словарь корня', root_ids, 'Published') + dict_first = create_dictionary(self, 'Словарь первого ребенка', first_child, 'Published') + dict_second = 
create_dictionary(self, 'Словарь второго ребенка', second_child, 'Published') + persp_root = create_perspective(self, 'Root Perspective', dict_root, 'Published') + persp_first = create_perspective(self, '1st Perspective', dict_first, 'Published') + persp_second = create_perspective(self, '2nd Perspective', dict_second, 'Published') + empty_lang = create_language(self, 'Пустой язык', first_child) + many_dicts_lang = create_language(self, 'Язык с многими словарями', empty_lang) + complete_emptyness = create_language(self, 'Абсолютная пустота', second_child) + many_dicts = list() + for i in range(10): + many_dicts += [create_dictionary(self, 'Словарь №%s' % i, many_dicts_lang, 'Published')] + many_persps = list() + i = 0 + for dict_ids in many_dicts: + i += 1 + many_persps += [create_perspective(self, 'Перспектива №%s' % i, dict_ids, 'Published')] + response = self.app.post_json('/published_dictionaries', params = {}) + self.assertEqual(response.status_int, HTTPOk.code) + # TODO: change from numbers to ids, returned in previous responses. + correct_answer = {'dictionaries': [{'object_id': 1, 'parent_client_id': 5, 'translation': 'Словарь корня', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь корня', 'client_id': 5, 'parent_object_id': 1}, {'object_id': 2, 'parent_client_id': 5, 'translation': 'Словарь первого ребенка', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь первого ребенка', 'client_id': 5, 'parent_object_id': 2}, {'object_id': 3, 'parent_client_id': 5, 'translation': 'Словарь второго ребенка', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь второго ребенка', 'client_id': 5, 'parent_object_id': 3}, {'object_id': 4, 'parent_client_id': 5, 'translation': 'Словарь №0', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №0', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 5, 'parent_client_id': 5, 'translation': 'Словарь №1', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №1', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 6, 'parent_client_id': 5, 'translation': 'Словарь №2', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №2', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 7, 'parent_client_id': 5, 'translation': 'Словарь №3', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №3', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 8, 'parent_client_id': 5, 'translation': 'Словарь №4', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №4', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 9, 'parent_client_id': 5, 'translation': 'Словарь №5', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №5', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 10, 'parent_client_id': 5, 'translation': 'Словарь №6', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №6', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 11, 'parent_client_id': 5, 'translation': 'Словарь №7', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №7', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 12, 'parent_client_id': 5, 'translation': 'Словарь №8', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №8', 'client_id': 5, 'parent_object_id': 5}, {'object_id': 13, 'parent_client_id': 5, 
'translation': 'Словарь №9', 'status': 'Published', 'additional_metadata': None, 'translation_string': 'Словарь №9', 'client_id': 5, 'parent_object_id': 5}]} + self.assertDictEqual(response.json, correct_answer) + response = self.app.post_json('/published_dictionaries', params = {'group_by_lang':True}) + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = [{'translation_string': 'Корень', 'client_id': 5, 'locale_exist': False, 'contains': [{'translation_string': 'Первый ребенок', 'client_id': 5, 'locale_exist': False, 'contains': [{'translation_string': 'Пустой язык', 'client_id': 5, 'locale_exist': False, 'contains': [{'translation_string': 'Язык с многими словарями', 'dicts': [{'parent_object_id': 5, 'translation_string': 'Словарь №0', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №0', 'object_id': 4, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №1', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №1', 'object_id': 5, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №2', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №2', 'object_id': 6, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №3', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №3', 'object_id': 7, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №4', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №4', 'object_id': 8, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №5', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №5', 'object_id': 9, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №6', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №6', 'object_id': 10, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №7', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №7', 'object_id': 11, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №8', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №8', 'object_id': 12, 'status': 'Published'}, {'parent_object_id': 5, 'translation_string': 'Словарь №9', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь №9', 'object_id': 13, 'status': 'Published'}], 'client_id': 5, 'translation': 'Язык с многими словарями', 'object_id': 5, 'locale_exist': False}], 'translation': 'Пустой язык', 'object_id': 4, 'dicts': []}], 'translation': 'Первый ребенок', 'object_id': 2, 'dicts': [{'parent_object_id': 2, 'translation_string': 'Словарь первого ребенка', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь первого ребенка', 'object_id': 2, 'status': 'Published'}]}, {'translation_string': 'Второй ребенок', 'dicts': [{'parent_object_id': 3, 'translation_string': 'Словарь второго ребенка', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь второго ребенка', 'object_id': 3, 'status': 'Published'}], 'client_id': 5, 'translation': 'Второй ребенок', 'object_id': 3, 'locale_exist': False}], 'translation': 'Корень', 'object_id': 1, 'dicts': 
[{'parent_object_id': 1, 'translation_string': 'Словарь корня', 'client_id': 5, 'additional_metadata': None, 'parent_client_id': 5, 'translation': 'Словарь корня', 'object_id': 1, 'status': 'Published'}]}] + self.assertListEqual(response.json, correct_answer) + response = self.app.get('/perspectives') + self.assertEqual(response.status_int, HTTPOk.code) + correct_answer = {'perspectives': [{'is_template': True, 'translation': 'Lingvodoc desktop version', 'parent_client_id': 1, 'translation_string': 'Lingvodoc desktop version', 'additional_metadata': None, 'status': 'Service', 'object_id': 1, 'client_id': 1, 'parent_object_id': 1, 'marked_for_deletion': False}, {'is_template': True, 'translation': 'Regular dictionary', 'parent_client_id': 1, 'translation_string': 'Regular dictionary', 'additional_metadata': None, 'status': 'Service', 'object_id': 2, 'client_id': 1, 'parent_object_id': 1, 'marked_for_deletion': False}, {'is_template': True, 'translation': 'Morhological dictionary', 'parent_client_id': 1, 'translation_string': 'Morhological dictionary', 'additional_metadata': None, 'status': 'Service', 'object_id': 3, 'client_id': 1, 'parent_object_id': 1, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Root Perspective', 'parent_client_id': 5, 'translation_string': 'Root Perspective', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 1, 'client_id': 5, 'parent_object_id': 1, 'marked_for_deletion': False}, {'is_template': False, 'translation': '1st Perspective', 'parent_client_id': 5, 'translation_string': '1st Perspective', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 2, 'client_id': 5, 'parent_object_id': 2, 'marked_for_deletion': False}, {'is_template': False, 'translation': '2nd Perspective', 'parent_client_id': 5, 'translation_string': '2nd Perspective', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 3, 'client_id': 5, 'parent_object_id': 3, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №1', 'parent_client_id': 5, 'translation_string': 'Перспектива №1', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 4, 'client_id': 5, 'parent_object_id': 4, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №2', 'parent_client_id': 5, 'translation_string': 'Перспектива №2', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 5, 'client_id': 5, 'parent_object_id': 5, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №3', 'parent_client_id': 5, 'translation_string': 'Перспектива №3', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 6, 'client_id': 5, 'parent_object_id': 6, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №4', 'parent_client_id': 5, 'translation_string': 'Перспектива №4', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 7, 'client_id': 5, 'parent_object_id': 7, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №5', 'parent_client_id': 5, 'translation_string': 'Перспектива №5', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 8, 'client_id': 5, 'parent_object_id': 8, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №6', 'parent_client_id': 5, 'translation_string': 'Перспектива №6', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 9, 'client_id': 5, 'parent_object_id': 9, 'marked_for_deletion': False}, {'is_template': False, 
'translation': 'Перспектива №7', 'parent_client_id': 5, 'translation_string': 'Перспектива №7', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 10, 'client_id': 5, 'parent_object_id': 10, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №8', 'parent_client_id': 5, 'translation_string': 'Перспектива №8', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 11, 'client_id': 5, 'parent_object_id': 11, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №9', 'parent_client_id': 5, 'translation_string': 'Перспектива №9', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 12, 'client_id': 5, 'parent_object_id': 12, 'marked_for_deletion': False}, {'is_template': False, 'translation': 'Перспектива №10', 'parent_client_id': 5, 'translation_string': 'Перспектива №10', 'additional_metadata': '{}', 'status': 'Published', 'object_id': 13, 'client_id': 5, 'parent_object_id': 13, 'marked_for_deletion': False}]} + self.assertDictEqual(response.json, correct_answer) + # print(response.json) + + class TestFuncs(unittest.TestCase): def test_dict_diff_empty(self): @@ -534,3 +993,9 @@ def test_dict_diff_not_eq_2(self): 'translation_string': 'test_child', 'parent_object_id': 1} self.assertNotEqual(dict_diff(d1, d2), True) + + def test_dict_diff_set_like(self): + d1 = {'words': [{'lexical_entry': {'came_from': None, 'object_id': 2, 'parent_client_id': 5, 'client_id': 11, 'level': 'lexicalentry', 'contains': [{'marked_for_deletion': False, 'published': False, 'parent_object_id': 2, 'locale_id': None, 'parent_client_id': 11, 'contains': None, 'entity_type': 'Etymology', 'level': 'groupingentity', 'object_id': 1, 'additional_metadata': None, 'content': 'Wed Feb 10 14:26:14 2016G6599C1A8X', 'client_id': 11}, {'marked_for_deletion': False, 'published': False, 'parent_object_id': 2, 'locale_id': 1, 'parent_client_id': 11, 'contains': None, 'entity_type': 'Word', 'level': 'leveloneentity', 'object_id': 2, 'additional_metadata': None, 'content': "grouping word {'object_id': 2, 'client_id': 11}", 'client_id': 11}], 'published': False, 'marked_for_deletion': False, 'parent_object_id': 1}}, {'lexical_entry': {'came_from': None, 'object_id': 3, 'parent_client_id': 5, 'client_id': 11, 'level': 'lexicalentry', 'contains': [{'marked_for_deletion': False, 'published': False, 'parent_object_id': 3, 'locale_id': None, 'parent_client_id': 11, 'contains': None, 'entity_type': 'Etymology', 'level': 'groupingentity', 'object_id': 2, 'additional_metadata': None, 'content': 'Wed Feb 10 14:26:14 2016G6599C1A8X', 'client_id': 11}, {'marked_for_deletion': False, 'published': False, 'parent_object_id': 3, 'locale_id': 1, 'parent_client_id': 11, 'contains': None, 'entity_type': 'Word', 'level': 'leveloneentity', 'object_id': 3, 'additional_metadata': None, 'content': "grouping word {'object_id': 3, 'client_id': 11}", 'client_id': 11}], 'published': False, 'marked_for_deletion': False, 'parent_object_id': 1}}]} + d2 = {'words': [{'lexical_entry': {'came_from': None, 'marked_for_deletion': False, 'parent_client_id': 5, 'parent_object_id': 1, 'level': 'lexicalentry', 'object_id': 2, 'published': False, 'contains': [{'contains': None, 'published': False, 'parent_object_id': 2, 'marked_for_deletion': False, 'locale_id': 1, 'parent_client_id': 11, 'entity_type': 'Word', 'level': 'leveloneentity', 'object_id': 2, 'additional_metadata': None, 'content': "grouping word {'object_id': 2, 'client_id': 11}", 'client_id': 11}, {'contains': None, 
'published': False, 'parent_object_id': 2, 'marked_for_deletion': False, 'locale_id': None, 'parent_client_id': 11, 'entity_type': 'Etymology', 'level': 'groupingentity', 'object_id': 1, 'additional_metadata': None, 'content': 'Wed Feb 10 13:50:08 2016MNAZGRV22A', 'client_id': 11}], 'client_id': 11}}, {'lexical_entry': {'came_from': None, 'marked_for_deletion': False, 'parent_client_id': 5, 'parent_object_id': 1, 'level': 'lexicalentry', 'object_id': 3, 'published': False, 'contains': [{'contains': None, 'published': False, 'parent_object_id': 3, 'marked_for_deletion': False, 'locale_id': 1, 'parent_client_id': 11, 'entity_type': 'Word', 'level': 'leveloneentity', 'object_id': 3, 'additional_metadata': None, 'content': "grouping word {'object_id': 3, 'client_id': 11}", 'client_id': 11}, {'contains': None, 'published': False, 'parent_object_id': 3, 'marked_for_deletion': False, 'locale_id': None, 'parent_client_id': 11, 'entity_type': 'Etymology', 'level': 'groupingentity', 'object_id': 2, 'additional_metadata': None, 'content': 'Wed Feb 10 13:50:08 2016MNAZGRV22A', 'client_id': 11}], 'client_id': 11}}]} + + self.assertEqual(dict_diff(d1, d2, stop_words=['content'], set_like=True), True)
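
The set_like assertions above depend on the dict_diff helper from lingvodoc's test utilities. As a point of reference only, a minimal sketch of a comparator with the behaviour these tests exercise (values of keys listed in stop_words are ignored, and lists are compared as unordered collections when set_like=True) could look like the code below; the name, signature, and True-on-match return convention are taken from the calls in the tests, but the real helper may well be implemented differently.

def dict_diff(d1, d2, stop_words=None, set_like=False):
    """Hypothetical sketch: return True when d1 and d2 match, skipping the
    values of keys named in stop_words and, when set_like is True, treating
    lists as unordered (the behaviour the assertions above rely on)."""
    stop_words = stop_words or []

    def match(a, b):
        if isinstance(a, dict) and isinstance(b, dict):
            # Both sides must have the same keys; stop-word keys must exist
            # on both sides but their values are not compared.
            if set(a) != set(b):
                return False
            return all(k in stop_words or match(a[k], b[k]) for k in a)
        if isinstance(a, list) and isinstance(b, list):
            if len(a) != len(b):
                return False
            if not set_like:
                return all(match(x, y) for x, y in zip(a, b))
            # set_like: every element of a must match a distinct element of b,
            # regardless of order.
            unmatched = list(b)
            for x in a:
                for i, y in enumerate(unmatched):
                    if match(x, y):
                        del unmatched[i]
                        break
                else:
                    return False
            return True
        return a == b

    return match(d1, d2)

Under this sketch, the d1/d2 pair in test_dict_diff_set_like compares equal because the two entries in each 'contains' list differ only in their order and in the ignored 'content' timestamps.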