diff --git a/CHANGELOG b/CHANGELOG index 9997466d0..f6329e6b2 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,21 @@ +## 2.9.12 + +- Tornado HTTPS support, thanks Diego +- Modular DAL, thanks Giovanni +- Added coverage support, thanks Niphlod +- More tests, thanks Niphlod and Paolo Valleri +- Added support for show_if in readonly sqlform, thanks Paolo +- Improved scheduler, thanks Niphlod +- Email timeout support +- Made web2py's custom_import work with circular imports, thanks Jack Kuan +- Added Portuguese, Catalan, and Burmese translations +- Allow map_hyphen to work for application names, thanks Tim Nyborg +- New module appconfig.py, thanks Niphlod +- Added geospatial support to Teradata adaptor, thanks Andrew Willimott +- Many bug fixes + + + ## 2.9.6 - 2.9.10 - fixed support of GAE + SQL diff --git a/Makefile b/Makefile index 14ff4d921..42f7375b8 100644 --- a/Makefile +++ b/Makefile @@ -37,7 +37,7 @@ update: echo "remember that pymysql was tweaked" src: ### Use semantic versioning - echo 'Version 2.9.12-beta+timestamp.'`date +%Y.%m.%d.%H.%M.%S` > VERSION + echo 'Version 2.9.12-stable+timestamp.'`date +%Y.%m.%d.%H.%M.%S` > VERSION ### rm -f all junk files make clean ### clean up baisc apps diff --git a/VERSION b/VERSION index 3374f8b21..3af04ccf0 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -Version 2.9.12-beta+timestamp.2015.01.15.09.58.09 +Version 2.9.12-stable+timestamp.2015.01.17.00.07.04 diff --git a/applications/admin/controllers/default.py b/applications/admin/controllers/default.py index 547d6c947..4623c517c 100644 --- a/applications/admin/controllers/default.py +++ b/applications/admin/controllers/default.py @@ -589,7 +589,7 @@ def edit(): if 'settings' in request.vars: if request.post_vars: #save new preferences post_vars = request.post_vars.items() - # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings + # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings post_vars+= [(opt, 'false') for opt in preferences if opt not in request.post_vars ] if config.save(post_vars): response.headers["web2py-component-flash"] = T('Preferences saved correctly') @@ -775,12 +775,12 @@ def edit(): view_link=view_link, editviewlinks=editviewlinks, id=IS_SLUG()(filename)[0], - force= True if (request.vars.restore or + force= True if (request.vars.restore or request.vars.revert) else False) plain_html = response.render('default/edit_js.html', file_details) file_details['plain_html'] = plain_html if is_mobile: - return response.render('default.mobile/edit.html', + return response.render('default.mobile/edit.html', file_details, editor_settings=preferences) else: return response.json(file_details) @@ -1278,7 +1278,7 @@ def create_file(): path = abspath(request.vars.location) else: if request.vars.dir: - request.vars.location += request.vars.dir + '/' + request.vars.location += request.vars.dir + '/' app = get_app(name=request.vars.location.split('/')[0]) path = apath(request.vars.location, r=request) filename = re.sub('[^\w./-]+', '_', request.vars.filename) @@ -1387,7 +1387,7 @@ def create_file(): from gluon import *\n""")[1:] elif (path[-8:] == '/static/') or (path[-9:] == '/private/'): - if (request.vars.plugin and + if (request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin)): filename = 'plugin_%s/%s' % (request.vars.plugin, filename) text = '' @@ -1434,37 +1434,37 @@ def create_file(): """ % URL('edit', 
args=[app,request.vars.dir,filename]) return '' else: - redirect(request.vars.sender + anchor) + redirect(request.vars.sender + anchor) def listfiles(app, dir, regexp='.*\.py$'): - files = sorted( + files = sorted( listdir(apath('%(app)s/%(dir)s/' % {'app':app, 'dir':dir}, r=request), regexp)) - files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')] - return files - + files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')] + return files + def editfile(path,file,vars={}, app = None): - args=(path,file) if 'app' in vars else (app,path,file) - url = URL('edit', args=args, vars=vars) - return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;') - + args=(path,file) if 'app' in vars else (app,path,file) + url = URL('edit', args=args, vars=vars) + return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;') + def files_menu(): - app = request.vars.app or 'welcome' - dirs=[{'name':'models', 'reg':'.*\.py$'}, + app = request.vars.app or 'welcome' + dirs=[{'name':'models', 'reg':'.*\.py$'}, {'name':'controllers', 'reg':'.*\.py$'}, {'name':'views', 'reg':'[\w/\-]+(\.\w+)+$'}, {'name':'modules', 'reg':'.*\.py$'}, {'name':'static', 'reg': '[^\.#].*'}, {'name':'private', 'reg':'.*\.py$'}] - result_files = [] - for dir in dirs: - result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"), - LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.','__')), app), _style="overflow:hidden", _id=dir['name']+"__"+f.replace('.','__')) - for f in listfiles(app, dir['name'], regexp=dir['reg'])], - _class="nav nav-list small-font"), - _id=dir['name'] + '_files', _style="display: none;"))) - return dict(result_files = result_files) - + result_files = [] + for dir in dirs: + result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"), + LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.','__')), app), _style="overflow:hidden", _id=dir['name']+"__"+f.replace('.','__')) + for f in listfiles(app, dir['name'], regexp=dir['reg'])], + _class="nav nav-list small-font"), + _id=dir['name'] + '_files', _style="display: none;"))) + return dict(result_files = result_files) + def upload_file(): """ File uploading handler """ if request.vars and not request.vars.token == session.token: @@ -1941,4 +1941,3 @@ def install_plugin(): T('unable to install plugin "%s"', filename) redirect(URL(f="plugins", args=[app,])) return dict(form=form, app=app, plugin=plugin, source=source) - diff --git a/applications/admin/languages/cs.py b/applications/admin/languages/cs.py index 9c8c7b63a..1b9481efe 100644 --- a/applications/admin/languages/cs.py +++ b/applications/admin/languages/cs.py @@ -1,480 +1,480 @@ -# -*- coding: utf-8 -*- -{ -'!langcode!': 'cs-cz', -'!langname!': 'čeština', -'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': 'Kolonka "Upravit" je nepovinný výraz, například "pole1=\'nováhodnota\'". Výsledky databázového JOINu nemůžete mazat ani upravovat.', -'"User Exception" debug mode. An error ticket could be issued!': '"User Exception" debug mode. 
An error ticket could be issued!', -'%%{Row} in Table': '%%{řádek} v tabulce', -'%%{Row} selected': 'označených %%{řádek}', -'%s %%{row} deleted': '%s smazaných %%{záznam}', -'%s %%{row} updated': '%s upravených %%{záznam}', -'%s selected': '%s označených', -'%Y-%m-%d': '%d.%m.%Y', -'%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S', -'(requires internet access)': '(vyžaduje připojení k internetu)', -'(requires internet access, experimental)': '(requires internet access, experimental)', -'(something like "it-it")': '(například "cs-cs")', -'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)', -'@markmin\x01Searching: **%s** %%{file}': 'Hledání: **%s** %%{soubor}', -'About': 'O programu', -'About application': 'O aplikaci', -'Access Control': 'Řízení přístupu', -'Add breakpoint': 'Přidat bod přerušení', -'Additional code for your application': 'Další kód pro Vaši aplikaci', -'Admin design page': 'Admin design page', -'Admin language': 'jazyk rozhraní', -'Administrative interface': 'pro administrátorské rozhraní klikněte sem', -'Administrative Interface': 'Administrátorské rozhraní', -'administrative interface': 'rozhraní pro správu', -'Administrator Password:': 'Administrátorské heslo:', -'Ajax Recipes': 'Recepty s ajaxem', -'An error occured, please %s the page': 'An error occured, please %s the page', -'and rename it:': 'a přejmenovat na:', -'appadmin': 'appadmin', -'appadmin is disabled because insecure channel': 'appadmin je zakázaná bez zabezpečeného spojení', -'Application': 'Application', -'application "%s" uninstalled': 'application "%s" odinstalována', -'application compiled': 'aplikace zkompilována', -'Application name:': 'Název aplikace:', -'are not used': 'nepoužita', -'are not used yet': 'ještě nepoužita', -'Are you sure you want to delete this object?': 'Opravdu chcete odstranit tento objekt?', -'Are you sure you want to uninstall application "%s"?': 'Opravdu chcete odinstalovat aplikaci "%s"?', -'arguments': 'arguments', -'at char %s': 'at char %s', -'at line %s': 'at line %s', -'ATTENTION:': 'ATTENTION:', -'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.', -'Available Databases and Tables': 'Dostupné databáze a tabulky', -'back': 'zpět', -'Back to wizard': 'Back to wizard', -'Basics': 'Basics', -'Begin': 'Začít', -'breakpoint': 'bod přerušení', -'Breakpoints': 'Body přerušení', -'breakpoints': 'body přerušení', -'Buy this book': 'Koupit web2py knihu', -'Cache': 'Cache', -'cache': 'cache', -'Cache Keys': 'Klíče cache', -'cache, errors and sessions cleaned': 'cache, chyby a relace byly pročištěny', -'can be a git repo': 'může to být git repo', -'Cancel': 'Storno', -'Cannot be empty': 'Nemůže být prázdné', -'Change Admin Password': 'Změnit heslo pro správu', -'Change admin password': 'Změnit heslo pro správu aplikací', -'Change password': 'Změna hesla', -'check all': 'vše označit', -'Check for upgrades': 'Zkusit aktualizovat', -'Check to delete': 'Označit ke smazání', -'Check to delete:': 'Označit ke smazání:', -'Checking for upgrades...': 'Zjišťuji, zda jsou k dispozici aktualizace...', -'Clean': 'Pročistit', -'Clear CACHE?': 'Vymazat CACHE?', -'Clear DISK': 'Vymazat DISK', -'Clear RAM': 'Vymazat RAM', -'Click row to expand traceback': 'Pro rozbalení stopy, klikněte na řádek', -'Click row to view a ticket': 'Pro zobrazení chyby (ticketu), klikněte na řádku...', -'Client IP': 'IP adresa 
klienta', -'code': 'code', -'Code listing': 'Code listing', -'collapse/expand all': 'vše sbalit/rozbalit', -'Community': 'Komunita', -'Compile': 'Zkompilovat', -'compiled application removed': 'zkompilovaná aplikace smazána', -'Components and Plugins': 'Komponenty a zásuvné moduly', -'Condition': 'Podmínka', -'continue': 'continue', -'Controller': 'Kontrolér (Controller)', -'Controllers': 'Kontroléry', -'controllers': 'kontroléry', -'Copyright': 'Copyright', -'Count': 'Počet', -'Create': 'Vytvořit', -'create file with filename:': 'vytvořit soubor s názvem:', -'created by': 'vytvořil', -'Created By': 'Vytvořeno - kým', -'Created On': 'Vytvořeno - kdy', -'crontab': 'crontab', -'Current request': 'Aktuální požadavek', -'Current response': 'Aktuální odpověď', -'Current session': 'Aktuální relace', -'currently running': 'právě běží', -'currently saved or': 'uloženo nebo', -'customize me!': 'upravte mě!', -'data uploaded': 'data nahrána', -'Database': 'Rozhraní databáze', -'Database %s select': 'databáze %s výběr', -'Database administration': 'Database administration', -'database administration': 'správa databáze', -'Date and Time': 'Datum a čas', -'day': 'den', -'db': 'db', -'DB Model': 'Databázový model', -'Debug': 'Ladění', -'defines tables': 'defines tables', -'Delete': 'Smazat', -'delete': 'smazat', -'delete all checked': 'smazat vše označené', -'delete plugin': 'delete plugin', -'Delete this file (you will be asked to confirm deletion)': 'Smazat tento soubor (budete požádán o potvrzení mazání)', -'Delete:': 'Smazat:', -'deleted after first hit': 'smazat po prvním dosažení', -'Demo': 'Demo', -'Deploy': 'Nahrát', -'Deploy on Google App Engine': 'Nahrát na Google App Engine', -'Deploy to OpenShift': 'Nahrát na OpenShift', -'Deployment Recipes': 'Postupy pro deployment', -'Description': 'Popis', -'design': 'návrh', -'Detailed traceback description': 'Podrobný výpis prostředí', -'details': 'podrobnosti', -'direction: ltr': 'směr: ltr', -'Disable': 'Zablokovat', -'DISK': 'DISK', -'Disk Cache Keys': 'Klíče diskové cache', -'Disk Cleared': 'Disk smazán', -'docs': 'dokumentace', -'Documentation': 'Dokumentace', -"Don't know what to do?": 'Nevíte kudy kam?', -'done!': 'hotovo!', -'Download': 'Stáhnout', -'download layouts': 'stáhnout moduly rozvržení stránky', -'download plugins': 'stáhnout zásuvné moduly', -'E-mail': 'E-mail', -'Edit': 'Upravit', -'edit all': 'edit all', -'Edit application': 'Správa aplikace', -'edit controller': 'edit controller', -'Edit current record': 'Upravit aktuální záznam', -'Edit Profile': 'Upravit profil', -'edit views:': 'upravit pohled:', -'Editing file "%s"': 'Úprava souboru "%s"', -'Editing Language file': 'Úprava jazykového souboru', -'Editing Plural Forms File': 'Editing Plural Forms File', -'Email and SMS': 'Email a SMS', -'Enable': 'Odblokovat', -'enter a number between %(min)g and %(max)g': 'zadejte číslo mezi %(min)g a %(max)g', -'enter an integer between %(min)g and %(max)g': 'zadejte celé číslo mezi %(min)g a %(max)g', -'Error': 'Chyba', -'Error logs for "%(app)s"': 'Seznam výskytu chyb pro aplikaci "%(app)s"', -'Error snapshot': 'Snapshot chyby', -'Error ticket': 'Ticket chyby', -'Errors': 'Chyby', -'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s', -'Exception %s': 'Exception %s', -'Exception instance attributes': 'Prvky instance výjimky', -'Expand Abbreviation': 'Expand Abbreviation', -'export as csv file': 'exportovat do .csv souboru', -'exposes': 'vystavuje', -'exposes:': 'vystavuje funkce:', -'extends': 'rozšiřuje', -'failed to 
compile file because:': 'soubor se nepodařilo zkompilovat, protože:', -'FAQ': 'Často kladené dotazy', -'File': 'Soubor', -'file': 'soubor', -'file "%(filename)s" created': 'file "%(filename)s" created', -'file saved on %(time)s': 'soubor uložen %(time)s', -'file saved on %s': 'soubor uložen %s', -'Filename': 'Název souboru', -'filter': 'filtr', -'Find Next': 'Najít další', -'Find Previous': 'Najít předchozí', -'First name': 'Křestní jméno', -'Forgot username?': 'Zapomněl jste svoje přihlašovací jméno?', -'forgot username?': 'zapomněl jste svoje přihlašovací jméno?', -'Forms and Validators': 'Formuláře a validátory', -'Frames': 'Frames', -'Free Applications': 'Aplikace zdarma', -'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.', -'Generate': 'Vytvořit', -'Get from URL:': 'Stáhnout z internetu:', -'Git Pull': 'Git Pull', -'Git Push': 'Git Push', -'Globals##debug': 'Globální proměnné', -'go!': 'OK!', -'Goto': 'Goto', -'graph model': 'graph model', -'Group %(group_id)s created': 'Skupina %(group_id)s vytvořena', -'Group ID': 'ID skupiny', -'Groups': 'Skupiny', -'Hello World': 'Ahoj světe', -'Help': 'Nápověda', -'Hide/Show Translated strings': 'Skrýt/Zobrazit přeložené texty', -'Hits': 'Kolikrát dosaženo', -'Home': 'Domovská stránka', -'honored only if the expression evaluates to true': 'brát v potaz jen když se tato podmínka vyhodnotí kladně', -'How did you get here?': 'Jak jste se sem vlastně dostal?', -'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download', -'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. 
In this case test results are not shown.', -'import': 'import', -'Import/Export': 'Import/Export', -'includes': 'zahrnuje', -'Index': 'Index', -'insert new': 'vložit nový záznam ', -'insert new %s': 'vložit nový záznam %s', -'inspect attributes': 'inspect attributes', -'Install': 'Instalovat', -'Installed applications': 'Nainstalované aplikace', -'Interaction at %s line %s': 'Interakce v %s, na řádce %s', -'Interactive console': 'Interaktivní příkazová řádka', -'Internal State': 'Vnitřní stav', -'Introduction': 'Úvod', -'Invalid email': 'Neplatný email', -'Invalid password': 'Nesprávné heslo', -'invalid password.': 'neplatné heslo', -'Invalid Query': 'Neplatný dotaz', -'invalid request': 'Neplatný požadavek', -'Is Active': 'Je aktivní', -'It is %s %%{day} today.': 'Dnes je to %s %%{den}.', -'Key': 'Klíč', -'Key bindings': 'Vazby klíčů', -'Key bindings for ZenCoding Plugin': 'Key bindings for ZenCoding Plugin', -'languages': 'jazyky', -'Languages': 'Jazyky', -'Last name': 'Příjmení', -'Last saved on:': 'Naposledy uloženo:', -'Layout': 'Rozvržení stránky (layout)', -'Layout Plugins': 'Moduly rozvržení stránky (Layout Plugins)', -'Layouts': 'Rozvržení stránek', -'License for': 'Licence pro', -'Line number': 'Číslo řádku', -'LineNo': 'Č.řádku', -'Live Chat': 'Online pokec', -'loading...': 'nahrávám...', -'locals': 'locals', -'Locals##debug': 'Lokální proměnné', -'Logged in': 'Přihlášení proběhlo úspěšně', -'Logged out': 'Odhlášení proběhlo úspěšně', -'Login': 'Přihlásit se', -'login': 'přihlásit se', -'Login to the Administrative Interface': 'Přihlásit se do Správce aplikací', -'logout': 'odhlásit se', -'Logout': 'Odhlásit se', -'Lost Password': 'Zapomněl jste heslo', -'Lost password?': 'Zapomněl jste heslo?', -'lost password?': 'zapomněl jste heslo?', -'Manage': 'Manage', -'Manage Cache': 'Manage Cache', -'Menu Model': 'Model rozbalovací nabídky', -'Models': 'Modely', -'models': 'modely', -'Modified By': 'Změněno - kým', -'Modified On': 'Změněno - kdy', -'Modules': 'Moduly', -'modules': 'moduly', -'My Sites': 'Správa aplikací', -'Name': 'Jméno', -'new application "%s" created': 'nová aplikace "%s" vytvořena', -'New Application Wizard': 'Nový průvodce aplikací', -'New application wizard': 'Nový průvodce aplikací', -'New password': 'Nové heslo', -'New Record': 'Nový záznam', -'new record inserted': 'nový záznam byl založen', -'New simple application': 'Vytvořit primitivní aplikaci', -'next': 'next', -'next 100 rows': 'dalších 100 řádků', -'No databases in this application': 'V této aplikaci nejsou žádné databáze', -'No Interaction yet': 'Ještě žádná interakce nenastala', -'No ticket_storage.txt found under /private folder': 'Soubor ticket_storage.txt v adresáři /private nenalezen', -'Object or table name': 'Objekt či tabulka', -'Old password': 'Původní heslo', -'online designer': 'online návrhář', -'Online examples': 'Příklady online', -'Open new app in new window': 'Open new app in new window', -'or alternatively': 'or alternatively', -'Or Get from URL:': 'Or Get from URL:', -'or import from csv file': 'nebo importovat z .csv souboru', -'Origin': 'Původ', -'Original/Translation': 'Originál/Překlad', -'Other Plugins': 'Ostatní moduly', -'Other Recipes': 'Ostatní zásuvné moduly', -'Overview': 'Přehled', -'Overwrite installed app': 'Přepsat instalovanou aplikaci', -'Pack all': 'Zabalit', -'Pack compiled': 'Zabalit zkompilované', -'pack plugin': 'pack plugin', -'password': 'heslo', -'Password': 'Heslo', -"Password fields don't match": 'Hesla se neshodují', -'Peeking at file': 'Peeking at file', 
-'Please': 'Prosím', -'Plugin "%s" in application': 'Plugin "%s" in application', -'plugins': 'zásuvné moduly', -'Plugins': 'Zásuvné moduly', -'Plural Form #%s': 'Plural Form #%s', -'Plural-Forms:': 'Množná čísla:', -'Powered by': 'Poháněno', -'Preface': 'Předmluva', -'previous 100 rows': 'předchozích 100 řádků', -'Private files': 'Soukromé soubory', -'private files': 'soukromé soubory', -'profile': 'profil', -'Project Progress': 'Vývoj projektu', -'Python': 'Python', -'Query:': 'Dotaz:', -'Quick Examples': 'Krátké příklady', -'RAM': 'RAM', -'RAM Cache Keys': 'Klíče RAM Cache', -'Ram Cleared': 'RAM smazána', -'Readme': 'Nápověda', -'Recipes': 'Postupy jak na to', -'Record': 'Záznam', -'record does not exist': 'záznam neexistuje', -'Record ID': 'ID záznamu', -'Record id': 'id záznamu', -'refresh': 'obnovte', -'register': 'registrovat', -'Register': 'Zaregistrovat se', -'Registration identifier': 'Registrační identifikátor', -'Registration key': 'Registrační klíč', -'reload': 'reload', -'Reload routes': 'Znovu nahrát cesty', -'Remember me (for 30 days)': 'Zapamatovat na 30 dní', -'Remove compiled': 'Odstranit zkompilované', -'Removed Breakpoint on %s at line %s': 'Bod přerušení smazán - soubor %s na řádce %s', -'Replace': 'Zaměnit', -'Replace All': 'Zaměnit vše', -'request': 'request', -'Reset Password key': 'Reset registračního klíče', -'response': 'response', -'restart': 'restart', -'restore': 'obnovit', -'Retrieve username': 'Získat přihlašovací jméno', -'return': 'return', -'revert': 'vrátit se k původnímu', -'Role': 'Role', -'Rows in Table': 'Záznamy v tabulce', -'Rows selected': 'Záznamů zobrazeno', -'rules are not defined': 'pravidla nejsou definována', -"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Spustí testy v tomto souboru (ke spuštění všech testů, použijte tlačítko 'test')", -'Running on %s': 'Běží na %s', -'Save': 'Uložit', -'Save file:': 'Save file:', -'Save via Ajax': 'Uložit pomocí Ajaxu', -'Saved file hash:': 'hash uloženého souboru:', -'Semantic': 'Modul semantic', -'Services': 'Služby', -'session': 'session', -'session expired': 'session expired', -'Set Breakpoint on %s at line %s: %s': 'Bod přerušení nastaven v souboru %s na řádce %s: %s', -'shell': 'příkazová řádka', -'Singular Form': 'Singular Form', -'Site': 'Správa aplikací', -'Size of cache:': 'Velikost cache:', -'skip to generate': 'skip to generate', -'Sorry, could not find mercurial installed': 'Bohužel mercurial není nainstalován.', -'Start a new app': 'Vytvořit novou aplikaci', -'Start searching': 'Začít hledání', -'Start wizard': 'Spustit průvodce', -'state': 'stav', -'Static': 'Static', -'static': 'statické soubory', -'Static files': 'Statické soubory', -'Statistics': 'Statistika', -'Step': 'Step', -'step': 'step', -'stop': 'stop', -'Stylesheet': 'CSS styly', -'submit': 'odeslat', -'Submit': 'Odeslat', -'successful': 'úspěšně', -'Support': 'Podpora', -'Sure you want to delete this object?': 'Opravdu chcete smazat tento objekt?', -'Table': 'tabulka', -'Table name': 'Název tabulky', -'Temporary': 'Dočasný', -'test': 'test', -'Testing application': 'Testing application', -'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Dotaz" je podmínka, například "db.tabulka1.pole1==\'hodnota\'". 
Podmínka "db.tabulka1.pole1==db.tabulka2.pole2" pak vytvoří SQL JOIN.', -'The application logic, each URL path is mapped in one exposed function in the controller': 'Logika aplikace: každá URL je mapována na funkci vystavovanou kontrolérem.', -'The Core': 'Jádro (The Core)', -'The data representation, define database tables and sets': 'Reprezentace dat: definovat tabulky databáze a záznamy', -'The output of the file is a dictionary that was rendered by the view %s': 'Výstup ze souboru je slovník, který se zobrazil v pohledu %s.', -'The presentations layer, views are also known as templates': 'Prezentační vrstva: pohledy či templaty (šablony)', -'The Views': 'Pohledy (The Views)', -'There are no controllers': 'There are no controllers', -'There are no modules': 'There are no modules', -'There are no plugins': 'Žádné moduly nejsou instalovány.', -'There are no private files': 'Žádné soukromé soubory neexistují.', -'There are no static files': 'There are no static files', -'There are no translators, only default language is supported': 'There are no translators, only default language is supported', -'There are no views': 'There are no views', -'These files are not served, they are only available from within your app': 'Tyto soubory jsou klientům nepřístupné. K dispozici jsou pouze v rámci aplikace.', -'These files are served without processing, your images go here': 'Tyto soubory jsou servírovány bez přídavné logiky, sem patří např. obrázky.', -'This App': 'Tato aplikace', -'This is a copy of the scaffolding application': 'Toto je kopie aplikace skelet.', -'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk', -'This is the %(filename)s template': 'This is the %(filename)s template', -'this page to see if a breakpoint was hit and debug interaction is required.': 'tuto stránku, abyste uviděli, zda se dosáhlo bodu přerušení.', -'Ticket': 'Ticket', -'Ticket ID': 'Ticket ID', -'Time in Cache (h:m:s)': 'Čas v Cache (h:m:s)', -'Timestamp': 'Časové razítko', -'to previous version.': 'k předchozí verzi.', -'To create a plugin, name a file/folder plugin_[name]': 'Zásuvný modul vytvoříte tak, že pojmenujete soubor/adresář plugin_[jméno modulu]', -'To emulate a breakpoint programatically, write:': 'K nastavení bodu přerušení v kódu programu, napište:', -'to use the debugger!': ', abyste mohli ladící program používat!', -'toggle breakpoint': 'vyp./zap. 
bod přerušení', -'Toggle Fullscreen': 'Na celou obrazovku a zpět', -'too short': 'Příliš krátké', -'Traceback': 'Traceback', -'Translation strings for the application': 'Překlad textů pro aplikaci', -'try something like': 'try something like', -'Try the mobile interface': 'Zkuste rozhraní pro mobilní zařízení', -'try view': 'try view', -'Twitter': 'Twitter', -'Type python statement in here and hit Return (Enter) to execute it.': 'Type python statement in here and hit Return (Enter) to execute it.', -'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.', -'Unable to check for upgrades': 'Unable to check for upgrades', -'unable to parse csv file': 'csv soubor nedá sa zpracovat', -'uncheck all': 'vše odznačit', -'Uninstall': 'Odinstalovat', -'update': 'aktualizovat', -'update all languages': 'aktualizovat všechny jazyky', -'Update:': 'Upravit:', -'Upgrade': 'Upgrade', -'upgrade now': 'upgrade now', -'upgrade now to %s': 'upgrade now to %s', -'upload': 'nahrát', -'Upload': 'Upload', -'Upload a package:': 'Nahrát balík:', -'Upload and install packed application': 'Nahrát a instalovat zabalenou aplikaci', -'upload file:': 'nahrát soubor:', -'upload plugin file:': 'nahrát soubor modulu:', -'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Použijte (...)&(...) pro AND, (...)|(...) pro OR a ~(...) pro NOT pro sestavení složitějších dotazů.', -'User %(id)s Logged-in': 'Uživatel %(id)s přihlášen', -'User %(id)s Logged-out': 'Uživatel %(id)s odhlášen', -'User %(id)s Password changed': 'Uživatel %(id)s změnil heslo', -'User %(id)s Profile updated': 'Uživatel %(id)s upravil profil', -'User %(id)s Registered': 'Uživatel %(id)s se zaregistroval', -'User %(id)s Username retrieved': 'Uživatel %(id)s si nachal zaslat přihlašovací jméno', -'User ID': 'ID uživatele', -'Username': 'Přihlašovací jméno', -'variables': 'variables', -'Verify Password': 'Zopakujte heslo', -'Version': 'Verze', -'Version %s.%s.%s (%s) %s': 'Verze %s.%s.%s (%s) %s', -'Versioning': 'Verzování', -'Videos': 'Videa', -'View': 'Pohled (View)', -'Views': 'Pohledy', -'views': 'pohledy', -'Web Framework': 'Web Framework', -'web2py is up to date': 'Máte aktuální verzi web2py.', -'web2py online debugger': 'Ladící online web2py program', -'web2py Recent Tweets': 'Štěbetání na Twitteru o web2py', -'web2py upgrade': 'web2py upgrade', -'web2py upgraded; please restart it': 'web2py upgraded; please restart it', -'Welcome': 'Vítejte', -'Welcome to web2py': 'Vitejte ve web2py', -'Welcome to web2py!': 'Vítejte ve web2py!', -'Which called the function %s located in the file %s': 'která zavolala funkci %s v souboru (kontroléru) %s.', -'You are successfully running web2py': 'Úspěšně jste spustili web2py.', -'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'Nastavovat a mazat body přerušení je též možno v rámci editování zdrojového souboru přes tlačítko Vyp./Zap. 
bod přerušení', -'You can modify this application and adapt it to your needs': 'Tuto aplikaci si můžete upravit a přizpůsobit ji svým potřebám.', -'You need to set up and reach a': 'Je třeba nejprve nastavit a dojít až na', -'You visited the url %s': 'Navštívili jste stránku %s,', -'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Aplikace bude blokována než se klikne na jedno z tlačítek (další, krok, pokračovat, atd.)', -'You can inspect variables using the console bellow': 'Níže pomocí příkazové řádky si můžete prohlédnout proměnné', -} +# -*- coding: utf-8 -*- +{ +'!langcode!': 'cs-cz', +'!langname!': 'čeština', +'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': 'Kolonka "Upravit" je nepovinný výraz, například "pole1=\'nováhodnota\'". Výsledky databázového JOINu nemůžete mazat ani upravovat.', +'"User Exception" debug mode. An error ticket could be issued!': '"User Exception" debug mode. An error ticket could be issued!', +'%%{Row} in Table': '%%{řádek} v tabulce', +'%%{Row} selected': 'označených %%{řádek}', +'%s %%{row} deleted': '%s smazaných %%{záznam}', +'%s %%{row} updated': '%s upravených %%{záznam}', +'%s selected': '%s označených', +'%Y-%m-%d': '%d.%m.%Y', +'%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S', +'(requires internet access)': '(vyžaduje připojení k internetu)', +'(requires internet access, experimental)': '(requires internet access, experimental)', +'(something like "it-it")': '(například "cs-cs")', +'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)', +'@markmin\x01Searching: **%s** %%{file}': 'Hledání: **%s** %%{soubor}', +'About': 'O programu', +'About application': 'O aplikaci', +'Access Control': 'Řízení přístupu', +'Add breakpoint': 'Přidat bod přerušení', +'Additional code for your application': 'Další kód pro Vaši aplikaci', +'Admin design page': 'Admin design page', +'Admin language': 'jazyk rozhraní', +'Administrative interface': 'pro administrátorské rozhraní klikněte sem', +'Administrative Interface': 'Administrátorské rozhraní', +'administrative interface': 'rozhraní pro správu', +'Administrator Password:': 'Administrátorské heslo:', +'Ajax Recipes': 'Recepty s ajaxem', +'An error occured, please %s the page': 'An error occured, please %s the page', +'and rename it:': 'a přejmenovat na:', +'appadmin': 'appadmin', +'appadmin is disabled because insecure channel': 'appadmin je zakázaná bez zabezpečeného spojení', +'Application': 'Application', +'application "%s" uninstalled': 'application "%s" odinstalována', +'application compiled': 'aplikace zkompilována', +'Application name:': 'Název aplikace:', +'are not used': 'nepoužita', +'are not used yet': 'ještě nepoužita', +'Are you sure you want to delete this object?': 'Opravdu chcete odstranit tento objekt?', +'Are you sure you want to uninstall application "%s"?': 'Opravdu chcete odinstalovat aplikaci "%s"?', +'arguments': 'arguments', +'at char %s': 'at char %s', +'at line %s': 'at line %s', +'ATTENTION:': 'ATTENTION:', +'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.', +'Available Databases and Tables': 'Dostupné databáze a tabulky', +'back': 'zpět', +'Back to wizard': 'Back to wizard', +'Basics': 'Basics', +'Begin': 'Začít', +'breakpoint': 'bod přerušení', +'Breakpoints': 'Body přerušení', 
+'breakpoints': 'body přerušení', +'Buy this book': 'Koupit web2py knihu', +'Cache': 'Cache', +'cache': 'cache', +'Cache Keys': 'Klíče cache', +'cache, errors and sessions cleaned': 'cache, chyby a relace byly pročištěny', +'can be a git repo': 'může to být git repo', +'Cancel': 'Storno', +'Cannot be empty': 'Nemůže být prázdné', +'Change Admin Password': 'Změnit heslo pro správu', +'Change admin password': 'Změnit heslo pro správu aplikací', +'Change password': 'Změna hesla', +'check all': 'vše označit', +'Check for upgrades': 'Zkusit aktualizovat', +'Check to delete': 'Označit ke smazání', +'Check to delete:': 'Označit ke smazání:', +'Checking for upgrades...': 'Zjišťuji, zda jsou k dispozici aktualizace...', +'Clean': 'Pročistit', +'Clear CACHE?': 'Vymazat CACHE?', +'Clear DISK': 'Vymazat DISK', +'Clear RAM': 'Vymazat RAM', +'Click row to expand traceback': 'Pro rozbalení stopy, klikněte na řádek', +'Click row to view a ticket': 'Pro zobrazení chyby (ticketu), klikněte na řádku...', +'Client IP': 'IP adresa klienta', +'code': 'code', +'Code listing': 'Code listing', +'collapse/expand all': 'vše sbalit/rozbalit', +'Community': 'Komunita', +'Compile': 'Zkompilovat', +'compiled application removed': 'zkompilovaná aplikace smazána', +'Components and Plugins': 'Komponenty a zásuvné moduly', +'Condition': 'Podmínka', +'continue': 'continue', +'Controller': 'Kontrolér (Controller)', +'Controllers': 'Kontroléry', +'controllers': 'kontroléry', +'Copyright': 'Copyright', +'Count': 'Počet', +'Create': 'Vytvořit', +'create file with filename:': 'vytvořit soubor s názvem:', +'created by': 'vytvořil', +'Created By': 'Vytvořeno - kým', +'Created On': 'Vytvořeno - kdy', +'crontab': 'crontab', +'Current request': 'Aktuální požadavek', +'Current response': 'Aktuální odpověď', +'Current session': 'Aktuální relace', +'currently running': 'právě běží', +'currently saved or': 'uloženo nebo', +'customize me!': 'upravte mě!', +'data uploaded': 'data nahrána', +'Database': 'Rozhraní databáze', +'Database %s select': 'databáze %s výběr', +'Database administration': 'Database administration', +'database administration': 'správa databáze', +'Date and Time': 'Datum a čas', +'day': 'den', +'db': 'db', +'DB Model': 'Databázový model', +'Debug': 'Ladění', +'defines tables': 'defines tables', +'Delete': 'Smazat', +'delete': 'smazat', +'delete all checked': 'smazat vše označené', +'delete plugin': 'delete plugin', +'Delete this file (you will be asked to confirm deletion)': 'Smazat tento soubor (budete požádán o potvrzení mazání)', +'Delete:': 'Smazat:', +'deleted after first hit': 'smazat po prvním dosažení', +'Demo': 'Demo', +'Deploy': 'Nahrát', +'Deploy on Google App Engine': 'Nahrát na Google App Engine', +'Deploy to OpenShift': 'Nahrát na OpenShift', +'Deployment Recipes': 'Postupy pro deployment', +'Description': 'Popis', +'design': 'návrh', +'Detailed traceback description': 'Podrobný výpis prostředí', +'details': 'podrobnosti', +'direction: ltr': 'směr: ltr', +'Disable': 'Zablokovat', +'DISK': 'DISK', +'Disk Cache Keys': 'Klíče diskové cache', +'Disk Cleared': 'Disk smazán', +'docs': 'dokumentace', +'Documentation': 'Dokumentace', +"Don't know what to do?": 'Nevíte kudy kam?', +'done!': 'hotovo!', +'Download': 'Stáhnout', +'download layouts': 'stáhnout moduly rozvržení stránky', +'download plugins': 'stáhnout zásuvné moduly', +'E-mail': 'E-mail', +'Edit': 'Upravit', +'edit all': 'edit all', +'Edit application': 'Správa aplikace', +'edit controller': 'edit controller', +'Edit current record': 'Upravit aktuální 
záznam', +'Edit Profile': 'Upravit profil', +'edit views:': 'upravit pohled:', +'Editing file "%s"': 'Úprava souboru "%s"', +'Editing Language file': 'Úprava jazykového souboru', +'Editing Plural Forms File': 'Editing Plural Forms File', +'Email and SMS': 'Email a SMS', +'Enable': 'Odblokovat', +'enter a number between %(min)g and %(max)g': 'zadejte číslo mezi %(min)g a %(max)g', +'enter an integer between %(min)g and %(max)g': 'zadejte celé číslo mezi %(min)g a %(max)g', +'Error': 'Chyba', +'Error logs for "%(app)s"': 'Seznam výskytu chyb pro aplikaci "%(app)s"', +'Error snapshot': 'Snapshot chyby', +'Error ticket': 'Ticket chyby', +'Errors': 'Chyby', +'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s', +'Exception %s': 'Exception %s', +'Exception instance attributes': 'Prvky instance výjimky', +'Expand Abbreviation': 'Expand Abbreviation', +'export as csv file': 'exportovat do .csv souboru', +'exposes': 'vystavuje', +'exposes:': 'vystavuje funkce:', +'extends': 'rozšiřuje', +'failed to compile file because:': 'soubor se nepodařilo zkompilovat, protože:', +'FAQ': 'Často kladené dotazy', +'File': 'Soubor', +'file': 'soubor', +'file "%(filename)s" created': 'file "%(filename)s" created', +'file saved on %(time)s': 'soubor uložen %(time)s', +'file saved on %s': 'soubor uložen %s', +'Filename': 'Název souboru', +'filter': 'filtr', +'Find Next': 'Najít další', +'Find Previous': 'Najít předchozí', +'First name': 'Křestní jméno', +'Forgot username?': 'Zapomněl jste svoje přihlašovací jméno?', +'forgot username?': 'zapomněl jste svoje přihlašovací jméno?', +'Forms and Validators': 'Formuláře a validátory', +'Frames': 'Frames', +'Free Applications': 'Aplikace zdarma', +'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.', +'Generate': 'Vytvořit', +'Get from URL:': 'Stáhnout z internetu:', +'Git Pull': 'Git Pull', +'Git Push': 'Git Push', +'Globals##debug': 'Globální proměnné', +'go!': 'OK!', +'Goto': 'Goto', +'graph model': 'graph model', +'Group %(group_id)s created': 'Skupina %(group_id)s vytvořena', +'Group ID': 'ID skupiny', +'Groups': 'Skupiny', +'Hello World': 'Ahoj světe', +'Help': 'Nápověda', +'Hide/Show Translated strings': 'Skrýt/Zobrazit přeložené texty', +'Hits': 'Kolikrát dosaženo', +'Home': 'Domovská stránka', +'honored only if the expression evaluates to true': 'brát v potaz jen když se tato podmínka vyhodnotí kladně', +'How did you get here?': 'Jak jste se sem vlastně dostal?', +'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download', +'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. 
In this case test results are not shown.', +'import': 'import', +'Import/Export': 'Import/Export', +'includes': 'zahrnuje', +'Index': 'Index', +'insert new': 'vložit nový záznam ', +'insert new %s': 'vložit nový záznam %s', +'inspect attributes': 'inspect attributes', +'Install': 'Instalovat', +'Installed applications': 'Nainstalované aplikace', +'Interaction at %s line %s': 'Interakce v %s, na řádce %s', +'Interactive console': 'Interaktivní příkazová řádka', +'Internal State': 'Vnitřní stav', +'Introduction': 'Úvod', +'Invalid email': 'Neplatný email', +'Invalid password': 'Nesprávné heslo', +'invalid password.': 'neplatné heslo', +'Invalid Query': 'Neplatný dotaz', +'invalid request': 'Neplatný požadavek', +'Is Active': 'Je aktivní', +'It is %s %%{day} today.': 'Dnes je to %s %%{den}.', +'Key': 'Klíč', +'Key bindings': 'Vazby klíčů', +'Key bindings for ZenCoding Plugin': 'Key bindings for ZenCoding Plugin', +'languages': 'jazyky', +'Languages': 'Jazyky', +'Last name': 'Příjmení', +'Last saved on:': 'Naposledy uloženo:', +'Layout': 'Rozvržení stránky (layout)', +'Layout Plugins': 'Moduly rozvržení stránky (Layout Plugins)', +'Layouts': 'Rozvržení stránek', +'License for': 'Licence pro', +'Line number': 'Číslo řádku', +'LineNo': 'Č.řádku', +'Live Chat': 'Online pokec', +'loading...': 'nahrávám...', +'locals': 'locals', +'Locals##debug': 'Lokální proměnné', +'Logged in': 'Přihlášení proběhlo úspěšně', +'Logged out': 'Odhlášení proběhlo úspěšně', +'Login': 'Přihlásit se', +'login': 'přihlásit se', +'Login to the Administrative Interface': 'Přihlásit se do Správce aplikací', +'logout': 'odhlásit se', +'Logout': 'Odhlásit se', +'Lost Password': 'Zapomněl jste heslo', +'Lost password?': 'Zapomněl jste heslo?', +'lost password?': 'zapomněl jste heslo?', +'Manage': 'Manage', +'Manage Cache': 'Manage Cache', +'Menu Model': 'Model rozbalovací nabídky', +'Models': 'Modely', +'models': 'modely', +'Modified By': 'Změněno - kým', +'Modified On': 'Změněno - kdy', +'Modules': 'Moduly', +'modules': 'moduly', +'My Sites': 'Správa aplikací', +'Name': 'Jméno', +'new application "%s" created': 'nová aplikace "%s" vytvořena', +'New Application Wizard': 'Nový průvodce aplikací', +'New application wizard': 'Nový průvodce aplikací', +'New password': 'Nové heslo', +'New Record': 'Nový záznam', +'new record inserted': 'nový záznam byl založen', +'New simple application': 'Vytvořit primitivní aplikaci', +'next': 'next', +'next 100 rows': 'dalších 100 řádků', +'No databases in this application': 'V této aplikaci nejsou žádné databáze', +'No Interaction yet': 'Ještě žádná interakce nenastala', +'No ticket_storage.txt found under /private folder': 'Soubor ticket_storage.txt v adresáři /private nenalezen', +'Object or table name': 'Objekt či tabulka', +'Old password': 'Původní heslo', +'online designer': 'online návrhář', +'Online examples': 'Příklady online', +'Open new app in new window': 'Open new app in new window', +'or alternatively': 'or alternatively', +'Or Get from URL:': 'Or Get from URL:', +'or import from csv file': 'nebo importovat z .csv souboru', +'Origin': 'Původ', +'Original/Translation': 'Originál/Překlad', +'Other Plugins': 'Ostatní moduly', +'Other Recipes': 'Ostatní zásuvné moduly', +'Overview': 'Přehled', +'Overwrite installed app': 'Přepsat instalovanou aplikaci', +'Pack all': 'Zabalit', +'Pack compiled': 'Zabalit zkompilované', +'pack plugin': 'pack plugin', +'password': 'heslo', +'Password': 'Heslo', +"Password fields don't match": 'Hesla se neshodují', +'Peeking at file': 'Peeking at file', 
+'Please': 'Prosím', +'Plugin "%s" in application': 'Plugin "%s" in application', +'plugins': 'zásuvné moduly', +'Plugins': 'Zásuvné moduly', +'Plural Form #%s': 'Plural Form #%s', +'Plural-Forms:': 'Množná čísla:', +'Powered by': 'Poháněno', +'Preface': 'Předmluva', +'previous 100 rows': 'předchozích 100 řádků', +'Private files': 'Soukromé soubory', +'private files': 'soukromé soubory', +'profile': 'profil', +'Project Progress': 'Vývoj projektu', +'Python': 'Python', +'Query:': 'Dotaz:', +'Quick Examples': 'Krátké příklady', +'RAM': 'RAM', +'RAM Cache Keys': 'Klíče RAM Cache', +'Ram Cleared': 'RAM smazána', +'Readme': 'Nápověda', +'Recipes': 'Postupy jak na to', +'Record': 'Záznam', +'record does not exist': 'záznam neexistuje', +'Record ID': 'ID záznamu', +'Record id': 'id záznamu', +'refresh': 'obnovte', +'register': 'registrovat', +'Register': 'Zaregistrovat se', +'Registration identifier': 'Registrační identifikátor', +'Registration key': 'Registrační klíč', +'reload': 'reload', +'Reload routes': 'Znovu nahrát cesty', +'Remember me (for 30 days)': 'Zapamatovat na 30 dní', +'Remove compiled': 'Odstranit zkompilované', +'Removed Breakpoint on %s at line %s': 'Bod přerušení smazán - soubor %s na řádce %s', +'Replace': 'Zaměnit', +'Replace All': 'Zaměnit vše', +'request': 'request', +'Reset Password key': 'Reset registračního klíče', +'response': 'response', +'restart': 'restart', +'restore': 'obnovit', +'Retrieve username': 'Získat přihlašovací jméno', +'return': 'return', +'revert': 'vrátit se k původnímu', +'Role': 'Role', +'Rows in Table': 'Záznamy v tabulce', +'Rows selected': 'Záznamů zobrazeno', +'rules are not defined': 'pravidla nejsou definována', +"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Spustí testy v tomto souboru (ke spuštění všech testů, použijte tlačítko 'test')", +'Running on %s': 'Běží na %s', +'Save': 'Uložit', +'Save file:': 'Save file:', +'Save via Ajax': 'Uložit pomocí Ajaxu', +'Saved file hash:': 'hash uloženého souboru:', +'Semantic': 'Modul semantic', +'Services': 'Služby', +'session': 'session', +'session expired': 'session expired', +'Set Breakpoint on %s at line %s: %s': 'Bod přerušení nastaven v souboru %s na řádce %s: %s', +'shell': 'příkazová řádka', +'Singular Form': 'Singular Form', +'Site': 'Správa aplikací', +'Size of cache:': 'Velikost cache:', +'skip to generate': 'skip to generate', +'Sorry, could not find mercurial installed': 'Bohužel mercurial není nainstalován.', +'Start a new app': 'Vytvořit novou aplikaci', +'Start searching': 'Začít hledání', +'Start wizard': 'Spustit průvodce', +'state': 'stav', +'Static': 'Static', +'static': 'statické soubory', +'Static files': 'Statické soubory', +'Statistics': 'Statistika', +'Step': 'Step', +'step': 'step', +'stop': 'stop', +'Stylesheet': 'CSS styly', +'submit': 'odeslat', +'Submit': 'Odeslat', +'successful': 'úspěšně', +'Support': 'Podpora', +'Sure you want to delete this object?': 'Opravdu chcete smazat tento objekt?', +'Table': 'tabulka', +'Table name': 'Název tabulky', +'Temporary': 'Dočasný', +'test': 'test', +'Testing application': 'Testing application', +'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Dotaz" je podmínka, například "db.tabulka1.pole1==\'hodnota\'". 
Podmínka "db.tabulka1.pole1==db.tabulka2.pole2" pak vytvoří SQL JOIN.', +'The application logic, each URL path is mapped in one exposed function in the controller': 'Logika aplikace: každá URL je mapována na funkci vystavovanou kontrolérem.', +'The Core': 'Jádro (The Core)', +'The data representation, define database tables and sets': 'Reprezentace dat: definovat tabulky databáze a záznamy', +'The output of the file is a dictionary that was rendered by the view %s': 'Výstup ze souboru je slovník, který se zobrazil v pohledu %s.', +'The presentations layer, views are also known as templates': 'Prezentační vrstva: pohledy či templaty (šablony)', +'The Views': 'Pohledy (The Views)', +'There are no controllers': 'There are no controllers', +'There are no modules': 'There are no modules', +'There are no plugins': 'Žádné moduly nejsou instalovány.', +'There are no private files': 'Žádné soukromé soubory neexistují.', +'There are no static files': 'There are no static files', +'There are no translators, only default language is supported': 'There are no translators, only default language is supported', +'There are no views': 'There are no views', +'These files are not served, they are only available from within your app': 'Tyto soubory jsou klientům nepřístupné. K dispozici jsou pouze v rámci aplikace.', +'These files are served without processing, your images go here': 'Tyto soubory jsou servírovány bez přídavné logiky, sem patří např. obrázky.', +'This App': 'Tato aplikace', +'This is a copy of the scaffolding application': 'Toto je kopie aplikace skelet.', +'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk', +'This is the %(filename)s template': 'This is the %(filename)s template', +'this page to see if a breakpoint was hit and debug interaction is required.': 'tuto stránku, abyste uviděli, zda se dosáhlo bodu přerušení.', +'Ticket': 'Ticket', +'Ticket ID': 'Ticket ID', +'Time in Cache (h:m:s)': 'Čas v Cache (h:m:s)', +'Timestamp': 'Časové razítko', +'to previous version.': 'k předchozí verzi.', +'To create a plugin, name a file/folder plugin_[name]': 'Zásuvný modul vytvoříte tak, že pojmenujete soubor/adresář plugin_[jméno modulu]', +'To emulate a breakpoint programatically, write:': 'K nastavení bodu přerušení v kódu programu, napište:', +'to use the debugger!': ', abyste mohli ladící program používat!', +'toggle breakpoint': 'vyp./zap. 
bod přerušení', +'Toggle Fullscreen': 'Na celou obrazovku a zpět', +'too short': 'Příliš krátké', +'Traceback': 'Traceback', +'Translation strings for the application': 'Překlad textů pro aplikaci', +'try something like': 'try something like', +'Try the mobile interface': 'Zkuste rozhraní pro mobilní zařízení', +'try view': 'try view', +'Twitter': 'Twitter', +'Type python statement in here and hit Return (Enter) to execute it.': 'Type python statement in here and hit Return (Enter) to execute it.', +'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.', +'Unable to check for upgrades': 'Unable to check for upgrades', +'unable to parse csv file': 'csv soubor nedá sa zpracovat', +'uncheck all': 'vše odznačit', +'Uninstall': 'Odinstalovat', +'update': 'aktualizovat', +'update all languages': 'aktualizovat všechny jazyky', +'Update:': 'Upravit:', +'Upgrade': 'Upgrade', +'upgrade now': 'upgrade now', +'upgrade now to %s': 'upgrade now to %s', +'upload': 'nahrát', +'Upload': 'Upload', +'Upload a package:': 'Nahrát balík:', +'Upload and install packed application': 'Nahrát a instalovat zabalenou aplikaci', +'upload file:': 'nahrát soubor:', +'upload plugin file:': 'nahrát soubor modulu:', +'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Použijte (...)&(...) pro AND, (...)|(...) pro OR a ~(...) pro NOT pro sestavení složitějších dotazů.', +'User %(id)s Logged-in': 'Uživatel %(id)s přihlášen', +'User %(id)s Logged-out': 'Uživatel %(id)s odhlášen', +'User %(id)s Password changed': 'Uživatel %(id)s změnil heslo', +'User %(id)s Profile updated': 'Uživatel %(id)s upravil profil', +'User %(id)s Registered': 'Uživatel %(id)s se zaregistroval', +'User %(id)s Username retrieved': 'Uživatel %(id)s si nachal zaslat přihlašovací jméno', +'User ID': 'ID uživatele', +'Username': 'Přihlašovací jméno', +'variables': 'variables', +'Verify Password': 'Zopakujte heslo', +'Version': 'Verze', +'Version %s.%s.%s (%s) %s': 'Verze %s.%s.%s (%s) %s', +'Versioning': 'Verzování', +'Videos': 'Videa', +'View': 'Pohled (View)', +'Views': 'Pohledy', +'views': 'pohledy', +'Web Framework': 'Web Framework', +'web2py is up to date': 'Máte aktuální verzi web2py.', +'web2py online debugger': 'Ladící online web2py program', +'web2py Recent Tweets': 'Štěbetání na Twitteru o web2py', +'web2py upgrade': 'web2py upgrade', +'web2py upgraded; please restart it': 'web2py upgraded; please restart it', +'Welcome': 'Vítejte', +'Welcome to web2py': 'Vitejte ve web2py', +'Welcome to web2py!': 'Vítejte ve web2py!', +'Which called the function %s located in the file %s': 'která zavolala funkci %s v souboru (kontroléru) %s.', +'You are successfully running web2py': 'Úspěšně jste spustili web2py.', +'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'Nastavovat a mazat body přerušení je též možno v rámci editování zdrojového souboru přes tlačítko Vyp./Zap. 
bod přerušení', +'You can modify this application and adapt it to your needs': 'Tuto aplikaci si můžete upravit a přizpůsobit ji svým potřebám.', +'You need to set up and reach a': 'Je třeba nejprve nastavit a dojít až na', +'You visited the url %s': 'Navštívili jste stránku %s,', +'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Aplikace bude blokována než se klikne na jedno z tlačítek (další, krok, pokračovat, atd.)', +'You can inspect variables using the console bellow': 'Níže pomocí příkazové řádky si můžete prohlédnout proměnné', +} diff --git a/applications/admin/models/0.py b/applications/admin/models/0.py index 9f0a1d5ac..adf95f7f4 100644 --- a/applications/admin/models/0.py +++ b/applications/admin/models/0.py @@ -47,5 +47,3 @@ #set static_version from gluon.settings import global_settings response.static_version = global_settings.web2py_version.split('-')[0] - - diff --git a/applications/admin/models/menu.py b/applications/admin/models/menu.py index 5534bcc91..f29f904c7 100644 --- a/applications/admin/models/menu.py +++ b/applications/admin/models/menu.py @@ -34,4 +34,3 @@ URL(_a, 'default', f='logout'))) response.menu.append((T('Debug'), False, URL(_a, 'debug', 'interact'))) - diff --git a/applications/admin/models/plugin_statebutton.py b/applications/admin/models/plugin_statebutton.py index a857c5b06..21d0377e7 100644 --- a/applications/admin/models/plugin_statebutton.py +++ b/applications/admin/models/plugin_statebutton.py @@ -7,11 +7,10 @@ def stateWidget(field, value, data={'on-label':'Enabled', 'off-label':'Disabled' except: fieldName = field - div = DIV(INPUT( _type='checkbox', _name='%s' % fieldName, _checked= 'checked' if value == 'true' else None, _value='true'), - _class='make-bootstrap-switch', + div = DIV(INPUT( _type='checkbox', _name='%s' % fieldName, _checked= 'checked' if value == 'true' else None, _value='true'), + _class='make-bootstrap-switch', data=data) script = SCRIPT(""" jQuery(".make-bootstrap-switch input[name='%s']").parent().bootstrapSwitch(); """ % fieldName) return DIV(div, script) - diff --git a/extras/build_web2py/setup_app.py b/extras/build_web2py/setup_app.py index aa68bbff0..929854186 100755 --- a/extras/build_web2py/setup_app.py +++ b/extras/build_web2py/setup_app.py @@ -25,9 +25,9 @@ import re import zipfile -#read web2py version from VERSION file +#read web2py version from VERSION file web2py_version_line = readlines_file('VERSION')[0] -#use regular expression to get just the version number +#use regular expression to get just the version number v_re = re.compile('[0-9]+\.[0-9]+\.[0-9]+') web2py_version = v_re.search(web2py_version_line).group(0) diff --git a/extras/build_web2py/setup_exe.py b/extras/build_web2py/setup_exe.py index 19b2b2fa4..ae0d00eec 100755 --- a/extras/build_web2py/setup_exe.py +++ b/extras/build_web2py/setup_exe.py @@ -1,8 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- - + #Adapted from http://bazaar.launchpad.net/~flavour/sahana-eden/trunk/view/head:/static/scripts/tools/standalone_exe.py - + USAGE = """ Usage: Copy this and setup_exe.conf to web2py root folder @@ -13,7 +13,7 @@ Install bbfreeze: https://pypi.python.org/pypi/bbfreeze/ run python setup_exe.py bbfreeze """ - + from distutils.core import setup from gluon.import_all import base_modules, contributed_modules from gluon.fileutils import readlines_file @@ -24,7 +24,7 @@ import sys import re import zipfile - + if len(sys.argv) != 2 or not os.path.isfile('web2py.py'): print USAGE sys.exit(1) @@ -32,11 +32,11 
@@ if not BUILD_MODE in ('py2exe', 'bbfreeze'): print USAGE sys.exit(1) - + def unzip(source_filename, dest_dir): with zipfile.ZipFile(source_filename) as zf: zf.extractall(dest_dir) - + #borrowed from http://bytes.com/topic/python/answers/851018-how-zip-directory-python-using-zipfile def recursive_zip(zipf, directory, folder=""): for item in os.listdir(directory): @@ -45,14 +45,14 @@ def recursive_zip(zipf, directory, folder=""): elif os.path.isdir(os.path.join(directory, item)): recursive_zip( zipf, os.path.join(directory, item), folder + os.sep + item) - - + + #read web2py version from VERSION file web2py_version_line = readlines_file('VERSION')[0] #use regular expression to get just the version number v_re = re.compile('[0-9]+\.[0-9]+\.[0-9]+') web2py_version = v_re.search(web2py_version_line).group(0) - + #pull in preferences from config file import ConfigParser Config = ConfigParser.ConfigParser() @@ -68,12 +68,12 @@ def recursive_zip(zipf, directory, folder=""): # Python base version python_version = sys.version_info[:3] - - - + + + if BUILD_MODE == 'py2exe': import py2exe - + setup( console=[{'script':'web2py.py', 'icon_resources': [(0, 'extras/icons/web2py.ico')] @@ -88,8 +88,8 @@ def recursive_zip(zipf, directory, folder=""): author="Massimo DiPierro", license="LGPL v3", data_files=[ - 'ABOUT', - 'LICENSE', + 'ABOUT', + 'LICENSE', 'VERSION' ], options={'py2exe': { @@ -108,7 +108,7 @@ def recursive_zip(zipf, directory, folder=""): zipl.close() shutil.rmtree(library_temp_dir) print "web2py binary successfully built" - + elif BUILD_MODE == 'bbfreeze': modules = base_modules + contributed_modules from bbfreeze import Freezer @@ -131,26 +131,26 @@ def recursive_zip(zipf, directory, folder=""): for req in ['ABOUT', 'LICENSE', 'VERSION']: shutil.copy(req, os.path.join('dist', req)) print "web2py binary successfully built" - + try: os.unlink('storage.sqlite') except: pass - -#This need to happen after bbfreeze is run because Freezer() deletes distdir before starting! + +#This need to happen after bbfreeze is run because Freezer() deletes distdir before starting! if python_version > (2,5): # Python26 compatibility: http://www.py2exe.org/index.cgi/Tutorial#Step52 try: shutil.copytree('C:\Bin\Microsoft.VC90.CRT', 'dist/Microsoft.VC90.CRT/') except: print "You MUST copy Microsoft.VC90.CRT folder into the archive" - + def copy_folders(source, destination): """Copy files & folders from source to destination (within dist/)""" if os.path.exists(os.path.join('dist', destination)): shutil.rmtree(os.path.join('dist', destination)) shutil.copytree(os.path.join(source), os.path.join('dist', destination)) - + #should we remove Windows OS dlls user is unlikely to be able to distribute if remove_msft_dlls: print "Deleted Microsoft files not licensed for open source distribution" @@ -166,7 +166,7 @@ def copy_folders(source, destination): os.unlink(os.path.join('dist', f)) except: print "unable to delete dist/" + f - + #Should we include applications? 
if copy_apps: copy_folders('applications', 'applications') @@ -177,12 +177,12 @@ def copy_folders(source, destination): copy_folders('applications/welcome', 'applications/welcome') copy_folders('applications/examples', 'applications/examples') print "Only web2py's admin, examples & welcome applications have been added" - + copy_folders('extras', 'extras') copy_folders('examples', 'examples') copy_folders('handlers', 'handlers') - - + + #should we copy project's site-packages into dist/site-packages if copy_site_packages: #copy site-packages @@ -190,7 +190,7 @@ def copy_folders(source, destination): else: #no worries, web2py will create the (empty) folder first run print "Skipping site-packages" - + #should we copy project's scripts into dist/scripts if copy_scripts: #copy scripts @@ -198,7 +198,7 @@ def copy_folders(source, destination): else: #no worries, web2py will create the (empty) folder first run print "Skipping scripts" - + #should we create a zip file of the build? if make_zip: #create a web2py folder & copy dist's files into it @@ -209,13 +209,13 @@ def copy_folders(source, destination): # just temp so the web2py directory is included in our zip file path = 'zip_temp' # leave the first folder as None, as path is root. - recursive_zip(zipf, path) + recursive_zip(zipf, path) zipf.close() shutil.rmtree('zip_temp') print "Your Windows binary version of web2py can be found in " + \ zip_filename + ".zip" print "You may extract the archive anywhere and then run web2py/web2py.exe" - + #should py2exe build files be removed? if remove_build_files: if BUILD_MODE == 'py2exe': @@ -223,10 +223,10 @@ def copy_folders(source, destination): shutil.rmtree('deposit') shutil.rmtree('dist') print "build files removed" - + #final info if not make_zip and not remove_build_files: print "Your Windows binary & associated files can also be found in /dist" - + print "Finished!" 
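# A minimal, self-contained sketch of the "make_zip" step above: walk a directory
# tree and add every file to a zip archive so the top-level folder is preserved
# inside the archive.  The paths ('dist', 'web2py.zip') are illustrative; the real
# script copies dist/ into a temporary 'zip_temp' folder and zips that instead.
import os
import zipfile

def zip_tree(src_dir, zip_path):
    zf = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED)
    base = os.path.dirname(os.path.abspath(src_dir))
    for dirpath, dirnames, filenames in os.walk(src_dir):
        for name in filenames:
            full = os.path.join(dirpath, name)
            # store paths relative to the parent of src_dir, e.g. 'dist/...'
            zf.write(full, os.path.relpath(full, base))
    zf.close()

# zip_tree('dist', 'web2py.zip')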
-print "Enjoy web2py " + web2py_version_line \ No newline at end of file +print "Enjoy web2py " + web2py_version_line diff --git a/gluon/__init__.py b/gluon/__init__.py index 457b061d7..d489d9238 100644 --- a/gluon/__init__.py +++ b/gluon/__init__.py @@ -11,7 +11,7 @@ """ __all__ = ['A', 'B', 'BEAUTIFY', 'BODY', 'BR', 'CAT', 'CENTER', 'CLEANUP', 'CODE', 'CRYPT', 'DAL', 'DIV', 'EM', 'EMBED', 'FIELDSET', 'FORM', 'Field', 'H1', 'H2', 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'HTTP', 'I', 'IFRAME', 'IMG', 'INPUT', 'IS_ALPHANUMERIC', 'IS_DATE', 'IS_DATETIME', 'IS_DATETIME_IN_RANGE', 'IS_DATE_IN_RANGE', 'IS_DECIMAL_IN_RANGE', 'IS_EMAIL', 'IS_LIST_OF_EMAILS', 'IS_EMPTY_OR', 'IS_EQUAL_TO', 'IS_EXPR', 'IS_FLOAT_IN_RANGE', 'IS_IMAGE', 'IS_JSON', 'IS_INT_IN_RANGE', 'IS_IN_DB', 'IS_IN_SET', 'IS_IPV4', 'IS_LENGTH', 'IS_LIST_OF', 'IS_LOWER', 'IS_MATCH', 'IS_NOT_EMPTY', 'IS_NOT_IN_DB', 'IS_NULL_OR', 'IS_SLUG', 'IS_STRONG', 'IS_TIME', 'IS_UPLOAD_FILENAME', 'IS_UPPER', 'IS_URL', 'LABEL', 'LEGEND', 'LI', 'LINK', 'LOAD', 'MARKMIN', 'MENU', 'META', 'OBJECT', 'OL', 'ON', 'OPTGROUP', 'OPTION', 'P', 'PRE', 'SCRIPT', 'SELECT', 'SPAN', 'SQLFORM', 'SQLTABLE', 'STRONG', 'STYLE', 'TABLE', 'TAG', 'TBODY', 'TD', 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'UL', 'URL', 'XHTML', 'XML', 'redirect', 'current', 'embed64'] - + from globals import current from html import * from validators import * diff --git a/gluon/cache.py b/gluon/cache.py index 3a03a19cc..671abe7e1 100644 --- a/gluon/cache.py +++ b/gluon/cache.py @@ -1,693 +1,693 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -""" -| This file is part of the web2py Web Framework -| Copyrighted by Massimo Di Pierro -| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html) - -Basic caching classes and methods ---------------------------------- - -- Cache - The generic caching object interfacing with the others -- CacheInRam - providing caching in ram -- CacheOnDisk - provides caches on disk - -Memcache is also available via a different module (see gluon.contrib.memcache) - -When web2py is running on Google App Engine, -caching will be provided by the GAE memcache -(see gluon.contrib.gae_memcache) -""" -import time -import thread -import os -import sys -import logging -import re -import hashlib -import datetime -import tempfile -from gluon import recfile -try: - from gluon import settings - have_settings = True -except ImportError: - have_settings = False - -try: - import cPickle as pickle -except: - import pickle - -logger = logging.getLogger("web2py.cache") - -__all__ = ['Cache', 'lazy_cache'] - - -DEFAULT_TIME_EXPIRE = 300 - - - -class CacheAbstract(object): - """ - Abstract class for cache implementations. - Main function just provides referenced api documentation. - - Use CacheInRam or CacheOnDisk instead which are derived from this class. - - Note: - Michele says: there are signatures inside gdbm files that are used - directly by the python gdbm adapter that often are lagging behind in the - detection code in python part. - On every occasion that a gdbm store is probed by the python adapter, - the probe fails, because gdbm file version is newer. - Using gdbm directly from C would work, because there is backward - compatibility, but not from python! - The .shelve file is discarded and a new one created (with new - signature) and it works until it is probed again... - The possible consequences are memory leaks and broken sessions. 
- """ - - cache_stats_name = 'web2py_cache_statistics' - - def __init__(self, request=None): - """Initializes the object - - Args: - request: the global request object - """ - raise NotImplementedError - - def __call__(self, key, f, - time_expire=DEFAULT_TIME_EXPIRE): - """ - Tries to retrieve the value corresponding to `key` from the cache if the - object exists and if it did not expire, else it calls the function `f` - and stores the output in the cache corresponding to `key`. It always - returns the function that is returned. - - Args: - key(str): the key of the object to be stored or retrieved - f(function): the function whose output is to be cached. - - If `f` is `None` the cache is cleared. - time_expire(int): expiration of the cache in seconds. - - It's used to compare the current time with the time - when the requested object was last saved in cache. It does not - affect future requests. Setting `time_expire` to 0 or negative - value forces the cache to refresh. - """ - raise NotImplementedError - - def clear(self, regex=None): - """ - Clears the cache of all keys that match the provided regular expression. - If no regular expression is provided, it clears all entries in cache. - - Args: - regex: if provided, only keys matching the regex will be cleared, - otherwise all keys are cleared. - """ - - raise NotImplementedError - - def increment(self, key, value=1): - """ - Increments the cached value for the given key by the amount in value - - Args: - key(str): key for the cached object to be incremeneted - value(int): amount of the increment (defaults to 1, can be negative) - """ - raise NotImplementedError - - def _clear(self, storage, regex): - """ - Auxiliary function called by `clear` to search and clear cache entries - """ - r = re.compile(regex) - for key in storage.keys(): - if r.match(str(key)): - del storage[key] - return - - -class CacheInRam(CacheAbstract): - """ - Ram based caching - - This is implemented as global (per process, shared by all threads) - dictionary. - A mutex-lock mechanism avoid conflicts. - """ - - locker = thread.allocate_lock() - meta_storage = {} - - def __init__(self, request=None): - self.initialized = False - self.request = request - self.storage = {} - - def initialize(self): - if self.initialized: - return - else: - self.initialized = True - self.locker.acquire() - request = self.request - if request: - app = request.application - else: - app = '' - if not app in self.meta_storage: - self.storage = self.meta_storage[app] = { - CacheAbstract.cache_stats_name: {'hit_total': 0, 'misses': 0}} - else: - self.storage = self.meta_storage[app] - self.locker.release() - - def clear(self, regex=None): - self.initialize() - self.locker.acquire() - storage = self.storage - if regex is None: - storage.clear() - else: - self._clear(storage, regex) - - if not CacheAbstract.cache_stats_name in storage.keys(): - storage[CacheAbstract.cache_stats_name] = { - 'hit_total': 0, 'misses': 0} - - self.locker.release() - - def __call__(self, key, f, - time_expire=DEFAULT_TIME_EXPIRE, - destroyer=None): - """ - Attention! cache.ram does not copy the cached object. - It just stores a reference to it. Turns out the deepcopying the object - has some problems: - - - would break backward compatibility - - would be limiting because people may want to cache live objects - - would work unless we deepcopy no storage and retrival which would make - things slow. - - Anyway. You can deepcopy explicitly in the function generating the value - to be cached. 
- """ - self.initialize() - - dt = time_expire - now = time.time() - - self.locker.acquire() - item = self.storage.get(key, None) - if item and f is None: - del self.storage[key] - if destroyer: - destroyer(item[1]) - self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1 - self.locker.release() - - if f is None: - return None - if item and (dt is None or item[0] > now - dt): - return item[1] - elif item and (item[0] < now - dt) and destroyer: - destroyer(item[1]) - value = f() - - self.locker.acquire() - self.storage[key] = (now, value) - self.storage[CacheAbstract.cache_stats_name]['misses'] += 1 - self.locker.release() - return value - - def increment(self, key, value=1): - self.initialize() - self.locker.acquire() - try: - if key in self.storage: - value = self.storage[key][1] + value - self.storage[key] = (time.time(), value) - except BaseException, e: - self.locker.release() - raise e - self.locker.release() - return value - - -class CacheOnDisk(CacheAbstract): - """ - Disk based cache - - This is implemented as a key value store where each key corresponds to a - single file in disk which is replaced when the value changes. - - Disk cache provides persistance when web2py is started/stopped but it is - slower than `CacheInRam` - - Values stored in disk cache must be pickable. - """ - - class PersistentStorage(object): - """ - Implements a key based storage in disk. - """ - - def __init__(self, folder): - self.folder = folder - self.key_filter_in = lambda key: key - self.key_filter_out = lambda key: key - # Check the best way to do atomic file replacement. - if sys.version_info >= (3, 3): - self.replace = os.replace - elif sys.platform == "win32": - import ctypes - from ctypes import wintypes - ReplaceFile = ctypes.windll.kernel32.ReplaceFileW - ReplaceFile.restype = wintypes.BOOL - ReplaceFile.argtypes = [ - wintypes.LPWSTR, - wintypes.LPWSTR, - wintypes.LPWSTR, - wintypes.DWORD, - wintypes.LPVOID, - wintypes.LPVOID, - ] - - def replace_windows(src, dst): - if not ReplaceFile(dst, src, None, 0, 0, 0): - os.rename(src, dst) - - self.replace = replace_windows - else: - # POSIX rename() is always atomic - self.replace = os.rename - - # Make sure we use valid filenames. - if sys.platform == "win32": - import base64 - def key_filter_in_windows(key): - """ - Windows doesn't allow \ / : * ? "< > | in filenames. - To go around this encode the keys with base32. - """ - return base64.b32encode(key) - - def key_filter_out_windows(key): - """ - We need to decode the keys so regex based removal works. 
- """ - return base64.b32decode(key) - - self.key_filter_in = key_filter_in_windows - self.key_filter_out = key_filter_out_windows - - - def __setitem__(self, key, value): - tmp_name, tmp_path = tempfile.mkstemp(dir=self.folder) - tmp = os.fdopen(tmp_name, 'wb') - try: - pickle.dump((time.time(), value), tmp, pickle.HIGHEST_PROTOCOL) - finally: - tmp.close() - key = self.key_filter_in(key) - fullfilename = os.path.join(self.folder, recfile.generate(key)) - if not os.path.exists(os.path.dirname(fullfilename)): - os.makedirs(os.path.dirname(fullfilename)) - self.replace(tmp_path, fullfilename) - - - def __getitem__(self, key): - key = self.key_filter_in(key) - if recfile.exists(key, path=self.folder): - timestamp, value = pickle.load(recfile.open(key, 'rb', path=self.folder)) - return value - else: - raise KeyError - - - def __contains__(self, key): - key = self.key_filter_in(key) - return recfile.exists(key, path=self.folder) - - - def __delitem__(self, key): - key = self.key_filter_in(key) - recfile.remove(key, path=self.folder) - - - def __iter__(self): - for dirpath, dirnames, filenames in os.walk(self.folder): - for filename in filenames: - yield self.key_filter_out(filename) - - - def keys(self): - return list(self.__iter__()) - - - def get(self, key, default=None): - try: - return self[key] - except KeyError: - return default - - - def clear(self): - for key in self: - del self[key] - - - def __init__(self, request=None, folder=None): - self.initialized = False - self.request = request - self.folder = folder - self.storage = None - - - def initialize(self): - if self.initialized: - return - else: - self.initialized = True - - folder = self.folder - request = self.request - - # Lets test if the cache folder exists, if not - # we are going to create it - folder = os.path.join(folder or request.folder, 'cache') - - if not os.path.exists(folder): - os.mkdir(folder) - - self.storage = CacheOnDisk.PersistentStorage(folder) - - if not CacheAbstract.cache_stats_name in self.storage: - self.storage[CacheAbstract.cache_stats_name] = {'hit_total': 0, 'misses': 0} - - - def __call__(self, key, f, - time_expire=DEFAULT_TIME_EXPIRE): - self.initialize() - - dt = time_expire - item = self.storage.get(key) - self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1 - - if item and f is None: - del self.storage[key] - - if f is None: - return None - - now = time.time() - - if item and ((dt is None) or (item[0] > now - dt)): - value = item[1] - else: - value = f() - self.storage[key] = (now, value) - self.storage[CacheAbstract.cache_stats_name]['misses'] += 1 - - return value - - - def clear(self, regex=None): - self.initialize() - storage = self.storage - if regex is None: - storage.clear() - else: - self._clear(storage, regex) - - if not CacheAbstract.cache_stats_name in storage: - storage[CacheAbstract.cache_stats_name] = { - 'hit_total': 0, 'misses': 0} - - - def increment(self, key, value=1): - self.initialize() - storage = self.storage - try: - if key in storage: - value = storage[key][1] + value - storage[key] = (time.time(), value) - except: - pass - return value - - - -class CacheAction(object): - def __init__(self, func, key, time_expire, cache, cache_model): - self.__name__ = func.__name__ - self.__doc__ = func.__doc__ - self.func = func - self.key = key - self.time_expire = time_expire - self.cache = cache - self.cache_model = cache_model - - def __call__(self, *a, **b): - if not self.key: - key2 = self.__name__ + ':' + repr(a) + ':' + repr(b) - else: - key2 = 
self.key.replace('%(name)s', self.__name__)\ - .replace('%(args)s', str(a)).replace('%(vars)s', str(b)) - cache_model = self.cache_model - if not cache_model or isinstance(cache_model, str): - cache_model = getattr(self.cache, cache_model or 'ram') - return cache_model(key2, - lambda a=a, b=b: self.func(*a, **b), - self.time_expire) - - -class Cache(object): - """ - Sets up generic caching, creating an instance of both CacheInRam and - CacheOnDisk. - In case of GAE will make use of gluon.contrib.gae_memcache. - - - self.ram is an instance of CacheInRam - - self.disk is an instance of CacheOnDisk - """ - - autokey = ':%(name)s:%(args)s:%(vars)s' - - def __init__(self, request): - """ - Args: - request: the global request object - """ - # GAE will have a special caching - if have_settings and settings.global_settings.web2py_runtime_gae: - from gluon.contrib.gae_memcache import MemcacheClient - self.ram = self.disk = MemcacheClient(request) - else: - # Otherwise use ram (and try also disk) - self.ram = CacheInRam(request) - try: - self.disk = CacheOnDisk(request) - except IOError: - logger.warning('no cache.disk (IOError)') - except AttributeError: - # normally not expected anymore, as GAE has already - # been accounted for - logger.warning('no cache.disk (AttributeError)') - - def action(self, time_expire=DEFAULT_TIME_EXPIRE, cache_model=None, - prefix=None, session=False, vars=True, lang=True, - user_agent=False, public=True, valid_statuses=None, - quick=None): - """Better fit for caching an action - - Warning: - Experimental! - - Currently only HTTP 1.1 compliant - reference : http://code.google.com/p/doctype-mirror/wiki/ArticleHttpCaching - - Args: - time_expire(int): same as @cache - cache_model(str): same as @cache - prefix(str): add a prefix to the calculated key - session(bool): adds response.session_id to the key - vars(bool): adds request.env.query_string - lang(bool): adds T.accepted_language - user_agent(bool or dict): if True, adds is_mobile and is_tablet to the key. - Pass a dict to use all the needed values (uses str(.items())) - (e.g. user_agent=request.user_agent()). Used only if session is - not True - public(bool): if False forces the Cache-Control to be 'private' - valid_statuses: by default only status codes starting with 1,2,3 will be cached. - pass an explicit list of statuses on which turn the cache on - quick: Session,Vars,Lang,User-agent,Public: - fast overrides with initials, e.g. 
'SVLP' or 'VLP', or 'VLP' - """ - from gluon import current - from gluon.http import HTTP - def wrap(func): - def wrapped_f(): - if current.request.env.request_method != 'GET': - return func() - if time_expire: - cache_control = 'max-age=%(time_expire)s, s-maxage=%(time_expire)s' % dict(time_expire=time_expire) - if quick: - session_ = True if 'S' in quick else False - vars_ = True if 'V' in quick else False - lang_ = True if 'L' in quick else False - user_agent_ = True if 'U' in quick else False - public_ = True if 'P' in quick else False - else: - session_, vars_, lang_, user_agent_, public_ = session, vars, lang, user_agent, public - if not session_ and public_: - cache_control += ', public' - expires = (current.request.utcnow + datetime.timedelta(seconds=time_expire)).strftime('%a, %d %b %Y %H:%M:%S GMT') - else: - cache_control += ', private' - expires = 'Fri, 01 Jan 1990 00:00:00 GMT' - if cache_model: - #figure out the correct cache key - cache_key = [current.request.env.path_info, current.response.view] - if session_: - cache_key.append(current.response.session_id) - elif user_agent_: - if user_agent_ is True: - cache_key.append("%(is_mobile)s_%(is_tablet)s" % current.request.user_agent()) - else: - cache_key.append(str(user_agent_.items())) - if vars_: - cache_key.append(current.request.env.query_string) - if lang_: - cache_key.append(current.T.accepted_language) - cache_key = hashlib.md5('__'.join(cache_key)).hexdigest() - if prefix: - cache_key = prefix + cache_key - try: - #action returns something - rtn = cache_model(cache_key, lambda : func(), time_expire=time_expire) - http, status = None, current.response.status - except HTTP, e: - #action raises HTTP (can still be valid) - rtn = cache_model(cache_key, lambda : e.body, time_expire=time_expire) - http, status = HTTP(e.status, rtn, **e.headers), e.status - else: - #action raised a generic exception - http = None - else: - #no server-cache side involved - try: - #action returns something - rtn = func() - http, status = None, current.response.status - except HTTP, e: - #action raises HTTP (can still be valid) - status = e.status - http = HTTP(e.status, e.body, **e.headers) - else: - #action raised a generic exception - http = None - send_headers = False - if http and isinstance(valid_statuses, list): - if status in valid_statuses: - send_headers = True - elif valid_statuses is None: - if str(status)[0] in '123': - send_headers = True - if send_headers: - headers = { - 'Pragma' : None, - 'Expires' : expires, - 'Cache-Control' : cache_control - } - current.response.headers.update(headers) - if cache_model and not send_headers: - #we cached already the value, but the status is not valid - #so we need to delete the cached value - cache_model(cache_key, None) - if http: - if send_headers: - http.headers.update(current.response.headers) - raise http - return rtn - wrapped_f.__name__ = func.__name__ - wrapped_f.__doc__ = func.__doc__ - return wrapped_f - return wrap - - def __call__(self, - key=None, - time_expire=DEFAULT_TIME_EXPIRE, - cache_model=None): - """ - Decorator function that can be used to cache any function/method. - - Args: - key(str) : the key of the object to be store or retrieved - time_expire(int) : expiration of the cache in seconds - `time_expire` is used to compare the current time with the time - when the requested object was last saved in cache. - It does not affect future requests. - Setting `time_expire` to 0 or negative value forces the cache to - refresh. 
- cache_model(str): can be "ram", "disk" or other (like "memcache"). - Defaults to "ram" - - When the function `f` is called, web2py tries to retrieve - the value corresponding to `key` from the cache if the - object exists and if it did not expire, else it calles the function `f` - and stores the output in the cache corresponding to `key`. In the case - the output of the function is returned. - - Example: :: - - @cache('key', 5000, cache.ram) - def f(): - return time.ctime() - - Note: - If the function `f` is an action, we suggest using - @cache.action instead - """ - - def tmp(func, cache=self, cache_model=cache_model): - return CacheAction(func, key, time_expire, self, cache_model) - return tmp - - @staticmethod - def with_prefix(cache_model, prefix): - """ - allow replacing cache.ram with cache.with_prefix(cache.ram,'prefix') - it will add prefix to all the cache keys used. - """ - return lambda key, f, time_expire=DEFAULT_TIME_EXPIRE, prefix=prefix:\ - cache_model(prefix + key, f, time_expire) - - -def lazy_cache(key=None, time_expire=None, cache_model='ram'): - """ - Can be used to cache any function including ones in modules, - as long as the cached function is only called within a web2py request - - If a key is not provided, one is generated from the function name - `time_expire` defaults to None (no cache expiration) - - If cache_model is "ram" then the model is current.cache.ram, etc. - """ - def decorator(f, key=key, time_expire=time_expire, cache_model=cache_model): - key = key or repr(f) - - def g(*c, **d): - from gluon import current - return current.cache(key, time_expire, cache_model)(f)(*c, **d) - g.__name__ = f.__name__ - return g - return decorator +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +| This file is part of the web2py Web Framework +| Copyrighted by Massimo Di Pierro +| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html) + +Basic caching classes and methods +--------------------------------- + +- Cache - The generic caching object interfacing with the others +- CacheInRam - providing caching in ram +- CacheOnDisk - provides caches on disk + +Memcache is also available via a different module (see gluon.contrib.memcache) + +When web2py is running on Google App Engine, +caching will be provided by the GAE memcache +(see gluon.contrib.gae_memcache) +""" +import time +import thread +import os +import sys +import logging +import re +import hashlib +import datetime +import tempfile +from gluon import recfile +try: + from gluon import settings + have_settings = True +except ImportError: + have_settings = False + +try: + import cPickle as pickle +except: + import pickle + +logger = logging.getLogger("web2py.cache") + +__all__ = ['Cache', 'lazy_cache'] + + +DEFAULT_TIME_EXPIRE = 300 + + + +class CacheAbstract(object): + """ + Abstract class for cache implementations. + Main function just provides referenced api documentation. + + Use CacheInRam or CacheOnDisk instead which are derived from this class. + + Note: + Michele says: there are signatures inside gdbm files that are used + directly by the python gdbm adapter that often are lagging behind in the + detection code in python part. + On every occasion that a gdbm store is probed by the python adapter, + the probe fails, because gdbm file version is newer. + Using gdbm directly from C would work, because there is backward + compatibility, but not from python! + The .shelve file is discarded and a new one created (with new + signature) and it works until it is probed again... 
+ The possible consequences are memory leaks and broken sessions. + """ + + cache_stats_name = 'web2py_cache_statistics' + + def __init__(self, request=None): + """Initializes the object + + Args: + request: the global request object + """ + raise NotImplementedError + + def __call__(self, key, f, + time_expire=DEFAULT_TIME_EXPIRE): + """ + Tries to retrieve the value corresponding to `key` from the cache if the + object exists and if it did not expire, else it calls the function `f` + and stores the output in the cache corresponding to `key`. It always + returns the function that is returned. + + Args: + key(str): the key of the object to be stored or retrieved + f(function): the function whose output is to be cached. + + If `f` is `None` the cache is cleared. + time_expire(int): expiration of the cache in seconds. + + It's used to compare the current time with the time + when the requested object was last saved in cache. It does not + affect future requests. Setting `time_expire` to 0 or negative + value forces the cache to refresh. + """ + raise NotImplementedError + + def clear(self, regex=None): + """ + Clears the cache of all keys that match the provided regular expression. + If no regular expression is provided, it clears all entries in cache. + + Args: + regex: if provided, only keys matching the regex will be cleared, + otherwise all keys are cleared. + """ + + raise NotImplementedError + + def increment(self, key, value=1): + """ + Increments the cached value for the given key by the amount in value + + Args: + key(str): key for the cached object to be incremeneted + value(int): amount of the increment (defaults to 1, can be negative) + """ + raise NotImplementedError + + def _clear(self, storage, regex): + """ + Auxiliary function called by `clear` to search and clear cache entries + """ + r = re.compile(regex) + for key in storage.keys(): + if r.match(str(key)): + del storage[key] + return + + +class CacheInRam(CacheAbstract): + """ + Ram based caching + + This is implemented as global (per process, shared by all threads) + dictionary. + A mutex-lock mechanism avoid conflicts. + """ + + locker = thread.allocate_lock() + meta_storage = {} + + def __init__(self, request=None): + self.initialized = False + self.request = request + self.storage = {} + + def initialize(self): + if self.initialized: + return + else: + self.initialized = True + self.locker.acquire() + request = self.request + if request: + app = request.application + else: + app = '' + if not app in self.meta_storage: + self.storage = self.meta_storage[app] = { + CacheAbstract.cache_stats_name: {'hit_total': 0, 'misses': 0}} + else: + self.storage = self.meta_storage[app] + self.locker.release() + + def clear(self, regex=None): + self.initialize() + self.locker.acquire() + storage = self.storage + if regex is None: + storage.clear() + else: + self._clear(storage, regex) + + if not CacheAbstract.cache_stats_name in storage.keys(): + storage[CacheAbstract.cache_stats_name] = { + 'hit_total': 0, 'misses': 0} + + self.locker.release() + + def __call__(self, key, f, + time_expire=DEFAULT_TIME_EXPIRE, + destroyer=None): + """ + Attention! cache.ram does not copy the cached object. + It just stores a reference to it. Turns out the deepcopying the object + has some problems: + + - would break backward compatibility + - would be limiting because people may want to cache live objects + - would work unless we deepcopy no storage and retrival which would make + things slow. + + Anyway. 
You can deepcopy explicitly in the function generating the value + to be cached. + """ + self.initialize() + + dt = time_expire + now = time.time() + + self.locker.acquire() + item = self.storage.get(key, None) + if item and f is None: + del self.storage[key] + if destroyer: + destroyer(item[1]) + self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1 + self.locker.release() + + if f is None: + return None + if item and (dt is None or item[0] > now - dt): + return item[1] + elif item and (item[0] < now - dt) and destroyer: + destroyer(item[1]) + value = f() + + self.locker.acquire() + self.storage[key] = (now, value) + self.storage[CacheAbstract.cache_stats_name]['misses'] += 1 + self.locker.release() + return value + + def increment(self, key, value=1): + self.initialize() + self.locker.acquire() + try: + if key in self.storage: + value = self.storage[key][1] + value + self.storage[key] = (time.time(), value) + except BaseException, e: + self.locker.release() + raise e + self.locker.release() + return value + + +class CacheOnDisk(CacheAbstract): + """ + Disk based cache + + This is implemented as a key value store where each key corresponds to a + single file in disk which is replaced when the value changes. + + Disk cache provides persistance when web2py is started/stopped but it is + slower than `CacheInRam` + + Values stored in disk cache must be pickable. + """ + + class PersistentStorage(object): + """ + Implements a key based storage in disk. + """ + + def __init__(self, folder): + self.folder = folder + self.key_filter_in = lambda key: key + self.key_filter_out = lambda key: key + # Check the best way to do atomic file replacement. + if sys.version_info >= (3, 3): + self.replace = os.replace + elif sys.platform == "win32": + import ctypes + from ctypes import wintypes + ReplaceFile = ctypes.windll.kernel32.ReplaceFileW + ReplaceFile.restype = wintypes.BOOL + ReplaceFile.argtypes = [ + wintypes.LPWSTR, + wintypes.LPWSTR, + wintypes.LPWSTR, + wintypes.DWORD, + wintypes.LPVOID, + wintypes.LPVOID, + ] + + def replace_windows(src, dst): + if not ReplaceFile(dst, src, None, 0, 0, 0): + os.rename(src, dst) + + self.replace = replace_windows + else: + # POSIX rename() is always atomic + self.replace = os.rename + + # Make sure we use valid filenames. + if sys.platform == "win32": + import base64 + def key_filter_in_windows(key): + """ + Windows doesn't allow \ / : * ? "< > | in filenames. + To go around this encode the keys with base32. + """ + return base64.b32encode(key) + + def key_filter_out_windows(key): + """ + We need to decode the keys so regex based removal works. 
+ """ + return base64.b32decode(key) + + self.key_filter_in = key_filter_in_windows + self.key_filter_out = key_filter_out_windows + + + def __setitem__(self, key, value): + tmp_name, tmp_path = tempfile.mkstemp(dir=self.folder) + tmp = os.fdopen(tmp_name, 'wb') + try: + pickle.dump((time.time(), value), tmp, pickle.HIGHEST_PROTOCOL) + finally: + tmp.close() + key = self.key_filter_in(key) + fullfilename = os.path.join(self.folder, recfile.generate(key)) + if not os.path.exists(os.path.dirname(fullfilename)): + os.makedirs(os.path.dirname(fullfilename)) + self.replace(tmp_path, fullfilename) + + + def __getitem__(self, key): + key = self.key_filter_in(key) + if recfile.exists(key, path=self.folder): + timestamp, value = pickle.load(recfile.open(key, 'rb', path=self.folder)) + return value + else: + raise KeyError + + + def __contains__(self, key): + key = self.key_filter_in(key) + return recfile.exists(key, path=self.folder) + + + def __delitem__(self, key): + key = self.key_filter_in(key) + recfile.remove(key, path=self.folder) + + + def __iter__(self): + for dirpath, dirnames, filenames in os.walk(self.folder): + for filename in filenames: + yield self.key_filter_out(filename) + + + def keys(self): + return list(self.__iter__()) + + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + + def clear(self): + for key in self: + del self[key] + + + def __init__(self, request=None, folder=None): + self.initialized = False + self.request = request + self.folder = folder + self.storage = None + + + def initialize(self): + if self.initialized: + return + else: + self.initialized = True + + folder = self.folder + request = self.request + + # Lets test if the cache folder exists, if not + # we are going to create it + folder = os.path.join(folder or request.folder, 'cache') + + if not os.path.exists(folder): + os.mkdir(folder) + + self.storage = CacheOnDisk.PersistentStorage(folder) + + if not CacheAbstract.cache_stats_name in self.storage: + self.storage[CacheAbstract.cache_stats_name] = {'hit_total': 0, 'misses': 0} + + + def __call__(self, key, f, + time_expire=DEFAULT_TIME_EXPIRE): + self.initialize() + + dt = time_expire + item = self.storage.get(key) + self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1 + + if item and f is None: + del self.storage[key] + + if f is None: + return None + + now = time.time() + + if item and ((dt is None) or (item[0] > now - dt)): + value = item[1] + else: + value = f() + self.storage[key] = (now, value) + self.storage[CacheAbstract.cache_stats_name]['misses'] += 1 + + return value + + + def clear(self, regex=None): + self.initialize() + storage = self.storage + if regex is None: + storage.clear() + else: + self._clear(storage, regex) + + if not CacheAbstract.cache_stats_name in storage: + storage[CacheAbstract.cache_stats_name] = { + 'hit_total': 0, 'misses': 0} + + + def increment(self, key, value=1): + self.initialize() + storage = self.storage + try: + if key in storage: + value = storage[key][1] + value + storage[key] = (time.time(), value) + except: + pass + return value + + + +class CacheAction(object): + def __init__(self, func, key, time_expire, cache, cache_model): + self.__name__ = func.__name__ + self.__doc__ = func.__doc__ + self.func = func + self.key = key + self.time_expire = time_expire + self.cache = cache + self.cache_model = cache_model + + def __call__(self, *a, **b): + if not self.key: + key2 = self.__name__ + ':' + repr(a) + ':' + repr(b) + else: + key2 = 
self.key.replace('%(name)s', self.__name__)\ + .replace('%(args)s', str(a)).replace('%(vars)s', str(b)) + cache_model = self.cache_model + if not cache_model or isinstance(cache_model, str): + cache_model = getattr(self.cache, cache_model or 'ram') + return cache_model(key2, + lambda a=a, b=b: self.func(*a, **b), + self.time_expire) + + +class Cache(object): + """ + Sets up generic caching, creating an instance of both CacheInRam and + CacheOnDisk. + In case of GAE will make use of gluon.contrib.gae_memcache. + + - self.ram is an instance of CacheInRam + - self.disk is an instance of CacheOnDisk + """ + + autokey = ':%(name)s:%(args)s:%(vars)s' + + def __init__(self, request): + """ + Args: + request: the global request object + """ + # GAE will have a special caching + if have_settings and settings.global_settings.web2py_runtime_gae: + from gluon.contrib.gae_memcache import MemcacheClient + self.ram = self.disk = MemcacheClient(request) + else: + # Otherwise use ram (and try also disk) + self.ram = CacheInRam(request) + try: + self.disk = CacheOnDisk(request) + except IOError: + logger.warning('no cache.disk (IOError)') + except AttributeError: + # normally not expected anymore, as GAE has already + # been accounted for + logger.warning('no cache.disk (AttributeError)') + + def action(self, time_expire=DEFAULT_TIME_EXPIRE, cache_model=None, + prefix=None, session=False, vars=True, lang=True, + user_agent=False, public=True, valid_statuses=None, + quick=None): + """Better fit for caching an action + + Warning: + Experimental! + + Currently only HTTP 1.1 compliant + reference : http://code.google.com/p/doctype-mirror/wiki/ArticleHttpCaching + + Args: + time_expire(int): same as @cache + cache_model(str): same as @cache + prefix(str): add a prefix to the calculated key + session(bool): adds response.session_id to the key + vars(bool): adds request.env.query_string + lang(bool): adds T.accepted_language + user_agent(bool or dict): if True, adds is_mobile and is_tablet to the key. + Pass a dict to use all the needed values (uses str(.items())) + (e.g. user_agent=request.user_agent()). Used only if session is + not True + public(bool): if False forces the Cache-Control to be 'private' + valid_statuses: by default only status codes starting with 1,2,3 will be cached. + pass an explicit list of statuses on which turn the cache on + quick: Session,Vars,Lang,User-agent,Public: + fast overrides with initials, e.g. 
'SVLP' or 'VLP', or 'VLP' + """ + from gluon import current + from gluon.http import HTTP + def wrap(func): + def wrapped_f(): + if current.request.env.request_method != 'GET': + return func() + if time_expire: + cache_control = 'max-age=%(time_expire)s, s-maxage=%(time_expire)s' % dict(time_expire=time_expire) + if quick: + session_ = True if 'S' in quick else False + vars_ = True if 'V' in quick else False + lang_ = True if 'L' in quick else False + user_agent_ = True if 'U' in quick else False + public_ = True if 'P' in quick else False + else: + session_, vars_, lang_, user_agent_, public_ = session, vars, lang, user_agent, public + if not session_ and public_: + cache_control += ', public' + expires = (current.request.utcnow + datetime.timedelta(seconds=time_expire)).strftime('%a, %d %b %Y %H:%M:%S GMT') + else: + cache_control += ', private' + expires = 'Fri, 01 Jan 1990 00:00:00 GMT' + if cache_model: + #figure out the correct cache key + cache_key = [current.request.env.path_info, current.response.view] + if session_: + cache_key.append(current.response.session_id) + elif user_agent_: + if user_agent_ is True: + cache_key.append("%(is_mobile)s_%(is_tablet)s" % current.request.user_agent()) + else: + cache_key.append(str(user_agent_.items())) + if vars_: + cache_key.append(current.request.env.query_string) + if lang_: + cache_key.append(current.T.accepted_language) + cache_key = hashlib.md5('__'.join(cache_key)).hexdigest() + if prefix: + cache_key = prefix + cache_key + try: + #action returns something + rtn = cache_model(cache_key, lambda : func(), time_expire=time_expire) + http, status = None, current.response.status + except HTTP, e: + #action raises HTTP (can still be valid) + rtn = cache_model(cache_key, lambda : e.body, time_expire=time_expire) + http, status = HTTP(e.status, rtn, **e.headers), e.status + else: + #action raised a generic exception + http = None + else: + #no server-cache side involved + try: + #action returns something + rtn = func() + http, status = None, current.response.status + except HTTP, e: + #action raises HTTP (can still be valid) + status = e.status + http = HTTP(e.status, e.body, **e.headers) + else: + #action raised a generic exception + http = None + send_headers = False + if http and isinstance(valid_statuses, list): + if status in valid_statuses: + send_headers = True + elif valid_statuses is None: + if str(status)[0] in '123': + send_headers = True + if send_headers: + headers = { + 'Pragma' : None, + 'Expires' : expires, + 'Cache-Control' : cache_control + } + current.response.headers.update(headers) + if cache_model and not send_headers: + #we cached already the value, but the status is not valid + #so we need to delete the cached value + cache_model(cache_key, None) + if http: + if send_headers: + http.headers.update(current.response.headers) + raise http + return rtn + wrapped_f.__name__ = func.__name__ + wrapped_f.__doc__ = func.__doc__ + return wrapped_f + return wrap + + def __call__(self, + key=None, + time_expire=DEFAULT_TIME_EXPIRE, + cache_model=None): + """ + Decorator function that can be used to cache any function/method. + + Args: + key(str) : the key of the object to be store or retrieved + time_expire(int) : expiration of the cache in seconds + `time_expire` is used to compare the current time with the time + when the requested object was last saved in cache. + It does not affect future requests. + Setting `time_expire` to 0 or negative value forces the cache to + refresh. 
+ cache_model(str): can be "ram", "disk" or other (like "memcache"). + Defaults to "ram" + + When the function `f` is called, web2py tries to retrieve + the value corresponding to `key` from the cache if the + object exists and if it did not expire, else it calles the function `f` + and stores the output in the cache corresponding to `key`. In the case + the output of the function is returned. + + Example: :: + + @cache('key', 5000, cache.ram) + def f(): + return time.ctime() + + Note: + If the function `f` is an action, we suggest using + @cache.action instead + """ + + def tmp(func, cache=self, cache_model=cache_model): + return CacheAction(func, key, time_expire, self, cache_model) + return tmp + + @staticmethod + def with_prefix(cache_model, prefix): + """ + allow replacing cache.ram with cache.with_prefix(cache.ram,'prefix') + it will add prefix to all the cache keys used. + """ + return lambda key, f, time_expire=DEFAULT_TIME_EXPIRE, prefix=prefix:\ + cache_model(prefix + key, f, time_expire) + + +def lazy_cache(key=None, time_expire=None, cache_model='ram'): + """ + Can be used to cache any function including ones in modules, + as long as the cached function is only called within a web2py request + + If a key is not provided, one is generated from the function name + `time_expire` defaults to None (no cache expiration) + + If cache_model is "ram" then the model is current.cache.ram, etc. + """ + def decorator(f, key=key, time_expire=time_expire, cache_model=cache_model): + key = key or repr(f) + + def g(*c, **d): + from gluon import current + return current.cache(key, time_expire, cache_model)(f)(*c, **d) + g.__name__ = f.__name__ + return g + return decorator diff --git a/gluon/compileapp.py b/gluon/compileapp.py index 9ada3cfbd..d04fd0d6e 100644 --- a/gluon/compileapp.py +++ b/gluon/compileapp.py @@ -407,7 +407,7 @@ def build_environment(request, response, session, store_current=True): # Enable standard conditional models (i.e., /*.py, /[controller]/*.py, and # /[controller]/[function]/*.py) response.models_to_run = [ - r'^\w+\.py$', + r'^\w+\.py$', r'^%s/\w+\.py$' % request.controller, r'^%s/%s/\w+\.py$' % (request.controller, request.function) ] @@ -514,7 +514,7 @@ def compile_controllers(folder): for function in exposed: command = data + "\nresponse._vars=response._caller(%s)\n" % \ function - filename = pjoin(folder, 'compiled', + filename = pjoin(folder, 'compiled', 'controllers.%s.%s.py' % (fname[:-3],function)) write_file(filename, command) save_pyc(filename) diff --git a/gluon/contrib/hypermedia.py b/gluon/contrib/hypermedia.py index 504945165..86926e6fa 100644 --- a/gluon/contrib/hypermedia.py +++ b/gluon/contrib/hypermedia.py @@ -47,7 +47,7 @@ def row2data(self,table,row,text=False): if self.compact: for fieldname in (self.table_policy.get('fields',table.fields)): field = table[fieldname] - if not ((field.type=='text' and text==False) or + if not ((field.type=='text' and text==False) or field.type=='blob' or field.type.startswith('reference ') or field.type.startswith('list:reference ')) and field.name in row: @@ -56,7 +56,7 @@ def row2data(self,table,row,text=False): for fieldname in (self.table_policy.get('fields',table.fields)): field = table[fieldname] if not ((field.type=='text' and text==False) or - field.type=='blob' or + field.type=='blob' or field.type.startswith('reference ') or field.type.startswith('list:reference ')) and field.name in row: data.append({'name':field.name,'value':row[field.name], @@ -128,10 +128,10 @@ def request2query(self,table,vars): for 
key,value in vars.items(): if key=='_offset': limitby[0] = int(value) # MAY FAIL - elif key == '_limit': + elif key == '_limit': limitby[1] = int(value)+1 # MAY FAIL elif key=='_orderby': - orderby = value + orderby = value elif key in fieldnames: queries.append(table[key] == value) elif key.endswith('.eq') and key[:-3] in fieldnames: # for completeness (useless) @@ -156,14 +156,14 @@ def request2query(self,table,vars): if filter_query: queries.append(filter_query) query = reduce(lambda a,b:a&b,queries[1:]) if len(queries)>1 else queries[0] - orderby = [table[f] if f[0]!='~' else ~table[f[1:]] for f in orderby.split(',')] + orderby = [table[f] if f[0]!='~' else ~table[f[1:]] for f in orderby.split(',')] return (query, limitby, orderby) def table2queries(self,table, href): """ generates a set of collection.queries examples for the table """ data = [] for fieldname in (self.table_policy.get('fields', table.fields)): - data.append({'name':fieldname,'value':''}) + data.append({'name':fieldname,'value':''}) if self.extensions: data.append({'name':fieldname+'.ne','value':''}) # NEW !!! data.append({'name':fieldname+'.lt','value':''}) @@ -192,7 +192,7 @@ def process(self,request,response,policies=None): if not tablename: r['href'] = URL(scheme=True), # https://github.com/collection-json/extensions/blob/master/model.md - r['links'] = [{'rel' : t, 'href' : URL(args=t,scheme=True), 'model':t} + r['links'] = [{'rel' : t, 'href' : URL(args=t,scheme=True), 'model':t} for t in tablenames] response.headers['Content-Type'] = 'application/vnd.collection+json' return response.json({'collection':r}) @@ -207,7 +207,7 @@ def process(self,request,response,policies=None): # process GET if request.env.request_method=='GET': table = db[tablename] - r['href'] = URL(args=tablename) + r['href'] = URL(args=tablename) r['items'] = items = [] try: (query, limitby, orderby) = self.request2query(table,request.get_vars) @@ -258,7 +258,7 @@ def process(self,request,response,policies=None): return response.json({'collection':r}) # process DELETE elif request.env.request_method=='DELETE': - table = db[tablename] + table = db[tablename] if not request.get_vars: return self.error(400, "BAD REQUEST", "Nothing to delete") else: @@ -312,7 +312,7 @@ def error(self,code="400", title="BAD REQUEST", message="UNKNOWN", form_errors={ request, response = self.request, self.response r = OrderedDict({ "version" : self.VERSION, - "href" : URL(args=request.args,vars=request.vars), + "href" : URL(args=request.args,vars=request.vars), "error" : { "title" : title, "code" : code, @@ -340,4 +340,3 @@ def error(self,code="400", title="BAD REQUEST", message="UNKNOWN", form_errors={ 'DELETE':{'query':None}, }, } - diff --git a/gluon/contrib/memdb.py b/gluon/contrib/memdb.py index 6ad9b1591..e4b56ef78 100644 --- a/gluon/contrib/memdb.py +++ b/gluon/contrib/memdb.py @@ -254,12 +254,12 @@ def drop(self): self._db(self.id > 0).delete() - def insert(self, **fields): - # Checks 3 times that the id is new. 3 times is enough! - for i in range(3): - id = self._create_id() - if self.get(id) is None and self.update(id, **fields): - return long(id) + def insert(self, **fields): + # Checks 3 times that the id is new. 3 times is enough! 
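# Standalone sketch of the retry-on-collision pattern that insert() implements in
# the loop below: draw a candidate id, keep it only if it is not already taken,
# and give up after a few attempts.  The plain dict and random id range here are
# stand-ins for the real datastore client and its _create_id() helper.
import random

def insert_with_unique_id(store, fields, attempts=3):
    for _ in range(attempts):
        new_id = random.randrange(1, 2 ** 31)
        if new_id not in store:          # id is free, claim it
            store[new_id] = dict(fields)
            return new_id
    raise RuntimeError("Too many ID conflicts")

# records = {}
# rid = insert_with_unique_id(records, {'name': 'web2py'})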
+ for i in range(3): + id = self._create_id() + if self.get(id) is None and self.update(id, **fields): + return long(id) else: raise RuntimeError("Too many ID conflicts") diff --git a/gluon/contrib/mockimaplib.py b/gluon/contrib/mockimaplib.py index 87f8b4795..899759004 100644 --- a/gluon/contrib/mockimaplib.py +++ b/gluon/contrib/mockimaplib.py @@ -141,7 +141,7 @@ def _fetch(self, value, arg): parts = "complete" return ("OK", (("%s " % message_id, message[parts]), message["flags"])) - + def _get_messages(self, query): if query.strip().isdigit(): return [self.spam[self._mailbox][int(query.strip()) - 1],] @@ -151,7 +151,7 @@ def _get_messages(self, query): for item in self.spam[self._mailbox]: if item["uid"] == query[1:-1].replace("UID", "").strip(): return [item,] - messages = [] + messages = [] try: for m in self.results[self._mailbox][query]: try: @@ -169,7 +169,7 @@ def _get_messages(self, query): return messages except KeyError: raise ValueError("The client issued an unexpected query: %s" % query) - + def setup(self, spam={}, results={}): """adds custom message and query databases or sets the values to the module defaults. @@ -252,4 +252,3 @@ def __new__(self, *args, **kwargs): return Connection() IMAP4_SSL = IMAP4 - diff --git a/gluon/contrib/pbkdf2_ctypes.py b/gluon/contrib/pbkdf2_ctypes.py index 685ab5d1f..6aa623432 100644 --- a/gluon/contrib/pbkdf2_ctypes.py +++ b/gluon/contrib/pbkdf2_ctypes.py @@ -11,7 +11,7 @@ Note: This module is intended as a plugin replacement of pbkdf2.py by Armin Ronacher. - Git repository: + Git repository: $ git clone https://github.com/michele-comitini/pbkdf2_ctypes.git :copyright: Copyright (c) 2013: Michele Comitini @@ -86,7 +86,7 @@ def _openssl_hashlib_to_crypto_map_get(hashfunc): crypto_hashfunc.restype = ctypes.c_void_p return crypto_hashfunc() - + def _openssl_pbkdf2(data, salt, iterations, digest, keylen): """OpenSSL compatibile wrapper """ @@ -99,7 +99,7 @@ def _openssl_pbkdf2(data, salt, iterations, digest, keylen): c_iter = ctypes.c_int(iterations) c_keylen = ctypes.c_int(keylen) c_buff = ctypes.create_string_buffer(keylen) - + # PKCS5_PBKDF2_HMAC(const char *pass, int passlen, # const unsigned char *salt, int saltlen, int iter, # const EVP_MD *digest, @@ -109,7 +109,7 @@ def _openssl_pbkdf2(data, salt, iterations, digest, keylen): ctypes.c_char_p, ctypes.c_int, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p] - + crypto.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int err = crypto.PKCS5_PBKDF2_HMAC(c_pass, c_passlen, c_salt, c_saltlen, diff --git a/gluon/contrib/pypyodbc.py b/gluon/contrib/pypyodbc.py index 7d359ccb1..bb2a9ceb1 100644 --- a/gluon/contrib/pypyodbc.py +++ b/gluon/contrib/pypyodbc.py @@ -7,18 +7,18 @@ # Copyright (c) 2014 Henry Zhou and PyPyODBC contributors # Copyright (c) 2004 Michele Petrazzo -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the following conditions: # -# The above copyright notice and this permission notice shall be included in all copies or substantial portions +# The above copyright notice and this permission notice shall be included 
in all copies or substantial portions # of the Software. # -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF -# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. pooling = True @@ -52,7 +52,7 @@ use_unicode = False if py_ver < '2.6': bytearray = str - + if not hasattr(ctypes, 'c_ssize_t'): if ctypes.sizeof(ctypes.c_uint) == ctypes.sizeof(ctypes.c_void_p): @@ -69,7 +69,7 @@ #determin the size of Py_UNICODE #sys.maxunicode > 65536 and 'UCS4' or 'UCS2' -UNICODE_SIZE = sys.maxunicode > 65536 and 4 or 2 +UNICODE_SIZE = sys.maxunicode > 65536 and 4 or 2 # Define ODBC constants. They are widly used in ODBC documents and programs @@ -402,15 +402,15 @@ def __init__(self, error_code, error_desc): self.value = (error_code, error_desc) self.args = (error_code, error_desc) - - - -############################################################################ + + + +############################################################################ # # Find the ODBC library on the platform and connect to it using ctypes # ############################################################################ -# Get the References of the platform's ODBC functions via ctypes +# Get the References of the platform's ODBC functions via ctypes odbc_decoding = 'utf_16' odbc_encoding = 'utf_16_le' @@ -421,7 +421,7 @@ def __init__(self, error_code, error_desc): # On Windows, the size of SQLWCHAR is hardcoded to 2-bytes. SQLWCHAR_SIZE = ctypes.sizeof(ctypes.c_ushort) else: - # Set load the library on linux + # Set load the library on linux try: # First try direct loading libodbc.so ODBC_API = ctypes.cdll.LoadLibrary('libodbc.so') @@ -446,7 +446,7 @@ def __init__(self, error_code, error_desc): except: # If still fail loading, abort. raise OdbcLibraryError('Error while loading ' + library) - + # only iODBC uses utf-32 / UCS4 encoding data, others normally use utf-16 / UCS2 # So we set those for handling. 
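# A simplified, standalone sketch of the library-lookup fallback chain used above
# to locate the ODBC driver manager on non-Windows platforms: try the common
# unixODBC / iODBC sonames directly, then fall back to ctypes.util.find_library().
# It deliberately omits the version-suffixed names the real module also probes.
import ctypes
import ctypes.util

def load_odbc_library():
    for name in ('libodbc.so', 'libiodbc.dylib'):
        try:
            return ctypes.cdll.LoadLibrary(name), name
        except OSError:
            pass
    found = ctypes.util.find_library('odbc') or ctypes.util.find_library('iodbc')
    if found:
        return ctypes.cdll.LoadLibrary(found), found
    raise OSError('no ODBC driver manager found')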
if 'libiodbc.dylib' in library: @@ -506,17 +506,17 @@ def UCS_buf(s): raise OdbcLibraryError('Using narrow Python build with ODBC library ' 'expecting wide unicode is not supported.') - - - - - - - - - - - + + + + + + + + + + + ############################################################ # Database value to Python data type mappings @@ -551,11 +551,11 @@ def UCS_buf(s): SQL_C_TYPE_DATE = SQL_TYPE_DATE = 91 SQL_C_TYPE_TIME = SQL_TYPE_TIME = 92 SQL_C_BINARY = SQL_BINARY = -2 -SQL_C_SBIGINT = SQL_BIGINT + SQL_SIGNED_OFFSET +SQL_C_SBIGINT = SQL_BIGINT + SQL_SIGNED_OFFSET SQL_C_TINYINT = SQL_TINYINT = -6 SQL_C_BIT = SQL_BIT = -7 SQL_C_WCHAR = SQL_WCHAR = -8 -SQL_C_GUID = SQL_GUID = -11 +SQL_C_GUID = SQL_GUID = -11 SQL_C_TYPE_TIMESTAMP = SQL_TYPE_TIMESTAMP = 93 SQL_C_DEFAULT = 99 @@ -581,19 +581,19 @@ def dt_cvt(x): def Decimal_cvt(x): if py_v3: - x = x.decode('ascii') + x = x.decode('ascii') return Decimal(x) - + bytearray_cvt = bytearray if sys.platform == 'cli': bytearray_cvt = lambda x: bytearray(buffer(x)) - + # Below Datatype mappings referenced the document at # http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.sdk_12.5.1.aseodbc/html/aseodbc/CACFDIGH.htm SQL_data_type_dict = { \ #SQL Data TYPE 0.Python Data Type 1.Default Output Converter 2.Buffer Type 3.Buffer Allocator 4.Default Size 5.Variable Length -SQL_TYPE_NULL : (None, lambda x: None, SQL_C_CHAR, create_buffer, 2 , False ), +SQL_TYPE_NULL : (None, lambda x: None, SQL_C_CHAR, create_buffer, 2 , False ), SQL_CHAR : (str, lambda x: x, SQL_C_CHAR, create_buffer, 2048 , False ), SQL_NUMERIC : (Decimal, Decimal_cvt, SQL_C_CHAR, create_buffer, 150 , False ), SQL_DECIMAL : (Decimal, Decimal_cvt, SQL_C_CHAR, create_buffer, 150 , False ), @@ -620,8 +620,8 @@ def Decimal_cvt(x): SQL_WLONGVARCHAR : (unicode, lambda x: x, SQL_C_WCHAR, create_buffer_u, 20500 , True ), SQL_TYPE_DATE : (datetime.date, dt_cvt, SQL_C_CHAR, create_buffer, 30 , False ), SQL_TYPE_TIME : (datetime.time, tm_cvt, SQL_C_CHAR, create_buffer, 20 , False ), -SQL_TYPE_TIMESTAMP : (datetime.datetime, dttm_cvt, SQL_C_CHAR, create_buffer, 30 , False ), -SQL_SS_VARIANT : (str, lambda x: x, SQL_C_CHAR, create_buffer, 2048 , True ), +SQL_TYPE_TIMESTAMP : (datetime.datetime, dttm_cvt, SQL_C_CHAR, create_buffer, 30 , False ), +SQL_SS_VARIANT : (str, lambda x: x, SQL_C_CHAR, create_buffer, 2048 , True ), SQL_SS_XML : (unicode, lambda x: x, SQL_C_WCHAR, create_buffer_u, 20500 , True ), SQL_SS_UDT : (bytearray, bytearray_cvt, SQL_C_BINARY, create_buffer, 5120 , True ), } @@ -744,7 +744,7 @@ def Decimal_cvt(x): ODBC_API.SQLDrivers.argtypes = [ ctypes.c_void_p, ctypes.c_ushort, - ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short), + ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short), ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short), ] @@ -920,7 +920,7 @@ def to_wchar(argtypes): def ctrl_err(ht, h, val_ret, ansi): """Classify type of ODBC error from (type of handle, handle, return value) , and raise with a list""" - + if ansi: state = create_buffer(22) Message = create_buffer(1024*4) @@ -938,7 +938,7 @@ def ctrl_err(ht, h, val_ret, ansi): Buffer_len = c_short() err_list = [] number_errors = 1 - + while 1: ret = ODBC_func(ht, h, number_errors, state, \ ADDR(NativeError), Message, 1024, ADDR(Buffer_len)) @@ -985,19 +985,19 @@ def check_success(ODBC_obj, ret): ctrl_err(SQL_HANDLE_DBC, ODBC_obj.dbc_h, ret, ODBC_obj.ansi) else: ctrl_err(SQL_HANDLE_ENV, ODBC_obj, ret, False) - - + + def AllocateEnv(): if pooling: ret = 
ODBC_API.SQLSetEnvAttr(SQL_NULL_HANDLE, SQL_ATTR_CONNECTION_POOLING, SQL_CP_ONE_PER_HENV, SQL_IS_UINTEGER) check_success(SQL_NULL_HANDLE, ret) - ''' + ''' Allocate an ODBC environment by initializing the handle shared_env_h ODBC enviroment needed to be created, so connections can be created under it connections pooling can be shared under one environment ''' - global shared_env_h + global shared_env_h shared_env_h = ctypes.c_void_p() ret = ODBC_API.SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, ADDR(shared_env_h)) check_success(shared_env_h, ret) @@ -1005,7 +1005,7 @@ def AllocateEnv(): # Set the ODBC environment's compatibil leve to ODBC 3.0 ret = ODBC_API.SQLSetEnvAttr(shared_env_h, SQL_ATTR_ODBC_VERSION, SQL_OV_ODBC3, 0) check_success(shared_env_h, ret) - + """ Here, we have a few callables that determine how a result row is returned. @@ -1022,20 +1022,20 @@ def TupleRow(cursor): """ class Row(tuple): cursor_description = cursor.description - + def get(self, field): if not hasattr(self, 'field_dict'): self.field_dict = {} for i,item in enumerate(self): self.field_dict[self.cursor_description[i][0]] = item return self.field_dict.get(field) - + def __getitem__(self, field): if isinstance(field, (unicode,str)): return self.get(field) else: return tuple.__getitem__(self,field) - + return Row @@ -1088,19 +1088,19 @@ def __setitem__(self, index, value): return Row # When Null is used in a binary parameter, database usually would not -# accept the None for a binary field, so the work around is to use a +# accept the None for a binary field, so the work around is to use a # Specical None that the pypyodbc moudle would know this NULL is for # a binary field. class BinaryNullType(): pass BinaryNull = BinaryNullType() -# The get_type function is used to determine if parameters need to be re-binded +# The get_type function is used to determine if parameters need to be re-binded # against the changed parameter types # 'b' for bool, 'U' for long unicode string, 'u' for short unicode string # 'S' for long 8 bit string, 's' for short 8 bit string, 'l' for big integer, 'i' for normal integer # 'f' for float, 'D' for Decimal, 't' for datetime.time, 'd' for datetime.datetime, 'dt' for datetime.datetime # 'bi' for binary -def get_type(v): +def get_type(v): if isinstance(v, bool): return ('b',) @@ -1130,7 +1130,7 @@ def get_type(v): t = v.as_tuple() #1.23 -> (1,2,3),-2 , 1.23*E7 -> (1,2,3),5 return ('D',(len(t[1]),0 - t[2])) # number of digits, and number of decimal digits - + elif isinstance (v, datetime.datetime): return ('dt',) elif isinstance (v, datetime.date): @@ -1139,7 +1139,7 @@ def get_type(v): return ('t',) elif isinstance (v, (bytearray, buffer)): return ('bi',(len(v)//1000+1)*1000) - + return type(v) @@ -1169,16 +1169,16 @@ def __init__(self, conx, row_type_callable=None): ret = ODBC_API.SQLAllocHandle(SQL_HANDLE_STMT, self.connection.dbc_h, ADDR(self.stmt_h)) check_success(self, ret) self._PARAM_SQL_TYPE_LIST = [] - self.closed = False + self.closed = False + - def prepare(self, query_string): """prepare a query""" - + #self._free_results(FREE_STATEMENT) if not self.connection: self.close() - + if type(query_string) == unicode: c_query_string = wchar_pointer(UCS_buf(query_string)) ret = ODBC_API.SQLPrepareW(self.stmt_h, c_query_string, len(query_string)) @@ -1187,10 +1187,10 @@ def prepare(self, query_string): ret = ODBC_API.SQLPrepare(self.stmt_h, c_query_string, len(query_string)) if ret != SQL_SUCCESS: check_success(self, ret) - - - self._PARAM_SQL_TYPE_LIST = [] - + + + 
self._PARAM_SQL_TYPE_LIST = [] + if self.connection.support_SQLDescribeParam: # SQLServer's SQLDescribeParam only supports DML SQL, so avoid the SELECT statement if True:# 'SELECT' not in query_string.upper(): @@ -1199,7 +1199,7 @@ def prepare(self, query_string): ret = ODBC_API.SQLNumParams(self.stmt_h, ADDR(NumParams)) if ret != SQL_SUCCESS: check_success(self, ret) - + for col_num in range(NumParams.value): ParameterNumber = ctypes.c_ushort(col_num + 1) DataType = c_short() @@ -1219,7 +1219,7 @@ def prepare(self, query_string): check_success(self, ret) except DatabaseError: if sys.exc_info()[1].value[0] == '07009': - self._PARAM_SQL_TYPE_LIST = [] + self._PARAM_SQL_TYPE_LIST = [] break else: raise sys.exc_info()[1] @@ -1227,7 +1227,7 @@ def prepare(self, query_string): raise sys.exc_info()[1] self._PARAM_SQL_TYPE_LIST.append((DataType.value,DecimalDigits.value)) - + self.statement = query_string @@ -1237,39 +1237,39 @@ def _BindParams(self, param_types, pram_io_list = []): if not self.connection: self.close() #self._free_results(NO_FREE_STATEMENT) - + # Get the number of query parameters judged by database. NumParams = c_short() ret = ODBC_API.SQLNumParams(self.stmt_h, ADDR(NumParams)) if ret != SQL_SUCCESS: check_success(self, ret) - + if len(param_types) != NumParams.value: # In case number of parameters provided do not same as number required error_desc = "The SQL contains %d parameter markers, but %d parameters were supplied" \ %(NumParams.value,len(param_types)) raise ProgrammingError('HY000',error_desc) - - + + # Every parameter needs to be binded to a buffer ParamBufferList = [] # Temporary holder since we can only call SQLDescribeParam before # calling SQLBindParam. temp_holder = [] for col_num in range(NumParams.value): - dec_num = 0 + dec_num = 0 buf_size = 512 - + if param_types[col_num][0] == 'u': sql_c_type = SQL_C_WCHAR - sql_type = SQL_WVARCHAR - buf_size = 255 - ParameterBuffer = create_buffer_u(buf_size) - + sql_type = SQL_WVARCHAR + buf_size = 255 + ParameterBuffer = create_buffer_u(buf_size) + elif param_types[col_num][0] == 's': sql_c_type = SQL_C_CHAR sql_type = SQL_VARCHAR - buf_size = 255 + buf_size = 255 ParameterBuffer = create_buffer(buf_size) @@ -1284,26 +1284,26 @@ def _BindParams(self, param_types, pram_io_list = []): sql_type = SQL_LONGVARCHAR buf_size = param_types[col_num][1]#len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500 ParameterBuffer = create_buffer(buf_size) - + # bool subclasses int, thus has to go first elif param_types[col_num][0] == 'b': sql_c_type = SQL_C_CHAR sql_type = SQL_BIT buf_size = SQL_data_type_dict[sql_type][4] ParameterBuffer = create_buffer(buf_size) - + elif param_types[col_num][0] == 'i': - sql_c_type = SQL_C_CHAR - sql_type = SQL_INTEGER - buf_size = SQL_data_type_dict[sql_type][4] - ParameterBuffer = create_buffer(buf_size) - + sql_c_type = SQL_C_CHAR + sql_type = SQL_INTEGER + buf_size = SQL_data_type_dict[sql_type][4] + ParameterBuffer = create_buffer(buf_size) + elif param_types[col_num][0] == 'l': - sql_c_type = SQL_C_CHAR - sql_type = SQL_BIGINT - buf_size = SQL_data_type_dict[sql_type][4] + sql_c_type = SQL_C_CHAR + sql_type = SQL_BIGINT + buf_size = SQL_data_type_dict[sql_type][4] ParameterBuffer = create_buffer(buf_size) - + elif param_types[col_num][0] == 'D': #Decimal sql_c_type = SQL_C_CHAR @@ -1311,56 +1311,56 @@ def _BindParams(self, param_types, pram_io_list = []): digit_num, dec_num = param_types[col_num][1] if dec_num > 0: # has decimal - buf_size = digit_num + buf_size = digit_num dec_num 
= dec_num else: # no decimal - buf_size = digit_num - dec_num + buf_size = digit_num - dec_num dec_num = 0 ParameterBuffer = create_buffer(buf_size + 4)# add extra length for sign and dot - + elif param_types[col_num][0] == 'f': sql_c_type = SQL_C_CHAR - sql_type = SQL_DOUBLE + sql_type = SQL_DOUBLE buf_size = SQL_data_type_dict[sql_type][4] ParameterBuffer = create_buffer(buf_size) - - + + # datetime subclasses date, thus has to go first elif param_types[col_num][0] == 'dt': sql_c_type = SQL_C_CHAR sql_type = SQL_TYPE_TIMESTAMP - buf_size = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0] + buf_size = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0] ParameterBuffer = create_buffer(buf_size) dec_num = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][1] - - + + elif param_types[col_num][0] == 'd': sql_c_type = SQL_C_CHAR if SQL_TYPE_DATE in self.connection.type_size_dic: #if DEBUG:print('conx.type_size_dic.has_key(SQL_TYPE_DATE)') sql_type = SQL_TYPE_DATE buf_size = self.connection.type_size_dic[SQL_TYPE_DATE][0] - + ParameterBuffer = create_buffer(buf_size) dec_num = self.connection.type_size_dic[SQL_TYPE_DATE][1] - + else: # SQL Sever <2008 doesn't have a DATE type. - sql_type = SQL_TYPE_TIMESTAMP - buf_size = 10 + sql_type = SQL_TYPE_TIMESTAMP + buf_size = 10 ParameterBuffer = create_buffer(buf_size) - - + + elif param_types[col_num][0] == 't': sql_c_type = SQL_C_CHAR if SQL_TYPE_TIME in self.connection.type_size_dic: sql_type = SQL_TYPE_TIME - buf_size = self.connection.type_size_dic[SQL_TYPE_TIME][0] + buf_size = self.connection.type_size_dic[SQL_TYPE_TIME][0] ParameterBuffer = create_buffer(buf_size) - dec_num = self.connection.type_size_dic[SQL_TYPE_TIME][1] + dec_num = self.connection.type_size_dic[SQL_TYPE_TIME][1] elif SQL_SS_TIME2 in self.connection.type_size_dic: # TIME type added in SQL Server 2008 sql_type = SQL_SS_TIME2 @@ -1370,16 +1370,16 @@ def _BindParams(self, param_types, pram_io_list = []): else: # SQL Sever <2008 doesn't have a TIME type. 
sql_type = SQL_TYPE_TIMESTAMP - buf_size = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0] + buf_size = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0] ParameterBuffer = create_buffer(buf_size) dec_num = 3 - + elif param_types[col_num][0] == 'BN': sql_c_type = SQL_C_BINARY - sql_type = SQL_VARBINARY - buf_size = 1 - ParameterBuffer = create_buffer(buf_size) - + sql_type = SQL_VARBINARY + buf_size = 1 + ParameterBuffer = create_buffer(buf_size) + elif param_types[col_num][0] == 'N': if len(self._PARAM_SQL_TYPE_LIST) > 0: sql_c_type = SQL_C_DEFAULT @@ -1389,64 +1389,64 @@ def _BindParams(self, param_types, pram_io_list = []): else: sql_c_type = SQL_C_CHAR sql_type = SQL_CHAR - buf_size = 1 - ParameterBuffer = create_buffer(buf_size) + buf_size = 1 + ParameterBuffer = create_buffer(buf_size) elif param_types[col_num][0] == 'bi': sql_c_type = SQL_C_BINARY - sql_type = SQL_LONGVARBINARY - buf_size = param_types[col_num][1]#len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500 + sql_type = SQL_LONGVARBINARY + buf_size = param_types[col_num][1]#len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500 ParameterBuffer = create_buffer(buf_size) - - + + else: sql_c_type = SQL_C_CHAR sql_type = SQL_LONGVARCHAR - buf_size = len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500 + buf_size = len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500 ParameterBuffer = create_buffer(buf_size) - + temp_holder.append((sql_c_type, sql_type, buf_size, dec_num, ParameterBuffer)) for col_num, (sql_c_type, sql_type, buf_size, dec_num, ParameterBuffer) in enumerate(temp_holder): BufferLen = c_ssize_t(buf_size) LenOrIndBuf = c_ssize_t() - - + + InputOutputType = SQL_PARAM_INPUT if len(pram_io_list) > col_num: InputOutputType = pram_io_list[col_num] ret = SQLBindParameter(self.stmt_h, col_num + 1, InputOutputType, sql_c_type, sql_type, buf_size,\ dec_num, ADDR(ParameterBuffer), BufferLen,ADDR(LenOrIndBuf)) - if ret != SQL_SUCCESS: + if ret != SQL_SUCCESS: check_success(self, ret) # Append the value buffer and the length buffer to the array ParamBufferList.append((ParameterBuffer,LenOrIndBuf,sql_type)) - + self._last_param_types = param_types self._ParamBufferList = ParamBufferList - + def execute(self, query_string, params=None, many_mode=False, call_mode=False): """ Execute the query string, with optional parameters. 
If parameters are provided, the query would first be prepared, then executed with parameters; - If parameters are not provided, only th query sting, it would be executed directly + If parameters are not provided, only th query sting, it would be executed directly """ if not self.connection: self.close() - + self._free_stmt(SQL_CLOSE) if params: # If parameters exist, first prepare the query then executed with parameters - + if not isinstance(params, (tuple, list)): raise TypeError("Params must be in a list, tuple, or Row") - + if query_string != self.statement: - # if the query is not same as last query, then it is not prepared + # if the query is not same as last query, then it is not prepared self.prepare(query_string) - - + + param_types = list(map(get_type, params)) if call_mode: @@ -1462,8 +1462,8 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): elif sum([p_type[0] != 'N' and p_type != self._last_param_types[i] for i,p_type in enumerate(param_types)]) > 0: self._free_stmt(SQL_RESET_PARAMS) self._BindParams(param_types) - - + + # With query prepared, now put parameters into buffers col_num = 0 for param_buffer, param_buffer_len, sql_type in self._ParamBufferList: @@ -1479,24 +1479,24 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): else: c_char_buf = str(param_val) c_buf_len = len(c_char_buf) - + elif param_types[col_num][0] in ('s','S'): c_char_buf = param_val c_buf_len = len(c_char_buf) elif param_types[col_num][0] in ('u','U'): c_char_buf = UCS_buf(param_val) c_buf_len = len(c_char_buf) - + elif param_types[col_num][0] == 'dt': max_len = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0] datetime_str = param_val.strftime('%Y-%m-%d %H:%M:%S.%f') c_char_buf = datetime_str[:max_len] if py_v3: c_char_buf = bytes(c_char_buf,'ascii') - + c_buf_len = len(c_char_buf) # print c_buf_len, c_char_buf - + elif param_types[col_num][0] == 'd': if SQL_TYPE_DATE in self.connection.type_size_dic: max_len = self.connection.type_size_dic[SQL_TYPE_DATE][0] @@ -1507,7 +1507,7 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): c_char_buf = bytes(c_char_buf,'ascii') c_buf_len = len(c_char_buf) #print c_char_buf - + elif param_types[col_num][0] == 't': if SQL_TYPE_TIME in self.connection.type_size_dic: max_len = self.connection.type_size_dic[SQL_TYPE_TIME][0] @@ -1526,7 +1526,7 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): if py_v3: c_char_buf = bytes(c_char_buf,'ascii') #print c_buf_len, c_char_buf - + elif param_types[col_num][0] == 'b': if param_val == True: c_char_buf = '1' @@ -1535,7 +1535,7 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): if py_v3: c_char_buf = bytes(c_char_buf,'ascii') c_buf_len = 1 - + elif param_types[col_num][0] == 'D': #Decimal sign = param_val.as_tuple()[0] == 0 and '+' or '-' digit_string = ''.join([str(x) for x in param_val.as_tuple()[1]]) @@ -1555,18 +1555,18 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): else: c_char_buf = v c_buf_len = len(c_char_buf) - + elif param_types[col_num][0] == 'bi': c_char_buf = str_8b(param_val) c_buf_len = len(c_char_buf) - + else: c_char_buf = param_val - - + + if param_types[col_num][0] == 'bi': param_buffer.raw = str_8b(param_val) - + else: #print (type(param_val),param_buffer, param_buffer.value) param_buffer.value = c_char_buf @@ -1576,37 +1576,37 @@ def execute(self, query_string, params=None, many_mode=False, call_mode=False): 
param_buffer_len.value = SQL_NTS else: param_buffer_len.value = c_buf_len - + col_num += 1 ret = SQLExecute(self.stmt_h) if ret != SQL_SUCCESS: #print param_valparam_buffer, param_buffer.value check_success(self, ret) - + if not many_mode: self._NumOfRows() self._UpdateDesc() #self._BindCols() - + else: self.execdirect(query_string) return self - - + + def _SQLExecute(self): if not self.connection: self.close() ret = SQLExecute(self.stmt_h) if ret != SQL_SUCCESS: - check_success(self, ret) - - + check_success(self, ret) + + def execdirect(self, query_string): """Execute a query directly""" if not self.connection: self.close() - + self._free_stmt() self._last_param_types = None self.statement = None @@ -1621,25 +1621,25 @@ def execdirect(self, query_string): self._UpdateDesc() #self._BindCols() return self - - + + def callproc(self, procname, args): if not self.connection: self.close() raise Warning('', 'Still not fully implemented') self._pram_io_list = [row[4] for row in self.procedurecolumns(procedure = procname).fetchall() if row[4] not in (SQL_RESULT_COL, SQL_RETURN_VALUE)] - + print('pram_io_list: '+str(self._pram_io_list)) - - + + call_escape = '{CALL '+procname if args: call_escape += '(' + ','.join(['?' for params in args]) + ')' call_escape += '}' self.execute(call_escape, args, call_mode = True) - + result = [] for buf, buf_len, sql_type in self._ParamBufferList: @@ -1649,20 +1649,20 @@ def callproc(self, procname, args): result.append(self.connection.output_converter[sql_type](buf.value)) return result - - + + def executemany(self, query_string, params_list = [None]): if not self.connection: self.close() - + for params in params_list: self.execute(query_string, params, many_mode = True) self._NumOfRows() self.rowcount = -1 self._UpdateDesc() #self._BindCols() - - + + def _CreateColBuf(self): if not self.connection: @@ -1672,60 +1672,60 @@ def _CreateColBuf(self): self._ColBufferList = [] bind_data = True for col_num in range(NOC): - col_name = self.description[col_num][0] - col_size = self.description[col_num][2] - col_sql_data_type = self._ColTypeCodeList[col_num] + col_name = self.description[col_num][0] + col_size = self.description[col_num][2] + col_sql_data_type = self._ColTypeCodeList[col_num] target_type = SQL_data_type_dict[col_sql_data_type][2] - dynamic_length = SQL_data_type_dict[col_sql_data_type][5] + dynamic_length = SQL_data_type_dict[col_sql_data_type][5] # set default size base on the column's sql data type - total_buf_len = SQL_data_type_dict[col_sql_data_type][4] - + total_buf_len = SQL_data_type_dict[col_sql_data_type][4] + # over-write if there's pre-set size value for "large columns" - if total_buf_len > 20500: + if total_buf_len > 20500: total_buf_len = self._outputsize.get(None,total_buf_len) - # over-write if there's pre-set size value for the "col_num" column + # over-write if there's pre-set size value for the "col_num" column total_buf_len = self._outputsize.get(col_num, total_buf_len) # if the size of the buffer is very long, do not bind - # because a large buffer decrease performance, and sometimes you only get a NULL value. + # because a large buffer decrease performance, and sometimes you only get a NULL value. # in that case use sqlgetdata instead. 
if col_size >= 1024: - dynamic_length = True + dynamic_length = True alloc_buffer = SQL_data_type_dict[col_sql_data_type][3](total_buf_len) used_buf_len = c_ssize_t() - + force_unicode = self.connection.unicode_results - + if force_unicode and col_sql_data_type in (SQL_CHAR,SQL_VARCHAR,SQL_LONGVARCHAR): target_type = SQL_C_WCHAR alloc_buffer = create_buffer_u(total_buf_len) - + buf_cvt_func = self.connection.output_converter[self._ColTypeCodeList[col_num]] - + if bind_data: if dynamic_length: bind_data = False - self._ColBufferList.append([col_name, target_type, used_buf_len, ADDR(used_buf_len), alloc_buffer, ADDR(alloc_buffer), total_buf_len, buf_cvt_func, bind_data]) - + self._ColBufferList.append([col_name, target_type, used_buf_len, ADDR(used_buf_len), alloc_buffer, ADDR(alloc_buffer), total_buf_len, buf_cvt_func, bind_data]) + if bind_data: ret = ODBC_API.SQLBindCol(self.stmt_h, col_num + 1, target_type, ADDR(alloc_buffer), total_buf_len, ADDR(used_buf_len)) if ret != SQL_SUCCESS: check_success(self, ret) - + def _UpdateDesc(self): - "Get the information of (name, type_code, display_size, internal_size, col_precision, scale, null_ok)" + "Get the information of (name, type_code, display_size, internal_size, col_precision, scale, null_ok)" if not self.connection: self.close() - + force_unicode = self.connection.unicode_results if force_unicode: Cname = create_buffer_u(1024) else: Cname = create_buffer(1024) - + Cname_ptr = c_short() Ctype_code = c_short() Csize = ctypes.c_size_t() @@ -1736,34 +1736,34 @@ def _UpdateDesc(self): self._ColTypeCodeList = [] NOC = self._NumOfCols() for col in range(1, NOC+1): - - ret = ODBC_API.SQLColAttribute(self.stmt_h, col, SQL_DESC_DISPLAY_SIZE, ADDR(create_buffer(10)), + + ret = ODBC_API.SQLColAttribute(self.stmt_h, col, SQL_DESC_DISPLAY_SIZE, ADDR(create_buffer(10)), 10, ADDR(c_short()),ADDR(Cdisp_size)) if ret != SQL_SUCCESS: check_success(self, ret) - + if force_unicode: - + ret = ODBC_API.SQLDescribeColW(self.stmt_h, col, Cname, len(Cname), ADDR(Cname_ptr),\ ADDR(Ctype_code),ADDR(Csize),ADDR(CDecimalDigits), ADDR(Cnull_ok)) if ret != SQL_SUCCESS: check_success(self, ret) else: - + ret = ODBC_API.SQLDescribeCol(self.stmt_h, col, Cname, len(Cname), ADDR(Cname_ptr),\ ADDR(Ctype_code),ADDR(Csize),ADDR(CDecimalDigits), ADDR(Cnull_ok)) if ret != SQL_SUCCESS: check_success(self, ret) - + col_name = Cname.value if lowercase: col_name = col_name.lower() - #(name, type_code, display_size, + #(name, type_code, display_size, ColDescr.append((col_name, SQL_data_type_dict.get(Ctype_code.value,(Ctype_code.value,))[0],Cdisp_size.value,\ Csize.value, Csize.value,CDecimalDigits.value,Cnull_ok.value == 1 and True or False)) self._ColTypeCodeList.append(Ctype_code.value) - + if len(ColDescr) > 0: self.description = ColDescr # Create the row type before fetching. 
@@ -1771,26 +1771,26 @@ def _UpdateDesc(self): else: self.description = None self._CreateColBuf() - - + + def _NumOfRows(self): """Get the number of rows""" if not self.connection: self.close() - + NOR = c_ssize_t() ret = SQLRowCount(self.stmt_h, ADDR(NOR)) if ret != SQL_SUCCESS: check_success(self, ret) self.rowcount = NOR.value - return self.rowcount + return self.rowcount + - def _NumOfCols(self): """Get the number of cols""" if not self.connection: self.close() - + NOC = c_short() ret = SQLNumResultCols(self.stmt_h, ADDR(NOC)) if ret != SQL_SUCCESS: @@ -1801,7 +1801,7 @@ def _NumOfCols(self): def fetchall(self): if not self.connection: self.close() - + rows = [] while True: row = self.fetchone() @@ -1814,11 +1814,11 @@ def fetchall(self): def fetchmany(self, num = None): if not self.connection: self.close() - + if num is None: num = self.arraysize rows = [] - + while len(rows) < num: row = self.fetchone() if row is None: @@ -1830,12 +1830,12 @@ def fetchmany(self, num = None): def fetchone(self): if not self.connection: self.close() - + ret = SQLFetch(self.stmt_h) - - if ret in (SQL_SUCCESS,SQL_SUCCESS_WITH_INFO): + + if ret in (SQL_SUCCESS,SQL_SUCCESS_WITH_INFO): '''Bind buffers for the record set columns''' - + value_list = [] col_num = 1 for col_name, target_type, used_buf_len, ADDR_used_buf_len, alloc_buffer, ADDR_alloc_buffer, total_buf_len, buf_cvt_func, bind_data in self._ColBufferList: @@ -1844,10 +1844,10 @@ def fetchone(self): if bind_data: ret = SQL_SUCCESS else: - ret = SQLGetData(self.stmt_h, col_num, target_type, ADDR_alloc_buffer, total_buf_len, ADDR_used_buf_len) + ret = SQLGetData(self.stmt_h, col_num, target_type, ADDR_alloc_buffer, total_buf_len, ADDR_used_buf_len) if ret == SQL_SUCCESS: if used_buf_len.value == SQL_NULL_DATA: - value_list.append(None) + value_list.append(None) else: if raw_data_parts == []: # Means no previous data, no need to combine @@ -1865,21 +1865,21 @@ def fetchone(self): raw_data_parts.append(from_buffer_u(alloc_buffer)) else: raw_data_parts.append(alloc_buffer.value) - break - + break + elif ret == SQL_SUCCESS_WITH_INFO: # Means the data is only partial if target_type == SQL_C_BINARY: raw_data_parts.append(alloc_buffer.raw) else: - raw_data_parts.append(alloc_buffer.value) - + raw_data_parts.append(alloc_buffer.value) + elif ret == SQL_NO_DATA: # Means all data has been transmitted break else: - check_success(self, ret) - + check_success(self, ret) + if raw_data_parts != []: if py_v3: if target_type != SQL_C_BINARY: @@ -1893,47 +1893,47 @@ def fetchone(self): col_num += 1 return self._row_type(value_list) - + else: if ret == SQL_NO_DATA_FOUND: - + return None else: check_success(self, ret) - + def __next__(self): return self.next() - - def next(self): + + def next(self): row = self.fetchone() if row is None: raise(StopIteration) return row - + def __iter__(self): return self - + def skip(self, count = 0): if not self.connection: self.close() - + for i in range(count): ret = ODBC_API.SQLFetchScroll(self.stmt_h, SQL_FETCH_NEXT, 0) if ret != SQL_SUCCESS: check_success(self, ret) - return None - - - + return None + + + def nextset(self): if not self.connection: self.close() - + ret = ODBC_API.SQLMoreResults(self.stmt_h) if ret not in (SQL_SUCCESS, SQL_NO_DATA): check_success(self, ret) - + if ret == SQL_NO_DATA: self._free_stmt() return False @@ -1942,15 +1942,15 @@ def nextset(self): self._UpdateDesc() #self._BindCols() return True - - + + def _free_stmt(self, free_type = None): if not self.connection: self.close() - + if not 
self.connection.connected: raise ProgrammingError('HY000','Attempt to use a closed connection.') - + #self.description = None #self.rowcount = -1 if free_type in (SQL_CLOSE, None): @@ -1961,17 +1961,17 @@ def _free_stmt(self, free_type = None): ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_UNBIND) if ret != SQL_SUCCESS: check_success(self, ret) - if free_type in (SQL_RESET_PARAMS, None): + if free_type in (SQL_RESET_PARAMS, None): ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_RESET_PARAMS) if ret != SQL_SUCCESS: check_success(self, ret) - - - + + + def getTypeInfo(self, sqlType = None): if not self.connection: self.close() - + if sqlType is None: type = SQL_ALL_TYPES else: @@ -1982,89 +1982,89 @@ def getTypeInfo(self, sqlType = None): self._UpdateDesc() #self._BindCols() return self.fetchone() - - + + def tables(self, table=None, catalog=None, schema=None, tableType=None): - """Return a list with all tables""" + """Return a list with all tables""" if not self.connection: self.close() - + l_catalog = l_schema = l_table = l_tableType = 0 - + if unicode in [type(x) for x in (table, catalog, schema,tableType)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) API_f = ODBC_API.SQLTablesW else: string_p = ctypes.c_char_p API_f = ODBC_API.SQLTables - - - + + + if catalog is not None: l_catalog = len(catalog) - catalog = string_p(catalog) + catalog = string_p(catalog) - if schema is not None: + if schema is not None: l_schema = len(schema) schema = string_p(schema) - + if table is not None: l_table = len(table) table = string_p(table) - - if tableType is not None: + + if tableType is not None: l_tableType = len(tableType) tableType = string_p(tableType) - + self._free_stmt() self._last_param_types = None self.statement = None ret = API_f(self.stmt_h, catalog, l_catalog, - schema, l_schema, + schema, l_schema, table, l_table, tableType, l_tableType) check_success(self, ret) - + self._NumOfRows() self._UpdateDesc() #self._BindCols() return self - - + + def columns(self, table=None, catalog=None, schema=None, column=None): - """Return a list with all columns""" + """Return a list with all columns""" if not self.connection: self.close() - + l_catalog = l_schema = l_table = l_column = 0 - + if unicode in [type(x) for x in (table, catalog, schema,column)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) API_f = ODBC_API.SQLColumnsW else: string_p = ctypes.c_char_p API_f = ODBC_API.SQLColumns - - - - if catalog is not None: + + + + if catalog is not None: l_catalog = len(catalog) catalog = string_p(catalog) if schema is not None: l_schema = len(schema) schema = string_p(schema) - if table is not None: + if table is not None: l_table = len(table) table = string_p(table) - if column is not None: + if column is not None: l_column = len(column) column = string_p(column) - + self._free_stmt() self._last_param_types = None self.statement = None - + ret = API_f(self.stmt_h, catalog, l_catalog, schema, l_schema, @@ -2076,87 +2076,87 @@ def columns(self, table=None, catalog=None, schema=None, column=None): self._UpdateDesc() #self._BindCols() return self - - + + def primaryKeys(self, table=None, catalog=None, schema=None): if not self.connection: self.close() - + l_catalog = l_schema = l_table = 0 - + if unicode in [type(x) for x in (table, catalog, schema)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) API_f = ODBC_API.SQLPrimaryKeysW else: string_p = ctypes.c_char_p API_f = ODBC_API.SQLPrimaryKeys - - - - if catalog is not None: + + + + if catalog is not None: l_catalog = len(catalog) catalog = string_p(catalog) - - if 
schema is not None: + + if schema is not None: l_schema = len(schema) schema = string_p(schema) - - if table is not None: + + if table is not None: l_table = len(table) table = string_p(table) - + self._free_stmt() self._last_param_types = None self.statement = None - + ret = API_f(self.stmt_h, catalog, l_catalog, schema, l_schema, table, l_table) check_success(self, ret) - + self._NumOfRows() self._UpdateDesc() #self._BindCols() return self - - + + def foreignKeys(self, table=None, catalog=None, schema=None, foreignTable=None, foreignCatalog=None, foreignSchema=None): if not self.connection: self.close() - + l_catalog = l_schema = l_table = l_foreignTable = l_foreignCatalog = l_foreignSchema = 0 - + if unicode in [type(x) for x in (table, catalog, schema,foreignTable,foreignCatalog,foreignSchema)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) API_f = ODBC_API.SQLForeignKeysW else: string_p = ctypes.c_char_p API_f = ODBC_API.SQLForeignKeys - - if catalog is not None: + + if catalog is not None: l_catalog = len(catalog) catalog = string_p(catalog) - if schema is not None: + if schema is not None: l_schema = len(schema) schema = string_p(schema) - if table is not None: + if table is not None: l_table = len(table) table = string_p(table) - if foreignTable is not None: + if foreignTable is not None: l_foreignTable = len(foreignTable) foreignTable = string_p(foreignTable) - if foreignCatalog is not None: + if foreignCatalog is not None: l_foreignCatalog = len(foreignCatalog) foreignCatalog = string_p(foreignCatalog) - if foreignSchema is not None: + if foreignSchema is not None: l_foreignSchema = len(foreignSchema) foreignSchema = string_p(foreignSchema) - + self._free_stmt() self._last_param_types = None self.statement = None - + ret = API_f(self.stmt_h, catalog, l_catalog, schema, l_schema, @@ -2165,17 +2165,17 @@ def foreignKeys(self, table=None, catalog=None, schema=None, foreignTable=None, foreignSchema, l_foreignSchema, foreignTable, l_foreignTable) check_success(self, ret) - + self._NumOfRows() self._UpdateDesc() #self._BindCols() return self - - + + def procedurecolumns(self, procedure=None, catalog=None, schema=None, column=None): if not self.connection: self.close() - + l_catalog = l_schema = l_procedure = l_column = 0 if unicode in [type(x) for x in (procedure, catalog, schema,column)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) @@ -2183,74 +2183,74 @@ def procedurecolumns(self, procedure=None, catalog=None, schema=None, column=Non else: string_p = ctypes.c_char_p API_f = ODBC_API.SQLProcedureColumns - - - if catalog is not None: + + + if catalog is not None: l_catalog = len(catalog) catalog = string_p(catalog) - if schema is not None: + if schema is not None: l_schema = len(schema) schema = string_p(schema) - if procedure is not None: + if procedure is not None: l_procedure = len(procedure) procedure = string_p(procedure) - if column is not None: + if column is not None: l_column = len(column) column = string_p(column) - - + + self._free_stmt() self._last_param_types = None self.statement = None - + ret = API_f(self.stmt_h, catalog, l_catalog, schema, l_schema, procedure, l_procedure, column, l_column) check_success(self, ret) - + self._NumOfRows() self._UpdateDesc() return self - - + + def procedures(self, procedure=None, catalog=None, schema=None): if not self.connection: self.close() - + l_catalog = l_schema = l_procedure = 0 - + if unicode in [type(x) for x in (procedure, catalog, schema)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) API_f = ODBC_API.SQLProceduresW else: 
string_p = ctypes.c_char_p API_f = ODBC_API.SQLProcedures - - - - if catalog is not None: + + + + if catalog is not None: l_catalog = len(catalog) catalog = string_p(catalog) - if schema is not None: + if schema is not None: l_schema = len(schema) schema = string_p(schema) - if procedure is not None: + if procedure is not None: l_procedure = len(procedure) procedure = string_p(procedure) - - + + self._free_stmt() self._last_param_types = None self.statement = None - + ret = API_f(self.stmt_h, catalog, l_catalog, schema, l_schema, procedure, l_procedure) check_success(self, ret) - + self._NumOfRows() self._UpdateDesc() return self @@ -2259,27 +2259,27 @@ def procedures(self, procedure=None, catalog=None, schema=None): def statistics(self, table, catalog=None, schema=None, unique=False, quick=True): if not self.connection: self.close() - + l_table = l_catalog = l_schema = 0 - + if unicode in [type(x) for x in (table, catalog, schema)]: string_p = lambda x:wchar_pointer(UCS_buf(x)) API_f = ODBC_API.SQLStatisticsW else: string_p = ctypes.c_char_p API_f = ODBC_API.SQLStatistics - - - if catalog is not None: + + + if catalog is not None: l_catalog = len(catalog) catalog = string_p(catalog) - if schema is not None: + if schema is not None: l_schema = len(schema) schema = string_p(schema) - if table is not None: + if table is not None: l_table = len(table) table = string_p(table) - + if unique: Unique = SQL_INDEX_UNIQUE else: @@ -2288,23 +2288,23 @@ def statistics(self, table, catalog=None, schema=None, unique=False, quick=True) Reserved = SQL_QUICK else: Reserved = SQL_ENSURE - + self._free_stmt() self._last_param_types = None self.statement = None - + ret = API_f(self.stmt_h, catalog, l_catalog, - schema, l_schema, + schema, l_schema, table, l_table, Unique, Reserved) check_success(self, ret) - + self._NumOfRows() self._UpdateDesc() #self._BindCols() return self - + def commit(self): if not self.connection: @@ -2315,12 +2315,12 @@ def rollback(self): if not self.connection: self.close() self.connection.rollback() - + def setoutputsize(self, size, column = None): if not self.connection: self.close() self._outputsize[column] = size - + def setinputsizes(self, sizes): if not self.connection: self.close() @@ -2331,7 +2331,7 @@ def close(self): """ Call SQLCloseCursor API to free the statement handle""" # ret = ODBC_API.SQLCloseCursor(self.stmt_h) # check_success(self, ret) -# +# if self.connection.connected: ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_CLOSE) check_success(self, ret) @@ -2344,32 +2344,32 @@ def close(self): ret = ODBC_API.SQLFreeHandle(SQL_HANDLE_STMT, self.stmt_h) check_success(self, ret) - - + + self.closed = True - - - def __del__(self): + + + def __del__(self): if not self.closed: self.close() - + def __exit__(self, type, value, traceback): if not self.connection: self.close() - + if value: self.rollback() else: self.commit() - + self.close() - - + + def __enter__(self): return self - -# This class implement a odbc connection. + +# This class implement a odbc connection. 
# # @@ -2390,7 +2390,7 @@ def __init__(self, connectString = '', autocommit = False, ansi = False, timeout connectString = connectString + key + '=' + value + ';' self.connectString = connectString - + self.clear_output_converters() try: @@ -2400,23 +2400,23 @@ def __init__(self, connectString = '', autocommit = False, ansi = False, timeout AllocateEnv() finally: lock.release() - + # Allocate an DBC handle self.dbc_h under the environment shared_env_h # This DBC handle is actually the basis of a "connection" - # The handle of self.dbc_h will be used to connect to a certain source + # The handle of self.dbc_h will be used to connect to a certain source # in the self.connect and self.ConnectByDSN method - + ret = ODBC_API.SQLAllocHandle(SQL_HANDLE_DBC, shared_env_h, ADDR(self.dbc_h)) check_success(self, ret) - + self.connect(connectString, autocommit, ansi, timeout, unicode_results, readonly) - - - + + + def connect(self, connectString = '', autocommit = False, ansi = False, timeout = 0, unicode_results = use_unicode, readonly = False): """Connect to odbc, using connect strings and set the connection's attributes like autocommit and timeout by calling SQLSetConnectAttr - """ + """ # Before we establish the connection by the connection string # Set the connection's attribute of "timeout" (Actully LOGIN_TIMEOUT) @@ -2431,9 +2431,9 @@ def connect(self, connectString = '', autocommit = False, ansi = False, timeout # Convert the connetsytring to encoded string - # so it can be converted to a ctypes c_char array object + # so it can be converted to a ctypes c_char array object + - self.ansi = ansi if not ansi: c_connectString = wchar_pointer(UCS_buf(self.connectString)) @@ -2459,43 +2459,43 @@ def connect(self, connectString = '', autocommit = False, ansi = False, timeout else: ret = odbc_func(self.dbc_h, 0, c_connectString, len(self.connectString), None, 0, None, SQL_DRIVER_NOPROMPT) check_success(self, ret) - - - # Set the connection's attribute of "autocommit" + + + # Set the connection's attribute of "autocommit" # self.autocommit = autocommit - + if self.autocommit == True: ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_AUTOCOMMIT, SQL_AUTOCOMMIT_ON, SQL_IS_UINTEGER) else: ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_AUTOCOMMIT, SQL_AUTOCOMMIT_OFF, SQL_IS_UINTEGER) check_success(self, ret) - - # Set the connection's attribute of "readonly" + + # Set the connection's attribute of "readonly" # self.readonly = readonly - + ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_ACCESS_MODE, self.readonly and SQL_MODE_READ_ONLY or SQL_MODE_READ_WRITE, SQL_IS_UINTEGER) check_success(self, ret) - + self.unicode_results = unicode_results self.connected = 1 self.update_db_special_info() - + def clear_output_converters(self): self.output_converter = {} for sqltype, profile in SQL_data_type_dict.items(): self.output_converter[sqltype] = profile[1] - - + + def add_output_converter(self, sqltype, func): self.output_converter[sqltype] = func - + def settimeout(self, timeout): ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_CONNECTION_TIMEOUT, timeout, SQL_IS_UINTEGER); check_success(self, ret) self.timeout = timeout - + def ConnectByDSN(self, dsn, user, passwd = ''): """Connect to odbc, we need dsn, user and optionally password""" @@ -2504,21 +2504,21 @@ def ConnectByDSN(self, dsn, user, passwd = ''): self.passwd = passwd sn = create_buffer(dsn) - un = create_buffer(user) + un = create_buffer(user) pw = create_buffer(passwd) - + ret = ODBC_API.SQLConnect(self.dbc_h, sn, len(sn), un, 
len(un), pw, len(pw)) check_success(self, ret) self.update_db_special_info() self.connected = 1 - - - def cursor(self, row_type_callable=None): + + + def cursor(self, row_type_callable=None): #self.settimeout(self.timeout) if not self.connected: raise ProgrammingError('HY000','Attempt to use a closed connection.') - cur = Cursor(self, row_type_callable=row_type_callable) + cur = Cursor(self, row_type_callable=row_type_callable) # self._cursors.append(cur) return cur @@ -2538,7 +2538,7 @@ def update_db_special_info(self): except: pass cur.close() - + self.support_SQLDescribeParam = False try: driver_name = self.getinfo(SQL_DRIVER_NAME) @@ -2546,11 +2546,11 @@ def update_db_special_info(self): self.support_SQLDescribeParam = True except: pass - + def commit(self): if not self.connected: raise ProgrammingError('HY000','Attempt to use a closed connection.') - + ret = SQLEndTran(SQL_HANDLE_DBC, self.dbc_h, SQL_COMMIT) if ret != SQL_SUCCESS: check_success(self, ret) @@ -2561,14 +2561,14 @@ def rollback(self): ret = SQLEndTran(SQL_HANDLE_DBC, self.dbc_h, SQL_ROLLBACK) if ret != SQL_SUCCESS: check_success(self, ret) - - - + + + def getinfo(self,infotype): if infotype not in list(aInfoTypes.keys()): - raise ProgrammingError('HY000','Invalid getinfo value: '+str(infotype)) - - + raise ProgrammingError('HY000','Invalid getinfo value: '+str(infotype)) + + if aInfoTypes[infotype] == 'GI_UINTEGER': total_buf_len = 1000 alloc_buffer = ctypes.c_ulong() @@ -2577,7 +2577,7 @@ def getinfo(self,infotype): ADDR(used_buf_len)) check_success(self, ret) result = alloc_buffer.value - + elif aInfoTypes[infotype] == 'GI_USMALLINT': total_buf_len = 1000 alloc_buffer = ctypes.c_ushort() @@ -2607,25 +2607,25 @@ def getinfo(self,infotype): result = True else: result = False - + return result - + def __exit__(self, type, value, traceback): if value: self.rollback() else: self.commit() - + if self.connected: self.close() - + def __enter__(self): return self def __del__(self): if self.connected: self.close() - + def close(self): if not self.connected: raise ProgrammingError('HY000','Attempt to close a closed connection.') @@ -2633,7 +2633,7 @@ def close(self): # if not cur is None: # if not cur.closed: # cur.close() - + if self.connected: #if DEBUG:print 'disconnect' if not self.autocommit: @@ -2648,7 +2648,7 @@ def close(self): # ret = ODBC_API.SQLFreeHandle(SQL_HANDLE_ENV, shared_env_h) # check_success(shared_env_h, ret) self.connected = 0 - + odbc = Connection connect = odbc ''' @@ -2665,7 +2665,7 @@ def drivers(): AllocateEnv() finally: lock.release() - + DriverDescription = create_buffer_u(1000) BufferLength1 = c_short(1000) DescriptionLength = c_short() @@ -2683,14 +2683,14 @@ def drivers(): if Direction == SQL_FETCH_FIRST: Direction = SQL_FETCH_NEXT return DriverList - + def win_create_mdb(mdb_path, sort_order = "General\0\0"): if sys.platform not in ('win32','cli'): raise Exception('This function is available for use in Windows only.') - + mdb_driver = [d for d in drivers() if 'Microsoft Access Driver (*.mdb' in d] if mdb_driver == []: raise Exception('Access Driver is not found.') @@ -2706,17 +2706,17 @@ def win_create_mdb(mdb_path, sort_order = "General\0\0"): else: c_Path = "CREATE_DB=" + mdb_path + " " + sort_order ODBC_ADD_SYS_DSN = 1 - - + + ret = ctypes.windll.ODBCCP32.SQLConfigDataSource(None,ODBC_ADD_SYS_DSN,driver_name, c_Path) if not ret: raise Exception('Failed to create Access mdb file - "%s". Please check file path, permission and Access driver readiness.' 
%mdb_path) - - + + def win_connect_mdb(mdb_path): if sys.platform not in ('win32','cli'): raise Exception('This function is available for use in Windows only.') - + mdb_driver = [d for d in drivers() if 'Microsoft Access Driver (*.mdb' in d] if mdb_driver == []: raise Exception('Access Driver is not found.') @@ -2724,20 +2724,20 @@ def win_connect_mdb(mdb_path): driver_name = mdb_driver[0] return connect('Driver={'+driver_name+"};DBQ="+mdb_path, unicode_results = use_unicode, readonly = False) - - - + + + def win_compact_mdb(mdb_path, compacted_mdb_path, sort_order = "General\0\0"): if sys.platform not in ('win32','cli'): raise Exception('This function is available for use in Windows only.') - - + + mdb_driver = [d for d in drivers() if 'Microsoft Access Driver (*.mdb' in d] if mdb_driver == []: raise Exception('Access Driver is not found.') else: driver_name = mdb_driver[0].encode('mbcs') - + #COMPACT_DB= ctypes.windll.ODBCCP32.SQLConfigDataSource.argtypes = [ctypes.c_void_p,ctypes.c_ushort,ctypes.c_char_p,ctypes.c_char_p] #driver_name = "Microsoft Access Driver (*.mdb)" @@ -2751,7 +2751,7 @@ def win_compact_mdb(mdb_path, compacted_mdb_path, sort_order = "General\0\0"): ret = ctypes.windll.ODBCCP32.SQLConfigDataSource(None,ODBC_ADD_SYS_DSN,driver_name, c_Path) if not ret: raise Exception('Failed to compact Access mdb file - "%s". Please check file path, permission and Access driver readiness.' %compacted_mdb_path) - + def dataSources(): """Return a list with [name, descrition]""" diff --git a/gluon/contrib/stripe.py b/gluon/contrib/stripe.py index b9602ace4..a0f6ce6df 100644 --- a/gluon/contrib/stripe.py +++ b/gluon/contrib/stripe.py @@ -37,7 +37,7 @@ def pay(): elif form.errors: redirect(URL('pay_error')) return dict(form=form) - + """ URL_CHARGE = 'https://%s:@api.stripe.com/v1/charges' @@ -114,7 +114,7 @@ def __init__(self, def process(self): from gluon import current - request = current.request + request = current.request if request.post_vars: if self.signature == request.post_vars.signature: self.response = Stripe(self.sk).charge( @@ -127,7 +127,7 @@ def process(self): return self self.errors = True return self - + def xml(self): from gluon.template import render if self.accepted: @@ -135,8 +135,8 @@ def xml(self): elif self.errors: return "There was an processing error" else: - context = dict(amount=self.amount, - signature=self.signature, pk=self.pk, + context = dict(amount=self.amount, + signature=self.signature, pk=self.pk, currency_symbol=self.currency_symbol, security_notice=self.security_notice, disclosure_notice=self.disclosure_notice) @@ -145,14 +145,14 @@ def xml(self): TEMPLATE = """ -