code
stringlengths
501
5.19M
package
stringlengths
2
81
path
stringlengths
9
304
filename
stringlengths
4
145
import sre_constants
import cgi
import time
import re
from operator import itemgetter

from anki.lang import ngettext
from aqt.qt import *
import anki
import aqt.forms
from anki.utils import fmtTimeSpan, ids2str, stripHTMLMedia, isWin, \
    intTime, isMac
from aqt.utils import saveGeom, restoreGeom, saveSplitter, restoreSplitter, \
    saveHeader, restoreHeader, saveState, restoreState, applyStyles, getTag, \
    showInfo, askUser, tooltip, openHelp, showWarning, shortcut, getBase, \
    mungeQA
from anki.hooks import runHook, addHook, remHook
from aqt.webview import AnkiWebView
from aqt.toolbar import Toolbar
from anki.consts import *
from anki.sound import playFromText, clearAudioQueue

# row background colours used by StatusDelegate
COLOUR_SUSPENDED = "#FFFFB2"
COLOUR_MARKED = "#D9B2E9"

# fixme: need to refresh after undo

# Data model
##########################################################################

class DataModel(QAbstractTableModel):
    """Qt table model backing the browser's card list.

    Rows are card ids (self.cards); fetched Card objects are cached in
    self.cardObjs and invalidated on edit via refreshNote()."""

    def __init__(self, browser):
        QAbstractTableModel.__init__(self)
        self.browser = browser
        self.col = browser.col
        self.sortKey = None
        # visible columns; persisted in the collection config
        self.activeCols = self.col.conf.get(
            "activeCols", ["noteFld", "template", "cardDue", "deck"])
        self.cards = []
        self.cardObjs = {}

    def getCard(self, index):
        "Return the card for the given row, fetching and caching on demand."
        id = self.cards[index.row()]
        if id not in self.cardObjs:
            self.cardObjs[id] = self.col.getCard(id)
        return self.cardObjs[id]

    def refreshNote(self, note):
        "Drop cached cards of 'note' and repaint if any were on display."
        refresh = False
        for c in note.cards():
            if c.id in self.cardObjs:
                del self.cardObjs[c.id]
                refresh = True
        if refresh:
            self.emit(SIGNAL("layoutChanged()"))

    # Model interface
    ######################################################################

    def rowCount(self, index):
        return len(self.cards)

    def columnCount(self, index):
        return len(self.activeCols)

    def data(self, index, role):
        if not index.isValid():
            return
        if role == Qt.FontRole:
            # only the QA/sort-field columns use the template's browser font
            if self.activeCols[index.column()] not in (
                "question", "answer", "noteFld"):
                return
            f = QFont()
            c = self.getCard(index)
            t = c.template()
            f.setFamily(t.get("bfont", self.browser.mw.fontFamily))
            f.setPixelSize(t.get("bsize", self.browser.mw.fontHeight))
            return f
        elif role == Qt.TextAlignmentRole:
            align = Qt.AlignVCenter
            if self.activeCols[index.column()] not in (
                "question", "answer", "template", "deck", "noteFld", "note"):
                align |= Qt.AlignHCenter
            return align
        elif role == Qt.DisplayRole or role == Qt.EditRole:
            return self.columnData(index)
        else:
            return

    def headerData(self, section, orientation, role):
        if orientation == Qt.Vertical:
            return
        elif role == Qt.DisplayRole and section < len(self.activeCols):
            type = self.columnType(section)
            txt = None
            for stype, name in self.browser.columns:
                if type == stype:
                    txt = name
                    break
            # handle case where extension has set an invalid column type
            if not txt:
                txt = self.browser.columns[0][1]
            return txt
        else:
            return

    def flags(self, index):
        return Qt.ItemFlag(Qt.ItemIsEnabled |
                           Qt.ItemIsSelectable)

    # Filtering
    ######################################################################

    def search(self, txt, reset=True):
        "Replace the row set with the results of the given search string."
        if reset:
            self.beginReset()
        # the db progress handler may cause a refresh, so we need to zero out
        # old data first
        self.cards = []
        self.cards = self.col.findCards(txt, order=True)
        if reset:
            self.endReset()

    def reset(self):
        self.beginReset()
        self.endReset()

    def beginReset(self):
        # save pending edits and selection before invalidating the card cache
        self.browser.editor.saveNow()
        self.browser.editor.setNote(None, hide=False)
        self.browser.mw.progress.start()
        self.saveSelection()
        self.beginResetModel()
        self.cardObjs = {}

    def endReset(self):
        self.endResetModel()
        self.restoreSelection()
        self.browser.mw.progress.finish()

    def reverse(self):
        self.beginReset()
        self.cards.reverse()
        self.endReset()

    def saveSelection(self):
        "Remember the selected card ids and the focused card across a reset."
        cards = self.browser.selectedCards()
        # a set gives the same O(1) membership test as the old id->True dict
        self.selectedCards = set(cards)
        if getattr(self.browser, 'card', None):
            self.focusedCard = self.browser.card.id
        else:
            self.focusedCard = None

    def restoreSelection(self):
        if not self.cards:
            return
        sm = self.browser.form.tableView.selectionModel()
        sm.clear()
        # restore selection
        items = QItemSelection()
        count = 0
        firstIdx = None
        focusedIdx = None
        for row, id in enumerate(self.cards):
            # if the id matches the focused card, note the index
            if self.focusedCard == id:
                focusedIdx = self.index(row, 0)
                items.select(focusedIdx, focusedIdx)
                self.focusedCard = None
            # if the card was previously selected, select again
            if id in self.selectedCards:
                count += 1
                idx = self.index(row, 0)
                items.select(idx, idx)
                # note down the first card of the selection, in case we don't
                # have a focused card
                if not firstIdx:
                    firstIdx = idx
        # focus previously focused or first in selection
        idx = focusedIdx or firstIdx
        tv = self.browser.form.tableView
        if idx:
            tv.selectRow(idx.row())
            tv.scrollTo(idx, tv.PositionAtCenter)
            if count < 500:
                # discard large selections; they're too slow
                sm.select(items, QItemSelectionModel.SelectCurrent |
                          QItemSelectionModel.Rows)
        else:
            tv.selectRow(0)

    # Column data
    ######################################################################

    def columnType(self, column):
        return self.activeCols[column]

    def columnData(self, index):
        "Return the display string for the given cell."
        col = index.column()
        type = self.columnType(col)
        c = self.getCard(index)
        if type == "question":
            return self.question(c)
        elif type == "answer":
            return self.answer(c)
        elif type == "noteFld":
            f = c.note()
            return self.formatQA(f.fields[self.col.models.sortIdx(f.model())])
        elif type == "template":
            t = c.template()['name']
            if c.model()['type'] == MODEL_CLOZE:
                t += " %d" % (c.ord+1)
            return t
        elif type == "cardDue":
            # catch invalid dates
            try:
                t = self.nextDue(c, index)
            except:
                t = ""
            if c.queue < 0:
                # parenthesise due dates of suspended/buried cards
                t = "(" + t + ")"
            return t
        elif type == "noteCrt":
            # note id doubles as its creation time in epoch milliseconds
            return time.strftime("%Y-%m-%d", time.localtime(c.note().id/1000))
        elif type == "noteMod":
            return time.strftime("%Y-%m-%d", time.localtime(c.note().mod))
        elif type == "cardMod":
            return time.strftime("%Y-%m-%d", time.localtime(c.mod))
        elif type == "cardReps":
            return str(c.reps)
        elif type == "cardLapses":
            return str(c.lapses)
        elif type == "noteTags":
            return " ".join(c.note().tags)
        elif type == "note":
            return c.model()['name']
        elif type == "cardIvl":
            if c.type == 0:
                return _("(new)")
            elif c.type == 1:
                return _("(learning)")
            return fmtTimeSpan(c.ivl*86400)
        elif type == "cardEase":
            if c.type == 0:
                return _("(new)")
            return "%d%%" % (c.factor/10)
        elif type == "deck":
            if c.odid:
                # in a cram deck
                return "%s (%s)" % (
                    self.browser.mw.col.decks.name(c.did),
                    self.browser.mw.col.decks.name(c.odid))
            # normal deck
            return self.browser.mw.col.decks.name(c.did)

    def question(self, c):
        return self.formatQA(c.q(browser=True))

    def answer(self, c):
        if c.template().get('bafmt'):
            # they have provided a template, use it verbatim
            c.q(browser=True)
            return self.formatQA(c.a())
        # need to strip question from answer
        q = self.question(c)
        a = self.formatQA(c.a())
        if a.startswith(q):
            return a[len(q):].strip()
        return a

    def formatQA(self, txt):
        "Flatten rendered card HTML into a single displayable line."
        s = txt.replace("<br>", u" ")
        s = s.replace("<br />", u" ")
        s = s.replace("<div>", u" ")
        s = s.replace("\n", u" ")
        # raw strings: these patterns contain backslash escapes
        s = re.sub(r"\[sound:[^]]+\]", "", s)
        s = re.sub(r"\[\[type:[^]]+\]\]", "", s)
        s = stripHTMLMedia(s)
        s = s.strip()
        return s

    def nextDue(self, c, index):
        "Human-readable due date/position for the cardDue column."
        if c.odid:
            return _("(filtered)")
        elif c.queue == 1:
            # learning cards store an epoch timestamp directly
            date = c.due
        elif c.queue == 0 or c.type == 0:
            # new cards show their queue position
            return str(c.due)
        elif c.queue in (2,3) or (c.type == 2 and c.queue < 0):
            # review cards store days relative to collection creation
            date = time.time() + ((c.due - self.col.sched.today)*86400)
        else:
            return ""
        return time.strftime("%Y-%m-%d", time.localtime(date))
"cardLapses": return str(c.lapses) elif type == "noteTags": return " ".join(c.note().tags) elif type == "note": return c.model()['name'] elif type == "cardIvl": if c.type == 0: return _("(new)") elif c.type == 1: return _("(learning)") return fmtTimeSpan(c.ivl*86400) elif type == "cardEase": if c.type == 0: return _("(new)") return "%d%%" % (c.factor/10) elif type == "deck": if c.odid: # in a cram deck return "%s (%s)" % ( self.browser.mw.col.decks.name(c.did), self.browser.mw.col.decks.name(c.odid)) # normal deck return self.browser.mw.col.decks.name(c.did) def question(self, c): return self.formatQA(c.q(browser=True)) def answer(self, c): if c.template().get('bafmt'): # they have provided a template, use it verbatim c.q(browser=True) return self.formatQA(c.a()) # need to strip question from answer q = self.question(c) a = self.formatQA(c.a()) if a.startswith(q): return a[len(q):].strip() return a def formatQA(self, txt): s = txt.replace("<br>", u" ") s = s.replace("<br />", u" ") s = s.replace("<div>", u" ") s = s.replace("\n", u" ") s = re.sub("\[sound:[^]]+\]", "", s) s = re.sub("\[\[type:[^]]+\]\]", "", s) s = stripHTMLMedia(s) s = s.strip() return s def nextDue(self, c, index): if c.odid: return _("(filtered)") elif c.queue == 1: date = c.due elif c.queue == 0 or c.type == 0: return str(c.due) elif c.queue in (2,3) or (c.type == 2 and c.queue < 0): date = time.time() + ((c.due - self.col.sched.today)*86400) else: return "" return time.strftime("%Y-%m-%d", time.localtime(date)) # Line painter ###################################################################### class StatusDelegate(QItemDelegate): def __init__(self, browser, model): QItemDelegate.__init__(self, browser) self.browser = browser self.model = model def paint(self, painter, option, index): self.browser.mw.progress.blockUpdates = True try: c = self.model.getCard(index) except: # in the the middle of a reset; return nothing so this row is not # rendered until we have a chance to reset the model 
return finally: self.browser.mw.progress.blockUpdates = True col = None if c.note().hasTag("Marked"): col = COLOUR_MARKED elif c.queue == -1: col = COLOUR_SUSPENDED if col: brush = QBrush(QColor(col)) painter.save() painter.fillRect(option.rect, brush) painter.restore() return QItemDelegate.paint(self, painter, option, index) # Browser window ###################################################################### # fixme: respond to reset+edit hooks class Browser(QMainWindow): def __init__(self, mw): QMainWindow.__init__(self, None, Qt.Window) applyStyles(self) self.mw = mw self.col = self.mw.col self.lastFilter = "" self._previewWindow = None self.form = aqt.forms.browser.Ui_Dialog() self.form.setupUi(self) restoreGeom(self, "editor", 0) restoreState(self, "editor") restoreSplitter(self.form.splitter_2, "editor2") restoreSplitter(self.form.splitter, "editor3") self.form.splitter_2.setChildrenCollapsible(False) self.form.splitter.setChildrenCollapsible(False) self.card = None self.setupToolbar() self.setupColumns() self.setupTable() self.setupMenus() self.setupSearch() self.setupTree() self.setupHeaders() self.setupHooks() self.setupEditor() self.updateFont() self.onUndoState(self.mw.form.actionUndo.isEnabled()) self.form.searchEdit.setFocus() self.form.searchEdit.lineEdit().setText("is:current") self.form.searchEdit.lineEdit().selectAll() self.onSearch() self.show() def setupToolbar(self): self.toolbarWeb = AnkiWebView() self.toolbarWeb.setFixedHeight(32 + self.mw.fontHeightDelta) self.toolbar = BrowserToolbar(self.mw, self.toolbarWeb, self) self.form.verticalLayout_3.insertWidget(0, self.toolbarWeb) self.toolbar.draw() def setupMenus(self): # actions c = self.connect; f = self.form; s = SIGNAL("triggered()") if not isMac: f.actionClose.setVisible(False) c(f.actionReposition, s, self.reposition) c(f.actionReschedule, s, self.reschedule) c(f.actionCram, s, self.cram) c(f.actionChangeModel, s, self.onChangeModel) # edit c(f.actionUndo, s, self.mw.onUndo) 
c(f.previewButton, SIGNAL("clicked()"), self.onTogglePreview) f.previewButton.setToolTip(_("Preview Selected Card (%s)") % shortcut(_("Ctrl+Shift+P"))) c(f.actionInvertSelection, s, self.invertSelection) c(f.actionSelectNotes, s, self.selectNotes) c(f.actionFindReplace, s, self.onFindReplace) c(f.actionFindDuplicates, s, self.onFindDupes) # jumps c(f.actionPreviousCard, s, self.onPreviousCard) c(f.actionNextCard, s, self.onNextCard) c(f.actionFirstCard, s, self.onFirstCard) c(f.actionLastCard, s, self.onLastCard) c(f.actionFind, s, self.onFind) c(f.actionNote, s, self.onNote) c(f.actionTags, s, self.onTags) c(f.actionCardList, s, self.onCardList) # help c(f.actionGuide, s, self.onHelp) # keyboard shortcut for shift+home/end self.pgUpCut = QShortcut(QKeySequence("Shift+Home"), self) c(self.pgUpCut, SIGNAL("activated()"), self.onFirstCard) self.pgDownCut = QShortcut(QKeySequence("Shift+End"), self) c(self.pgDownCut, SIGNAL("activated()"), self.onLastCard) # card info self.infoCut = QShortcut(QKeySequence("Ctrl+Shift+I"), self) c(self.infoCut, SIGNAL("activated()"), self.showCardInfo) # set deck self.changeDeckCut = QShortcut(QKeySequence("Ctrl+D"), self) c(self.changeDeckCut, SIGNAL("activated()"), self.setDeck) # add/remove tags self.tagCut1 = QShortcut(QKeySequence("Ctrl+Shift+T"), self) c(self.tagCut1, SIGNAL("activated()"), self.addTags) self.tagCut2 = QShortcut(QKeySequence("Ctrl+Alt+T"), self) c(self.tagCut2, SIGNAL("activated()"), self.deleteTags) self.tagCut3 = QShortcut(QKeySequence("Ctrl+K"), self) c(self.tagCut3, SIGNAL("activated()"), self.onMark) # deletion self.delCut1 = QShortcut(QKeySequence("Delete"), self) self.delCut1.setAutoRepeat(False) c(self.delCut1, SIGNAL("activated()"), self.deleteNotes) # add-on hook runHook('browser.setupMenus', self) self.mw.maybeHideAccelerators(self) def updateFont(self): # we can't choose different line heights efficiently, so we need # to pick a line height big enough for any card template curmax = 16 for m in 
self.col.models.all(): for t in m['tmpls']: bsize = t.get("bsize", 0) if bsize > curmax: curmax = bsize self.form.tableView.verticalHeader().setDefaultSectionSize( curmax + 6) def closeEvent(self, evt): saveSplitter(self.form.splitter_2, "editor2") saveSplitter(self.form.splitter, "editor3") self.editor.saveNow() self.editor.setNote(None) saveGeom(self, "editor") saveState(self, "editor") saveHeader(self.form.tableView.horizontalHeader(), "editor") self.col.conf['activeCols'] = self.model.activeCols self.col.setMod() self.hide() aqt.dialogs.close("Browser") self.teardownHooks() self.mw.maybeReset() evt.accept() def canClose(self): return True def keyPressEvent(self, evt): "Show answer on RET or register answer." if evt.key() == Qt.Key_Escape: self.close() elif self.mw.app.focusWidget() == self.form.tree: if evt.key() in (Qt.Key_Return, Qt.Key_Enter): item = self.form.tree.currentItem() self.onTreeClick(item, 0) def setupColumns(self): self.columns = [ ('question', _("Question")), ('answer', _("Answer")), ('template', _("Card")), ('deck', _("Deck")), ('noteFld', _("Sort Field")), ('noteCrt', _("Created")), ('noteMod', _("Edited")), ('cardMod', _("Changed")), ('cardDue', _("Due")), ('cardIvl', _("Interval")), ('cardEase', _("Ease")), ('cardReps', _("Reviews")), ('cardLapses', _("Lapses")), ('noteTags', _("Tags")), ('note', _("Note")), ] self.columns.sort(key=itemgetter(1)) # Searching ###################################################################### def setupSearch(self): self.filterTimer = None self.form.searchEdit.setLineEdit(FavouritesLineEdit(self.mw, self)) self.connect(self.form.searchButton, SIGNAL("clicked()"), self.onSearch) self.connect(self.form.searchEdit.lineEdit(), SIGNAL("returnPressed()"), self.onSearch) self.form.searchEdit.setCompleter(None) self.form.searchEdit.addItems(self.mw.pm.profile['searchHistory']) def onSearch(self, reset=True): "Careful: if reset is true, the current note is saved." 
txt = unicode(self.form.searchEdit.lineEdit().text()).strip() prompt = _("<type here to search; hit enter to show current deck>") sh = self.mw.pm.profile['searchHistory'] # update search history if txt in sh: sh.remove(txt) sh.insert(0, txt) sh = sh[:30] self.form.searchEdit.clear() self.form.searchEdit.addItems(sh) self.mw.pm.profile['searchHistory'] = sh if self.mw.state == "review" and "is:current" in txt: # search for current card, but set search to easily display whole # deck if reset: self.model.beginReset() self.model.focusedCard = self.mw.reviewer.card.id self.model.search("nid:%d"%self.mw.reviewer.card.nid, False) if reset: self.model.endReset() self.form.searchEdit.lineEdit().setText(prompt) self.form.searchEdit.lineEdit().selectAll() return elif "is:current" in txt: self.form.searchEdit.lineEdit().setText(prompt) self.form.searchEdit.lineEdit().selectAll() elif txt == prompt: self.form.searchEdit.lineEdit().setText("deck:current ") txt = "deck:current " self.model.search(txt, reset) if not self.model.cards: # no row change will fire self.onRowChanged(None, None) elif self.mw.state == "review": self.focusCid(self.mw.reviewer.card.id) def updateTitle(self): selected = len(self.form.tableView.selectionModel().selectedRows()) cur = len(self.model.cards) self.setWindowTitle(ngettext("Browser (%(cur)d card shown; %(sel)s)", "Browser (%(cur)d cards shown; %(sel)s)", cur) % { "cur": cur, "sel": ngettext("%d selected", "%d selected", selected) % selected }) return selected def onReset(self): self.editor.setNote(None) self.onSearch() # Table view & editor ###################################################################### def setupTable(self): self.model = DataModel(self) self.form.tableView.setSortingEnabled(True) self.form.tableView.setModel(self.model) self.form.tableView.selectionModel() self.form.tableView.setItemDelegate(StatusDelegate(self, self.model)) self.connect(self.form.tableView.selectionModel(), 
SIGNAL("selectionChanged(QItemSelection,QItemSelection)"), self.onRowChanged) def setupEditor(self): self.editor = aqt.editor.Editor( self.mw, self.form.fieldsArea, self) self.editor.stealFocus = False def onRowChanged(self, current, previous): "Update current note and hide/show editor." update = self.updateTitle() show = self.model.cards and update == 1 self.form.splitter.widget(1).setVisible(not not show) if not show: self.editor.setNote(None) self.singleCard = False else: self.card = self.model.getCard( self.form.tableView.selectionModel().currentIndex()) self.editor.setNote(self.card.note(reload=True)) self.editor.card = self.card self.singleCard = True self._renderPreview(True) self.toolbar.draw() def refreshCurrentCard(self, note): self.model.refreshNote(note) self._renderPreview(False) def refreshCurrentCardFilter(self, flag, note, fidx): self.refreshCurrentCard(note) return flag def currentRow(self): idx = self.form.tableView.selectionModel().currentIndex() return idx.row() # Headers & sorting ###################################################################### def setupHeaders(self): vh = self.form.tableView.verticalHeader() hh = self.form.tableView.horizontalHeader() if not isWin: vh.hide() hh.show() restoreHeader(hh, "editor") hh.setHighlightSections(False) hh.setMinimumSectionSize(50) hh.setMovable(True) self.setColumnSizes() hh.setContextMenuPolicy(Qt.CustomContextMenu) hh.connect(hh, SIGNAL("customContextMenuRequested(QPoint)"), self.onHeaderContext) self.setSortIndicator() hh.connect(hh, SIGNAL("sortIndicatorChanged(int, Qt::SortOrder)"), self.onSortChanged) hh.connect(hh, SIGNAL("sectionMoved(int,int,int)"), self.onColumnMoved) def onSortChanged(self, idx, ord): type = self.model.activeCols[idx] noSort = ("question", "answer", "template", "deck", "note", "noteTags") if type in noSort: if type == "template": # fixme: change to 'card:1' to be clearer in future dev round showInfo(_("""\ This column can't be sorted on, but you can search for 
individual card types, \ such as 'card:Card 1'.""")) elif type == "deck": showInfo(_("""\ This column can't be sorted on, but you can search for specific decks \ by clicking on one on the left.""")) else: showInfo(_("Sorting on this column is not supported. Please " "choose another.")) type = self.col.conf['sortType'] if self.col.conf['sortType'] != type: self.col.conf['sortType'] = type # default to descending for non-text fields if type == "noteFld": ord = not ord self.col.conf['sortBackwards'] = ord self.onSearch() else: if self.col.conf['sortBackwards'] != ord: self.col.conf['sortBackwards'] = ord self.model.reverse() self.setSortIndicator() def setSortIndicator(self): hh = self.form.tableView.horizontalHeader() type = self.col.conf['sortType'] if type not in self.model.activeCols: hh.setSortIndicatorShown(False) return idx = self.model.activeCols.index(type) if self.col.conf['sortBackwards']: ord = Qt.DescendingOrder else: ord = Qt.AscendingOrder hh.blockSignals(True) hh.setSortIndicator(idx, ord) hh.blockSignals(False) hh.setSortIndicatorShown(True) def onHeaderContext(self, pos): gpos = self.form.tableView.mapToGlobal(pos) m = QMenu() for type, name in self.columns: a = m.addAction(name) a.setCheckable(True) a.setChecked(type in self.model.activeCols) a.connect(a, SIGNAL("toggled(bool)"), lambda b, t=type: self.toggleField(t)) m.exec_(gpos) def toggleField(self, type): self.model.beginReset() if type in self.model.activeCols: if len(self.model.activeCols) < 2: return showInfo(_("You must have at least one column.")) self.model.activeCols.remove(type) adding=False else: self.model.activeCols.append(type) adding=True # sorted field may have been hidden self.setSortIndicator() self.setColumnSizes() self.model.endReset() # if we added a column, scroll to it if adding: row = self.currentRow() idx = self.model.index(row, len(self.model.activeCols) - 1) self.form.tableView.scrollTo(idx) def setColumnSizes(self): hh = self.form.tableView.horizontalHeader() 
hh.setResizeMode(QHeaderView.Interactive) hh.setResizeMode(hh.logicalIndex(len(self.model.activeCols)-1), QHeaderView.Stretch) # this must be set post-resize or it doesn't work hh.setCascadingSectionResizes(False) def onColumnMoved(self, a, b, c): self.setColumnSizes() # Filter tree ###################################################################### class CallbackItem(QTreeWidgetItem): def __init__(self, root, name, onclick, oncollapse=None): QTreeWidgetItem.__init__(self, root, [name]) self.onclick = onclick self.oncollapse = oncollapse def setupTree(self): self.connect( self.form.tree, SIGNAL("itemClicked(QTreeWidgetItem*,int)"), self.onTreeClick) p = QPalette() p.setColor(QPalette.Base, QColor("#d6dde0")) self.form.tree.setPalette(p) self.buildTree() self.connect( self.form.tree, SIGNAL("itemExpanded(QTreeWidgetItem*)"), lambda item: self.onTreeCollapse(item)) self.connect( self.form.tree, SIGNAL("itemCollapsed(QTreeWidgetItem*)"), lambda item: self.onTreeCollapse(item)) def buildTree(self): self.form.tree.clear() root = self.form.tree self._systemTagTree(root) self._favTree(root) self._decksTree(root) self._modelTree(root) self._userTagTree(root) self.form.tree.setIndentation(15) def onTreeClick(self, item, col): if getattr(item, 'onclick', None): item.onclick() def onTreeCollapse(self, item): if getattr(item, 'oncollapse', None): item.oncollapse() def setFilter(self, *args): if len(args) == 1: txt = args[0] else: txt = "" items = [] for c, a in enumerate(args): if c % 2 == 0: txt += a + ":" else: txt += a if " " in txt or "(" in txt or ")" in txt: txt = '"%s"' % txt items.append(txt) txt = "" txt = " ".join(items) if self.mw.app.keyboardModifiers() & Qt.AltModifier: txt = "-"+txt if self.mw.app.keyboardModifiers() & Qt.ControlModifier: cur = unicode(self.form.searchEdit.lineEdit().text()) if cur and cur != \ _("<type here to search; hit enter to show current deck>"): txt = cur + " " + txt elif self.mw.app.keyboardModifiers() & Qt.ShiftModifier: cur = 
unicode(self.form.searchEdit.lineEdit().text()) if cur: txt = cur + " or " + txt self.form.searchEdit.lineEdit().setText(txt) self.onSearch() def _systemTagTree(self, root): tags = ( (_("Whole Collection"), "ankibw", ""), (_("Current Deck"), "deck16", "deck:current"), (_("Added Today"), "view-pim-calendar.png", "added:1"), (_("Studied Today"), "view-pim-calendar.png", "rated:1"), (_("Again Today"), "view-pim-calendar.png", "rated:1:1"), (_("New"), "plus16.png", "is:new"), (_("Learning"), "stock_new_template_red.png", "is:learn"), (_("Review"), "clock16.png", "is:review"), (_("Due"), "clock16.png", "is:due"), (_("Marked"), "star16.png", "tag:marked"), (_("Suspended"), "media-playback-pause.png", "is:suspended"), (_("Leech"), "emblem-important.png", "tag:leech")) for name, icon, cmd in tags: item = self.CallbackItem( root, name, lambda c=cmd: self.setFilter(c)) item.setIcon(0, QIcon(":/icons/" + icon)) return root def _favTree(self, root): saved = self.col.conf.get('savedFilters', []) if not saved: # Don't add favourites to tree if none saved return root = self.CallbackItem(root, _("My Searches"), None) root.setExpanded(True) root.setIcon(0, QIcon(":/icons/emblem-favorite-dark.png")) for name, filt in sorted(saved.items()): item = self.CallbackItem(root, name, lambda s=filt: self.setFilter(s)) item.setIcon(0, QIcon(":/icons/emblem-favorite-dark.png")) def _userTagTree(self, root): for t in sorted(self.col.tags.all()): if t.lower() == "marked" or t.lower() == "leech": continue item = self.CallbackItem( root, t, lambda t=t: self.setFilter("tag", t)) item.setIcon(0, QIcon(":/icons/anki-tag.png")) def _decksTree(self, root): grps = self.col.sched.deckDueTree() def fillGroups(root, grps, head=""): for g in grps: item = self.CallbackItem( root, g[0], lambda g=g: self.setFilter("deck", head+g[0]), lambda g=g: self.mw.col.decks.collapseBrowser(g[1])) item.setIcon(0, QIcon(":/icons/deck16.png")) newhead = head + g[0]+"::" collapsed = 
self.mw.col.decks.get(g[1]).get('browserCollapsed', False) item.setExpanded(not collapsed) fillGroups(item, g[5], newhead) fillGroups(root, grps) def _modelTree(self, root): for m in sorted(self.col.models.all(), key=itemgetter("name")): mitem = self.CallbackItem( root, m['name'], lambda m=m: self.setFilter("mid", str(m['id']))) mitem.setIcon(0, QIcon(":/icons/product_design.png")) # for t in m['tmpls']: # titem = self.CallbackItem( # t['name'], lambda m=m, t=t: self.setFilter( # "model", m['name'], "card", t['name'])) # titem.setIcon(0, QIcon(":/icons/stock_new_template.png")) # mitem.addChild(titem) # Info ###################################################################### def showCardInfo(self): if not self.card: return info, cs = self._cardInfoData() reps = self._revlogData(cs) d = QDialog(self) l = QVBoxLayout() l.setMargin(0) w = AnkiWebView() l.addWidget(w) w.stdHtml(info + "<p>" + reps) bb = QDialogButtonBox(QDialogButtonBox.Close) l.addWidget(bb) bb.connect(bb, SIGNAL("rejected()"), d, SLOT("reject()")) d.setLayout(l) d.setWindowModality(Qt.WindowModal) d.resize(500, 400) restoreGeom(d, "revlog") d.exec_() saveGeom(d, "revlog") def _cardInfoData(self): from anki.stats import CardStats cs = CardStats(self.col, self.card) rep = cs.report() m = self.card.model() rep = """ <div style='width: 400px; margin: 0 auto 0; border: 1px solid #000; padding: 3px; '>%s</div>""" % rep return rep, cs def _revlogData(self, cs): entries = self.mw.col.db.all( "select id/1000.0, ease, ivl, factor, time/1000.0, type " "from revlog where cid = ?", self.card.id) if not entries: return "" s = "<table width=100%%><tr><th align=left>%s</th>" % _("Date") s += ("<th align=right>%s</th>" * 5) % ( _("Type"), _("Rating"), _("Interval"), _("Ease"), _("Time")) cnt = 0 for (date, ease, ivl, factor, taken, type) in reversed(entries): cnt += 1 s += "<tr><td>%s</td>" % time.strftime(_("<b>%Y-%m-%d</b> @ %H:%M"), time.localtime(date)) tstr = [_("Learn"), _("Review"), _("Relearn"), 
_("Filtered"), _("Resched")][type] import anki.stats as st fmt = "<span style='color:%s'>%s</span>" if type == 0: tstr = fmt % (st.colLearn, tstr) elif type == 1: tstr = fmt % (st.colMature, tstr) elif type == 2: tstr = fmt % (st.colRelearn, tstr) elif type == 3: tstr = fmt % (st.colCram, tstr) else: tstr = fmt % ("#000", tstr) if ease == 1: ease = fmt % (st.colRelearn, ease) if ivl == 0: ivl = _("0d") elif ivl > 0: ivl = fmtTimeSpan(ivl*86400, short=True) else: ivl = cs.time(-ivl) s += ("<td align=right>%s</td>" * 5) % ( tstr, ease, ivl, "%d%%" % (factor/10) if factor else "", cs.time(taken)) + "</tr>" s += "</table>" if cnt < self.card.reps: s += _("""\ Note: Some of the history is missing. For more information, \ please see the browser documentation.""") return s # Menu helpers ###################################################################### def selectedCards(self): return [self.model.cards[idx.row()] for idx in self.form.tableView.selectionModel().selectedRows()] def selectedNotes(self): return self.col.db.list(""" select distinct nid from cards where id in %s""" % ids2str( [self.model.cards[idx.row()] for idx in self.form.tableView.selectionModel().selectedRows()])) def selectedNotesAsCards(self): return self.col.db.list( "select id from cards where nid in (%s)" % ",".join([str(s) for s in self.selectedNotes()])) def oneModelNotes(self): sf = self.selectedNotes() if not sf: return mods = self.col.db.scalar(""" select count(distinct mid) from notes where id in %s""" % ids2str(sf)) if mods > 1: showInfo(_("Please select cards from only one note type.")) return return sf def onHelp(self): openHelp("browser") # Misc menu options ###################################################################### def onChangeModel(self): nids = self.oneModelNotes() if nids: ChangeModel(self, nids) def cram(self): return showInfo("not yet implemented") self.close() self.mw.onCram(self.selectedCards()) # Preview 
###################################################################### def onTogglePreview(self): if self._previewWindow: self._closePreview() else: self._openPreview() def _openPreview(self): c = self.connect self._previewState = "question" self._previewWindow = QDialog(None, Qt.Window) self._previewWindow.setWindowTitle(_("Preview")) c(self._previewWindow, SIGNAL("finished(int)"), self._onPreviewFinished) vbox = QVBoxLayout() vbox.setMargin(0) self._previewWeb = AnkiWebView() vbox.addWidget(self._previewWeb) bbox = QDialogButtonBox() self._previewReplay = bbox.addButton(_("Replay Audio"), QDialogButtonBox.ActionRole) self._previewReplay.setAutoDefault(False) self._previewReplay.setShortcut(QKeySequence("R")) self._previewReplay.setToolTip(_("Shortcut key: %s" % "R")) self._previewPrev = bbox.addButton("<", QDialogButtonBox.ActionRole) self._previewPrev.setAutoDefault(False) self._previewPrev.setShortcut(QKeySequence("Left")) self._previewNext = bbox.addButton(">", QDialogButtonBox.ActionRole) self._previewNext.setAutoDefault(False) self._previewNext.setShortcut(QKeySequence("Right")) c(self._previewPrev, SIGNAL("clicked()"), self._onPreviewPrev) c(self._previewNext, SIGNAL("clicked()"), self._onPreviewNext) c(self._previewReplay, SIGNAL("clicked()"), self._onReplayAudio) vbox.addWidget(bbox) self._previewWindow.setLayout(vbox) restoreGeom(self._previewWindow, "preview") self._previewWindow.show() self._renderPreview(True) def _onPreviewFinished(self, ok): saveGeom(self._previewWindow, "preview") self.mw.progress.timer(100, self._onClosePreview, False) self.form.previewButton.setChecked(False) def _onPreviewPrev(self): if self._previewState == "question": self._previewState = "answer" self._renderPreview() else: self.onPreviousCard() self._updatePreviewButtons() def _onPreviewNext(self): if self._previewState == "question": self._previewState = "answer" self._renderPreview() else: self.onNextCard() self._updatePreviewButtons() def _onReplayAudio(self): 
self.mw.reviewer.replayAudio(self) def _updatePreviewButtons(self): if not self._previewWindow: return canBack = self.currentRow() > 0 or self._previewState == "question" self._previewPrev.setEnabled(not not (self.singleCard and canBack)) canForward = self.currentRow() < self.model.rowCount(None) - 1 or \ self._previewState == "question" self._previewNext.setEnabled(not not (self.singleCard and canForward)) def _closePreview(self): if self._previewWindow: self._previewWindow.close() self._onClosePreview() def _onClosePreview(self): self._previewWindow = self._previewPrev = self._previewNext = None def _renderPreview(self, cardChanged=False): if not self._previewWindow: return c = self.card if not c: txt = _("(please select 1 card)") self._previewWeb.stdHtml(txt) self._updatePreviewButtons() return self._updatePreviewButtons() if cardChanged: self._previewState = "question" # need to force reload even if answer txt = c.q(reload=True) if self._previewState == "answer": txt = c.a() txt = re.sub("\[\[type:[^]]+\]\]", "", txt) ti = lambda x: x base = getBase(self.mw.col) self._previewWeb.stdHtml( ti(mungeQA(self.col, txt)), self.mw.reviewer._styles(), bodyClass="card card%d" % (c.ord+1), head=base, js=anki.js.browserSel) clearAudioQueue() if self.mw.reviewer.autoplay(c): playFromText(txt) # Card deletion ###################################################################### def deleteNotes(self): nids = self.selectedNotes() if not nids: return self.mw.checkpoint(_("Delete Notes")) self.model.beginReset() # figure out where to place the cursor after the deletion curRow = self.form.tableView.selectionModel().currentIndex().row() selectedRows = [i.row() for i in self.form.tableView.selectionModel().selectedRows()] if min(selectedRows) < curRow < max(selectedRows): # last selection in middle; place one below last selected item move = sum(1 for i in selectedRows if i > curRow) newRow = curRow - move elif max(selectedRows) <= curRow: # last selection at bottom; place one 
below bottommost selection newRow = max(selectedRows) - len(nids) + 1 else: # last selection at top; place one above topmost selection newRow = min(selectedRows) - 1 self.col.remNotes(nids) self.onSearch(reset=False) if len(self.model.cards): newRow = min(newRow, len(self.model.cards) - 1) newRow = max(newRow, 0) self.model.focusedCard = self.model.cards[newRow] self.model.endReset() self.mw.requireReset() tooltip(ngettext("%d note deleted.", "%d notes deleted.", len(nids)) % len(nids)) # Deck change ###################################################################### def setDeck(self): from aqt.studydeck import StudyDeck cids = self.selectedCards() if not cids: return did = self.mw.col.db.scalar( "select did from cards where id = ?", cids[0]) current=self.mw.col.decks.get(did)['name'] ret = StudyDeck( self.mw, current=current, accept=_("Move Cards"), title=_("Change Deck"), help="browse", parent=self) if not ret.name: return did = self.col.decks.id(ret.name) deck = self.col.decks.get(did) if deck['dyn']: showWarning(_("Cards can't be manually moved into a filtered deck.")) return self.model.beginReset() self.mw.checkpoint(_("Change Deck")) mod = intTime() usn = self.col.usn() # normal cards scids = ids2str(cids) # remove any cards from filtered deck first self.col.sched.remFromDyn(cids) # then move into new deck self.col.db.execute(""" update cards set usn=?, mod=?, did=? 
where id in """ + scids, usn, mod, did) self.model.endReset() self.mw.requireReset() # Tags ###################################################################### def addTags(self, tags=None, label=None, prompt=None, func=None): if prompt is None: prompt = _("Enter tags to add:") if tags is None: (tags, r) = getTag(self, self.col, prompt) else: r = True if not r: return if func is None: func = self.col.tags.bulkAdd if label is None: label = _("Add Tags") if label: self.mw.checkpoint(label) self.model.beginReset() func(self.selectedNotes(), tags) self.model.endReset() self.mw.requireReset() def deleteTags(self, tags=None, label=None): if label is None: label = _("Delete Tags") self.addTags(tags, label, _("Enter tags to delete:"), func=self.col.tags.bulkRem) # Suspending and marking ###################################################################### def isSuspended(self): return not not (self.card and self.card.queue == -1) def onSuspend(self, sus=None): if sus is None: sus = not self.isSuspended() # focus lost hook may not have chance to fire self.editor.saveNow() c = self.selectedCards() if sus: self.col.sched.suspendCards(c) else: self.col.sched.unsuspendCards(c) self.model.reset() self.mw.requireReset() def isMarked(self): return not not (self.card and self.card.note().hasTag("Marked")) def onMark(self, mark=None): if mark is None: mark = not self.isMarked() if mark: self.addTags(tags="marked", label=False) else: self.deleteTags(tags="marked", label=False) # Repositioning ###################################################################### def reposition(self): cids = self.selectedCards() cids2 = self.col.db.list( "select id from cards where type = 0 and id in " + ids2str(cids)) if not cids2: return showInfo(_("Only new cards can be repositioned.")) d = QDialog(self) d.setWindowModality(Qt.WindowModal) frm = aqt.forms.reposition.Ui_Dialog() frm.setupUi(d) (pmin, pmax) = self.col.db.first( "select min(due), max(due) from cards where type=0 and odid=0") pmin = 
pmin or 0 pmax = pmax or 0 txt = _("Queue top: %d") % pmin txt += "\n" + _("Queue bottom: %d") % pmax frm.label.setText(txt) if not d.exec_(): return self.model.beginReset() self.mw.checkpoint(_("Reposition")) self.col.sched.sortCards( cids, start=frm.start.value(), step=frm.step.value(), shuffle=frm.randomize.isChecked(), shift=frm.shift.isChecked()) self.onSearch(reset=False) self.mw.requireReset() self.model.endReset() # Rescheduling ###################################################################### def reschedule(self): d = QDialog(self) d.setWindowModality(Qt.WindowModal) frm = aqt.forms.reschedule.Ui_Dialog() frm.setupUi(d) if not d.exec_(): return self.model.beginReset() self.mw.checkpoint(_("Reschedule")) if frm.asNew.isChecked(): self.col.sched.forgetCards(self.selectedCards()) else: fmin = frm.min.value() fmax = frm.max.value() fmax = max(fmin, fmax) self.col.sched.reschedCards( self.selectedCards(), fmin, fmax) self.onSearch(reset=False) self.mw.requireReset() self.model.endReset() # Edit: selection ###################################################################### def selectNotes(self): nids = self.selectedNotes() self.form.searchEdit.lineEdit().setText("nid:"+",".join([str(x) for x in nids])) # clear the selection so we don't waste energy preserving it tv = self.form.tableView tv.selectionModel().clear() self.onSearch() tv.selectAll() def invertSelection(self): sm = self.form.tableView.selectionModel() items = sm.selection() self.form.tableView.selectAll() sm.select(items, QItemSelectionModel.Deselect | QItemSelectionModel.Rows) # Edit: undo ###################################################################### def setupHooks(self): addHook("undoState", self.onUndoState) addHook("reset", self.onReset) addHook("editTimer", self.refreshCurrentCard) addHook("editFocusLost", self.refreshCurrentCardFilter) for t in "newTag", "newModel", "newDeck": addHook(t, self.buildTree) def teardownHooks(self): remHook("reset", self.onReset) remHook("editTimer", 
self.refreshCurrentCard) remHook("editFocusLost", self.refreshCurrentCardFilter) remHook("undoState", self.onUndoState) for t in "newTag", "newModel", "newDeck": remHook(t, self.buildTree) def onUndoState(self, on): self.form.actionUndo.setEnabled(on) if on: self.form.actionUndo.setText(self.mw.form.actionUndo.text()) # Edit: replacing ###################################################################### def onFindReplace(self): sf = self.selectedNotes() if not sf: return import anki.find fields = sorted(anki.find.fieldNames(self.col, downcase=False)) d = QDialog(self) frm = aqt.forms.findreplace.Ui_Dialog() frm.setupUi(d) d.setWindowModality(Qt.WindowModal) frm.field.addItems([_("All Fields")] + fields) self.connect(frm.buttonBox, SIGNAL("helpRequested()"), self.onFindReplaceHelp) restoreGeom(d, "findreplace") r = d.exec_() saveGeom(d, "findreplace") if not r: return if frm.field.currentIndex() == 0: field = None else: field = fields[frm.field.currentIndex()-1] self.mw.checkpoint(_("Find and Replace")) self.mw.progress.start() self.model.beginReset() try: changed = self.col.findReplace(sf, unicode(frm.find.text()), unicode(frm.replace.text()), frm.re.isChecked(), field, frm.ignoreCase.isChecked()) except sre_constants.error: showInfo(_("Invalid regular expression."), parent=self) return else: self.onSearch() self.mw.requireReset() finally: self.model.endReset() self.mw.progress.finish() showInfo(ngettext( "%(a)d of %(b)d note updated", "%(a)d of %(b)d notes updated", len(sf)) % { 'a': changed, 'b': len(sf), }) def onFindReplaceHelp(self): openHelp("findreplace") # Edit: finding dupes ###################################################################### def onFindDupes(self): d = QDialog(self) frm = aqt.forms.finddupes.Ui_Dialog() frm.setupUi(d) restoreGeom(d, "findDupes") fields = sorted(anki.find.fieldNames(self.col, downcase=False)) frm.fields.addItems(fields) self._dupesButton = None # links frm.webView.page().setLinkDelegationPolicy( 
QWebPage.DelegateAllLinks) self.connect(frm.webView, SIGNAL("linkClicked(QUrl)"), self.dupeLinkClicked) def onFin(code): saveGeom(d, "findDupes") self.connect(d, SIGNAL("finished(int)"), onFin) def onClick(): field = fields[frm.fields.currentIndex()] self.duplicatesReport(frm.webView, field, frm.search.text(), frm) search = frm.buttonBox.addButton( _("Search"), QDialogButtonBox.ActionRole) self.connect(search, SIGNAL("clicked()"), onClick) d.show() def duplicatesReport(self, web, fname, search, frm): self.mw.progress.start() res = self.mw.col.findDupes(fname, search) if not self._dupesButton: self._dupesButton = b = frm.buttonBox.addButton( _("Tag Duplicates"), QDialogButtonBox.ActionRole) self.connect(b, SIGNAL("clicked()"), lambda: self._onTagDupes(res)) t = "<html><body>" groups = len(res) notes = sum(len(r[1]) for r in res) part1 = ngettext("%d group", "%d groups", groups) % groups part2 = ngettext("%d note", "%d notes", notes) % notes t += _("Found %(a)s across %(b)s.") % dict(a=part1, b=part2) t += "<p><ol>" for val, nids in res: t += '<li><a href="%s">%s</a>: %s</a>' % ( "nid:" + ",".join(str(id) for id in nids), ngettext("%d note", "%d notes", len(nids)) % len(nids), cgi.escape(val)) t += "</ol>" t += "</body></html>" web.setHtml(t) self.mw.progress.finish() def _onTagDupes(self, res): if not res: return self.model.beginReset() self.mw.checkpoint(_("Tag Duplicates")) nids = set() for s, nidlist in res: nids.update(nidlist) self.col.tags.bulkAdd(nids, _("duplicate")) self.mw.progress.finish() self.model.endReset() self.mw.requireReset() tooltip(_("Notes tagged.")) def dupeLinkClicked(self, link): self.form.searchEdit.lineEdit().setText(link.toString()) self.onSearch() self.onNote() # Jumping ###################################################################### def _moveCur(self, dir=None, idx=None): if not self.model.cards: return self.editor.saveNow() tv = self.form.tableView if idx is None: idx = tv.moveCursor(dir, self.mw.app.keyboardModifiers()) 
tv.selectionModel().clear() tv.setCurrentIndex(idx) def onPreviousCard(self): f = self.editor.currentField self._moveCur(QAbstractItemView.MoveUp) self.editor.web.setFocus() self.editor.web.eval("focusField(%d)" % f) def onNextCard(self): f = self.editor.currentField self._moveCur(QAbstractItemView.MoveDown) self.editor.web.setFocus() self.editor.web.eval("focusField(%d)" % f) def onFirstCard(self): sm = self.form.tableView.selectionModel() idx = sm.currentIndex() self._moveCur(None, self.model.index(0, 0)) if not self.mw.app.keyboardModifiers() & Qt.ShiftModifier: return idx2 = sm.currentIndex() item = QItemSelection(idx2, idx) sm.select(item, QItemSelectionModel.SelectCurrent| QItemSelectionModel.Rows) def onLastCard(self): sm = self.form.tableView.selectionModel() idx = sm.currentIndex() self._moveCur( None, self.model.index(len(self.model.cards) - 1, 0)) if not self.mw.app.keyboardModifiers() & Qt.ShiftModifier: return idx2 = sm.currentIndex() item = QItemSelection(idx, idx2) sm.select(item, QItemSelectionModel.SelectCurrent| QItemSelectionModel.Rows) def onFind(self): self.form.searchEdit.setFocus() self.form.searchEdit.lineEdit().selectAll() def onNote(self): self.editor.focus() self.editor.web.setFocus() self.editor.web.eval("focusField(0);") def onTags(self): self.form.tree.setFocus() def onCardList(self): self.form.tableView.setFocus() def focusCid(self, cid): try: row = self.model.cards.index(cid) except: return self.form.tableView.selectRow(row) # Change model dialog ###################################################################### class ChangeModel(QDialog): def __init__(self, browser, nids): QDialog.__init__(self, browser) self.browser = browser self.nids = nids self.oldModel = browser.card.note().model() self.form = aqt.forms.changemodel.Ui_Dialog() self.form.setupUi(self) self.setWindowModality(Qt.WindowModal) self.setup() restoreGeom(self, "changeModel") addHook("reset", self.onReset) addHook("currentModelChanged", self.onReset) self.exec_() 
def setup(self): # maps self.flayout = QHBoxLayout() self.flayout.setMargin(0) self.fwidg = None self.form.fieldMap.setLayout(self.flayout) self.tlayout = QHBoxLayout() self.tlayout.setMargin(0) self.twidg = None self.form.templateMap.setLayout(self.tlayout) if self.style().objectName() == "gtk+": # gtk+ requires margins in inner layout self.form.verticalLayout_2.setContentsMargins(0, 11, 0, 0) self.form.verticalLayout_3.setContentsMargins(0, 11, 0, 0) # model chooser import aqt.modelchooser self.oldModel = self.browser.col.models.get( self.browser.col.db.scalar( "select mid from notes where id = ?", self.nids[0])) self.form.oldModelLabel.setText(self.oldModel['name']) self.modelChooser = aqt.modelchooser.ModelChooser( self.browser.mw, self.form.modelChooserWidget, label=False) self.modelChooser.models.setFocus() self.connect(self.form.buttonBox, SIGNAL("helpRequested()"), self.onHelp) self.modelChanged(self.browser.mw.col.models.current()) self.pauseUpdate = False def onReset(self): self.modelChanged(self.browser.col.models.current()) def modelChanged(self, model): self.targetModel = model self.rebuildTemplateMap() self.rebuildFieldMap() def rebuildTemplateMap(self, key=None, attr=None): if not key: key = "t" attr = "tmpls" map = getattr(self, key + "widg") lay = getattr(self, key + "layout") src = self.oldModel[attr] dst = self.targetModel[attr] if map: lay.removeWidget(map) map.deleteLater() setattr(self, key + "MapWidget", None) map = QWidget() l = QGridLayout() combos = [] targets = [x['name'] for x in dst] + [_("Nothing")] indices = {} for i, x in enumerate(src): l.addWidget(QLabel(_("Change %s to:") % x['name']), i, 0) cb = QComboBox() cb.addItems(targets) idx = min(i, len(targets)-1) cb.setCurrentIndex(idx) indices[cb] = idx self.connect(cb, SIGNAL("currentIndexChanged(int)"), lambda i, cb=cb, key=key: self.onComboChanged(i, cb, key)) combos.append(cb) l.addWidget(cb, i, 1) map.setLayout(l) lay.addWidget(map) setattr(self, key + "widg", map) setattr(self, 
key + "layout", lay) setattr(self, key + "combos", combos) setattr(self, key + "indices", indices) def rebuildFieldMap(self): return self.rebuildTemplateMap(key="f", attr="flds") def onComboChanged(self, i, cb, key): indices = getattr(self, key + "indices") if self.pauseUpdate: indices[cb] = i return combos = getattr(self, key + "combos") if i == cb.count() - 1: # set to 'nothing' return # find another combo with same index for c in combos: if c == cb: continue if c.currentIndex() == i: self.pauseUpdate = True c.setCurrentIndex(indices[cb]) self.pauseUpdate = False break indices[cb] = i def getTemplateMap(self, old=None, combos=None, new=None): if not old: old = self.oldModel['tmpls'] combos = self.tcombos new = self.targetModel['tmpls'] map = {} for i, f in enumerate(old): idx = combos[i].currentIndex() if idx == len(new): # ignore map[f['ord']] = None else: f2 = new[idx] map[f['ord']] = f2['ord'] return map def getFieldMap(self): return self.getTemplateMap( old=self.oldModel['flds'], combos=self.fcombos, new=self.targetModel['flds']) def cleanup(self): remHook("reset", self.onReset) remHook("currentModelChanged", self.onReset) self.modelChooser.cleanup() saveGeom(self, "changeModel") def reject(self): self.cleanup() return QDialog.reject(self) def accept(self): # check maps fmap = self.getFieldMap() cmap = self.getTemplateMap() if any(True for c in cmap.values() if c is None): if not askUser(_("""\ Any cards mapped to nothing will be deleted. \ If a note has no remaining cards, it will be lost. 
\ Are you sure you want to continue?""")): return QDialog.accept(self) self.browser.mw.checkpoint(_("Change Note Type")) b = self.browser b.mw.progress.start() b.model.beginReset() mm = b.mw.col.models mm.change(self.oldModel, self.nids, self.targetModel, fmap, cmap) b.onSearch(reset=False) b.model.endReset() b.mw.progress.finish() b.mw.reset() self.cleanup() def onHelp(self): openHelp("browsermisc") # Toolbar ###################################################################### class BrowserToolbar(Toolbar): def __init__(self, mw, web, browser): self.browser = browser Toolbar.__init__(self, mw, web) def draw(self): mark = self.browser.isMarked() pause = self.browser.isSuspended() def borderImg(link, icon, on, title, tooltip=None): if on: fmt = '''\ <a class=hitem title="%s" href="%s">\ <img valign=bottom style='border: 1px solid #aaa;' src="qrc:/icons/%s.png"> %s</a>''' else: fmt = '''\ <a class=hitem title="%s" href="%s"><img style="padding: 1px;" valign=bottom src="qrc:/icons/%s.png"> %s</a>''' return fmt % (tooltip or title, link, icon, title) right = "<div>" right += borderImg("add", "add16", False, _("Add")) right += borderImg("info", "info", False, _("Info"), shortcut(_("Card Info (Ctrl+Shift+I)"))) right += borderImg("mark", "star16", mark, _("Mark"), shortcut(_("Mark Note (Ctrl+K)"))) right += borderImg("pause", "pause16", pause, _("Suspend")) right += borderImg("setDeck", "deck16", False, _("Change Deck"), shortcut(_("Move To Deck (Ctrl+D)"))) right += borderImg("addtag", "addtag16", False, _("Add Tags"), shortcut(_("Bulk Add Tags (Ctrl+Shift+T)"))) right += borderImg("deletetag", "deletetag16", False, _("Remove Tags"), shortcut(_( "Bulk Remove Tags (Ctrl+Alt+T)"))) right += borderImg("delete", "delete16", False, _("Delete")) right += "</div>" self.web.page().currentFrame().setScrollBarPolicy( Qt.Horizontal, Qt.ScrollBarAlwaysOff) self.web.stdHtml(self._body % ( "", #<span style='display:inline-block; width: 100px;'></span>", #self._centerLinks(), right, 
""), self._css + """ #header { font-weight: normal; } a { margin-right: 1em; } .hitem { overflow: hidden; white-space: nowrap;} """) # Link handling ###################################################################### def _linkHandler(self, l): if l == "anki": self.showMenu() elif l == "add": self.browser.mw.onAddCard() elif l == "delete": self.browser.deleteNotes() elif l == "setDeck": self.browser.setDeck() # icons elif l == "info": self.browser.showCardInfo() elif l == "mark": self.browser.onMark() elif l == "pause": self.browser.onSuspend() elif l == "addtag": self.browser.addTags() elif l == "deletetag": self.browser.deleteTags() # Favourites button ###################################################################### class FavouritesLineEdit(QLineEdit): buttonClicked = pyqtSignal(bool) def __init__(self, mw, browser, parent=None): super(FavouritesLineEdit, self).__init__(parent) self.mw = mw self.browser = browser # add conf if missing if not self.mw.col.conf.has_key('savedFilters'): self.mw.col.conf['savedFilters'] = {} self.button = QToolButton(self) self.button.setStyleSheet('border: 0px;') self.button.setCursor(Qt.ArrowCursor) self.button.clicked.connect(self.buttonClicked.emit) self.setIcon(':/icons/emblem-favorite-off.png') # flag to raise save or delete dialog on button click self.doSave = True # name of current saved filter (if query matches) self.name = None self.buttonClicked.connect(self.onClicked) self.connect(self, SIGNAL("textChanged(QString)"), self.updateButton) def resizeEvent(self, event): buttonSize = self.button.sizeHint() frameWidth = self.style().pixelMetric(QStyle.PM_DefaultFrameWidth) self.button.move(self.rect().right() - frameWidth - buttonSize.width(), (self.rect().bottom() - buttonSize.height() + 1) / 2) self.setTextMargins(0, 0, buttonSize.width() * 1.5, 0) super(FavouritesLineEdit, self).resizeEvent(event) def setIcon(self, path): self.button.setIcon(QIcon(path)) def setText(self, txt): super(FavouritesLineEdit, 
self).setText(txt) self.updateButton() def updateButton(self, reset=True): # If search text is a saved query, switch to the delete button. # Otherwise show save button. txt = unicode(self.text()).strip() for key, value in self.mw.col.conf['savedFilters'].items(): if txt == value: self.doSave = False self.name = key self.setIcon(QIcon(":/icons/emblem-favorite.png")) return self.doSave = True self.setIcon(QIcon(":/icons/emblem-favorite-off.png")) def onClicked(self): if self.doSave: self.saveClicked() else: self.deleteClicked() def saveClicked(self): txt = unicode(self.text()).strip() dlg = QInputDialog(self) dlg.setInputMode(QInputDialog.TextInput) dlg.setLabelText(_("The current search terms will be added as a new " "item in the sidebar.\n" "Search name:")) dlg.setWindowTitle(_("Save search")) ok = dlg.exec_() name = dlg.textValue() if ok: self.mw.col.conf['savedFilters'][name] = txt self.mw.col.setMod() self.updateButton() self.browser.buildTree() def deleteClicked(self): msg = _('Remove "%s" from your saved searches?') % self.name ok = QMessageBox.question(self, _('Remove search'), msg, QMessageBox.Yes, QMessageBox.No) if ok == QMessageBox.Yes: self.mw.col.conf['savedFilters'].pop(self.name, None) self.mw.col.setMod() self.updateButton() self.browser.buildTree()
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/browser.py
browser.py
from aqt.qt import *
from anki.hooks import addHook, remHook, runHook
from aqt.utils import shortcut
import aqt


class ModelChooser(QHBoxLayout):
    """Horizontal layout with a button that selects the current note type."""

    def __init__(self, mw, widget, label=True):
        QHBoxLayout.__init__(self)
        self.widget = widget
        self.mw = mw
        self.deck = mw.col
        self.label = label
        self.setMargin(0)
        self.setSpacing(8)
        self.setupModels()
        addHook('reset', self.onReset)
        self.widget.setLayout(self)

    def setupModels(self):
        """Create the optional label and the note-type push button."""
        if self.label:
            self.modelLabel = QLabel(_("Type"))
            self.addWidget(self.modelLabel)
        # models box
        self.models = QPushButton()
        #self.models.setStyleSheet("* { text-align: left; }")
        self.models.setToolTip(shortcut(_("Change Note Type (Ctrl+N)")))
        s = QShortcut(QKeySequence(_("Ctrl+N")), self.widget)
        s.connect(s, SIGNAL("activated()"), self.onModelChange)
        self.models.setAutoDefault(False)
        self.addWidget(self.models)
        self.connect(self.models, SIGNAL("clicked()"), self.onModelChange)
        # layout; Policy(7) == Expanding, Policy(0) == Fixed
        sizePolicy = QSizePolicy(
            QSizePolicy.Policy(7),
            QSizePolicy.Policy(0))
        self.models.setSizePolicy(sizePolicy)
        self.updateModels()

    def cleanup(self):
        remHook('reset', self.onReset)

    def onReset(self):
        self.updateModels()

    def show(self):
        self.widget.show()

    def hide(self):
        self.widget.hide()

    def onEdit(self):
        """Open the full note-type manager dialog."""
        import aqt.models
        aqt.models.Models(self.mw, self.widget)

    def onModelChange(self):
        """Let the user pick a note type; remember it on the current deck."""
        from aqt.studydeck import StudyDeck
        current = self.deck.models.current()['name']
        # edit button
        edit = QPushButton(_("Manage"))
        self.connect(edit, SIGNAL("clicked()"), self.onEdit)

        def nameFunc():
            return sorted(self.deck.models.allNames())
        ret = StudyDeck(
            self.mw, names=nameFunc,
            accept=_("Choose"), title=_("Choose Note Type"),
            help="_notes", current=current, parent=self.widget,
            buttons=[edit], cancel=True, geomKey="selectModel")
        if not ret.name:
            return
        m = self.deck.models.byName(ret.name)
        self.deck.conf['curModel'] = m['id']
        cdeck = self.deck.decks.current()
        cdeck['mid'] = m['id']
        self.deck.decks.save(cdeck)
        runHook("currentModelChanged")
        self.mw.reset()

    def updateModels(self):
        """Refresh the button caption from the current note type."""
        self.models.setText(self.deck.models.current()['name'])
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/modelchooser.py
modelchooser.py
import time from aqt.qt import * # fixme: if mw->subwindow opens a progress dialog with mw as the parent, mw # gets raised on finish on compiz. perhaps we should be using the progress # dialog as the parent? # Progress info ########################################################################## class ProgressManager(object): def __init__(self, mw): self.mw = mw self.app = QApplication.instance() self.inDB = False self.blockUpdates = False self._win = None self._levels = 0 # SQLite progress handler ########################################################################## def setupDB(self, db): "Install a handler in the current DB." self.lastDbProgress = 0 self.inDB = False try: db.set_progress_handler(self._dbProgress, 10000) except: print """\ Your pysqlite2 is too old. Anki will appear frozen during long operations.""" def _dbProgress(self): "Called from SQLite." # do nothing if we don't have a progress window if not self._win: return # make sure we're not executing too frequently if (time.time() - self.lastDbProgress) < 0.01: return self.lastDbProgress = time.time() # and we're in the main thread if not self.mw.inMainThread(): return # ensure timers don't fire self.inDB = True # handle GUI events if not self.blockUpdates: self._maybeShow() self.app.processEvents(QEventLoop.ExcludeUserInputEvents) self.inDB = False # DB-safe timers ########################################################################## # QTimer may fire in processEvents(). We provide a custom timer which # automatically defers until the DB is not busy. 
def timer(self, ms, func, repeat): def handler(): if self.inDB: # retry in 100ms self.timer(100, func, False) else: func() t = QTimer(self.mw) if not repeat: t.setSingleShot(True) t.connect(t, SIGNAL("timeout()"), handler) t.start(ms) return t # Creating progress dialogs ########################################################################## class ProgressNoCancel(QProgressDialog): def closeEvent(self, evt): evt.ignore() def keyPressEvent(self, evt): if evt.key() == Qt.Key_Escape: evt.ignore() def start(self, max=0, min=0, label=None, parent=None, immediate=False): self._levels += 1 if self._levels > 1: return # setup window parent = parent or self.app.activeWindow() or self.mw label = label or _("Processing...") self._win = self.ProgressNoCancel(label, "", min, max, parent) self._win.setWindowTitle("Anki") self._win.setCancelButton(None) self._win.setAutoClose(False) self._win.setAutoReset(False) self._win.setWindowModality(Qt.ApplicationModal) # we need to manually manage minimum time to show, as qt gets confused # by the db handler self._win.setMinimumDuration(100000) if immediate: self._shown = True self._win.show() self.app.processEvents() else: self._shown = False self._counter = min self._min = min self._max = max self._firstTime = time.time() self._lastTime = time.time() self._disabled = False def update(self, label=None, value=None, process=True, maybeShow=True): #print self._min, self._counter, self._max, label, time.time() - self._lastTime if maybeShow: self._maybeShow() self._lastTime = time.time() if label: self._win.setLabelText(label) if self._max and self._shown: self._counter = value or (self._counter+1) self._win.setValue(self._counter) if process: self.app.processEvents(QEventLoop.ExcludeUserInputEvents) def finish(self): self._levels -= 1 self._levels = max(0, self._levels) if self._levels == 0 and self._win: self._win.cancel() self._unsetBusy() def clear(self): "Restore the interface after an error." 
if self._levels: self._levels = 1 self.finish() def _maybeShow(self): if not self._levels: return if self._shown: self.update(maybeShow=False) return delta = time.time() - self._firstTime if delta > 0.5: self._shown = True self._win.show() self._setBusy() def _setBusy(self): self._disabled = True self.mw.app.setOverrideCursor(QCursor(Qt.WaitCursor)) def _unsetBusy(self): self._disabled = False self.app.restoreOverrideCursor() def busy(self): "True if processing." return self._levels
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/progress.py
progress.py
import re import signal import zipfile from send2trash import send2trash from aqt.qt import * from anki import Collection from anki.utils import isWin, isMac, intTime, splitFields, ids2str from anki.hooks import runHook, addHook import aqt import aqt.progress import aqt.webview import aqt.toolbar import aqt.stats from aqt.utils import saveGeom, restoreGeom, showInfo, showWarning, \ restoreState, getOnlyText, askUser, applyStyles, showText, tooltip, \ openHelp, openLink, checkInvalidFilename import anki.db class AnkiQt(QMainWindow): def __init__(self, app, profileManager, args): QMainWindow.__init__(self) self.state = "startup" aqt.mw = self self.app = app if isWin: self._xpstyle = QStyleFactory.create("WindowsXP") self.app.setStyle(self._xpstyle) self.pm = profileManager # running 2.0 for the first time? if self.pm.meta['firstRun']: # load the new deck user profile self.pm.load(self.pm.profiles()[0]) # upgrade if necessary from aqt.upgrade import Upgrader u = Upgrader(self) u.maybeUpgrade() self.pm.meta['firstRun'] = False self.pm.save() # init rest of app if qtmajor == 4 and qtminor < 8: # can't get modifiers immediately on qt4.7, so no safe mode there self.safeMode = False else: self.safeMode = self.app.queryKeyboardModifiers() & Qt.ShiftModifier try: self.setupUI() self.setupAddons() except: showInfo(_("Error during startup:\n%s") % traceback.format_exc()) sys.exit(1) # must call this after ui set up if self.safeMode: tooltip(_("Shift key was held down. Skipping automatic " "syncing and add-on loading.")) # were we given a file to import? if args and args[0]: self.onAppMsg(unicode(args[0], sys.getfilesystemencoding(), "ignore")) # Load profile in a timer so we can let the window finish init and not # close on profile load error. 
if isMac and qtmajor >= 5: self.show() self.progress.timer(10, self.setupProfile, False) def setupUI(self): self.col = None self.setupAppMsg() self.setupKeys() self.setupThreads() self.setupFonts() self.setupMainWindow() self.setupSystemSpecific() self.setupStyle() self.setupMenus() self.setupProgress() self.setupErrorHandler() self.setupSignals() self.setupAutoUpdate() self.setupHooks() self.setupRefreshTimer() self.updateTitleBar() # screens self.setupDeckBrowser() self.setupOverview() self.setupReviewer() # Profiles ########################################################################## def setupProfile(self): self.pendingImport = None # profile not provided on command line? if not self.pm.name: # if there's a single profile, load it automatically profs = self.pm.profiles() if len(profs) == 1: try: self.pm.load(profs[0]) except: # password protected pass if not self.pm.name: self.showProfileManager() else: self.loadProfile() def showProfileManager(self): self.state = "profileManager" d = self.profileDiag = QDialog() f = self.profileForm = aqt.forms.profiles.Ui_Dialog() f.setupUi(d) d.connect(f.login, SIGNAL("clicked()"), self.onOpenProfile) d.connect(f.profiles, SIGNAL("itemDoubleClicked(QListWidgetItem*)"), self.onOpenProfile) d.connect(f.quit, SIGNAL("clicked()"), lambda: sys.exit(0)) d.connect(f.add, SIGNAL("clicked()"), self.onAddProfile) d.connect(f.rename, SIGNAL("clicked()"), self.onRenameProfile) d.connect(f.delete_2, SIGNAL("clicked()"), self.onRemProfile) d.connect(d, SIGNAL("rejected()"), lambda: d.close()) d.connect(f.profiles, SIGNAL("currentRowChanged(int)"), self.onProfileRowChange) self.refreshProfilesList() # raise first, for osx testing d.show() d.activateWindow() d.raise_() d.exec_() def refreshProfilesList(self): f = self.profileForm f.profiles.clear() profs = self.pm.profiles() f.profiles.addItems(profs) try: idx = profs.index(self.pm.name) except: idx = 0 f.profiles.setCurrentRow(idx) def onProfileRowChange(self, n): if n < 0: # called 
on .clear() return name = self.pm.profiles()[n] f = self.profileForm passwd = not self.pm.load(name) f.passEdit.setVisible(passwd) f.passLabel.setVisible(passwd) def openProfile(self): name = self.pm.profiles()[self.profileForm.profiles.currentRow()] passwd = self.profileForm.passEdit.text() return self.pm.load(name, passwd) def onOpenProfile(self): if not self.openProfile(): showWarning(_("Invalid password.")) return self.profileDiag.close() self.loadProfile() return True def profileNameOk(self, str): return not checkInvalidFilename(str) def onAddProfile(self): name = getOnlyText(_("Name:")) if name: name = name.strip() if name in self.pm.profiles(): return showWarning(_("Name exists.")) if not self.profileNameOk(name): return self.pm.create(name) self.pm.name = name self.refreshProfilesList() def onRenameProfile(self): name = getOnlyText(_("New name:"), default=self.pm.name) if not self.openProfile(): return showWarning(_("Invalid password.")) if not name: return if name == self.pm.name: return if name in self.pm.profiles(): return showWarning(_("Name exists.")) if not self.profileNameOk(name): return self.pm.rename(name) self.refreshProfilesList() def onRemProfile(self): profs = self.pm.profiles() if len(profs) < 2: return showWarning(_("There must be at least one profile.")) # password correct? if not self.openProfile(): return # sure? if not askUser(_("""\ All cards, notes, and media for this profile will be deleted. 
\ Are you sure?""")): return self.pm.remove(self.pm.name) self.refreshProfilesList() def loadProfile(self): # show main window if self.pm.profile['mainWindowState']: restoreGeom(self, "mainWindow") restoreState(self, "mainWindow") else: self.resize(500, 400) # toolbar needs to be retranslated self.toolbar.draw() # titlebar self.setWindowTitle("Anki - " + self.pm.name) # show and raise window for osx self.show() self.activateWindow() self.raise_() # maybe sync (will load DB) if self.pendingImport and os.path.basename( self.pendingImport).startswith("backup-"): # skip sync when importing a backup self.loadCollection() else: self.onSync(auto=True) # import pending? if self.pendingImport: if self.pm.profile['key']: showInfo(_("""\ To import into a password protected profile, please open the profile before attempting to import.""")) else: self.handleImport(self.pendingImport) self.pendingImport = None runHook("profileLoaded") def unloadProfile(self, browser=True): if not self.pm.profile: # already unloaded return runHook("unloadProfile") if not self.unloadCollection(): return self.state = "profileManager" self.onSync(auto=True, reload=False) self.pm.profile['mainWindowGeom'] = self.saveGeometry() self.pm.profile['mainWindowState'] = self.saveState() self.pm.save() self.pm.profile = None self.hide() if browser: self.showProfileManager() # Collection load/unload ########################################################################## def loadCollection(self): cpath = self.pm.collectionPath() try: self.col = Collection(cpath, log=True) except anki.db.Error: # warn user showWarning(_("""\ Your collection is corrupt. Please create a new profile, then \ see the manual for how to restore from an automatic backup. 
Debug info: """)+traceback.format_exc()) self.unloadProfile() except Exception, e: # the custom exception handler won't catch this if we immediately # unload, so we have to manually handle it if "invalidTempFolder" in repr(str(e)): showWarning(self.errorHandler.tempFolderMsg()) self.unloadProfile() return self.unloadProfile() raise self.progress.setupDB(self.col.db) self.maybeEnableUndo() self.moveToState("deckBrowser") def unloadCollection(self): """ Unload the collection. This unloads a collection if there is one and returns True if there is no collection after the call. (Because the unload worked or because there was no collection to start with.) """ if self.col: if not self.closeAllCollectionWindows(): return self.progress.start(immediate=True) corrupt = False try: self.maybeOptimize() except: corrupt = True if not corrupt: if os.getenv("ANKIDEV", 0): corrupt = False else: corrupt = self.col.db.scalar("pragma integrity_check") != "ok" if corrupt: showWarning(_("Your collection file appears to be corrupt. \ This can happen when the file is copied or moved while Anki is open, or \ when the collection is stored on a network or cloud drive. 
Please see \ the manual for information on how to restore from an automatic backup.")) self.col.close() self.col = None if not corrupt: self.backup() self.progress.finish() return True # Backup and auto-optimize ########################################################################## def backup(self): nbacks = self.pm.profile['numBackups'] if self.pm.profile.get('compressBackups', True): zipStorage = zipfile.ZIP_DEFLATED else: zipStorage = zipfile.ZIP_STORED if not nbacks or os.getenv("ANKIDEV", 0): return dir = self.pm.backupFolder() path = self.pm.collectionPath() # find existing backups backups = [] for file in os.listdir(dir): m = re.search("backup-(\d+).apkg", file) if not m: # unknown file continue backups.append((int(m.group(1)), file)) backups.sort() # get next num if not backups: n = 1 else: n = backups[-1][0] + 1 # do backup newpath = os.path.join(dir, "backup-%d.apkg" % n) z = zipfile.ZipFile(newpath, "w", zipStorage) z.write(path, "collection.anki2") z.writestr("media", "{}") z.close() # remove if over if len(backups) + 1 > nbacks: delete = len(backups) + 1 - nbacks delete = backups[:delete] for file in delete: os.unlink(os.path.join(dir, file[1])) def maybeOptimize(self): # have two weeks passed? 
if (intTime() - self.pm.profile['lastOptimize']) < 86400*14: return self.progress.start(label=_("Optimizing..."), immediate=True) self.col.optimize() self.pm.profile['lastOptimize'] = intTime() self.pm.save() self.progress.finish() # State machine ########################################################################## def moveToState(self, state, *args): #print "-> move from", self.state, "to", state oldState = self.state or "dummy" cleanup = getattr(self, "_"+oldState+"Cleanup", None) if cleanup: cleanup(state) self.state = state runHook('beforeStateChange', state, oldState, *args) getattr(self, "_"+state+"State")(oldState, *args) runHook('afterStateChange', state, oldState, *args) def _deckBrowserState(self, oldState): self.deckBrowser.show() def _colLoadingState(self, oldState): "Run once, when col is loaded." self.enableColMenuItems() # ensure cwd is set if media dir exists self.col.media.dir() runHook("colLoading", self.col) self.moveToState("overview") def _selectedDeck(self): did = self.col.decks.selected() if not self.col.decks.nameOrNone(did): showInfo(_("Please select a deck.")) return return self.col.decks.get(did) def _overviewState(self, oldState): if not self._selectedDeck(): return self.moveToState("deckBrowser") self.col.reset() self.overview.show() def _reviewState(self, oldState): self.reviewer.show() def _reviewCleanup(self, newState): if newState != "resetRequired" and newState != "review": self.reviewer.cleanup() def noteChanged(self, nid): "Called when a card or note is edited (but not deleted)." runHook("noteChanged", nid) # Resetting state ########################################################################## def reset(self, guiOnly=False): "Called for non-trivial edits. Rebuilds queue and updates UI." if self.col: if not guiOnly: self.col.reset() runHook("reset") self.maybeEnableUndo() self.moveToState(self.state) def requireReset(self, modal=False): "Signal queue needs to be rebuilt when edits are finished or by user." 
self.autosave() self.resetModal = modal if self.interactiveState(): self.moveToState("resetRequired") def interactiveState(self): "True if not in profile manager, syncing, etc." return self.state in ("overview", "review", "deckBrowser") def maybeReset(self): self.autosave() if self.state == "resetRequired": self.state = self.returnState self.reset() def delayedMaybeReset(self): # if we redraw the page in a button click event it will often crash on # windows self.progress.timer(100, self.maybeReset, False) def _resetRequiredState(self, oldState): if oldState != "resetRequired": self.returnState = oldState if self.resetModal: # we don't have to change the webview, as we have a covering window return self.web.setLinkHandler(lambda url: self.delayedMaybeReset()) i = _("Waiting for editing to finish.") b = self.button("refresh", _("Resume Now"), id="resume") self.web.stdHtml(""" <center><div style="height: 100%%"> <div style="position:relative; vertical-align: middle;"> %s<br> %s</div></div></center> """ % (i, b), css=self.sharedCSS) self.bottomWeb.hide() self.web.setFocus() self.web.eval("$('#resume').focus()") # HTML helpers ########################################################################## sharedCSS = """ body { background: #f3f3f3; margin: 2em; } h1 { margin-bottom: 0.2em; } """ def button(self, link, name, key=None, class_="", id=""): class_ = "but "+ class_ if key: key = _("Shortcut key: %s") % key else: key = "" return ''' <button id="%s" class="%s" onclick="py.link('%s');return false;" title="%s">%s</button>''' % ( id, class_, link, key, name) # Main window setup ########################################################################## def setupMainWindow(self): # main window self.form = aqt.forms.main.Ui_MainWindow() self.form.setupUi(self) # toolbar tweb = aqt.webview.AnkiWebView() tweb.setObjectName("toolbarWeb") tweb.setFocusPolicy(Qt.WheelFocus) tweb.setFixedHeight(32+self.fontHeightDelta) self.toolbar = aqt.toolbar.Toolbar(self, tweb) 
self.toolbar.draw() # main area self.web = aqt.webview.AnkiWebView() self.web.setObjectName("mainText") self.web.setFocusPolicy(Qt.WheelFocus) self.web.setMinimumWidth(400) # bottom area sweb = self.bottomWeb = aqt.webview.AnkiWebView() #sweb.hide() sweb.setFixedHeight(100) sweb.setObjectName("bottomWeb") sweb.setFocusPolicy(Qt.WheelFocus) # add in a layout self.mainLayout = QVBoxLayout() self.mainLayout.setContentsMargins(0,0,0,0) self.mainLayout.setSpacing(0) self.mainLayout.addWidget(tweb) self.mainLayout.addWidget(self.web) self.mainLayout.addWidget(sweb) self.form.centralwidget.setLayout(self.mainLayout) def closeAllCollectionWindows(self): return aqt.dialogs.closeAll() # Components ########################################################################## def setupSignals(self): signal.signal(signal.SIGINT, self.onSigInt) def onSigInt(self, signum, frame): # interrupt any current transaction and schedule a rollback & quit self.col.db.interrupt() def quit(): self.col.db.rollback() self.close() self.progress.timer(100, quit, False) def setupProgress(self): self.progress = aqt.progress.ProgressManager(self) def setupErrorHandler(self): import aqt.errors self.errorHandler = aqt.errors.ErrorHandler(self) def setupAddons(self): import aqt.addons self.addonManager = aqt.addons.AddonManager(self) def setupThreads(self): self._mainThread = QThread.currentThread() def inMainThread(self): return self._mainThread == QThread.currentThread() def setupDeckBrowser(self): from aqt.deckbrowser import DeckBrowser self.deckBrowser = DeckBrowser(self) def setupOverview(self): from aqt.overview import Overview self.overview = Overview(self) def setupReviewer(self): from aqt.reviewer import Reviewer self.reviewer = Reviewer(self) # Syncing ########################################################################## def onSync(self, auto=False, reload=True): if not auto or (self.pm.profile['syncKey'] and self.pm.profile['autoSync'] and not self.safeMode): from aqt.sync import 
SyncManager if not self.unloadCollection(): return # set a sync state so the refresh timer doesn't fire while deck # unloaded self.state = "sync" self.syncer = SyncManager(self, self.pm) self.syncer.sync() if reload: if not self.col: self.loadCollection() # Tools ########################################################################## def raiseMain(self): if not self.app.activeWindow(): # make sure window is shown self.setWindowState(self.windowState() & ~Qt.WindowMinimized) return True def setStatus(self, text, timeout=3000): self.form.statusbar.showMessage(text, timeout) def setupStyle(self): applyStyles(self) # Key handling ########################################################################## def setupKeys(self): self.keyHandler = None # debug shortcut self.debugShortcut = QShortcut(QKeySequence("Ctrl+:"), self) self.connect( self.debugShortcut, SIGNAL("activated()"), self.onDebug) def keyPressEvent(self, evt): # do we have a delegate? if self.keyHandler: # did it eat the key? if self.keyHandler(evt): return # run standard handler QMainWindow.keyPressEvent(self, evt) # check global keys key = unicode(evt.text()) if key == "d": self.moveToState("deckBrowser") elif key == "s": if self.state == "overview": self.col.startTimebox() self.moveToState("review") else: self.moveToState("overview") elif key == "a": self.onAddCard() elif key == "b": self.onBrowse() elif key == "S": self.onStats() elif key == "y": self.onSync() # App exit ########################################################################## def closeEvent(self, event): "User hit the X button, etc." event.accept() self.onClose(force=True) def onClose(self, force=False): "Called from a shortcut key. Close current active window." 
aw = self.app.activeWindow() if not aw or aw == self or force: self.unloadProfile(browser=False) self.app.closeAllWindows() else: aw.close() # Undo & autosave ########################################################################## def onUndo(self): n = self.col.undoName() cid = self.col.undo() if cid and self.state == "review": card = self.col.getCard(cid) self.reviewer.cardQueue.append(card) else: tooltip(_("Reverted to state prior to '%s'.") % n.lower()) self.reset() self.maybeEnableUndo() def maybeEnableUndo(self): if self.col and self.col.undoName(): self.form.actionUndo.setText(_("Undo %s") % self.col.undoName()) self.form.actionUndo.setEnabled(True) runHook("undoState", True) else: self.form.actionUndo.setText(_("Undo")) self.form.actionUndo.setEnabled(False) runHook("undoState", False) def checkpoint(self, name): self.col.save(name) self.maybeEnableUndo() def autosave(self): self.col.autosave() self.maybeEnableUndo() # Other menu operations ########################################################################## def onAddCard(self): aqt.dialogs.open("AddCards", self) def onBrowse(self): aqt.dialogs.open("Browser", self) def onEditCurrent(self): aqt.dialogs.open("EditCurrent", self) def onDeckConf(self, deck=None): if not deck: deck = self.col.decks.current() if deck['dyn']: import aqt.dyndeckconf aqt.dyndeckconf.DeckConf(self, deck=deck) else: import aqt.deckconf aqt.deckconf.DeckConf(self, deck) def onOverview(self): self.col.reset() self.moveToState("overview") def onStats(self): deck = self._selectedDeck() if not deck: return aqt.stats.DeckStats(self) def onPrefs(self): import aqt.preferences aqt.preferences.Preferences(self) def onNoteTypes(self): import aqt.models aqt.models.Models(self, self, fromMain=True) def onAbout(self): import aqt.about aqt.about.show(self) def onDonate(self): openLink(aqt.appDonate) def onDocumentation(self): openHelp("") # Importing & exporting ########################################################################## def 
handleImport(self, path): import aqt.importing if not os.path.exists(path): return showInfo(_("Please use File>Import to import this file.")) aqt.importing.importFile(self, path) def onImport(self): import aqt.importing aqt.importing.onImport(self) def onExport(self, did=None): import aqt.exporting aqt.exporting.ExportDialog(self, did=did) # Cramming ########################################################################## def onCram(self, search=""): import aqt.dyndeckconf n = 1 deck = self.col.decks.current() if not search: if not deck['dyn']: search = 'deck:"%s" ' % deck['name'] decks = self.col.decks.allNames() while _("Filtered Deck %d") % n in decks: n += 1 name = _("Filtered Deck %d") % n did = self.col.decks.newDyn(name) diag = aqt.dyndeckconf.DeckConf(self, first=True, search=search) if not diag.ok: # user cancelled first config self.col.decks.rem(did) self.col.decks.select(deck['id']) else: self.moveToState("overview") # Menu, title bar & status ########################################################################## def setupMenus(self): m = self.form s = SIGNAL("triggered()") #self.connect(m.actionDownloadSharedPlugin, s, self.onGetSharedPlugin) self.connect(m.actionSwitchProfile, s, self.unloadProfile) self.connect(m.actionImport, s, self.onImport) self.connect(m.actionExport, s, self.onExport) self.connect(m.actionExit, s, self, SLOT("close()")) self.connect(m.actionPreferences, s, self.onPrefs) self.connect(m.actionAbout, s, self.onAbout) self.connect(m.actionUndo, s, self.onUndo) self.connect(m.actionFullDatabaseCheck, s, self.onCheckDB) self.connect(m.actionCheckMediaDatabase, s, self.onCheckMediaDB) self.connect(m.actionDocumentation, s, self.onDocumentation) self.connect(m.actionDonate, s, self.onDonate) self.connect(m.actionStudyDeck, s, self.onStudyDeck) self.connect(m.actionCreateFiltered, s, self.onCram) self.connect(m.actionEmptyCards, s, self.onEmptyCards) self.connect(m.actionNoteTypes, s, self.onNoteTypes) def updateTitleBar(self): 
self.setWindowTitle("Anki") # Auto update ########################################################################## def setupAutoUpdate(self): import aqt.update self.autoUpdate = aqt.update.LatestVersionFinder(self) self.connect(self.autoUpdate, SIGNAL("newVerAvail"), self.newVerAvail) self.connect(self.autoUpdate, SIGNAL("newMsg"), self.newMsg) self.connect(self.autoUpdate, SIGNAL("clockIsOff"), self.clockIsOff) self.autoUpdate.start() def newVerAvail(self, ver): if self.pm.meta.get('suppressUpdate', None) != ver: aqt.update.askAndUpdate(self, ver) def newMsg(self, data): aqt.update.showMessages(self, data) def clockIsOff(self, diff): diffText = ngettext("%s second", "%s seconds", diff) % diff warn = _("""\ In order to ensure your collection works correctly when moved between \ devices, Anki requires your computer's internal clock to be set correctly. \ The internal clock can be wrong even if your system is showing the correct \ local time. Please go to the time settings on your computer and check the following: - AM/PM - Clock drift - Day, month and year - Timezone - Daylight savings Difference to correct time: %s.""") % diffText showWarning(warn) self.app.closeAllWindows() # Count refreshing ########################################################################## def setupRefreshTimer(self): # every 10 minutes self.progress.timer(10*60*1000, self.onRefreshTimer, True) def onRefreshTimer(self): if self.state == "deckBrowser": self.deckBrowser.refresh() elif self.state == "overview": self.overview.refresh() # Permanent libanki hooks ########################################################################## def setupHooks(self): addHook("modSchema", self.onSchemaMod) addHook("remNotes", self.onRemNotes) addHook("odueInvalid", self.onOdueInvalid) def onOdueInvalid(self): showWarning(_("""\ Invalid property found on card. 
Please use Tools>Check Database, \ and if the problem comes up again, please ask on the support site.""")) # Log note deletion ########################################################################## def onRemNotes(self, col, nids): path = os.path.join(self.pm.profileFolder(), "deleted.txt") existed = os.path.exists(path) with open(path, "a") as f: if not existed: f.write("nid\tmid\tfields\n") for id, mid, flds in col.db.execute( "select id, mid, flds from notes where id in %s" % ids2str(nids)): fields = splitFields(flds) f.write(("\t".join([str(id), str(mid)] + fields)).encode("utf8")) f.write("\n") # Schema modifications ########################################################################## def onSchemaMod(self, arg): return askUser(_("""\ The requested change will require a full upload of the database when \ you next synchronize your collection. If you have reviews or other changes \ waiting on another device that haven't been synchronized here yet, they \ will be lost. Continue?""")) # Advanced features ########################################################################## def onCheckDB(self): "True if no problems" self.progress.start(immediate=True) ret, ok = self.col.fixIntegrity() self.progress.finish() if not ok: showText(ret) else: tooltip(ret) self.reset() return ret def onCheckMediaDB(self): self.progress.start(immediate=True) (nohave, unused, invalid) = self.col.media.check() self.progress.finish() # generate report report = "" if invalid: report += _("Invalid encoding; please rename:") report += "\n" + "\n".join(invalid) if unused: if report: report += "\n\n\n" report += _( "In media folder but not used by any cards:") report += "\n" + "\n".join(unused) if nohave: if report: report += "\n\n\n" report += _( "Used on cards but missing from media folder:") report += "\n" + "\n".join(nohave) if not report: tooltip(_("No unused or missing files found.")) return # show report and offer to delete diag = QDialog(self) diag.setWindowTitle("Anki") 
layout = QVBoxLayout(diag) diag.setLayout(layout) text = QTextEdit() text.setReadOnly(True) text.setPlainText(report) layout.addWidget(text) box = QDialogButtonBox(QDialogButtonBox.Close) layout.addWidget(box) b = QPushButton(_("Delete Unused")) b.setAutoDefault(False) box.addButton(b, QDialogButtonBox.ActionRole) b.connect( b, SIGNAL("clicked()"), lambda u=unused, d=diag: self.deleteUnused(u, d)) diag.connect(box, SIGNAL("rejected()"), diag, SLOT("reject()")) diag.setMinimumHeight(400) diag.setMinimumWidth(500) restoreGeom(diag, "checkmediadb") diag.exec_() saveGeom(diag, "checkmediadb") def deleteUnused(self, unused, diag): if not askUser( _("Delete unused media?")): return mdir = self.col.media.dir() for f in unused: path = os.path.join(mdir, f) if os.path.exists(path): send2trash(path) tooltip(_("Deleted.")) diag.close() def onStudyDeck(self): from aqt.studydeck import StudyDeck ret = StudyDeck( self, dyn=True, current=self.col.decks.current()['name']) if ret.name: self.col.decks.select(self.col.decks.id(ret.name)) self.moveToState("overview") def onEmptyCards(self): self.progress.start(immediate=True) cids = self.col.emptyCids() if not cids: self.progress.finish() tooltip(_("No empty cards.")) return report = self.col.emptyCardReport(cids) self.progress.finish() part1 = ngettext("%d card", "%d cards", len(cids)) % len(cids) part1 = _("%s to delete:") % part1 diag, box = showText(part1 + "\n\n" + report, run=False, geomKey="emptyCards") box.addButton(_("Delete Cards"), QDialogButtonBox.AcceptRole) box.button(QDialogButtonBox.Close).setDefault(True) def onDelete(): saveGeom(diag, "emptyCards") QDialog.accept(diag) self.checkpoint(_("Delete Empty")) self.col.remCards(cids) tooltip(ngettext("%d card deleted.", "%d cards deleted.", len(cids)) % len(cids)) self.reset() diag.connect(box, SIGNAL("accepted()"), onDelete) diag.show() # Debugging ###################################################################### def onDebug(self): d = self.debugDiag = QDialog() frm = 
aqt.forms.debug.Ui_Dialog() frm.setupUi(d) s = self.debugDiagShort = QShortcut(QKeySequence("ctrl+return"), d) self.connect(s, SIGNAL("activated()"), lambda: self.onDebugRet(frm)) s = self.debugDiagShort = QShortcut( QKeySequence("ctrl+shift+return"), d) self.connect(s, SIGNAL("activated()"), lambda: self.onDebugPrint(frm)) d.show() def _captureOutput(self, on): mw = self class Stream(object): def write(self, data): mw._output += data if on: self._output = "" self._oldStderr = sys.stderr self._oldStdout = sys.stdout s = Stream() sys.stderr = s sys.stdout = s else: sys.stderr = self._oldStderr sys.stdout = self._oldStdout def _debugCard(self): return self.reviewer.card.__dict__ def _debugBrowserCard(self): return aqt.dialogs._dialogs['Browser'][1].card.__dict__ def onDebugPrint(self, frm): frm.text.setPlainText("pp(%s)" % frm.text.toPlainText()) self.onDebugRet(frm) def onDebugRet(self, frm): import pprint, traceback text = frm.text.toPlainText() card = self._debugCard bcard = self._debugBrowserCard mw = self pp = pprint.pprint self._captureOutput(True) try: exec text except: self._output += traceback.format_exc() self._captureOutput(False) buf = "" for c, line in enumerate(text.strip().split("\n")): if c == 0: buf += ">>> %s\n" % line else: buf += "... 
%s\n" % line try: frm.log.appendPlainText(buf + (self._output or "<no output>")) except UnicodeDecodeError: frm.log.appendPlainText(_("<non-unicode text>")) frm.log.ensureCursorVisible() # System specific code ########################################################################## def setupFonts(self): f = QFontInfo(self.font()) ws = QWebSettings.globalSettings() self.fontHeight = f.pixelSize() self.fontFamily = f.family() self.fontHeightDelta = max(0, self.fontHeight - 13) ws.setFontFamily(QWebSettings.StandardFont, self.fontFamily) ws.setFontSize(QWebSettings.DefaultFontSize, self.fontHeight) def setupSystemSpecific(self): self.hideMenuAccels = False if isMac: # mac users expect a minimize option self.minimizeShortcut = QShortcut("Ctrl+M", self) self.connect(self.minimizeShortcut, SIGNAL("activated()"), self.onMacMinimize) self.hideMenuAccels = True self.maybeHideAccelerators() self.hideStatusTips() elif isWin: # make sure ctypes is bundled from ctypes import windll, wintypes _dummy = windll _dummy = wintypes def maybeHideAccelerators(self, tgt=None): if not self.hideMenuAccels: return tgt = tgt or self for action in tgt.findChildren(QAction): txt = unicode(action.text()) m = re.match("^(.+)\(&.+\)(.+)?", txt) if m: action.setText(m.group(1) + (m.group(2) or "")) def hideStatusTips(self): for action in self.findChildren(QAction): action.setStatusTip("") def onMacMinimize(self): self.setWindowState(self.windowState() | Qt.WindowMinimized) # Single instance support ########################################################################## def setupAppMsg(self): self.connect(self.app, SIGNAL("appMsg"), self.onAppMsg) def onAppMsg(self, buf): if self.state == "startup": # try again in a second return self.progress.timer(1000, lambda: self.onAppMsg(buf), False) elif self.state == "profileManager": # can't raise window while in profile manager if buf == "raise": return self.pendingImport = buf return tooltip(_("Deck will be imported when a profile is opened.")) if 
not self.interactiveState() or self.progress.busy(): # we can't raise the main window while in profile dialog, syncing, etc if buf != "raise": showInfo(_("""\ Please ensure a profile is open and Anki is not busy, then try again."""), parent=None) return # raise window if isWin: # on windows we can raise the window by minimizing and restoring self.showMinimized() self.setWindowState(Qt.WindowActive) self.showNormal() else: # on osx we can raise the window. on unity the icon in the tray will just flash. self.activateWindow() self.raise_() if buf == "raise": return # import if not isinstance(buf, unicode): buf = unicode(buf, "utf8", "ignore") self.handleImport(buf)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/main.py
main.py
from aqt.qt import *


class Toolbar(object):
    """Top toolbar of the main window.

    Renders a row of centre text links (Decks/Add/Browse) and right-hand
    icon links (stats/sync) into an AnkiWebView, and dispatches clicks on
    those links to the corresponding main-window actions.
    """

    def __init__(self, mw, web):
        # mw: the AnkiQt main window; web: the AnkiWebView to draw into.
        self.mw = mw
        self.web = web
        # the toolbar is a fixed-height strip, so never show a scrollbar
        self.web.page().mainFrame().setScrollBarPolicy(
            Qt.Vertical, Qt.ScrollBarAlwaysOff)
        self.web.setLinkHandler(self._linkHandler)
        # maps the href of each toolbar link to its handler method
        self.link_handlers = {
            "decks": self._deckLinkHandler,
            "study": self._studyLinkHandler,
            "add": self._addLinkHandler,
            "browse": self._browseLinkHandler,
            "stats": self._statsLinkHandler,
            "sync": self._syncLinkHandler,
        }

    def draw(self):
        """Render the toolbar HTML into the web view."""
        self.web.stdHtml(self._body % (
            # may want a context menu here in the future
            '&nbsp;'*20,
            self._centerLinks(),
            self._rightIcons()), self._css)

    # Available links
    ######################################################################

    def _rightIconsList(self):
        # [href, icon resource, tooltip] triples for the right-hand icons
        return [
            ["stats", "qrc:/icons/view-statistics.png",
             _("Show statistics. Shortcut key: %s") % "Shift+S"],
            ["sync", "qrc:/icons/view-refresh.png",
             _("Synchronize with AnkiWeb. Shortcut key: %s") % "Y"],
        ]

    def _centerLinks(self):
        # [href, label, tooltip] triples for the centre text links
        links = [
            ["decks", _("Decks"), _("Shortcut key: %s") % "D"],
            ["add", _("Add"), _("Shortcut key: %s") % "A"],
            ["browse", _("Browse"), _("Shortcut key: %s") % "B"],
        ]
        return self._linkHTML(links)

    def _linkHTML(self, links):
        # build the <a> markup for a list of [href, label, tooltip] triples
        buf = ""
        for ln, name, title in links:
            buf += '<a class=hitem title="%s" href="%s">%s</a>' % (
                title, ln, name)
            buf += "&nbsp;"*3
        return buf

    def _rightIcons(self):
        # build the <a><img></a> markup for the right-hand icon links
        buf = ""
        for ln, icon, title in self._rightIconsList():
            buf += '<a class=hitem title="%s" href="%s"><img width="16px" height="16px" src="%s"></a>' % (
                title, ln, icon)
        return buf

    # Link handling
    ######################################################################

    def _linkHandler(self, link):
        """Dispatch a clicked toolbar link to its handler."""
        # first set focus back to main window, or we're left with an ugly
        # focus ring around the clicked item
        self.mw.web.setFocus()
        if link in self.link_handlers:
            self.link_handlers[link]()

    def _deckLinkHandler(self):
        self.mw.moveToState("deckBrowser")

    def _studyLinkHandler(self):
        # if overview already shown, switch to review
        if self.mw.state == "overview":
            self.mw.col.startTimebox()
            self.mw.moveToState("review")
        else:
            self.mw.onOverview()

    def _addLinkHandler(self):
        self.mw.onAddCard()

    def _browseLinkHandler(self):
        self.mw.onBrowse()

    def _statsLinkHandler(self):
        self.mw.onStats()

    def _syncLinkHandler(self):
        self.mw.onSync()

    # HTML & CSS
    ######################################################################

    # three %s slots: left padding, centre links, right icons
    _body = """
<table id=header width=100%%>
<tr>
<td width=16%% align=left>%s</td>
<td align=center>%s</td>
<td width=15%% align=right>%s</td>
</tr></table>
"""

    _css = """
#header {
margin:0;
margin-top: 4px;
font-weight: bold;
}
html {
height: 100%;
background: -webkit-gradient(linear, left top, left bottom,
from(#ddd), to(#fff));
margin:0; padding:0;
}
body {
margin:0; padding:0;
position:absolute;
top:0;left:0;right:0;bottom:0;
-webkit-user-select: none;
border-bottom: 1px solid #aaa;
}
* {
-webkit-user-drag: none;
}
.hitem {
padding-right: 6px;
text-decoration: none;
color: #000;
}
.hitem:hover {
text-decoration: underline;
}
"""


class BottomBar(Toolbar):
    """Bottom bar variant of the toolbar: inverted gradient, centred body.

    NOTE(review): ``draw`` here takes a ``buf`` argument and so does not
    match the base-class ``Toolbar.draw(self)`` signature — callers must
    know which variant they hold.
    """

    # reuse the base CSS, overriding the gradient direction and borders
    _css = Toolbar._css + """
#header {
background: -webkit-gradient(linear, left top, left bottom,
from(#fff), to(#ddd));
border-bottom: 0;
border-top: 1px solid #aaa;
margin-bottom: 6px;
margin-top: 0;
}
"""

    _centerBody = """
<center><table width=100%% height=100%% id=header><tr><td align=center>
%s</td></tr></table></center>
"""

    def draw(self, buf):
        """Show the bar and render *buf* (pre-built HTML) centred in it."""
        self.web.show()
        self.web.stdHtml(
            self._centerBody % buf,
            self._css)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/toolbar.py
toolbar.py
from aqt.qt import *
import aqt
from aqt.utils import showInfo, showWarning
from anki.consts import *

# radio-button indices in the custom study dialog
RADIO_NEW = 1
RADIO_REV = 2
RADIO_FORGOT = 3
RADIO_AHEAD = 4
RADIO_PREVIEW = 5
RADIO_CRAM = 6

# rows of the card-type chooser shown for the cram option
TYPE_NEW = 0
TYPE_DUE = 1
TYPE_ALL = 2


class CustomStudy(QDialog):
    """Modal 'Custom Study' dialog.

    Lets the user either extend today's new/review limits for the current
    deck, or build a filtered ('Custom Study Session') deck from one of
    several presets.  The dialog runs itself modally from __init__.
    """

    def __init__(self, mw):
        QDialog.__init__(self, mw)
        self.mw = mw
        # operates on whichever deck is currently selected
        self.deck = self.mw.col.decks.current()
        self.form = f = aqt.forms.customstudy.Ui_Dialog()
        f.setupUi(self)
        self.setWindowModality(Qt.WindowModal)
        self.setupSignals()
        # simulate a click so the first option's UI state is applied
        f.radio1.click()
        self.exec_()

    def setupSignals(self):
        # old-style signal connections; each radio updates the shared UI
        f = self.form; c = self.connect; s = SIGNAL("clicked()")
        c(f.radio1, s, lambda: self.onRadioChange(1))
        c(f.radio2, s, lambda: self.onRadioChange(2))
        c(f.radio3, s, lambda: self.onRadioChange(3))
        c(f.radio4, s, lambda: self.onRadioChange(4))
        c(f.radio5, s, lambda: self.onRadioChange(5))
        c(f.radio6, s, lambda: self.onRadioChange(6))

    def onRadioChange(self, idx):
        """Reconfigure the spinner/labels/OK button for the chosen option.

        Records the selection in self.radioIdx, which accept() later reads.
        """
        f = self.form; sp = f.spin
        # defaults; individual branches override as needed
        smin = 1; smax = DYN_MAX_SIZE; sval = 1
        post = _("cards")
        tit = ""
        spShow = True  # NOTE(review): never set False — the spinner is always shown
        typeShow = False
        ok = _("OK")
        def plus(num):
            # display counts capped at 1000 as "1000+"
            if num == 1000:
                num = "1000+"
            return "<b>"+str(num)+"</b>"
        if idx == RADIO_NEW:
            new = self.mw.col.sched.totalNewForCurrentDeck()
            # NOTE(review): dead no-op expression — the lookup result is
            # discarded; looks like a leftover and could be removed
            self.deck['newToday']
            tit = _("New cards in deck: %s") % plus(new)
            pre = _("Increase today's new card limit by")
            sval = min(new, self.deck.get('extendNew', 10))
            smax = new
        elif idx == RADIO_REV:
            rev = self.mw.col.sched.totalRevForCurrentDeck()
            tit = _("Reviews due in deck: %s") % plus(rev)
            pre = _("Increase today's review limit by")
            sval = min(rev, self.deck.get('extendRev', 10))
        elif idx == RADIO_FORGOT:
            pre = _("Review cards forgotten in last")
            post = _("days")
            smax = 30
        elif idx == RADIO_AHEAD:
            pre = _("Review ahead by")
            post = _("days")
        elif idx == RADIO_PREVIEW:
            pre = _("Preview new cards added in the last")
            post = _("days")
            sval = 1
        elif idx == RADIO_CRAM:
            pre = _("Select")
            post = _("cards from the deck")
            #tit = _("After pressing OK, you can choose which tags to include.")
            ok = _("Choose Tags")
            sval = 100
            typeShow = True
        sp.setVisible(spShow)
        f.cardType.setVisible(typeShow)
        f.title.setText(tit)
        # hide the title label when there is no title text
        f.title.setVisible(not not tit)
        f.spin.setMinimum(smin)
        f.spin.setMaximum(smax)
        f.spin.setValue(sval)
        f.preSpin.setText(pre)
        f.postSpin.setText(post)
        f.buttonBox.button(QDialogButtonBox.Ok).setText(ok)
        self.radioIdx = idx

    def accept(self):
        """Apply the chosen option, then close the dialog.

        The limit-extension options return early; all others fall through
        to create (or reuse and empty) a filtered deck and rebuild it.
        """
        f = self.form; i = self.radioIdx; spin = f.spin.value()
        if i == RADIO_NEW:
            self.deck['extendNew'] = spin
            self.mw.col.decks.save(self.deck)
            self.mw.col.sched.extendLimits(spin, 0)
            self.mw.reset()
            return QDialog.accept(self)
        elif i == RADIO_REV:
            self.deck['extendRev'] = spin
            self.mw.col.decks.save(self.deck)
            self.mw.col.sched.extendLimits(0, spin)
            self.mw.reset()
            return QDialog.accept(self)
        elif i == RADIO_CRAM:
            # opens the tag-limit chooser; its result is used below
            tags = self._getTags()
        # the rest create a filtered deck
        cur = self.mw.col.decks.byName(_("Custom Study Session"))
        if cur:
            if not cur['dyn']:
                # a regular deck already occupies the reserved name
                showInfo("Please rename the existing Custom Study deck first.")
                return QDialog.accept(self)
            else:
                # safe to empty
                self.mw.col.sched.emptyDyn(cur['id'])
                # reuse; don't delete as it may have children
                dyn = cur
                self.mw.col.decks.select(cur['id'])
        else:
            did = self.mw.col.decks.newDyn(_("Custom Study Session"))
            dyn = self.mw.col.decks.get(did)
        # and then set various options
        # dyn['terms'][0] is [search, limit, order]; resched controls
        # whether answering cards in the filtered deck affects scheduling
        if i == RADIO_FORGOT:
            dyn['delays'] = [1]
            dyn['terms'][0] = ['rated:%d:1' % spin, DYN_MAX_SIZE, DYN_RANDOM]
            dyn['resched'] = False
        elif i == RADIO_AHEAD:
            dyn['delays'] = None
            dyn['terms'][0] = ['prop:due<=%d' % spin, DYN_MAX_SIZE, DYN_DUE]
            dyn['resched'] = True
        elif i == RADIO_PREVIEW:
            dyn['delays'] = None
            dyn['terms'][0] = ['is:new added:%s'%spin, DYN_MAX_SIZE, DYN_OLDEST]
            dyn['resched'] = False
        elif i == RADIO_CRAM:
            dyn['delays'] = None
            # 'type'/'ord' shadow builtins; kept as-is for fidelity
            type = f.cardType.currentRow()
            if type == TYPE_NEW:
                terms = "is:new "
                ord = DYN_ADDED
                dyn['resched'] = True
            elif type == TYPE_DUE:
                terms = "is:due "
                ord = DYN_DUE
                dyn['resched'] = True
            else:
                terms = ""
                ord = DYN_RANDOM
                dyn['resched'] = False
            dyn['terms'][0] = [(terms+tags).strip(), spin, ord]
        # add deck limit
        dyn['terms'][0][0] = "deck:\"%s\" %s " % (self.deck['name'], dyn['terms'][0][0])
        # generate cards
        if not self.mw.col.sched.rebuildDyn():
            return showWarning(_("No cards matched the criteria you provided."))
        self.mw.moveToState("overview")
        QDialog.accept(self)

    def _getTags(self):
        """Show the tag-limit dialog and return its search-term string."""
        from aqt.taglimit import TagLimit
        t = TagLimit(self.mw, self)
        return t.tags
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/customstudy.py
customstudy.py
import re import os import urllib2 import ctypes import urllib from anki.lang import _ from aqt.qt import * from anki.utils import stripHTML, isWin, isMac, namedtmp, json, stripHTMLMedia import anki.sound from anki.hooks import runHook, runFilter from aqt.sound import getAudio from aqt.webview import AnkiWebView from aqt.utils import shortcut, showInfo, showWarning, getBase, getFile, \ openHelp, tooltip import aqt import anki.js from BeautifulSoup import BeautifulSoup pics = ("jpg", "jpeg", "png", "tif", "tiff", "gif", "svg", "webp") audio = ("wav", "mp3", "ogg", "flac", "mp4", "swf", "mov", "mpeg", "mkv", "m4a", "3gp", "spx", "oga") _html = """ <html><head>%s<style> .field { border: 1px solid #aaa; background:#fff; color:#000; padding: 5px; } /* prevent floated images from being displayed outside field */ .field:after { content: ""; display: block; height: 0; clear: both; visibility: hidden; } .fname { vertical-align: middle; padding: 0; } img { max-width: 90%%; } body { margin: 5px; } </style><script> %s var currentField = null; var changeTimer = null; var dropTarget = null; String.prototype.format = function() { var args = arguments; return this.replace(/\{\d+\}/g, function(m){ return args[m.match(/\d+/)]; }); }; function onKey() { // esc clears focus, allowing dialog to close if (window.event.which == 27) { currentField.blur(); return; } clearChangeTimer(); if (currentField.innerHTML == "<div><br></div>") { // fix empty div bug. 
slight flicker, but must be done in a timer changeTimer = setTimeout(function () { currentField.innerHTML = "<br>"; sendState(); saveField("key"); }, 1); } else { changeTimer = setTimeout(function () { sendState(); saveField("key"); }, 600); } }; function sendState() { var r = { 'bold': document.queryCommandState("bold"), 'italic': document.queryCommandState("italic"), 'under': document.queryCommandState("underline"), 'super': document.queryCommandState("superscript"), 'sub': document.queryCommandState("subscript"), 'col': document.queryCommandValue("forecolor") }; py.run("state:" + JSON.stringify(r)); }; function setFormat(cmd, arg, nosave) { document.execCommand(cmd, false, arg); if (!nosave) { saveField('key'); } }; function clearChangeTimer() { if (changeTimer) { clearTimeout(changeTimer); changeTimer = null; } }; function onFocus(elem) { currentField = elem; py.run("focus:" + currentField.id.substring(1)); // don't adjust cursor on mouse clicks if (mouseDown) { return; } // do this twice so that there's no flicker on newer versions caretToEnd(); // need to do this in a timeout for older qt versions setTimeout(function () { caretToEnd() }, 1); // scroll if bottom of element off the screen function pos(obj) { var cur = 0; do { cur += obj.offsetTop; } while (obj = obj.offsetParent); return cur; } var y = pos(elem); if ((window.pageYOffset+window.innerHeight) < (y+elem.offsetHeight) || window.pageYOffset > y) { window.scroll(0,y+elem.offsetHeight-window.innerHeight); } } function focusField(n) { $("#f"+n).focus(); } function onDragOver(elem) { // if we focus the target element immediately, the drag&drop turns into a // copy, so note it down for later instead dropTarget = elem; } function caretToEnd() { var r = document.createRange() r.selectNodeContents(currentField); r.collapse(false); var s = document.getSelection(); s.removeAllRanges(); s.addRange(r); }; function onBlur() { if (currentField) { saveField("blur"); } clearChangeTimer(); // if we lose focus, assume 
the last field is still targeted //currentField = null; }; function saveField(type) { if (!currentField) { // no field has been focused yet return; } // type is either 'blur' or 'key' py.run(type + ":" + currentField.innerHTML); clearChangeTimer(); }; function wrappedExceptForWhitespace(text, front, back) { var match = text.match(/^(\s*)([^]*?)(\s*)$/); return match[1] + front + match[2] + back + match[3]; }; function wrap(front, back) { var s = window.getSelection(); var r = s.getRangeAt(0); var content = r.cloneContents(); var span = document.createElement("span") span.appendChild(content); var new_ = wrappedExceptForWhitespace(span.innerHTML, front, back); setFormat("inserthtml", new_); if (!span.innerHTML) { // run with an empty selection; move cursor back past postfix r = s.getRangeAt(0); r.setStart(r.startContainer, r.startOffset - back.length); r.collapse(true); s.removeAllRanges(); s.addRange(r); } }; function setFields(fields, focusTo) { var txt = ""; for (var i=0; i<fields.length; i++) { var n = fields[i][0]; var f = fields[i][1]; if (!f) { f = "<br>"; } txt += "<tr><td class=fname>{0}</td></tr><tr><td width=100%%>".format(n); txt += "<div id=f{0} onkeydown='onKey();' onmouseup='onKey();'".format(i); txt += " onfocus='onFocus(this);' onblur='onBlur();' class=field "; txt += "ondragover='onDragOver(this);' "; txt += "contentEditable=true class=field>{0}</div>".format(f); txt += "</td></tr>"; } $("#fields").html("<table cellpadding=0 width=100%%>"+txt+"</table>"); if (!focusTo) { focusTo = 0; } if (focusTo >= 0) { $("#f"+focusTo).focus(); } }; function setBackgrounds(cols) { for (var i=0; i<cols.length; i++) { $("#f"+i).css("background", cols[i]); } } function setFonts(fonts) { for (var i=0; i<fonts.length; i++) { $("#f"+i).css("font-family", fonts[i][0]); $("#f"+i).css("font-size", fonts[i][1]); $("#f"+i)[0].dir = fonts[i][2] ? 
"rtl" : "ltr"; } } function showDupes() { $("#dupes").show(); } function hideDupes() { $("#dupes").hide(); } var mouseDown = 0; $(function () { document.body.onmousedown = function () { mouseDown++; } document.body.onmouseup = function () { mouseDown--; } document.onclick = function (evt) { var src = window.event.srcElement; if (src.tagName == "IMG") { // image clicked; find contenteditable parent var p = src; while (p = p.parentNode) { if (p.className == "field") { $("#"+p.id).focus(); break; } } } } }); </script></head><body> <div id="fields"></div> <div id="dupes"><a href="#" onclick="py.run('dupes');return false;">%s</a></div> </body></html> """ # caller is responsible for resetting note on reset class Editor(object): def __init__(self, mw, widget, parentWindow, addMode=False): self.mw = mw self.widget = widget self.parentWindow = parentWindow self.note = None self.stealFocus = True self.addMode = addMode self._loaded = False self.currentField = 0 # current card, for card layout self.card = None self.setupOuter() self.setupButtons() self.setupWeb() self.setupTags() self.setupKeyboard() # Initial setup ############################################################ def setupOuter(self): l = QVBoxLayout() l.setMargin(0) l.setSpacing(0) self.widget.setLayout(l) self.outerLayout = l def setupWeb(self): self.web = EditorWebView(self.widget, self) self.web.allowDrops = True self.web.setBridge(self.bridge) self.outerLayout.addWidget(self.web, 1) # pick up the window colour p = self.web.palette() p.setBrush(QPalette.Base, Qt.transparent) self.web.page().setPalette(p) self.web.setAttribute(Qt.WA_OpaquePaintEvent, False) # Top buttons ###################################################################### def _addButton(self, name, func, key=None, tip=None, size=True, text="", check=False, native=False, canDisable=True): b = QPushButton(text) if check: b.connect(b, SIGNAL("clicked(bool)"), func) else: b.connect(b, SIGNAL("clicked()"), func) if size: b.setFixedHeight(20) 
b.setFixedWidth(20) if not native: if self.plastiqueStyle: b.setStyle(self.plastiqueStyle) b.setFocusPolicy(Qt.NoFocus) else: b.setAutoDefault(False) if not text: b.setIcon(QIcon(":/icons/%s.png" % name)) if key: b.setShortcut(QKeySequence(key)) if tip: b.setToolTip(shortcut(tip)) if check: b.setCheckable(True) self.iconsBox.addWidget(b) if canDisable: self._buttons[name] = b return b def setupButtons(self): self._buttons = {} # button styles for mac if not isMac: self.plastiqueStyle = QStyleFactory.create("plastique") if not self.plastiqueStyle: # plastique was removed in qt5 self.plastiqueStyle = QStyleFactory.create("fusion") self.widget.setStyle(self.plastiqueStyle) else: self.plastiqueStyle = None # icons self.iconsBox = QHBoxLayout() if not isMac: self.iconsBox.setMargin(6) self.iconsBox.setSpacing(0) else: self.iconsBox.setMargin(0) self.iconsBox.setSpacing(14) self.outerLayout.addLayout(self.iconsBox) b = self._addButton b("fields", self.onFields, "", shortcut(_("Customize Fields")), size=False, text=_("Fields..."), native=True, canDisable=False) self.iconsBox.addItem(QSpacerItem(6,1, QSizePolicy.Fixed)) b("layout", self.onCardLayout, _("Ctrl+L"), shortcut(_("Customize Cards (Ctrl+L)")), size=False, text=_("Cards..."), native=True, canDisable=False) # align to right self.iconsBox.addItem(QSpacerItem(20,1, QSizePolicy.Expanding)) b("text_bold", self.toggleBold, _("Ctrl+B"), _("Bold text (Ctrl+B)"), check=True) b("text_italic", self.toggleItalic, _("Ctrl+I"), _("Italic text (Ctrl+I)"), check=True) b("text_under", self.toggleUnderline, _("Ctrl+U"), _("Underline text (Ctrl+U)"), check=True) b("text_super", self.toggleSuper, _("Ctrl+Shift+="), _("Superscript (Ctrl+Shift+=)"), check=True) b("text_sub", self.toggleSub, _("Ctrl+="), _("Subscript (Ctrl+=)"), check=True) b("text_clear", self.removeFormat, _("Ctrl+R"), _("Remove formatting (Ctrl+R)")) but = b("foreground", self.onForeground, _("F7"), text=" ") but.setToolTip(_("Set foreground colour (F7)")) 
self.setupForegroundButton(but) but = b("change_colour", self.onChangeCol, _("F8"), _("Change colour (F8)"), text=u"▾") but.setFixedWidth(12) but = b("cloze", self.onCloze, _("Ctrl+Shift+C"), _("Cloze deletion (Ctrl+Shift+C)"), text="[...]") but.setFixedWidth(24) s = self.clozeShortcut2 = QShortcut( QKeySequence(_("Ctrl+Alt+Shift+C")), self.parentWindow) s.connect(s, SIGNAL("activated()"), self.onCloze) # fixme: better image names b("mail-attachment", self.onAddMedia, _("F3"), _("Attach pictures/audio/video (F3)")) b("media-record", self.onRecSound, _("F5"), _("Record audio (F5)")) b("adv", self.onAdvanced, text=u"▾") s = QShortcut(QKeySequence("Ctrl+T, T"), self.widget) s.connect(s, SIGNAL("activated()"), self.insertLatex) s = QShortcut(QKeySequence("Ctrl+T, E"), self.widget) s.connect(s, SIGNAL("activated()"), self.insertLatexEqn) s = QShortcut(QKeySequence("Ctrl+T, M"), self.widget) s.connect(s, SIGNAL("activated()"), self.insertLatexMathEnv) s = QShortcut(QKeySequence("Ctrl+Shift+X"), self.widget) s.connect(s, SIGNAL("activated()"), self.onHtmlEdit) # tags s = QShortcut(QKeySequence("Ctrl+Shift+T"), self.widget) s.connect(s, SIGNAL("activated()"), lambda: self.tags.setFocus()) runHook("setupEditorButtons", self) def enableButtons(self, val=True): for b in self._buttons.values(): b.setEnabled(val) def disableButtons(self): self.enableButtons(False) def onFields(self): from aqt.fields import FieldDialog self.saveNow() FieldDialog(self.mw, self.note, parent=self.parentWindow) def onCardLayout(self): from aqt.clayout import CardLayout self.saveNow() if self.card: ord = self.card.ord else: ord = 0 # passing parentWindow leads to crash on windows at the moment if isWin: parent=None else: parent=self.parentWindow CardLayout(self.mw, self.note, ord=ord, parent=parent, addMode=self.addMode) self.loadNote() if isWin: self.parentWindow.activateWindow() # JS->Python bridge ###################################################################### def bridge(self, str): if not 
self.note or not runHook: # shutdown return # focus lost or key/button pressed? if str.startswith("blur") or str.startswith("key"): (type, txt) = str.split(":", 1) txt = self.mungeHTML(txt) # misbehaving apps may include a null byte in the text txt = txt.replace("\x00", "") # reverse the url quoting we added to get images to display txt = self.mw.col.media.escapeImages(txt, unescape=True) self.note.fields[self.currentField] = txt if not self.addMode: self.note.flush() self.mw.requireReset() if type == "blur": self.disableButtons() # run any filters if runFilter( "editFocusLost", False, self.note, self.currentField): # something updated the note; schedule reload def onUpdate(): if not self.note: return self.stealFocus = True self.loadNote() self.checkValid() self.mw.progress.timer(100, onUpdate, False) else: self.checkValid() else: runHook("editTimer", self.note) self.checkValid() # focused into field? elif str.startswith("focus"): (type, num) = str.split(":", 1) self.enableButtons() self.currentField = int(num) runHook("editFocusGained", self.note, self.currentField) # state buttons changed? elif str.startswith("state"): (cmd, txt) = str.split(":", 1) r = json.loads(txt) self._buttons['text_bold'].setChecked(r['bold']) self._buttons['text_italic'].setChecked(r['italic']) self._buttons['text_under'].setChecked(r['under']) self._buttons['text_super'].setChecked(r['super']) self._buttons['text_sub'].setChecked(r['sub']) elif str.startswith("dupes"): self.showDupes() else: print str def mungeHTML(self, txt): if txt == "<br>": txt = "" return self._filterHTML(txt, localize=False) # Setting/unsetting the current note ###################################################################### def _loadFinished(self, w): self._loaded = True if self.note: self.loadNote() def setNote(self, note, hide=True, focus=False): "Make NOTE the current note." 
self.note = note self.currentField = 0 self.disableButtons() if focus: self.stealFocus = True # change timer if self.note: self.web.setHtml(_html % ( getBase(self.mw.col), anki.js.jquery, _("Show Duplicates")), loadCB=self._loadFinished) self.updateTags() self.updateKeyboard() else: self.hideCompleters() if hide: self.widget.hide() def loadNote(self): if not self.note: return if self.stealFocus: field = self.currentField else: field = -1 if not self._loaded: # will be loaded when page is ready return data = [] for fld, val in self.note.items(): data.append((fld, self.mw.col.media.escapeImages(val))) self.web.eval("setFields(%s, %d);" % ( json.dumps(data), field)) self.web.eval("setFonts(%s);" % ( json.dumps(self.fonts()))) self.checkValid() self.widget.show() if self.stealFocus: self.web.setFocus() self.stealFocus = False def focus(self): self.web.setFocus() def fonts(self): return [(f['font'], f['size'], f['rtl']) for f in self.note.model()['flds']] def saveNow(self): "Must call this before adding cards, closing dialog, etc." 
if not self.note: return self.saveTags() if self.mw.app.focusWidget() != self.web: # if no fields are focused, there's nothing to save return # move focus out of fields and save tags self.parentWindow.setFocus() # and process events so any focus-lost hooks fire self.mw.app.processEvents() def checkValid(self): cols = [] err = None for f in self.note.fields: cols.append("#fff") err = self.note.dupeOrEmpty() if err == 2: cols[0] = "#fcc" self.web.eval("showDupes();") else: self.web.eval("hideDupes();") self.web.eval("setBackgrounds(%s);" % json.dumps(cols)) def showDupes(self): contents = stripHTMLMedia(self.note.fields[0]) browser = aqt.dialogs.open("Browser", self.mw) browser.form.searchEdit.lineEdit().setText( '"dupe:%s,%s"' % (self.note.model()['id'], contents)) browser.onSearch() def fieldsAreBlank(self): if not self.note: return True m = self.note.model() for c, f in enumerate(self.note.fields): if f and not m['flds'][c]['sticky']: return False return True # HTML editing ###################################################################### def onHtmlEdit(self): self.saveNow() d = QDialog(self.widget) form = aqt.forms.edithtml.Ui_Dialog() form.setupUi(d) d.connect(form.buttonBox, SIGNAL("helpRequested()"), lambda: openHelp("editor")) form.textEdit.setPlainText(self.note.fields[self.currentField]) form.textEdit.moveCursor(QTextCursor.End) d.exec_() html = form.textEdit.toPlainText() # filter html through beautifulsoup so we can strip out things like a # leading </div> html = unicode(BeautifulSoup(html)) self.note.fields[self.currentField] = html self.loadNote() # focus field so it's saved self.web.setFocus() self.web.eval("focusField(%d);" % self.currentField) # Tag handling ###################################################################### def setupTags(self): import aqt.tagedit g = QGroupBox(self.widget) g.setFlat(True) tb = QGridLayout() tb.setSpacing(12) tb.setMargin(6) # tags l = QLabel(_("Tags")) tb.addWidget(l, 1, 0) self.tags = 
aqt.tagedit.TagEdit(self.widget) self.tags.connect(self.tags, SIGNAL("lostFocus"), self.saveTags) self.tags.setToolTip(shortcut(_("Jump to tags with Ctrl+Shift+T"))) tb.addWidget(self.tags, 1, 1) g.setLayout(tb) self.outerLayout.addWidget(g) def updateTags(self): if self.tags.col != self.mw.col: self.tags.setCol(self.mw.col) if not self.tags.text() or not self.addMode: self.tags.setText(self.note.stringTags().strip()) def saveTags(self): if not self.note: return self.note.tags = self.mw.col.tags.canonify( self.mw.col.tags.split(self.tags.text())) self.tags.setText(self.mw.col.tags.join(self.note.tags).strip()) if not self.addMode: self.note.flush() runHook("tagsUpdated", self.note) def saveAddModeVars(self): if self.addMode: # save tags to model m = self.note.model() m['tags'] = self.note.tags self.mw.col.models.save(m) def hideCompleters(self): self.tags.hideCompleter() # Format buttons ###################################################################### def toggleBold(self, bool): self.web.eval("setFormat('bold');") def toggleItalic(self, bool): self.web.eval("setFormat('italic');") def toggleUnderline(self, bool): self.web.eval("setFormat('underline');") def toggleSuper(self, bool): self.web.eval("setFormat('superscript');") def toggleSub(self, bool): self.web.eval("setFormat('subscript');") def removeFormat(self): self.web.eval("setFormat('removeFormat');") def onCloze(self): # check that the model is set up for cloze deletion if not re.search('{{(.*:)*cloze:',self.note.model()['tmpls'][0]['qfmt']): if self.addMode: tooltip(_("Warning, cloze deletions will not work until " "you switch the type at the top to Cloze.")) else: showInfo(_("""\ To make a cloze deletion on an existing note, you need to change it \ to a cloze type first, via Edit>Change Note Type.""")) return # find the highest existing cloze highest = 0 for name, val in self.note.items(): m = re.findall("\{\{c(\d+)::", val) if m: highest = max(highest, sorted([int(x) for x in m])[-1]) # reuse last? 
if not self.mw.app.keyboardModifiers() & Qt.AltModifier: highest += 1 # must start at 1 highest = max(1, highest) self.web.eval("wrap('{{c%d::', '}}');" % highest) # Foreground colour ###################################################################### def setupForegroundButton(self, but): self.foregroundFrame = QFrame() self.foregroundFrame.setAutoFillBackground(True) self.foregroundFrame.setFocusPolicy(Qt.NoFocus) self.fcolour = self.mw.pm.profile.get("lastColour", "#00f") self.onColourChanged() hbox = QHBoxLayout() hbox.addWidget(self.foregroundFrame) hbox.setMargin(5) but.setLayout(hbox) # use last colour def onForeground(self): self._wrapWithColour(self.fcolour) # choose new colour def onChangeCol(self): new = QColorDialog.getColor(QColor(self.fcolour), None) # native dialog doesn't refocus us for some reason self.parentWindow.activateWindow() if new.isValid(): self.fcolour = new.name() self.onColourChanged() self._wrapWithColour(self.fcolour) def _updateForegroundButton(self): self.foregroundFrame.setPalette(QPalette(QColor(self.fcolour))) def onColourChanged(self): self._updateForegroundButton() self.mw.pm.profile['lastColour'] = self.fcolour def _wrapWithColour(self, colour): self.web.eval("setFormat('forecolor', '%s')" % colour) # Audio/video/images ###################################################################### def onAddMedia(self): key = (_("Media") + " (*.jpg *.png *.gif *.tiff *.svg *.tif *.jpeg "+ "*.mp3 *.ogg *.wav *.avi *.ogv *.mpg *.mpeg *.mov *.mp4 " + "*.mkv *.ogx *.ogv *.oga *.flv *.swf *.flac)") def accept(file): self.addMedia(file, canDelete=True) file = getFile(self.widget, _("Add Media"), accept, key, key="media") self.parentWindow.activateWindow() def addMedia(self, path, canDelete=False): html = self._addMedia(path, canDelete) self.web.eval("setFormat('inserthtml', %s);" % json.dumps(html)) def _addMedia(self, path, canDelete=False): "Add to media folder and return local img or sound tag." 
# copy to media folder fname = self.mw.col.media.addFile(path) # remove original? if canDelete and self.mw.pm.profile['deleteMedia']: if os.path.abspath(fname) != os.path.abspath(path): try: os.unlink(path) except: pass # return a local html link return self.fnameToLink(fname) def onRecSound(self): try: file = getAudio(self.widget) except Exception, e: showWarning(_( "Couldn't record audio. Have you installed lame and sox?") + "\n\n" + repr(str(e))) return self.addMedia(file) # Media downloads ###################################################################### def urlToLink(self, url): fname = self.urlToFile(url) if not fname: return "" return self.fnameToLink(fname) def fnameToLink(self, fname): ext = fname.split(".")[-1].lower() if ext in pics: name = urllib.quote(fname.encode("utf8")) return '<img src="%s">' % name else: anki.sound.play(fname) return '[sound:%s]' % fname def urlToFile(self, url): l = url.lower() for suffix in pics+audio: if l.endswith(suffix): return self._retrieveURL(url) # not a supported type return def isURL(self, s): s = s.lower() return (s.startswith("http://") or s.startswith("https://") or s.startswith("ftp://") or s.startswith("file://")) def _retrieveURL(self, url): "Download file into media folder and return local filename or None." # urllib doesn't understand percent-escaped utf8, but requires things like # '#' to be escaped. 
we don't try to unquote the incoming URL, because # we should only be receiving file:// urls from url mime, which is unquoted if url.lower().startswith("file://"): url = url.replace("%", "%25") url = url.replace("#", "%23") # fetch it into a temporary folder self.mw.progress.start( immediate=True, parent=self.parentWindow) try: req = urllib2.Request(url, None, { 'User-Agent': 'Mozilla/5.0 (compatible; Anki)'}) filecontents = urllib2.urlopen(req).read() except urllib2.URLError, e: showWarning(_("An error occurred while opening %s") % e) return finally: self.mw.progress.finish() path = unicode(urllib2.unquote(url.encode("utf8")), "utf8") return self.mw.col.media.writeData(path, filecontents) # HTML filtering ###################################################################### def _filterHTML(self, html, localize=False): doc = BeautifulSoup(html) # remove implicit regular font style from outermost element if doc.span: try: attrs = doc.span['style'].split(";") except (KeyError, TypeError): attrs = [] if attrs: new = [] for attr in attrs: sattr = attr.strip() if sattr and sattr not in ("font-style: normal", "font-weight: normal"): new.append(sattr) doc.span['style'] = ";".join(new) # filter out implicit formatting from webkit for tag in doc("span", "Apple-style-span"): preserve = "" for item in tag['style'].split(";"): try: k, v = item.split(":") except ValueError: continue if k.strip() == "color" and not v.strip() == "rgb(0, 0, 0)": preserve += "color:%s;" % v if k.strip() in ("font-weight", "font-style"): preserve += item + ";" if preserve: # preserve colour attribute, delete implicit class tag['style'] = preserve del tag['class'] else: # strip completely tag.replaceWithChildren() for tag in doc("font", "Apple-style-span"): # strip all but colour attr from implicit font tags if 'color' in dict(tag.attrs): for attr in tag.attrs: if attr != "color": del tag[attr] # and apple class del tag['class'] else: # remove completely tag.replaceWithChildren() # now images for 
tag in doc("img"): # turn file:/// links into relative ones try: if tag['src'].lower().startswith("file://"): tag['src'] = os.path.basename(tag['src']) if localize and self.isURL(tag['src']): # convert remote image links to local ones fname = self.urlToFile(tag['src']) if fname: tag['src'] = fname except KeyError: # for some bizarre reason, mnemosyne removes src elements # from missing media pass # strip all other attributes, including implicit max-width for attr, val in tag.attrs: if attr != "src": del tag[attr] # strip superfluous elements for elem in "html", "head", "body", "meta": for tag in doc(elem): tag.replaceWithChildren() html = unicode(doc) return html # Advanced menu ###################################################################### def onAdvanced(self): m = QMenu(self.mw) a = m.addAction(_("LaTeX")) a.setShortcut(QKeySequence("Ctrl+T, T")) a.connect(a, SIGNAL("triggered()"), self.insertLatex) a = m.addAction(_("LaTeX equation")) a.setShortcut(QKeySequence("Ctrl+T, E")) a.connect(a, SIGNAL("triggered()"), self.insertLatexEqn) a = m.addAction(_("LaTeX math env.")) a.setShortcut(QKeySequence("Ctrl+T, M")) a.connect(a, SIGNAL("triggered()"), self.insertLatexMathEnv) a = m.addAction(_("Edit HTML")) a.setShortcut(QKeySequence("Ctrl+Shift+X")) a.connect(a, SIGNAL("triggered()"), self.onHtmlEdit) m.exec_(QCursor.pos()) # LaTeX ###################################################################### def insertLatex(self): self.web.eval("wrap('[latex]', '[/latex]');") def insertLatexEqn(self): self.web.eval("wrap('[$]', '[/$]');") def insertLatexMathEnv(self): self.web.eval("wrap('[$$]', '[/$$]');") # Keyboard layout ###################################################################### def setupKeyboard(self): if isWin and self.mw.pm.profile['preserveKeyboard']: a = ctypes.windll.user32.ActivateKeyboardLayout a.restype = ctypes.c_void_p a.argtypes = [ctypes.c_void_p, ctypes.c_uint] g = ctypes.windll.user32.GetKeyboardLayout g.restype = ctypes.c_void_p 
g.argtypes = [ctypes.c_uint] else: a = g = None self.activateKeyboard = a self.getKeyboard = g def updateKeyboard(self): self.keyboardLayouts = {} def saveKeyboard(self): if not self.getKeyboard: return self.keyboardLayouts[self.currentField] = self.getKeyboard(0) def restoreKeyboard(self): if not self.getKeyboard: return if self.currentField in self.keyboardLayouts: self.activateKeyboard(self.keyboardLayouts[self.currentField], 0) # Pasting, drag & drop, and keyboard layouts ###################################################################### class EditorWebView(AnkiWebView): def __init__(self, parent, editor): AnkiWebView.__init__(self) self.editor = editor self.strip = self.editor.mw.pm.profile['stripHTML'] def keyPressEvent(self, evt): if evt.matches(QKeySequence.Paste): self.onPaste() return evt.accept() elif evt.matches(QKeySequence.Copy): self.onCopy() return evt.accept() elif evt.matches(QKeySequence.Cut): self.onCut() return evt.accept() QWebView.keyPressEvent(self, evt) def onCut(self): self.triggerPageAction(QWebPage.Cut) self._flagAnkiText() def onCopy(self): self.triggerPageAction(QWebPage.Copy) self._flagAnkiText() def onPaste(self): mime = self.mungeClip() self.triggerPageAction(QWebPage.Paste) self.restoreClip() def mouseReleaseEvent(self, evt): if not isMac and not isWin and evt.button() == Qt.MidButton: # middle click on x11; munge the clipboard before standard # handling mime = self.mungeClip(mode=QClipboard.Selection) AnkiWebView.mouseReleaseEvent(self, evt) self.restoreClip(mode=QClipboard.Selection) else: AnkiWebView.mouseReleaseEvent(self, evt) def focusInEvent(self, evt): window = False if evt.reason() in (Qt.ActiveWindowFocusReason, Qt.PopupFocusReason): # editor area got focus again; need to tell js not to adjust cursor self.eval("mouseDown++;") window = True AnkiWebView.focusInEvent(self, evt) if evt.reason() == Qt.TabFocusReason: self.eval("focusField(0);") elif evt.reason() == Qt.BacktabFocusReason: n = len(self.editor.note.fields) - 
1 self.eval("focusField(%d);" % n) elif window: self.eval("mouseDown--;") def dropEvent(self, evt): oldmime = evt.mimeData() # coming from this program? if evt.source(): if oldmime.hasHtml(): mime = QMimeData() mime.setHtml(self.editor._filterHTML(oldmime.html())) else: # old qt on linux won't give us html when dragging an image; # in that case just do the default action (which is to ignore # the drag) return AnkiWebView.dropEvent(self, evt) else: mime = self._processMime(oldmime) # create a new event with the new mime data and run it new = QDropEvent(evt.pos(), evt.possibleActions(), mime, evt.mouseButtons(), evt.keyboardModifiers()) evt.accept() QWebView.dropEvent(self, new) # tell the drop target to take focus so the drop contents are saved self.eval("dropTarget.focus();") self.setFocus() def mungeClip(self, mode=QClipboard.Clipboard): clip = self.editor.mw.app.clipboard() mime = clip.mimeData(mode=mode) self.saveClip(mode=mode) mime = self._processMime(mime) clip.setMimeData(mime, mode=mode) return mime def restoreClip(self, mode=QClipboard.Clipboard): clip = self.editor.mw.app.clipboard() clip.setMimeData(self.savedClip, mode=mode) def saveClip(self, mode): # we don't own the clipboard object, so we need to copy it or we'll crash mime = self.editor.mw.app.clipboard().mimeData(mode=mode) n = QMimeData() if mime.hasText(): n.setText(mime.text()) if mime.hasHtml(): n.setHtml(mime.html()) if mime.hasUrls(): n.setUrls(mime.urls()) if mime.hasImage(): n.setImageData(mime.imageData()) self.savedClip = n def _processMime(self, mime): # print "html=%s image=%s urls=%s txt=%s" % ( # mime.hasHtml(), mime.hasImage(), mime.hasUrls(), mime.hasText()) # print "html", mime.html() # print "urls", mime.urls() # print "text", mime.text() if mime.hasHtml(): return self._processHtml(mime) elif mime.hasUrls(): return self._processUrls(mime) elif mime.hasText(): return self._processText(mime) elif mime.hasImage(): return self._processImage(mime) else: # nothing return QMimeData() # 
when user is dragging a file from a file manager on any platform, the # url type should be set, and it is not URL-encoded. on a mac no text type # is returned, and on windows the text type is not returned in cases like # "foo's bar.jpg" def _processUrls(self, mime): url = mime.urls()[0].toString() # chrome likes to give us the URL twice with a \n url = url.splitlines()[0] newmime = QMimeData() link = self.editor.urlToLink(url) if link: newmime.setHtml(link) elif mime.hasImage(): # if we couldn't convert the url to a link and there's an # image on the clipboard (such as copy&paste from # google images in safari), use that instead return self._processImage(mime) else: newmime.setText(url) return newmime # if the user has used 'copy link location' in the browser, the clipboard # will contain the URL as text, and no URLs or HTML. the URL will already # be URL-encoded, and shouldn't be a file:// url unless they're browsing # locally, which we don't support def _processText(self, mime): txt = unicode(mime.text()) html = None # if the user is pasting an image or sound link, convert it to local if self.editor.isURL(txt): txt = txt.split("\r\n")[0] html = self.editor.urlToLink(txt) new = QMimeData() if html: new.setHtml(html) else: new.setText(txt) return new def _processHtml(self, mime): html = mime.html() newMime = QMimeData() if self.strip and not html.startswith("<!--anki-->"): # special case for google images: if after stripping there's no text # and there are image links, we'll paste those as html instead if not stripHTML(html).strip(): newHtml = "" mid = self.editor.note.mid for url in self.editor.mw.col.media.filesInStr( mid, html, includeRemote=True): newHtml += self.editor.urlToLink(url) if not newHtml and mime.hasImage(): return self._processImage(mime) newMime.setHtml(newHtml) else: # use .text() if available so newlines are preserved; otherwise strip if mime.hasText(): return self._processText(mime) else: newMime.setText(stripHTML(mime.text())) else: if 
html.startswith("<!--anki-->"): html = html[11:] # no html stripping html = self.editor._filterHTML(html, localize=True) newMime.setHtml(html) return newMime def _processImage(self, mime): im = QImage(mime.imageData()) uname = namedtmp("paste-%d" % im.cacheKey()) if self.editor.mw.pm.profile.get("pastePNG", False): ext = ".png" im.save(uname+ext, None, 50) else: ext = ".jpg" im.save(uname+ext, None, 80) # invalid image? if not os.path.exists(uname+ext): return QMimeData() mime = QMimeData() mime.setHtml(self.editor._addMedia(uname+ext)) return mime def _flagAnkiText(self): # add a comment in the clipboard html so we can tell text is copied # from us and doesn't need to be stripped clip = self.editor.mw.app.clipboard() mime = clip.mimeData() if not mime.hasHtml(): return html = mime.html() mime.setHtml("<!--anki-->" + mime.html()) def contextMenuEvent(self, evt): m = QMenu(self) a = m.addAction(_("Cut")) a.connect(a, SIGNAL("triggered()"), self.onCut) a = m.addAction(_("Copy")) a.connect(a, SIGNAL("triggered()"), self.onCopy) a = m.addAction(_("Paste")) a.connect(a, SIGNAL("triggered()"), self.onPaste) runHook("EditorWebView.contextMenuEvent", self, m) m.popup(QCursor.pos())
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/editor.py
editor.py
from operator import itemgetter from anki.consts import NEW_CARDS_RANDOM from aqt.qt import * import aqt from aqt.utils import showInfo, showWarning, openHelp, getOnlyText, askUser, \ tooltip, saveGeom, restoreGeom class DeckConf(QDialog): def __init__(self, mw, deck): QDialog.__init__(self, mw) self.mw = mw self.deck = deck self.childDids = [ d[1] for d in self.mw.col.decks.children(self.deck['id'])] self._origNewOrder = None self.form = aqt.forms.dconf.Ui_Dialog() self.form.setupUi(self) self.mw.checkpoint(_("Options")) self.setupCombos() self.setupConfs() self.setWindowModality(Qt.WindowModal) self.connect(self.form.buttonBox, SIGNAL("helpRequested()"), lambda: openHelp("deckoptions")) self.connect(self.form.confOpts, SIGNAL("clicked()"), self.confOpts) self.form.confOpts.setText(u"▾") self.connect(self.form.buttonBox.button(QDialogButtonBox.RestoreDefaults), SIGNAL("clicked()"), self.onRestore) self.setWindowTitle(_("Options for %s") % self.deck['name']) # qt doesn't size properly with altered fonts otherwise restoreGeom(self, "deckconf", adjustSize=True) self.show() self.exec_() saveGeom(self, "deckconf") def setupCombos(self): import anki.consts as cs f = self.form f.newOrder.addItems(cs.newCardOrderLabels().values()) self.connect(f.newOrder, SIGNAL("currentIndexChanged(int)"), self.onNewOrderChanged) # Conf list ###################################################################### def setupConfs(self): self.connect(self.form.dconf, SIGNAL("currentIndexChanged(int)"), self.onConfChange) self.conf = None self.loadConfs() def loadConfs(self): current = self.deck['conf'] self.confList = self.mw.col.decks.allConf() self.confList.sort(key=itemgetter('name')) startOn = 0 self.ignoreConfChange = True self.form.dconf.clear() for idx, conf in enumerate(self.confList): self.form.dconf.addItem(conf['name']) if str(conf['id']) == str(current): startOn = idx self.ignoreConfChange = False self.form.dconf.setCurrentIndex(startOn) if self._origNewOrder is None: 
self._origNewOrder = self.confList[startOn]['new']['order'] self.onConfChange(startOn) def confOpts(self): m = QMenu(self.mw) a = m.addAction(_("Add")) a.connect(a, SIGNAL("triggered()"), self.addGroup) a = m.addAction(_("Delete")) a.connect(a, SIGNAL("triggered()"), self.remGroup) a = m.addAction(_("Rename")) a.connect(a, SIGNAL("triggered()"), self.renameGroup) a = m.addAction(_("Set for all subdecks")) a.connect(a, SIGNAL("triggered()"), self.setChildren) if not self.childDids: a.setEnabled(False) m.exec_(QCursor.pos()) def onConfChange(self, idx): if self.ignoreConfChange: return if self.conf: self.saveConf() conf = self.confList[idx] self.deck['conf'] = conf['id'] self.loadConf() cnt = 0 for d in self.mw.col.decks.all(): if d['dyn']: continue if d['conf'] == conf['id']: cnt += 1 if cnt > 1: txt = _("Your changes will affect multiple decks. If you wish to " "change only the current deck, please add a new options group first.") else: txt = "" self.form.count.setText(txt) def addGroup(self): name = getOnlyText(_("New options group name:")) if not name: return # first, save currently entered data to current conf self.saveConf() # then clone the conf id = self.mw.col.decks.confId(name, cloneFrom=self.conf) # set the deck to the new conf self.deck['conf'] = id # then reload the conf list self.loadConfs() def remGroup(self): if self.conf['id'] == 1: showInfo(_("The default configuration can't be removed."), self) else: self.mw.col.decks.remConf(self.conf['id']) self.deck['conf'] = 1 self.loadConfs() def renameGroup(self): old = self.conf['name'] name = getOnlyText(_("New name:"), default=old) if not name or name == old: return self.conf['name'] = name self.loadConfs() def setChildren(self): if not askUser( _("Set all decks below %s to this option group?") % self.deck['name']): return for did in self.childDids: deck = self.mw.col.decks.get(did) if deck['dyn']: continue deck['conf'] = self.deck['conf'] self.mw.col.decks.save(deck) tooltip(ngettext("%d deck updated.", 
"%d decks updated.", \ len(self.childDids)) % len(self.childDids)) # Loading ################################################## def listToUser(self, l): return " ".join([str(x) for x in l]) def parentLimText(self, type="new"): # top level? if "::" not in self.deck['name']: return "" lim = -1 for d in self.mw.col.decks.parents(self.deck['id']): c = self.mw.col.decks.confForDid(d['id']) x = c[type]['perDay'] if lim == -1: lim = x else: lim = min(x, lim) return _("(parent limit: %d)") % lim def loadConf(self): self.conf = self.mw.col.decks.confForDid(self.deck['id']) # new c = self.conf['new'] f = self.form f.lrnSteps.setText(self.listToUser(c['delays'])) f.lrnGradInt.setValue(c['ints'][0]) f.lrnEasyInt.setValue(c['ints'][1]) f.lrnEasyInt.setValue(c['ints'][1]) f.lrnFactor.setValue(c['initialFactor']/10.0) f.newOrder.setCurrentIndex(c['order']) f.newPerDay.setValue(c['perDay']) f.bury.setChecked(c.get("bury", True)) f.newplim.setText(self.parentLimText('new')) # rev c = self.conf['rev'] f.revPerDay.setValue(c['perDay']) f.easyBonus.setValue(c['ease4']*100) f.fi1.setValue(c['ivlFct']*100) f.maxIvl.setValue(c['maxIvl']) f.revplim.setText(self.parentLimText('rev')) f.buryRev.setChecked(c.get("bury", True)) # lapse c = self.conf['lapse'] f.lapSteps.setText(self.listToUser(c['delays'])) f.lapMult.setValue(c['mult']*100) f.lapMinInt.setValue(c['minInt']) f.leechThreshold.setValue(c['leechFails']) f.leechAction.setCurrentIndex(c['leechAction']) # general c = self.conf f.maxTaken.setValue(c['maxTaken']) f.showTimer.setChecked(c.get('timer', 0)) f.autoplaySounds.setChecked(c['autoplay']) f.replayQuestion.setChecked(c.get('replayq', True)) # description f.desc.setPlainText(self.deck['desc']) def onRestore(self): self.mw.progress.start() self.mw.col.decks.restoreToDefault(self.conf) self.mw.progress.finish() self.loadConf() # New order ################################################## def onNewOrderChanged(self, new): old = self.conf['new']['order'] if old == new: return 
self.conf['new']['order'] = new self.mw.progress.start() self.mw.col.sched.resortConf(self.conf) self.mw.progress.finish() # Saving ################################################## def updateList(self, conf, key, w, minSize=1): items = unicode(w.text()).split(" ") ret = [] for i in items: if not i: continue try: i = float(i) assert i > 0 if i == int(i): i = int(i) ret.append(i) except: # invalid, don't update showWarning(_("Steps must be numbers.")) return if len(ret) < minSize: showWarning(_("At least one step is required.")) return conf[key] = ret def saveConf(self): # new c = self.conf['new'] f = self.form self.updateList(c, 'delays', f.lrnSteps) c['ints'][0] = f.lrnGradInt.value() c['ints'][1] = f.lrnEasyInt.value() c['initialFactor'] = f.lrnFactor.value()*10 c['order'] = f.newOrder.currentIndex() c['perDay'] = f.newPerDay.value() c['bury'] = f.bury.isChecked() if self._origNewOrder != c['order']: # order of current deck has changed, so have to resort if c['order'] == NEW_CARDS_RANDOM: self.mw.col.sched.randomizeCards(self.deck['id']) else: self.mw.col.sched.orderCards(self.deck['id']) # rev c = self.conf['rev'] c['perDay'] = f.revPerDay.value() c['ease4'] = f.easyBonus.value()/100.0 c['ivlFct'] = f.fi1.value()/100.0 c['maxIvl'] = f.maxIvl.value() c['bury'] = f.buryRev.isChecked() # lapse c = self.conf['lapse'] self.updateList(c, 'delays', f.lapSteps, minSize=0) c['mult'] = f.lapMult.value()/100.0 c['minInt'] = f.lapMinInt.value() c['leechFails'] = f.leechThreshold.value() c['leechAction'] = f.leechAction.currentIndex() # general c = self.conf c['maxTaken'] = f.maxTaken.value() c['timer'] = f.showTimer.isChecked() and 1 or 0 c['autoplay'] = f.autoplaySounds.isChecked() c['replayq'] = f.replayQuestion.isChecked() # description self.deck['desc'] = f.desc.toPlainText() self.mw.col.decks.save(self.deck) self.mw.col.decks.save(self.conf) def reject(self): self.accept() def accept(self): self.saveConf() self.mw.reset() QDialog.accept(self)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/deckconf.py
deckconf.py
from __future__ import division import socket import time import traceback import gc from aqt.qt import * import aqt from anki import Collection from anki.sync import Syncer, RemoteServer, FullSyncer, MediaSyncer, \ RemoteMediaServer from anki.hooks import addHook, remHook from aqt.utils import tooltip, askUserDialog, showWarning, showText, showInfo # Sync manager ###################################################################### class SyncManager(QObject): def __init__(self, mw, pm): QObject.__init__(self, mw) self.mw = mw self.pm = pm def sync(self): if not self.pm.profile['syncKey']: auth = self._getUserPass() if not auth: return self.pm.profile['syncUser'] = auth[0] self._sync(auth) else: self._sync() def _sync(self, auth=None): # to avoid gui widgets being garbage collected in the worker thread, # run gc in advance self._didFullUp = False self._didError = False gc.collect() # create the thread, setup signals and start running t = self.thread = SyncThread( self.pm.collectionPath(), self.pm.profile['syncKey'], auth=auth, media=self.pm.profile['syncMedia']) self.connect(t, SIGNAL("event"), self.onEvent) self.label = _("Connecting...") self.mw.progress.start(immediate=True, label=self.label) self.sentBytes = self.recvBytes = 0 self._updateLabel() self.thread.start() while not self.thread.isFinished(): self.mw.app.processEvents() self.thread.wait(100) self.mw.progress.finish() if self.thread.syncMsg: showText(self.thread.syncMsg) if self.thread.uname: self.pm.profile['syncUser'] = self.thread.uname def delayedInfo(): if self._didFullUp and not self._didError: showInfo(_("""\ Your collection was successfully uploaded to AnkiWeb. If you use any other devices, please sync them now, and choose \ to download the collection you have just uploaded from this computer. 
\ After doing so, future reviews and added cards will be merged \ automatically.""")) self.mw.progress.timer(1000, delayedInfo, False) def _updateLabel(self): self.mw.progress.update(label="%s\n%s" % ( self.label, _("%(a)dkB up, %(b)dkB down") % dict( a=self.sentBytes // 1024, b=self.recvBytes // 1024))) def onEvent(self, evt, *args): pu = self.mw.progress.update if evt == "badAuth": tooltip( _("AnkiWeb ID or password was incorrect; please try again."), parent=self.mw) # blank the key so we prompt user again self.pm.profile['syncKey'] = None self.pm.save() elif evt == "corrupt": pass elif evt == "newKey": self.pm.profile['syncKey'] = args[0] self.pm.save() elif evt == "offline": tooltip(_("Syncing failed; internet offline.")) elif evt == "upbad": self._didFullUp = False self._checkFailed() elif evt == "sync": m = None; t = args[0] if t == "login": m = _("Syncing...") elif t == "upload": self._didFullUp = True m = _("Uploading to AnkiWeb...") elif t == "download": m = _("Downloading from AnkiWeb...") elif t == "sanity": m = _("Checking...") elif t == "findMedia": m = _("Syncing Media...") elif t == "upgradeRequired": showText(_("""\ Please visit AnkiWeb, upgrade your deck, then try again.""")) if m: self.label = m self._updateLabel() elif evt == "syncMsg": self.label = args[0] self._updateLabel() elif evt == "error": self._didError = True showText(_("Syncing failed:\n%s")% self._rewriteError(args[0])) elif evt == "clockOff": self._clockOff() elif evt == "checkFailed": self._checkFailed() elif evt == "mediaSanity": showWarning(_("""\ A problem occurred while syncing media. 
Please use Tools>Check Media, then \ sync again to correct the issue.""")) elif evt == "noChanges": pass elif evt == "fullSync": self._confirmFullSync() elif evt == "send": # posted events not guaranteed to arrive in order self.sentBytes = max(self.sentBytes, args[0]) self._updateLabel() elif evt == "recv": self.recvBytes = max(self.recvBytes, args[0]) self._updateLabel() def _rewriteError(self, err): if "Errno 61" in err: return _("""\ Couldn't connect to AnkiWeb. Please check your network connection \ and try again.""") elif "timed out" in err or "10060" in err: return _("""\ The connection to AnkiWeb timed out. Please check your network \ connection and try again.""") elif "code: 500" in err: return _("""\ AnkiWeb encountered an error. Please try again in a few minutes, and if \ the problem persists, please file a bug report.""") elif "code: 501" in err: return _("""\ Please upgrade to the latest version of Anki.""") # 502 is technically due to the server restarting, but we reuse the # error message elif "code: 502" in err: return _("AnkiWeb is under maintenance. Please try again in a few minutes.") elif "code: 503" in err: return _("""\ AnkiWeb is too busy at the moment. Please try again in a few minutes.""") elif "code: 504" in err: return _("504 gateway timeout error received. Please try temporarily disabling your antivirus.") elif "code: 409" in err: return _("Only one client can access AnkiWeb at a time. If a previous sync failed, please try again in a few minutes.") elif "10061" in err or "10013" in err or "10053" in err: return _( "Antivirus or firewall software is preventing Anki from connecting to the internet.") elif "10054" in err or "Broken pipe" in err: return _("Connection timed out. Either your internet connection is experiencing problems, or you have a very large file in your media folder.") elif "Unable to find the server" in err: return _( "Server not found. 
Either your connection is down, or antivirus/firewall " "software is blocking Anki from connecting to the internet.") elif "code: 407" in err: return _("Proxy authentication required.") elif "code: 413" in err: return _("Your collection or a media file is too large to sync.") elif "EOF occurred in violation of protocol" in err: return _("Error establishing a secure connection. This is usually caused by antivirus, firewall or VPN software, or problems with your ISP.") elif "certificate verify failed" in err: return _("Error establishing a secure connection. This is usually caused by antivirus, firewall or VPN software, or problems with your ISP.") return err def _getUserPass(self): d = QDialog(self.mw) d.setWindowTitle("Anki") d.setWindowModality(Qt.WindowModal) vbox = QVBoxLayout() l = QLabel(_("""\ <h1>Account Required</h1> A free account is required to keep your collection synchronized. Please \ <a href="%s">sign up</a> for an account, then \ enter your details below.""") % "https://ankiweb.net/account/login") l.setOpenExternalLinks(True) l.setWordWrap(True) vbox.addWidget(l) vbox.addSpacing(20) g = QGridLayout() l1 = QLabel(_("AnkiWeb ID:")) g.addWidget(l1, 0, 0) user = QLineEdit() g.addWidget(user, 0, 1) l2 = QLabel(_("Password:")) g.addWidget(l2, 1, 0) passwd = QLineEdit() passwd.setEchoMode(QLineEdit.Password) g.addWidget(passwd, 1, 1) vbox.addLayout(g) bb = QDialogButtonBox(QDialogButtonBox.Ok|QDialogButtonBox.Cancel) bb.button(QDialogButtonBox.Ok).setAutoDefault(True) self.connect(bb, SIGNAL("accepted()"), d.accept) self.connect(bb, SIGNAL("rejected()"), d.reject) vbox.addWidget(bb) d.setLayout(vbox) d.show() accepted = d.exec_() u = user.text() p = passwd.text() if not accepted or not u or not p: return return (u, p) def _confirmFullSync(self): diag = askUserDialog(_("""\ Your decks here and on AnkiWeb differ in such a way that they can't \ be merged together, so it's necessary to overwrite the decks on one \ side with the decks from the other. 
If you choose download, Anki will download the collection from AnkiWeb, \ and any changes you have made on your computer since the last sync will \ be lost. If you choose upload, Anki will upload your collection to AnkiWeb, and \ any changes you have made on AnkiWeb or your other devices since the \ last sync to this device will be lost. After all devices are in sync, future reviews and added cards can be merged \ automatically."""), [_("Upload to AnkiWeb"), _("Download from AnkiWeb"), _("Cancel")]) diag.setDefault(2) ret = diag.run() if ret == _("Upload to AnkiWeb"): self.thread.fullSyncChoice = "upload" elif ret == _("Download from AnkiWeb"): self.thread.fullSyncChoice = "download" else: self.thread.fullSyncChoice = "cancel" def _clockOff(self): showWarning(_("""\ Syncing requires the clock on your computer to be set correctly. Please \ fix the clock and try again.""")) def _checkFailed(self): showWarning(_("""\ Your collection is in an inconsistent state. Please run Tools>\ Check Database, then sync again.""")) def badUserPass(self): aqt.preferences.Preferences(self, self.pm.profile).dialog.tabWidget.\ setCurrentIndex(1) # Sync thread ###################################################################### class SyncThread(QThread): def __init__(self, path, hkey, auth=None, media=True): QThread.__init__(self) self.path = path self.hkey = hkey self.auth = auth self.media = media def run(self): # init this first so an early crash doesn't cause an error # in the main thread self.syncMsg = "" self.uname = "" try: self.col = Collection(self.path, log=True) except: self.fireEvent("corrupt") return self.server = RemoteServer(self.hkey) self.client = Syncer(self.col, self.server) self.sentTotal = 0 self.recvTotal = 0 # throttle updates; qt doesn't handle lots of posted events well self.byteUpdate = time.time() def syncEvent(type): self.fireEvent("sync", type) def syncMsg(msg): self.fireEvent("syncMsg", msg) def canPost(): if (time.time() - self.byteUpdate) > 0.1: 
self.byteUpdate = time.time() return True def sendEvent(bytes): self.sentTotal += bytes if canPost(): self.fireEvent("send", self.sentTotal) def recvEvent(bytes): self.recvTotal += bytes if canPost(): self.fireEvent("recv", self.recvTotal) addHook("sync", syncEvent) addHook("syncMsg", syncMsg) addHook("httpSend", sendEvent) addHook("httpRecv", recvEvent) # run sync and catch any errors try: self._sync() except: err = traceback.format_exc() if not isinstance(err, unicode): err = unicode(err, "utf8", "replace") self.fireEvent("error", err) finally: # don't bump mod time unless we explicitly save self.col.close(save=False) remHook("sync", syncEvent) remHook("syncMsg", syncMsg) remHook("httpSend", sendEvent) remHook("httpRecv", recvEvent) def _sync(self): if self.auth: # need to authenticate and obtain host key self.hkey = self.server.hostKey(*self.auth) if not self.hkey: # provided details were invalid return self.fireEvent("badAuth") else: # write new details and tell calling thread to save self.fireEvent("newKey", self.hkey) # run sync and check state try: ret = self.client.sync() except Exception, e: log = traceback.format_exc() err = repr(str(e)) if ("Unable to find the server" in err or "Errno 2" in err): self.fireEvent("offline") else: if not err: err = log if not isinstance(err, unicode): err = unicode(err, "utf8", "replace") self.fireEvent("error", err) return if ret == "badAuth": return self.fireEvent("badAuth") elif ret == "clockOff": return self.fireEvent("clockOff") elif ret == "basicCheckFailed" or ret == "sanityCheckFailed": return self.fireEvent("checkFailed") # full sync? 
if ret == "fullSync": return self._fullSync() # save and note success state if ret == "noChanges": self.fireEvent("noChanges") elif ret == "success": self.fireEvent("success") elif ret == "serverAbort": pass else: self.fireEvent("error", "Unknown sync return code.") self.syncMsg = self.client.syncMsg self.uname = self.client.uname # then move on to media sync self._syncMedia() def _fullSync(self): # if the local deck is empty, assume user is trying to download if self.col.isEmpty(): f = "download" else: # tell the calling thread we need a decision on sync direction, and # wait for a reply self.fullSyncChoice = False self.fireEvent("fullSync") while not self.fullSyncChoice: time.sleep(0.1) f = self.fullSyncChoice if f == "cancel": return self.client = FullSyncer(self.col, self.hkey, self.server.con) if f == "upload": if not self.client.upload(): self.fireEvent("upbad") else: self.client.download() # reopen db and move on to media sync self.col.reopen() self._syncMedia() def _syncMedia(self): if not self.media: return self.server = RemoteMediaServer(self.col, self.hkey, self.server.con) self.client = MediaSyncer(self.col, self.server) ret = self.client.sync() if ret == "noChanges": self.fireEvent("noMediaChanges") elif ret == "sanityCheckFailed": self.fireEvent("mediaSanity") else: self.fireEvent("mediaSuccess") def fireEvent(self, *args): self.emit(SIGNAL("event"), *args) # Monkey-patch httplib & httplib2 so we can get progress info ###################################################################### CHUNK_SIZE = 65536 import httplib, httplib2 from cStringIO import StringIO from anki.hooks import runHook # sending in httplib def _incrementalSend(self, data): """Send `data' to the server.""" if self.sock is None: if self.auto_open: self.connect() else: raise httplib.NotConnected() # if it's not a file object, make it one if not hasattr(data, 'read'): if isinstance(data, unicode): data = data.encode("utf8") data = StringIO(data) while 1: block = 
data.read(CHUNK_SIZE) if not block: break self.sock.sendall(block) runHook("httpSend", len(block)) httplib.HTTPConnection.send = _incrementalSend # receiving in httplib2 # this is an augmented version of httplib's request routine that: # - doesn't assume requests will be tried more than once # - calls a hook for each chunk of data so we can update the gui # - retries only when keep-alive connection is closed def _conn_request(self, conn, request_uri, method, body, headers): for i in range(2): try: if conn.sock is None: conn.connect() conn.request(method, request_uri, body, headers) except socket.timeout: raise except socket.gaierror: conn.close() raise httplib2.ServerNotFoundError( "Unable to find the server at %s" % conn.host) except httplib2.ssl_SSLError: conn.close() raise except socket.error, e: conn.close() raise except httplib.HTTPException: conn.close() raise try: response = conn.getresponse() except httplib.BadStatusLine: print "retry bad line" conn.close() conn.connect() continue except (socket.error, httplib.HTTPException): raise else: content = "" if method == "HEAD": response.close() else: buf = StringIO() while 1: data = response.read(CHUNK_SIZE) if not data: break buf.write(data) runHook("httpRecv", len(data)) content = buf.getvalue() response = httplib2.Response(response) if method != "HEAD": content = httplib2._decompressContent(response, content) return (response, content) httplib2.Http._conn_request = _conn_request
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/sync.py
sync.py
from aqt.qt import * from anki.hooks import addHook, remHook from aqt.utils import shortcut class DeckChooser(QHBoxLayout): def __init__(self, mw, widget, label=True, start=None): QHBoxLayout.__init__(self) self.widget = widget self.mw = mw self.deck = mw.col self.label = label self.setMargin(0) self.setSpacing(8) self.setupDecks() self.widget.setLayout(self) addHook('currentModelChanged', self.onModelChange) def setupDecks(self): if self.label: self.deckLabel = QLabel(_("Deck")) self.addWidget(self.deckLabel) # decks box self.deck = QPushButton() self.deck.setToolTip(shortcut(_("Target Deck (Ctrl+D)"))) s = QShortcut(QKeySequence(_("Ctrl+D")), self.widget) s.connect(s, SIGNAL("activated()"), self.onDeckChange) self.addWidget(self.deck) self.connect(self.deck, SIGNAL("clicked()"), self.onDeckChange) # starting label if self.mw.col.conf.get("addToCur", True): col = self.mw.col did = col.conf['curDeck'] if col.decks.isDyn(did): # if they're reviewing, try default to current card c = self.mw.reviewer.card if self.mw.state == "review" and c: if not c.odid: did = c.did else: did = c.odid else: did = 1 self.deck.setText(self.mw.col.decks.nameOrNone( did) or _("Default")) else: self.deck.setText(self.mw.col.decks.nameOrNone( self.mw.col.models.current()['did']) or _("Default")) # layout sizePolicy = QSizePolicy( QSizePolicy.Policy(7), QSizePolicy.Policy(0)) self.deck.setSizePolicy(sizePolicy) def show(self): self.widget.show() def hide(self): self.widget.hide() def cleanup(self): remHook('currentModelChanged', self.onModelChange) def onModelChange(self): if not self.mw.col.conf.get("addToCur", True): self.deck.setText(self.mw.col.decks.nameOrNone( self.mw.col.models.current()['did']) or _("Default")) def onDeckChange(self): from aqt.studydeck import StudyDeck current = self.deck.text() ret = StudyDeck( self.mw, current=current, accept=_("Choose"), title=_("Choose Deck"), help="addingnotes", cancel=False, parent=self.widget, geomKey="selectDeck") 
self.deck.setText(ret.name) def selectedId(self): # save deck name name = self.deck.text() if not name.strip(): did = 1 else: did = self.mw.col.decks.id(name) return did
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/deckchooser.py
deckchooser.py
from __future__ import division
import difflib
import re
import cgi
import unicodedata as ucd
import HTMLParser
from anki.lang import _, ngettext
from aqt.qt import *
from anki.utils import stripHTML, isMac, json
from anki.hooks import addHook, runHook
from anki.sound import playFromText, clearAudioQueue, play
from aqt.utils import mungeQA, getBase, openLink, tooltip, askUserDialog
from aqt.sound import getAudio
import aqt


class Reviewer(object):
    "Manage reviews.  Maintains a separate state."

    def __init__(self, mw):
        self.mw = mw
        self.web = mw.web
        self.card = None
        # cards re-queued by undo/edit, shown before the scheduler's cards
        self.cardQueue = []
        self.hadCardQueue = False
        self._answeredIds = []
        self._recordedAudio = None
        self.typeCorrect = None # web init happens before this is set
        self.state = None
        self.bottom = aqt.toolbar.BottomBar(mw, mw.bottomWeb)
        # qshortcut so we don't autorepeat
        self.delShortcut = QShortcut(QKeySequence("Delete"), self.mw)
        self.delShortcut.setAutoRepeat(False)
        self.mw.connect(self.delShortcut, SIGNAL("activated()"), self.onDelete)
        addHook("leech", self.onLeech)

    def show(self):
        """Enter review state: install handlers, size the bottom bar,
        and fetch the first card."""
        self.mw.col.reset()
        self.mw.keyHandler = self._keyHandler
        self.web.setLinkHandler(self._linkHandler)
        self.web.setKeyHandler(self._catchEsc)
        if isMac:
            self.bottom.web.setFixedHeight(46)
        else:
            self.bottom.web.setFixedHeight(52+self.mw.fontHeightDelta*4)
        self.bottom.web.setLinkHandler(self._linkHandler)
        self._reps = None
        self.nextCard()

    def lastCard(self):
        """Return the most recently answered card, or None."""
        if self._answeredIds:
            if not self.card or self._answeredIds[-1] != self.card.id:
                try:
                    return self.mw.col.getCard(self._answeredIds[-1])
                except TypeError:
                    # id was deleted
                    return

    def cleanup(self):
        runHook("reviewCleanup")

    # Fetching a card
    ##########################################################################

    def nextCard(self):
        """Advance to the next card, honouring the timebox and the
        undo/edit queue; leaves review when the scheduler is empty."""
        elapsed = self.mw.col.timeboxReached()
        if elapsed:
            part1 = ngettext(
                "%d card studied in", "%d cards studied in",
                elapsed[1]) % elapsed[1]
            mins = int(round(elapsed[0]/60))
            part2 = ngettext("%s minute.", "%s minutes.", mins) % mins
            fin = _("Finish")
            diag = askUserDialog(
                "%s %s" % (part1, part2), [_("Continue"), fin])
            diag.setIcon(QMessageBox.Information)
            if diag.run() == fin:
                return self.mw.moveToState("deckBrowser")
            self.mw.col.startTimebox()
        if self.cardQueue:
            # undone/edited cards to show
            c = self.cardQueue.pop()
            c.startTimer()
            self.hadCardQueue = True
        else:
            if self.hadCardQueue:
                # the undone/edited cards may be sitting in the regular queue;
                # need to reset
                self.mw.col.reset()
                self.hadCardQueue = False
            c = self.mw.col.sched.getCard()
        self.card = c
        clearAudioQueue()
        if not c:
            self.mw.moveToState("overview")
            return
        if self._reps is None or self._reps % 100 == 0:
            # we recycle the webview periodically so webkit can free memory
            self._initWeb()
        else:
            self._showQuestion()

    # Audio
    ##########################################################################

    def replayAudio(self, previewer=None):
        """Replay the current side's audio; on the answer side, optionally
        include the question audio per the deck's 'replayq' option."""
        if previewer:
            state = previewer._previewState
            c = previewer.card
        else:
            state = self.state
            c = self.card
        clearAudioQueue()
        if state == "question":
            playFromText(c.q())
        elif state == "answer":
            txt = ""
            if self._replayq(c, previewer):
                txt = c.q()
            txt += c.a()
            playFromText(txt)

    # Initializing the webview
    ##########################################################################

    # HTML/JS scaffold for the main review webview; Python talks to it via
    # web.eval(), JS calls back through py.link().
    _revHtml = """
<img src="qrc:/icons/rating.png" id=star class=marked>
<div id=qa></div>
<script>
var ankiPlatform = "desktop";
var typeans;
function _updateQA (q, answerMode, klass) {
    $("#qa").html(q);
    typeans = document.getElementById("typeans");
    if (typeans) {
        typeans.focus();
    }
    if (answerMode) {
        var e = $("#answer");
        if (e[0]) { e[0].scrollIntoView(); }
    } else {
        window.scrollTo(0, 0);
    }
    if (klass) {
        document.body.className = klass;
    }
    // don't allow drags of images, which cause them to be deleted
    $("img").attr("draggable", false);
};
function _toggleStar (show) {
    if (show) {
        $(".marked").show();
    } else {
        $(".marked").hide();
    }
}
function _getTypedText () {
    if (typeans) {
        py.link("typeans:"+typeans.value);
    }
};
function _typeAnsPress() {
    if (window.event.keyCode === 13) {
        py.link("ansHack");
    }
}
</script>
"""

    def _initWeb(self):
        """(Re)build both webviews; the question is shown from the load
        callbacks once the HTML is ready."""
        self._reps = 0
        self._bottomReady = False
        base = getBase(self.mw.col)
        # main window
        self.web.stdHtml(self._revHtml, self._styles(),
            loadCB=lambda x: self._showQuestion(), head=base)
        # show answer / ease buttons
        self.bottom.web.show()
        self.bottom.web.stdHtml(
            self._bottomHTML(),
            self.bottom._css + self._bottomCSS,
            loadCB=lambda x: self._showAnswerButton())

    # Showing the question
    ##########################################################################

    def _mungeQA(self, buf):
        # media/LaTeX munging first, then the type-answer substitution
        return self.typeAnsFilter(mungeQA(self.mw.col, buf))

    def _showQuestion(self):
        """Render the question side into the main webview."""
        self._reps += 1
        self.state = "question"
        self.typedAnswer = None
        c = self.card
        # grab the question and play audio
        if c.isEmpty():
            q = _("""\
The front of this card is empty. Please run Tools>Empty Cards.""")
        else:
            q = c.q()
        if self.autoplay(c):
            playFromText(q)
        # render & update bottom
        q = self._mungeQA(q)
        klass = "card card%d" % (c.ord+1)
        self.web.eval("_updateQA(%s, false, '%s');" % (json.dumps(q), klass))
        self._toggleStar()
        if self._bottomReady:
            self._showAnswerButton()
        # if we have a type answer field, focus main web
        if self.typeCorrect:
            self.mw.web.setFocus()
        # user hook
        runHook('showQuestion')

    def autoplay(self, card):
        """True if the card's deck options have autoplay enabled."""
        return self.mw.col.decks.confForDid(
            card.odid or card.did)['autoplay']

    def _replayq(self, card, previewer=None):
        # 'replayq': also replay question audio on the answer side
        s = previewer if previewer else self
        return s.mw.col.decks.confForDid(
            s.card.odid or s.card.did).get('replayq', True)

    def _toggleStar(self):
        # show/hide the "marked" star based on the note's tag
        self.web.eval("_toggleStar(%s);" % json.dumps(
            self.card.note().hasTag("marked")))

    # Showing the answer
    ##########################################################################

    def _showAnswer(self):
        """Render the answer side and swap in the ease buttons."""
        if self.mw.state != "review":
            # showing resetRequired screen; ignore space
            return
        self.state = "answer"
        c = self.card
        a = c.a()
        # play audio?
        if self.autoplay(c):
            playFromText(a)
        # render and update bottom
        a = self._mungeQA(a)
        self.web.eval("_updateQA(%s, true);" % json.dumps(a))
        self._showEaseButtons()
        # user hook
        runHook('showAnswer')

    # Answering a card
    ############################################################

    def _answerCard(self, ease):
        "Reschedule card and show next."
        if self.mw.state != "review":
            # showing resetRequired screen; ignore key
            return
        if self.state != "answer":
            return
        if self.mw.col.sched.answerButtons(self.card) < ease:
            # ease out of range for this card's button count; ignore
            return
        self.mw.col.sched.answerCard(self.card, ease)
        self._answeredIds.append(self.card.id)
        self.mw.autosave()
        self.nextCard()

    # Handlers
    ############################################################

    def _catchEsc(self, evt):
        # blur the type-answer box so Esc can close review cleanly
        if evt.key() == Qt.Key_Escape:
            self.web.eval("$('#typeans').blur();")
            return True

    def _showAnswerHack(self):
        # on <qt4.8, calling _showAnswer() directly fails to show images on
        # the answer side. But if we trigger it via the bottom web's python
        # link, it inexplicably works.
        self.bottom.web.eval("py.link('ans');")

    def _keyHandler(self, evt):
        """Global keyboard dispatch while reviewing."""
        key = unicode(evt.text())
        if key == "e":
            self.mw.onEditCurrent()
        elif (key == " " or evt.key() in (Qt.Key_Return, Qt.Key_Enter)):
            if self.state == "question":
                self._showAnswerHack()
            elif self.state == "answer":
                self._answerCard(self._defaultEase())
        elif key == "r" or evt.key() == Qt.Key_F5:
            self.replayAudio()
        elif key == "*":
            self.onMark()
        elif key == "=":
            self.onBuryNote()
        elif key == "-":
            self.onBuryCard()
        elif key == "!":
            self.onSuspend()
        elif key == "@":
            self.onSuspendCard()
        elif key == "V":
            self.onRecordVoice()
        elif key == "o":
            self.onOptions()
        elif key in ("1", "2", "3", "4"):
            self._answerCard(int(key))
        elif key == "v":
            self.onReplayRecorded()

    def _linkHandler(self, url):
        """Dispatch py.link() callbacks coming from the webviews."""
        if url == "ans":
            self._showAnswer()
        elif url == "ansHack":
            self.mw.progress.timer(100, self._showAnswerHack, False)
        elif url.startswith("ease"):
            self._answerCard(int(url[4:]))
        elif url == "edit":
            self.mw.onEditCurrent()
        elif url == "more":
            self.showContextMenu()
        elif url.startswith("typeans:"):
            # the JS side reports the typed answer through this link
            (cmd, arg) = url.split(":", 1)
            self.typedAnswer = arg
        else:
            openLink(url)

    # CSS
    ##########################################################################

    _css = """
hr { background-color:#ccc; margin: 1em; }
body { margin:1.5em; }
img { max-width: 95%; max-height: 95%; }
.marked { position:fixed; right: 7px; top: 7px; display: none; }
#typeans { width: 100%; }
.typeGood { background: #0f0; }
.typeBad { background: #f00; }
.typeMissed { background: #ccc; }
"""

    def _styles(self):
        return self._css

    # Type in the answer
    ##########################################################################

    # matches a {{type:Field}} directive in a rendered template
    typeAnsPat = "\[\[type:(.+?)\]\]"

    def typeAnsFilter(self, buf):
        if self.state == "question":
            return self.typeAnsQuestionFilter(buf)
        else:
            return self.typeAnsAnswerFilter(buf)

    def typeAnsQuestionFilter(self, buf):
        """Replace the type-answer directive with an <input>, remembering
        the expected answer and its font for the answer side."""
        self.typeCorrect = None
        clozeIdx = None
        m = re.search(self.typeAnsPat, buf)
        if not m:
            return buf
        fld = m.group(1)
        # if it's a cloze, extract data
        if fld.startswith("cloze:"):
            # get field and cloze position
            clozeIdx = self.card.ord + 1
            fld = fld.split(":")[1]
        # loop through fields for a match
        for f in self.card.model()['flds']:
            if f['name'] == fld:
                self.typeCorrect = self.card.note()[f['name']]
                if clozeIdx:
                    # narrow to cloze
                    self.typeCorrect = self._contentForCloze(
                        self.typeCorrect, clozeIdx)
                self.typeFont = f['font']
                self.typeSize = f['size']
                break
        if not self.typeCorrect:
            if self.typeCorrect is None:
                if clozeIdx:
                    warn = _("""\
Please run Tools>Empty Cards""")
                else:
                    warn = _("Type answer: unknown field %s") % fld
                return re.sub(self.typeAnsPat, warn, buf)
            else:
                # empty field, remove type answer pattern
                return re.sub(self.typeAnsPat, "", buf)
        return re.sub(self.typeAnsPat, """
<center>
<input type=text id=typeans onkeypress="_typeAnsPress();"
   style="font-family: '%s'; font-size: %spx;">
</center>
""" % (self.typeFont, self.typeSize), buf)

    def typeAnsAnswerFilter(self, buf):
        """Replace the directive on the answer side with a coloured diff of
        typed vs. correct answer."""
        # tell webview to call us back with the input content
        self.web.eval("_getTypedText();")
        if not self.typeCorrect:
            return re.sub(self.typeAnsPat, "", buf)
        origSize = len(buf)
        buf = buf.replace("<hr id=answer>", "")
        hadHR = len(buf) != origSize
        # munge correct value
        parser = HTMLParser.HTMLParser()
        cor = stripHTML(self.mw.col.media.strip(self.typeCorrect))
        # ensure we don't chomp multiple whitespace
        cor = cor.replace(" ", "&nbsp;")
        cor = parser.unescape(cor)
        cor = cor.replace(u"\xa0", " ")
        given = self.typedAnswer
        # compare with typed answer
        res = self.correct(given, cor, showBad=False)
        # and update the type answer area
        def repl(match):
            # can't pass a string in directly, and can't use re.escape as it
            # escapes too much
            s = """
<span style="font-family: '%s'; font-size: %spx">%s</span>""" % (
                self.typeFont, self.typeSize, res)
            if hadHR:
                # a hack to ensure the q/a separator falls before the answer
                # comparison when user is using {{FrontSide}}
                s = "<hr id=answer>" + s
            return s
        return re.sub(self.typeAnsPat, repl, buf)

    def _contentForCloze(self, txt, idx):
        """Extract the text of cloze deletion number *idx* from *txt*,
        stripping ::hints; returns None when the cloze isn't present."""
        matches = re.findall("\{\{c%s::(.+?)\}\}"%idx, txt)
        if not matches:
            return None
        def noHint(txt):
            if "::" in txt:
                return txt.split("::")[0]
            return txt
        matches = [noHint(txt) for txt in matches]
        uniqMatches = set(matches)
        if len(uniqMatches) == 1:
            txt = matches[0]
        else:
            txt = ", ".join(matches)
        return txt

    def tokenizeComparison(self, given, correct):
        """Diff *given* against *correct*; return two lists of
        (ok, text) segments, one per string."""
        # compare in NFC form so accents appear correct
        given = ucd.normalize("NFC", given)
        correct = ucd.normalize("NFC", correct)
        try:
            s = difflib.SequenceMatcher(None, given, correct, autojunk=False)
        except:
            # autojunk was added in python 2.7.1
            s = difflib.SequenceMatcher(None, given, correct)
        givenElems = []
        correctElems = []
        givenPoint = 0
        correctPoint = 0
        offby = 0
        def logBad(old, new, str, array):
            if old != new:
                array.append((False, str[old:new]))
        def logGood(start, cnt, str, array):
            if cnt:
                array.append((True, str[start:start+cnt]))
        for x, y, cnt in s.get_matching_blocks():
            # if anything was missed in correct, pad given
            if cnt and y-offby > x:
                givenElems.append((False, "-"*(y-x-offby)))
                offby = y-x
            # log any proceeding bad elems
            logBad(givenPoint, x, given, givenElems)
            logBad(correctPoint, y, correct, correctElems)
            givenPoint = x+cnt
            correctPoint = y+cnt
            # log the match
            logGood(x, cnt, given, givenElems)
            logGood(y, cnt, correct, correctElems)
        return givenElems, correctElems

    def correct(self, given, correct, showBad=True):
        "Diff-corrects the typed-in answer."
        # NOTE(review): the showBad flag is accepted but not referenced in
        # this body — presumably a leftover; confirm against callers.
        givenElems, correctElems = self.tokenizeComparison(given, correct)
        def good(s):
            return "<span class=typeGood>"+cgi.escape(s)+"</span>"
        def bad(s):
            return "<span class=typeBad>"+cgi.escape(s)+"</span>"
        def missed(s):
            return "<span class=typeMissed>"+cgi.escape(s)+"</span>"
        if given == correct:
            res = good(given)
        else:
            res = ""
            for ok, txt in givenElems:
                if ok:
                    res += good(txt)
                else:
                    res += bad(txt)
            res += "<br>&darr;<br>"
            for ok, txt in correctElems:
                if ok:
                    res += good(txt)
                else:
                    res += missed(txt)
        res = "<div><code id=typeans>" + res + "</code></div>"
        return res

    # Bottom bar
    ##########################################################################

    _bottomCSS = """
body {
background: -webkit-gradient(linear, left top, left bottom,
from(#fff), to(#ddd));
border-bottom: 0;
border-top: 1px solid #aaa;
margin: 0;
padding: 0px;
padding-left: 5px; padding-right: 5px;
}
button {
min-width: 60px; white-space: nowrap;
}
.hitem { margin-top: 2px; }
.stat { padding-top: 5px; }
.stat2 { padding-top: 3px; font-weight: normal; }
.stattxt { padding-left: 5px; padding-right: 5px; white-space: nowrap; }
.nobold { font-weight: normal; display: inline-block; padding-top: 4px; }
.spacer { height: 18px; }
.spacer2 { height: 16px; }
"""

    def _bottomHTML(self):
        """HTML for the bottom bar (edit/more buttons, timer, and the
        middle area that showQuestion/showAnswer fill in)."""
        # NOTE(review): the dict passes rem= but the template does not use
        # %(rem)s — harmless with %-dict formatting, but looks vestigial.
        return """
<table width=100%% cellspacing=0 cellpadding=0>
<tr>
<td align=left width=50 valign=top class=stat>
<br>
<button title="%(editkey)s" onclick="py.link('edit');">%(edit)s</button></td>
<td align=center valign=top id=middle>
</td>
<td width=50 align=right valign=top class=stat><span id=time class=stattxt>
</span><br>
<button onclick="py.link('more');">%(more)s &#9662;</button>
</td>
</tr>
</table>
<script>
var time = %(time)d;
var maxTime = 0;
$(function () {
$("#ansbut").focus();
updateTime();
setInterval(function () { time += 1; updateTime() }, 1000);
});
var updateTime = function () {
    if (!maxTime) {
        $("#time").text("");
        return;
    }
    time = Math.min(maxTime, time);
    var m = Math.floor(time / 60);
    var s = time %% 60;
    if (s < 10) {
        s = "0" + s;
    }
    var e = $("#time");
    if (maxTime == time) {
        e.html("<font color=red>" + m + ":" + s + "</font>");
    } else {
        e.text(m + ":" + s);
    }
}
function showQuestion(txt, maxTime_) {
  // much faster than jquery's .html()
  $("#middle")[0].innerHTML = txt;
  $("#ansbut").focus();
  time = 0;
  maxTime = maxTime_;
}
function showAnswer(txt) {
  $("#middle")[0].innerHTML = txt;
  $("#defease").focus();
}
</script>
""" % dict(rem=self._remaining(), edit=_("Edit"),
           editkey=_("Shortcut key: %s") % "E",
           more=_("More"),
           time=self.card.timeTaken() // 1000)

    def _showAnswerButton(self):
        """Put the 'Show Answer' button (and remaining counts) into the
        bottom bar's middle area."""
        self._bottomReady = True
        if not self.typeCorrect:
            self.bottom.web.setFocus()
        middle = '''
<span class=stattxt>%s</span><br>
<button title="%s" id=ansbut onclick='py.link(\"ans\");'>%s</button>''' % (
        self._remaining(), _("Shortcut key: %s") % _("Space"), _("Show Answer"))
        # wrap it in a table so it has the same top margin as the ease buttons
        middle = "<table cellpadding=0><tr><td class=stat2 align=center>%s</td></tr></table>" % middle
        if self.card.shouldShowTimer():
            maxTime = self.card.timeLimit() / 1000
        else:
            maxTime = 0
        self.bottom.web.eval("showQuestion(%s,%d);" % (
            json.dumps(middle), maxTime))

    def _showEaseButtons(self):
        self.bottom.web.setFocus()
        middle = self._answerButtons()
        self.bottom.web.eval("showAnswer(%s);" % json.dumps(middle))

    def _remaining(self):
        """Coloured new/learning/review counts, with the current card's
        queue underlined; empty string when dueCounts is off."""
        if not self.mw.col.conf['dueCounts']:
            return ""
        if self.hadCardQueue:
            # if it's come from the undo queue, don't count it separately
            counts = list(self.mw.col.sched.counts())
        else:
            counts = list(self.mw.col.sched.counts(self.card))
        idx = self.mw.col.sched.countIdx(self.card)
        counts[idx] = "<u>%s</u>" % (counts[idx])
        space = " + "
        ctxt = '<font color="#000099">%s</font>' % counts[0]
        ctxt += space + '<font color="#C35617">%s</font>' % counts[1]
        ctxt += space + '<font color="#007700">%s</font>' % counts[2]
        return ctxt

    def _defaultEase(self):
        # the ease used when answering with Space/Enter
        if self.mw.col.sched.answerButtons(self.card) == 4:
            return 3
        else:
            return 2

    def _answerButtonList(self):
        """Return ((ease, label), ...) for the current card's button count."""
        l = ((1, _("Again")),)
        cnt = self.mw.col.sched.answerButtons(self.card)
        if cnt == 2:
            return l + ((2, _("Good")),)
        elif cnt == 3:
            return l + ((2, _("Good")), (3, _("Easy")))
        else:
            return l + ((2, _("Hard")), (3, _("Good")), (4, _("Easy")))

    def _answerButtons(self):
        """Build the HTML for the ease buttons row."""
        # NOTE(review): `times` is never used in this method.
        times = []
        default = self._defaultEase()
        def but(i, label):
            if i == default:
                extra = "id=defease"
            else:
                extra = ""
            due = self._buttonTime(i)
            return '''
<td align=center>%s<button %s title="%s" onclick='py.link("ease%d");'>\
%s</button></td>''' % (due, extra, _("Shortcut key: %s") % i, i, label)
        # NOTE(review): "cellpading" [sic] is a typo in the original markup;
        # left untouched since it is a runtime string (browsers ignore the
        # unknown attribute anyway).
        buf = "<center><table cellpading=0 cellspacing=0><tr>"
        for ease, label in self._answerButtonList():
            buf += but(ease, label)
        buf += "</tr></table>"
        script = """
<script>$(function () { $("#defease").focus(); });</script>"""
        return buf + script

    def _buttonTime(self, i):
        # estimated next interval shown above each ease button
        if not self.mw.col.conf['estTimes']:
            return "<div class=spacer></div>"
        txt = self.mw.col.sched.nextIvlStr(self.card, i, True) or "&nbsp;"
        return '<span class=nobold>%s</span><br>' % txt

    # Leeches
    ##########################################################################

    def onLeech(self, card):
        # for now
        s = _("Card was a leech.")
        if card.queue < 0:
            s += " " + _("It has been suspended.")
        tooltip(s)

    # Context menu
    ##########################################################################

    # note the shortcuts listed here also need to be defined above
    def showContextMenu(self):
        """Pop up the 'More' menu with card/note actions."""
        opts = [
            [_("Mark Note"), "*", self.onMark],
            [_("Bury Card"), "-", self.onBuryCard],
            [_("Bury Note"), "=", self.onBuryNote],
            [_("Suspend Card"), "@", self.onSuspendCard],
            [_("Suspend Note"), "!", self.onSuspend],
            [_("Delete Note"), "Delete", self.onDelete],
            [_("Options"), "O", self.onOptions],
            None,
            [_("Replay Audio"), "R", self.replayAudio],
            [_("Record Own Voice"), "Shift+V", self.onRecordVoice],
            [_("Replay Own Voice"), "V", self.onReplayRecorded],
        ]
        m = QMenu(self.mw)
        for row in opts:
            if not row:
                m.addSeparator()
                continue
            label, scut, func = row
            a = m.addAction(label)
            a.setShortcut(QKeySequence(scut))
            a.connect(a, SIGNAL("triggered()"), func)
        runHook("Reviewer.contextMenuEvent",self,m)
        m.exec_(QCursor.pos())

    def onOptions(self):
        self.mw.onDeckConf(self.mw.col.decks.get(
            self.card.odid or self.card.did))

    def onMark(self):
        """Toggle the 'marked' tag on the current note."""
        f = self.card.note()
        if f.hasTag("marked"):
            f.delTag("marked")
        else:
            f.addTag("marked")
        f.flush()
        self._toggleStar()

    def onSuspend(self):
        self.mw.checkpoint(_("Suspend"))
        self.mw.col.sched.suspendCards(
            [c.id for c in self.card.note().cards()])
        tooltip(_("Note suspended."))
        self.mw.reset()

    def onSuspendCard(self):
        self.mw.checkpoint(_("Suspend"))
        self.mw.col.sched.suspendCards([self.card.id])
        tooltip(_("Card suspended."))
        self.mw.reset()

    def onDelete(self):
        # need to check state because the shortcut is global to the main
        # window
        if self.mw.state != "review" or not self.card:
            return
        self.mw.checkpoint(_("Delete"))
        cnt = len(self.card.note().cards())
        self.mw.col.remNotes([self.card.note().id])
        self.mw.reset()
        tooltip(ngettext(
            "Note and its %d card deleted.",
            "Note and its %d cards deleted.",
            cnt) % cnt)

    def onBuryCard(self):
        self.mw.checkpoint(_("Bury"))
        self.mw.col.sched.buryCards([self.card.id])
        self.mw.reset()
        tooltip(_("Card buried."))

    def onBuryNote(self):
        self.mw.checkpoint(_("Bury"))
        self.mw.col.sched.buryNote(self.card.nid)
        self.mw.reset()
        tooltip(_("Note buried."))

    def onRecordVoice(self):
        self._recordedAudio = getAudio(self.mw, encode=False)
        self.onReplayRecorded()

    def onReplayRecorded(self):
        if not self._recordedAudio:
            return tooltip(_("You haven't recorded your voice yet."))
        clearAudioQueue()
        play(self._recordedAudio)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/reviewer.py
reviewer.py
import urllib
import urllib2
import time
from aqt.qt import *
import aqt
from aqt.utils import openLink
from anki.utils import json, platDesc
from aqt.utils import showText


class LatestVersionFinder(QThread):
    """Background thread that polls the update server and emits Qt
    signals (newMsg / newVerAvail / clockIsOff) with the results."""

    def __init__(self, main):
        QThread.__init__(self)
        self.main = main
        self.config = main.pm.meta

    def _data(self):
        """Assemble the POST payload identifying this install."""
        d = {"ver": aqt.appVersion,
             "os": platDesc(),
             "id": self.config['id'],
             "lm": self.config['lastMsg'],
             "crt": self.config['created']}
        return d

    def run(self):
        """Thread body: contact the server; swallow all network errors."""
        if not self.config['updates']:
            return
        d = self._data()
        d['proto'] = 1
        d = urllib.urlencode(d)
        try:
            f = urllib2.urlopen(aqt.appUpdate, d)
            resp = f.read()
            if not resp:
                return
            resp = json.loads(resp)
        except:
            # behind proxy, corrupt message, etc
            return
        if resp['msg']:
            self.emit(SIGNAL("newMsg"), resp)
        if resp['ver']:
            self.emit(SIGNAL("newVerAvail"), resp['ver'])
        # warn if the local clock disagrees with the server by >5 minutes
        diff = resp['time'] - time.time()
        if abs(diff) > 300:
            self.emit(SIGNAL("clockIsOff"), diff)


def askAndUpdate(mw, ver):
    """Ask the user whether to download version *ver*; 'ignore' suppresses
    future prompts for this version."""
    baseStr = (
        _('''<h1>Anki Updated</h1>Anki %s has been released.<br><br>''') %
        ver)
    msg = QMessageBox(mw)
    msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
    msg.setIcon(QMessageBox.Information)
    msg.setText(baseStr + _("Would you like to download it now?"))
    button = QPushButton(_("Ignore this update"))
    msg.addButton(button, QMessageBox.RejectRole)
    msg.setDefaultButton(QMessageBox.Yes)
    ret = msg.exec_()
    if msg.clickedButton() == button:
        # ignore this update
        mw.pm.meta['suppressUpdate'] = ver
    elif ret == QMessageBox.Yes:
        openLink(aqt.appWebsite)


def showMessages(mw, data):
    """Display a server broadcast message and remember its id so it is
    not shown again."""
    showText(data['msg'], parent=mw, type="html")
    mw.pm.meta['lastMsg'] = data['msgId']
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/update.py
update.py
import re
from aqt.qt import *
from anki.consts import *
import aqt
from anki.sound import playFromText, clearAudioQueue
from aqt.utils import saveGeom, restoreGeom, getBase, mungeQA,\
     showInfo, askUser, getOnlyText, \
     showWarning, openHelp
from anki.utils import isMac, isWin, joinFields
from aqt.webview import AnkiWebView
import anki.js

# NOTE(review): `_` and `ngettext` are used below without an explicit
# import — presumably installed into builtins by anki.lang; confirm.


class CardLayout(QDialog):
    """Modal card-template editor with live front/back previews.

    Runs exec_() from __init__, so constructing the object shows the
    dialog and blocks until it is closed.
    """

    def __init__(self, mw, note, ord=0, parent=None, addMode=False):
        QDialog.__init__(self, parent or mw, Qt.Window)
        self.mw = aqt.mw
        self.parent = parent or mw
        self.note = note
        self.ord = ord
        self.col = self.mw.col
        self.mm = self.mw.col.models
        self.model = note.model()
        self.mw.checkpoint(_("Card Types"))
        self.addMode = addMode
        if addMode:
            # save it to DB temporarily
            self.emptyFields = []
            for name, val in note.items():
                if val.strip():
                    continue
                self.emptyFields.append(name)
                note[name] = "(%s)" % name
            note.flush()
        self.setupTabs()
        self.setupButtons()
        self.setWindowTitle(_("Card Types for %s") % self.model['name'])
        v1 = QVBoxLayout()
        v1.addWidget(self.tabs)
        v1.addLayout(self.buttons)
        self.setLayout(v1)
        self.redraw()
        restoreGeom(self, "CardLayout")
        self.exec_()

    def redraw(self):
        """Regenerate preview cards and rebuild all tabs."""
        self.cards = self.col.previewCards(self.note, 2)
        self.redrawing = True
        self.updateTabs()
        self.redrawing = False
        idx = self.ord
        if idx >= len(self.cards):
            idx = len(self.cards) - 1
        self.selectCard(idx)

    def setupTabs(self):
        """One tab per template; cloze models can't add/close tabs."""
        c = self.connect
        cloze = self.model['type'] == MODEL_CLOZE
        self.tabs = QTabWidget()
        self.tabs.setTabsClosable(not cloze)
        self.tabs.setUsesScrollButtons(True)
        if not cloze:
            add = QPushButton("+")
            add.setFixedWidth(30)
            add.setToolTip(_("Add new card"))
            c(add, SIGNAL("clicked()"), self.onAddCard)
            self.tabs.setCornerWidget(add)
        c(self.tabs, SIGNAL("currentChanged(int)"), self.onCardSelected)
        c(self.tabs, SIGNAL("tabCloseRequested(int)"), self.onRemoveTab)

    def updateTabs(self):
        self.forms = []
        self.tabs.clear()
        for t in self.model['tmpls']:
            self.addTab(t)

    def addTab(self, t):
        """Build one tab: template editor on the left, previews on the right."""
        c = self.connect
        w = QWidget()
        l = QHBoxLayout()
        l.setMargin(0)
        l.setSpacing(3)
        left = QWidget()
        # template area
        tform = aqt.forms.template.Ui_Form()
        tform.setupUi(left)
        tform.label1.setText(u" →")
        tform.label2.setText(u" →")
        tform.labelc1.setText(u" ↗")
        tform.labelc2.setText(u" ↘")
        if self.style().objectName() == "gtk+":
            # gtk+ requires margins in inner layout
            tform.tlayout1.setContentsMargins(0, 11, 0, 0)
            tform.tlayout2.setContentsMargins(0, 11, 0, 0)
            tform.tlayout3.setContentsMargins(0, 11, 0, 0)
        if len(self.cards) > 1:
            tform.groupBox_3.setTitle(_(
                "Styling (shared between cards)"))
        c(tform.front, SIGNAL("textChanged()"), self.saveCard)
        c(tform.css, SIGNAL("textChanged()"), self.saveCard)
        c(tform.back, SIGNAL("textChanged()"), self.saveCard)
        l.addWidget(left, 5)
        # preview area
        right = QWidget()
        pform = aqt.forms.preview.Ui_Form()
        pform.setupUi(right)
        if self.style().objectName() == "gtk+":
            # gtk+ requires margins in inner layout
            pform.frontPrevBox.setContentsMargins(0, 11, 0, 0)
            pform.backPrevBox.setContentsMargins(0, 11, 0, 0)
        # for cloze notes, show that it's one of n cards
        if self.model['type'] == MODEL_CLOZE:
            cnt = len(self.mm.availOrds(
                self.model, joinFields(self.note.fields)))
            for g in pform.groupBox, pform.groupBox_2:
                g.setTitle(g.title() + _(" (1 of %d)") % max(cnt, 1))
        pform.frontWeb = AnkiWebView()
        pform.frontPrevBox.addWidget(pform.frontWeb)
        pform.backWeb = AnkiWebView()
        pform.backPrevBox.addWidget(pform.backWeb)
        for wig in pform.frontWeb, pform.backWeb:
            wig.page().setLinkDelegationPolicy(
                QWebPage.DelegateExternalLinks)
        l.addWidget(right, 5)
        w.setLayout(l)
        self.forms.append({'tform': tform, 'pform': pform})
        self.tabs.addTab(w, t['name'])

    def onRemoveTab(self, idx):
        """Delete template *idx* after confirmation; refuses if it would
        orphan notes or remove the last template."""
        if len(self.model['tmpls']) < 2:
            return showInfo(_("At least one card type is required."))
        cards = self.mm.tmplUseCount(self.model, idx)
        cards = ngettext("%d card", "%d cards", cards) % cards
        msg = (_("Delete the '%(a)s' card type, and its %(b)s?") %
               dict(a=self.model['tmpls'][idx]['name'], b=cards))
        if not askUser(msg):
            return
        if not self.mm.remTemplate(self.model, self.cards[idx].template()):
            return showWarning(_("""\
Removing this card type would cause one or more notes to be deleted. \
Please create a new card type first."""))
        self.redraw()

    # Buttons
    ##########################################################################

    def setupButtons(self):
        """Bottom button row: Help | Add Field / Flip / More | Close."""
        c = self.connect
        l = self.buttons = QHBoxLayout()
        help = QPushButton(_("Help"))
        help.setAutoDefault(False)
        l.addWidget(help)
        c(help, SIGNAL("clicked()"), self.onHelp)
        l.addStretch()
        addField = QPushButton(_("Add Field"))
        addField.setAutoDefault(False)
        l.addWidget(addField)
        c(addField, SIGNAL("clicked()"), self.onAddField)
        if self.model['type'] != MODEL_CLOZE:
            flip = QPushButton(_("Flip"))
            flip.setAutoDefault(False)
            l.addWidget(flip)
            c(flip, SIGNAL("clicked()"), self.onFlip)
        more = QPushButton(_("More") + u" ▾")
        more.setAutoDefault(False)
        l.addWidget(more)
        c(more, SIGNAL("clicked()"), lambda: self.onMore(more))
        l.addStretch()
        close = QPushButton(_("Close"))
        close.setAutoDefault(False)
        l.addWidget(close)
        c(close, SIGNAL("clicked()"), self.accept)

    # Cards
    ##########################################################################

    def selectCard(self, idx):
        if self.tabs.currentIndex() == idx:
            # trigger a re-read
            self.onCardSelected(idx)
        else:
            self.tabs.setCurrentIndex(idx)

    def onCardSelected(self, idx):
        if self.redrawing:
            return
        self.card = self.cards[idx]
        self.ord = idx
        self.tab = self.forms[idx]
        self.tabs.setCurrentIndex(idx)
        self.playedAudio = {}
        self.readCard()
        self.renderPreview()

    def readCard(self):
        """Load the current template into the editor widgets without
        triggering saveCard (guarded by self.redrawing)."""
        t = self.card.template()
        self.redrawing = True
        self.tab['tform'].front.setPlainText(t['qfmt'])
        self.tab['tform'].css.setPlainText(self.model['css'])
        self.tab['tform'].back.setPlainText(t['afmt'])
        self.tab['tform'].front.setAcceptRichText(False)
        self.tab['tform'].css.setAcceptRichText(False)
        self.tab['tform'].back.setAcceptRichText(False)
        self.tab['tform'].front.setTabStopWidth(30)
        self.tab['tform'].css.setTabStopWidth(30)
        self.tab['tform'].back.setTabStopWidth(30)
        self.redrawing = False

    def saveCard(self):
        """Copy editor contents back into the model and re-render."""
        if self.redrawing:
            return
        text = self.tab['tform'].front.toPlainText()
        self.card.template()['qfmt'] = text
        text = self.tab['tform'].css.toPlainText()
        self.card.model()['css'] = text
        text = self.tab['tform'].back.toPlainText()
        self.card.template()['afmt'] = text
        self.renderPreview()

    # Preview
    ##########################################################################

    def renderPreview(self):
        """Render front/back previews; play each card's audio only once."""
        c = self.card
        ti = self.maybeTextInput
        base = getBase(self.mw.col)
        self.tab['pform'].frontWeb.stdHtml(
            ti(mungeQA(self.mw.col, c.q(reload=True))),
            self.mw.reviewer._styles(),
            bodyClass="card card%d" % (c.ord+1), head=base,
            js=anki.js.browserSel)
        self.tab['pform'].backWeb.stdHtml(
            ti(mungeQA(self.mw.col, c.a()), type='a'),
            self.mw.reviewer._styles(),
            bodyClass="card card%d" % (c.ord+1), head=base,
            js=anki.js.browserSel)
        clearAudioQueue()
        if c.id not in self.playedAudio:
            playFromText(c.q())
            playFromText(c.a())
            self.playedAudio[c.id] = True

    def maybeTextInput(self, txt, type='q'):
        """Substitute a dummy type-answer box / diff for {{type:...}}
        directives so previews resemble real reviews."""
        if "[[type:" not in txt:
            return txt
        origLen = len(txt)
        txt = txt.replace("<hr id=answer>", "")
        hadHR = origLen != len(txt)
        def answerRepl(match):
            res = self.mw.reviewer.correct(u"exomple", u"an example")
            if hadHR:
                res = "<hr id=answer>" + res
            return res
        if type == 'q':
            repl = "<input id='typeans' type=text value='exomple'>"
            repl = "<center>%s</center>" % repl
        else:
            repl = answerRepl
        return re.sub("\[\[type:.+?\]\]", repl, txt)

    # Card operations
    ######################################################################

    def onRename(self):
        """Rename the current template, rejecting duplicate names."""
        name = getOnlyText(_("New name:"),
                           default=self.card.template()['name'])
        if not name:
            return
        if name in [c.template()['name'] for c in self.cards
                    if c.template()['ord'] != self.ord]:
            return showWarning(_("That name is already used."))
        self.card.template()['name'] = name
        self.tabs.setTabText(self.tabs.currentIndex(), name)

    def onReorder(self):
        """Move the current template to a new 1-based position."""
        n = len(self.cards)
        cur = self.card.template()['ord']+1
        pos = getOnlyText(
            _("Enter new card position (1...%s):") % n,
            default=str(cur))
        if not pos:
            return
        try:
            pos = int(pos)
        except ValueError:
            return
        if pos < 1 or pos > n:
            return
        if pos == cur:
            return
        pos -= 1
        self.mm.moveTemplate(self.model, self.card.template(), pos)
        self.ord = pos
        self.redraw()

    def _newCardName(self):
        # first unused "Card N" name
        n = len(self.cards) + 1
        while 1:
            name = _("Card %d") % n
            if name not in [c.template()['name'] for c in self.cards]:
                break
            n += 1
        return name

    def onAddCard(self):
        """Add a new template seeded from the current one's formats."""
        name = self._newCardName()
        t = self.mm.newTemplate(name)
        old = self.card.template()
        t['qfmt'] = "%s<br>\n%s" % (_("Edit to customize"), old['qfmt'])
        t['afmt'] = old['afmt']
        self.mm.addTemplate(self.model, t)
        self.ord = len(self.cards)
        self.redraw()

    def onFlip(self):
        old = self.card.template()
        self._flipQA(old, old)
        self.redraw()

    def _flipQA(self, src, dst):
        """Swap question/answer formats, keeping the <hr id=answer>
        separator convention intact."""
        m = re.match("(?s)(.+)<hr id=answer>(.+)", src['afmt'])
        if not m:
            showInfo(_("""\
Anki couldn't find the line between the question and answer. Please \
adjust the template manually to switch the question and answer."""))
            return
        dst['afmt'] = "{{FrontSide}}\n\n<hr id=answer>\n\n%s" % src['qfmt']
        dst['qfmt'] = m.group(2).strip()
        return True

    def onMore(self, button):
        """'More' drop-down: rename/reposition/deck override/browser
        appearance."""
        m = QMenu(self)
        a = m.addAction(_("Rename"))
        a.connect(a, SIGNAL("triggered()"), self.onRename)
        if self.model['type'] != MODEL_CLOZE:
            a = m.addAction(_("Reposition"))
            a.connect(a, SIGNAL("triggered()"), self.onReorder)
            t = self.card.template()
            if t['did']:
                s = _(" (on)")
            else:
                s = _(" (off)")
            a = m.addAction(_("Deck Override") + s)
            a.connect(a, SIGNAL("triggered()"), self.onTargetDeck)
        a = m.addAction(_("Browser Appearance"))
        a.connect(a, SIGNAL("triggered()"), self.onBrowserDisplay)
        m.exec_(button.mapToGlobal(QPoint(0,0)))

    def onBrowserDisplay(self):
        """Edit how this template's cards render in the card browser."""
        d = QDialog()
        f = aqt.forms.browserdisp.Ui_Dialog()
        f.setupUi(d)
        t = self.card.template()
        f.qfmt.setText(t.get('bqfmt', ""))
        f.afmt.setText(t.get('bafmt', ""))
        f.font.setCurrentFont(QFont(t.get('bfont', "Arial")))
        f.fontSize.setValue(t.get('bsize', 12))
        d.connect(f.buttonBox, SIGNAL("accepted()"),
                  lambda: self.onBrowserDisplayOk(f))
        d.exec_()

    def onBrowserDisplayOk(self, f):
        t = self.card.template()
        t['bqfmt'] = f.qfmt.text().strip()
        t['bafmt'] = f.afmt.text().strip()
        t['bfont'] = f.font.currentFont().family()
        t['bsize'] = f.fontSize.value()

    def onTargetDeck(self):
        """Set/clear a per-template deck override for new cards."""
        from aqt.tagedit import TagEdit
        t = self.card.template()
        d = QDialog(self)
        d.setWindowTitle("Anki")
        d.setMinimumWidth(400)
        l = QVBoxLayout()
        lab = QLabel(_("""\
Enter deck to place new %s cards in, or leave blank:""") %
                     self.card.template()['name'])
        lab.setWordWrap(True)
        l.addWidget(lab)
        te = TagEdit(d, type=1)
        te.setCol(self.col)
        l.addWidget(te)
        if t['did']:
            te.setText(self.col.decks.get(t['did'])['name'])
            te.selectAll()
        bb = QDialogButtonBox(QDialogButtonBox.Close)
        self.connect(bb, SIGNAL("rejected()"), d, SLOT("close()"))
        l.addWidget(bb)
        d.setLayout(l)
        d.exec_()
        if not te.text().strip():
            t['did'] = None
        else:
            t['did'] = self.col.decks.id(te.text())

    def onAddField(self):
        """Append a {{Field}} reference to the front or back format."""
        diag = QDialog(self)
        form = aqt.forms.addfield.Ui_Dialog()
        form.setupUi(diag)
        fields = [f['name'] for f in self.model['flds']]
        form.fields.addItems(fields)
        form.font.setCurrentFont(QFont("Arial"))
        form.size.setValue(20)
        diag.show()
        # Work around a Qt bug,
        # https://bugreports.qt-project.org/browse/QTBUG-1894
        if isMac or isWin:
            # No problems on Macs or Windows.
            form.fields.showPopup()
        else:
            # Delay showing the pop-up.
            self.mw.progress.timer(200, form.fields.showPopup, False)
        if not diag.exec_():
            return
        if form.radioQ.isChecked():
            obj = self.tab['tform'].front
        else:
            obj = self.tab['tform'].back
        self._addField(obj,
                       fields[form.fields.currentIndex()],
                       form.font.currentFont().family(),
                       form.size.value())

    def _addField(self, widg, field, font, size):
        t = widg.toPlainText()
        t +="\n<div style='font-family: %s; font-size: %spx;'>{{%s}}</div>\n" % (
            font, size, field)
        widg.setPlainText(t)
        self.saveCard()

    # Closing & Help
    ######################################################################

    def accept(self):
        self.reject()

    def reject(self):
        """Single close path: undo temp note (add mode), persist the
        model, reset the UI, and save window geometry."""
        clearAudioQueue()
        if self.addMode:
            # remove the filler fields we added
            for name in self.emptyFields:
                self.note[name] = ""
            self.mw.col.db.execute("delete from notes where id = ?",
                                   self.note.id)
        self.mm.save(self.model, templates=True)
        self.mw.reset()
        saveGeom(self, "CardLayout")
        return QDialog.reject(self)

    def onHelp(self):
        openHelp("templates")
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/clayout.py
clayout.py
import sys import cgi from anki.lang import _ from aqt.qt import * from aqt.utils import showText, showWarning class ErrorHandler(QObject): "Catch stderr and write into buffer." ivl = 100 def __init__(self, mw): QObject.__init__(self, mw) self.mw = mw self.timer = None self.connect(self, SIGNAL("errorTimer"), self._setTimer) self.pool = "" sys.stderr = self def write(self, data): # make sure we have unicode if not isinstance(data, unicode): data = unicode(data, "utf8", "replace") # dump to stdout sys.stdout.write(data.encode("utf-8")) # save in buffer self.pool += data # and update timer self.setTimer() def setTimer(self): # we can't create a timer from a different thread, so we post a # message to the object on the main thread self.emit(SIGNAL("errorTimer")) def _setTimer(self): if not self.timer: self.timer = QTimer(self.mw) self.mw.connect(self.timer, SIGNAL("timeout()"), self.onTimeout) self.timer.setInterval(self.ivl) self.timer.setSingleShot(True) self.timer.start() def tempFolderMsg(self): return _("""\ The permissions on your system's temporary folder are incorrect, and Anki is \ not able to correct them automatically. Please search for 'temp folder' in the \ Anki manual for more information.""") def onTimeout(self): error = cgi.escape(self.pool) self.pool = "" self.mw.progress.clear() if "abortSchemaMod" in error: return if "Pyaudio not" in error: return showWarning(_("Please install PyAudio")) if "install mplayer" in error: return showWarning(_("Please install mplayer")) if "no default output" in error: return showWarning(_("Please connect a microphone, and ensure " "other programs are not using the audio device.")) if "invalidTempFolder" in error: return showWarning(self.tempFolderMsg()) if "disk I/O error" in error: return showWarning(_("""\ An error occurred while accessing the database. Possible causes: - Antivirus, firewall, backup, or synchronization software may be \ interfering with Anki. 
Try disabling such software and see if the \ problem goes away. - Your disk may be full. - The Documents/Anki folder may be on a network drive. - Files in the Documents/Anki folder may not be writeable. - Your hard disk may have errors. It's a good idea to run Tools>Check Database to ensure your collection \ is not corrupt. """)) stdText = _("""\ An error occurred. It may have been caused by a harmless bug, <br> or your deck may have a problem. <p>To confirm it's not a problem with your deck, please run <b>Tools &gt; Check Database</b>. <p>If that doesn't fix the problem, please copy the following<br> into a bug report:""") pluginText = _("""\ An error occurred in an add-on.<br> Please post on the add-on forum:<br>%s<br>""") pluginText %= "https://anki.tenderapp.com/discussions/add-ons" if "addon" in error: txt = pluginText else: txt = stdText # show dialog txt = txt + "<div style='white-space: pre-wrap'>" + error + "</div>" showText(txt, type="html")
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/errors.py
errors.py
from aqt.qt import * import re, os, sys, urllib, subprocess import aqt from anki.sound import stripSounds from anki.utils import isWin, isMac, invalidFilename def openHelp(section): link = aqt.appHelpSite if section: link += "#%s" % section openLink(link) def openLink(link): tooltip(_("Loading..."), period=1000) QDesktopServices.openUrl(QUrl(link)) def showWarning(text, parent=None, help=""): "Show a small warning with an OK button." return showInfo(text, parent, help, "warning") def showCritical(text, parent=None, help=""): "Show a small critical error with an OK button." return showInfo(text, parent, help, "critical") def showInfo(text, parent=False, help="", type="info"): "Show a small info window with an OK button." if parent is False: parent = aqt.mw.app.activeWindow() or aqt.mw if type == "warning": icon = QMessageBox.Warning elif type == "critical": icon = QMessageBox.Critical else: icon = QMessageBox.Information mb = QMessageBox(parent) mb.setText(text) mb.setIcon(icon) mb.setWindowModality(Qt.WindowModal) b = mb.addButton(QMessageBox.Ok) b.setDefault(True) if help: b = mb.addButton(QMessageBox.Help) b.connect(b, SIGNAL("clicked()"), lambda: openHelp(help)) b.setAutoDefault(False) return mb.exec_() def showText(txt, parent=None, type="text", run=True, geomKey=None): if not parent: parent = aqt.mw.app.activeWindow() or aqt.mw diag = QDialog(parent) diag.setWindowTitle("Anki") layout = QVBoxLayout(diag) diag.setLayout(layout) text = QTextEdit() text.setReadOnly(True) if type == "text": text.setPlainText(txt) else: text.setHtml(txt) layout.addWidget(text) box = QDialogButtonBox(QDialogButtonBox.Close) layout.addWidget(box) def onReject(): if geomKey: saveGeom(diag, geomKey) QDialog.reject(diag) diag.connect(box, SIGNAL("rejected()"), onReject) diag.setMinimumHeight(400) diag.setMinimumWidth(500) if geomKey: restoreGeom(diag, geomKey) if run: diag.exec_() else: return diag, box def askUser(text, parent=None, help="", defaultno=False, msgfunc=None): "Show a 
yes/no question. Return true if yes." if not parent: parent = aqt.mw.app.activeWindow() if not msgfunc: msgfunc = QMessageBox.question sb = QMessageBox.Yes | QMessageBox.No if help: sb |= QMessageBox.Help while 1: if defaultno: default = QMessageBox.No else: default = QMessageBox.Yes r = msgfunc(parent, "Anki", text, sb, default) if r == QMessageBox.Help: openHelp(help) else: break return r == QMessageBox.Yes class ButtonedDialog(QMessageBox): def __init__(self, text, buttons, parent=None, help=""): QDialog.__init__(self, parent) self.buttons = [] self.setWindowTitle("Anki") self.help = help self.setIcon(QMessageBox.Warning) self.setText(text) # v = QVBoxLayout() # v.addWidget(QLabel(text)) # box = QDialogButtonBox() # v.addWidget(box) for b in buttons: self.buttons.append( self.addButton(b, QMessageBox.AcceptRole)) if help: self.addButton(_("Help"), QMessageBox.HelpRole) buttons.append(_("Help")) #self.setLayout(v) def run(self): self.exec_() but = self.clickedButton().text() if but == "Help": # FIXME stop dialog closing? 
openHelp(self.help) return self.clickedButton().text() def setDefault(self, idx): self.setDefaultButton(self.buttons[idx]) def askUserDialog(text, buttons, parent=None, help=""): if not parent: parent = aqt.mw diag = ButtonedDialog(text, buttons, parent, help) return diag class GetTextDialog(QDialog): def __init__(self, parent, question, help=None, edit=None, default=u"", title="Anki"): QDialog.__init__(self, parent) self.setWindowTitle(title) self.question = question self.help = help self.qlabel = QLabel(question) self.setMinimumWidth(400) v = QVBoxLayout() v.addWidget(self.qlabel) if not edit: edit = QLineEdit() self.l = edit if default: self.l.setText(default) self.l.selectAll() v.addWidget(self.l) buts = QDialogButtonBox.Ok | QDialogButtonBox.Cancel if help: buts |= QDialogButtonBox.Help b = QDialogButtonBox(buts) v.addWidget(b) self.setLayout(v) self.connect(b.button(QDialogButtonBox.Ok), SIGNAL("clicked()"), self.accept) self.connect(b.button(QDialogButtonBox.Cancel), SIGNAL("clicked()"), self.reject) if help: self.connect(b.button(QDialogButtonBox.Help), SIGNAL("clicked()"), self.helpRequested) def accept(self): return QDialog.accept(self) def reject(self): return QDialog.reject(self) def helpRequested(self): openHelp(self.help) def getText(prompt, parent=None, help=None, edit=None, default=u"", title="Anki"): if not parent: parent = aqt.mw.app.activeWindow() or aqt.mw d = GetTextDialog(parent, prompt, help=help, edit=edit, default=default, title=title) d.setWindowModality(Qt.WindowModal) ret = d.exec_() return (unicode(d.l.text()), ret) def getOnlyText(*args, **kwargs): (s, r) = getText(*args, **kwargs) if r: return s else: return u"" # fixme: these utilities could be combined into a single base class def chooseList(prompt, choices, startrow=0, parent=None): if not parent: parent = aqt.mw.app.activeWindow() d = QDialog(parent) d.setWindowModality(Qt.WindowModal) l = QVBoxLayout() d.setLayout(l) t = QLabel(prompt) l.addWidget(t) c = QListWidget() 
c.addItems(choices) c.setCurrentRow(startrow) l.addWidget(c) bb = QDialogButtonBox(QDialogButtonBox.Ok) bb.connect(bb, SIGNAL("accepted()"), d, SLOT("accept()")) l.addWidget(bb) d.exec_() return c.currentRow() def getTag(parent, deck, question, tags="user", **kwargs): from aqt.tagedit import TagEdit te = TagEdit(parent) te.setCol(deck) ret = getText(question, parent, edit=te, **kwargs) te.hideCompleter() return ret # File handling ###################################################################### def getFile(parent, title, cb, filter="*.*", dir=None, key=None): "Ask the user for a file." assert not dir or not key if not dir: dirkey = key+"Directory" dir = aqt.mw.pm.profile.get(dirkey, "") else: dirkey = None d = QFileDialog(parent) # fix #233 crash if isMac: d.setOptions(QFileDialog.DontUseNativeDialog) d.setFileMode(QFileDialog.ExistingFile) d.setDirectory(dir) d.setWindowTitle(title) d.setNameFilter(filter) ret = [] def accept(): # work around an osx crash #aqt.mw.app.processEvents() file = unicode(list(d.selectedFiles())[0]) if dirkey: dir = os.path.dirname(file) aqt.mw.pm.profile[dirkey] = dir if cb: cb(file) ret.append(file) d.connect(d, SIGNAL("accepted()"), accept) d.exec_() return ret and ret[0] def getSaveFile(parent, title, dir_description, key, ext, fname=None): """Ask the user for a file to save. Use DIR_DESCRIPTION as config variable. The file dialog will default to open with FNAME.""" config_key = dir_description + 'Directory' base = aqt.mw.pm.profile.get(config_key, aqt.mw.pm.base) path = os.path.join(base, fname) file = unicode(QFileDialog.getSaveFileName( parent, title, path, u"{0} (*{1})".format(key, ext), options=QFileDialog.DontConfirmOverwrite)) if file: # add extension if not file.lower().endswith(ext): file += ext # save new default dir = os.path.dirname(file) aqt.mw.pm.profile[config_key] = dir # check if it exists if os.path.exists(file): if not askUser( _("This file exists. 
Are you sure you want to overwrite it?"), parent): return None return file def saveGeom(widget, key): key += "Geom" aqt.mw.pm.profile[key] = widget.saveGeometry() def restoreGeom(widget, key, offset=None, adjustSize=False): key += "Geom" if aqt.mw.pm.profile.get(key): widget.restoreGeometry(aqt.mw.pm.profile[key]) if isMac and offset: if qtminor > 6: # bug in osx toolkit s = widget.size() widget.resize(s.width(), s.height()+offset*2) else: if adjustSize: widget.adjustSize() def saveState(widget, key): key += "State" aqt.mw.pm.profile[key] = widget.saveState() def restoreState(widget, key): key += "State" if aqt.mw.pm.profile.get(key): widget.restoreState(aqt.mw.pm.profile[key]) def saveSplitter(widget, key): key += "Splitter" aqt.mw.pm.profile[key] = widget.saveState() def restoreSplitter(widget, key): key += "Splitter" if aqt.mw.pm.profile.get(key): widget.restoreState(aqt.mw.pm.profile[key]) def saveHeader(widget, key): key += "Header" aqt.mw.pm.profile[key] = widget.saveState() def restoreHeader(widget, key): key += "Header" if aqt.mw.pm.profile.get(key): widget.restoreState(aqt.mw.pm.profile[key]) def mungeQA(col, txt): txt = col.media.escapeImages(txt) txt = stripSounds(txt) # osx webkit doesn't understand font weight 600 txt = re.sub("font-weight: *600", "font-weight:bold", txt) if isMac: # custom fonts cause crashes on osx at the moment txt = txt.replace("font-face", "invalid") return txt def applyStyles(widget): p = os.path.join(aqt.mw.pm.base, "style.css") if os.path.exists(p): widget.setStyleSheet(open(p).read()) def getBase(col): base = None mdir = col.media.dir() if isWin and not mdir.startswith("\\\\"): prefix = u"file:///" else: prefix = u"file://" mdir = mdir.replace("\\", "/") base = prefix + unicode( urllib.quote(mdir.encode("utf-8")), "utf-8") + "/" return '<base href="%s">' % base def openFolder(path): if isWin: if isinstance(path, unicode): path = path.encode(sys.getfilesystemencoding()) subprocess.Popen(["explorer", path]) else: 
QDesktopServices.openUrl(QUrl("file://" + path)) def shortcut(key): if isMac: return re.sub("(?i)ctrl", "Command", key) return key def maybeHideClose(bbox): if isMac: b = bbox.button(QDialogButtonBox.Close) if b: bbox.removeButton(b) def addCloseShortcut(widg): if not isMac: return widg._closeShortcut = QShortcut(QKeySequence("Ctrl+W"), widg) widg.connect(widg._closeShortcut, SIGNAL("activated()"), widg, SLOT("reject()")) # Tooltips ###################################################################### _tooltipTimer = None _tooltipLabel = None def tooltip(msg, period=3000, parent=None): global _tooltipTimer, _tooltipLabel class CustomLabel(QLabel): def mousePressEvent(self, evt): evt.accept() self.hide() closeTooltip() aw = parent or aqt.mw.app.activeWindow() or aqt.mw lab = CustomLabel("""\ <table cellpadding=10> <tr> <td><img src=":/icons/help-hint.png"></td> <td>%s</td> </tr> </table>""" % msg, aw) lab.setFrameStyle(QFrame.Panel) lab.setLineWidth(2) lab.setWindowFlags(Qt.ToolTip) p = QPalette() p.setColor(QPalette.Window, QColor("#feffc4")) p.setColor(QPalette.WindowText, QColor("#000000")) lab.setPalette(p) lab.move( aw.mapToGlobal(QPoint(0, -100 + aw.height()))) lab.show() _tooltipTimer = aqt.mw.progress.timer( period, closeTooltip, False) _tooltipLabel = lab def closeTooltip(): global _tooltipLabel, _tooltipTimer if _tooltipLabel: try: _tooltipLabel.deleteLater() except: # already deleted as parent window closed pass _tooltipLabel = None if _tooltipTimer: _tooltipTimer.stop() _tooltipTimer = None # true if invalid; print warning def checkInvalidFilename(str, dirsep=True): bad = invalidFilename(str, dirsep) if bad: showWarning(_("The following character can not be used: %s") % bad) return True return False
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/utils.py
utils.py
from anki.lang import _ from aqt.qt import * import aqt.forms from aqt.utils import saveGeom, restoreGeom, showWarning, askUser, shortcut, \ tooltip, openHelp, addCloseShortcut from anki.sound import clearAudioQueue from anki.hooks import addHook, remHook, runHook from anki.utils import stripHTMLMedia, isMac import aqt.editor, aqt.modelchooser, aqt.deckchooser class AddCards(QDialog): def __init__(self, mw): QDialog.__init__(self, None, Qt.Window) self.mw = mw self.form = aqt.forms.addcards.Ui_Dialog() self.form.setupUi(self) self.setWindowTitle(_("Add")) self.setMinimumHeight(300) self.setMinimumWidth(400) self.setupChoosers() self.setupEditor() self.setupButtons() self.onReset() self.history = [] self.forceClose = False restoreGeom(self, "add") addHook('reset', self.onReset) addHook('currentModelChanged', self.onReset) addCloseShortcut(self) self.show() self.setupNewNote() def setupEditor(self): self.editor = aqt.editor.Editor( self.mw, self.form.fieldsArea, self, True) def setupChoosers(self): self.modelChooser = aqt.modelchooser.ModelChooser( self.mw, self.form.modelArea) self.deckChooser = aqt.deckchooser.DeckChooser( self.mw, self.form.deckArea) def helpRequested(self): openHelp("addingnotes") def setupButtons(self): bb = self.form.buttonBox ar = QDialogButtonBox.ActionRole # add self.addButton = bb.addButton(_("Add"), ar) self.addButton.setShortcut(QKeySequence("Ctrl+Return")) self.addButton.setToolTip(shortcut(_("Add (shortcut: ctrl+enter)"))) self.connect(self.addButton, SIGNAL("clicked()"), self.addCards) # close self.closeButton = QPushButton(_("Close")) self.closeButton.setAutoDefault(False) bb.addButton(self.closeButton, QDialogButtonBox.RejectRole) # help self.helpButton = QPushButton(_("Help")) self.helpButton.setAutoDefault(False) bb.addButton(self.helpButton, QDialogButtonBox.HelpRole) self.connect(self.helpButton, SIGNAL("clicked()"), self.helpRequested) # history b = bb.addButton( _("History")+ u" ▾", ar) if isMac: sc = "Ctrl+Shift+H" else: sc = 
"Ctrl+H" b.setShortcut(QKeySequence(sc)) b.setToolTip(_("Shortcut: %s") % shortcut(sc)) self.connect(b, SIGNAL("clicked()"), self.onHistory) b.setEnabled(False) self.historyButton = b def setupNewNote(self, set=True): f = self.mw.col.newNote() if set: self.editor.setNote(f, focus=True) return f def onReset(self, model=None, keep=False): oldNote = self.editor.note note = self.setupNewNote(set=False) flds = note.model()['flds'] # copy fields from old note if oldNote: if not keep: self.removeTempNote(oldNote) for n in range(len(note.fields)): try: if not keep or flds[n]['sticky']: note.fields[n] = oldNote.fields[n] else: note.fields[n] = "" except IndexError: break self.editor.currentField = 0 self.editor.setNote(note, focus=True) def removeTempNote(self, note): if not note or not note.id: return # we don't have to worry about cards; just the note self.mw.col._remNotes([note.id]) def addHistory(self, note): txt = stripHTMLMedia(",".join(note.fields))[:30] self.history.insert(0, (note.id, txt)) self.history = self.history[:15] self.historyButton.setEnabled(True) def onHistory(self): m = QMenu(self) for nid, txt in self.history: a = m.addAction(_("Edit %s") % txt) a.connect(a, SIGNAL("triggered()"), lambda nid=nid: self.editHistory(nid)) runHook("AddCards.onHistory", self, m) m.exec_(self.historyButton.mapToGlobal(QPoint(0,0))) def editHistory(self, nid): browser = aqt.dialogs.open("Browser", self.mw) browser.form.searchEdit.lineEdit().setText("nid:%d" % nid) browser.onSearch() def addNote(self, note): note.model()['did'] = self.deckChooser.selectedId() ret = note.dupeOrEmpty() if ret == 1: showWarning(_( "The first field is empty."), help="AddItems#AddError") return if '{{cloze:' in note.model()['tmpls'][0]['qfmt']: if not self.mw.col.models._availClozeOrds( note.model(), note.joinedFields(), False): if not askUser(_("You have a cloze deletion note type " "but have not made any cloze deletions. 
Proceed?")): return cards = self.mw.col.addNote(note) if not cards: showWarning(_("""\ The input you have provided would make an empty \ question on all cards."""), help="AddItems") return self.addHistory(note) self.mw.requireReset() return note def addCards(self): self.editor.saveNow() self.editor.saveAddModeVars() note = self.editor.note note = self.addNote(note) if not note: return tooltip(_("Added"), period=500) # stop anything playing clearAudioQueue() self.onReset(keep=True) self.mw.col.autosave() def keyPressEvent(self, evt): "Show answer on RET or register answer." if (evt.key() in (Qt.Key_Enter, Qt.Key_Return) and self.editor.tags.hasFocus()): evt.accept() return return QDialog.keyPressEvent(self, evt) def reject(self): if not self.canClose(): return remHook('reset', self.onReset) remHook('currentModelChanged', self.onReset) clearAudioQueue() self.removeTempNote(self.editor.note) self.editor.setNote(None) self.modelChooser.cleanup() self.deckChooser.cleanup() self.mw.maybeReset() saveGeom(self, "add") aqt.dialogs.close("AddCards") QDialog.reject(self) def canClose(self): if (self.forceClose or self.editor.fieldsAreBlank() or askUser(_("Close and lose current input?"))): return True return False
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/addcards.py
addcards.py
from aqt.qt import * import aqt.forms from aqt import appVersion from aqt.utils import openLink def show(parent): dialog = QDialog(parent) abt = aqt.forms.about.Ui_About() abt.setupUi(dialog) abt.label.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks) def onLink(url): openLink(url.toString()) parent.connect(abt.label, SIGNAL("linkClicked(QUrl)"), onLink) abouttext = "<center><img src='qrc:/icons/anki-logo-thin.png'></center>" abouttext += '<p>' + _("Anki is a friendly, intelligent spaced learning \ system. It's free and open source.") abouttext += "<p>"+_("Anki is licensed under the AGPL3 license. Please see " "the license file in the source distribution for more information.") abouttext += '<p>' + _("Version %s") % appVersion + '<br>' abouttext += ("Qt %s PyQt %s<br>") % (QT_VERSION_STR, PYQT_VERSION_STR) abouttext += (_("<a href='%s'>Visit website</a>") % aqt.appWebsite) + \ "</span>" abouttext += '<p>' + _("Written by Damien Elmes, with patches, translation,\ testing and design from:<p>%(cont)s") % {'cont': u"""Aaron Harsh, Ádám Szegi, Alex Fraser, Andreas Klauer, Andrew Wright, Bernhard Ibertsberger, C. van Rooyen, Charlene Barina, Christian Krause, Christian Rusche, David Smith, Dave Druelinger, Dotan Cohen, Emilio Wuerges, Emmanuel Jarri, Frank Harper, Gregor Skumavc, H. 
Mijail, Houssam Salem, Ian Lewis, Immanuel Asmus, Iroiro, Jarvik7, Jin Eun-Deok, Jo Nakashima, Johanna Lindh, Julien Baley, Jussi Määttä, Kieran Clancy, LaC, Laurent Steffan, Luca Ban, Luciano Esposito, Marco Giancotti, Marcus Rubeus, Mari Egami, Michael Jürges, Mark Wilbur, Matthew Duggan, Matthew Holtz, Meelis Vasser, Michael Keppler, Michael Montague, Michael Penkov, Michal Čadil, Morteza Salehi, Nathanael Law, Nick Cook, Niklas Laxström, Nguyễn Hào Khôi, Norbert Nagold, Ole Guldberg, Pcsl88, Petr Michalec, Piotr Kubowicz, Richard Colley, Roland Sieker, Samson Melamed, Stefaan De Pooter, Silja Ijas, Snezana Lukic, Soren Bjornstad, Susanna Björverud, Sylvain Durand, Tacutu, Timm Preetz, Timo Paulssen, Ursus, Victor Suba, Volker Jansen, Volodymyr Goncharenko, Xtru %s 黃文龍 """% _("<!--about diag--> and")} abouttext += '<p>' + _("""\ The icons were obtained from various sources; please see the Anki source for credits.""") abouttext += '<p>' + _("If you have contributed and are not on this list, \ please get in touch.") abouttext += '<p>' + _("A big thanks to all the people who have provided \ suggestions, bug reports and donations.") abt.label.setHtml(abouttext) dialog.adjustSize() dialog.show() dialog.exec_()
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/about.py
about.py
from aqt.qt import * import aqt.editor from aqt.utils import saveGeom, restoreGeom from anki.hooks import addHook, remHook from anki.utils import isMac class EditCurrent(QDialog): def __init__(self, mw): if isMac: # use a separate window on os x so we can a clean menu QDialog.__init__(self, None, Qt.Window) else: QDialog.__init__(self, mw) QDialog.__init__(self, None, Qt.Window) self.mw = mw self.form = aqt.forms.editcurrent.Ui_Dialog() self.form.setupUi(self) self.setWindowTitle(_("Edit Current")) self.setMinimumHeight(400) self.setMinimumWidth(500) self.connect(self, SIGNAL("rejected()"), self.onSave) self.form.buttonBox.button(QDialogButtonBox.Close).setShortcut( QKeySequence("Ctrl+Return")) self.editor = aqt.editor.Editor(self.mw, self.form.fieldsArea, self) self.editor.setNote(self.mw.reviewer.card.note()) restoreGeom(self, "editcurrent") addHook("reset", self.onReset) self.mw.requireReset() self.show() # reset focus after open self.editor.web.setFocus() def onReset(self): # lazy approach for now: throw away edits try: n = self.mw.reviewer.card.note() n.load() except: # card's been deleted remHook("reset", self.onReset) self.editor.setNote(None) self.mw.reset() aqt.dialogs.close("EditCurrent") self.close() return self.editor.setNote(n) def onSave(self): remHook("reset", self.onReset) self.editor.saveNow() r = self.mw.reviewer try: r.card.load() except: # card was removed by clayout pass else: self.mw.reviewer.cardQueue.append(self.mw.reviewer.card) self.mw.moveToState("review") saveGeom(self, "editcurrent") aqt.dialogs.close("EditCurrent") def canClose(self): return True
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/editcurrent.py
editcurrent.py
import datetime, time from aqt.qt import * from aqt.utils import openFolder, showWarning, getText, openHelp, showInfo import aqt class Preferences(QDialog): def __init__(self, mw): if not mw.col: showInfo(_("Please open a profile first.")) return QDialog.__init__(self, mw, Qt.Window) self.mw = mw self.prof = self.mw.pm.profile self.form = aqt.forms.preferences.Ui_Preferences() self.form.setupUi(self) self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False) self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False) self.connect(self.form.buttonBox, SIGNAL("helpRequested()"), lambda: openHelp("profileprefs")) self.setupCollection() self.setupNetwork() self.setupBackup() self.setupOptions() self.show() def accept(self): # avoid exception if main window is already closed if not self.mw.col: return self.updateCollection() self.updateNetwork() self.updateBackup() self.updateOptions() self.mw.pm.save() self.mw.reset() self.done(0) def reject(self): self.accept() # Collection options ###################################################################### def setupCollection(self): import anki.consts as c f = self.form qc = self.mw.col.conf self.startDate = datetime.datetime.fromtimestamp(self.mw.col.crt) f.dayOffset.setValue(self.startDate.hour) f.lrnCutoff.setValue(qc['collapseTime']/60.0) f.timeLimit.setValue(qc['timeLim']/60.0) f.showEstimates.setChecked(qc['estTimes']) f.showProgress.setChecked(qc['dueCounts']) f.newSpread.addItems(c.newCardSchedulingLabels().values()) f.newSpread.setCurrentIndex(qc['newSpread']) f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True))) def updateCollection(self): f = self.form d = self.mw.col qc = d.conf qc['dueCounts'] = f.showProgress.isChecked() qc['estTimes'] = f.showEstimates.isChecked() qc['newSpread'] = f.newSpread.currentIndex() qc['timeLim'] = f.timeLimit.value()*60 qc['collapseTime'] = f.lrnCutoff.value()*60 qc['addToCur'] = not f.useCurrent.currentIndex() hrs = f.dayOffset.value() old = 
self.startDate date = datetime.datetime( old.year, old.month, old.day, hrs) d.crt = int(time.mktime(date.timetuple())) d.setMod() # Network ###################################################################### def setupNetwork(self): self.form.syncOnProgramOpen.setChecked( self.prof['autoSync']) self.form.syncMedia.setChecked( self.prof['syncMedia']) if not self.prof['syncKey']: self._hideAuth() else: self.form.syncUser.setText(self.prof.get('syncUser', "")) self.connect(self.form.syncDeauth, SIGNAL("clicked()"), self.onSyncDeauth) def _hideAuth(self): self.form.syncDeauth.setVisible(False) self.form.syncUser.setText("") self.form.syncLabel.setText(_("""\ <b>Synchronization</b><br> Not currently enabled; click the sync button in the main window to enable.""")) def onSyncDeauth(self): self.prof['syncKey'] = None self.mw.col.media.forceResync() self._hideAuth() def updateNetwork(self): self.prof['autoSync'] = self.form.syncOnProgramOpen.isChecked() self.prof['syncMedia'] = self.form.syncMedia.isChecked() if self.form.fullSync.isChecked(): self.mw.col.modSchema(check=False) self.mw.col.setMod() # Backup ###################################################################### def setupBackup(self): self.form.numBackups.setValue(self.prof['numBackups']) self.form.compressBackups.setChecked(self.prof.get("compressBackups", True)) self.connect(self.form.openBackupFolder, SIGNAL("linkActivated(QString)"), self.onOpenBackup) def onOpenBackup(self): openFolder(self.mw.pm.backupFolder()) def updateBackup(self): self.prof['numBackups'] = self.form.numBackups.value() self.prof['compressBackups'] = self.form.compressBackups.isChecked() # Basic & Advanced Options ###################################################################### def setupOptions(self): self.form.stripHTML.setChecked(self.prof['stripHTML']) self.form.pastePNG.setChecked(self.prof.get("pastePNG", False)) self.connect( self.form.profilePass, SIGNAL("clicked()"), self.onProfilePass) def updateOptions(self): 
self.prof['stripHTML'] = self.form.stripHTML.isChecked() self.prof['pastePNG'] = self.form.pastePNG.isChecked() def onProfilePass(self): pw, ret = getText(_("""\ Lock account with password, or leave blank:""")) if not ret: return if not pw: self.prof['key'] = None return pw2, ret = getText(_("Confirm password:")) if not ret: return if pw != pw2: showWarning(_("Passwords didn't match")) self.prof['key'] = self.mw.pm._pwhash(pw)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/preferences.py
preferences.py
# Profile handling ########################################################################## # - Saves in pickles rather than json to easily store Qt window state. # - Saves in sqlite rather than a flat file so the config can't be corrupted import os import random import cPickle import locale import re from aqt.qt import * from anki.db import DB from anki.utils import isMac, isWin, intTime, checksum from anki.lang import langs from aqt.utils import showWarning from aqt import appHelpSite import aqt.forms from send2trash import send2trash metaConf = dict( ver=0, updates=True, created=intTime(), id=random.randrange(0, 2**63), lastMsg=-1, suppressUpdate=False, firstRun=True, defaultLang=None, disabledAddons=[], ) profileConf = dict( # profile key=None, mainWindowGeom=None, mainWindowState=None, numBackups=30, lastOptimize=intTime(), # editing fullSearch=False, searchHistory=[], lastColour="#00f", stripHTML=True, pastePNG=False, # not exposed in gui deleteMedia=False, preserveKeyboard=True, # syncing syncKey=None, syncMedia=True, autoSync=True, # importing allowHTML=False, importMode=1, ) class ProfileManager(object): def __init__(self, base=None, profile=None): self.name = None self.db = None # instantiate base folder if base: self.base = os.path.abspath(base) else: self.base = self._defaultBase() self.ensureBaseExists() # load metadata self.firstRun = self._loadMeta() # did the user request a profile to start up with? if profile: try: self.load(profile) except TypeError: raise Exception("Provided profile does not exist.") # Base creation ###################################################################### def ensureBaseExists(self): try: self._ensureExists(self.base) except: # can't translate, as lang not initialized QMessageBox.critical( None, "Error", """\ Anki could not create the folder %s. Please ensure that location is not \ read-only and you have permission to write to it. 
If you cannot fix this \
issue, please see the documentation for information on running Anki from \
a flash drive.""" % self.base)
            raise

    # Profile load/save
    ######################################################################

    def profiles(self):
        "Return the sorted profile names, excluding the internal _global row."
        # names are stored utf8-encoded in prefs.db; decode for display
        return sorted(
            unicode(x, "utf8") for x in
            self.db.list("select name from profiles")
            if x != "_global")

    def load(self, name, passwd=None):
        """Load the named profile into self.profile.

        Returns False (and clears self.name) when the profile is
        password-protected and passwd does not hash to the stored key;
        True otherwise.  Loading "_global" only checks the password and
        does not switch the active profile.
        """
        prof = cPickle.loads(
            self.db.scalar("select data from profiles where name = ?",
                           name.encode("utf8")))
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        "Persist the active profile and the global meta row to prefs.db."
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.profile),
                        self.name.encode("utf8"))
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        "Insert a new profile row initialised from the default profileConf."
        prof = profileConf.copy()
        self.db.execute("insert into profiles values (?, ?)",
                        name.encode("utf8"), cPickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        "Delete the profile row and move its on-disk folder to the trash."
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?",
                        name.encode("utf8"))
        self.db.commit()

    def rename(self, name):
        """Rename the active profile, both in the db and on disk.

        On failure (target folder exists, or the OS refuses the rename)
        the previous name is restored and the db change rolled back.
        """
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (
                    oldFolder.lower() == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = ''.join([oldFolder, '-temp'])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(_("Please remove the folder %s and try again.")
                                % midFolder)
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return
        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name.encode("utf8"), oldName.encode("utf-8"))
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except WindowsError as e:
            self.db.rollback()
            # NOTE(review): '"Access is denied" in e' relies on Python 2
            # exceptions being iterable over their args tuple, so this only
            # matches when strerror is exactly "Access is denied" — confirm
            # this is intended rather than 'in str(e)'.
            if "Access is denied" in e:
                showWarning(_("""\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        "Path of the active profile's folder, created on demand by default."
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        "Path of the shared add-on folder (created if missing)."
        return self._ensureExists(os.path.join(self.base, "addons"))

    def backupFolder(self):
        "Path of the active profile's backup folder (created if missing)."
        return self._ensureExists(
            os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        "Path of the active profile's collection database."
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        "Create the folder if needed and return the path unchanged."
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _defaultBase(self):
        "Pick the default base data folder for the current platform."
        if isWin:
            # the qt5 branch is intentionally disabled in this version
            if False: #qtmajor >= 5:
                loc = QStandardPaths.writeableLocation(QStandardPaths.DocumentsLocation)
            else:
                loc = QDesktopServices.storageLocation(QDesktopServices.DocumentsLocation)
            return os.path.join(loc, "Anki")
        elif isMac:
            return os.path.expanduser("~/Documents/Anki")
        else:
            # use Documents/Anki on new installs, ~/Anki on existing ones
            p = os.path.expanduser("~/Anki")
            if os.path.exists(p):
                return p
            else:
                loc = QDesktopServices.storageLocation(QDesktopServices.DocumentsLocation)
                # NOTE(review): loc[:-1] drops the last character before
                # comparing with the home location — presumably a trailing
                # path separator; verify against the Qt version in use.
                if loc[:-1] == QDesktopServices.storageLocation(
                        QDesktopServices.HomeLocation):
                    # occasionally "documentsLocation" will return the home
                    # folder because the Documents folder isn't configured
                    # properly; fall back to an English path
                    return os.path.expanduser("~/Documents/Anki")
                else:
                    return os.path.join(loc, "Anki")

    def _loadMeta(self):
        """Open (or create) prefs.db and load the _global meta row.

        If the db or the pickled meta row is unreadable, the file is moved
        aside as prefs.db.broken and loading is retried from scratch.
        Returns True when a fresh meta row was created, None otherwise.
        """
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            broken = path+".broken"
            if os.path.exists(broken):
                os.unlink(broken)
            os.rename(path, broken)
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
        try:
            self.db = DB(path, text=str)
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created
            try:
                self.meta = cPickle.loads(
                    self.db.scalar(
                        "select data from profiles where name = '_global'"))
                return
            except:
                recover()
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)",
                        cPickle.dumps(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write((_("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite +  "#startupopts")).encode("utf8"))

    def _pwhash(self, passwd):
        # salt the password with the meta id so hashes aren't portable
        # between installations
        return checksum(unicode(self.meta['id'])+unicode(passwd))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        "Show the first-run language picker (modal; cannot be dismissed)."
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import __builtin__
        __builtin__.__dict__['_'] = lambda x: x
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                # swallow escape/close so a language must be chosen
                pass
        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.connect(d, SIGNAL("accepted()"), self._onLangSelected)
        d.connect(d, SIGNAL("rejected()"), lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        # strip the country suffix except for locales whose translations
        # are country-specific
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        "Confirm the chosen language and store it in the global meta row."
        f = self.langForm
        obj = langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        # deliberately untranslated: shown before i18n is set up
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(
            None, "Anki", en%name, QMessageBox.Yes | QMessageBox.No,
            QMessageBox.No)
        if r != QMessageBox.Yes:
            # re-show the picker until a choice is confirmed
            return self._setDefaultLang()
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/profiles.py
profiles.py
from aqt.utils import openLink, shortcut, tooltip
from anki.utils import isMac
import aqt
from anki.sound import clearAudioQueue

class Overview(object):
    "Deck overview."

    def __init__(self, mw):
        self.mw = mw
        self.web = mw.web
        self.bottom = aqt.toolbar.BottomBar(mw, mw.bottomWeb)

    def show(self):
        "Install link/key handlers and render the overview screen."
        clearAudioQueue()
        self.web.setLinkHandler(self._linkHandler)
        self.web.setKeyHandler(None)
        self.mw.keyHandler = self._keyHandler
        self.mw.web.setFocus()
        self.refresh()

    def refresh(self):
        "Reset the scheduler and redraw both page and bottom bar."
        self.mw.col.reset()
        self._renderPage()
        self._renderBottom()

    # Handlers
    ############################################################

    def _linkHandler(self, url):
        "Dispatch clicks on the overview page's pseudo-links."
        if url == "study":
            self.mw.col.startTimebox()
            self.mw.moveToState("review")
            # if the state didn't change, nothing was due
            if self.mw.state == "overview":
                tooltip(_("No cards are due yet."))
        elif url == "anki":
            print "anki menu"
        elif url == "opts":
            self.mw.onDeckConf()
        elif url == "cram":
            deck = self.mw.col.decks.current()
            self.mw.onCram("'deck:%s'" % deck['name'])
        elif url == "refresh":
            self.mw.col.sched.rebuildDyn()
            self.mw.reset()
        elif url == "empty":
            self.mw.col.sched.emptyDyn(self.mw.col.decks.selected())
            self.mw.reset()
        elif url == "decks":
            self.mw.moveToState("deckBrowser")
        elif url == "review":
            # only reachable when _renderPage set self.sid/self.sidVer
            openLink(aqt.appShared+"info/%s?v=%s"%(self.sid, self.sidVer))
        elif url == "studymore":
            self.onStudyMore()
        elif url == "unbury":
            self.mw.col.sched.unburyCardsForDeck()
            self.mw.reset()
        elif url.lower().startswith("http"):
            openLink(url)

    def _keyHandler(self, evt):
        "Keyboard shortcuts; some only apply to filtered (cram) decks."
        cram = self.mw.col.decks.current()['dyn']
        key = unicode(evt.text())
        if key == "o":
            self.mw.onDeckConf()
        if key == "r" and cram:
            self.mw.col.sched.rebuildDyn()
            self.mw.reset()
        if key == "e" and cram:
            self.mw.col.sched.emptyDyn(self.mw.col.decks.selected())
            self.mw.reset()
        if key == "c" and not cram:
            self.onStudyMore()
        if key == "u":
            self.mw.col.sched.unburyCardsForDeck()
            self.mw.reset()

    # HTML
    ############################################################

    def _renderPage(self):
        "Fill the _body template and push it to the webview."
        but = self.mw.button
        deck = self.mw.col.decks.current()
        self.sid = deck.get("sharedFrom")
        if self.sid:
            self.sidVer = deck.get("ver", None)
            shareLink = '<a class=smallLink href="review">Reviews and Updates</a>'
        else:
            shareLink = ""
        self.web.stdHtml(self._body % dict(
            deck=deck['name'],
            shareLink=shareLink,
            desc=self._desc(deck),
            table=self._table()
            ), self.mw.sharedCSS + self._css)

    def _desc(self, deck):
        "HTML description block: fixed text for filtered decks, else the deck's own."
        if deck['dyn']:
            desc = _("""\
This is a special deck for studying outside of the normal schedule.""")
            desc += " " + _("""\
Cards will be automatically returned to their original decks after you review \
them.""")
            desc += " " + _("""\
Deleting this deck from the deck list will return all remaining cards \
to their original deck.""")
        else:
            desc = deck.get("desc", "")
        if not desc:
            return "<p>"
        if deck['dyn']:
            dyn = "dyn"
        else:
            dyn = ""
        return '<div class="descfont descmid description %s">%s</div>' % (
            dyn, desc)

    def _table(self):
        "HTML for the due-count table, or the congratulations message."
        counts = list(self.mw.col.sched.counts())
        finished = not sum(counts)
        # cap displayed counts to keep the table narrow
        for n in range(len(counts)):
            if counts[n] >= 1000:
                counts[n] = "1000+"
        but = self.mw.button
        if finished:
            return '<div style="white-space: pre-wrap;">%s</div>' % (
                self.mw.col.sched.finishedMsg())
        else:
            return '''
<table width=300 cellpadding=5>
<tr><td align=center valign=top>
<table cellspacing=5>
<tr><td>%s:</td><td><b><font color=#00a>%s</font></b></td></tr>
<tr><td>%s:</td><td><b><font color=#C35617>%s</font></b></td></tr>
<tr><td>%s:</td><td><b><font color=#0a0>%s</font></b></td></tr>
</table>
</td><td align=center>
%s</td></tr></table>''' % (
    _("New"), counts[0],
    _("Learning"), counts[1],
    _("To Review"), counts[2],
    but("study", _("Study Now"), id="study"))

    _body = """
<center>
<h3>%(deck)s</h3>
%(shareLink)s
%(desc)s
%(table)s
</center>
<script>$(function () { $("#study").focus(); });</script>
"""

    _css = """
.smallLink { font-size: 10px; }
h3 { margin-bottom: 0; }
.descfont {
padding: 1em; color: #333;
}
.description {
white-space: pre-wrap;
}
#fulldesc {
display:none;
}
.descmid {
width: 70%;
margin: 0 auto 0;
text-align: left;
}
.dyn {
text-align: center;
}
"""

    # Bottom area
    ######################################################################

    def _renderBottom(self):
        "Build the bottom-bar buttons appropriate for the current deck."
        links = [
            ["O", "opts", _("Options")],
        ]
        if self.mw.col.decks.current()['dyn']:
            links.append(["R", "refresh", _("Rebuild")])
            links.append(["E", "empty", _("Empty")])
        else:
            links.append(["C", "studymore", _("Custom Study")])
            #links.append(["F", "cram", _("Filter/Cram")])
        if self.mw.col.sched.haveBuried():
            links.append(["U", "unbury", _("Unbury")])
        buf = ""
        for b in links:
            if b[0]:
                b[0] = _("Shortcut key: %s") % shortcut(b[0])
            buf += """
<button title="%s" onclick='py.link(\"%s\");'>%s</button>""" % tuple(b)
        self.bottom.draw(buf)
        if isMac:
            size = 28
        else:
            size = 36 + self.mw.fontHeightDelta*3
        self.bottom.web.setFixedHeight(size)
        self.bottom.web.setLinkHandler(self._linkHandler)

    # Studying more
    ######################################################################

    def onStudyMore(self):
        # imported lazily to avoid a startup dependency
        import aqt.customstudy
        aqt.customstudy.CustomStudy(self.mw)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/overview.py
overview.py
import getpass import os import sys import optparse import tempfile import __builtin__ import locale import gettext from aqt.qt import * import anki.lang from anki.consts import HELP_SITE from anki.lang import langDir from anki.utils import isMac from anki import version as _version appVersion=_version appWebsite="http://ankisrs.net/" appChanges="http://ankisrs.net/docs/changes.html" appDonate="http://ankisrs.net/support/" appShared="https://ankiweb.net/shared/" appUpdate="https://ankiweb.net/update/desktop" appHelpSite=HELP_SITE mw = None # set on init moduleDir = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0] try: import aqt.forms except ImportError, e: if "forms" in str(e): print "If you're running from git, did you run build_ui.sh?" print raise from anki.utils import checksum # Dialog manager - manages modeless windows ########################################################################## class DialogManager(object): def __init__(self): from aqt import addcards, browser, editcurrent self._dialogs = { "AddCards": [addcards.AddCards, None], "Browser": [browser.Browser, None], "EditCurrent": [editcurrent.EditCurrent, None], } def open(self, name, *args): (creator, instance) = self._dialogs[name] if instance: instance.setWindowState(instance.windowState() | Qt.WindowActive) instance.activateWindow() instance.raise_() return instance else: instance = creator(*args) self._dialogs[name][1] = instance return instance def close(self, name): self._dialogs[name] = [self._dialogs[name][0], None] def closeAll(self): "True if all closed successfully." 
for (n, (creator, instance)) in self._dialogs.items(): if instance: if not instance.canClose(): return False instance.forceClose = True instance.close() self.close(n) return True dialogs = DialogManager() # Language handling ########################################################################## # Qt requires its translator to be installed before any GUI widgets are # loaded, and we need the Qt language to match the gettext language or # translated shortcuts will not work. _gtrans = None _qtrans = None def setupLang(pm, app, force=None): global _gtrans, _qtrans try: locale.setlocale(locale.LC_ALL, '') except: pass lang = force or pm.meta["defaultLang"] dir = langDir() # gettext _gtrans = gettext.translation( 'anki', dir, languages=[lang], fallback=True) __builtin__.__dict__['_'] = _gtrans.ugettext __builtin__.__dict__['ngettext'] = _gtrans.ungettext anki.lang.setLang(lang, local=False) if lang in ("he","ar","fa"): app.setLayoutDirection(Qt.RightToLeft) else: app.setLayoutDirection(Qt.LeftToRight) # qt _qtrans = QTranslator() if _qtrans.load("qt_" + lang, dir): app.installTranslator(_qtrans) # App initialisation ########################################################################## class AnkiApp(QApplication): # Single instance support on Win32/Linux ################################################## KEY = "anki"+checksum(getpass.getuser()) TMOUT = 5000 def __init__(self, argv): QApplication.__init__(self, argv) self._argv = argv def secondInstance(self): # we accept only one command line argument. if it's missing, send # a blank screen to just raise the existing window opts, args = parseArgs(self._argv) buf = "raise" if args and args[0]: buf = os.path.abspath(args[0]) if self.sendMsg(buf): print "Already running; reusing existing instance." 
return True else: # send failed, so we're the first instance or the # previous instance died QLocalServer.removeServer(self.KEY) self._srv = QLocalServer(self) self.connect(self._srv, SIGNAL("newConnection()"), self.onRecv) self._srv.listen(self.KEY) return False def sendMsg(self, txt): sock = QLocalSocket(self) sock.connectToServer(self.KEY, QIODevice.WriteOnly) if not sock.waitForConnected(self.TMOUT): # first instance or previous instance dead return False sock.write(txt) if not sock.waitForBytesWritten(self.TMOUT): # existing instance running but hung return False sock.disconnectFromServer() return True def onRecv(self): sock = self._srv.nextPendingConnection() if not sock.waitForReadyRead(self.TMOUT): sys.stderr.write(sock.errorString()) return buf = sock.readAll() buf = unicode(buf, sys.getfilesystemencoding(), "ignore") self.emit(SIGNAL("appMsg"), buf) sock.disconnectFromServer() # OS X file/url handler ################################################## def event(self, evt): if evt.type() == QEvent.FileOpen: self.emit(SIGNAL("appMsg"), evt.file() or "raise") return True return QApplication.event(self, evt) def parseArgs(argv): "Returns (opts, args)." 
# py2app fails to strip this in some instances, then anki dies # as there's no such profile if isMac and len(argv) > 1 and argv[1].startswith("-psn"): argv = [argv[0]] parser = optparse.OptionParser(version="%prog " + appVersion) parser.usage = "%prog [OPTIONS] [file to import]" parser.add_option("-b", "--base", help="path to base folder") parser.add_option("-p", "--profile", help="profile name to load") parser.add_option("-l", "--lang", help="interface language (en, de, etc)") return parser.parse_args(argv[1:]) def run(): try: _run() except Exception, e: QMessageBox.critical(None, "Startup Error", "Please notify support of this error:\n\n"+ traceback.format_exc()) def _run(): global mw # parse args opts, args = parseArgs(sys.argv) opts.base = unicode(opts.base or "", sys.getfilesystemencoding()) opts.profile = unicode(opts.profile or "", sys.getfilesystemencoding()) # on osx we'll need to add the qt plugins to the search path if isMac and getattr(sys, 'frozen', None): rd = os.path.abspath(moduleDir + "/../../..") QCoreApplication.setLibraryPaths([rd]) if isMac: QFont.insertSubstitution(".Lucida Grande UI", "Lucida Grande") # create the app app = AnkiApp(sys.argv) QCoreApplication.setApplicationName("Anki") if app.secondInstance(): # we've signaled the primary instance, so we should close return # disable icons on mac; this must be done before window created if isMac: app.setAttribute(Qt.AA_DontShowIconsInMenus) # we must have a usable temp dir try: tempfile.gettempdir() except: QMessageBox.critical( None, "Error", """\ No usable temporary folder found. Make sure C:\\temp exists or TEMP in your \ environment points to a valid, writable folder.""") return # qt version must be up to date if qtmajor <= 4 and qtminor <= 6: QMessageBox.warning( None, "Error", "Your Qt version is known to be buggy. 
Until you " "upgrade to a newer Qt, you may experience issues such as images " "failing to show up during review.") # profile manager from aqt.profiles import ProfileManager pm = ProfileManager(opts.base, opts.profile) # i18n setupLang(pm, app, opts.lang) # remaining pm init pm.ensureProfile() # load the main window import aqt.main mw = aqt.main.AnkiQt(app, pm, args) app.exec_()
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/__init__.py
__init__.py
import time, re, traceback
from aqt.qt import *
from anki.sync import httpCon
from aqt.utils import showWarning
from anki.hooks import addHook, remHook
import aqt.sync # monkey-patches httplib2

def download(mw, code):
    "Download addon/deck from AnkiWeb. On success caller must stop progress diag."
    # check code is valid
    try:
        code = int(code)
    except ValueError:
        showWarning(_("Invalid code."))
        return
    # create downloading thread
    thread = Downloader(code)
    def onRecv():
        # progress callback on the main thread, driven by the "recv" signal
        try:
            mw.progress.update(label="%dKB downloaded" % (thread.recvTotal/1024))
        except NameError:
            # some users report the following error on long downloads
            # NameError: free variable 'mw' referenced before assignment in enclosing scope
            # unsure why this is happening, but guard against throwing the
            # error
            pass
    mw.connect(thread, SIGNAL("recv"), onRecv)
    thread.start()
    mw.progress.start(immediate=True)
    # keep the UI responsive while the worker thread runs
    while not thread.isFinished():
        mw.app.processEvents()
        thread.wait(100)
    if not thread.error:
        # success; returns (data, filename)
        return thread.data, thread.fname
    else:
        mw.progress.finish()
        showWarning(_("Download failed: %s") % thread.error)

class Downloader(QThread):
    """Worker thread fetching one shared item from AnkiWeb.

    After run() finishes: self.error is None and self.data/self.fname are
    set on success, otherwise self.error holds a message.
    """

    def __init__(self, code):
        QThread.__init__(self)
        self.code = code
        self.error = None

    def run(self):
        # setup progress handler
        self.byteUpdate = time.time()
        self.recvTotal = 0
        def canPost():
            # rate-limit "recv" signals to at most one per 100ms
            if (time.time() - self.byteUpdate) > 0.1:
                self.byteUpdate = time.time()
                return True
        def recvEvent(bytes):
            self.recvTotal += bytes
            if canPost():
                self.emit(SIGNAL("recv"))
        addHook("httpRecv", recvEvent)
        con = httpCon()
        try:
            resp, cont = con.request(
                aqt.appShared + "download/%d" % self.code)
        except Exception, e:
            exc = traceback.format_exc()
            # prefer the exception's own message; fall back to the traceback
            try:
                self.error = unicode(e[0], "utf8", "ignore")
            except:
                self.error = unicode(exc, "utf8", "ignore")
            return
        finally:
            remHook("httpRecv", recvEvent)
        if resp['status'] == '200':
            self.error = None
            # filename comes from the server's content-disposition header
            self.fname = re.match("attachment; filename=(.+)",
                                  resp['content-disposition']).group(1)
            self.data = cont
        elif resp['status'] == '403':
            self.error = _("Invalid code.")
        else:
            self.error = _("Error downloading: %s") % resp['status']
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/downloader.py
downloader.py
import os
import re
from aqt.qt import *
import aqt
from aqt.utils import getSaveFile, tooltip, showWarning, askUser, \
    checkInvalidFilename
from anki.exporting import exporters

class ExportDialog(QDialog):
    "Modal dialog driving the deck/collection export flow."

    def __init__(self, mw, did=None):
        QDialog.__init__(self, mw, Qt.Window)
        self.mw = mw
        self.col = mw.col
        self.frm = aqt.forms.exporting.Ui_ExportDialog()
        self.frm.setupUi(self)
        self.exporter = None
        self.setup(did)
        self.exec_()

    def setup(self, did):
        "Populate format/deck combos; preselect the deck when did is given."
        self.frm.format.insertItems(0, list(zip(*exporters())[0]))
        self.connect(self.frm.format, SIGNAL("activated(int)"),
                     self.exporterChanged)
        self.exporterChanged(0)
        self.decks = [_("All Decks")] + sorted(self.col.decks.allNames())
        self.frm.deck.addItems(self.decks)
        # save button
        b = QPushButton(_("Export..."))
        self.frm.buttonBox.addButton(b, QDialogButtonBox.AcceptRole)
        # set default option if accessed through deck button
        if did:
            name = self.mw.col.decks.get(did)['name']
            index = self.frm.deck.findText(name)
            self.frm.deck.setCurrentIndex(index)

    def exporterChanged(self, idx):
        "Instantiate the chosen exporter and show only its options."
        self.exporter = exporters()[idx][1](self.col)
        # exporter capabilities are duck-typed via attribute presence
        self.isApkg = hasattr(self.exporter, "includeSched")
        self.isTextNote = hasattr(self.exporter, "includeTags")
        self.hideTags = hasattr(self.exporter, "hideTags")
        self.frm.includeSched.setVisible(self.isApkg)
        self.frm.includeMedia.setVisible(self.isApkg)
        self.frm.includeTags.setVisible(
            not self.isApkg and not self.hideTags)

    def accept(self):
        "Run the export: pick a destination, then write via the exporter."
        self.exporter.includeSched = (
            self.frm.includeSched.isChecked())
        self.exporter.includeMedia = (
            self.frm.includeMedia.isChecked())
        self.exporter.includeTags = (
            self.frm.includeTags.isChecked())
        # index 0 is "All Decks"
        if not self.frm.deck.currentIndex():
            self.exporter.did = None
        else:
            name = self.decks[self.frm.deck.currentIndex()]
            self.exporter.did = self.col.decks.id(name)
        if (self.isApkg and self.exporter.includeSched and not
            self.exporter.did):
            verbatim = True
            # it's a verbatim apkg export, so place on desktop instead of
            # choosing file; use homedir if no desktop
            usingHomedir = False
            file = os.path.join(QDesktopServices.storageLocation(
                QDesktopServices.DesktopLocation), "collection.apkg")
            if not os.path.exists(os.path.dirname(file)):
                usingHomedir = True
                file = os.path.join(QDesktopServices.storageLocation(
                    QDesktopServices.HomeLocation), "collection.apkg")
            if os.path.exists(file):
                if usingHomedir:
                    question = _("%s already exists in your home directory. Overwrite it?")
                else:
                    question = _("%s already exists on your desktop. Overwrite it?")
                if not askUser(question % "collection.apkg"):
                    return
        else:
            verbatim = False
            # Get deck name and remove invalid filename characters
            deck_name = self.decks[self.frm.deck.currentIndex()]
            deck_name = re.sub('[\\\\/?<>:*|"^]', '_', deck_name)
            filename = os.path.join(aqt.mw.pm.base,
                                    u'{0}{1}'.format(deck_name, self.exporter.ext))
            # loop until the user picks a valid filename or cancels
            while 1:
                file = getSaveFile(self, _("Export"), "export",
                                   self.exporter.key, self.exporter.ext,
                                   fname=filename)
                if not file:
                    return
                if checkInvalidFilename(os.path.basename(file), dirsep=False):
                    continue
                break
        self.hide()
        if file:
            self.mw.progress.start(immediate=True)
            try:
                # probe that the destination is writable before exporting
                f = open(file, "wb")
                f.close()
            except (OSError, IOError), e:
                showWarning(_("Couldn't save file: %s") % unicode(e))
            else:
                # remove the probe file; the exporter creates its own
                os.unlink(file)
                self.exporter.exportInto(file)
                if verbatim:
                    if usingHomedir:
                        msg = _("A file called %s was saved in your home directory.")
                    else:
                        msg = _("A file called %s was saved on your desktop.")
                    msg = msg % "collection.apkg"
                    period = 5000
                else:
                    period = 3000
                    if self.isTextNote:
                        msg = ngettext("%d note exported.", "%d notes exported.",
                                       self.exporter.count) % self.exporter.count
                    else:
                        msg = ngettext("%d card exported.", "%d cards exported.",
                                       self.exporter.count) % self.exporter.count
                tooltip(msg, period=period)
            finally:
                self.mw.progress.finish()
        QDialog.accept(self)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/exporting.py
exporting.py
from aqt.qt import *
from operator import itemgetter
from aqt.utils import showInfo, askUser, getText, maybeHideClose, openHelp
import aqt.modelchooser, aqt.clayout
from anki import stdmodels
from aqt.utils import saveGeom, restoreGeom

class Models(QDialog):
    "Note type (model) manager dialog."

    def __init__(self, mw, parent=None, fromMain=False):
        self.mw = mw
        self.parent = parent or mw
        # fromMain enables the Fields/Cards buttons, which need a full
        # main-window context
        self.fromMain = fromMain
        QDialog.__init__(self, self.parent, Qt.Window)
        self.col = mw.col
        self.mm = self.col.models
        self.mw.checkpoint(_("Note Types"))
        self.form = aqt.forms.models.Ui_Dialog()
        self.form.setupUi(self)
        self.connect(self.form.buttonBox, SIGNAL("helpRequested()"),
                     lambda: openHelp("notetypes"))
        self.setupModels()
        restoreGeom(self, "models")
        self.exec_()

    # Models
    ##########################################################################

    def setupModels(self):
        "Create the side buttons and wire list-selection signals."
        self.model = None
        c = self.connect; f = self.form; box = f.buttonBox
        s = SIGNAL("clicked()")
        t = QDialogButtonBox.ActionRole
        b = box.addButton(_("Add"), t)
        c(b, s, self.onAdd)
        b = box.addButton(_("Rename"), t)
        c(b, s, self.onRename)
        b = box.addButton(_("Delete"), t)
        c(b, s, self.onDelete)
        if self.fromMain:
            b = box.addButton(_("Fields..."), t)
            c(b, s, self.onFields)
            b = box.addButton(_("Cards..."), t)
            c(b, s, self.onCards)
        b = box.addButton(_("Options..."), t)
        c(b, s, self.onAdvanced)
        c(f.modelsList, SIGNAL("currentRowChanged(int)"), self.modelChanged)
        c(f.modelsList, SIGNAL("itemDoubleClicked(QListWidgetItem*)"),
          self.onRename)
        self.updateModelsList()
        f.modelsList.setCurrentRow(0)
        maybeHideClose(box)

    def onRename(self):
        "Prompt for and apply a new name for the selected model."
        txt = getText(_("New name:"), default=self.model['name'])
        if txt[1] and txt[0]:
            self.model['name'] = txt[0]
            self.mm.save(self.model)
        self.updateModelsList()

    def updateModelsList(self):
        "Rebuild the list widget from the collection, preserving selection."
        row = self.form.modelsList.currentRow()
        if row == -1:
            row = 0
        self.models = self.col.models.all()
        self.models.sort(key=itemgetter("name"))
        self.form.modelsList.clear()
        for m in self.models:
            mUse = self.mm.useCount(m)
            mUse = ngettext("%d note", "%d notes", mUse) % mUse
            item = QListWidgetItem("%s [%s]" % (m['name'], mUse))
            self.form.modelsList.addItem(item)
        self.form.modelsList.setCurrentRow(row)

    def modelChanged(self):
        "Flush the previous selection, then track the newly selected model."
        if self.model:
            self.saveModel()
        idx = self.form.modelsList.currentRow()
        self.model = self.models[idx]

    def onAdd(self):
        "Create a new model (standard or clone) and name it."
        m = AddModel(self.mw, self).get()
        if m:
            txt = getText(_("Name:"), default=m['name'])[0]
            if txt:
                m['name'] = txt
            self.mm.ensureNameUnique(m)
            self.mm.save(m)
            self.updateModelsList()

    def onDelete(self):
        "Delete the selected model after confirmation; keep at least one."
        if len(self.models) < 2:
            showInfo(_("Please add another note type first."),
                     parent=self)
            return
        if self.mm.useCount(self.model):
            msg = _("Delete this note type and all its cards?")
        else:
            msg = _("Delete this unused note type?")
        if not askUser(msg, parent=self):
            return
        self.mm.rem(self.model)
        self.model = None
        self.updateModelsList()

    def onAdvanced(self):
        "Edit the model's LaTeX header/footer in a sub-dialog."
        d = QDialog(self)
        frm = aqt.forms.modelopts.Ui_Dialog()
        frm.setupUi(d)
        frm.latexHeader.setText(self.model['latexPre'])
        frm.latexFooter.setText(self.model['latexPost'])
        d.setWindowTitle(_("Options for %s") % self.model['name'])
        self.connect(
            frm.buttonBox, SIGNAL("helpRequested()"), lambda: openHelp("latex"))
        restoreGeom(d, "modelopts")
        d.exec_()
        saveGeom(d, "modelopts")
        # applied unconditionally, even if the sub-dialog was cancelled
        self.model['latexPre'] = unicode(frm.latexHeader.toPlainText())
        self.model['latexPost'] = unicode(frm.latexFooter.toPlainText())

    def saveModel(self):
        "Persist the currently selected model."
        self.mm.save(self.model)

    def _tmpNote(self):
        "Build a throwaway note with placeholder field values for previews."
        self.mm.setCurrent(self.model)
        n = self.col.newNote(forDeck=False)
        for name in n.keys():
            n[name] = "("+name+")"
        try:
            if "{{cloze:Text}}" in self.model['tmpls'][0]['qfmt']:
                n['Text'] = _("This is a {{c1::sample}} cloze deletion.")
        except:
            # invalid cloze
            pass
        return n

    def onFields(self):
        from aqt.fields import FieldDialog
        n = self._tmpNote()
        FieldDialog(self.mw, n, parent=self)

    def onCards(self):
        from aqt.clayout import CardLayout
        n = self._tmpNote()
        CardLayout(self.mw, n, ord=0, parent=self, addMode=True)

    # Cleanup
    ##########################################################################

    # need to flush model on change or reject

    def reject(self):
        self.saveModel()
        self.mw.reset()
        saveGeom(self, "models")
        QDialog.reject(self)

class AddModel(QDialog):
    "Picker dialog: add a standard note type or clone an existing one."

    def __init__(self, mw, parent=None):
        self.parent = parent or mw
        self.mw = mw
        self.col = mw.col
        QDialog.__init__(self, self.parent, Qt.Window)
        self.model = None
        self.dialog = aqt.forms.addmodel.Ui_Dialog()
        self.dialog.setupUi(self)
        # standard models
        self.models = []
        for (name, func) in stdmodels.models:
            if callable(name):
                name = name()
            item = QListWidgetItem(_("Add: %s") % name)
            self.dialog.models.addItem(item)
            # (isStandard, factory-or-model) pairs, parallel to list rows
            self.models.append((True, func))
        # add copies
        for m in sorted(self.col.models.all(), key=itemgetter("name")):
            item = QListWidgetItem(_("Clone: %s") % m['name'])
            self.dialog.models.addItem(item)
            self.models.append((False, m))
        self.dialog.models.setCurrentRow(0)
        # the list widget will swallow the enter key
        s = QShortcut(QKeySequence("Return"), self)
        self.connect(s, SIGNAL("activated()"), self.accept)
        # help
        self.connect(self.dialog.buttonBox, SIGNAL("helpRequested()"),
                     self.onHelp)

    def get(self):
        "Run modally; return the created model, or None if cancelled."
        self.exec_()
        return self.model

    def reject(self):
        QDialog.reject(self)

    def accept(self):
        (isStd, model) = self.models[self.dialog.models.currentRow()]
        if isStd:
            # create
            self.model = model(self.col)
        else:
            # add copy to deck
            self.model = self.mw.col.models.copy(model)
            self.mw.col.models.setCurrent(self.model)
        QDialog.accept(self)

    def onHelp(self):
        openHelp("notetypes")
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/models.py
models.py
import sys, os, traceback
from cStringIO import StringIO
import zipfile
from aqt.qt import *
from aqt.utils import showInfo, openFolder, isWin, openLink, \
    askUser, restoreGeom, saveGeom, showWarning
from zipfile import ZipFile
import aqt.forms
import aqt
from aqt.downloader import download
from anki.lang import _

# in the future, it would be nice to save the addon id and unzippped file list
# to the config so that we can clear up all files and check for updates

class AddonManager(object):
    "Loads .py add-ons from the shared addons folder and manages their menus."

    def __init__(self, mw):
        self.mw = mw
        f = self.mw.form; s = SIGNAL("triggered()")
        self.mw.connect(f.actionOpenPluginFolder, s, self.onOpenAddonFolder)
        self.mw.connect(f.actionDownloadSharedPlugin, s, self.onGetAddons)
        self._menus = []
        if isWin:
            self.clearAddonCache()
        # make add-ons importable by module name
        sys.path.insert(0, self.addonsFolder())
        if not self.mw.safeMode:
            self.loadAddons()

    def files(self):
        "Names of the top-level .py add-on files."
        return [f for f in os.listdir(self.addonsFolder())
                if f.endswith(".py")]

    def loadAddons(self):
        "Import every add-on; a broken add-on only prints its traceback."
        for file in self.files():
            try:
                __import__(file.replace(".py", ""))
            except:
                traceback.print_exc()
        self.rebuildAddonsMenu()

    # Menus
    ######################################################################

    def onOpenAddonFolder(self, path=None):
        if path is None:
            path = self.addonsFolder()
        openFolder(path)

    def rebuildAddonsMenu(self):
        "Recreate one Edit/Delete submenu per add-on file."
        for m in self._menus:
            self.mw.form.menuPlugins.removeAction(m.menuAction())
        for file in self.files():
            m = self.mw.form.menuPlugins.addMenu(
                os.path.splitext(file)[0])
            self._menus.append(m)
            a = QAction(_("Edit..."), self.mw)
            p = os.path.join(self.addonsFolder(), file)
            # bind p as a default arg so each lambda keeps its own path
            self.mw.connect(a, SIGNAL("triggered()"),
                            lambda p=p: self.onEdit(p))
            m.addAction(a)
            a = QAction(_("Delete..."), self.mw)
            self.mw.connect(a, SIGNAL("triggered()"),
                            lambda p=p: self.onRem(p))
            m.addAction(a)

    def onEdit(self, path):
        "Open a simple in-app text editor on the add-on source."
        d = QDialog(self.mw)
        frm = aqt.forms.editaddon.Ui_Dialog()
        frm.setupUi(d)
        d.setWindowTitle(os.path.basename(path))
        frm.text.setPlainText(unicode(open(path).read(), "utf8"))
        d.connect(frm.buttonBox, SIGNAL("accepted()"),
                  lambda: self.onAcceptEdit(path, frm))
        d.exec_()

    def onAcceptEdit(self, path, frm):
        "Write the edited source back as utf8."
        open(path, "w").write(frm.text.toPlainText().encode("utf8"))
        showInfo(_("Edits saved. Please restart Anki."))

    def onRem(self, path):
        "Delete an add-on file after confirmation."
        if not askUser(_("Delete %s?") % os.path.basename(path)):
            return
        os.unlink(path)
        self.rebuildAddonsMenu()
        showInfo(_("Deleted. Please restart Anki."))

    # Tools
    ######################################################################

    def addonsFolder(self):
        "Filesystem path of the addons folder (bytes on Windows)."
        dir = self.mw.pm.addonFolder()
        if isWin:
            dir = dir.encode(sys.getfilesystemencoding())
        return dir

    def clearAddonCache(self):
        "Clear .pyc files which may cause crashes if Python version updated."
        dir = self.addonsFolder()
        for curdir, dirs, files in os.walk(dir):
            for f in files:
                if not f.endswith(".pyc"):
                    continue
                os.unlink(os.path.join(curdir, f))

    def registerAddon(self, name, updateId):
        # not currently used
        return

    # Installing add-ons
    ######################################################################

    def onGetAddons(self):
        GetAddons(self.mw)

    def install(self, data, fname):
        """Install downloaded add-on bytes.

        A .py file is written straight into the addons folder; anything
        else is treated as a zip archive and extracted there.
        """
        if fname.endswith(".py"):
            # .py files go directly into the addon folder
            path = os.path.join(self.addonsFolder(), fname)
            open(path, "wb").write(data)
            return
        # .zip file
        try:
            z = ZipFile(StringIO(data))
        # bug fix: this previously caught zipfile.BadZipFile, which is the
        # Python 3 spelling and does not exist in Python 2's zipfile module
        # (this file is Python 2 - see cStringIO above). A corrupt download
        # therefore raised AttributeError instead of showing the warning.
        except zipfile.BadZipfile:
            showWarning(_("The download was corrupt. Please try again."))
            return
        base = self.addonsFolder()
        for n in z.namelist():
            if n.endswith("/"):
                # folder; ignore
                continue
            # write
            z.extract(n, base)

class GetAddons(QDialog):
    "Dialog prompting for an add-on code and installing the download."

    def __init__(self, mw):
        QDialog.__init__(self, mw)
        self.mw = mw
        self.form = aqt.forms.getaddons.Ui_Dialog()
        self.form.setupUi(self)
        b = self.form.buttonBox.addButton(
            _("Browse"), QDialogButtonBox.ActionRole)
        self.connect(b, SIGNAL("clicked()"), self.onBrowse)
        restoreGeom(self, "getaddons", adjustSize=True)
        self.exec_()
        saveGeom(self, "getaddons")

    def onBrowse(self):
        openLink(aqt.appShared + "addons/")

    def accept(self):
        QDialog.accept(self)
        # create downloader thread
        ret = download(self.mw, self.form.code.text())
        if not ret:
            return
        data, fname = ret
        self.mw.addonManager.install(data, fname)
        self.mw.progress.finish()
        showInfo(_("Download successful. Please restart Anki."))
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/addons.py
addons.py
from aqt.qt import *
import aqt
from aqt.utils import showWarning, openHelp, askUser, saveGeom, restoreGeom


class DeckConf(QDialog):
    # Options dialog for a filtered ("dynamic") deck.  Runs modally from
    # __init__; on accept it saves the config and rebuilds the deck.

    def __init__(self, mw, first=False, search="", deck=None):
        QDialog.__init__(self, mw)
        self.mw = mw
        self.deck = deck or self.mw.col.decks.current()
        self.search = search
        self.form = aqt.forms.dyndconf.Ui_Dialog()
        self.form.setupUi(self)
        if first:
            label = _("Build")
        else:
            label = _("Rebuild")
        self.ok = self.form.buttonBox.addButton(
            label, QDialogButtonBox.AcceptRole)
        self.mw.checkpoint(_("Options"))
        self.setWindowModality(Qt.WindowModal)
        self.connect(self.form.buttonBox,
                     SIGNAL("helpRequested()"),
                     lambda: openHelp("filtered"))
        self.setWindowTitle(_("Options for %s") % self.deck['name'])
        restoreGeom(self, "dyndeckconf")
        self.setupOrder()
        self.loadConf()
        if search:
            self.form.search.setText(search)
        self.form.search.selectAll()
        self.show()
        self.exec_()
        saveGeom(self, "dyndeckconf")

    def setupOrder(self):
        # Populate the order combo from the scheduler's order labels.
        import anki.consts as cs
        self.form.order.addItems(cs.dynOrderLabels().values())

    def loadConf(self):
        # Copy deck config -> widgets.
        f = self.form
        d = self.deck
        search, limit, order = d['terms'][0]
        f.search.setText(search)
        if d['delays']:
            f.steps.setText(self.listToUser(d['delays']))
            f.stepsOn.setChecked(True)
        else:
            f.steps.setText("1 10")
            f.stepsOn.setChecked(False)
        f.resched.setChecked(d['resched'])
        f.order.setCurrentIndex(order)
        f.limit.setValue(limit)

    def saveConf(self):
        # Copy widgets -> deck config and persist.  Always returns True.
        f = self.form
        d = self.deck
        d['delays'] = None
        if f.stepsOn.isChecked():
            steps = self.userToList(f.steps)
            if steps:
                d['delays'] = steps
            else:
                d['delays'] = None
        d['terms'][0] = [f.search.text(),
                         f.limit.value(),
                         f.order.currentIndex()]
        d['resched'] = f.resched.isChecked()
        self.mw.col.decks.save(d)
        return True

    def reject(self):
        self.ok = False
        QDialog.reject(self)

    def accept(self):
        if not self.saveConf():
            return
        if not self.mw.col.sched.rebuildDyn():
            # No matches: give the user a chance to fix the search before
            # closing the dialog.
            if askUser(_("""\
The provided search did not match any cards. Would you like to revise \
it?""")):
                return
        self.mw.reset()
        QDialog.accept(self)

    # Step load/save - fixme: share with std options screen
    ########################################################

    def listToUser(self, l):
        # Render a step list as the space-separated string shown in the UI.
        return " ".join([str(x) for x in l])

    def userToList(self, w, minSize=1):
        # Parse the space-separated step field; returns a list of positive
        # numbers (ints where exact) or None after warning on invalid input.
        items = unicode(w.text()).split(" ")
        ret = []
        for i in items:
            if not i:
                continue
            try:
                i = float(i)
                assert i > 0
                if i == int(i):
                    i = int(i)
                ret.append(i)
            except:
                # invalid, don't update
                showWarning(_("Steps must be numbers."))
                return
        if len(ret) < minSize:
            showWarning(_("At least one step is required."))
            return
        return ret
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/dyndeckconf.py
dyndeckconf.py
from aqt.qt import *
import re


class TagEdit(QLineEdit):
    # Line edit with completion over the collection's tags or deck names.

    # 0 = tags, 1 = decks
    def __init__(self, parent, type=0):
        QLineEdit.__init__(self, parent)
        self.col = None
        self.model = QStringListModel()
        self.type = type
        if type == 0:
            # Tag mode completes the individual tag under the cursor.
            self.completer = TagCompleter(self.model, parent, self)
        else:
            self.completer = QCompleter(self.model, parent)
        self.completer.setCompletionMode(QCompleter.PopupCompletion)
        self.completer.setCaseSensitivity(Qt.CaseInsensitive)
        self.setCompleter(self.completer)

    def setCol(self, col):
        "Set the current col, updating list of available tags."
        self.col = col
        if self.type == 0:
            l = sorted(self.col.tags.all())
        else:
            l = sorted(self.col.decks.allNames())
        self.model.setStringList(l)

    def focusInEvent(self, evt):
        QLineEdit.focusInEvent(self, evt)
        self.showCompleter()

    def keyPressEvent(self, evt):
        if evt.key() in (Qt.Key_Enter, Qt.Key_Return):
            # Enter confirms the popup selection rather than submitting.
            self.hideCompleter()
            QWidget.keyPressEvent(self, evt)
            return
        QLineEdit.keyPressEvent(self, evt)
        if not evt.text():
            # if it's a modifier, don't show
            return
        if evt.key() not in (
            Qt.Key_Enter, Qt.Key_Return, Qt.Key_Escape, Qt.Key_Space,
            Qt.Key_Tab, Qt.Key_Backspace, Qt.Key_Delete):
            self.showCompleter()

    def showCompleter(self):
        self.completer.setCompletionPrefix(self.text())
        self.completer.complete()

    def focusOutEvent(self, evt):
        QLineEdit.focusOutEvent(self, evt)
        # Old-style signal; listeners use this to commit the edited tags.
        self.emit(SIGNAL("lostFocus"))
        self.completer.popup().hide()

    def hideCompleter(self):
        self.completer.popup().hide()


class TagCompleter(QCompleter):
    # Completes only the space-separated tag under the cursor, then splices
    # the completed tag back into the full tag string.

    def __init__(self, model, parent, edit, *args):
        QCompleter.__init__(self, model, parent)
        self.tags = []
        self.edit = edit
        self.cursor = None  # index of the tag currently being completed

    def splitPath(self, str):
        # Normalise whitespace, then return just the tag at the cursor so
        # the completer matches against that fragment only.
        str = unicode(str).strip()
        str = re.sub("  +", " ", str)
        self.tags = self.edit.col.tags.split(str)
        self.tags.append(u"")
        p = self.edit.cursorPosition()
        self.cursor = str.count(" ", 0, p)
        return [self.tags[self.cursor]]

    def pathFromIndex(self, idx):
        if self.cursor is None:
            return self.edit.text()
        ret = QCompleter.pathFromIndex(self, idx)
        # Substitute the completed tag back into the full tag list.
        self.tags[self.cursor] = unicode(ret)
        try:
            self.tags.remove(u"")
        except ValueError:
            pass
        return " ".join(self.tags)
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/tagedit.py
tagedit.py
from aqt.qt import *
from anki.consts import *
import aqt
from aqt.utils import showWarning, openHelp, getOnlyText, askUser


class FieldDialog(QDialog):
    # Modal editor for a note type's fields: add/delete/rename/reorder,
    # per-field font/size/RTL/sticky flags and the sort field.

    def __init__(self, mw, note, ord=0, parent=None):
        QDialog.__init__(self, parent or mw) #, Qt.Window)
        self.mw = aqt.mw
        self.parent = parent or mw
        self.note = note
        self.col = self.mw.col
        self.mm = self.mw.col.models
        self.model = note.model()
        self.mw.checkpoint(_("Fields"))
        self.form = aqt.forms.fields.Ui_Dialog()
        self.form.setupUi(self)
        self.setWindowTitle(_("Fields for %s") % self.model['name'])
        self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
        self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
        self.currentIdx = None
        # Remember the sort field so reject() knows whether to rebuild the
        # field cache.
        self.oldSortField = self.model['sortf']
        self.fillFields()
        self.setupSignals()
        self.form.fieldList.setCurrentRow(0)
        self.exec_()

    ##########################################################################

    def fillFields(self):
        # Repopulate the list widget from the model's field definitions.
        self.currentIdx = None
        self.form.fieldList.clear()
        for f in self.model['flds']:
            self.form.fieldList.addItem(f['name'])

    def setupSignals(self):
        c = self.connect
        s = SIGNAL
        f = self.form
        c(f.fieldList, s("currentRowChanged(int)"), self.onRowChange)
        c(f.fieldAdd, s("clicked()"), self.onAdd)
        c(f.fieldDelete, s("clicked()"), self.onDelete)
        c(f.fieldRename, s("clicked()"), self.onRename)
        c(f.fieldPosition, s("clicked()"), self.onPosition)
        c(f.sortField, s("clicked()"), self.onSortField)
        c(f.buttonBox, s("helpRequested()"), self.onHelp)

    def onRowChange(self, idx):
        if idx == -1:
            return
        # Flush edits of the previously selected field before switching.
        self.saveField()
        self.loadField(idx)

    def _uniqueName(self, prompt, ignoreOrd=None, old=""):
        # Prompt for a field name; returns None on cancel or duplicate.
        txt = getOnlyText(prompt, default=old)
        if not txt:
            return
        for f in self.model['flds']:
            if ignoreOrd is not None and f['ord'] == ignoreOrd:
                continue
            if f['name'] == txt:
                showWarning(_("That field name is already used."))
                return
        return txt

    def onRename(self):
        idx = self.currentIdx
        f = self.model['flds'][idx]
        name = self._uniqueName(_("New name:"), self.currentIdx, f['name'])
        if not name:
            return
        self.mm.renameField(self.model, f, name)
        self.saveField()
        self.fillFields()
        self.form.fieldList.setCurrentRow(idx)

    def onAdd(self):
        name = self._uniqueName(_("Field name:"))
        if not name:
            return
        self.saveField()
        self.mw.progress.start()
        f = self.mm.newField(name)
        self.mm.addField(self.model, f)
        self.mw.progress.finish()
        self.fillFields()
        # Select the newly appended field.
        self.form.fieldList.setCurrentRow(len(self.model['flds'])-1)

    def onDelete(self):
        if len(self.model['flds']) < 2:
            return showWarning(_("Notes require at least one field."))
        c = self.mm.useCount(self.model)
        c = ngettext("%d note", "%d notes", c) % c
        if not askUser(_("Delete field from %s?") % c):
            return
        f = self.model['flds'][self.form.fieldList.currentRow()]
        self.mw.progress.start()
        self.mm.remField(self.model, f)
        self.mw.progress.finish()
        self.fillFields()
        self.form.fieldList.setCurrentRow(0)

    def onPosition(self, delta=-1):
        # Ask for a new 1-based position and move the current field there.
        idx = self.currentIdx
        l = len(self.model['flds'])
        txt = getOnlyText(_("New position (1...%d):") % l, default=str(idx+1))
        if not txt:
            return
        try:
            pos = int(txt)
        except ValueError:
            return
        if not 0 < pos <= l:
            return
        self.saveField()
        f = self.model['flds'][self.currentIdx]
        self.mw.progress.start()
        self.mm.moveField(self.model, f, pos-1)
        self.mw.progress.finish()
        self.fillFields()
        self.form.fieldList.setCurrentRow(pos-1)

    def onSortField(self):
        # don't allow user to disable; it makes no sense
        self.form.sortField.setChecked(True)
        self.model['sortf'] = self.form.fieldList.currentRow()

    def loadField(self, idx):
        # Copy field definition -> widgets.
        self.currentIdx = idx
        fld = self.model['flds'][idx]
        f = self.form
        f.fontFamily.setCurrentFont(QFont(fld['font']))
        f.fontSize.setValue(fld['size'])
        f.sticky.setChecked(fld['sticky'])
        f.sortField.setChecked(self.model['sortf'] == fld['ord'])
        f.rtl.setChecked(fld['rtl'])

    def saveField(self):
        # not initialized yet?
        if self.currentIdx is None:
            return
        idx = self.currentIdx
        fld = self.model['flds'][idx]
        f = self.form
        fld['font'] = f.fontFamily.currentFont().family()
        fld['size'] = f.fontSize.value()
        fld['sticky'] = f.sticky.isChecked()
        fld['rtl'] = f.rtl.isChecked()

    def reject(self):
        # Closing the dialog commits all changes (accept() delegates here).
        self.saveField()
        if self.oldSortField != self.model['sortf']:
            # Sort field changed: the sort cache must be regenerated.
            self.mw.progress.start()
            self.mw.col.updateFieldCache(self.mm.nids(self.model))
            self.mw.progress.finish()
        self.mm.save(self.model)
        self.mw.reset()
        QDialog.reject(self)

    def accept(self):
        self.reject()

    def onHelp(self):
        openHelp("fields")
AnkiServer
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/fields.py
fields.py
import shutil
from tempfile import mkdtemp, mktemp
import os
from zipfile import ZipFile

from .excel.app import AnkiExcelSync


class AnkiFormatEditor:
    """Converts between Anki file formats: .apkg (zip), .anki2 (SQLite) and
    .xlsx, using a private scratch directory for intermediate files."""

    def __init__(self):
        # Scratch directory for intermediates; removed by cleanup().
        self.tempdir = mkdtemp()

    def convert(self, in_file, out_file=None, out_format=None):
        """Convert `in_file` to another format.

        Either `out_file` (its extension decides the format) or `out_format`
        (e.g. 'xlsx' or '.xlsx') must be given.  When both are given, the
        extension of `out_file` is rewritten to `out_format`.

        Raises AssertionError for missing arguments or identical formats,
        and Exception for an unsupported conversion pair.
        """
        in_file_type = os.path.splitext(in_file)[1]

        if out_format is None:
            assert out_file is not None, "Either out_file or out_format must be specified."
            out_file_type = os.path.splitext(out_file)[1]
        else:
            if out_format[0] == '.':
                out_file_type = out_format
            else:
                out_file_type = '.' + out_format

        # Output path keeps the stem of out_file (if given) or in_file, with
        # the extension always forced to out_file_type.
        if out_file is not None:
            out_file_header = os.path.splitext(out_file)[0]
        else:
            out_file_header = os.path.splitext(in_file)[0]
        out_file = '{}{}'.format(out_file_header, out_file_type)

        assert in_file_type != out_file_type, 'File types must be different'

        conversion = (in_file_type, out_file_type)
        if conversion == ('.apkg', '.anki2'):
            self.unzip(in_file, out_file=out_file)
        elif conversion == ('.apkg', '.xlsx'):
            # Fix: the original used os.path.join(self.tempdir, mktemp());
            # mktemp() returns an *absolute* path, so os.path.join discarded
            # self.tempdir and the intermediate leaked into the system temp
            # directory.  Use a fixed name inside our scratch dir instead.
            intermediate = os.path.join(self.tempdir, 'intermediate.anki2')
            self.export_anki_sqlite(self.unzip(in_file, intermediate), out_file)
        elif conversion == ('.anki2', '.apkg'):
            self.zip(in_file, out_file)
        elif conversion == ('.anki2', '.xlsx'):
            self.export_anki_sqlite(in_file, out_file)
        elif conversion == ('.xlsx', '.anki2'):
            self.import_anki_sqlite(in_file, out_file, out_path='')
        elif conversion == ('.xlsx', '.apkg'):
            self.zip(self.import_anki_sqlite(in_file), out_file)
        else:
            raise Exception("Unsupported conversion.")

    def cleanup(self):
        """Best-effort removal of the scratch directory."""
        shutil.rmtree(self.tempdir, ignore_errors=True)

    def unzip(self, in_file, out_file):
        """Extract collection.anki2 from the .apkg `in_file` to `out_file`."""
        with ZipFile(in_file) as zf:
            zf.extract('collection.anki2', path=self.tempdir)
        shutil.move(os.path.join(self.tempdir, 'collection.anki2'), out_file)
        return out_file

    @staticmethod
    def zip(in_file, out_file):
        """Pack the .anki2 `in_file` into a minimal .apkg (empty media map)."""
        with ZipFile(out_file, 'w') as zf:
            zf.write(in_file, arcname='collection.anki2')
            zf.writestr('media', '{}')

    @staticmethod
    def export_anki_sqlite(in_file, out_file):
        """Export the .anki2 database `in_file` to the .xlsx `out_file`."""
        with AnkiExcelSync(anki_database=in_file, excel_filename=out_file) as sync_portal:
            sync_portal.to_excel()

    def import_anki_sqlite(self, in_file, out_file=None, out_path=''):
        """Import the .xlsx `in_file` into an .anki2 database.

        When `out_file` is omitted, collection.anki2 inside the scratch
        directory is used.  Returns the (optionally `out_path`-prefixed)
        database path.
        """
        if out_file is None:
            out_file = os.path.join(self.tempdir, 'collection.anki2')
        with AnkiExcelSync(anki_database=out_file, excel_filename=in_file,
                           read_only=True) as sync_portal:
            sync_portal.to_sqlite()
        return os.path.join(out_path, out_file)


def anki_convert(in_file, out_file=None, out_format=None, out_path=None):
    """Convenience wrapper around AnkiFormatEditor.convert().

    Fix: the editor's scratch directory is now removed when the conversion
    finishes (the original leaked one mkdtemp() per call).
    NOTE(review): `out_path` is accepted but unused, as in the original —
    kept for backward compatibility.
    """
    editor = AnkiFormatEditor()
    try:
        editor.convert(in_file, out_file, out_format)
    finally:
        editor.cleanup()
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/editor.py
editor.py
import openpyxl as px
from openpyxl.utils import get_column_letter
from collections import OrderedDict, namedtuple
from time import time
from datetime import datetime
import json
import logging

from .api.ankidirect import AnkiDirect
from .tools.defaults import DEFAULT_API_MODEL_DEFINITION

DeckTuple = namedtuple('DeckTuple', ['deck_id', 'deck_name'])
CardTuple = namedtuple('CardTuple', ['card_id', 'note_id', 'deck_name', 'template_order'])

# Wide enough for a millisecond epoch id / an ISO timestamp, plus padding.
COLUMN_ID_MIN_WIDTH = len(str(int(time() * 1000))) + 3
COLUMN_TIMESTAMP_WIDTH = len(datetime.fromtimestamp(datetime.now().timestamp()).isoformat()) + 1


class AnkiExcelSync:
    # openpyxl-based bridge between an Anki collection and an .xlsx workbook:
    # loads the workbook if it exists, otherwise builds one from the database.

    SHEET_SETTINGS = '.settings'
    SHEET_DECKS = '.decks'

    def __init__(self, excel: str, anki_database: str):
        self.anki_direct = AnkiDirect(anki_database=anki_database)
        self.excel_filename = excel
        self.settings = {
            'models': dict(),
            'decks': dict()
        }
        try:
            self.wb = px.load_workbook(self.excel_filename)
        except FileNotFoundError:
            # No workbook yet: generate one from the Anki database.
            self.wb = self.create()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.save()
        self.close()

    def close(self):
        self.wb.close()

    def save(self):
        self.wb.save(self.excel_filename)

    def to_excel(self):
        self.wb.save(self.excel_filename)

    def to_sqlite(self):
        # Push the workbook contents into the Anki database.
        self.anki_direct.add(self.to_json())

    def to_json(self):
        # Build the payload AnkiDirect.add() expects: per-model note records
        # plus a model definition whose first template is formatted with the
        # first two header fields.
        payload = {
            'data': dict(),
            'definitions': dict()
        }
        sheet_names = self.wb.sheetnames
        try:
            sheet_names.remove(self.SHEET_SETTINGS)
            sheet_names.remove(self.SHEET_DECKS)
        except ValueError:
            pass

        for sheet_name in sheet_names:
            payload['data'][sheet_name] = list()
            row_iter = self.wb[sheet_name].iter_rows()
            header = list(self.get_cell_value_iter(next(row_iter)))
            for row in row_iter:
                record = OrderedDict(zip(header, self.get_cell_value_iter(row)))
                formatted_record = {
                    'data': record,
                    'decks': {
                        'Card 1': sheet_name
                    }
                }
                payload['data'][sheet_name].append(formatted_record)

            # This will further be "string-formatted", so it needs to be
            # deep-copied.  Currently implemented using a ReadOnlyJsonObject
            # object.
            payload['definitions'][sheet_name] = DEFAULT_API_MODEL_DEFINITION.to_json_object()
            payload['definitions'][sheet_name]['templates'][0]['data']['qfmt'] = \
                payload['definitions'][sheet_name]['templates'][0]['data']['qfmt'] % header[0]
            payload['definitions'][sheet_name]['templates'][0]['data']['afmt'] = \
                payload['definitions'][sheet_name]['templates'][0]['data']['afmt'] % header[1]

        return payload

    def create(self):
        # Build a fresh workbook from the Anki database: a settings sheet,
        # one sheet per note model, and a deck-distribution sheet.
        wb = px.Workbook()

        ws = wb.active
        ws.title = self.SHEET_SETTINGS
        ws.column_dimensions['B'].width = COLUMN_TIMESTAMP_WIDTH
        timestamp = datetime.fromtimestamp(datetime.now().timestamp()).isoformat()
        ws.append(['Created', timestamp])
        ws.append(['Modified', timestamp])

        # Getting sheet names
        models = self.anki_direct.models_dict
        model_id_to_name = dict()
        for model_id, model_dict in models.items():
            sheet_name = model_dict['name']
            logging.info('Creating sheet {}'.format(sheet_name))

            # Writing header: id, fields in 'ord' order, Tags
            if sheet_name not in wb.sheetnames:
                header = ['id']
                field_pairs = [(fld['ord'], fld['name']) for fld in model_dict['flds']]
                header.extend([x[1] for x in sorted(field_pairs)])
                header.append('Tags')

                wb.create_sheet(sheet_name)
                ws = wb[sheet_name]
                ws.append(header)

                ws.column_dimensions['A'].width = COLUMN_ID_MIN_WIDTH
                for header_id in range(1, len(header)):
                    width = len(header[header_id]) * 1.2
                    if width < 15:
                        width = 15
                    ws.column_dimensions[get_column_letter(header_id + 1)].width = width

            model_id_to_name[model_id] = sheet_name
            self.settings['models'][sheet_name] = {
                'id': model_id,
                'templates': model_dict['tmpls']
            }

        # Getting sheet contents
        notes_iter = self.anki_direct.notes
        for note in notes_iter:
            try:
                sheet_name = model_id_to_name[str(note['mid'])]
            except KeyError:
                # Note references an unknown model; skip it.
                continue

            # Writing record
            logging.info('Creating note {} - {}'.format(note['id'],
                                                        json.dumps(note['formatted_flds'],
                                                                   ensure_ascii=False)))
            record = [note['id']]
            record.extend(note['formatted_flds'])
            record.append(note['tags'])
            wb[sheet_name].append(record)

        # Getting deck id and names
        decks_dict = self.anki_direct.decks_dict
        for deck_info in decks_dict.values():
            self.settings['decks'][deck_info['name']] = deck_info

        # Getting card distribution
        wb.create_sheet(self.SHEET_DECKS, 1)
        ws = wb[self.SHEET_DECKS]
        ws.append(CardTuple._fields)
        ws.column_dimensions['A'].width = COLUMN_ID_MIN_WIDTH
        ws.column_dimensions['B'].width = COLUMN_ID_MIN_WIDTH
        for i in range(2, len(CardTuple._fields) + 1):
            ws.column_dimensions[get_column_letter(i)].width = 15

        cards_iter = self.anki_direct.cards
        for card in cards_iter:
            record = CardTuple(
                card_id=card['id'],
                note_id=card['nid'],
                deck_name=decks_dict[str(card['did'])]['name'],
                template_order=card['ord']
            )
            ws.append(record)

        # Delete empty sheets
        sheet_names = wb.sheetnames
        sheet_names.remove(self.SHEET_SETTINGS)
        sheet_names.remove(self.SHEET_DECKS)
        for sheet_name in sheet_names:
            if wb[sheet_name].max_row <= 1:
                wb.remove(wb[sheet_name])

        return wb

    @staticmethod
    def get_cell_value_iter(cell_iter):
        # Yield cell values, normalising empty/None cells to ''.
        for cell in cell_iter:
            value = cell.value
            if not value:
                yield ''
            else:
                yield value
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/_excel.py
_excel.py
class AnkiContentVerify:
    """Validates an add-payload against the collection snapshot it was
    constructed with (a mapping holding at least 'decks' and 'models')."""

    def __init__(self, anki_content):
        self.anki_content = anki_content

    def missing_decks(self):
        """Return the names of ancestor decks (e.g. 'A' and 'A::B' for
        'A::B::C') that do not themselves exist in the collection."""
        existing = {tuple(deck['name'].split('::'))
                    for deck in self.anki_content['decks'].values()}
        return {'::'.join(path[:depth])
                for path in existing
                for depth in range(1, len(path))
                if path[:depth] not in existing}

    def get_model_id(self, model_name):
        """Return the id of the model with this name, or None."""
        return next((model_id
                     for model_id, model in self.anki_content['models'].items()
                     if model['name'] == model_name),
                    None)

    def check_header(self, header, model_id):
        """True iff every header item is a field name of the model."""
        field_names = {fld['name']
                       for fld in self.anki_content['models'][model_id]['flds']}
        return all(item in field_names for item in header)

    def check_card_sides(self, card_sides, model_id):
        """True iff every card side is a template name of the model."""
        template_names = {tmpl['name']
                          for tmpl in self.anki_content['models'][model_id]['tmpls']}
        return all(side in template_names for side in card_sides)

    @staticmethod
    def check_qfmt_afmt(card_side_format, header):
        """True iff both qfmt and afmt reference at least one header field
        via the {{Field}} mustache syntax."""
        def mentions_a_field(fmt):
            return any(("{{%s}}" % item) in fmt for item in header)

        return (mentions_a_field(card_side_format['qfmt'])
                and mentions_a_field(card_side_format['afmt']))

    def verify_add_info(self, add_info):
        """Validate a payload for AnkiDirect.add(); returns True/False.

        Known models are checked field-by-field; unknown models must come
        with a definition whose templates reference the collected headers
        and card sides.
        """
        required_for_new = dict()
        for model_name, notes in add_info['data'].items():
            model_id = self.get_model_id(model_name)
            if model_id is None:
                # Unknown model: a definition must be supplied.
                try:
                    if model_name not in add_info['definitions'].keys():
                        return False
                except KeyError as e:
                    print(e)
                    return False
                required_for_new[model_name] = {'header': set(),
                                                'card_sides': set()}
                if model_name not in add_info['definitions'].keys():
                    return False
            for note in notes:
                if model_id is not None:
                    if not self.check_header(note['data'].keys(), model_id):
                        return False
                    if not self.check_card_sides(note['decks'].keys(), model_id):
                        return False
                else:
                    # Collect what the new model's definition must support.
                    required_for_new[model_name]['header'].update(note['data'].keys())
                    required_for_new[model_name]['card_sides'].update(note['decks'].keys())

        if len(required_for_new) > 0:
            for model_name, model_template in add_info['definitions'].items():
                for card_template in model_template['templates']:
                    if not self.check_qfmt_afmt(card_template['data'],
                                                required_for_new[model_name]['header']):
                        return False
                    if card_template['name'] not in required_for_new[model_name]['card_sides']:
                        return False

        return True
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/api/verify.py
verify.py
import sqlite3
from time import time

import psutil
import os

from AnkiTools.tools.path import get_collection_path
from AnkiTools.tools.create import AnkiContentCreator
from AnkiTools.tools.write import write_anki_json, write_anki_table, write_anki_schema
from AnkiTools.tools.read import read_anki_json, read_anki_table

from .verify import AnkiContentVerify


class AnkiDirect:
    # Direct SQLite access to an Anki collection (collection.anki2),
    # creating the schema and a default collection row for new files.

    def __init__(self, anki_database: str=None):
        # Default to the user's live collection; refuse to touch it while
        # the Anki app is running.
        if anki_database is None:
            anki_database = get_collection_path()
            try:
                assert 'Anki' not in (p.name() for p in psutil.process_iter()), \
                    "Please close Anki first before accessing Application Data collection.anki2 directly."
            except psutil.ZombieProcess as e:
                # A zombie process cannot be inspected; warn and carry on.
                print(e)

        do_init = False
        if not os.path.exists(anki_database):
            do_init = True

        self.conn = sqlite3.connect(anki_database)

        if do_init:
            self.creator = AnkiContentCreator()
            write_anki_schema(self.conn)
            anki_collection = self.creator.new_collection()
            write_anki_table(self.conn, 'col', [anki_collection], do_commit=True)
            self._id_to_record = self.data
        else:
            self._id_to_record = self.data
            self.creator = AnkiContentCreator(self._id_to_record)

        # In-memory caches kept in sync with writes made through this object.
        self._name_to_id = self.name_to_id
        self.verify = AnkiContentVerify(self._id_to_record)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self):
        self.conn.close()

    @property
    def data(self):
        # Snapshot of the whole collection keyed by stringified ids.
        data = {
            'decks': self.decks_dict,
            'models': self.models_dict,
            'notes': dict(),
            'cards': dict()
        }
        for record in self.notes:
            data['notes'][str(record['id'])] = record
        for record in self.cards:
            data['cards'][str(record['id'])] = record
        return data

    @property
    def name_to_id(self):
        # Reverse lookup: model/deck name -> id.
        name_to_id = {
            'models': dict(),
            'decks': dict()
        }
        for k, v in self.models_dict.items():
            name_to_id['models'][v['name']] = k
        for k, v in self.decks_dict.items():
            name_to_id['decks'][v['name']] = k
        return name_to_id

    def _get_model_id(self, model_name, model_header, model_definition, **kwargs):
        # Return the existing model's id, or create the model on demand and
        # persist it immediately.
        try:
            model_id = self._name_to_id['models'][model_name]
        except KeyError:
            anki_model = self.creator.new_model(model_name, model_header, model_definition,
                                                modified=kwargs.get('modified', None))
            self._id_to_record['models'][str(anki_model['id'])] = anki_model
            write_anki_json(self.conn, 'models', [anki_model], do_commit=True)
            model_id = anki_model['id']
            self._name_to_id['models'][model_name] = model_id

        return model_id

    def _get_card_ordering(self, model_id, note_side):
        # Template index ('ord') of the named card side within the model.
        note_sides = [template['name']
                      for template in self._id_to_record['models'][str(model_id)]['tmpls']]
        return note_sides.index(note_side)

    @property
    def models_dict(self):
        return read_anki_json(self.conn, 'models')

    @property
    def decks_dict(self):
        return read_anki_json(self.conn, 'decks')

    @property
    def notes(self):
        yield from read_anki_table(self.conn, 'notes')

    @property
    def cards(self):
        yield from read_anki_table(self.conn, 'cards')

    def add(self, data):
        # Insert the notes/cards/decks described by `data` (validated by
        # AnkiContentVerify).  Returns False if validation fails, else True.
        if not self.verify.verify_add_info(data):
            return False

        modified = int(time())

        for model_name, notes in data['data'].items():
            model_id = self._get_model_id(model_name, notes[0]['data'].keys(),
                                          data.get('definitions', dict()).get(model_name, dict()))

            anki_notes = []
            anki_cards = []
            anki_decks = []
            for note in notes:
                anki_note = self.creator.new_note(flds_list=list(note['data'].values()),
                                                  model_id=model_id,
                                                  modified=modified)
                self._id_to_record['notes'][str(anki_note['id'])] = anki_note
                anki_notes.append(anki_note)

                for note_side, deck_name in note['decks'].items():
                    try:
                        deck_id = self._name_to_id['decks'][deck_name]
                    except KeyError:
                        # Deck does not exist yet; create it on the fly.
                        anki_deck = self.creator.new_deck(deck_name)
                        self._id_to_record['decks'][str(anki_deck['id'])] = anki_deck
                        anki_decks.append(anki_deck)
                        deck_id = anki_deck['id']
                        self._name_to_id['decks'][deck_name] = deck_id

                    anki_card = self.creator.new_card(anki_note['id'], deck_id,
                                                      self._get_card_ordering(model_id, note_side),
                                                      modified=modified)
                    self._id_to_record['cards'][str(anki_card['id'])] = anki_card
                    anki_cards.append(anki_card)

            # Ensure every ancestor deck ('A' for 'A::B') exists as well.
            missing_deck_names = self.verify.missing_decks()
            for deck_name in missing_deck_names:
                anki_deck = self.creator.new_deck(deck_name)
                self._id_to_record['decks'][str(anki_deck['id'])] = anki_deck
                anki_decks.append(anki_deck)

            # Batch the writes; a single commit below makes this atomic.
            write_anki_table(self.conn, 'notes', anki_notes, do_commit=False)
            write_anki_table(self.conn, 'cards', anki_cards, do_commit=False)
            write_anki_json(self.conn, 'decks', anki_decks, do_commit=False)

        self.conn.commit()

        return True
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/api/ankidirect.py
ankidirect.py
import pyexcel_xlsx
from tempfile import mkdtemp, mktemp
import os
from collections import OrderedDict, namedtuple
from datetime import datetime
import json
import logging

from AnkiTools.api.ankidirect import AnkiDirect
from AnkiTools.tools.defaults import DEFAULT_API_MODEL_DEFINITION

from .formatter import ExcelFormatter

CardTuple = namedtuple('CardTuple', ['card_id', 'note_id', 'deck_name', 'template_order'])


class AnkiExcelSync:
    """Two-way bridge between an Anki SQLite collection and an .xlsx workbook.

    In-memory layout of self.data:
      'meta': {'.settings': OrderedDict, '.decks': {card_id: record-dict}}
      'data': {model_name: {note_id: OrderedDict(field -> value)}}
    """

    SHEET_SETTINGS = '.settings'
    SHEET_DECKS = '.decks'

    def __init__(self, excel_filename: str, anki_database: str, read_only: bool=False):
        self.anki_direct = AnkiDirect(anki_database=anki_database)
        self.excel_filename = excel_filename
        # read_only refers to the workbook: close() will not write it back.
        self.read_only = read_only

        if os.path.exists(excel_filename):
            self.data = self.load_excel()
        else:
            self.data = self.new_data()

        self.load_anki_direct()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def new_data(self):
        """Return an empty data skeleton with creation timestamps."""
        timestamp = datetime.fromtimestamp(datetime.now().timestamp()).isoformat()
        return {
            'meta': {
                self.SHEET_SETTINGS: OrderedDict([
                    ('Created', timestamp),
                    ('Modified', timestamp)
                ]),
                self.SHEET_DECKS: dict()
            },
            'data': OrderedDict()
        }

    def load_anki_direct(self):
        """Merge the Anki database contents into self.data."""
        # Card distribution: one record per card, keyed by card id.
        cards_iter = self.anki_direct.cards
        decks_dict = self.anki_direct.decks_dict
        for card in cards_iter:
            record = CardTuple(
                card_id=card['id'],
                note_id=card['nid'],
                deck_name=decks_dict[str(card['did'])]['name'],
                template_order=card['ord']
            )
            self.data['meta'][self.SHEET_DECKS][str(card['id'])] = record._asdict()

        # One sheet per note model; header is id, fields in 'ord' order, Tags.
        models = self.anki_direct.models_dict
        for model_id, model_dict in models.items():
            sheet_name = model_dict['name']
            logging.info('Creating sheet {}'.format(sheet_name))

            header = ['id']
            field_pairs = [(fld['ord'], fld['name']) for fld in model_dict['flds']]
            header.extend([x[1] for x in sorted(field_pairs)])
            header.append('Tags')
            model_dict['header'] = header

        # Sheet contents: one row per note.
        notes_iter = self.anki_direct.notes
        for note in notes_iter:
            sheet_name = models[str(note['mid'])]['name']
            header = models[str(note['mid'])]['header']

            logging.info('Creating note {} - {}'.format(
                note['id'], json.dumps(note['formatted_flds'], ensure_ascii=False)))
            record = [note['id']]
            record.extend(note['formatted_flds'])
            record.append(note['tags'])
            self.data['data'].setdefault(sheet_name, dict())[str(note['id'])] = \
                OrderedDict(zip(header, record))

    def load_excel(self):
        """Parse the workbook on disk into the self.data layout."""
        data = dict()
        raw = pyexcel_xlsx.get_data(self.excel_filename)

        data['meta'] = dict()
        try:
            data['meta'][self.SHEET_SETTINGS] = OrderedDict()
            for row in raw.pop(self.SHEET_SETTINGS):
                data['meta'][self.SHEET_SETTINGS][row[0]] = row[1]
        except (KeyError, IndexError) as e:
            # Sheet missing or malformed; keep whatever was read so far.
            print(e)

        try:
            data['meta'][self.SHEET_DECKS] = dict()
            header, *records = raw.pop(self.SHEET_DECKS)
            for record in records:
                data['meta'][self.SHEET_DECKS][record[0]] = OrderedDict(zip(header, record))
        except (KeyError, IndexError) as e:
            print(e)

        # Remaining sheets are note types: first row is the header.
        data['data'] = OrderedDict()
        for note_type, values in raw.items():
            header, *records = values
            data['data'][note_type] = dict()
            for record in records:
                data['data'][note_type][record[0]] = OrderedDict(zip(header, record))

        return data

    def close(self):
        # In read-only mode the workbook is never written back.
        if not self.read_only:
            self.save()

    def save(self, formatted=False):
        """Write self.data to the workbook, optionally re-applying formatting."""
        if formatted:
            # Fix: the original joined mkdtemp() with mktemp(); mktemp()
            # returns an *absolute* path, so the join discarded the fresh
            # directory.  Use a fixed name inside it instead.
            temp_filename = os.path.join(mkdtemp(), 'formatted.xlsx')
            pyexcel_xlsx.save_data(temp_filename, self.excel_raw)
            formatter = ExcelFormatter(excel_data_file=temp_filename,
                                       excel_formatting_file=self.excel_filename,
                                       out_file=self.excel_filename)
            formatter.do_formatting()
            formatter.save()
        else:
            pyexcel_xlsx.save_data(self.excel_filename, self.excel_raw)

    @property
    def excel_raw(self):
        """self.data rendered as {sheet_name: [row, ...]} for pyexcel."""
        excel_raw = OrderedDict()

        # Settings sheet: key/value rows plus a trailing blank row.
        excel_raw[self.SHEET_SETTINGS] = list()
        for k, v in self.data['meta'][self.SHEET_SETTINGS].items():
            assert isinstance(v, str)
            excel_raw[self.SHEET_SETTINGS].append([k, v])
        excel_raw[self.SHEET_SETTINGS].append([''])

        # Decks sheet: header from the first record, then one row per card.
        # Fix: guard against an empty deck map — the original indexed
        # list(source.keys())[0] unconditionally and raised IndexError on an
        # empty collection.
        excel_raw[self.SHEET_DECKS] = list()
        deck_source = self.data['meta'][self.SHEET_DECKS]
        if deck_source:
            first_deck = next(iter(deck_source.values()))
            assert isinstance(first_deck, (dict, OrderedDict))
            excel_raw[self.SHEET_DECKS].append(list(first_deck.keys()))
            for record in deck_source.values():
                assert isinstance(record, (dict, OrderedDict))
                excel_raw[self.SHEET_DECKS].append(list(record.values()))

        # One sheet per note type; note types with no records get no sheet.
        # Fix: the original tested (and took the header row from) a stale
        # `source` variable still pointing at the *previous* sheet's dict,
        # so the first note sheet was emitted with the deck sheet's header.
        for note_type, records in self.data['data'].items():
            if not records:
                continue
            first_record = next(iter(records.values()))
            assert isinstance(first_record, (dict, OrderedDict))
            excel_raw[note_type] = list()
            excel_raw[note_type].append(list(first_record.keys()))
            for record in records.values():
                assert isinstance(record, (dict, OrderedDict))
                excel_raw[note_type].append(list(record.values()))

        return excel_raw

    def to_excel(self):
        self.save()

    def to_sqlite(self):
        self.anki_direct.add(self.to_payload())

    def to_payload(self):
        """Build the payload AnkiDirect.add() expects: per-model note records
        plus a model definition formatted with the first two header fields."""
        payload = {
            'data': dict(),
            'definitions': dict()
        }
        for sheet_name, records in self.data['data'].items():
            if records:
                payload['data'][sheet_name] = list()

                # This will further be "string-formatted", so it needs to be
                # deep-copied.  Currently implemented using a
                # ReadOnlyJsonObject object.
                payload['definitions'][sheet_name] = DEFAULT_API_MODEL_DEFINITION.to_json_object()
                random_record = records[list(records.keys())[0]]
                assert isinstance(random_record, (dict, OrderedDict))
                header = list(random_record.keys())
                payload['definitions'][sheet_name]['templates'][0]['data']['qfmt'] = \
                    payload['definitions'][sheet_name]['templates'][0]['data']['qfmt'] % header[0]
                payload['definitions'][sheet_name]['templates'][0]['data']['afmt'] = \
                    payload['definitions'][sheet_name]['templates'][0]['data']['afmt'] % header[1]

                for record in records.values():
                    formatted_record = {
                        'data': record,
                        'decks': {
                            'Card 1': sheet_name
                        }
                    }
                    payload['data'][sheet_name].append(formatted_record)

        return payload
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/excel/app.py
app.py
"""Default Anki objects, loaded once from the JSON resources bundled
with the package (``defaults.json`` / ``defaults_api.json``)."""
import json
from json.decoder import JSONDecodeError
from collections import OrderedDict
from collections.abc import Mapping

from AnkiTools.dir import module_path


def _default(self, obj):
    # Serialize any object exposing to_json_object(); otherwise defer to
    # the encoder's stock default (which raises TypeError).
    return getattr(obj.__class__, "to_json_object", _default.default)(obj)


# NOTE(review): this monkey-patches json.JSONEncoder globally, so every
# json.dumps() call in the whole process picks up the hook, not just the
# calls made by this package.
_default.default = json.JSONEncoder().default
json.JSONEncoder.default = _default


class ReadOnlyJsonObject(Mapping):
    """A read-only, ``Mapping``-like view over a JSON document.

    The canonical state is the serialized JSON string itself; every
    access re-parses it, so callers always receive fresh, independently
    mutable copies and can never corrupt the stored document.
    """

    def __init__(self, data, dumps_kw: dict=None, loads_kw: dict=None):
        """
        :param data: a JSON string, or any object ``json.dumps`` accepts
        :param dumps_kw: extra keyword arguments for ``json.dumps``
        :param loads_kw: extra keyword arguments for ``json.loads``;
            by default JSON objects are parsed into ``OrderedDict``
        """
        if dumps_kw is None:
            dumps_kw = {}
        self._loads_kw = dict(object_pairs_hook=OrderedDict) if loads_kw is None else loads_kw
        self._json_string = data if isinstance(data, str) else json.dumps(data, **dumps_kw)

    @property
    def _data(self):
        # Re-parsed on every access -- see the class docstring.
        return json.loads(self._json_string, **self._loads_kw)

    def to_json_object(self):
        """Return the document as a plain (JSON-compatible) object."""
        return self._data

    def __getitem__(self, key):
        value = self._data[key]
        try:
            # String values that themselves hold JSON are decoded
            # transparently; anything else is returned unchanged.
            return json.loads(value, **self._loads_kw)
        except (TypeError, JSONDecodeError):
            return value

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __str__(self):
        return json.dumps(self._data, indent=2)

    def __repr__(self):
        return self._json_string


# Auto-generated default values extracted from Anki (collection.anki2).
with open(module_path('defaults.json')) as f:
    defaults = json.load(f, object_pairs_hook=OrderedDict)

DEFAULT_COLLECTION = ReadOnlyJsonObject(defaults['col'])
DEFAULT_MODEL = ReadOnlyJsonObject(tuple(DEFAULT_COLLECTION['models'].values())[0])
DEFAULT_TEMPLATE = ReadOnlyJsonObject(DEFAULT_MODEL['tmpls'][0])

# Author-defined default values for the API model definition.
with open(module_path('defaults_api.json')) as f:
    defaults = json.load(f, object_pairs_hook=OrderedDict)

DEFAULT_API_MODEL_DEFINITION = ReadOnlyJsonObject(defaults['model_definition'])

# A commented-out prototype used to live here that loaded per-column
# "is_json" flags from defaults_formatted.json into an IS_JSON table;
# it was never enabled.


def get_constants():
    """Return every UPPER_CASE module-level constant, keyed by name."""
    return OrderedDict(
        (name, value) for name, value in globals().items() if name.isupper()
    )


if __name__ == '__main__':
    pass
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/tools/defaults.py
defaults.py
"""Factories for new Anki objects: models, fields, templates, notes,
cards, decks and the collection row."""
from time import time
from collections import OrderedDict
from bs4 import BeautifulSoup
from hashlib import sha1
import json

from .defaults import (DEFAULT_COLLECTION, DEFAULT_TEMPLATE, DEFAULT_MODEL,
                       DEFAULT_API_MODEL_DEFINITION)
from .guid import guid64


class AnkiContentCreator:
    """Builds dict/OrderedDict payloads in Anki's sqlite/JSON formats.

    Remembers every id it has handed out (per item type) so generated
    objects always get a unique, millisecond-timestamp-based id.
    """

    def __init__(self, ids=None, formatted_defaults=True):
        """
        :param dict ids: pre-existing ids keyed by item type ('models',
            'decks', 'cards', 'notes'); each value is a mapping whose
            keys are the ids already in use
        :param bool formatted_defaults:
        """
        if not ids:
            ids = {
                'models': dict(),
                'decks': dict(),
                'cards': dict(),
                'notes': dict()
            }

        # Only the key sets are kept -- that is all _unique_id() needs.
        self.ids = {item_type: set(ids[item_type].keys()) for item_type in ids}
        self.formatted_defaults = formatted_defaults

    def new_model(self, model_name, model_header, model_definition=None, modified=None, **kwargs):
        """Create a new note type (model).

        :param str model_name: display name of the model
        :param list model_header: field names, in order
        :param OrderedDict model_definition: templates/css source;
            falls back to DEFAULT_API_MODEL_DEFINITION
        :param int modified: modification timestamp (epoch seconds)
        :param kwargs: overrides for any top-level model key
        :return: the model as a dict
        """
        if not model_definition:
            model_definition = DEFAULT_API_MODEL_DEFINITION
        if not modified:
            modified = int(time())

        tmpls = kwargs.get('tmpls', [
            self.new_template(template['name'], i, formatting=template['data'])
            for i, template in enumerate(model_definition['templates'])
        ])
        css = kwargs.get('css', model_definition.get('css', None))
        if css is None:
            css = DEFAULT_MODEL['css']

        model_id = self._unique_id('models')
        model = dict([
            ("vers", []),
            ("name", model_name),
            ("tags", []),
            ("did", None),
            ("usn", -1),
            ("req", [[0, "all", [0]]]),
            ("flds", [self.new_field(field_name, i, **kwargs.get('flds_kwargs', dict()))
                      for i, field_name in enumerate(model_header)]),
            ("sortf", 0),
            ("latexPre", DEFAULT_MODEL['latexPre']),
            ("tmpls", tmpls),
            ("latexPost", DEFAULT_MODEL['latexPost']),
            ("type", 0),
            ("id", model_id),
            ("css", css),
            ("mod", modified)
        ])
        # kwargs may override any existing key, but cannot add new ones.
        for key in model:
            if key in kwargs:
                model[key] = kwargs[key]
        return model

    @staticmethod
    def new_field(field_name: str, ordering: int, **kwargs):
        """Create a model field definition. Fields have no unique ID.

        :param field_name:
        :param ordering: zero-based position of the field in the model
        :param kwargs: overrides for any existing field key
        :return:
        """
        field = {
            'name': field_name,
            'rtl': False,
            'sticky': False,
            'media': [],
            'ord': ordering,
            'font': 'Arial',
            'size': 12
        }
        field.update({k: v for k, v in kwargs.items() if k in field})
        return field

    @staticmethod
    def new_template(template_name: str, ordering: int, formatting: dict=None, **kwargs):
        """Create a card template. Templates have no unique ID.

        :param template_name:
        :param ordering: zero-based template ordinal within the model
        :param formatting: qfmt/afmt/... overrides, merged into kwargs
        :param kwargs: overrides for any existing template key
        :return:
        """
        if formatting is not None:
            kwargs.update(formatting)

        template = {
            'name': template_name,
            'qfmt': DEFAULT_TEMPLATE['qfmt'],
            'did': None,
            'bafmt': DEFAULT_TEMPLATE['bafmt'],
            'afmt': DEFAULT_TEMPLATE['afmt'],
            'ord': ordering,
            'bqfmt': DEFAULT_TEMPLATE['bqfmt']
        }
        template.update({k: v for k, v in kwargs.items() if k in template})
        return template

    def new_note(self, flds_list: iter, model_id: int, modified: int=None, tags_list: iter=None, **kwargs):
        """Create a note row; flds_list[0] (stripped of HTML) becomes the
        sort field and the checksum source."""
        if tags_list is None:
            tags_list = []
        if modified is None:
            modified = int(time())

        sfld = BeautifulSoup(flds_list[0], 'html.parser').text
        note = OrderedDict([
            ('id', self._unique_id('notes')),
            ('guid', guid64()),
            ('mid', model_id),
            ('mod', modified),
            ('usn', -1),
            ('tags', ' '.join(tags_list)),
            ('flds', '\x1f'.join(flds_list)),
            ('sfld', sfld),
            # NOTE(review): Anki proper stores csum as the integer value of
            # the first 8 hex digits of this hash; a hex string is stored
            # here -- confirm downstream consumers expect that.
            ('csum', sha1(sfld.encode('utf8')).hexdigest()),
            ('flags', 0),
            ('data', '')
        ])
        for key in note:
            if key in kwargs:
                note[key] = kwargs[key]

        assert len(note) == 11, 'Invalid Anki Note format.'
        return note

    def new_card(self, note_id: int, deck_id: int, ordering: int, modified: int, **kwargs):
        """Create a card row linking a note to a deck and template ordinal."""
        card = OrderedDict([
            ('id', self._unique_id('cards')),
            ('nid', note_id),
            ('did', deck_id),
            ('ord', ordering),
            ('mod', modified),
            ('usn', -1),
            ('type', 0),
            ('queue', 0),
            ('due', note_id),
            # Due is used differently for different card types:
            #   new: note id or random int
            #   due: integer day, relative to the collection's creation time
            #   learning: integer timestamp
            ('ivl', 0),
            ('factor', 0),
            ('reps', 0),
            ('lapses', 0),
            ('left', 0),
            ('odue', 0),
            ('odid', 0),
            ('flags', 0),
            ('data', '')
        ])
        for key in card:
            if key in kwargs:
                card[key] = kwargs[key]

        assert len(card) == 18, 'Invalid Anki Card format.'
        return card

    def new_deck(self, deck_name, **kwargs):
        """Create a deck definition with zeroed scheduling counters."""
        deck = {
            'desc': '',
            'name': deck_name,
            'extendRev': 50,
            'usn': 0,
            'collapsed': False,
            'newToday': [0, 0],
            'timeToday': [0, 0],
            'dyn': 0,
            'extendNew': 10,
            'conf': 1,
            'revToday': [0, 0],
            'lrnToday': [0, 0],
            'id': self._unique_id('decks'),
            'mod': int(time())
        }
        deck.update({k: v for k, v in kwargs.items() if k in deck})
        return deck

    def new_collection(self, modified: int=None, models=None, decks=None, **kwargs):
        """Create the single ``col`` row; JSON-valued columns are
        serialized with json.dumps.

        :param int modified: epoch milliseconds
        :param OrderedDict models:
        :param OrderedDict decks:
        :param kwargs: overrides for any existing column
        :return:
        """
        if modified is None:
            modified = int(time() * 1000)
        if models is None:
            models = DEFAULT_COLLECTION['models']
        if decks is None:
            decks = DEFAULT_COLLECTION['decks']

        collection = OrderedDict([
            ('id', 1),
            ('crt', int(time())),
            ('mod', modified),
            ('scm', int(time() * 1000)),
            ('ver', DEFAULT_COLLECTION['ver']),
            ('dty', 0),
            ('usn', 0),
            ('ls', 0),
            ('conf', json.dumps(DEFAULT_COLLECTION['conf'])),
            ('models', json.dumps(models)),
            ('decks', json.dumps(decks)),
            ('dconf', json.dumps(DEFAULT_COLLECTION['dconf'])),
            ('tags', json.dumps(DEFAULT_COLLECTION['tags']))
        ])
        for key, value in kwargs.items():
            if key in collection:
                collection[key] = value
        return collection

    # A commented-out stringify_for_sqlite() helper (driven by an IS_JSON
    # table) used to live here; it was never enabled.

    def _unique_id(self, item_type: str):
        """Return a fresh millisecond-timestamp id for *item_type*,
        incrementing past any id already handed out or pre-registered."""
        item_id = int(time() * 1000)
        while item_id in self.ids[item_type]:
            item_id += 1
        self.ids[item_type].add(item_id)
        return item_id
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/tools/create.py
create.py
import json


def write_anki_table(conn, table_name, new_records, do_commit=True):
    """Insert new rows into an Anki sqlite table.

    :param sqlite3.Connection conn:
    :param 'notes'|'cards' table_name: trusted table name (interpolated,
        not escaped -- must never come from untrusted input)
    :param iter of OrderedDict new_records: rows whose keys are column names
    :param bool do_commit: commit once after all inserts
    :return:
    """
    for new_record in new_records:
        columns = ','.join(new_record.keys())
        # Table/column identifiers cannot be bound parameters; the row
        # values are bound to avoid any quoting issues.
        placeholders = ','.join(['?'] * len(new_record))
        conn.execute('INSERT INTO {} ({}) VALUES ({})'
                     .format(table_name, columns, placeholders),
                     tuple(new_record.values()))
    if do_commit:
        conn.commit()


def write_anki_json(conn, json_name, new_dicts, do_commit=True):
    """Merge new objects into a JSON column of the single ``col`` row.

    :param sqlite3.Connection conn:
    :param 'models'|'decks' json_name: trusted column name (interpolated,
        not escaped)
    :param iter of dict new_dicts: each dict must carry an 'id' key
    :param bool do_commit:
    :return:
    """
    cursor = conn.execute('SELECT {} FROM col'.format(json_name))
    json_item = json.loads(cursor.fetchone()[0])

    for new_dict in new_dicts:
        # Bug fix: keys parsed back from JSON are strings, so index with
        # str(id). Using the raw int id created a second, distinct key for
        # an already-present id, and json.dumps then emitted duplicate
        # "<id>" keys in the stored document.
        json_item[str(new_dict['id'])] = new_dict

    conn.execute('UPDATE col SET {}=?'.format(json_name),
                 (json.dumps(json_item),))
    if do_commit:
        conn.commit()


def write_anki_schema(conn):
    """Create an empty Anki 2 database schema (tables and indexes).

    :param sqlite3.Connection conn:
    :return:
    """
    conn.executescript("""
-- Cards are what you review.
-- There can be multiple cards for each note, as determined by the Template.
CREATE TABLE cards (
    id      integer primary key,  -- the epoch milliseconds of when the card was created
    nid     integer not null,     -- notes.id
    did     integer not null,     -- deck id (available in col table)
    ord     integer not null,     -- ordinal : identifies which of the card templates it corresponds to
                                  -- valid values are from 0 to num templates - 1
    mod     integer not null,     -- modificaton time as epoch seconds
    usn     integer not null,     -- update sequence number : used to figure out diffs when syncing.
                                  -- value of -1 indicates changes that need to be pushed to server.
                                  -- usn < server usn indicates changes that need to be pulled from server.
    type    integer not null,     -- 0=new, 1=learning, 2=due, 3=filtered
    queue   integer not null,     -- -3=sched buried, -2=user buried, -1=suspended,
                                  -- 0=new, 1=learning, 2=due (as for type)
                                  -- 3=in learning, next rev in at least a day after the previous review
    due     integer not null,     -- Due is used differently for different card types:
                                  --   new: note id or random int
                                  --   due: integer day, relative to the collection's creation time
                                  --   learning: integer timestamp
    ivl     integer not null,     -- interval (used in SRS algorithm). Negative = seconds, positive = days
    factor  integer not null,     -- factor (used in SRS algorithm)
    reps    integer not null,     -- number of reviews
    lapses  integer not null,     -- the number of times the card went from a "was answered correctly"
                                  -- to "was answered incorrectly" state
    left    integer not null,     -- reps left till graduation
    odue    integer not null,     -- original due: only used when the card is currently in filtered deck
    odid    integer not null,     -- original did: only used when the card is currently in filtered deck
    flags   integer not null,     -- currently unused
    data    text not null         -- currently unused
);

-- col contains a single row that holds various information about the collection
CREATE TABLE col (
    id      integer primary key,  -- arbitrary number since there is only one row
    crt     integer not null,     -- created timestamp
    mod     integer not null,     -- last modified in milliseconds
    scm     integer not null,     -- schema mod time: time when "schema" was modified.
                                  -- If server scm is different from the client scm a full-sync is required
    ver     integer not null,     -- version
    dty     integer not null,     -- dirty: unused, set to 0
    usn     integer not null,     -- update sequence number: used for finding diffs when syncing.
                                  -- See usn in cards table for more details.
    ls      integer not null,     -- "last sync time"
    conf    text not null,        -- json object containing configuration options that are synced
    models  text not null,        -- json array of json objects containing the models (aka Note types)
    decks   text not null,        -- json array of json objects containing the deck
    dconf   text not null,        -- json array of json objects containing the deck options
    tags    text not null         -- a cache of tags used in the collection (This list is displayed in the browser. Potentially at other place)
);

-- Contains deleted cards, notes, and decks that need to be synced.
-- usn should be set to -1,
-- oid is the original id.
-- type: 0 for a card, 1 for a note and 2 for a deck
CREATE TABLE graves (
    usn     integer not null,
    oid     integer not null,
    type    integer not null
);

-- Notes contain the raw information that is formatted into a number of cards
-- according to the models
CREATE TABLE notes (
    id      integer primary key,  -- epoch seconds of when the note was created
    guid    text not null,        -- globally unique id, almost certainly used for syncing
    mid     integer not null,     -- model id
    mod     integer not null,     -- modification timestamp, epoch seconds
    usn     integer not null,     -- update sequence number: for finding diffs when syncing.
                                  -- See the description in the cards table for more info
    tags    text not null,        -- space-separated string of tags.
                                  -- includes space at the beginning and end, for LIKE "% tag %" queries
    flds    text not null,        -- the values of the fields in this note. separated by 0x1f (31) character.
    sfld    text not null,        -- sort field: used for quick sorting and duplicate check
    csum    integer not null,     -- field checksum used for duplicate check.
                                  -- integer representation of first 8 digits of sha1 hash of the first field
    flags   integer not null,     -- unused
    data    text not null         -- unused
);

-- revlog is a review history; it has a row for every review you've ever done!
CREATE TABLE revlog (
    id      integer primary key,  -- epoch-milliseconds timestamp of when you did the review
    cid     integer not null,     -- cards.id
    usn     integer not null,     -- update sequence number: for finding diffs when syncing.
                                  -- See the description in the cards table for more info
    ease    integer not null,     -- which button you pushed to score your recall.
                                  -- review:  1(wrong), 2(hard), 3(ok), 4(easy)
                                  -- learn/relearn:   1(wrong), 2(ok), 3(easy)
    ivl     integer not null,     -- interval
    lastIvl integer not null,     -- last interval
    factor  integer not null,     -- factor
    time    integer not null,     -- how many milliseconds your review took, up to 60000 (60s)
    type    integer not null      -- 0=learn, 1=review, 2=relearn, 3=cram
);

CREATE INDEX ix_cards_nid on cards (nid);
CREATE INDEX ix_cards_sched on cards (did, queue, due);
CREATE INDEX ix_cards_usn on cards (usn);
CREATE INDEX ix_notes_csum on notes (csum);
CREATE INDEX ix_notes_usn on notes (usn);
CREATE INDEX ix_revlog_cid on revlog (cid);
CREATE INDEX ix_revlog_usn on revlog (usn);
""")
    conn.commit()
AnkiTools
/AnkiTools-0.3.7-py3-none-any.whl/ankitools/tools/write.py
write.py
==================
AnkiVim
==================

|Build Status| |Health_| |Coverage_| |Pypi_|

Overview
========

Use vim to rapidly write text files immediately importable into anki(1).

Requirements
============

* Python >= 2.7 or Python >= 3.4

Installation
============

Simply run:

.. code-block:: python

    pip install AnkiVim

You can verify that this step succeeded and see available options by running:

.. code-block:: python

    anki-vim -h

Generating Anki Cards
=====================

1. Call:

.. code-block:: python

    anki-vim DECKNAME

2. vim starts with a preformatted card. Start typing the front part of the
   anki card below "QUESTION", then fill out the back below "ANSWER".
   (*NOTE*: It is important to leave the QUESTION and ANSWER headers intact
   throughout)

3. save the file and exit the editor. (vim: ":wq")
   => the card contents are saved to:
   `$HOME/.ankivim/decks/DECKNAME/raw_cards.txt`.
   (alternatively, you can specify a custom location for your decks using the
   `--deckpath` option of `script/anki-vim.py`.)

4. editor starts right back up to generate a new card for the same deck
   => Iterate 2.-4. until all cards are generated.

5. to stop the card creation process, simply close your editor
   (for vim: ":q" or ":q!", for additional information see
   `this book <https://www.amazon.com/How-Exit-Vim-Chris-Worfolk-ebook/dp/B01N5M1U6W>`_)

Importing into Anki
=====================

Procedure to import anki cards generated by this tool into anki:

1. Open Anki
2. Generate the deck to import into or click on an existing deck
3. Click import and navigate to "/path/to/anki-vim/decks/DECKNAME/raw_cards.txt"
4. check "allow html in fields"
5. Done

Supports:
=========

* Latex commands of any kind
* html tags of any kind (images can be included this way as well)

.. |Build Status| image:: https://travis-ci.org/MFreidank/AnkiVim.svg?branch=master
   :target: https://travis-ci.org/MFreidank/AnkiVim
..
|Coverage_| image:: https://coveralls.io/repos/github/MFreidank/AnkiVim/badge.svg :target: https://coveralls.io/github/MFreidank/AnkiVim :alt: Coverage .. |Health_| image:: https://api.codacy.com/project/badge/Grade/d0d6624881c0415fb72999e355741e2b :target: https://www.codacy.com/app/MFreidank/AnkiVim?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=MFreidank/AnkiVim&amp;utm_campaign=Badge_Grade :alt: Health .. |Pypi_| image:: https://badge.fury.io/py/AnkiVim.svg :target: https://badge.fury.io/py/AnkiVim
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/README.rst
README.rst
* select a different prefix for underscore */ $u = _.noConflict(); /** * make the code below compatible with browsers without * an installed firebug like debugger if (!window.console || !console.firebug) { var names = ["log", "debug", "info", "warn", "error", "assert", "dir", "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", "profile", "profileEnd"]; window.console = {}; for (var i = 0; i < names.length; ++i) window.console[names[i]] = function() {}; } */ /** * small helper function to urldecode strings */ jQuery.urldecode = function(x) { return decodeURIComponent(x).replace(/\+/g, ' '); }; /** * small helper function to urlencode strings */ jQuery.urlencode = encodeURIComponent; /** * This function returns the parsed url parameters of the * current request. Multiple values per key are supported, * it will always return arrays of strings for the value parts. */ jQuery.getQueryParameters = function(s) { if (typeof s == 'undefined') s = document.location.search; var parts = s.substr(s.indexOf('?') + 1).split('&'); var result = {}; for (var i = 0; i < parts.length; i++) { var tmp = parts[i].split('=', 2); var key = jQuery.urldecode(tmp[0]); var value = jQuery.urldecode(tmp[1]); if (key in result) result[key].push(value); else result[key] = [value]; } return result; }; /** * highlight a given string on a jquery object by wrapping it in * span elements with the given class name. 
*/ jQuery.fn.highlightText = function(text, className) { function highlight(node) { if (node.nodeType == 3) { var val = node.nodeValue; var pos = val.toLowerCase().indexOf(text); if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { var span = document.createElement("span"); span.className = className; span.appendChild(document.createTextNode(val.substr(pos, text.length))); node.parentNode.insertBefore(span, node.parentNode.insertBefore( document.createTextNode(val.substr(pos + text.length)), node.nextSibling)); node.nodeValue = val.substr(0, pos); } } else if (!jQuery(node).is("button, select, textarea")) { jQuery.each(node.childNodes, function() { highlight(this); }); } } return this.each(function() { highlight(this); }); }; /* * backward compatibility for jQuery.browser * This will be supported until firefox bug is fixed. */ if (!jQuery.browser) { jQuery.uaMatch = function(ua) { ua = ua.toLowerCase(); var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || /(webkit)[ \/]([\w.]+)/.exec(ua) || /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || /(msie) ([\w.]+)/.exec(ua) || ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || []; return { browser: match[ 1 ] || "", version: match[ 2 ] || "0" }; }; jQuery.browser = {}; jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; } /** * Small JavaScript module for the documentation. */ var Documentation = { init : function() { this.fixFirefoxAnchorBug(); this.highlightSearchWords(); this.initIndexTable(); }, /** * i18n support */ TRANSLATIONS : {}, PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, LOCALE : 'unknown', // gettext and ngettext don't access this so that the functions // can safely bound to a different name (_ = Documentation.gettext) gettext : function(string) { var translated = Documentation.TRANSLATIONS[string]; if (typeof translated == 'undefined') return string; return (typeof translated == 'string') ? 
translated : translated[0]; }, ngettext : function(singular, plural, n) { var translated = Documentation.TRANSLATIONS[singular]; if (typeof translated == 'undefined') return (n == 1) ? singular : plural; return translated[Documentation.PLURALEXPR(n)]; }, addTranslations : function(catalog) { for (var key in catalog.messages) this.TRANSLATIONS[key] = catalog.messages[key]; this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); this.LOCALE = catalog.locale; }, /** * add context elements like header anchor links */ addContextElements : function() { $('div[id] > :header:first').each(function() { $('<a class="headerlink">\u00B6</a>'). attr('href', '#' + this.id). attr('title', _('Permalink to this headline')). appendTo(this); }); $('dt[id]').each(function() { $('<a class="headerlink">\u00B6</a>'). attr('href', '#' + this.id). attr('title', _('Permalink to this definition')). appendTo(this); }); }, /** * workaround a firefox stupidity * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 */ fixFirefoxAnchorBug : function() { if (document.location.hash) window.setTimeout(function() { document.location.href += ''; }, 10); }, /** * highlight the search words provided in the url in the text */ highlightSearchWords : function() { var params = $.getQueryParameters(); var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; if (terms.length) { var body = $('div.body'); if (!body.length) { body = $('body'); } window.setTimeout(function() { $.each(terms, function() { body.highlightText(this.toLowerCase(), 'highlighted'); }); }, 10); $('<p class="highlight-link"><a href="javascript:Documentation.' 
+ 'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>') .appendTo($('#searchbox')); } }, /** * init the domain index toggle buttons */ initIndexTable : function() { var togglers = $('img.toggler').click(function() { var src = $(this).attr('src'); var idnum = $(this).attr('id').substr(7); $('tr.cg-' + idnum).toggle(); if (src.substr(-9) == 'minus.png') $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); else $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); }).css('display', ''); if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { togglers.click(); } }, /** * helper function to hide the search marks again */ hideSearchWords : function() { $('#searchbox .highlight-link').fadeOut(300); $('span.highlighted').removeClass('highlighted'); }, /** * make the url absolute */ makeURL : function(relativeURL) { return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; }, /** * get the current relative url */ getCurrentURL : function() { var path = document.location.pathname; var parts = path.split(/\//); $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { if (this == '..') parts.pop(); }); var url = parts.join('/'); return path.substring(url.lastIndexOf('/') + 1, path.length - 1); }, initOnKeyListeners: function() { $(document).keyup(function(event) { var activeElementType = document.activeElement.tagName; // don't navigate when in search box or textarea if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') { switch (event.keyCode) { case 37: // left var prevHref = $('link[rel="prev"]').prop('href'); if (prevHref) { window.location.href = prevHref; return false; } case 39: // right var nextHref = $('link[rel="next"]').prop('href'); if (nextHref) { window.location.href = nextHref; return false; } } } }); } }; // quick alias for translations _ = Documentation.gettext; $(document).ready(function() { Documentation.init(); });
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/docs/build/html/_static/doctools.js
doctools.js
(function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source== c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c, h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each= b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e<f;e++){if(e in a&&c.call(d,a[e],e,a)===n)break}else for(e in a)if(b.has(a,e)&&c.call(d,a[e],e,a)===n)break};b.map=b.collect=function(a,c,b){var e=[];if(a==null)return e;if(x&&a.map===x)return a.map(c,b);j(a,function(a,g,h){e[e.length]=c.call(b,a,g,h)});if(a.length===+a.length)e.length=a.length;return 
e};b.reduce=b.foldl=b.inject=function(a,c,d,e){var f=arguments.length>2;a== null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect= function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e= e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var 
e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b<e.computed&&(e={value:a,computed:b})}); return e.value};b.shuffle=function(a){var b=[],d;j(a,function(a,f){f==0?b[0]=a:(d=Math.floor(Math.random()*(f+1)),b[f]=b[d],b[d]=a)});return b};b.sortBy=function(a,c,d){return b.pluck(b.map(a,function(a,b,g){return{value:a,criteria:c.call(d,a,b,g)}}).sort(function(a,b){var c=a.criteria,d=b.criteria;return c<d?-1:c>d?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a, c,d){d||(d=b.identity);for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?e=g+1:f=g}return e};b.toArray=function(a){return!a?[]:a.toArray?a.toArray():b.isArray(a)?i.call(a):b.isArguments(a)?i.call(a):b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=b.head=function(a,b,d){return b!=null&&!d?i.call(a,0,b):a[0]};b.initial=function(a,b,d){return i.call(a,0,a.length-(b==null||d?1:b))};b.last=function(a,b,d){return b!=null&&!d?i.call(a,Math.max(a.length-b,0)):a[a.length-1]};b.rest= b.tail=function(a,b,d){return i.call(a,b==null||d?1:b)};b.compact=function(a){return b.filter(a,function(a){return!!a})};b.flatten=function(a,c){return b.reduce(a,function(a,e){if(b.isArray(e))return a.concat(c?e:b.flatten(e));a[a.length]=e;return a},[])};b.without=function(a){return b.difference(a,i.call(arguments,1))};b.uniq=b.unique=function(a,c,d){var d=d?b.map(a,d):a,e=[];b.reduce(d,function(d,g,h){if(0==h||(c===true?b.last(d)!=g:!b.include(d,g)))d[d.length]=g,e[e.length]=a[h];return d},[]); return e};b.union=function(){return b.uniq(b.flatten(arguments,true))};b.intersection=b.intersect=function(a){var c=i.call(arguments,1);return b.filter(b.uniq(a),function(a){return b.every(c,function(c){return b.indexOf(c,a)>=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var 
a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e<c;e++)d[e]=b.pluck(a,""+e);return d};b.indexOf=function(a,c, d){if(a==null)return-1;var e;if(d)return d=b.sortedIndex(a,c),a[d]===c?d:-1;if(p&&a.indexOf===p)return a.indexOf(c);for(d=0,e=a.length;d<e;d++)if(d in a&&a[d]===c)return d;return-1};b.lastIndexOf=function(a,b){if(a==null)return-1;if(D&&a.lastIndexOf===D)return a.lastIndexOf(b);for(var d=a.length;d--;)if(d in a&&a[d]===b)return d;return-1};b.range=function(a,b,d){arguments.length<=1&&(b=a||0,a=0);for(var d=arguments[2]||1,e=Math.max(Math.ceil((b-a)/d),0),f=0,g=Array(e);f<e;)g[f++]=a,a+=d;return g}; var F=function(){};b.bind=function(a,c){var d,e;if(a.bind===s&&s)return s.apply(a,i.call(arguments,1));if(!b.isFunction(a))throw new TypeError;e=i.call(arguments,2);return d=function(){if(!(this instanceof d))return a.apply(c,e.concat(i.call(arguments)));F.prototype=a.prototype;var b=new F,g=a.apply(b,e.concat(i.call(arguments)));return Object(g)===g?g:b}};b.bindAll=function(a){var c=i.call(arguments,1);c.length==0&&(c=b.functions(a));j(c,function(c){a[c]=b.bind(a[c],a)});return a};b.memoize=function(a, c){var d={};c||(c=b.identity);return function(){var e=c.apply(this,arguments);return b.has(d,e)?d[e]:d[e]=a.apply(this,arguments)}};b.delay=function(a,b){var d=i.call(arguments,2);return setTimeout(function(){return a.apply(a,d)},b)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(i.call(arguments,1)))};b.throttle=function(a,c){var d,e,f,g,h,i=b.debounce(function(){h=g=false},c);return function(){d=this;e=arguments;var b;f||(f=setTimeout(function(){f=null;h&&a.apply(d,e);i()},c));g?h=true: a.apply(d,e);i();g=true}};b.debounce=function(a,b){var d;return function(){var e=this,f=arguments;clearTimeout(d);d=setTimeout(function(){d=null;a.apply(e,f)},b)}};b.once=function(a){var b=false,d;return function(){if(b)return d;b=true;return d=a.apply(this,arguments)}};b.wrap=function(a,b){return function(){var d=[a].concat(i.call(arguments,0));return 
b.apply(this,d)}};b.compose=function(){var a=arguments;return function(){for(var b=arguments,d=a.length-1;d>=0;d--)b=[a[d].apply(this,b)];return b[0]}}; b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments, 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)}; b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"}; b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var 
e=0;e<a;e++)b.call(d,e)};b.escape=function(a){return(""+a).replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;").replace(/'/g,"&#x27;").replace(/\//g,"&#x2F;")};b.mixin=function(a){j(b.functions(a), function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+ u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]= function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain= true;return this};m.prototype.value=function(){return this._wrapped}}).call(this);
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/docs/build/html/_static/underscore.js
underscore.js
(function() { // Baseline setup // -------------- // Establish the root object, `window` in the browser, or `global` on the server. var root = this; // Save the previous value of the `_` variable. var previousUnderscore = root._; // Establish the object that gets returned to break out of a loop iteration. var breaker = {}; // Save bytes in the minified (but not gzipped) version: var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; // Create quick reference variables for speed access to core prototypes. var slice = ArrayProto.slice, unshift = ArrayProto.unshift, toString = ObjProto.toString, hasOwnProperty = ObjProto.hasOwnProperty; // All **ECMAScript 5** native function implementations that we hope to use // are declared here. var nativeForEach = ArrayProto.forEach, nativeMap = ArrayProto.map, nativeReduce = ArrayProto.reduce, nativeReduceRight = ArrayProto.reduceRight, nativeFilter = ArrayProto.filter, nativeEvery = ArrayProto.every, nativeSome = ArrayProto.some, nativeIndexOf = ArrayProto.indexOf, nativeLastIndexOf = ArrayProto.lastIndexOf, nativeIsArray = Array.isArray, nativeKeys = Object.keys, nativeBind = FuncProto.bind; // Create a safe reference to the Underscore object for use below. var _ = function(obj) { return new wrapper(obj); }; // Export the Underscore object for **Node.js**, with // backwards-compatibility for the old `require()` API. If we're in // the browser, add `_` as a global object via a string identifier, // for Closure Compiler "advanced" mode. if (typeof exports !== 'undefined') { if (typeof module !== 'undefined' && module.exports) { exports = module.exports = _; } exports._ = _; } else { root['_'] = _; } // Current version. _.VERSION = '1.3.1'; // Collection Functions // -------------------- // The cornerstone, an `each` implementation, aka `forEach`. // Handles objects with the built-in `forEach`, arrays, and raw objects. // Delegates to **ECMAScript 5**'s native `forEach` if available. 
var each = _.each = _.forEach = function(obj, iterator, context) { if (obj == null) return; if (nativeForEach && obj.forEach === nativeForEach) { obj.forEach(iterator, context); } else if (obj.length === +obj.length) { for (var i = 0, l = obj.length; i < l; i++) { if (i in obj && iterator.call(context, obj[i], i, obj) === breaker) return; } } else { for (var key in obj) { if (_.has(obj, key)) { if (iterator.call(context, obj[key], key, obj) === breaker) return; } } } }; // Return the results of applying the iterator to each element. // Delegates to **ECMAScript 5**'s native `map` if available. _.map = _.collect = function(obj, iterator, context) { var results = []; if (obj == null) return results; if (nativeMap && obj.map === nativeMap) return obj.map(iterator, context); each(obj, function(value, index, list) { results[results.length] = iterator.call(context, value, index, list); }); if (obj.length === +obj.length) results.length = obj.length; return results; }; // **Reduce** builds up a single result from a list of values, aka `inject`, // or `foldl`. Delegates to **ECMAScript 5**'s native `reduce` if available. _.reduce = _.foldl = _.inject = function(obj, iterator, memo, context) { var initial = arguments.length > 2; if (obj == null) obj = []; if (nativeReduce && obj.reduce === nativeReduce) { if (context) iterator = _.bind(iterator, context); return initial ? obj.reduce(iterator, memo) : obj.reduce(iterator); } each(obj, function(value, index, list) { if (!initial) { memo = value; initial = true; } else { memo = iterator.call(context, memo, value, index, list); } }); if (!initial) throw new TypeError('Reduce of empty array with no initial value'); return memo; }; // The right-associative version of reduce, also known as `foldr`. // Delegates to **ECMAScript 5**'s native `reduceRight` if available. 
_.reduceRight = _.foldr = function(obj, iterator, memo, context) { var initial = arguments.length > 2; if (obj == null) obj = []; if (nativeReduceRight && obj.reduceRight === nativeReduceRight) { if (context) iterator = _.bind(iterator, context); return initial ? obj.reduceRight(iterator, memo) : obj.reduceRight(iterator); } var reversed = _.toArray(obj).reverse(); if (context && !initial) iterator = _.bind(iterator, context); return initial ? _.reduce(reversed, iterator, memo, context) : _.reduce(reversed, iterator); }; // Return the first value which passes a truth test. Aliased as `detect`. _.find = _.detect = function(obj, iterator, context) { var result; any(obj, function(value, index, list) { if (iterator.call(context, value, index, list)) { result = value; return true; } }); return result; }; // Return all the elements that pass a truth test. // Delegates to **ECMAScript 5**'s native `filter` if available. // Aliased as `select`. _.filter = _.select = function(obj, iterator, context) { var results = []; if (obj == null) return results; if (nativeFilter && obj.filter === nativeFilter) return obj.filter(iterator, context); each(obj, function(value, index, list) { if (iterator.call(context, value, index, list)) results[results.length] = value; }); return results; }; // Return all the elements for which a truth test fails. _.reject = function(obj, iterator, context) { var results = []; if (obj == null) return results; each(obj, function(value, index, list) { if (!iterator.call(context, value, index, list)) results[results.length] = value; }); return results; }; // Determine whether all of the elements match a truth test. // Delegates to **ECMAScript 5**'s native `every` if available. // Aliased as `all`. 
_.every = _.all = function(obj, iterator, context) { var result = true; if (obj == null) return result; if (nativeEvery && obj.every === nativeEvery) return obj.every(iterator, context); each(obj, function(value, index, list) { if (!(result = result && iterator.call(context, value, index, list))) return breaker; }); return result; }; // Determine if at least one element in the object matches a truth test. // Delegates to **ECMAScript 5**'s native `some` if available. // Aliased as `any`. var any = _.some = _.any = function(obj, iterator, context) { iterator || (iterator = _.identity); var result = false; if (obj == null) return result; if (nativeSome && obj.some === nativeSome) return obj.some(iterator, context); each(obj, function(value, index, list) { if (result || (result = iterator.call(context, value, index, list))) return breaker; }); return !!result; }; // Determine if a given value is included in the array or object using `===`. // Aliased as `contains`. _.include = _.contains = function(obj, target) { var found = false; if (obj == null) return found; if (nativeIndexOf && obj.indexOf === nativeIndexOf) return obj.indexOf(target) != -1; found = any(obj, function(value) { return value === target; }); return found; }; // Invoke a method (with arguments) on every item in a collection. _.invoke = function(obj, method) { var args = slice.call(arguments, 2); return _.map(obj, function(value) { return (_.isFunction(method) ? method || value : value[method]).apply(value, args); }); }; // Convenience version of a common use case of `map`: fetching a property. _.pluck = function(obj, key) { return _.map(obj, function(value){ return value[key]; }); }; // Return the maximum element or (element-based computation). 
_.max = function(obj, iterator, context) { if (!iterator && _.isArray(obj)) return Math.max.apply(Math, obj); if (!iterator && _.isEmpty(obj)) return -Infinity; var result = {computed : -Infinity}; each(obj, function(value, index, list) { var computed = iterator ? iterator.call(context, value, index, list) : value; computed >= result.computed && (result = {value : value, computed : computed}); }); return result.value; }; // Return the minimum element (or element-based computation). _.min = function(obj, iterator, context) { if (!iterator && _.isArray(obj)) return Math.min.apply(Math, obj); if (!iterator && _.isEmpty(obj)) return Infinity; var result = {computed : Infinity}; each(obj, function(value, index, list) { var computed = iterator ? iterator.call(context, value, index, list) : value; computed < result.computed && (result = {value : value, computed : computed}); }); return result.value; }; // Shuffle an array. _.shuffle = function(obj) { var shuffled = [], rand; each(obj, function(value, index, list) { if (index == 0) { shuffled[0] = value; } else { rand = Math.floor(Math.random() * (index + 1)); shuffled[index] = shuffled[rand]; shuffled[rand] = value; } }); return shuffled; }; // Sort the object's values by a criterion produced by an iterator. _.sortBy = function(obj, iterator, context) { return _.pluck(_.map(obj, function(value, index, list) { return { value : value, criteria : iterator.call(context, value, index, list) }; }).sort(function(left, right) { var a = left.criteria, b = right.criteria; return a < b ? -1 : a > b ? 1 : 0; }), 'value'); }; // Groups the object's values by a criterion. Pass either a string attribute // to group by, or a function that returns the criterion. _.groupBy = function(obj, val) { var result = {}; var iterator = _.isFunction(val) ? 
val : function(obj) { return obj[val]; }; each(obj, function(value, index) { var key = iterator(value, index); (result[key] || (result[key] = [])).push(value); }); return result; }; // Use a comparator function to figure out at what index an object should // be inserted so as to maintain order. Uses binary search. _.sortedIndex = function(array, obj, iterator) { iterator || (iterator = _.identity); var low = 0, high = array.length; while (low < high) { var mid = (low + high) >> 1; iterator(array[mid]) < iterator(obj) ? low = mid + 1 : high = mid; } return low; }; // Safely convert anything iterable into a real, live array. _.toArray = function(iterable) { if (!iterable) return []; if (iterable.toArray) return iterable.toArray(); if (_.isArray(iterable)) return slice.call(iterable); if (_.isArguments(iterable)) return slice.call(iterable); return _.values(iterable); }; // Return the number of elements in an object. _.size = function(obj) { return _.toArray(obj).length; }; // Array Functions // --------------- // Get the first element of an array. Passing **n** will return the first N // values in the array. Aliased as `head`. The **guard** check allows it to work // with `_.map`. _.first = _.head = function(array, n, guard) { return (n != null) && !guard ? slice.call(array, 0, n) : array[0]; }; // Returns everything but the last entry of the array. Especcialy useful on // the arguments object. Passing **n** will return all the values in // the array, excluding the last N. The **guard** check allows it to work with // `_.map`. _.initial = function(array, n, guard) { return slice.call(array, 0, array.length - ((n == null) || guard ? 1 : n)); }; // Get the last element of an array. Passing **n** will return the last N // values in the array. The **guard** check allows it to work with `_.map`. 
_.last = function(array, n, guard) { if ((n != null) && !guard) { return slice.call(array, Math.max(array.length - n, 0)); } else { return array[array.length - 1]; } }; // Returns everything but the first entry of the array. Aliased as `tail`. // Especially useful on the arguments object. Passing an **index** will return // the rest of the values in the array from that index onward. The **guard** // check allows it to work with `_.map`. _.rest = _.tail = function(array, index, guard) { return slice.call(array, (index == null) || guard ? 1 : index); }; // Trim out all falsy values from an array. _.compact = function(array) { return _.filter(array, function(value){ return !!value; }); }; // Return a completely flattened version of an array. _.flatten = function(array, shallow) { return _.reduce(array, function(memo, value) { if (_.isArray(value)) return memo.concat(shallow ? value : _.flatten(value)); memo[memo.length] = value; return memo; }, []); }; // Return a version of the array that does not contain the specified value(s). _.without = function(array) { return _.difference(array, slice.call(arguments, 1)); }; // Produce a duplicate-free version of the array. If the array has already // been sorted, you have the option of using a faster algorithm. // Aliased as `unique`. _.uniq = _.unique = function(array, isSorted, iterator) { var initial = iterator ? _.map(array, iterator) : array; var result = []; _.reduce(initial, function(memo, el, i) { if (0 == i || (isSorted === true ? _.last(memo) != el : !_.include(memo, el))) { memo[memo.length] = el; result[result.length] = array[i]; } return memo; }, []); return result; }; // Produce an array that contains the union: each distinct element from all of // the passed-in arrays. _.union = function() { return _.uniq(_.flatten(arguments, true)); }; // Produce an array that contains every item shared between all the // passed-in arrays. (Aliased as "intersect" for back-compat.) 
_.intersection = _.intersect = function(array) { var rest = slice.call(arguments, 1); return _.filter(_.uniq(array), function(item) { return _.every(rest, function(other) { return _.indexOf(other, item) >= 0; }); }); }; // Take the difference between one array and a number of other arrays. // Only the elements present in just the first array will remain. _.difference = function(array) { var rest = _.flatten(slice.call(arguments, 1)); return _.filter(array, function(value){ return !_.include(rest, value); }); }; // Zip together multiple lists into a single array -- elements that share // an index go together. _.zip = function() { var args = slice.call(arguments); var length = _.max(_.pluck(args, 'length')); var results = new Array(length); for (var i = 0; i < length; i++) results[i] = _.pluck(args, "" + i); return results; }; // If the browser doesn't supply us with indexOf (I'm looking at you, **MSIE**), // we need this function. Return the position of the first occurrence of an // item in an array, or -1 if the item is not included in the array. // Delegates to **ECMAScript 5**'s native `indexOf` if available. // If the array is large and already in sort order, pass `true` // for **isSorted** to use binary search. _.indexOf = function(array, item, isSorted) { if (array == null) return -1; var i, l; if (isSorted) { i = _.sortedIndex(array, item); return array[i] === item ? i : -1; } if (nativeIndexOf && array.indexOf === nativeIndexOf) return array.indexOf(item); for (i = 0, l = array.length; i < l; i++) if (i in array && array[i] === item) return i; return -1; }; // Delegates to **ECMAScript 5**'s native `lastIndexOf` if available. _.lastIndexOf = function(array, item) { if (array == null) return -1; if (nativeLastIndexOf && array.lastIndexOf === nativeLastIndexOf) return array.lastIndexOf(item); var i = array.length; while (i--) if (i in array && array[i] === item) return i; return -1; }; // Generate an integer Array containing an arithmetic progression. 
A port of // the native Python `range()` function. See // [the Python documentation](http://docs.python.org/library/functions.html#range). _.range = function(start, stop, step) { if (arguments.length <= 1) { stop = start || 0; start = 0; } step = arguments[2] || 1; var len = Math.max(Math.ceil((stop - start) / step), 0); var idx = 0; var range = new Array(len); while(idx < len) { range[idx++] = start; start += step; } return range; }; // Function (ahem) Functions // ------------------ // Reusable constructor function for prototype setting. var ctor = function(){}; // Create a function bound to a given object (assigning `this`, and arguments, // optionally). Binding with arguments is also known as `curry`. // Delegates to **ECMAScript 5**'s native `Function.bind` if available. // We check for `func.bind` first, to fail fast when `func` is undefined. _.bind = function bind(func, context) { var bound, args; if (func.bind === nativeBind && nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); if (!_.isFunction(func)) throw new TypeError; args = slice.call(arguments, 2); return bound = function() { if (!(this instanceof bound)) return func.apply(context, args.concat(slice.call(arguments))); ctor.prototype = func.prototype; var self = new ctor; var result = func.apply(self, args.concat(slice.call(arguments))); if (Object(result) === result) return result; return self; }; }; // Bind all of an object's methods to that object. Useful for ensuring that // all callbacks defined on an object belong to it. _.bindAll = function(obj) { var funcs = slice.call(arguments, 1); if (funcs.length == 0) funcs = _.functions(obj); each(funcs, function(f) { obj[f] = _.bind(obj[f], obj); }); return obj; }; // Memoize an expensive function by storing its results. _.memoize = function(func, hasher) { var memo = {}; hasher || (hasher = _.identity); return function() { var key = hasher.apply(this, arguments); return _.has(memo, key) ? 
memo[key] : (memo[key] = func.apply(this, arguments)); }; }; // Delays a function for the given number of milliseconds, and then calls // it with the arguments supplied. _.delay = function(func, wait) { var args = slice.call(arguments, 2); return setTimeout(function(){ return func.apply(func, args); }, wait); }; // Defers a function, scheduling it to run after the current call stack has // cleared. _.defer = function(func) { return _.delay.apply(_, [func, 1].concat(slice.call(arguments, 1))); }; // Returns a function, that, when invoked, will only be triggered at most once // during a given window of time. _.throttle = function(func, wait) { var context, args, timeout, throttling, more; var whenDone = _.debounce(function(){ more = throttling = false; }, wait); return function() { context = this; args = arguments; var later = function() { timeout = null; if (more) func.apply(context, args); whenDone(); }; if (!timeout) timeout = setTimeout(later, wait); if (throttling) { more = true; } else { func.apply(context, args); } whenDone(); throttling = true; }; }; // Returns a function, that, as long as it continues to be invoked, will not // be triggered. The function will be called after it stops being called for // N milliseconds. _.debounce = function(func, wait) { var timeout; return function() { var context = this, args = arguments; var later = function() { timeout = null; func.apply(context, args); }; clearTimeout(timeout); timeout = setTimeout(later, wait); }; }; // Returns a function that will be executed at most one time, no matter how // often you call it. Useful for lazy initialization. _.once = function(func) { var ran = false, memo; return function() { if (ran) return memo; ran = true; return memo = func.apply(this, arguments); }; }; // Returns the first function passed as an argument to the second, // allowing you to adjust arguments, run code before and after, and // conditionally execute the original function. 
_.wrap = function(func, wrapper) { return function() { var args = [func].concat(slice.call(arguments, 0)); return wrapper.apply(this, args); }; }; // Returns a function that is the composition of a list of functions, each // consuming the return value of the function that follows. _.compose = function() { var funcs = arguments; return function() { var args = arguments; for (var i = funcs.length - 1; i >= 0; i--) { args = [funcs[i].apply(this, args)]; } return args[0]; }; }; // Returns a function that will only be executed after being called N times. _.after = function(times, func) { if (times <= 0) return func(); return function() { if (--times < 1) { return func.apply(this, arguments); } }; }; // Object Functions // ---------------- // Retrieve the names of an object's properties. // Delegates to **ECMAScript 5**'s native `Object.keys` _.keys = nativeKeys || function(obj) { if (obj !== Object(obj)) throw new TypeError('Invalid object'); var keys = []; for (var key in obj) if (_.has(obj, key)) keys[keys.length] = key; return keys; }; // Retrieve the values of an object's properties. _.values = function(obj) { return _.map(obj, _.identity); }; // Return a sorted list of the function names available on the object. // Aliased as `methods` _.functions = _.methods = function(obj) { var names = []; for (var key in obj) { if (_.isFunction(obj[key])) names.push(key); } return names.sort(); }; // Extend a given object with all the properties in passed-in object(s). _.extend = function(obj) { each(slice.call(arguments, 1), function(source) { for (var prop in source) { obj[prop] = source[prop]; } }); return obj; }; // Fill in a given object with default properties. _.defaults = function(obj) { each(slice.call(arguments, 1), function(source) { for (var prop in source) { if (obj[prop] == null) obj[prop] = source[prop]; } }); return obj; }; // Create a (shallow-cloned) duplicate of an object. _.clone = function(obj) { if (!_.isObject(obj)) return obj; return _.isArray(obj) ? 
obj.slice() : _.extend({}, obj); }; // Invokes interceptor with the obj, and then returns obj. // The primary purpose of this method is to "tap into" a method chain, in // order to perform operations on intermediate results within the chain. _.tap = function(obj, interceptor) { interceptor(obj); return obj; }; // Internal recursive comparison function. function eq(a, b, stack) { // Identical objects are equal. `0 === -0`, but they aren't identical. // See the Harmony `egal` proposal: http://wiki.ecmascript.org/doku.php?id=harmony:egal. if (a === b) return a !== 0 || 1 / a == 1 / b; // A strict comparison is necessary because `null == undefined`. if (a == null || b == null) return a === b; // Unwrap any wrapped objects. if (a._chain) a = a._wrapped; if (b._chain) b = b._wrapped; // Invoke a custom `isEqual` method if one is provided. if (a.isEqual && _.isFunction(a.isEqual)) return a.isEqual(b); if (b.isEqual && _.isFunction(b.isEqual)) return b.isEqual(a); // Compare `[[Class]]` names. var className = toString.call(a); if (className != toString.call(b)) return false; switch (className) { // Strings, numbers, dates, and booleans are compared by value. case '[object String]': // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is // equivalent to `new String("5")`. return a == String(b); case '[object Number]': // `NaN`s are equivalent, but non-reflexive. An `egal` comparison is performed for // other numeric values. return a != +a ? b != +b : (a == 0 ? 1 / a == 1 / b : a == +b); case '[object Date]': case '[object Boolean]': // Coerce dates and booleans to numeric primitive values. Dates are compared by their // millisecond representations. Note that invalid dates with millisecond representations // of `NaN` are not equivalent. return +a == +b; // RegExps are compared by their source patterns and flags. 
case '[object RegExp]': return a.source == b.source && a.global == b.global && a.multiline == b.multiline && a.ignoreCase == b.ignoreCase; } if (typeof a != 'object' || typeof b != 'object') return false; // Assume equality for cyclic structures. The algorithm for detecting cyclic // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. var length = stack.length; while (length--) { // Linear search. Performance is inversely proportional to the number of // unique nested structures. if (stack[length] == a) return true; } // Add the first object to the stack of traversed objects. stack.push(a); var size = 0, result = true; // Recursively compare objects and arrays. if (className == '[object Array]') { // Compare array lengths to determine if a deep comparison is necessary. size = a.length; result = size == b.length; if (result) { // Deep compare the contents, ignoring non-numeric properties. while (size--) { // Ensure commutative equality for sparse arrays. if (!(result = size in a == size in b && eq(a[size], b[size], stack))) break; } } } else { // Objects with different constructors are not equivalent. if ('constructor' in a != 'constructor' in b || a.constructor != b.constructor) return false; // Deep compare objects. for (var key in a) { if (_.has(a, key)) { // Count the expected number of properties. size++; // Deep compare each member. if (!(result = _.has(b, key) && eq(a[key], b[key], stack))) break; } } // Ensure that both objects contain the same number of properties. if (result) { for (key in b) { if (_.has(b, key) && !(size--)) break; } result = !size; } } // Remove the first object from the stack of traversed objects. stack.pop(); return result; } // Perform a deep comparison to check if two objects are equal. _.isEqual = function(a, b) { return eq(a, b, []); }; // Is a given array, string, or object empty? // An "empty" object has no enumerable own-properties. 
_.isEmpty = function(obj) { if (_.isArray(obj) || _.isString(obj)) return obj.length === 0; for (var key in obj) if (_.has(obj, key)) return false; return true; }; // Is a given value a DOM element? _.isElement = function(obj) { return !!(obj && obj.nodeType == 1); }; // Is a given value an array? // Delegates to ECMA5's native Array.isArray _.isArray = nativeIsArray || function(obj) { return toString.call(obj) == '[object Array]'; }; // Is a given variable an object? _.isObject = function(obj) { return obj === Object(obj); }; // Is a given variable an arguments object? _.isArguments = function(obj) { return toString.call(obj) == '[object Arguments]'; }; if (!_.isArguments(arguments)) { _.isArguments = function(obj) { return !!(obj && _.has(obj, 'callee')); }; } // Is a given value a function? _.isFunction = function(obj) { return toString.call(obj) == '[object Function]'; }; // Is a given value a string? _.isString = function(obj) { return toString.call(obj) == '[object String]'; }; // Is a given value a number? _.isNumber = function(obj) { return toString.call(obj) == '[object Number]'; }; // Is the given value `NaN`? _.isNaN = function(obj) { // `NaN` is the only value for which `===` is not reflexive. return obj !== obj; }; // Is a given value a boolean? _.isBoolean = function(obj) { return obj === true || obj === false || toString.call(obj) == '[object Boolean]'; }; // Is a given value a date? _.isDate = function(obj) { return toString.call(obj) == '[object Date]'; }; // Is the given value a regular expression? _.isRegExp = function(obj) { return toString.call(obj) == '[object RegExp]'; }; // Is a given value equal to null? _.isNull = function(obj) { return obj === null; }; // Is a given variable undefined? _.isUndefined = function(obj) { return obj === void 0; }; // Has own property? 
_.has = function(obj, key) { return hasOwnProperty.call(obj, key); }; // Utility Functions // ----------------- // Run Underscore.js in *noConflict* mode, returning the `_` variable to its // previous owner. Returns a reference to the Underscore object. _.noConflict = function() { root._ = previousUnderscore; return this; }; // Keep the identity function around for default iterators. _.identity = function(value) { return value; }; // Run a function **n** times. _.times = function (n, iterator, context) { for (var i = 0; i < n; i++) iterator.call(context, i); }; // Escape a string for HTML interpolation. _.escape = function(string) { return (''+string).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&#x27;').replace(/\//g,'&#x2F;'); }; // Add your own custom functions to the Underscore object, ensuring that // they're correctly added to the OOP wrapper as well. _.mixin = function(obj) { each(_.functions(obj), function(name){ addToWrapper(name, _[name] = obj[name]); }); }; // Generate a unique integer id (unique within the entire client session). // Useful for temporary DOM ids. var idCounter = 0; _.uniqueId = function(prefix) { var id = idCounter++; return prefix ? prefix + id : id; }; // By default, Underscore uses ERB-style template delimiters, change the // following template settings to use alternative delimiters. _.templateSettings = { evaluate : /<%([\s\S]+?)%>/g, interpolate : /<%=([\s\S]+?)%>/g, escape : /<%-([\s\S]+?)%>/g }; // When customizing `templateSettings`, if you don't want to define an // interpolation, evaluation or escaping regex, we need one that is // guaranteed not to match. var noMatch = /.^/; // Within an interpolation, evaluation, or escaping, remove HTML escaping // that had been previously added. var unescape = function(code) { return code.replace(/\\\\/g, '\\').replace(/\\'/g, "'"); }; // JavaScript micro-templating, similar to John Resig's implementation. 
// Underscore templating handles arbitrary delimiters, preserves whitespace, // and correctly escapes quotes within interpolated code. _.template = function(str, data) { var c = _.templateSettings; var tmpl = 'var __p=[],print=function(){__p.push.apply(__p,arguments);};' + 'with(obj||{}){__p.push(\'' + str.replace(/\\/g, '\\\\') .replace(/'/g, "\\'") .replace(c.escape || noMatch, function(match, code) { return "',_.escape(" + unescape(code) + "),'"; }) .replace(c.interpolate || noMatch, function(match, code) { return "'," + unescape(code) + ",'"; }) .replace(c.evaluate || noMatch, function(match, code) { return "');" + unescape(code).replace(/[\r\n\t]/g, ' ') + ";__p.push('"; }) .replace(/\r/g, '\\r') .replace(/\n/g, '\\n') .replace(/\t/g, '\\t') + "');}return __p.join('');"; var func = new Function('obj', '_', tmpl); if (data) return func(data, _); return function(data) { return func.call(this, data, _); }; }; // Add a "chain" function, which will delegate to the wrapper. _.chain = function(obj) { return _(obj).chain(); }; // The OOP Wrapper // --------------- // If Underscore is called as a function, it returns a wrapped object that // can be used OO-style. This wrapper holds altered versions of all the // underscore functions. Wrapped objects may be chained. var wrapper = function(obj) { this._wrapped = obj; }; // Expose `wrapper.prototype` as `_.prototype` _.prototype = wrapper.prototype; // Helper function to continue chaining intermediate results. var result = function(obj, chain) { return chain ? _(obj).chain() : obj; }; // A method to easily add functions to the OOP wrapper. var addToWrapper = function(name, func) { wrapper.prototype[name] = function() { var args = slice.call(arguments); unshift.call(args, this._wrapped); return result(func.apply(_, args), this._chain); }; }; // Add all of the Underscore functions to the wrapper object. _.mixin(_); // Add all mutator Array functions to the wrapper. 
each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { var method = ArrayProto[name]; wrapper.prototype[name] = function() { var wrapped = this._wrapped; method.apply(wrapped, arguments); var length = wrapped.length; if ((name == 'shift' || name == 'splice') && length === 0) delete wrapped[0]; return result(wrapped, this._chain); }; }); // Add all accessor Array functions to the wrapper. each(['concat', 'join', 'slice'], function(name) { var method = ArrayProto[name]; wrapper.prototype[name] = function() { return result(method.apply(this._wrapped, arguments), this._chain); }; }); // Start chaining a wrapped Underscore object. wrapper.prototype.chain = function() { this._chain = true; return this; }; // Extracts the result from a wrapped and chained object. wrapper.prototype.value = function() { return this._wrapped; }; }).call(this);
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/docs/build/html/_static/underscore-1.3.1.js
underscore-1.3.1.js
// ---------------------------------------------------------------------------
// jQuery "autogrow" plugin: grows <textarea> elements vertically to fit
// their content while they are focused.
// ---------------------------------------------------------------------------
(function($) {
  $.fn.autogrow = function() {
    return this.each(function() {
      var textarea = this;

      $.fn.autogrow.resize(textarea);

      $(textarea)
        .focus(function() {
          // Poll twice a second while focused instead of hooking every
          // input event; the timer is cleared on blur.
          textarea.interval = setInterval(function() {
            $.fn.autogrow.resize(textarea);
          }, 500);
        })
        .blur(function() {
          clearInterval(textarea.interval);
        });
    });
  };

  // Recompute and apply the pixel height of one textarea, estimating the
  // rendered line count from content length and the column width.
  $.fn.autogrow.resize = function(textarea) {
    var lineHeight = parseInt($(textarea).css('line-height'), 10);
    var lines = textarea.value.split('\n');
    var columns = textarea.cols;
    var lineCount = 0;
    $.each(lines, function() {
      // Each logical line takes at least one row, plus wrap-around rows.
      lineCount += Math.ceil(this.length / columns) || 1;
    });
    var height = lineHeight * (lineCount + 1);
    $(textarea).css('height', height);
  };
})(jQuery);

// ---------------------------------------------------------------------------
// Sphinx websupport comment widget.
// `by` holds the current sort key; `comp` is the comparator derived from it.
// ---------------------------------------------------------------------------
(function($) {
  var comp, by;

  // Entry point, wired to $(document).ready at the bottom of this IIFE.
  function init() {
    initEvents();
    initComparator();
  }

  // Install delegated click handlers for every comment-widget control.
  // Handlers are bound on `document` so they also fire for markup inserted
  // after page load. Element ids carry a two-character control prefix
  // (e.g. "ah", "cr"); substring(2) recovers the comment/node id.
  function initEvents() {
    $(document).on("click", 'a.comment-close', function(event) {
      event.preventDefault();
      hide($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.vote', function(event) {
      event.preventDefault();
      handleVote($(this));
    });
    $(document).on("click", 'a.reply', function(event) {
      event.preventDefault();
      openReply($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.close-reply', function(event) {
      event.preventDefault();
      closeReply($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.sort-option', function(event) {
      event.preventDefault();
      handleReSort($(this));
    });
    $(document).on("click", 'a.show-proposal', function(event) {
      event.preventDefault();
      showProposal($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.hide-proposal', function(event) {
      event.preventDefault();
      hideProposal($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.show-propose-change', function(event) {
      event.preventDefault();
      showProposeChange($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.hide-propose-change', function(event) {
      event.preventDefault();
      hideProposeChange($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.accept-comment', function(event) {
      event.preventDefault();
      acceptComment($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.delete-comment', function(event) {
      event.preventDefault();
      deleteComment($(this).attr('id').substring(2));
    });
    $(document).on("click", 'a.comment-markup', function(event) {
      event.preventDefault();
      toggleCommentMarkupBox($(this).attr('id').substring(2));
    });
  }

  /**
   * Set comp, which is a comparator function used for sorting and
   * inserting comments into the list.
   */
  function setComparator() {
    // If the first three letters are "asc", sort in ascending order
    // and remove the prefix.
    if (by.substring(0,3) == 'asc') {
      var i = by.substring(3);
      comp = function(a, b) { return a[i] - b[i]; };
    } else {
      // Otherwise sort in descending order.
      comp = function(a, b) { return b[by] - a[by]; };
    }

    // Reset link styles and format the selected sort option.
    $('a.sel').attr('href', '#').removeClass('sel');
    $('a.by' + by).removeAttr('href').addClass('sel');
  }

  /**
   * Create a comp function. If the user has preferences stored in
   * the sortBy cookie, use those, otherwise use the default.
   */
  function initComparator() {
    by = 'rating'; // Default to sort by rating.
    // If the sortBy cookie is set, use that instead.
    if (document.cookie.length > 0) {
      var start = document.cookie.indexOf('sortBy=');
      if (start != -1) {
        start = start + 7;
        var end = document.cookie.indexOf(";", start);
        // NOTE(review): `by` is only assigned when no ';' follows the
        // sortBy value (i.e. end == -1); a stored preference followed by
        // another cookie is ignored. Looks like an upstream quirk —
        // preserved as-is.
        if (end == -1) {
          end = document.cookie.length;
          by = unescape(document.cookie.substring(start, end));
        }
      }
    }
    setComparator();
  }

  /**
   * Show a comment div.
   */
  // Open the comment popup for node `id`: swap the open/close anchors,
  // render the popup template, wire the add-comment form, then fetch the
  // existing comments once the popup has slid into view.
  function show(id) {
    $('#ao' + id).hide();
    $('#ah' + id).show();
    var context = $.extend({id: id}, opts);
    var popup = $(renderTemplate(popupTemplate, context)).hide();
    popup.find('textarea[name="proposal"]').hide();
    popup.find('a.by' + by).addClass('sel');
    var form = popup.find('#cf' + id);
    form.submit(function(event) {
      event.preventDefault();
      addComment(form);
    });
    $('#s' + id).after(popup);
    popup.slideDown('fast', function() {
      getComments(id);
    });
  }

  /**
   * Hide a comment div.
   */
  function hide(id) {
    $('#ah' + id).hide();
    $('#ao' + id).show();
    var div = $('#sc' + id);
    div.slideUp('fast', function() {
      div.remove();
    });
  }

  /**
   * Perform an ajax request to get comments for a node
   * and insert the comments into the comments tree.
   */
  function getComments(id) {
    $.ajax({
      type: 'GET',
      url: opts.getCommentsURL,
      data: {node: id},
      success: function(data, textStatus, request) {
        var ul = $('#cl' + id);
        var speed = 100;
        // Stash the node's source text so "propose a change" can prefill.
        $('#cf' + id)
          .find('textarea[name="proposal"]')
          .data('source', data.source);

        if (data.comments.length === 0) {
          ul.html('<li>No comments yet.</li>');
          ul.data('empty', true);
        } else {
          // If there are comments, sort them and put them in the list.
          var comments = sortComments(data.comments);
          // Slide duration scales with the number of comments.
          speed = data.comments.length * 100;
          appendComments(comments, ul);
          ul.data('empty', false);
        }
        $('#cn' + id).slideUp(speed + 200);
        ul.slideDown(speed);
      },
      error: function(request, textStatus, error) {
        showError('Oops, there was a problem retrieving the comments.');
      },
      dataType: 'json'
    });
  }

  /**
   * Add a comment via ajax and insert the comment into the comment tree.
   */
  function addComment(form) {
    var node_id = form.find('input[name="node"]').val();
    var parent_id = form.find('input[name="parent"]').val();
    var text = form.find('textarea[name="comment"]').val();
    var proposal = form.find('textarea[name="proposal"]').val();

    if (text == '') {
      showError('Please enter a comment.');
      return;
    }

    // Disable the form that is being submitted.
    form.find('textarea,input').attr('disabled', 'disabled');

    // Send the comment to the server.
    $.ajax({
      type: "POST",
      url: opts.addCommentURL,
      dataType: 'json',
      data: {
        node: node_id,
        parent: parent_id,
        text: text,
        proposal: proposal
      },
      success: function(data, textStatus, error) {
        // Reset the form.
        if (node_id) {
          hideProposeChange(node_id);
        }
        form.find('textarea')
          .val('')
          .add(form.find('input'))
          .removeAttr('disabled');
        var ul = $('#cl' + (node_id || parent_id));
        if (ul.data('empty')) {
          // Drop the "No comments yet." placeholder before inserting.
          $(ul).empty();
          ul.data('empty', false);
        }
        insertComment(data.comment);
        var ao = $('#ao' + node_id);
        ao.find('img').attr({'src': opts.commentBrightImage});
        if (node_id) {
          // if this was a "root" comment, remove the commenting box
          // (the user can get it back by reopening the comment popup)
          $('#ca' + node_id).slideUp();
        }
      },
      error: function(request, textStatus, error) {
        form.find('textarea,input').removeAttr('disabled');
        showError('Oops, there was a problem adding the comment.');
      }
    });
  }

  /**
   * Recursively append comments to the main comment list and children
   * lists, creating the comment tree.
   */
  function appendComments(comments, ul) {
    $.each(comments, function() {
      var div = createCommentDiv(this);
      ul.append($(document.createElement('li')).html(div));
      appendComments(this.children, div.find('ul.comment-children'));
      // To avoid stagnating data, don't store the comments children in data.
      this.children = null;
      div.data('comment', this);
    });
  }

  /**
   * After adding a new comment, it must be inserted in the correct
   * location in the comment tree.
   */
  function insertComment(comment) {
    var div = createCommentDiv(comment);

    // To avoid stagnating data, don't store the comments children in data.
    comment.children = null;
    div.data('comment', comment);

    var ul = $('#cl' + (comment.node || comment.parent));
    var siblings = getChildren(ul);

    var li = $(document.createElement('li'));
    li.hide();

    // Determine where in the parents children list to insert this comment.
    // NOTE(review): `i` is assigned without `var` and leaks to global
    // scope — upstream quirk, preserved as-is.
    for(i=0; i < siblings.length; i++) {
      if (comp(comment, siblings[i]) <= 0) {
        $('#cd' + siblings[i].id)
          .parent()
          .before(li.html(div));
        li.slideDown('fast');
        return;
      }
    }

    // If we get here, this comment rates lower than all the others,
    // or it is the only comment in the list.
    ul.append(li.html(div));
    li.slideDown('fast');
  }

  // Moderator action: mark a pending comment as approved on the server,
  // then clear its "moderate" styling.
  function acceptComment(id) {
    $.ajax({
      type: 'POST',
      url: opts.acceptCommentURL,
      data: {id: id},
      success: function(data, textStatus, request) {
        $('#cm' + id).fadeOut('fast');
        $('#cd' + id).removeClass('moderate');
      },
      error: function(request, textStatus, error) {
        showError('Oops, there was a problem accepting the comment.');
      }
    });
  }

  // Delete a comment. The server decides the semantics: 'delete' means a
  // hard removal (moderator), anything else means mark-as-deleted in place.
  function deleteComment(id) {
    $.ajax({
      type: 'POST',
      url: opts.deleteCommentURL,
      data: {id: id},
      success: function(data, textStatus, request) {
        var div = $('#cd' + id);
        if (data == 'delete') {
          // Moderator mode: remove the comment and all children immediately
          div.slideUp('fast', function() {
            div.remove();
          });
          return;
        }
        // User mode: only mark the comment as deleted
        div
          .find('span.user-id:first')
          .text('[deleted]').end()
          .find('div.comment-text:first')
          .text('[deleted]').end()
          .find('#cm' + id + ', #dc' + id + ', #ac' + id + ', #rc' + id +
                ', #sp' + id + ', #hp' + id + ', #cr' + id + ', #rl' + id)
            .remove();
        var comment = div.data('comment');
        comment.username = '[deleted]';
        comment.text = '[deleted]';
        div.data('comment', comment);
      },
      error: function(request, textStatus, error) {
        showError('Oops, there was a problem deleting the comment.');
      }
    });
  }

  // Toggle helpers for the proposal diff display on a comment.
  function showProposal(id) {
    $('#sp' + id).hide();
    $('#hp' + id).show();
    $('#pr' + id).slideDown('fast');
  }

  function hideProposal(id) {
    $('#hp' + id).hide();
    $('#sp' + id).show();
    $('#pr' + id).slideUp('fast');
  }

  // Toggle helpers for the "propose a change" textarea; prefilled with the
  // node source stashed by getComments.
  function showProposeChange(id) {
    $('#pc' + id).hide();
    $('#hc' + id).show();
    var textarea = $('#pt' + id);
    textarea.val(textarea.data('source'));
    $.fn.autogrow.resize(textarea[0]);
    textarea.slideDown('fast');
  }

  function hideProposeChange(id) {
    $('#hc' + id).hide();
    $('#pc' + id).show();
    var textarea = $('#pt' + id);
    textarea.val('').removeAttr('disabled');
    textarea.slideUp('fast');
  }

  function toggleCommentMarkupBox(id) {
    $('#mb' + id).toggle();
  }

  /** Handle when the user clicks on a sort by link. */
  function handleReSort(link) {
    // The sort key is encoded in the link's extra CSS class, e.g. "byage".
    var classes = link.attr('class').split(/\s+/);
    for (var i=0; i<classes.length; i++) {
      if (classes[i] != 'sort-option') {
        by = classes[i].substring(2);
      }
    }
    setComparator();
    // Save/update the sortBy cookie.
    var expiration = new Date();
    expiration.setDate(expiration.getDate() + 365);
    document.cookie= 'sortBy=' + escape(by) +
                     ';expires=' + expiration.toUTCString();
    // Re-sort every visible comment list in place.
    $('ul.comment-ul').each(function(index, ul) {
      var comments = getChildren($(ul), true);
      comments = sortComments(comments);
      appendComments(comments, $(ul).empty());
    });
  }

  /**
   * Function to process a vote when a user clicks an arrow.
   */
  function handleVote(link) {
    if (!opts.voting) {
      showError("You'll need to login to vote.");
      return;
    }

    var id = link.attr('id');
    if (!id) {
      // Didn't click on one of the voting arrows.
      return;
    }
    // If it is an unvote, the new vote value is 0,
    // Otherwise it's 1 for an upvote, or -1 for a downvote.
    // (Arrow ids are "uv"/"uu"/"dv"/"du" + comment id: first char is the
    // direction, second char 'u' marks the unvote arrow.)
    var value = 0;
    if (id.charAt(1) != 'u') {
      value = id.charAt(0) == 'u' ? 1 : -1;
    }
    // The data to be sent to the server.
    var d = {
      comment_id: id.substring(2),
      value: value
    };

    // Swap the vote and unvote links.
    link.hide();
    $('#' + id.charAt(0) + (id.charAt(1) == 'u' ? 'v' : 'u') + d.comment_id)
      .show();

    // The div the comment is displayed in.
    var div = $('div#cd' + d.comment_id);
    var data = div.data('comment');

    // If this is not an unvote, and the other vote arrow has
    // already been pressed, unpress it.
    if ((d.value !== 0) && (data.vote === d.value * -1)) {
      $('#' + (d.value == 1 ? 'd' : 'u') + 'u' + d.comment_id).hide();
      $('#' + (d.value == 1 ? 'd' : 'u') + 'v' + d.comment_id).show();
    }

    // Update the comments rating in the local data.
    data.rating += (data.vote === 0) ? d.value : (d.value - data.vote);
    data.vote = d.value;
    div.data('comment', data);

    // Change the rating text.
    div.find('.rating:first')
      .text(data.rating + ' point' + (data.rating == 1 ? '' : 's'));

    // Send the vote information to the server.
    $.ajax({
      type: "POST",
      url: opts.processVoteURL,
      data: d,
      error: function(request, textStatus, error) {
        showError('Oops, there was a problem casting that vote.');
      }
    });
  }

  /**
   * Open a reply form used to reply to an existing comment.
   */
  function openReply(id) {
    // Swap out the reply link for the hide link
    $('#rl' + id).hide();
    $('#cr' + id).show();

    // Add the reply li to the children ul.
    var div = $(renderTemplate(replyTemplate, {id: id})).hide();
    $('#cl' + id)
      .prepend(div)
      // Setup the submit handler for the reply form.
      .find('#rf' + id)
        .submit(function(event) {
          event.preventDefault();
          addComment($('#rf' + id));
          closeReply(id);
        })
        .find('input[type=button]')
          .click(function() {
            closeReply(id);
          });
    div.slideDown('fast', function() {
      $('#rf' + id).find('textarea').focus();
    });
  }

  /**
   * Close the reply form opened with openReply.
   */
  function closeReply(id) {
    // Remove the reply div from the DOM.
    $('#rd' + id).slideUp('fast', function() {
      $(this).remove();
    });

    // Swap out the hide link for the reply link
    $('#cr' + id).hide();
    $('#rl' + id).show();
  }

  /**
   * Recursively sort a tree of comments using the comp comparator.
   */
  function sortComments(comments) {
    comments.sort(comp);
    $.each(comments, function() {
      this.children = sortComments(this.children);
    });
    return comments;
  }

  /**
   * Get the children comments from a ul. If recursive is true,
   * recursively include childrens' children.
   */
  function getChildren(ul, recursive) {
    var children = [];
    ul.children().children("[id^='cd']")
      .each(function() {
        var comment = $(this).data('comment');
        if (recursive)
          comment.children = getChildren($(this).find('#cl' + comment.id), true);
        children.push(comment);
      });
    return children;
  }

  /** Create a div to display a comment in.
   */
  // Builds the DOM fragment for one comment from commentTemplate, hiding or
  // showing the vote/reply/proposal/moderation controls as appropriate.
  function createCommentDiv(comment) {
    if (!comment.displayed && !opts.moderator) {
      // NOTE(review): "it is has been" is an upstream typo in a runtime
      // string; preserved byte-for-byte.
      return $('<div class="moderate">Thank you! Your comment will show up '
               + 'once it is has been approved by a moderator.</div>');
    }
    // Prettify the comment rating.
    comment.pretty_rating = comment.rating + ' point' +
      (comment.rating == 1 ? '' : 's');
    // Make a class (for displaying not yet moderated comments differently)
    comment.css_class = comment.displayed ? '' : ' moderate';
    // Create a div for this comment.
    var context = $.extend({}, opts, comment);
    var div = $(renderTemplate(commentTemplate, context));

    // If the user has voted on this comment, highlight the correct arrow.
    if (comment.vote) {
      var direction = (comment.vote == 1) ? 'u' : 'd';
      div.find('#' + direction + 'v' + comment.id).hide();
      div.find('#' + direction + 'u' + comment.id).show();
    }

    if (opts.moderator || comment.text != '[deleted]') {
      div.find('a.reply').show();
      if (comment.proposal_diff)
        div.find('#sp' + comment.id).show();
      if (opts.moderator && !comment.displayed)
        div.find('#cm' + comment.id).show();
      if (opts.moderator || (opts.username == comment.username))
        div.find('#dc' + comment.id).show();
    }

    return div;
  }

  /**
   * A simple template renderer. Placeholders such as <%id%> are replaced
   * by context['id'] with items being escaped. Placeholders such as <#id#>
   * are not escaped.
   */
  function renderTemplate(template, context) {
    // Throwaway element used purely for HTML-escaping via .text().html().
    var esc = $(document.createElement('div'));

    // Resolve a (possibly dotted) placeholder path against `context`.
    function handle(ph, escape) {
      var cur = context;
      $.each(ph.split('.'), function() {
        cur = cur[this];
      });
      return escape ? esc.text(cur || "").html() : cur;
    }

    // \1 backreference forces matching open/close sigils (<%...%> or <#...#>).
    return template.replace(/<([%#])([\w\.]*)\1>/g, function() {
      return handle(arguments[2], arguments[1] == '%' ? true : false);
    });
  }

  /** Flash an error message briefly.
   */
  function showError(message) {
    $(document.createElement('div')).attr({'class': 'popup-error'})
      .append($(document.createElement('div'))
               .attr({'class': 'error-message'}).text(message))
      .appendTo('body')
      .fadeIn("slow")
      .delay(2000)
      .fadeOut("slow");
  }

  /** Add a link the user uses to open the comments popup. */
  // Expects COMMENT_METADATA (injected by the page) to map node ids to
  // comment counts; adds paired open/close anchors per commentable node.
  $.fn.comment = function() {
    return this.each(function() {
      var id = $(this).attr('id').substring(1);
      var count = COMMENT_METADATA[id];
      var title = count + ' comment' + (count == 1 ? '' : 's');
      var image = count > 0 ? opts.commentBrightImage : opts.commentImage;
      var addcls = count == 0 ? ' nocomment' : '';

      $(this)
        .append(
          $(document.createElement('a')).attr({
            href: '#',
            'class': 'sphinx-comment-open' + addcls,
            id: 'ao' + id
          })
            .append($(document.createElement('img')).attr({
              src: image,
              alt: 'comment',
              title: title
            }))
            .click(function(event) {
              event.preventDefault();
              show($(this).attr('id').substring(2));
            })
        )
        .append(
          $(document.createElement('a')).attr({
            href: '#',
            'class': 'sphinx-comment-close hidden',
            id: 'ah' + id
          })
            .append($(document.createElement('img')).attr({
              src: opts.closeCommentImage,
              alt: 'close',
              title: 'close'
            }))
            .click(function(event) {
              event.preventDefault();
              hide($(this).attr('id').substring(2));
            })
        );
    });
  };

  // Default server endpoints and UI assets; a page may override any of
  // these by defining COMMENT_OPTIONS before this script runs.
  var opts = {
    processVoteURL: '/_process_vote',
    addCommentURL: '/_add_comment',
    getCommentsURL: '/_get_comments',
    acceptCommentURL: '/_accept_comment',
    deleteCommentURL: '/_delete_comment',
    commentImage: '/static/_static/comment.png',
    closeCommentImage: '/static/_static/comment-close.png',
    loadingImage: '/static/_static/ajax-loader.gif',
    commentBrightImage: '/static/_static/comment-bright.png',
    upArrow: '/static/_static/up.png',
    downArrow: '/static/_static/down.png',
    upArrowPressed: '/static/_static/up-pressed.png',
    downArrowPressed: '/static/_static/down-pressed.png',
    voting: false,
    moderator: false
  };

  if (typeof COMMENT_OPTIONS != "undefined") {
    opts = jQuery.extend(opts, COMMENT_OPTIONS);
  }

  // HTML template for the whole comment popup attached to a node.
  var popupTemplate = '\
    <div class="sphinx-comments" id="sc<%id%>">\
      <p class="sort-options">\
        Sort by:\
        <a href="#" class="sort-option byrating">best rated</a>\
        <a href="#" class="sort-option byascage">newest</a>\
        <a href="#" class="sort-option byage">oldest</a>\
      </p>\
      <div class="comment-header">Comments</div>\
      <div class="comment-loading" id="cn<%id%>">\
        loading comments... <img src="<%loadingImage%>" alt="" /></div>\
      <ul id="cl<%id%>" class="comment-ul"></ul>\
      <div id="ca<%id%>">\
      <p class="add-a-comment">Add a comment\
        (<a href="#" class="comment-markup" id="ab<%id%>">markup</a>):</p>\
      <div class="comment-markup-box" id="mb<%id%>">\
        reStructured text markup: <i>*emph*</i>, <b>**strong**</b>, \
        <code>``code``</code>, \
        code blocks: <code>::</code> and an indented block after blank line</div>\
      <form method="post" id="cf<%id%>" class="comment-form" action="">\
        <textarea name="comment" cols="80"></textarea>\
        <p class="propose-button">\
          <a href="#" id="pc<%id%>" class="show-propose-change">\
            Propose a change &#9657;\
          </a>\
          <a href="#" id="hc<%id%>" class="hide-propose-change">\
            Propose a change &#9663;\
          </a>\
        </p>\
        <textarea name="proposal" id="pt<%id%>" cols="80"\
                  spellcheck="false"></textarea>\
        <input type="submit" value="Add comment" />\
        <input type="hidden" name="node" value="<%id%>" />\
        <input type="hidden" name="parent" value="" />\
      </form>\
    </div>\
  </div>';

  // HTML template for a single comment in the tree.
  var commentTemplate = '\
    <div id="cd<%id%>" class="sphinx-comment<%css_class%>">\
      <div class="vote">\
        <div class="arrow">\
          <a href="#" id="uv<%id%>" class="vote" title="vote up">\
            <img src="<%upArrow%>" />\
          </a>\
          <a href="#" id="uu<%id%>" class="un vote" title="vote up">\
            <img src="<%upArrowPressed%>" />\
          </a>\
        </div>\
        <div class="arrow">\
          <a href="#" id="dv<%id%>" class="vote" title="vote down">\
            <img src="<%downArrow%>" id="da<%id%>" />\
          </a>\
          <a href="#" id="du<%id%>" class="un vote" title="vote down">\
            <img src="<%downArrowPressed%>" />\
          </a>\
        </div>\
      </div>\
      <div class="comment-content">\
        <p class="tagline comment">\
          <span class="user-id"><%username%></span>\
          <span class="rating"><%pretty_rating%></span>\
          <span class="delta"><%time.delta%></span>\
        </p>\
        <div class="comment-text comment"><#text#></div>\
        <p class="comment-opts comment">\
          <a href="#" class="reply hidden" id="rl<%id%>">reply &#9657;</a>\
          <a href="#" class="close-reply" id="cr<%id%>">reply &#9663;</a>\
          <a href="#" id="sp<%id%>" class="show-proposal">proposal &#9657;</a>\
          <a href="#" id="hp<%id%>" class="hide-proposal">proposal &#9663;</a>\
          <a href="#" id="dc<%id%>" class="delete-comment hidden">delete</a>\
          <span id="cm<%id%>" class="moderation hidden">\
            <a href="#" id="ac<%id%>" class="accept-comment">accept</a>\
          </span>\
        </p>\
        <pre class="proposal" id="pr<%id%>">\
<#proposal_diff#>\
        </pre>\
          <ul class="comment-children" id="cl<%id%>"></ul>\
      </div>\
      <div class="clearleft"></div>\
    </div>\
  </div>';

  // HTML template for an inline reply form.
  var replyTemplate = '\
    <li>\
      <div class="reply-div" id="rd<%id%>">\
        <form id="rf<%id%>">\
          <textarea name="comment" cols="80"></textarea>\
          <input type="submit" value="Add reply" />\
          <input type="button" value="Cancel" />\
          <input type="hidden" name="parent" value="<%id%>" />\
          <input type="hidden" name="node" value="" />\
        </form>\
      </div>\
    </li>';

  $(document).ready(function() {
    init();
  });
})(jQuery);

$(document).ready(function() {
  // add comment anchors for all paragraphs that are commentable
  $('.sphinx-has-comment').comment();

  // highlight search words in search results
  $("div.context").each(function() {
    var params = $.getQueryParameters();
    var terms = (params.q) ? params.q[0].split(/\s+/) : [];
    var result = $(this);
    $.each(terms, function() {
      result.highlightText(this.toLowerCase(), 'highlighted');
    });
  });

  // directly open comment window if requested
  var anchor = document.location.hash;
  if (anchor.substring(0, 9) == '#comment-') {
    $('#ao' + anchor.substring(9)).click();
    document.location.hash = '#s' + anchor.substring(9);
  }
});
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/docs/build/html/_static/websupport.js
websupport.js
 * Porter Stemmer */

// NOTE(review): this chunk is the head of Sphinx's searchtools.js; the
// opening of the comment above and the tail of Search.query (from
// performTermsSearch onward) lie outside this chunk.

// English stemmer implementing Porter's suffix-stripping algorithm; used to
// normalise query words before index lookup.
var Stemmer = function() {

  // Step-2/3 suffix rewrite tables from the Porter algorithm.
  var step2list = {
    ational: 'ate',
    tional: 'tion',
    enci: 'ence',
    anci: 'ance',
    izer: 'ize',
    bli: 'ble',
    alli: 'al',
    entli: 'ent',
    eli: 'e',
    ousli: 'ous',
    ization: 'ize',
    ation: 'ate',
    ator: 'ate',
    alism: 'al',
    iveness: 'ive',
    fulness: 'ful',
    ousness: 'ous',
    aliti: 'al',
    iviti: 'ive',
    biliti: 'ble',
    logi: 'log'
  };

  var step3list = {
    icate: 'ic',
    ative: '',
    alize: 'al',
    iciti: 'ic',
    ical: 'ic',
    ful: '',
    ness: ''
  };

  var c = "[^aeiou]";          // consonant
  var v = "[aeiouy]";          // vowel
  var C = c + "[^aeiouy]*";    // consonant sequence
  var V = v + "[aeiou]*";      // vowel sequence

  var mgr0 = "^(" + C + ")?" + V + C;                      // [C]VC... is m>0
  var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$";    // [C]VC[V] is m=1
  var mgr1 = "^(" + C + ")?" + V + C + V + C;              // [C]VCVC... is m>1
  var s_v = "^(" + C + ")?" + v;                           // vowel in stem

  // Stem a single lowercase word; words shorter than 3 chars pass through.
  this.stemWord = function (w) {
    var stem;
    var suffix;
    var firstch;
    var origword = w;

    if (w.length < 3)
      return w;

    var re;
    var re2;
    var re3;
    var re4;

    // Temporarily uppercase a leading 'y' so it is treated as a consonant.
    firstch = w.substr(0,1);
    if (firstch == "y")
      w = firstch.toUpperCase() + w.substr(1);

    // Step 1a
    re = /^(.+?)(ss|i)es$/;
    re2 = /^(.+?)([^s])s$/;

    if (re.test(w))
      w = w.replace(re,"$1$2");
    else if (re2.test(w))
      w = w.replace(re2,"$1$2");

    // Step 1b
    re = /^(.+?)eed$/;
    re2 = /^(.+?)(ed|ing)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      re = new RegExp(mgr0);
      if (re.test(fp[1])) {
        re = /.$/;
        w = w.replace(re,"");
      }
    }
    else if (re2.test(w)) {
      var fp = re2.exec(w);
      stem = fp[1];
      re2 = new RegExp(s_v);
      if (re2.test(stem)) {
        w = stem;
        re2 = /(at|bl|iz)$/;
        re3 = new RegExp("([^aeiouylsz])\\1$");
        re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
        if (re2.test(w))
          w = w + "e";
        else if (re3.test(w)) {
          re = /.$/;
          w = w.replace(re,"");
        }
        else if (re4.test(w))
          w = w + "e";
      }
    }

    // Step 1c
    re = /^(.+?)y$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(s_v);
      if (re.test(stem))
        w = stem + "i";
    }

    // Step 2
    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      suffix = fp[2];
      re = new RegExp(mgr0);
      if (re.test(stem))
        w = stem + step2list[suffix];
    }

    // Step 3
    re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      suffix = fp[2];
      re = new RegExp(mgr0);
      if (re.test(stem))
        w = stem + step3list[suffix];
    }

    // Step 4
    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
    re2 = /^(.+?)(s|t)(ion)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(mgr1);
      if (re.test(stem))
        w = stem;
    }
    else if (re2.test(w)) {
      var fp = re2.exec(w);
      stem = fp[1] + fp[2];
      re2 = new RegExp(mgr1);
      if (re2.test(stem))
        w = stem;
    }

    // Step 5
    re = /^(.+?)e$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(mgr1);
      re2 = new RegExp(meq1);
      re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
      if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
        w = stem;
    }
    re = /ll$/;
    re2 = new RegExp(mgr1);
    if (re.test(w) && re2.test(w)) {
      re = /.$/;
      w = w.replace(re,"");
    }

    // and turn initial Y back to y
    if (firstch == "y")
      w = firstch.toLowerCase() + w.substr(1);
    return w;
  }
}

/**
 * Simple result scoring code.
 */
var Scorer = {
  // Implement the following function to further tweak the score for each result
  // The function takes a result array [filename, title, anchor, descr, score]
  // and returns the new score.
  /*
  score: function(result) {
    return result[4];
  },
  */

  // query matches the full name of an object
  objNameMatch: 11,
  // or matches in the last dotted part of the object name
  objPartialMatch: 6,
  // Additive scores depending on the priority of the object
  objPrio: {0: 15,   // used to be importantResults
            1: 5,    // used to be objectResults
            2: -5},  // used to be unimportantResults
  // Used when the priority is not in the mapping.
  objPrioDefault: 0,

  // query found in title
  title: 15,
  // query found in terms
  term: 5
};

// Lookup table mapping Unicode code points that act as word separators to
// `true`; built once from a list of single code points plus inclusive
// ranges. Used by splitQuery below.
var splitChars = (function() {
    var result = {};
    var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930,
         1014, 1648, 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612,
         2615, 2653, 2702, 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910,
         2928, 2948, 2961, 2971, 2973, 3085, 3089, 3113, 3124, 3213, 3217,
         3241, 3252, 3295, 3341, 3345, 3369, 3506, 3516, 3633, 3715, 3721,
         3736, 3744, 3748, 3750, 3756, 3761, 3781, 3912, 4239, 4347, 4681,
         4695, 4697, 4745, 4785, 4799, 4801, 4823, 4881, 5760, 5901, 5997,
         6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125, 8133, 8181, 8468,
         8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695, 11703,
         11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019,
         43587, 43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322,
         64325, 65141];
    var i, j, start, end;
    for (i = 0; i < singles.length; i++) {
        result[singles[i]] = true;
    }
    var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177],
         [182, 184], [706, 709], [722, 735], [741, 747], [751, 879],
         [888, 889], [894, 901], [1154, 1161], [1318, 1328], [1367, 1368],
         [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568], [1611, 1631],
         [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807],
         [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041],
         [2043, 2047], [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307],
         [2362, 2364], [2366, 2383], [2385, 2391], [2402, 2405], [2419, 2424],
         [2432, 2436], [2445, 2446], [2449, 2450], [2483, 2485], [2490, 2492],
         [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547], [2554, 2564],
         [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673],
         [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789],
         [2800, 2820], [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907],
         [2914, 2917], [2930, 2946], [2955, 2957], [2966, 2968], [2976, 2978],
         [2981, 2983], [2987, 2989], [3002, 3023], [3025, 3045], [3059, 3076],
         [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173], [3184, 3191],
         [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332],
         [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460],
         [3479, 3481], [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663],
         [3674, 3712], [3717, 3718], [3723, 3724], [3726, 3731], [3752, 3753],
         [3764, 3772], [3774, 3775], [3783, 3791], [3802, 3803], [3806, 3839],
         [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095], [4139, 4158],
         [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205],
         [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351],
         [4686, 4687], [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807],
         [4886, 4887], [4955, 4968], [4989, 4991], [5008, 5023], [5109, 5120],
         [5741, 5742], [5787, 5791], [5867, 5869], [5873, 5887], [5906, 5919],
         [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102], [6104, 6107],
         [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271],
         [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527],
         [6572, 6592], [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783],
         [6794, 6799], [6810, 6822], [6824, 6916], [6964, 6980], [6988, 6991],
         [7002, 7042], [7073, 7085], [7098, 7167], [7204, 7231], [7242, 7244],
         [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959], [7966, 7967],
         [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143],
         [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307],
         [8314, 8318], [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457],
         [8470, 8472], [8478, 8483], [8506, 8507], [8512, 8516], [8522, 8525],
         [8586, 9311], [9372, 9449], [9472, 10101], [10132, 11263],
         [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567],
         [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822],
         [11824, 12292], [12296, 12320], [12330, 12336], [12342, 12343],
         [12349, 12352], [12439, 12444], [12544, 12548], [12590, 12592],
         [12687, 12689], [12694, 12703], [12728, 12783], [12800, 12831],
         [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311],
         [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239],
         [42509, 42511], [42540, 42559], [42592, 42593], [42607, 42622],
         [42648, 42655], [42736, 42774], [42784, 42785], [42889, 42890],
         [42893, 43002], [43043, 43055], [43062, 43071], [43124, 43137],
         [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263],
         [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470],
         [43482, 43519], [43561, 43583], [43596, 43599], [43610, 43615],
         [43639, 43641], [43643, 43647], [43698, 43700], [43703, 43704],
         [43710, 43711], [43715, 43738], [43742, 43967], [44003, 44015],
         [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295],
         [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255],
         [64263, 64274], [64280, 64284], [64434, 64466], [64830, 64847],
         [64912, 64913], [64968, 65007], [65020, 65135], [65277, 65295],
         [65306, 65312], [65339, 65344], [65371, 65381], [65471, 65473],
         [65480, 65481], [65488, 65489], [65496, 65497]];
    for (i = 0; i < ranges.length; i++) {
        start = ranges[i][0];
        end = ranges[i][1];
        for (j = start; j <= end; j++) {
            result[j] = true;
        }
    }
    return result;
})();

// Split `query` into word tokens at any character listed in splitChars.
function splitQuery(query) {
    var result = [];
    var start = -1;
    for (var i = 0; i < query.length; i++) {
        if (splitChars[query.charCodeAt(i)]) {
            if (start !== -1) {
                result.push(query.slice(start, i));
                start = -1;
            }
        } else if (start === -1) {
            start = i;
        }
    }
    if (start !== -1) {
        result.push(query.slice(start));
    }
    return result;
}

/**
 * Search Module
 */
// NOTE(review): Search.query is truncated at the end of this chunk; the
// remainder of the object continues beyond it.
var Search = {

  _index : null,
  _queued_query : null,
  _pulse_status : -1,

  // Kick off a search from the ?q= query parameter, if present.
  init : function() {
      var params = $.getQueryParameters();
      if (params.q) {
          var query = params.q[0];
          $('input[name="q"]')[0].value = query;
          this.performSearch(query);
      }
  },

  // Load the search index script; falls back to a hidden iframe/loader
  // element if the ajax script-load fails.
  loadIndex : function(url) {
    $.ajax({type: "GET", url: url, data: null,
            dataType: "script", cache: true,
            complete: function(jqxhr, textstatus) {
              if (textstatus != "success") {
                document.getElementById("searchindexloader").src = url;
              }
            }});
  },

  // Called by the loaded index script; runs any query queued while the
  // index was still loading.
  setIndex : function(index) {
    var q;
    this._index = index;
    if ((q = this._queued_query) !== null) {
      this._queued_query = null;
      Search.query(q);
    }
  },

  hasIndex : function() {
      return this._index !== null;
  },

  deferQuery : function(query) {
      this._queued_query = query;
  },

  stopPulse : function() {
      this._pulse_status = 0;
  },

  // Animated "Searching..." dots while the index loads / search runs.
  startPulse : function() {
    if (this._pulse_status >= 0)
        return;
    function pulse() {
      var i;
      Search._pulse_status = (Search._pulse_status + 1) % 4;
      var dotString = '';
      for (i = 0; i < Search._pulse_status; i++)
        dotString += '.';
      Search.dots.text(dotString);
      if (Search._pulse_status > -1)
        window.setTimeout(pulse, 500);
    }
    pulse();
  },

  /**
   * perform a search for something (or wait until index is loaded)
   */
  performSearch : function(query) {
    // create the required interface elements
    this.out = $('#search-results');
    this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
    this.dots = $('<span></span>').appendTo(this.title);
    this.status = $('<p style="display: none"></p>').appendTo(this.out);
    this.output = $('<ul class="search"/>').appendTo(this.out);

    $('#search-progress').text(_('Preparing search...'));
    this.startPulse();

    // index already loaded, the browser was quick!
    if (this.hasIndex())
      this.query(query);
    else
      this.deferQuery(query);
  },

  /**
   * execute search (requires search index to be loaded)
   */
  query : function(query) {
    var i;
    var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"];

    // stem the searchterms and add them to the correct list
    var stemmer = new Stemmer();
    var searchterms = [];
    var excluded = [];
    var hlterms = [];
    var tmp = splitQuery(query);
    var objectterms = [];
    for (i = 0; i < tmp.length; i++) {
      if (tmp[i] !== "") {
          objectterms.push(tmp[i].toLowerCase());
      }

      if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
          tmp[i] === "") {
        // skip this "word"
        continue;
      }
      // stem the word
      var word = stemmer.stemWord(tmp[i].toLowerCase());
      // prevent stemmer from cutting word smaller than two chars
      if(word.length < 3 && tmp[i].length >= 3) {
        word = tmp[i];
      }
      var toAppend;
      // select the correct list
      if (word[0] == '-') {
        toAppend = excluded;
        word = word.substr(1);
      }
      else {
        toAppend = searchterms;
        hlterms.push(tmp[i].toLowerCase());
      }
      // only add if not already in the list
      if (!$u.contains(toAppend, word))
        toAppend.push(word);
    }
    var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));

    // console.debug('SEARCH: searching for:');
    // console.info('required: ', searchterms);
    // console.info('excluded: ', excluded);

    // prepare search
    var terms = this._index.terms;
    var titleterms = this._index.titleterms;

    // array of [filename, title, anchor, descr, score]
    var results = [];
    $('#search-progress').empty();

    // lookup as object
    for (i = 0; i < objectterms.length; i++) {
      var others = [].concat(objectterms.slice(0, i),
                             objectterms.slice(i+1, objectterms.length));
      results = results.concat(this.performObjectSearch(objectterms[i], others));
    }

    // lookup as search terms in fulltext
    results = results.concat(this.performTermsSearch(searchterms,
excluded, terms, titleterms)); // let the scorer override scores with a custom scoring function if (Scorer.score) { for (i = 0; i < results.length; i++) results[i][4] = Scorer.score(results[i]); } // now sort the results by score (in opposite order of appearance, since the // display function below uses pop() to retrieve items) and then // alphabetically results.sort(function(a, b) { var left = a[4]; var right = b[4]; if (left > right) { return 1; } else if (left < right) { return -1; } else { // same score: sort alphabetically left = a[1].toLowerCase(); right = b[1].toLowerCase(); return (left > right) ? -1 : ((left < right) ? 1 : 0); } }); // for debugging //Search.lastresults = results.slice(); // a copy //console.info('search results:', Search.lastresults); // print the results var resultCount = results.length; function displayNextItem() { // results left, load the summary and display it if (results.length) { var item = results.pop(); var listItem = $('<li style="display:none"></li>'); if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') { // dirhtml builder var dirname = item[0] + '/'; if (dirname.match(/\/index\/$/)) { dirname = dirname.substring(0, dirname.length-6); } else if (dirname == 'index/') { dirname = ''; } listItem.append($('<a/>').attr('href', DOCUMENTATION_OPTIONS.URL_ROOT + dirname + highlightstring + item[2]).html(item[1])); } else { // normal html builders listItem.append($('<a/>').attr('href', item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX + highlightstring + item[2]).html(item[1])); } if (item[3]) { listItem.append($('<span> (' + item[3] + ')</span>')); Search.output.append(listItem); listItem.slideDown(5, function() { displayNextItem(); }); } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) { var suffix = DOCUMENTATION_OPTIONS.SOURCELINK_SUFFIX; $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[5] + (item[5].slice(-suffix.length) === suffix ? 
'' : suffix), dataType: "text", complete: function(jqxhr, textstatus) { var data = jqxhr.responseText; if (data !== '' && data !== undefined) { listItem.append(Search.makeSearchSummary(data, searchterms, hlterms)); } Search.output.append(listItem); listItem.slideDown(5, function() { displayNextItem(); }); }}); } else { // no source available, just display title Search.output.append(listItem); listItem.slideDown(5, function() { displayNextItem(); }); } } // search finished, update title and status message else { Search.stopPulse(); Search.title.text(_('Search Results')); if (!resultCount) Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.')); else Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount)); Search.status.fadeIn(500); } } displayNextItem(); }, /** * search for object names */ performObjectSearch : function(object, otherterms) { var filenames = this._index.filenames; var docnames = this._index.docnames; var objects = this._index.objects; var objnames = this._index.objnames; var titles = this._index.titles; var i; var results = []; for (var prefix in objects) { for (var name in objects[prefix]) { var fullname = (prefix ? prefix + '.' : '') + name; if (fullname.toLowerCase().indexOf(object) > -1) { var score = 0; var parts = fullname.split('.'); // check for different match types: exact matches of full name or // "last name" (i.e. 
last dotted part) if (fullname == object || parts[parts.length - 1] == object) { score += Scorer.objNameMatch; // matches in last name } else if (parts[parts.length - 1].indexOf(object) > -1) { score += Scorer.objPartialMatch; } var match = objects[prefix][name]; var objname = objnames[match[1]][2]; var title = titles[match[0]]; // If more than one term searched for, we require other words to be // found in the name/title/description if (otherterms.length > 0) { var haystack = (prefix + ' ' + name + ' ' + objname + ' ' + title).toLowerCase(); var allfound = true; for (i = 0; i < otherterms.length; i++) { if (haystack.indexOf(otherterms[i]) == -1) { allfound = false; break; } } if (!allfound) { continue; } } var descr = objname + _(', in ') + title; var anchor = match[3]; if (anchor === '') anchor = fullname; else if (anchor == '-') anchor = objnames[match[1]][1] + '-' + fullname; // add custom score for some objects according to scorer if (Scorer.objPrio.hasOwnProperty(match[2])) { score += Scorer.objPrio[match[2]]; } else { score += Scorer.objPrioDefault; } results.push([docnames[match[0]], fullname, '#'+anchor, descr, score, filenames[match[0]]]); } } } return results; }, /** * search for full-text terms in the index */ performTermsSearch : function(searchterms, excluded, terms, titleterms) { var docnames = this._index.docnames; var filenames = this._index.filenames; var titles = this._index.titles; var i, j, file; var fileMap = {}; var scoreMap = {}; var results = []; // perform the search on the required terms for (i = 0; i < searchterms.length; i++) { var word = searchterms[i]; var files = []; var _o = [ {files: terms[word], score: Scorer.term}, {files: titleterms[word], score: Scorer.title} ]; // no match but word was a required one if ($u.every(_o, function(o){return o.files === undefined;})) { break; } // found search word in contents $u.each(_o, function(o) { var _files = o.files; if (_files === undefined) return if (_files.length === undefined) _files = 
[_files]; files = files.concat(_files); // set score for the word in each file to Scorer.term for (j = 0; j < _files.length; j++) { file = _files[j]; if (!(file in scoreMap)) scoreMap[file] = {} scoreMap[file][word] = o.score; } }); // create the mapping for (j = 0; j < files.length; j++) { file = files[j]; if (file in fileMap) fileMap[file].push(word); else fileMap[file] = [word]; } } // now check if the files don't contain excluded terms for (file in fileMap) { var valid = true; // check if all requirements are matched if (fileMap[file].length != searchterms.length) continue; // ensure that none of the excluded terms is in the search result for (i = 0; i < excluded.length; i++) { if (terms[excluded[i]] == file || titleterms[excluded[i]] == file || $u.contains(terms[excluded[i]] || [], file) || $u.contains(titleterms[excluded[i]] || [], file)) { valid = false; break; } } // if we have still a valid result we can add it to the result list if (valid) { // select one (max) score for the file. // for better ranking, we should calculate ranking by using words statistics like basic tf-idf... var score = $u.max($u.map(fileMap[file], function(w){return scoreMap[file][w]})); results.push([docnames[file], titles[file], '', null, score, filenames[file]]); } } return results; }, /** * helper function to return a node containing the * search summary for a given text. keywords is a list * of stemmed words, hlwords is the list of normal, unstemmed * words. the first one is used to find the occurrence, the * latter for highlighting it. */ makeSearchSummary : function(text, keywords, hlwords) { var textLower = text.toLowerCase(); var start = 0; $.each(keywords, function() { var i = textLower.indexOf(this.toLowerCase()); if (i > -1) start = i; }); start = Math.max(start - 120, 0); var excerpt = ((start > 0) ? '...' : '') + $.trim(text.substr(start, 240)) + ((start + 240 - text.length) ? '...' 
: ''); var rv = $('<div class="context"></div>').text(excerpt); $.each(hlwords, function() { rv = rv.highlightText(this, 'highlighted'); }); return rv; } }; $(document).ready(function() { Search.init(); });
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/docs/build/html/_static/searchtools.js
searchtools.js
"""Create new anki cards by editing a plain-text question/answer template in $EDITOR."""

from os import makedirs, getenv
from os.path import abspath, exists as path_exists, join as path_join
from subprocess import check_call, CalledProcessError
import sys
import tempfile

import ankivim
from ankivim.errors import HeaderNotIntactError

# for python2+3 compatibility in file writing
if sys.version_info.major == 3:
    def write_file(file_handle, string):
        """Write `string` to binary `file_handle`, encoded as utf-8 (python3)."""
        file_handle.write(string.encode("utf-8"))
else:
    def write_file(file_handle, string):
        """Write `string` to `file_handle` unchanged (python2 byte semantics)."""
        file_handle.write(string)

# Renders one header frame, e.g. draw_frame(content="QUESTION").
# NOTE: each rendered frame is exactly 8 lines starting with '%';
# parse_qa() below relies on that count to locate question/answer text.
draw_frame = """%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\t\t\t\t\t\t%\n%\t\t\t\t\t\t%\n%\t\t\t{content}\t\t%\n%\t\t\t\t\t\t%\n%\t\t\t\t\t\t%\n%\t\t\t\t\t\t%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
""".format

# Default editor arguments target vim 7.4; override for other editors.
# Factored into one constant: the identical tuple was previously
# copy-pasted as the default of editor_command, open_editor and create_card.
DEFAULT_EDITOR_ARGS = (
    # set cursor below headers
    "-c {}".format(r'/\v\%\n\zs(^$|^[^\%]{1}.*$)'),
    # use anki_vim snippets
    "-c set filetype=anki_vim",
    # latex syntax highlighting
    "-c set syntax=tex",
    # load anki-vim snippets for this buffer
    '-c let b:UltiSnipsSnippetDirectories=["UltiSnips", "{snippet_directory}"]'.format(
        snippet_directory=abspath(path_join(
            ankivim.__path__[0],
            "UltiSnips",))),
)


def parse_qa(contents):
    """
    Read front (question) and back (answer) of a new anki card from
    `contents`.

    Parameters
    ----------
    contents : string
        Full text of the edited template, including the QUESTION and
        ANSWER header frames produced by `draw_frame`.

    Returns
    -------
    (question, answer): (string, string) 2-tuple
        User-input for `question` and `answer` for the new card.

    Raises
    -------
    `HeaderNotIntactError` if user has modified the QUESTION/ANSWER
    headers that serve as markers during parsing.
    """
    question, answer = [], []

    # Each intact header frame contributes 8 lines starting with '%', so on
    # well-formed input `header_lines` is 0 (above QUESTION), 8 (between the
    # frames) or 16 (below ANSWER) whenever a content line is encountered.
    header_lines = 0
    for line in contents.split("\n"):
        if line.startswith("%"):
            header_lines += 1
        elif 0 < header_lines < 15:
            # between the QUESTION and the ANSWER frame
            question.append(line.replace('\n', "<br />"))
        elif header_lines > 8:
            # below the ANSWER frame
            answer.append(line.replace('\n', "<br />"))

    if header_lines < 8:
        raise HeaderNotIntactError(
            "You deleted header lines! "
            "The QUESTION and ANSWER markers must be kept intact, "
            "otherwise parsing fails."
        )

    return "<br />".join(question), "<br />".join(answer)


def editor_command(filename,
                   editor=getenv("EDITOR", "vim"),
                   editor_args=DEFAULT_EDITOR_ARGS):
    """
    Build the call to open `filename` using `editor` with arguments
    `editor_args`.

    Parameters
    ----------
    filename : string
        (Full) path to a file to open.
    editor : string, optional
        (Full) path to an editor executable to use.
        Defaults to the result of calling `getenv("EDITOR", "vim")`,
        that is either environment variable $EDITOR if it is set, with
        fallback "vim" if it is not.
    editor_args : tuple, optional
        Additional arguments to pass to `editor` upon calling.
        Defaults to a suggested sequence of default arguments for vim(1).

    Returns
    ----------
    editor_call : tuple
        Tuple that contains full call to `editor` with `editor_args` to
        open `filename`. Can directly be passed to `subprocess.call`.
    """
    return tuple([editor] + list(editor_args) + [filename])


def open_editor(filename,
                editor=getenv("EDITOR", "vim"),
                editor_args=DEFAULT_EDITOR_ARGS):
    """
    Open `filename` using `editor` which is called with arguments
    `editor_args`.

    Parameters
    ----------
    filename : string
        (Full) path to a file to open.
    editor : string, optional
        (Full) path to an editor executable to use.
        Defaults to the result of calling `getenv("EDITOR", "vim")`,
        that is either environment variable $EDITOR if it is set, with
        fallback "vim" if it is not.
    editor_args : tuple, optional
        Additional arguments to pass to `editor` upon calling.
        Defaults to a suggested sequence of default arguments for vim(1).

    Raises
    ----------
    ValueError if calling the editor fails.
    """
    call_command = editor_command(filename, editor, editor_args)
    try:
        check_call(call_command)
    except CalledProcessError:
        # BUG FIX: the message previously hard-coded "(unknown)" although
        # `filename` was passed to format() — now the placeholder is used.
        raise ValueError(
            "Failed to call editor '{editor}' on filename '{filename}'.\n "
            "Full call string was: {call}".format(
                editor=editor, filename=filename, call=" ".join(call_command)
            )
        )


def create_card(deckpath,
                editor=getenv("EDITOR", "vim"),
                editor_args=DEFAULT_EDITOR_ARGS):
    """
    Create a new anki-card in deck at path `deckpath`, by appending new
    formatted content to deckpath/raw_cards.txt.
    Will create a new deck directory at `deckpath` if there is none yet.

    Parameters
    ----------
    deckpath : string
        Full path to a folder containing raw textual data that can be
        imported into anki(1) directly.
    editor : string, optional
        (Full) path to an editor executable to use.
        Defaults to the result of calling `getenv("EDITOR", "vim")`,
        that is either environment variable $EDITOR if it is set, with
        fallback "vim" if it is not.
    editor_args : tuple, optional
        Additional arguments to pass to `editor` upon calling.
        Defaults to a suggested sequence of default arguments for vim(1).

    Returns
    ----------
    bool : False if the user entered no content, True otherwise.
    """
    if not path_exists(deckpath):
        makedirs(deckpath)

    qa_headers = "{question}{space}{answer}{space}".format(
        question=draw_frame(content="QUESTION"),
        answer=draw_frame(content="ANSWER\t"),
        space="\n\n\n",
    )

    with tempfile.NamedTemporaryFile(suffix='.anki_vim') as temporary_file:
        write_file(temporary_file, qa_headers)
        # flush to ensure Q/A headers are already in the file when we
        # open it in vim.
        temporary_file.flush()

        # Call vim, set the cursor below the "FRONT" header,
        # allow snippets for our new filetype, set the syntax highlighting
        # so that it supports latex highlighting.
        open_editor(
            filename=temporary_file.name,
            editor=editor,
            editor_args=editor_args
        )

        with open(temporary_file.name, 'r') as contents_file:
            contents = contents_file.read()

    # Unchanged headers mean the user typed nothing: no card to write.
    has_no_user_input = contents == qa_headers
    if has_no_user_input:
        return False

    with open(path_join(deckpath, "raw_cards.txt"), "a") as f:
        question, answer = parse_qa(contents)
        f.writelines([question, "\t", answer, "\n"])
    return True
AnkiVim
/AnkiVim-1.5.3.tar.gz/AnkiVim-1.5.3/ankivim/cards.py
cards.py
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution


class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """

    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction: divide by n-1 for a sample estimate.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)

        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)

        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian
        distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2 * math.pi))) * \
            math.exp(-0.5 * ((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Function to plot the normalized histogram of the data and a plot of
        the probability density function along the same range.

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        mu = self.mean
        sigma = self.stdev

        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval * i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')

        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # BUG FIX: the original labelled axes[0] a second time here, leaving
        # the pdf subplot (axes[1]) without a y-label.
        axes[1].set_ylabel('Density')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions.

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        # sum of independent Gaussians: means add, variances add
        result.mean = self.mean + other.mean
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance.

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
Ankit-distributions
/Ankit_distributions-0.1.tar.gz/Ankit_distributions-0.1/Ankit_distributions/Gaussiandistribution.py
Gaussiandistribution.py
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution


class Binomial(Distribution):
    """ Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats to be extracted from the
            data file
        p (float) representing the probability of an event occurring
        n (int) number of trials
    """

    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        Distribution.__init__(self,
                              self.calculate_mean(),
                              self.calculate_stdev())

    def calculate_mean(self):
        """Function to calculate the mean from p and n.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Function to calculate the standard deviation from p and n.

        Args:
            None

        Returns:
            float: standard deviation of the data set
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Function to calculate p and n from the data set.

        Args:
            None

        Returns:
            float: the p value
            float: the n value
        """
        # data is assumed to be a list of 0/1 outcomes, so the mean of the
        # data is the empirical success probability p.
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()

    def plot_bar(self):
        """Function to output a bar chart of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.bar(x=['0', '1'],
                height=[(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability density function calculator for the binomial
        distribution.

        Args:
            k (int): number of successes for which to evaluate the pdf

        Returns:
            float: probability density function output
        """
        # n-choose-k * p^k * (1-p)^(n-k)
        a = math.factorial(self.n) / \
            (math.factorial(k) * (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Function to plot the pdf of the binomial distribution.

        Args:
            None

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        x = []
        y = []

        # calculate the x values to visualize
        for i in range(self.n + 1):
            x.append(i)
            y.append(self.pdf(i))

        # make the plots
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Binomial distributions with equal p.

        Args:
            other (Binomial): Binomial instance

        Returns:
            Binomial: Binomial distribution

        Raises:
            AssertionError: if the two distributions have different p values
        """
        # The sum of two binomials is only binomial when both share p.
        # (The original wrapped this assert in a no-op try/except that
        # re-raised; the bare assert raises the same AssertionError.)
        assert self.p == other.p, 'p values are not equal'

        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Function to output the characteristics of the Binomial instance.

        Args:
            None

        Returns:
            string: characteristics of the Binomial
        """
        return "mean {}, standard deviation {}, p {}, n {}".\
            format(self.mean, self.stdev, self.p, self.n)
Ankit-distributions
/Ankit_distributions-0.1.tar.gz/Ankit_distributions-0.1/Ankit_distributions/Binomialdistribution.py
Binomialdistribution.py
import logging
import random
import string
import socket
from functools import wraps

import flask
import requests
from flask import _app_ctx_stack
from flask import current_app
from flask import g
from flask import request

from py_zipkin import zipkin
from py_zipkin import Encoding

__version_info__ = ('0', '0', '6')
__version__ = '.'.join(__version_info__)
__author__ = 'Hyper Anna'
__license__ = 'BSD'
__copyright__ = '(c) 2019 by Hyper Anna'
__all__ = ['Zipkin']


class Zipkin(object):
    """Flask extension that traces requests and reports spans to Zipkin.

    Configuration keys read from ``app.config``:
      ZIPKIN_DSN          -- collector URL posted to by the default handler
      ZIPKIN_SERVICE_NAME -- reported service name (falls back to app.name)
      ZIPKIN_TAGS         -- default span tags (falls back to hostname)
      ZIPKIN_DISABLE      -- disable tracing (falls back to TESTING)
    """

    def _gen_random_id(self):
        # 16 random decimal digits, used when the incoming request carries
        # no B3 trace/span headers.
        return ''.join(random.choice(string.digits) for _ in range(16))

    def __init__(self, app=None, sample_rate=100, timeout=1):
        self._exempt_views = set()
        self._sample_rate = sample_rate
        self._transport_handler = None
        self._transport_exception_handler = None
        self._timeout = timeout
        self._header = {'Content-Type': 'application/json'}
        self._encoding = Encoding.V2_JSON
        # init_app last, so the instance is fully configured by the time
        # request hooks are registered (the original assigned several
        # attributes only after init_app had already run).
        if app is not None:
            self.init_app(app)

    def default_exception_handler(self, ex):
        # Swallow transport errors by default: tracing must never take
        # the traced application down.
        pass

    def default_handler(self, encoded_span):
        """POST an encoded span to the collector at ZIPKIN_DSN."""
        try:
            return requests.post(
                self.app.config.get('ZIPKIN_DSN'),
                data=encoded_span,
                headers=self._header,
                timeout=self._timeout,
            )
        except Exception as e:
            if self._transport_exception_handler:
                self._transport_exception_handler(e)
            else:
                self.default_exception_handler(e)

    def transport_handler(self, callback):
        """Decorator registering a custom span transport callback."""
        self._transport_handler = callback
        return callback

    def transport_exception_handler(self, callback):
        """Decorator registering a custom transport-error callback."""
        self._transport_exception_handler = callback
        return callback

    def init_app(self, app):
        """Attach the before/after request hooks to `app`."""
        self.app = app
        app.before_request(self._before_request)
        app.after_request(self._after_request)
        self._disable = app.config.get(
            'ZIPKIN_DISABLE', app.config.get('TESTING', False))
        return self

    def _should_use_token(self, view_func):
        return (view_func not in self._exempt_views)

    def _before_request(self):
        """Start a zipkin span for the incoming request (unless exempt)."""
        if self._disable:
            return

        _app_ctx_stack.top._view_func = \
            current_app.view_functions.get(request.endpoint)

        if not self._should_use_token(_app_ctx_stack.top._view_func):
            return

        # Continue an incoming B3 trace if present, otherwise start one.
        headers = request.headers
        trace_id = headers.get('X-B3-TraceId') or self._gen_random_id()
        span_id = headers.get('X-B3-SpanId') or self._gen_random_id()
        parent_span_id = headers.get('X-B3-ParentSpanId')
        is_sampled = str(headers.get('X-B3-Sampled') or '1') == '1'
        flags = headers.get('X-B3-Flags')

        zipkin_attrs = zipkin.ZipkinAttrs(
            trace_id=trace_id,
            span_id=span_id,
            parent_span_id=parent_span_id,
            flags=flags,
            is_sampled=is_sampled,
        )

        handler = self._transport_handler or self.default_handler

        span = zipkin.zipkin_span(
            service_name=self.app.config.get('ZIPKIN_SERVICE_NAME',
                                             self.app.name),
            span_name='{0}.{1}'.format(request.endpoint, request.method),
            transport_handler=handler,
            sample_rate=self._sample_rate,
            zipkin_attrs=zipkin_attrs,
            encoding=self._encoding
        )
        g._zipkin_span = span
        g._zipkin_span.start()

        default_tags = self.app.config.get(
            'ZIPKIN_TAGS', {'hostname': socket.gethostname()})
        self.update_tags(default_tags)

    def exempt(self, view):
        """Mark a view so requests to it are not traced."""
        view_location = '{0}.{1}'.format(view.__module__, view.__name__)
        self._exempt_views.add(view_location)
        return view

    def _after_request(self, response):
        """Stop the request's span, if one was started."""
        if self._disable:
            return response
        if not hasattr(g, '_zipkin_span'):
            return response
        g._zipkin_span.stop()
        return response

    def create_http_headers_for_new_span(self):
        """Return B3 headers propagating the current trace downstream."""
        if self._disable:
            return dict()
        return zipkin.create_http_headers_for_new_span()

    def update_tags(self, tags):
        """Attach `tags` to the current request's span, if any.

        BUG FIX: the original used ``all([hasattr(g, '_zipkin_span'),
        g._zipkin_span])`` — the list is built eagerly, so
        ``g._zipkin_span`` was evaluated even when the attribute was
        missing, raising AttributeError outside a traced request.
        ``and`` short-circuits instead.
        """
        if hasattr(g, '_zipkin_span') and g._zipkin_span:
            g._zipkin_span.update_binary_annotations(tags)


def child_span(f):
    """Decorator running `f` inside a child span named after the function.

    The span is passed to `f` via the ``span`` keyword argument, and the
    call's args/return value are recorded as binary annotations.
    """
    @wraps(f)  # preserve f's name/docstring on the wrapper
    def decorated(*args, **kwargs):
        span = zipkin.zipkin_span(
            service_name=flask.current_app.name,
            span_name=f.__name__,
        )
        kwargs['span'] = span
        with span:
            val = f(*args, **kwargs)
            span.update_binary_annotations({
                'function_args': args,
                'function_returns': val,
            })
        return val
    return decorated
Anna-Flask-Zipkin
/Anna_Flask_Zipkin-0.0.6-py3-none-any.whl/flask_zipkin.py
flask_zipkin.py
annalist
========

(Short README for PyPI - a longer version can be found at
https://github.com/gklyne/annalist)

Free-form web data notebook - "Data management for little guys"

Annalist is a *Linked Data Notebook*, supporting collection, organization and
sharing of structured and semi-structured data.

The name "Annalist" derives from "`a person who writes annals
<http://www.oxforddictionaries.com/definition/english/annalist>`_".

Goals
-----

The overall goal of Annalist is to make it easy for individuals and small
teams to create and explore linked data on the web, without requiring
software development.

* Easy data: out-of-box data acquisition, modification and organization of
  small data records.
* Flexible data: new record types and fields can be added as required.
* Sharable data: use textual, easy-to-read file formats that can be shared
  by web, email, file transfer, version management system, memory stick, etc.
* Remixable data: records that can be first-class participants in a wider
  ecosystem of linked data, with links in and links out.

Demo system home page
---------------------

For general information about Annalist, and demonstration system links, see
the `Annalist demonstration system home page <http://annalist.net/>`_.

Status
------

For current release status, see
https://github.com/gklyne/annalist/blob/master/documents/release-notes/release-v0.5.md
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/README.rst
README.rst
"""
Utility functions used by Annalist: identifier validation and parsing,
slug extraction from paths and URIs, directory and file tree management,
and web resource download/copy helpers.
"""

from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import errno
import traceback
import re
import stat
import time
import json
import shutil
import io

from django.conf import settings

from utils.py3porting import (
    is_string, to_unicode,
    urlparse, urljoin, urlsplit, urlopen, Request,
    get_message_type
    )

from annalist import layout
from annalist.identifiers import ANNAL

def valid_id(id_string, reserved_ok=False):
    """
    Checks the supplied id is valid as an Annalist identifier.

    The main requirement is that it is valid as a URI path segment, so it
    can be used in the creation of URIs for Annalist resources.  Also,
    filters out reserved Annalist identifiers unless reserved_ok=True
    parameter is provided.

    >>> valid_id("abcdef_1234")
    True
    >>> valid_id("abcdef/1234")
    False
    >>> valid_id("_annalist_collection")
    False
    >>> valid_id("_annalist_collection", reserved_ok=True)
    True
    >>> valid_id("")
    False
    """
    reserved = (
        [ layout.INITIAL_VALUES_ID
        , layout.COLL_ROOT_CONF_OLD_DIR
        , layout.SITEDATA_ID
        ])
    #@@DEBUG - write traceback to local file
    # DEBUG = (id_string in reserved) and (not reserved_ok)
    # if DEBUG:
    #     with open("debug-traceback.log", "a") as f:
    #         f.write("@@@@ Unexpected identifier %s"%(id_string))
    #         f.write("".join(traceback.format_stack()))
    #@@
    # cf. urls.py:
    if id_string and re.match(r"\w{1,128}$", id_string):
        return reserved_ok or (id_string not in reserved)
    # log.warning("util.valid_id: id %s"%(id_string))
    return False

def split_type_entity_id(eid, default_type_id=None):
    """
    Returns (type_id, entity_id) pair for supplied string "type_id/entity_id".

    The supplied `eid` may be a bare "entity_id", in which case the supplied
    default type_id is used.

    >>> split_type_entity_id("type_id/entity_id") == ('type_id', 'entity_id')
    True
    >>> split_type_entity_id("t/e", "f") == ('t', 'e')
    True
    >>> split_type_entity_id("entity_id", "def_type_id") == ('def_type_id', 'entity_id')
    True
    >>> split_type_entity_id(None, "def_type_id") == ('def_type_id', None)
    True
    """
    if eid is not None:
        sub_ids = eid.split("/")
        if len(sub_ids) == 2:
            return (sub_ids[0], sub_ids[1])
        if len(sub_ids) == 1:
            return (default_type_id, sub_ids[0])
        # More than one "/": treat entity id as empty
        return (default_type_id, "")
    return (default_type_id, None)

def extract_entity_id(eid):
    """
    Accepts an entity id which may have `type_id/entity_id` form, and returns
    the bare `entity_id` value.

    >>> extract_entity_id("type_id/entity_id") == "entity_id"
    True
    >>> extract_entity_id("entity_id") == "entity_id"
    True
    """
    _type_id, entity_id = split_type_entity_id(eid)
    return entity_id

def fill_type_entity_id(eid, default_type_id=None):
    """
    Assemble a type+entity composite identifier based on a supplied id string.
    If the string does not already include a type_id value, the supplied
    default is used.

    >>> fill_type_entity_id("entity_id", default_type_id="def_type_id") == "def_type_id/entity_id"
    True
    >>> fill_type_entity_id("type_id/entity_id", default_type_id="def_type_id") == "type_id/entity_id"
    True
    """
    type_id, entity_id = split_type_entity_id(eid, default_type_id=default_type_id)
    return make_type_entity_id(type_id, entity_id)

def make_type_entity_id(type_id=None, entity_id=None):
    """
    Assemble a type_id and entity_id and return a composite identifier.

    If the entity Id is blank, ignore the supplied type id

    >>> make_type_entity_id(type_id="type_id", entity_id="entity_id") == "type_id/entity_id"
    True
    >>> make_type_entity_id(type_id="type_id", entity_id="") == ""
    True
    """
    assert type_id is not None, "make_type_entity_id: no type id (%s, %s)"%(type_id, entity_id)
    assert entity_id is not None, "make_type_entity_id: no entity id (%s, %s)"%(type_id, entity_id)
    if entity_id != "":
        return type_id + "/" + entity_id
    return ""

def make_entity_base_url(url):
    """
    Returns an entity URL with a trailing "/" so that it can be used
    consistently with urljoin to obtain URLs for specific resources
    associated with the entity.

    >>> make_entity_base_url("/example/path/") == '/example/path/'
    True
    >>> make_entity_base_url("/example/path") == '/example/path/'
    True
    """
    return url if url.endswith("/") else url + "/"

def label_from_id(id_string):
    """
    Returns a label string constructed from the supplied Id string.

    Underscore characters in the Id are replaced by spaces, and the first
    character is capitalized.  An empty or whitespace-only Id yields "".

    >>> label_from_id("entity_id") == "Entity id"
    True
    >>> label_from_id("") == ""
    True
    """
    temp  = id_string.replace('_', ' ').strip()
    if not temp:
        return ""       # Guard: avoid IndexError on empty/whitespace-only Id
    label = temp[0].upper() + temp[1:]
    return label

def slug_from_name(filename):
    """
    Extracts a slug (id) value from a filename

    >>> slug_from_name("bar.baz") == 'bar'
    True
    >>> slug_from_name("bar") == 'bar'
    True
    >>> slug_from_name(".baz") == '.baz'
    True
    """
    slug = os.path.splitext(filename)[0]
    return slug

def slug_from_path(path):
    """
    Extracts a slug (id) value from a file path

    >>> slug_from_path("/foo/bar.baz") == 'bar'
    True
    >>> slug_from_path("/bar") == 'bar'
    True
    >>> slug_from_path("bar") == 'bar'
    True
    >>> slug_from_path("/example.org/foo/bar/.baz") == '.baz'
    True
    >>> slug_from_path("/foo/bar.baz") == 'bar'
    True
    >>> slug_from_path("/example.org/foo/bar/")+"$" == '$'
    True
    """
    return slug_from_name(os.path.basename(path))

def slug_from_uri(uri):
    """
    Extracts a slug (id) value from a URI

    >>> slug_from_uri("http:/example.org/foo/bar") == 'bar'
    True
    >>> slug_from_uri("/example.org/foo/bar") == 'bar'
    True
    >>> slug_from_uri("/foo/bar") == 'bar'
    True
    >>> slug_from_uri("/bar") == 'bar'
    True
    >>> slug_from_uri("bar") == 'bar'
    True
    >>> slug_from_uri("/example.org/foo/bar/")+"$" == '$'
    True
    >>> slug_from_uri("http:/example.org/foo/bar.baz") == 'bar'
    True
    >>> slug_from_uri("http:/example.org/foo/bar;baz") == 'bar;baz'
    True
    >>> slug_from_uri("http:/example.org/foo/bar?baz") == 'bar'
    True
    >>> slug_from_uri("http:/example.org/foo/bar#baz") == 'bar'
    True
    """
    return slug_from_path(urlsplit(uri).path)

def ensure_dir(dirname):
    """
    Ensure that a named directory exists; if it does not, attempt to create it.
    """
    try:
        os.makedirs(dirname)
    except OSError as e:
        # EEXIST is benign (directory already present); anything else is real
        if e.errno != errno.EEXIST:
            raise
    return

def entity_dir_path(base_dir, path, filename):
    """
    Assemble full entity description directory and file names from supplied
    components.

    base_dir    is the fully qualified base directory for the site or
                collection from which data is to be read
    path        is a relative path or list of path segments within the site
                or resource for the site or collection from which data is to
                be read.  This value may be absent (an empty list).
    filename    is a file name for the data resource to be read.

    Returns a pair containing the full directory and path names for the
    entity file.

    >>> entity_dir_path("/base/dir/","sub","file.ext") == ('/base/dir/sub', '/base/dir/sub/file.ext')
    True
    >>> entity_dir_path("/base/dir/",["sub"],"file.ext") == ('/base/dir/sub', '/base/dir/sub/file.ext')
    True
    >>> entity_dir_path("/base/dir/",[],"file.ext") == ('/base/dir', '/base/dir/file.ext')
    True
    >>> entity_dir_path("/base/dir/",["sub1","sub2"],"file.ext") == ('/base/dir/sub1/sub2', '/base/dir/sub1/sub2/file.ext')
    True
    >>> entity_dir_path("/base/dir",["sub"],"file.ext") == ('/base/dir/sub', '/base/dir/sub/file.ext')
    True
    >>> entity_dir_path("/base/dir",[],"sub/file.ext") == ('/base/dir/sub', '/base/dir/sub/file.ext')
    True
    """
    # log.debug("util.entity_dir_path %s, %r, %s"%(base_dir, path, filename))
    if path:
        if isinstance(path, (list, tuple)):
            d = os.path.join(base_dir, *path)
        else:
            d = os.path.join(base_dir, path)
    else:
        d = base_dir
    p = os.path.join(d, filename)
    # Recompute directory from full path in case filename includes a subpath
    d = os.path.dirname(p)
    return (d, p)

def entity_path(base_dir, path, filename):
    """
    Assemble full entity description file names from supplied components.

    base_dir    is the fully qualified base directory for the site or
                collection from which data is to be read
    path        is a relative path or list of path segments within the site
                or resource for the site or collection from which data is to
                be read.  This value may be absent.
    filename    is a file name for the data resource to be read.

    Returns the full path name for the entity file if the intermediate
    directories all exist, otherwise None if any directories are missing.
    No test is made for existence of the filename.

    >>> entity_path('.',[],"file.ext") == './file.ext'
    True
    >>> entity_path('.',["nopath"],"file.ext") is None
    True
    """
    (d, p) = entity_dir_path(base_dir, path, filename)
    # log.debug("entity_path: d %s, p %s"%(d,p))
    if d and os.path.isdir(d):
        return p
    return None

def entity_url_host(baseuri, entityref):
    """
    Return host part (as appears in an HTTP host: header) from an entity URI.

    >>> entity_url_host("http://example.org/basepath/", "/path/to/entity") == 'example.org'
    True
    >>> entity_url_host("http://example.org:80/basepath/", "/path/to/entity") == 'example.org:80'
    True
    >>> entity_url_host("http://[email protected]:80/basepath/", "/path/to/entity") == 'example.org:80'
    True
    >>> entity_url_host("http://base.example.org:80/basepath/", "http://ref.example.org/path/to/entity") == 'ref.example.org'
    True
    """
    uri = urljoin(baseuri, entityref)
    p   = urlparse(uri)
    h   = p.hostname or ""
    if p.port:
        h += ":" + str(p.port)
    return h

def entity_url_path(baseuri, entityref):
    """
    Return absolute path part from an entity URI, excluding query or fragment.

    >>> entity_url_path("http://example.org/basepath/", "/path/to/entity") == '/path/to/entity'
    True
    >>> entity_url_path("http://example.org/basepath/", "relpath/to/entity") == '/basepath/relpath/to/entity'
    True
    >>> entity_url_path("http://example.org/basepath/", "/path/to/entity?query") == '/path/to/entity'
    True
    >>> entity_url_path("http://example.org/basepath/", "/path/to/entity#frag") == '/path/to/entity'
    True
    >>> entity_url_path("/basepath/", "relpath/to/entity") == '/basepath/relpath/to/entity'
    True
    """
    uri = urljoin(baseuri, entityref)
    return urlparse(uri).path

def make_resource_url(baseuri, entityref, resourceref):
    """
    Build a URL for an entity resource that is based on a supplied base URI
    and entity reference, but including the supplied resource name reference
    (i.e. filename)

    This function preserves any query parameters from the supplied entityref.

    >>> make_resource_url("http://example.org/foo/", "/bar/stuff", "entity.ref") == 'http://example.org/bar/entity.ref'
    True
    >>> make_resource_url("http://example.org/foo/", "/bar/stuff?query=val", "entity.ref") == 'http://example.org/bar/entity.ref?query=val'
    True
    """
    url          = urljoin(baseuri, entityref)
    resource_url = urljoin(url, make_resource_ref_query(entityref, resourceref))
    return resource_url

def make_resource_ref_query(entityref, resourceref):
    """
    Returns `resourceref` with the query component (if any) from entityref.

    This is used to generate a resource reference that is the supplied
    resource ref treated as relative to the supplied entityref, including any
    query parameters included in entityref.

    >>> make_resource_ref_query("http://example.com/foo?query=value", "http://example.org/bar") == "http://example.org/bar?query=value"
    True
    """
    query = urlsplit(entityref).query
    if query != "":
        query = "?" + query
    return urljoin(resourceref, query)

def strip_comments(f):
    """
    Returns a file-like object that returns content from the supplied
    file-like object, but with comment lines replaced with blank lines.

    >>> f1 = io.StringIO("// comment\\ndata\\n// another comment\\n\\n")
    >>> f2 = strip_comments(f1)
    >>> f2.read() == '\\ndata\\n\\n\\n'
    True
    """
    fnc = io.StringIO()
    sof = fnc.tell()
    for line in f:
        if re.match(r"^\s*//", line):
            fnc.write("\n")
        else:
            fnc.write(to_unicode(line))
    fnc.seek(sof)
    return fnc

def renametree_temp(src):
    """
    Rename tree to temporary name, and return that name, or None if the
    source directory does not exist.
    """
    count = 0
    while count < 10:       # prevents indefinite loop
        count += 1
        tmp = os.path.join(os.path.dirname(src),"_removetree_tmp_%d"%(count))
        try:
            os.rename(src, tmp)
            return tmp      # Success!
        # except WindowsError as e:
        #     log.warning(
        #         "util.renametree_temp: WindowsError: winerror %d, strerror %s, errno %d"%
        #         (e.winerror, e.strerror, e.errno)
        #         )
        #     continue      # Try another temp name
        except OSError as e:
            time.sleep(1)
            if e.errno == errno.EACCES:
                log.warning("util.renametree_temp: %s EACCES, retrying", tmp)
                continue    # Try another temp name
            if e.errno == errno.ENOTEMPTY:
                log.warning("util.renametree_temp: %s ENOTEMPTY, retrying", tmp)
                continue    # Try another temp name
            if e.errno == errno.EEXIST:
                log.warning("util.renametree_temp: %s EEXIST, retrying", tmp)
                shutil.rmtree(tmp, ignore_errors=True)  # Try to clean up old files
                continue    # Try another temp name
            if e.errno == errno.ENOENT:
                log.warning("util.renametree_temp: %s ENOENT, skipping", tmp)
                break       # 'src' does not exist(?)
            raise           # Other error: propagate
    return None

def removetree(tgt):
    """
    Work-around for python problem with shutils tree remove functions on Windows.
    See:
        http://stackoverflow.com/questions/23924223/
        http://stackoverflow.com/questions/1213706/
        http://stackoverflow.com/questions/1889597/
        http://bugs.python.org/issue19643
    """
    # shutil.rmtree error handler that attempts recovery on Windows from
    # attempts to remove a read-only file or directory (see links above).
    def error_handler(func, path, execinfo):
        """
        figure out recovery based on error...
        """
        e = execinfo[1]
        if e.errno == errno.ENOENT or not os.path.exists(path):
            return          # path does not exist
        if func in (os.rmdir, os.remove) and e.errno == errno.EACCES:
            try:
                os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO)    # 0777
            except Exception as che:
                log.warning("util.removetree: chmod failed: %s", che)
            try:
                func(path)
            except Exception as rfe:
                log.warning("util.removetree: 'func' retry failed: %s", rfe)
                if not os.path.exists(path):
                    return  # Gone, assume all is well
                raise
        if e.errno == errno.ENOTEMPTY:
            log.warning("util.removetree: Not empty: %s, %s", path, tgt)
            time.sleep(1)
            removetree(path)    # Retry complete removal
            return
        log.warning("util.removetree: rmtree path: %s, error: %r", path, execinfo)
        raise e
    # Workaround for problems on Windows: it appears that the directory
    # removal does not complete immediately, causing subsequent failures.
    # Try renaming to a new directory first, so that the tgt is immediately
    # available for re-use.
    tmp = renametree_temp(tgt)
    if tmp:
        shutil.rmtree(tmp, onerror=error_handler)
    return

def replacetree(src, tgt):
    """
    Work-around for python problem with shutils tree copy functions on Windows.
    See: http://stackoverflow.com/questions/23924223/
    """
    if os.path.exists(tgt):
        removetree(tgt)
    shutil.copytree(src, tgt)
    return

def updatetree(src, tgt):
    """
    Like replacetree, except that existing files are not removed unless
    replaced by a file of the same name in the source tree.

    NOTE: can't use shutil.copytree for this, as that requires that the
    destination tree does not exist.
    """
    files = os.listdir(src)
    for f in files:
        sf = os.path.join(src, f)
        if os.path.exists(sf) and not os.path.islink(sf):   # Ignore symlinks
            if os.path.isdir(sf):
                tf = os.path.join(tgt, f)
                if not os.path.isdir(tf):
                    os.makedirs(tf)
                updatetree(sf, tf)              # Recursive dir copy
            else:
                shutil.copy2(sf, tgt)           # Copy single file, may overwrite
    return

def expandtree(src, tgt):
    """
    Like updatetree, except that existing files are not updated.
    """
    files = os.listdir(src)
    for f in files:
        sf = os.path.join(src, f)
        if os.path.exists(sf) and not os.path.islink(sf):   # Ignore symlinks
            tf = os.path.join(tgt, f)
            if os.path.isdir(sf):
                if not os.path.isdir(tf):
                    os.makedirs(tf)
                expandtree(sf, tf)              # Recursive dir copy
            elif not os.path.exists(tf):
                shutil.copy2(sf, tgt)           # Copy single file
    return

def download_url_to_file(url, fileName=None):
    """
    Download resource at given URL to a specified file, or to a a filename
    based on any Content-disposition header present, or on the URL itself.

    This code lifted from a contribution by
    [Michael Waterfall](http://michael.typify.io/) at
    [http://stackoverflow.com/questions/862173/]().  (Thanks!)
    """
    def getFileName(url, openUrl):
        """
        Local helper to extract filename from content disposition header or URL
        """
        if 'Content-Disposition' in openUrl.info():
            # If the response has Content-Disposition, try to get filename from it
            # cd = dict(map(
            #     lambda x: x.strip().split('=') if '=' in x else (x.strip(),''),
            #     openUrl.info()['Content-Disposition'].split(';')))
            cd = dict(
                [ x.strip().split('=') if '=' in x else (x.strip(),'')
                  for x in openUrl.info()['Content-Disposition'].split(';')
                ])
            if 'filename' in cd:
                filename = cd['filename'].strip("\"'")
                if filename:
                    return filename
        return os.path.basename(urlsplit(url)[2])
    r = urlopen(Request(url))
    try:
        fileName = fileName or getFileName(url,r)
        with open(fileName, 'wb') as f:
            shutil.copyfileobj(r, f)
    finally:
        r.close()
    return

def __unused__download_url_to_fileobj(url, fileobj=None):
    """
    Download resource at given URL and write the data to to a supplied
    file stream object.
    """
    r = urlopen(Request(url))
    try:
        shutil.copyfileobj(r, fileobj)
    finally:
        r.close()
    return

# Update MIME types returned by open_url when opening a file
# @@TODO: unify logic with resourcetypes module, and do all MIME type wrangling there
import mimetypes
mimetypes.init()
mimetypes.add_type("text/markdown", ".md")

def open_url(url):
    """
    Opens a file-like object to access resource contents at a URL, and returns
    the access object, actual URL (following any redirect), and resource type
    (MIME content-type string)
    """
    r = urlopen(Request(url))
    u = r.geturl()
    t = get_message_type(r.info())
    return (r, u, t)

def copy_resource_to_fileobj(srcobj, dstobj):
    """
    Copies data from a supplied source file object to a supplied destination
    object.

    Specifically, this is used when downloading a web resource to a local
    stored entity.
    """
    #@@TODO: timeout / size limit?  (Potential DoS?)
    shutil.copyfileobj(srcobj, dstobj)
    return

if __name__ == "__main__":
    import doctest
    doctest.testmod()

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/util.py
util.py
"""
Message strings (and message templates) used by the Annalist web interface.

All user-visible strings use old-style '%' formatting with named mapping
keys (e.g. "%(coll_id)s"), and are interpolated by the views that use them.
"""

from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

# General interaction messages

CUSTOMIZE_COLLECTION_HEADING = "Customize collection &mdash; %(coll_label)s"
SITE_NAME_DEFAULT           = "Annalist linked data notebook"
ACTION_COMPLETED            = "Action completed"
NO_ACTION_PERFORMED         = "No action performed"
NO_SELECTION                = "(No '%(id)s' selected)"
INPUT_ERROR                 = "Problem with input"
INPUT_VALIDATION_ERROR      = "Problem with input provided"
SYSTEM_ERROR                = "System error"
DATA_ERROR                  = "Problem with data"
UNEXPECTED_FORM_DATA        = "Unexpected form data: %r"
MISSING_COLLECTION_ID       = "Missing identifier for new collection"
INVALID_COLLECTION_ID       = "Invalid identifier for new collection: '%(coll_id)s'"
CREATED_COLLECTION_ID       = "Created new collection: '%(coll_id)s'"
NO_COLLECTION_METADATA      = "Metadata not found for collection '%(id)s'"
CONFIRM_REQUESTED_ACTION    = "Confirm requested action"
ARE_YOU_SURE                = "Are you sure?"
CONFIRM_OR_CANCEL           = '''Click "Confirm" to continue, or "Cancel" to abort operation'''
# (duplicate "ACTION_COMPLETED" assignment removed - same value as above)

# Serialization / parsing errors

TURTLE_SERIALIZE_ERROR      = "Problem generating Turtle serialization from data"
TURTLE_SERIALIZE_REASON     = "Internal description of error"
JSONLD_PARSE_ERROR          = "Problem parsing JSON-LD data (maybe JSON-LD context)"  # was "pasring"
JSONLD_PARSE_REASON         = "Internal description of error"

# Entity operation errors

INVALID_OPERATION_ATTEMPTED = "Attempt to perform invalid operation"  # was "peform"
INVALID_TYPE_CHANGE         = "Change of entity type to or from '_type' is not supported"
INVALID_TYPE_RENAME         = "Renaming of Annalist built-in types is not supported"
CREATE_ENTITY_FAILED        = "Problem creating/updating entity %s/%s (see log for more info)"
RENAME_ENTITY_FAILED        = "Problem renaming entity %s/%s to %s/%s (see log for more info)"
COPY_ENTITY_FAILED          = "Problem copying entity %s/%s to %s/%s (see log for more info)"
RENAME_TYPE_FAILED          = "Problem renaming type %s to %s (see log for more info)"

# Import / upload messages

IMPORT_ERROR                = "Resource import error"
IMPORT_ERROR_REASON         = ("Failed to import resource %(import_url)s as %(import_name)s"+
                               " for %(type_id)s/%(id)s: %(import_exc)s")
IMPORT_DONE                 = "Resource imported"
IMPORT_DONE_DETAIL          = ("Imported <%(resource_url)s>"+
                               " as %(import_name)s"+
                               " for entity %(type_id)s/%(id)s")
UPLOAD_ERROR                = "File upload error"
UPLOAD_ERROR_REASON         = ("Failed to upload file %(uploaded_file)s as %(upload_name)s"+
                               " for %(type_id)s/%(id)s: %(import_exc)s")
UPLOAD_DONE                 = "File uploaded"
UPLOAD_DONE_DETAIL          = ("Uploaded <%(uploaded_file)s>"+
                               " as %(upload_name)s"+
                               " for entity %(type_id)s/%(id)s")

# Collection selection / removal

NO_COLLECTION_VIEW          = "No collection selected for viewing"
MANY_COLLECTIONS_VIEW       = "Too many collections selected for viewing: %(ids)s"
NO_COLLECTION_EDIT          = "No collection selected for editing"
MANY_COLLECTIONS_EDIT       = "Too many collections selected for editing: %(ids)s"  # was "viewing" (copy/paste error)
NO_COLLECTIONS_REMOVE       = "No collections selected for removal"
REMOVE_COLLECTIONS          = "Remove collection(s): %(ids)s"
MIGRATE_COLLECTION_ERROR    = "Error(s) occurred while migrating collection data for %(id)s"
MIGRATED_COLLECTION_DATA    = "Migrated data for collection %(id)s"

# Entity selection / removal

TOO_MANY_ENTITIES_SEL       = "Too many items selected"
NO_ENTITY_FOR_COPY          = "No entity selected to copy"
NO_ENTITY_FOR_EDIT          = "No entity selected to edit"
NO_ENTITY_FOR_DELETE        = "No entity selected to delete"
CANNOT_DELETE_ENTITY        = "Entity %(id)s of type %(type_id)s not found or cannot be deleted"
SITE_ENTITY_FOR_DELETE      = "Cannot remove site built-in entity %(id)s of type %(type_id)s, or entity not found"
TYPE_VALUES_FOR_DELETE      = "Cannot remove type %(id)s with existing values"
REMOVE_ENTITY_DATA          = "Remove entity %(id)s of type %(type_id)s in collection %(coll_id)s"
NO_TYPE_FOR_COPY            = "No entity type selected to copy"
NO_TYPE_FOR_EDIT            = "No entity type selected to edit"
NO_TYPE_FOR_DELETE          = "No entity type selected to delete"
NO_VIEW_FOR_COPY            = "No entity view selected to copy"
NO_VIEW_FOR_EDIT            = "No entity view selected to edit"
NO_VIEW_FOR_DELETE          = "No entity view selected to delete"
NO_LIST_FOR_COPY            = "No list view selected to copy"
NO_LIST_FOR_EDIT            = "No list view selected to edit"
NO_LIST_FOR_DELETE          = "No list view selected to delete"

# Entity messages

ENTITY_MESSAGE_LABEL        = "%(type_id)s/%(entity_id)s in collection %(coll_id)s"
ENTITY_DEFAULT_LABEL        = ""    # "Entity %(type_id)s/%(entity_id)s in collection %(coll_id)s"
ENTITY_DEFAULT_COMMENT      = ""    # "Entity %(type_id)s/%(entity_id)s in collection %(coll_id)s"
ENTITY_DOES_NOT_EXIST       = "Entity %(type_id)s/%(id)s (%(label)s) does not exist"
ENTITY_COPY_FILE_ERROR      = "Failed to copy file %(file)s while copying entity %(id)s to %(src_id)s"  # was broken "%(id)%" format spec
RESOURCE_DOES_NOT_EXIST     = "Resource %(ref)s for entity %(id)s does not exist"
RESOURCE_NOT_DEFINED        = "Resource %(ref)s is not present for entity %(id)s"
REMOVE_RECORD_TYPE          = "Remove entity type %(id)s in collection %(coll_id)s"
REMOVE_RECORD_VIEW          = "Remove entity view %(id)s in collection %(coll_id)s"
REMOVE_RECORD_LIST          = "Remove list %(id)s in collection %(coll_id)s"
LIST_NOT_DEFINED            = "List %(list_id)s/%(list_ref)s is not present for entity type %(type_id)s"
LIST_NOT_ACCESSED           = "List %(list_id)s/%(list_ref)s not accessed for entity type %(type_id)s"
SITE_RESOURCE_NOT_DEFINED   = "Resource %(ref)s is not recognized for site"  # was "recogized"
SITE_RESOURCE_NOT_EXIST     = "Site resource %(ref)s does not exist"

# Collection messages

COLLECTION_ID               = "Problem with collection identifier"
COLLECTION_ID_INVALID       = "The collection identifier is missing or not a valid identifier"
COLLECTION_LABEL            = "Collection %(id)s"
COLLECTION_EXISTS           = "Collection %(save_id)s already exists"
COLLECTION_NOT_EXISTS       = "Collection %(id)s does not exist"
COLLECTION_REMOVED          = "The following collections were removed: %(ids)s"
COLLECTION_NEWER_VERSION    = ("Cannot access collection %(id)s, "+
                               "which was created by software version %(ver)s. "+
                               "(Update Annalist server software to use this collection)")
COLL_PARENT_NOT_EXIST       = "Collection %(id)s references non-existent parent %(parent_id)s"
COLL_RESOURCE_NOT_DEFINED   = "Resource %(ref)s is not recognized for collection %(id)s"  # was "recogized"
COLL_RESOURCE_NOT_EXIST     = "Resource %(ref)s for collection %(id)s does not exist"
COLL_MIGRATE_DIR_FAILED     = "Collection %(id)s migration %(old_path)s -> %(new_path)s failed. (%(exc)s)"

# User messages

ANNALIST_USER_ID            = "Problem with user identifier"
ANNALIST_USER_ID_INVALID    = "The user identifier is missing or not a valid identifier"
ANNALIST_USER_LABEL         = "User %(id)s in collection %(coll_id)s"
ANNALIST_USER_EXISTS        = "User %(save_id)s in collection %(save_coll)s already exists"
ANNALIST_USER_NOT_EXISTS    = "User %(id)s in collection %(coll_id)s does not exist"
ANNALIST_USER_REMOVED       = "User %(id)s in collection %(coll_id)s was removed"

# Record type messages

RECORD_TYPE_ID              = "Problem with entity type identifier"
RECORD_TYPE_ID_INVALID      = "The entity type identifier is missing or not a valid identifier"
RECORD_TYPE_LABEL           = "Entity type %(id)s in collection %(coll_id)s"
RECORD_TYPE_EXISTS          = "Entity type %(save_id)s in collection %(save_coll)s already exists"
RECORD_TYPE_NOT_EXISTS      = "Entity type %(id)s in collection %(coll_id)s does not exist"
RECORD_TYPE_REMOVED         = "Entity type %(id)s in collection %(coll_id)s was removed"

# Record view messages

RECORD_VIEW_ID              = "Problem with entity view identifier"
RECORD_VIEW_ID_INVALID      = "The entity view identifier is missing or not a valid identifier"
RECORD_VIEW_LABEL           = "Entity view %(id)s in collection %(coll_id)s"
RECORD_VIEW_EXISTS          = "Entity view %(save_id)s in collection %(save_coll)s already exists"
RECORD_VIEW_NOT_EXISTS      = "Entity view %(id)s in collection %(coll_id)s does not exist"
RECORD_VIEW_REMOVED         = "Entity view %(id)s in collection %(coll_id)s was removed"
RECORD_VIEW_LOAD_ERROR      = "Error loading view '%(id)s', file %(file)s: %(message)s"
DISPLAY_ALTERNATIVE_VIEW    = "Displaying alternative view '%(id)s'"

# Record list messages

RECORD_LIST_ID              = "Problem with list identifier"
RECORD_LIST_ID_INVALID      = "The list identifier is missing or not a valid identifier"
RECORD_LIST_LABEL           = "List %(id)s in collection %(coll_id)s"
RECORD_LIST_EXISTS          = "List %(save_id)s in collection %(save_coll)s already exists"
RECORD_LIST_NOT_EXISTS      = "List %(id)s in collection %(coll_id)s does not exist"
RECORD_LIST_REMOVED         = "List %(id)s in collection %(coll_id)s was removed"
RECORD_LIST_LOAD_ERROR      = "Error loading list '%(id)s', file %(file)s: %(message)s"
DISPLAY_ALTERNATIVE_LIST    = "Displaying alternative list '%(id)s'"

# Field group messages

RECORD_GROUP_ID             = "Problem with field group identifier"
RECORD_GROUP_ID_INVALID     = "The field group identifier is missing or not a valid identifier"
RECORD_GROUP_LABEL          = "Field group %(id)s in collection %(coll_id)s"
RECORD_GROUP_EXISTS         = "Field group %(save_id)s in collection %(save_coll)s already exists"
RECORD_GROUP_NOT_EXISTS     = "Field group %(id)s in collection %(coll_id)s does not exist"
RECORD_GROUP_REMOVED        = "Field group %(id)s in collection %(coll_id)s was removed"

# View field messages

RECORD_FIELD_ID             = "Problem with view field identifier"
RECORD_FIELD_ID_INVALID     = "The view field identifier is missing or not a valid identifier"
RECORD_FIELD_LABEL          = "View field %(id)s in collection %(coll_id)s"
RECORD_FIELD_EXISTS         = "View field %(save_id)s in collection %(save_coll)s already exists"
RECORD_FIELD_NOT_EXISTS     = "View field %(id)s in collection %(coll_id)s does not exist"
RECORD_FIELD_REMOVED        = "View field %(id)s in collection %(coll_id)s was removed"

# Vocabulary messages

RECORD_VOCAB_ID             = "Problem with vocabulary identifier"
RECORD_VOCAB_ID_INVALID     = "The vocabulary namespace identifier is missing or not a valid identifier"
RECORD_VOCAB_LABEL          = "Vocabulary %(id)s in collection %(coll_id)s"
RECORD_VOCAB_EXISTS         = "Vocabulary %(save_id)s in collection %(save_coll)s already exists"
RECORD_VOCAB_NOT_EXISTS     = "Vocabulary %(id)s in collection %(coll_id)s does not exist"
RECORD_VOCAB_REMOVED        = "Vocabulary %(id)s in collection %(coll_id)s was removed"
RECORD_VOCAB_URI_TERM       = "Vocabulary %(id)s namespace URI %(uri)s does not end with an expected delimiter"  # was "end with with"

# General information record messages

RECORD_INFO_ID              = "Problem with general information record identifier"
RECORD_INFO_ID_INVALID      = "General information record identifier is missing or not a valid identifier"
RECORD_INFO_LABEL           = "General information record %(id)s in collection %(coll_id)s"
RECORD_INFO_EXISTS          = "General information record %(save_id)s in collection %(save_coll)s already exists"
RECORD_INFO_NOT_EXISTS      = "General information record %(id)s in collection %(coll_id)s does not exist"
RECORD_INFO_REMOVED         = "General information record %(id)s in collection %(coll_id)s was removed"

# Enumeration type messages

RECORD_ENUM_ID              = "Problem with enumeration type identifier"
RECORD_ENUM_ID_INVALID      = "The enumeration type identifier is missing or not a valid identifier"
RECORD_ENUM_LABEL           = "Enumeration type %(id)s in collection %(coll_id)s"
RECORD_ENUM_EXISTS          = "Enumeration type %(save_id)s in collection %(save_coll)s already exists"
RECORD_ENUM_NOT_EXISTS      = "Enumeration type %(id)s in collection %(coll_id)s does not exist"
RECORD_ENUM_REMOVED         = "Enumeration type %(id)s in collection %(coll_id)s was removed"

# Entity data messages

ENTITY_DATA_ID              = "Problem with entity identifier"
ENTITY_DATA_ID_INVALID      = "The entity identifier is missing, too long, or not a valid identifier"
ENTITY_DATA_LABEL           = "Entity %(id)s of type %(type_id)s in collection %(coll_id)s"
ENTITY_DATA_EXISTS          = "Entity %(save_id)s of type %(save_type)s in collection %(save_coll)s already exists"
ENTITY_DATA_NOT_EXISTS      = "Entity %(id)s of type %(type_id)s in collection %(coll_id)s does not exist"
ENTITY_DATA_REMOVED         = "Entity %(id)s of type %(type_id)s in collection %(coll_id)s was removed"
ENTITY_TYPE_ID              = "Problem with entity type identifier"
ENTITY_TYPE_ID_INVALID      = "The entity type identifier is missing, too long, or not a valid identifier (%(type_id)s)"
ENTITY_LOAD_ERROR           = "Error loading '%(id)s', file %(file)s: %(message)s"

# View/list default messages

DEFAULT_LIST_UPDATED        = "Default list view for collection %(coll_id)s changed to %(list_id)s"
DEFAULT_VIEW_UPDATED        = "Default view for collection %(coll_id)s changed to %(view_id)s/%(type_id)s/%(entity_id)s"

# Form response and task messages

REMOVE_FIELD_ERROR          = "Problem with remove field(s) request"
MOVE_FIELD_ERROR            = "Problem with move field up/down request"
NO_FIELD_SELECTED           = "No field(s) selected"
CREATE_FIELD_ENTITY_ERROR   = "Create new entity error"
NO_REFER_TO_TYPE            = "Field '%(field_label)s' does not specify a valid 'Refer to type'"
MISSING_FIELD_LABEL         = "(field missing: '%(id)s')"
VIEW_DESCRIPTION_HEADING    = "Problem with view description"
VIEW_PROPERTY_DUPLICATE     = "Field %(field_id)s repeats use of property %(property_uri)s in view"
UNKNOWN_TASK_ID             = "Unknown task Id in form response: %(task_id)s"
NO_VIEW_OR_LIST_SELECTED    = "Please select an existing view and/or list as a basis for creating new ones"  # was "exiting"
TASK_CREATE_VIEW_LIST       = "Created new view and/or list for type %(id)s (%(label)s)"
TASK_CREATE_SUBTYPE         = "Created subtype %(id)s (%(label)s)"
TASK_CREATE_SUBFIELD        = "Created field %(id)s (%(label)s) using subproperty of %(base_uri)s."
TASK_CREATE_MANY_VALUE_FIELD = "Created repeating value field '%(field_id)s' for '%(label)s' (check subfield 'Entity type' is blank, or matches repeat field 'Value type')"
TASK_CREATE_LIST_VALUE_FIELD = "Created sequence of values field '%(field_id)s' for '%(label)s' (check subfield 'Entity type' is blank, or matches repeat field 'Value type')"
TASK_CREATE_REFERENCE_FIELD = "Created reference to field '%(field_id)s'. (Select value for 'Refer to type' on current display, and re-save. Also check subfield 'Entity type' is blank, or matches referring field 'Value type')"

# Strings for data generated by task buttons

# TYPE_COMMENT = (
#     "# %(type_label)s\n\n"+
#     "Entity type [%(type_label)s]($BASE:_type/%(type_id)s)."
#     )

SUBTYPE_COMMENT = (
    "# %(type_label)s\n\n"+
    "Entity type [%(type_label)s]($BASE:_type/%(type_id)s), "+
    "subtype of [%(base_type_label)s]($BASE:_type/%(base_type_id)s)."
    )

SUBFIELD_LABEL = (
    "@@ Subfield of %(base_field_label)s (%(base_field_id)s)@@"
    )
SUBFIELD_COMMENT = (
    "# %(field_label)s\n\n"+
    "Field [%(field_label)s]($BASE:_field/%(field_id)s), "+
    "using property uri %(field_prop_uri)s, "+
    "subproperty of [%(base_field_label)s]($BASE:_field/%(base_field_id)s)."
    )

TYPE_VIEW_LABEL = "%(type_label)s view"
TYPE_VIEW_COMMENT = (
    "# %(type_label)s view\n\n"+
    "View entity of type [%(type_label)s]($BASE:_type/%(type_id)s)."
    )
TYPE_LIST_LABEL = "%(type_label)s list"
TYPE_LIST_COMMENT = (
    "# %(type_label)s list\n\n"+
    "List entities of type [%(type_label)s]($BASE:_type/%(type_id)s)."
    )

MANY_FIELD_LABEL = "%(field_label)s (repeating)"
MANY_FIELD_COMMENT = (
    "# %(field_label)s (repeating)\n\n"+
    "Zero, one or more instances of [%(field_label)s]($BASE:_field/%(field_id)s)."
    )
MANY_FIELD_PLACEHOLDER = "(Zero, one or more %(field_label)s fields)"
MANY_FIELD_ADD = "Add %(field_label)s"
MANY_FIELD_DELETE = "Remove %(field_label)s"

LIST_FIELD_LABEL = "%(field_label)s (sequence)"
LIST_FIELD_COMMENT = (
    "# %(field_label)s (sequence)\n\n"+
    "List of [%(field_label)s]($BASE:_field/%(field_id)s) fields."
    )
LIST_FIELD_PLACEHOLDER = "(Sequence of %(field_label)s fields)"
LIST_FIELD_ADD = "Add %(field_label)s"
LIST_FIELD_DELETE = "Remove %(field_label)s"

FIELD_REF_LABEL = "%(field_label)s (ref)"
FIELD_REF_COMMENT = "%(field_label)s (ref)"
FIELD_REF_PLACEHOLDER = "(Reference to %(field_label)s field)"

# Other strings

COLL_README_HEAD = (
    "# %(label)s\n\r"+
    "\n\r"+
    "")
COLL_README = (
    "# Annalist collection `%(id)s`\n\r"+
    "\n\r"+
    "This directory contains an [Annalist](http://annalist.net) data collection.\n\r"+
    "\n\r"+
    "%(heading)s"+
    "%(comment)s"+
    "\n\r"+
    # "\n\r"+
    "")

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/message.py
message.py
"""
URL dispatch table for the Annalist application.

URL path segment conventions:

# c - collections
# v - view
# l - list
# d - data/default view
#
# Metadata (using built-in type identifiers, otherwise same pattern as data):
#
#   /c/<coll-id>/d/_type/               list of record types
#   /c/<coll-id>/d/_type/<type-id>      view of type description
#   /c/<coll-id>/d/_view/               list of record views
#   /c/<coll-id>/d/_view/<view-id>      view of view description
#   /c/<coll-id>/d/_list/               list of record lists
#   /c/<coll-id>/d/_list/<list-id>      view of list description
#   /c/<coll-id>/d/_field/              list of field descriptions
#   /c/<coll-id>/d/_field/<field-id>    view of field description
#
# Data:
#
#   /c/<coll-id>/d/                     default list of records
#   /c/<coll-id>/d/<type-id>/           default list of records of specified type
#   /c/<coll-id>/d/<type-id>/<entity-id>            default view of identified entity
#
#   /c/<coll-id>/l/<list-id>/           specified list of records
#   /c/<coll-id>/l/<list-id>/<type-id>  specified list of records of specified type
#   /c/<coll-id>/v/<view-id>/<type-id>/<entity-id>  specified view of record
#
# Suffixes /!new, /!copy, /!edit, /!delete, etc. are used for forms that are part of the
# user interface for editing collections and resources, and do not of themselves identify
# persistent resources.
"""

from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

from django.urls import re_path

from annalist.views.home_redirects import (
    AnnalistHomeView, AnnalistTypeRedirect, AnnalistEntityRedirect
    )
from annalist.views.profile            import ProfileView
from annalist.views.confirm            import ConfirmView
from annalist.views.serverlog          import ServerLogView
from annalist.views.site               import SiteView, SiteActionView
from annalist.views.collection         import CollectionView, CollectionEditView
from annalist.views.recordtypedelete   import RecordTypeDeleteConfirmedView
from annalist.views.recordviewdelete   import RecordViewDeleteConfirmedView
from annalist.views.recordlistdelete   import RecordListDeleteConfirmedView
from annalist.views.entityedit         import GenericEntityEditView
from annalist.views.entitylist         import EntityGenericListView
from annalist.views.entitylistdata     import EntityListDataView
from annalist.views.entitydelete       import EntityDataDeleteConfirmedView
from annalist.views.siteresource       import SiteResourceAccess
from annalist.views.collectionresource import CollectionResourceAccess
from annalist.views.entityresource     import EntityResourceAccess
from annalist.views.statichack         import serve_pages, serve_static

from login.login_views                 import LoginUserView, LoginPostView, LogoutUserView
from login.auth_oidc_client            import OIDC_AuthDoneView
from login.auth_django_client          import LocalUserPasswordView

urlpatterns = [

    # Site pages
    re_path(r'^$',
        AnnalistHomeView.as_view(), name='AnnalistHomeView'),
    re_path(r'^site/$',
        SiteView.as_view(), name='AnnalistSiteView'),
    re_path(r'^site/!action$',
        SiteActionView.as_view(), name='AnnalistSiteActionView'),
    re_path(r'^confirm/$',
        ConfirmView.as_view(), name='AnnalistConfirmView'),
    re_path(r'^serverlog/$',
        ServerLogView.as_view(), name='AnnalistServerLogView'),

    #@@ site/site.json
    #@@ site/site.ttl

    # Special forms for collection view, customize and type/view/list deletion
    re_path(r'^c/(?P<coll_id>\w{1,128})/$',
        CollectionView.as_view(), name='AnnalistCollectionView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/!edit$',
        CollectionEditView.as_view(), name='AnnalistCollectionEditView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/types/!delete_confirmed$',
        RecordTypeDeleteConfirmedView.as_view(), name='AnnalistRecordTypeDeleteView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/views/!delete_confirmed$',
        RecordViewDeleteConfirmedView.as_view(), name='AnnalistRecordViewDeleteView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/lists/!delete_confirmed$',
        RecordListDeleteConfirmedView.as_view(), name='AnnalistRecordListDeleteView'),

    # Default/API access lists and data
    # (these may content negotiate for various formats)
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/$',
        EntityGenericListView.as_view(), name='AnnalistEntityDefaultListAll'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/$',
        EntityGenericListView.as_view(), name='AnnalistEntityDefaultListType'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/!delete_confirmed$',
        EntityDataDeleteConfirmedView.as_view(), name='AnnalistEntityDataDeleteView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/$',
        GenericEntityEditView.as_view(), name='AnnalistEntityAccessView'),

    # Default edit views
    # NOTE(review): the same reverse-URL name is deliberately shared by the
    # copy/edit/view patterns below; Django's reverse() will resolve to the
    # last one registered — confirm reverse() is never used with this name.
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/!(?P<action>copy)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityDefaultDataView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/!(?P<action>edit)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityDefaultDataView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/!(?P<action>view)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityDefaultDataView'),

    # JSON list views without list_id specified
    # NOTE(review): the '.' in 'entity_list.' is unescaped and so matches any
    # character; left unchanged to avoid altering accepted URLs — confirm intent.
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<list_ref>entity_list.[\w]{1,32})$',
        EntityListDataView.as_view(), name='AnnalistEntityListDataAll'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<list_ref>entity_list.[\w]{1,32})$',
        EntityListDataView.as_view(), name='AnnalistEntityListDataType'),

    # Redirect type/entity URIs without trailing '/'
    # (Note these cannot match JSON resource names as '.' is not matched here)
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})$',
        AnnalistTypeRedirect.as_view(), name='AnnalistTypeRedirect'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})$',
        AnnalistEntityRedirect.as_view(), name='AnnalistEntityRedirect'),

    # Specified list views
    re_path(r'^c/(?P<coll_id>\w{1,128})/l/$',
        EntityGenericListView.as_view(), name='AnnalistEntityDefaultList'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/l/(?P<list_id>\w{1,128})/$',
        EntityGenericListView.as_view(), name='AnnalistEntityGenericList'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/l/(?P<list_id>\w{1,128})/(?P<type_id>\w{1,128})/$',
        EntityGenericListView.as_view(), name='AnnalistEntityGenericList'),

    # JSON specified list views
    re_path(r'^c/(?P<coll_id>\w{1,128})/l/(?P<list_ref>entity_list.[\w]{1,32})$',
        EntityListDataView.as_view(), name='AnnalistEntityListDataAll'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/l/(?P<list_id>\w{1,128})/(?P<list_ref>entity_list.[\w]{1,32})$',
        EntityListDataView.as_view(), name='AnnalistEntityListDataAll'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/l/(?P<list_id>\w{1,128})/(?P<type_id>\w{1,128})/(?P<list_ref>entity_list.[\w]{1,32})$',
        EntityListDataView.as_view(), name='AnnalistEntityListDataType'),

    # Specified entity edit/view forms
    re_path(r'^c/(?P<coll_id>\w{1,128})/v/(?P<view_id>\w{1,128})/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/$',
        GenericEntityEditView.as_view(), name='AnnalistEntityDataView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/v/(?P<view_id>\w{1,128})/(?P<type_id>\w{1,128})/!(?P<action>new)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityNewView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/v/(?P<view_id>\w{1,128})/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/!(?P<action>copy)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityEditView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/v/(?P<view_id>\w{1,128})/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/!(?P<action>edit)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityEditView'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/v/(?P<view_id>\w{1,128})/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/!(?P<action>view)$',
        GenericEntityEditView.as_view(), name='AnnalistEntityEditView'),

    # Named resource access (metadata, context, attachments, etc.)
    re_path(r'^site/(?P<resource_ref>[\w.-]{1,250})$',
        SiteResourceAccess.as_view(), name='AnnalistSiteResourceAccess'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<resource_ref>[\w.-]{1,250})$',
        CollectionResourceAccess.as_view(), name='AnnalistCollectionResourceAccess'),
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/(?P<resource_ref>[\w.-]{1,250})$',
        EntityResourceAccess.as_view(), name='AnnalistEntityResourceAccess'),

    # Entity resource data access with specified view
    re_path(r'^c/(?P<coll_id>\w{1,128})/v/(?P<view_id>\w{1,128})/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/(?P<resource_ref>[\w.-]{1,250})$',
        EntityResourceAccess.as_view(), name='AnnalistEntityViewAccess'),

    # Access supporting application pages in same collection
    re_path(r'^c/(?P<coll_id>\w{1,128})/p/(?P<page_ref>[\w/.-]{1,250})$',
        serve_pages),

    # Access "favicon.ico"
    # FIX: escape the '.' so only the literal filename matches (previously
    # 'favicon.ico' matched any character in place of the dot, e.g. 'faviconXico').
    re_path(r'^(?P<path>favicon\.ico)$',
        serve_static),

    ]
# End of urlpatterns

# Login-related view URLs
urlpatterns += [
    re_path(r'^login/$',       LoginUserView.as_view(),         name='LoginUserView'),
    re_path(r'^login_post/$',  LoginPostView.as_view(),         name='LoginPostView'),
    re_path(r'^login_local/$', LocalUserPasswordView.as_view(), name='LocalUserPasswordView'),
    # NOTE: no trailing '$' — presumably allows the OIDC provider to append
    # a suffix to the redirect URL; confirm before anchoring.
    re_path(r'^login_done/',   OIDC_AuthDoneView.as_view(),     name='OIDC_AuthDoneView'),
    re_path(r'^profile/$',     ProfileView.as_view(),           name='AnnalistProfileView'),
    re_path(r'^logout/$',      LogoutUserView.as_view(),        name='LogoutUserView'),

    # Info view...
    # re_path(r'^c/(?P<coll_id>_annalist_site)/d/(?P<type_id>_info)/(?P<entity_id>about)/$',
    re_path(r'^c/(?P<coll_id>\w{1,128})/d/(?P<type_id>\w{1,128})/(?P<entity_id>\w{1,128})/$',
        GenericEntityEditView.as_view(), name='AnnalistInfoView'),
    ]

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/urls.py
urls.py
"""
Mapping between resource type URIs/CURIEs and file extension / MIME
content-type pairs.

Each resource type is associated with a list of (extension, content-type)
pairs.  The first pair in each list is the preferred value used when
creating or serving a resource of that type; remaining pairs are accepted
alternatives compatible with the type.  Pairing extensions with MIME types
lets either value be inferred from the other.
"""

from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2015, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

# import logging
# log = logging.getLogger(__name__)

from annalist.identifiers import ANNAL

# Resource type URI/CURIE -> list of (file extension, MIME content-type) pairs.
# First entry of each list is the preferred representation.
resource_types = (
    { ANNAL.CURIE.Metadata:
        [ ("jsonld", "application/ld+json")
        , ("json",   "application/json")
        ]
    , ANNAL.CURIE.Text:
        [ ("txt",    "text/plain")
        ]
    , ANNAL.CURIE.Richtext:
        [ ("md",     "text/markdown")
        , ("txt",    "text/plain")
        ]
    , ANNAL.CURIE.Image:
        [ ("image",  "image/*")         # Default extension
        , ("png",    "image/png")
        , ("jpg",    "image/jpeg")
        , ("jpeg",   "image/jpeg")
        , ("gif",    "image/gif")
        , ("tiff",   "image/tiff")
        , ("svg",    "image/svg")
        , ("pdf",    "application/pdf")
        ]
    , ANNAL.CURIE.Audio:
        [ ("audio",  "audio/*")         # Default extension
        , ("mp3",    "audio/mpeg")
        , ("mp4",    "audio/mp4")
        , ("wav",    "audio/wav")
        , ("ogg",    "audio/ogg")
        #@@ needs fleshing out?
        ]
    , ANNAL.CURIE.Resource:
        [ ("md",     "text/markdown")
        , ("txt",    "text/plain")
        , ("png",    "image/png")
        , ("jpg",    "image/jpeg")
        , ("jpeg",   "image/jpeg")
        , ("gif",    "image/gif")
        , ("tiff",   "image/tiff")
        , ("svg",    "image/svg")
        , ("pdf",    "application/pdf")
        ]
    })

# Fallback pairing used for any type URI not listed above.
default_types = [("dat", "application/octet-stream")]


def file_extension(typeuri):
    """
    Returns preferred file extension for resource type

    >>> file_extension(ANNAL.CURIE.Metadata) == "jsonld"
    True
    >>> file_extension(ANNAL.CURIE.Richtext) == "md"
    True
    """
    preferred_ext, _ = resource_types.get(typeuri, default_types)[0]
    return preferred_ext


def content_type(typeuri):
    """
    Returns preferred MIME content-type for resource type

    >>> content_type(ANNAL.CURIE.Metadata) == "application/ld+json"
    True
    >>> content_type(ANNAL.CURIE.Richtext) == "text/markdown"
    True
    """
    _, preferred_ct = resource_types.get(typeuri, default_types)[0]
    return preferred_ct


def file_extension_for_content_type(typeuri, content_type):
    """
    Returns file extension for given content-type as an instance of a
    given type URI, or None.

    >>> file_extension_for_content_type(ANNAL.CURIE.Richtext, "text/markdown") == "md"
    True
    >>> file_extension_for_content_type(ANNAL.CURIE.Resource, "text/markdown") == "md"
    True
    >>> file_extension_for_content_type(ANNAL.CURIE.Resource, "application/pdf") == "pdf"
    True
    >>> file_extension_for_content_type(ANNAL.CURIE.Resource, "application/unknown") == None
    True
    """
    candidates = resource_types.get(typeuri, default_types)
    return next((ext for ext, mime in candidates if mime == content_type), None)


def content_type_for_file_extension(typeuri, file_extension):
    """
    Returns content-type for given file extension as an instance of a
    given type URI, or None.

    >>> content_type_for_file_extension(ANNAL.CURIE.Richtext, "md") == "text/markdown"
    True
    >>> content_type_for_file_extension(ANNAL.CURIE.Resource, "md") == "text/markdown"
    True
    >>> content_type_for_file_extension(ANNAL.CURIE.Resource, "pdf") == "application/pdf"
    True
    >>> content_type_for_file_extension(ANNAL.CURIE.Resource, "unknown") == None
    True
    """
    candidates = resource_types.get(typeuri, default_types)
    return next((mime for ext, mime in candidates if ext == file_extension), None)


if __name__ == "__main__":
    import doctest
    doctest.testmod()

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/resourcetypes.py
resourcetypes.py
"""
Descriptions of the data collections that `annalist-manager installcollection`
can install into an Annalist site.

`installable_collections` maps a collection id to a dict giving the source
data directory (`data_dir`) and the collection metadata (`coll_meta`) that
is written when the collection is installed.
"""

from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2015, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

# Collection data
#
# NOTE: when updating this, src/setup.py and src/MANIFEST.in also need to be updated.
# Also, am_help.py
#
# @@TODO: consider ways to discover these details by scanning the file system?
#

installable_collections = (
    # Common vocabulary namespace definitions.
    # NOTE: data_dir "namedata" differs from the collection id — presumably
    # intentional (legacy directory name); confirm before renaming.
    { "Namespace_defs":
        { 'data_dir': "namedata"
        , 'coll_meta':
            { "rdfs:label":     "Namespace definitions"
            , "rdfs:comment":   "# Namespace definitions"+
                                "\r\n\r\n"+
                                "Defines some common vocabulary namespaces "+
                                "not included in the base site data."+
                                "\r\n"
            , "annal:comment":  "Initialized by: `annalist-manager installcollection`"
            }
        }
    # Types/views/fields for referencing uploaded/imported/linked media.
    , "Resource_defs":
        { 'data_dir': "Resource_defs"
        , 'coll_meta':
            { "rdfs:label":     "Resource definitions"
            , "rdfs:comment":   "# Resource definitions"+
                                "\r\n\r\n"+
                                "This collection defines types, views and fields that "+
                                "can be used to incorporate references to uploaded, "+
                                "imported or linked media resources into entity views."+
                                "\r\n"
            , "annal:comment":  "Initialized by: `annalist-manager installcollection`"
            }
        }
    # SKOS-based concept definitions; inherits from Resource_defs.
    , "Concept_defs":
        { 'data_dir': "Concept_defs"
        , 'coll_meta':
            { "rdfs:label":     "Concept definitions"
            , "rdfs:comment":   "# Concept definitions\r\n\r\n"+
                                "This collection defines types, views and fields that can be used "+
                                "to associate concepts (based on the SKOS vocabulary) with entities."+
                                "\r\n"
            , "annal:inherit_from":      "_coll/Resource_defs"
            , "annal:comment":  "Initialized by: `annalist-manager installcollection`"
            , "annal:default_view_type": "Concept"
            }
        }
    # Journal type plus media-resource support; inherits from Concept_defs.
    , "Journal_defs":
        { 'data_dir': "Journal_defs"
        , 'coll_meta':
            { "rdfs:label":     "Journal and resource definitions"
            , "rdfs:comment":   "# Journal and resource definitions"+
                                "\r\n\r\n"+
                                "This collection defines types, views and fields that "+
                                "can be used to incorporate references to uploaded, "+
                                "imported or linked media resources into entity views."+
                                "\r\n\r\n"+
                                "Also defines a \"Journal\" type that can be used to record "+
                                "(mostly) unstructured information about some process, "+
                                "along with associated resources."+
                                "\r\n"
            , "annal:inherit_from":        "_coll/Concept_defs"
            , "annal:comment":  "Initialized by: `annalist-manager installcollection`"
            , "annal:default_view_id":     "Journal_note_view"
            , "annal:default_view_type":   "Journal"
            , "annal:default_view_entity": "01_journal_resources"
            }
        }
    # Bibliographic types and views, loosely based on BibJSON.
    , "Bibliography_defs":
        { 'data_dir': "Bibliography_defs"
        , 'coll_meta':
            { "rdfs:label":     "Bibliography definitions"
            , "rdfs:comment":   "# Bibliography definitions"+
                                "\r\n\r\n"+
                                "Defines types and views for bibliographic definitions, "+
                                "based loosely on BibJSON."
            , "annal:comment":  "Initialized by `annalist-manager installcollection`"
            }
        }
    # Definitions for building RDF schema (vocabulary) collections.
    , "RDF_schema_defs":
        { 'data_dir': "RDF_schema_defs"
        , 'coll_meta':
            { "rdfs:label":     "RDF schema terms for defining vocabularies"
            , "rdfs:comment":   "# Definitions for defining RDF schema for vocabularies"+
                                "\r\n\r\n"+
                                "This Annalist collection contains definitions that may "+
                                "be imported to creaing RDF schema definitions as an "+
                                "Annalist collection."+
                                "\r\n\r\n"+
                                "NOTE: current limitations of Annalist mean that the "+
                                "exported JSON-LD does not directly use standard "+
                                "RDF schema terms for everything. "+
                                "For example, subclasses are referenced using a "+
                                "local URI reference rather than the global "+
                                "absolute URI, which can be obtained by defererencing "+
                                "the given reference and extracting the `annal:uri` "+
                                "value from there."+
                                "\r\n"
            , "annal:comment":  "Initialized by: `annalist-manager installcollection`"
            }
        }
    # Schema for the Annalist (annal:) namespace; inherits from RDF_schema_defs.
    , "Annalist_schema":
        { 'data_dir': "Annalist_schema"
        , 'coll_meta':
            { "rdfs:label":     "Schema definitions for terms in the Annalist namespace"
            , "rdfs:comment":   "# Schema definitions for terms in the Annalist namespace"+
                                "\r\n\r\n"+
                                "This is an Annalist collection which describes terms "+
                                "in the Annalist (`annal:`) namespace."+
                                "\r\n\r\n"+
                                "It uses definitions from collection `RDF_schema_defs`."+
                                "\r\n"
            , "annal:comment":  "Initialized by `annalist-manager installcollection`"
            , "annal:inherit_from": "_coll/RDF_schema_defs"
            , "annal:default_list": "Classes"
            }
        }
    # Sample data matching the Annalist tutorial document.
    , "Tutorial_example_data":
        { 'data_dir': "Tutorial_example_data"
        , 'coll_meta':
            { "rdfs:label":     "Tutorial example: photo collection sample data"
            , "rdfs:comment":   "# Tutorial example: photo collection sample data"+
                                "\r\n\r\n"+
                                "This is an Annalist collection which is an example of the result "+
                                "of performing steps described in the Annalist tutorial document."+
                                "\r\n"
            , "annal:comment":  "Initialized by `annalist-manager installcollection`"
            }
        }
    # Template for adding further installable collections:
    # , "...":
    #     { 'data_dir': "..."
    #     , 'coll_meta':
    #         { "rdfs:label":     "..."
    #         , "rdfs:comment":   "# ...\r\n\r\n"+
    #                             "... "+
    #                             "..."
    #         , "annal:comment":  "Initialized by `annalist-manager installcollection`"
    #         }
    #     }
    })

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/collections_data.py
collections_data.py
"""
Namespaces of URI identifiers used by Annalist.

Defines a small `Namespace` helper that exposes each identifier in a
namespace both as a full URI (via the `URI` attribute holder) and as a
CURIE (via the `CURIE` attribute holder), plus pre-built namespaces for
RDF, RDFS, OWL and the Annalist vocabulary.
"""

from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)


class Urispace(object):
    """
    Placeholder class for URI values in namespace.
    """
    def __init__(self):
        return


class Curiespace(object):
    """
    Placeholder class for CURIE values in namespace.
    """
    def __init__(self):
        return


class Namespace(object):
    """
    Class represents namespace of URI identifiers.

    Provides expressions for URI and CURIE values of each identifier in
    the namespace.

    >>> ns = Namespace("test", "http://example.com/test/")
    >>> cf = ns.mk_curie("foo")
    >>> cf == 'test:foo'
    True
    >>> uf = ns.mk_uri("foo")
    >>> uf == 'http://example.com/test/foo'
    True
    >>> ns.to_uri(cf) == 'http://example.com/test/foo'
    True
    >>> ns.to_uri("notest:bar") == 'notest:bar'
    True
    """

    def __init__(self, prefix, baseUri):
        """
        Initialise a namespace.

        prefix      a CURIE prefix to be associated with this namespace.
        baseUri     a base URI for all names in this namespace
        """
        self._prefix  = prefix
        self._baseUri = baseUri
        self.URI      = Urispace()      # holds full-URI attributes (see makeNamespace)
        self.CURIE    = Curiespace()    # holds CURIE attributes (see makeNamespace)
        return

    def mk_curie(self, name):
        """
        Make a CURIE string for an identifier in this namespace
        """
        return self._prefix+":"+name

    def mk_uri(self, name):
        """
        Make a URI string for an identifier in this namespace
        """
        return self._baseUri+name

    def to_uri(self, curie):
        """
        Converts a supplied CURIE to a URI if it uses the current
        namespace prefix; otherwise returns the value unchanged.
        """
        parts = curie.split(':', 1)
        if (len(parts) == 2) and (parts[0] == self._prefix):
            return self.mk_uri(parts[1])
        return curie

    def __getattr__(self, name):
        """
        Raise AttributeError for any undefined <namespace>.<name>.

        FIX: __getattr__ is invoked only after normal attribute lookup has
        already failed, so the previous "if name not in self.__dict__"
        guard was always true and its return branch was dead code; the
        bare AttributeError also carried no diagnostic detail.  Any name
        reaching here is undefined, so raise directly, including the
        attribute name in the error.
        """
        raise AttributeError(name)


def makeNamespace(prefix, baseUri, names):
    """
    Create a namespace with given prefix, base URI and set of local names.

    Returns the namespace value.  Attributes of the URI attribute are URIs
    for the corresponding identifier (e.g. ANNAL.URI.Site, cf. below).
    Attributes of the CURIE attribute are CURIES (e.g. ANNAL.CURIE.Site).
    """
    ns = Namespace(prefix, baseUri)
    for name in names:
        setattr(ns.URI,   name, ns.mk_uri(name))
        setattr(ns.CURIE, name, ns.mk_curie(name))
    return ns


# """
# Partial enumeration of RDF namespace - add others as needed
# """
RDF = makeNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
    [ "Property", "Statement", "List"
    , "type", "value"
    , "first", "rest", "nil"
    ])

# """
# Partial enumeration of RDFS namespace - add others as needed
# """
RDFS = makeNamespace("rdfs", "http://www.w3.org/2000/01/rdf-schema#",
    [ "Resource", "Class", "Literal", "Container", "Datatype"
    , "label", "comment", "member", "seeAlso"
    ])

# """
# Partial enumeration of OWL namespace
# """
OWL = makeNamespace("owl", "http://www.w3.org/2002/07/owl#",
    [ "Thing", "Nothing"
    , "sameAs", "differentFrom", "equivalentClass"
    ])

# """
# Annalist namespace terms
# """
ANNAL = makeNamespace("annal", "http://purl.org/annalist/2014/#",
    [ "Unknown_type"
    # Entity value types
    , "Collection"
    , "Default_type"
    , "Entity"
    , "EntityData"
    , "EntityRoot"
    , "Enum"
    , "Enum_field_placement"
    , "Enum_list_type"
    , "Enum_render_type"
    , "Enum_value_mode"
    , "Enum_value_type"     # Unused??
    , "Field"
    , "Field_group"
    , "List"
    , "Site"
    , "SiteData"
    , "Type"
    , "Type_Data"
    , "User"
    , "View"
    , "Vocabulary"
    , "Information"
    # Repeat/list group types
    , "Field_list"
    , "Field_superproperty_uri"
    , "Group_field"         # @@deprecated
    , "List_field"
    , "Type_supertype_uri"
    , "View_field"
    # Data value and resource types
    , "Audio"
    , "EntityRef"
    , "Identifier"
    , "Image"
    , "Longtext"
    , "Metadata"
    , "Placement"
    , "Resource"
    , "Richtext"
    , "Text"
    , "Video"
    # Properties in list JSON
    , "entity_list"
    # Properties in internal entities
    , "id", "type_id", "type", "url", "uri"
    # Types, Views, lists and field groups
    , "default_type", "default_view"
    , "supertype_uri"
    , "ns_prefix"
    , "display_type", "type_list", "type_view"
    , "field_aliases", "alias_target", "alias_source"
    , "view_entity_type", "open_view", "view_fields"
    , "task_buttons", "edit_task_buttons", "view_task_buttons"
    , "button_id", "button_label", "button_help"
    , "list_entity_type", "list_entity_selector", "list_fields"
    , "group_entity_type", "group_fields"
    , "field_id"
    # User permissions
    , "user_uri", "user_permission"
    # Field definitions
    , "field_render_type", "field_value_type", "field_value_mode"
    , "field_entity_type"
    , "placeholder", "tooltip", "default_value", "property_uri"
    , "superproperty_uri"
    , "field_ref_type", "field_ref_restriction", "field_ref_field"
    , "field_fields", "repeat_label_add", "repeat_label_delete"
    , "field_name", "field_placement"
    , "group_ref"           # deprecated
    # Collection metadata
    , "software_version", "meta_comment", "inherit_from"
    , "default_list"
    , "default_view_id", "default_view_type", "default_view_entity"
    # Schema properties (like RDF equivalents, but connecting Annalist entities)
    # The intent is that the RDF equivalents can be inferred by looking at the
    # referenced entities.
    , "subclassOf", "subpropertyOf", "domain", "range"
    # Deprecated identifiers - used in entity migration
    , "Slug", "RepeatGroup", "RepeatGroupRow"
    , "options_typeref", "restrict_values", "target_field"
    , "record_type", "field_target_type", "comment"
    , "supertype_uris"
    , "user_permissions"
    ])

if __name__ == "__main__":
    import doctest
    doctest.testmod()

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/identifiers.py
identifiers.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014-2016, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import os.path
import logging
log = logging.getLogger(__name__)

# Annalist configuration and metadata files
#
# Directory layout:
#
#   $BASE_DATA_DIR
#     annalist-site/
#       c/
#         _annalist-site/
#           d/
#             coll_meta.json_ld
#             coll_prov.json_ld
#             coll_context.json_ld
#             _type/
#              :
#         <collection-id>/
#           d/
#             coll_meta.jsonld
#             coll_prov.jsonld
#             _type/
#               <type-id>/
#                 type_meta.jsonld
#                 type_prov.jsonld
#               :
#             _view/
#               <view-id>/
#                 view_meta.jsonld
#                 view_prov.jsonld
#               :
#             _list/
#               <list-id>/
#                 list_meta.jsonld
#                 list_prov.jsonld
#               :
#             (etc.)
#              :
#             <type-id>/
#               <entity-id>/
#                 entity-data.jsonld
#                 entity-prov.jsonld
#               :
#             :
#         <collection-id>/
#           :

# Collection layout constants.
# URL reference patterns use Python %-dict substitution of "id" (and, where
# shown, "type_id"/"coll_id") values.
COLL_TYPEID             = "_coll"
COLL_BASE_DIR           = "d"
COLL_PAGE_DIR           = "p"
COLL_ROOT_CONF_OLD_DIR  = "_annalist_collection"
COLL_BASE_CONF_OLD_DIR  = "../" + COLL_ROOT_CONF_OLD_DIR
COLL_META_FILE          = "coll_meta.jsonld"
COLL_META_TURTLE        = "coll_meta.ttl"
COLL_PROV_FILE          = "coll_prov.jsonld"
COLL_BASE_REF           = COLL_BASE_DIR + "/"
COLL_PAGE_REF           = COLL_PAGE_DIR + "/"
COLL_META_REF           = COLL_BASE_REF + COLL_META_FILE
COLL_PROV_REF           = COLL_BASE_REF + COLL_PROV_FILE
COLL_TURTLE_REF         = COLL_BASE_REF + COLL_META_TURTLE
META_COLL_REF           = "../"
META_COLL_BASE_REF      = "./"
COLL_CONTEXT_FILE       = "coll_context.jsonld"
# COLL_CONTEXT_REF      = COLL_BASE_REF + COLL_CONTEXT_FILE

# Site layout constants.
# Site-wide data is itself stored as a special collection "_annalist_site".
SITE_TYPEID             = "_site"
SITEDATA_ID             = "_annalist_site"
SITEDATA_DIR            = "c/%(id)s"%{'id': SITEDATA_ID}
SITEDATA_OLD_DIR1       = "_annalist_site"
SITEDATA_OLD_DIR2       = SITEDATA_DIR+"/"+COLL_ROOT_CONF_OLD_DIR
SITE_META_PATH          = ""
SITE_META_REF           = "."
SITE_META_FILE          = "site_meta.jsonld"    # Currently not used except to store description data
META_SITE_REF           = "./"
SITE_COLL_VIEW          = "c/%(id)s/"
SITE_COLL_PATH          = "c/%(id)s"
SITE_CONTEXT_FILE       = "site_context.jsonld"
SITE_DATABASE_FILE      = "db.sqlite3"
SITEDATA_BASE_DIR       = SITEDATA_DIR + "/" + COLL_BASE_DIR    # used in tests
SITEDATA_META_FILE      = COLL_META_FILE        # used in views
SITEDATA_PROV_FILE      = COLL_PROV_FILE        # used in views
SITEDATA_CONTEXT_PATH   = "./"                  # used in models
BIBDATA_ID              = "Bibliography_defs"   # used for testing

# -------------------------
# Entities of various types
# -------------------------
#
# NOTE: definitive entity URIs are *without* trailing "/".
# Redirection to a URI with the trailing "/" retrieves a representation of the
# entity, generally an HTML form view.  Redirection to other forms is used for
# alternative representations.
#
# Type records
TYPE_TYPEID             = "_type"               # type id for type records, used in URL
TYPE_DIR                = "_type"               # collection directory in file system
TYPE_DIR_PREV           = "types"               # collection directory in file system
TYPE_META_FILE          = "type_meta.jsonld"    # type metadata file name
TYPE_META_TURTLE        = "type_meta.ttl"       # reference type metadata as Turtle
TYPE_PROV_FILE          = "type_prov.jsonld"    # type provenance file name
COLL_BASE_TYPE_REF      = TYPE_TYPEID + "/%(id)s"   # ref type relative to collection base URL
COLL_TYPE_VIEW          = COLL_BASE_REF + COLL_BASE_TYPE_REF + "/"  # ref type view relative to collection entity
COLL_TYPE_PATH          = COLL_BASE_REF + TYPE_DIR + "/%(id)s"      # type dir relative to collection root dir

# List description records
LIST_TYPEID             = "_list"               # list type id, used in URL
LIST_DIR                = "_list"               # collection directory in file system
LIST_DIR_PREV           = "lists"               # collection directory in file system
LIST_META_FILE          = "list_meta.jsonld"    # list metadata file name
LIST_META_TURTLE        = "list_meta.ttl"       # reference list metadata as Turtle
LIST_PROV_FILE          = "list_prov.jsonld"    # list provenance file name
COLL_BASE_LIST_REF      = LIST_TYPEID + "/%(id)s"   # ref list relative to collection base URL
COLL_LIST_VIEW          = COLL_BASE_REF + COLL_BASE_LIST_REF + "/"  # ref list view relative to collection entity
COLL_LIST_PATH          = COLL_BASE_REF + LIST_DIR + "/%(id)s"      # list dir relative to collection root dir

# View description records
VIEW_TYPEID             = "_view"               # view type id, used in URL
VIEW_DIR                = "_view"               # collection directory in file system
VIEW_DIR_PREV           = "views"               # previous collection directory
VIEW_META_FILE          = "view_meta.jsonld"    # view metadata file name
VIEW_META_TURTLE        = "view_meta.ttl"       # reference view metadata as turtle
VIEW_PROV_FILE          = "view_prov.jsonld"    # view provenance file name
COLL_BASE_VIEW_REF      = VIEW_TYPEID + "/%(id)s"   # ref view relative to collection base URL
COLL_VIEW_VIEW          = COLL_BASE_REF + COLL_BASE_VIEW_REF + "/"  # ref view relative to collection entity
COLL_VIEW_PATH          = COLL_BASE_REF + VIEW_DIR + "/%(id)s"      # view dir relative to collection root dir

# Field-group description records
# NOTE: no Turtle file name is defined for groups (unlike the other record kinds).
GROUP_TYPEID            = "_group"              # group type id, used in URL
GROUP_DIR               = "_group"              # collection directory in file system
GROUP_DIR_PREV          = "groups"              # previous collection directory
GROUP_META_FILE         = "group_meta.jsonld"   # group metadata file name
GROUP_PROV_FILE         = "group_prov.jsonld"   # group provenance file name
COLL_BASE_GROUP_REF     = GROUP_TYPEID + "/%(id)s"  # ref group relative to collection base URL
COLL_GROUP_VIEW         = COLL_BASE_REF + COLL_BASE_GROUP_REF + "/" # ref group view relative to collection entity
COLL_GROUP_PATH         = COLL_BASE_REF + GROUP_DIR + "/%(id)s"     # group dir relative to collection root dir

# Field description records
FIELD_TYPEID            = "_field"              # field type id, used in URL
FIELD_DIR               = "_field"              # collection directory in file system
FIELD_DIR_PREV          = "fields"              # previous collection directory
FIELD_META_FILE         = "field_meta.jsonld"   # field metadata file name
FIELD_META_TURTLE       = "field_meta.ttl"      # reference field metadata as turtle
FIELD_PROV_FILE         = "field_prov.jsonld"   # field provenance file name
COLL_BASE_FIELD_REF     = FIELD_TYPEID + "/%(id)s"  # ref field relative to collection base URL
COLL_FIELD_VIEW         = COLL_BASE_REF + COLL_BASE_FIELD_REF + "/" # ref field view relative to collection entity
COLL_FIELD_PATH         = COLL_BASE_REF + FIELD_DIR + "/%(id)s"     # field dir relative to collection root dir

# User permission records
USER_TYPEID             = "_user"               # type id, used in URL
USER_DIR                = "_user"               # collection directory in file system
USER_DIR_PREV           = "users"               # previous collection directory
USER_META_FILE          = "user_meta.jsonld"    # user metadata file name
USER_META_TURTLE        = "user_meta.ttl"       # reference user metadata as turtle
USER_PROV_FILE          = "user_prov.jsonld"    # user provenance file name
COLL_BASE_USER_REF      = USER_TYPEID + "/%(id)s"   # ref user relative to collection base URL
COLL_USER_VIEW          = COLL_BASE_REF + COLL_BASE_USER_REF + "/"  # ref user relative to collection entity
COLL_USER_PATH          = COLL_BASE_REF + USER_DIR + "/%(id)s"      # user dir relative to collection root dir

# Vocabulary namespace records
VOCAB_TYPEID            = "_vocab"              # type id, used in URL
VOCAB_DIR               = "_vocab"              # collection directory in file system
VOCAB_DIR_PREV          = "vocabs"              # previous collection directory
VOCAB_META_FILE         = "vocab_meta.jsonld"   # vocab metadata file name
VOCAB_META_TURTLE       = "vocab_meta.ttl"      # reference vocab metadata as Turtle
VOCAB_PROV_FILE         = "vocab_prov.jsonld"   # vocab provenance file name
COLL_BASE_VOCAB_REF     = VOCAB_TYPEID + "/%(id)s"  # ref vocab relative to collection base URL
COLL_VOCAB_VIEW         = COLL_BASE_REF + COLL_BASE_VOCAB_REF + "/" # ref vocab view relative to collection entity
COLL_VOCAB_PATH         = COLL_BASE_REF + VOCAB_DIR + "/%(id)s"     # vocab dir relative to collection root dir

# General information records
# Used for holding application information for display; e.g., for the `about` link.
INFO_TYPEID             = "_info"               # info type id
INFO_DIR                = "_info"               # collection directory in file system
INFO_DIR_PREV           = None                  # previous directory for migration
INFO_META_FILE          = "info_meta.jsonld"    # info data file name
INFO_META_TURTLE        = "info_meta.ttl"       # reference info data as Turtle
INFO_PROV_FILE          = "info_prov.jsonld"    # info provenance file name
COLL_BASE_INFO_REF      = INFO_TYPEID + "/%(id)s"   # ref info relative to collection base URL
COLL_INFO_VIEW          = COLL_BASE_REF + COLL_BASE_INFO_REF + "/"  # ref info view relative to collection entity
COLL_INFO_PATH          = COLL_BASE_REF + INFO_DIR + "/%(id)s"      # info dir relative to collection root dir

# Enumerated value descriptions
ENUM_FIELD_PLACEMENT_ID     = "_enum_field_placement"   # Field placement options
ENUM_LIST_TYPE_ID           = "_enum_list_type"         # List type (list, grid)
ENUM_RENDER_TYPE_ID         = "_enum_render_type"       # Field render type
ENUM_VALUE_MODE_ID          = "_enum_value_mode"        # Field value mode (direct, entity, upload, etc.)
ENUM_VALUE_TYPE_ID          = "_enum_value_type"        # Field value type (text, longtext, etc.)
ENUM_FIELD_PLACEMENT_DIR    = ENUM_FIELD_PLACEMENT_ID   # Field placement options
ENUM_LIST_TYPE_DIR          = ENUM_LIST_TYPE_ID         # List type (list, grid)
ENUM_RENDER_TYPE_DIR        = ENUM_RENDER_TYPE_ID       # Field render type
ENUM_VALUE_MODE_DIR         = ENUM_VALUE_MODE_ID        # Field value mode (direct, entity, upload, etc.)
ENUM_VALUE_TYPE_DIR         = ENUM_VALUE_TYPE_ID        # Field value type (text, longtext, etc.)
# Previous (pre-migration) directory names for enumerated value records:
ENUM_FIELD_PLACEMENT_DIR_PREV1  = "enums/Enum_field_placement"
ENUM_LIST_TYPE_DIR_PREV1        = "enums/Enum_list_type"
ENUM_RENDER_TYPE_DIR_PREV1      = "enums/Enum_render_type"
ENUM_VALUE_MODE_DIR_PREV1       = "enums/Enum_value_mode"
ENUM_VALUE_TYPE_DIR_PREV1       = "enums/Enum_value_type"
ENUM_FIELD_PLACEMENT_DIR_PREV2  = "_enum/Enum_field_placement"
ENUM_LIST_TYPE_DIR_PREV2        = "_enum/Enum_list_type"
ENUM_RENDER_TYPE_DIR_PREV2      = "_enum/Enum_render_type"
ENUM_VALUE_MODE_DIR_PREV2       = "_enum/Enum_value_mode"
ENUM_VALUE_TYPE_DIR_PREV2       = "_enum/Enum_value_type"
ENUM_META_FILE          = "enum_meta.jsonld"    # enum metadata file name
ENUM_META_TURTLE        = "enum_meta.ttl"       # reference enum metadata as Turtle
ENUM_PROV_FILE          = "enum_prov.jsonld"    # enum provenance file name
COLL_BASE_ENUM_REF      = "%(type_id)s/%(id)s"  # ref enum relative to collection base URL
COLL_ENUM_PATH          = COLL_BASE_REF + "%(type_id)s/%(id)s"
COLL_ENUM_VIEW          = COLL_ENUM_PATH + "/"  # ref enum view relative to collection entity

# Record type data records (these act as parents for Entity data records)
TYPEDATA_TYPEID         = "_entitytypedata"         # typedata id
TYPEDATA_META_FILE      = "type_data_meta.jsonld"   # type data metadata file name
TYPEDATA_PROV_FILE      = "type_data_prov.jsonld"   # type data provenance file name
COLL_BASE_TYPEDATA_REF  = "%(id)s"                  # ref type data relative to collection base URL
TYPEDATA_COLL_BASE_REF  = "../"                     # ref collection base from record type data
TYPEDATA_CONTEXT_FILE   = TYPEDATA_COLL_BASE_REF + COLL_CONTEXT_FILE    # ref collection context file
COLL_TYPEDATA_PATH      = "d/%(id)s"                # dir type data relative to collection root dir
COLL_TYPEDATA_VIEW      = "d/%(id)s/"               # ref type data view relative to collection entity

# Entity data records (these contain user data, organized by record type)
# Entity data layout information...
TYPEDATA_ENTITY_VIEW    = "%(id)s/"
TYPEDATA_ENTITY_PATH    = "%(id)s"
COLL_ENTITY_VIEW        = "d/%(type_id)s/%(id)s/"
COLL_ENTITY_PATH        = "d/%(type_id)s/%(id)s"
SITE_ENTITY_VIEW        = "c/%(coll_id)s/d/%(type_id)s/%(id)s/"
SITE_ENTITY_PATH        = "c/%(coll_id)s/d/%(type_id)s/%(id)s"
ENTITY_BASE_REF         = ""
ENTITY_DATA_FILE        = "entity_data.jsonld"
ENTITY_DATA_TURTLE      = "entity_data.ttl"
ENTITY_PROV_FILE        = "entity_prov.jsonld"
ENTITY_LIST_FILE        = "entity_list.jsonld"  # Entity list as JSON resource
ENTITY_LIST_TURTLE      = "entity_list.ttl"     # Entity list as Turtle resource
COLL_BASE_ENTITY_REF    = "%(type_id)s/%(id)s"
ENTITY_COLL_BASE_REF    = "../../"
#@@ NOTE: @base ignored when loading external context - is this correct?
#@@ ENTITY_CONTEXT_FILE  = COLL_CONTEXT_FILE
ENTITY_CONTEXT_FILE     = ENTITY_COLL_BASE_REF + COLL_CONTEXT_FILE
ENTITY_OLD_DATA_FILE    = "entity-data.jsonld"

# Other symbols
TASK_TYPEID             = "_task"               # task id
INITIAL_VALUES_ID       = "_initial_values"     # reserved id used for initial values of new entity

# Lists of directory names for collection migration, etc:
# Pairs of (current directory name, previous directory name to migrate from).
# NOTE: each ENUM_*_DIR appears twice, once per previous naming generation.
DATA_DIRS_CURR_PREV = (
    [ (TYPE_DIR,                    TYPE_DIR_PREV)
    , (LIST_DIR,                    LIST_DIR_PREV)
    , (VIEW_DIR,                    VIEW_DIR_PREV)
    , (GROUP_DIR,                   GROUP_DIR_PREV)
    , (FIELD_DIR,                   FIELD_DIR_PREV)
    , (ENUM_FIELD_PLACEMENT_DIR,    ENUM_FIELD_PLACEMENT_DIR_PREV1)
    , (ENUM_LIST_TYPE_DIR,          ENUM_LIST_TYPE_DIR_PREV1)
    , (ENUM_RENDER_TYPE_DIR,        ENUM_RENDER_TYPE_DIR_PREV1)
    , (ENUM_VALUE_MODE_DIR,         ENUM_VALUE_MODE_DIR_PREV1)
    , (ENUM_VALUE_TYPE_DIR,         ENUM_VALUE_TYPE_DIR_PREV1)
    , (ENUM_FIELD_PLACEMENT_DIR,    ENUM_FIELD_PLACEMENT_DIR_PREV2)
    , (ENUM_LIST_TYPE_DIR,          ENUM_LIST_TYPE_DIR_PREV2)
    , (ENUM_RENDER_TYPE_DIR,        ENUM_RENDER_TYPE_DIR_PREV2)
    , (ENUM_VALUE_MODE_DIR,         ENUM_VALUE_MODE_DIR_PREV2)
    , (ENUM_VALUE_TYPE_DIR,         ENUM_VALUE_TYPE_DIR_PREV2)
    ])
DATA_DIRS       = [ p[0] for p in DATA_DIRS_CURR_PREV ]     # map(lambda pair:pair[0], DATA_DIRS_CURR_PREV)
DATA_DIRS_PREV  = [ p[1] for p in DATA_DIRS_CURR_PREV ]     # map(lambda pair:pair[1], DATA_DIRS_CURR_PREV)
DATA_VOCAB_DIRS = DATA_DIRS + [VOCAB_DIR]

COLL_DIRS_CURR_PREV = (
    DATA_DIRS_CURR_PREV +
    [ (USER_DIR,    USER_DIR_PREV)
    , (VOCAB_DIR,   VOCAB_DIR_PREV)
    , (INFO_DIR,    INFO_DIR_PREV)
    ])
COLL_DIRS       = [ p[0] for p in COLL_DIRS_CURR_PREV ]
# Filter drops falsy "previous" names (e.g. INFO_DIR_PREV is None).
COLL_DIRS_PREV  = [ p[1] for p in COLL_DIRS_CURR_PREV if p[1] ]

# Name generation suffixes for tasks that generate new records
SUFFIX_LIST         = ""
SUFFIX_VIEW         = ""
SUFFIX_TYPE         = ""
SUFFIX_SUBTYPE      = "_subtype"
SUFFIX_SUBPROPERTY  = "_subproperty"
SUFFIX_REPEAT       = "_many"
SUFFIX_REPEAT_P     = "_many"
SUFFIX_SEQUENCE     = "_list"
SUFFIX_SEQUENCE_P   = "_list"
SUFFIX_REF_FIELD    = "_ref"        # Reference field name...
SUFFIX_REF_FIELD_P  = "_ref"        # Reference field property ...

class Layout(object):
    """
    A dynamically created layout value with paths that are dynamically
    constructed using a supplied base directory.
    """

    def __init__(self, base_data_dir, site_dir_name):
        """
        Dynamically initialize a layout value.

        base_data_dir   base directory under which site data is stored.
        site_dir_name   name of the site directory within base_data_dir;
                        combined with base_data_dir to give SITE_PATH.
        """
        self.BASE_DIR           = base_data_dir
        self.SITE_DIR_NAME      = site_dir_name
        # The remaining attributes mirror module-level constants so callers
        # can reach them through a Layout instance.
        self.SITEDATA_ID        = SITEDATA_ID
        self.SITEDATA_DIR       = SITEDATA_DIR
        self.SITEDATA_OLD_DIR1  = SITEDATA_OLD_DIR1
        self.SITEDATA_OLD_DIR2  = SITEDATA_OLD_DIR2
        self.SITEDATA_BASE_DIR  = SITEDATA_BASE_DIR     # e.g. c/_annalist_site/d
        self.SITE_PATH          = os.path.join(base_data_dir, site_dir_name)
        self.SITE_META_FILE     = SITE_META_FILE
        self.SITE_DATABASE_FILE = SITE_DATABASE_FILE
        return

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/layout.py
layout.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) class Annalist_Error(Exception): """ General Annalist error """ def __init__(self, value=None, msg="Annalist error"): super(Annalist_Error, self).__init__(value, msg) self._msg = msg self._value = value return def __str__(self): txt = self._msg if self._value: txt += ": "+repr(self._value) return txt def __repr__(self): return ( "Annalist_Error(%s, value=%s)"% (repr(self._msg), repr(self._value))) class UnexpectedValue_Error(Annalist_Error): """ Annalist unexpected value error Typically raised when an entity contains a deprecated field """ def __init__(self, value=None, msg="Entity not found"): super(UnexpectedValue_Error, self).__init__(value, msg) return class EntityNotFound_Error(Annalist_Error): """ Annalist entity (resource) not found error. Typically raised when a required file is missing. The offending filename should be used for the exception value. """ def __init__(self, value=None, msg="Entity not found"): super(EntityNotFound_Error, self).__init__(value, msg) return class TargetIdNotFound_Error(Annalist_Error): """ Annalist target entity id not found error. Raised for a field that should reference a target entity that does not contain a target entity id. Value is: (type_id, field_name) """ def __init__(self, value=("@@notype","@@noprop"), msg="Target entity not selected"): value_s = ": (expected reference to type '%s' for field '%s')"%value super(TargetIdNotFound_Error, self).__init__(None, msg+value_s) return class TargetEntityNotFound_Error(Annalist_Error): """ Annalist target entity not found error. Raised for a field that shoukd reference a target entity, but which references a non-existent entity. Value is: (type_id, entity_id). 
""" def __init__(self, value=("@@notype","@@noprop"), msg="Referenced target entity not found"): value_s = ": (reference to %s/%s)"%value super(TargetEntityNotFound_Error, self).__init__(None, msg+value_s) return # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/exceptions.py
exceptions.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import os import os.path import shutil import json import datetime from collections import OrderedDict from packaging.version import Version # from distutils.version import LooseVersion from django.conf import settings from utils.py3porting import isoformat_space import annalist from annalist import layout from annalist import message from annalist.exceptions import Annalist_Error from annalist.identifiers import RDF, RDFS, ANNAL from annalist.util import valid_id, extract_entity_id, make_type_entity_id from annalist.models.entity import Entity from annalist.models.annalistuser import AnnalistUser from annalist.models.collectiontypecache import CollectionTypeCache from annalist.models.collectionfieldcache import CollectionFieldCache from annalist.models.collectionvocabcache import CollectionVocabCache from annalist.models.recordtype import RecordType from annalist.models.recordview import RecordView from annalist.models.recordlist import RecordList from annalist.models.recordfield import RecordField from annalist.models.recordgroup import RecordGroup, RecordGroup_migration from annalist.models.recordvocab import RecordVocab from annalist.models.rendertypeinfo import ( is_render_type_literal, is_render_type_id, is_render_type_set, is_render_type_list, is_render_type_object, ) # --------------------------------------------------------------------------- # # Static data for collection data caches # # --------------------------------------------------------------------------- type_cache = CollectionTypeCache() field_cache = CollectionFieldCache() vocab_cache = CollectionVocabCache() # --------------------------------------------------------------------------- # # 
Collection class # # --------------------------------------------------------------------------- class Collection(Entity): _entitytype = ANNAL.CURIE.Collection _entitytypeid = layout.COLL_TYPEID _entityview = layout.SITE_COLL_VIEW _entityroot = layout.SITE_COLL_PATH _entitybase = layout.COLL_BASE_REF _entityfile = layout.COLL_META_FILE _entityref = layout.META_COLL_REF _contextbase = layout.META_COLL_BASE_REF _contextref = layout.COLL_CONTEXT_FILE def __init__(self, parentsite, coll_id, altparent=None): """ Initialize a new Collection object. parentsite is the parent site from which the new collection is descended. coll_id the collection identifier for the collection altparent is an alternative parent to search for descendents of the new Collection. Effectively, the new Collection inherits definitions from this alternative parent. """ # log.debug("Collection.__init__: coll_id %s, parent dir %s"%(coll_id, parentsite._entitydir)) if altparent is not None: if not isinstance(altparent, Collection): msg = "Collection altparent value must be a Collection (got %r)"%(altparent,) log.error(msg) raise ValueError(msg) self._parentsite = parentsite self._parentcoll = ( altparent or None if coll_id == layout.SITEDATA_ID else parentsite.site_data_collection() ) super(Collection, self).__init__(parentsite, coll_id, altparent=self._parentcoll) return def _migrate_values(self, collmetadata): """ Collection data format migration method. 
""" migration_map = ( [ (ANNAL.CURIE.comment, ANNAL.CURIE.meta_comment ) ]) collmetadata = self._migrate_values_map_field_names(migration_map, collmetadata) collmetadata[ANNAL.CURIE.id] = self._entityid # In case directory renamed by hand return collmetadata def flush_collection_caches(self): """ Flush all caches associated with the current collection """ type_cache.flush_cache(self) field_cache.flush_cache(self) vocab_cache.flush_cache(self) return @classmethod def flush_all_caches(self): """ Flush all caches associated with all collections """ type_cache.flush_all() field_cache.flush_all() vocab_cache.flush_all() return # Site def get_site(self): """ Return site object for the site from which the current collection is accessed. """ return self._parentsite def get_site_data(self): """ Return parent object for accessing site data. """ return self._parentsite.site_data_collection() # Alternate collections handling def set_alt_entities(self, altparent): """ Update the alternative parent for the current collection. 
Returns a list of parents accessible from the supplied altparent (including itself) """ # log.info("Collection.set_alt_entities: coll_id %s, altparent_id %s"%(self.get_id(), altparent.get_id())) if not isinstance(altparent, Collection): msg = "Collection.set_alt_entities value must be a Collection (got %r)"%(altparent,) log.error(msg) raise ValueError(msg) parents = super(Collection, self).set_alt_entities(altparent) parentids = [ p.get_id() for p in parents ] # log.info( # "@@ Collection.set_alt_entities: coll: %r, parentids %r"% # (self.get_id(), parentids) # ) if layout.SITEDATA_ID not in parentids: msg = ( "Entity.set_alt_entities cannot access site data (%s) via %r)"% (layout.SITEDATA_ID, altparent) ) log.error(msg) raise ValueError(msg) if not self._ensure_values_loaded(): msg = ( "Entity.set_alt_entities cannot load collection data for %s)"% (self.get_id(),) ) log.error(msg) raise ValueError(msg) self[ANNAL.CURIE.inherit_from] = make_type_entity_id(layout.COLL_TYPEID, altparent.get_id()) if not ( self.get(ANNAL.CURIE.default_list, None) or self.get(ANNAL.CURIE.default_view_type, None) or self.get(ANNAL.CURIE.default_view_entity, None) ): # Copy default collection view details from parent if none defined locally self[ANNAL.CURIE.default_list] = altparent.get(ANNAL.CURIE.default_list, None) self[ANNAL.CURIE.default_view_id] = altparent.get(ANNAL.CURIE.default_view_id, None) self[ANNAL.CURIE.default_view_type] = altparent.get(ANNAL.CURIE.default_view_type, None) self[ANNAL.CURIE.default_view_entity] = altparent.get(ANNAL.CURIE.default_view_entity, None) return parents @classmethod def create(cls, parent, coll_id, coll_meta): """ Overload Entity.create with logic to set alternative parent details for collection configuration inheritance, if an alternative is specified in the collection data supplied. cls is the Collection class object. parent is the parent from which the collection is descended. coll_id is the local identifier (slug) for the collection. 
coll_meta is a dictionary of collection metadata values that are stored for the created collection. Returns the created Collection instance. """ # log.debug("Collection.create: %s, altscope %s"%(coll_id, altscope)) coll = super(Collection, cls).create(parent, coll_id, coll_meta) if coll is not None: cls._set_alt_parent_coll(parent, coll) return coll @classmethod def _migrate_collection_config_dir(cls, parent, coll_id): # If old collection layout is present, migrate to new layout using "d/ # Rename collection configuration directories and files individually so that # existing data directories are not touched. parent_base_dir, parent_meta_file = parent._dir_path() coll_root_dir = os.path.join(parent_base_dir, layout.SITE_COLL_PATH%{"id": coll_id}) coll_base_dir = os.path.join(coll_root_dir, layout.COLL_BASE_DIR) coll_conf_old_dir = os.path.join(coll_root_dir, layout.COLL_ROOT_CONF_OLD_DIR) #@@ TODO: remove this during 1.x prelease cycle, # not covered by tests. Or remove entire method. if os.path.isdir(coll_conf_old_dir): log.info("Migrate old configuration from %s"%(coll_conf_old_dir,)) for old_name in os.listdir(coll_conf_old_dir): old_path = os.path.join(coll_conf_old_dir, old_name) if ( ( os.path.isdir(old_path) ) or ( os.path.isfile(old_path) and old_path.endswith(".jsonld") ) ): log.info("- %s -> %s"%(old_name, coll_base_dir)) new_path = os.path.join(coll_base_dir, old_name) try: os.rename(old_path, new_path) except Exception as e: msg = (message.COLL_MIGRATE_DIR_FAILED% { "id": coll_id , "old_path": old_path, "new_path": new_path , "exc": e } ) log.error("Collection._migrate_collection_config_dir: "+msg) assert False, msg # Rename old config dir to avoid triggering this logic again coll_conf_saved_dir = coll_conf_old_dir+".saved" try: os.rename(coll_conf_old_dir, coll_conf_saved_dir) except Exception as e: msg = (message.COLL_MIGRATE_DIR_FAILED% { "id": coll_id , "old_path": coll_conf_old_dir, "new_path": coll_conf_saved_dir , "exc": e } ) 
log.error("Collection._migrate_collection_config_dir: "+msg) assert False, msg #@@ return def _post_update_processing(self, entitydata, post_update_flags): """ Default method for post-update processing. This method is called just after collection metadata has been created or updated. For a collection, the caches are flushed as a change to the parent collection may mean that cached values are no longer applicable. (Updating a collection description is considered to be a relatively rare operation.) """ self.flush_collection_caches() return entitydata @classmethod def load(cls, parent, coll_id, altscope=None): """ Overload Entity.load with logic to set alternative parent details for collection configuration inheritance, if an alternative is specified in the collection data loaded. cls is the Collection class object. parent is the parent from which the collection is descended. coll_id is the local identifier (slug) for the collection. altscope if supplied, indicates a scope other than the current collection to search for children. Returns an instance of the indicated Collection class with data loaded from the corresponding Annalist storage, or None if there is no such entity. """ # log.debug("@@ Collection.load: %s, altscope %s"%(coll_id, altscope)) cls._migrate_collection_config_dir(parent, coll_id) coll = super(Collection, cls).load( parent, coll_id, altscope=altscope ) if coll is not None: cls._set_alt_parent_coll(parent, coll) return coll @classmethod def _set_alt_parent_coll(cls, parent, coll): """ Set alternative parent collection - sets up search path for subsequent references. 
""" coll_id = coll.get_id() parent_coll_id = extract_entity_id(coll.get(ANNAL.CURIE.inherit_from, None)) if parent_coll_id and parent_coll_id != layout.SITEDATA_ID: parent_coll = Collection.load(parent, parent_coll_id) if parent_coll is None: err_msg = message.COLL_PARENT_NOT_EXIST%{"id": coll_id, "parent_id": parent_coll_id} coll.set_error(err_msg) log.warning("Collection._set_alt_parent_coll: "+err_msg) else: log.debug( "Collection._set_alt_parent_coll: coll %s references parent %s"% (coll_id, parent_coll_id) ) coll.set_alt_entities(parent_coll) return coll # Software compatibility version def update_software_compatibility_version(self): # (assumes data loaded) ver = self.get(ANNAL.CURIE.software_version, None) or "0.0.0" if Version(ver) < Version(annalist.__version_data__): self[ANNAL.CURIE.software_version] = annalist.__version_data__ self._save() # User permissions def create_user_permissions(self, user_id, user_uri, user_name, user_description, user_permissions=["VIEW"] ): user_values = ( { ANNAL.CURIE.type: ANNAL.CURIE.User , RDFS.CURIE.label: user_name , RDFS.CURIE.comment: user_description , ANNAL.CURIE.user_uri: user_uri , ANNAL.CURIE.user_permission: user_permissions }) user = AnnalistUser.create(self, user_id, user_values) return user def get_user_permissions(self, user_id, user_uri): """ Get a user permissions record (AnnalistUser). To return a value, both the user_id and the user_uri (typically a mailto: URI, but may be any *authenticated* identifier) must match. This is to prevent access to records of a deleted account being granted to a new account created with the same user_id (username). user_id local identifier for the type to retrieve. user_uri authenticated identifier associated with the user_id. That is, the authentication service used is presumed to confirm that the identifier belongs to the user currently logged in with the supplied username. returns an AnnalistUser object for the identified user, or None. 
This object contains information about permissions granted to the user in the current collection. """ user = AnnalistUser.load(self, user_id, altscope="user") # log.debug("Collection.get_user_permissions: user_id %s, user_uri %s, user %r"% # (user_id, user_uri, user) # ) if user: for f in [RDFS.CURIE.label, RDFS.CURIE.comment, ANNAL.CURIE.user_uri, ANNAL.CURIE.user_permission]: if f not in user: user = None break if user and user[ANNAL.CURIE.user_uri] != user_uri: user = None # URI mismatch: return None. return user # Vocabulary namespaces def cache_get_vocab(self, vocab_id): """ Retrieve namespace vocabulary entity for id (namespace prefix) from cache. Returns namespace vocabulary entity if found, otherwise None. """ vocab_cache.get_vocab(self, vocab_id) t = vocab_cache.get_vocab(self, vocab_id) # Was it previously created but not cached? if not t and RecordType.exists(self, vocab_id, altscope="all"): msg = ( "Collection.get_vocab %s present but not cached for collection %s"% (vocab_id, self.get_id()) ) log.warning(msg) t = RecordType.load(self, vocab_id, altscope="all") vocab_cache.set_vocab(self, t) # raise ValueError(msg) #@@ (used in testing to help pinpoint errors) return t # Record types def types(self, altscope="all"): """ Iterator over record types stored in the current collection. """ return type_cache.get_all_types(self, altscope=altscope) def cache_add_type(self, type_entity): """ Add or update type information in type cache. """ log.debug("Collection.cache_add_type %s in %s"%(type_entity.get_id(), self.get_id())) type_cache.remove_type(self, type_entity.get_id()) type_cache.set_type(self, type_entity) return def cache_get_type(self, type_id): """ Retrieve type from cache. Returns type entity if found, otherwise None. """ type_cache.get_type(self, type_id) t = type_cache.get_type(self, type_id) # Was it previously created but not cached? 
if not t and RecordType.exists(self, type_id, altscope="all"): msg = ( "Collection.get_type %s present but not cached for collection %s"% (type_id, self.get_id()) ) log.warning(msg) t = RecordType.load(self, type_id, altscope="all") type_cache.set_type(self, t) # raise ValueError(msg) #@@ (used in testing to help pinpoint errors) return t def cache_remove_type(self, type_id): """ Remove type from type cache. """ type_cache.remove_type(self, type_id) return def cache_get_all_type_ids(self, altscope="all"): """ Iterator over type ids of types stored in the current collection. """ return type_cache.get_all_type_ids(self, altscope=altscope) def cache_get_supertypes(self, type_entity): """ Return supertypes of supplied type. """ type_uri = type_entity.get_uri() return type_cache.get_type_uri_supertypes(self, type_uri) def cache_get_subtypes(self, type_entity): """ Return subtypes of supplied type. """ type_uri = type_entity.get_uri() return type_cache.get_type_uri_subtypes(self, type_uri) def cache_get_subtype_uris(self, type_uri): """ Return subtype URIs of supplied type URI. The suplied URI is not itself required to be declared as identifying a defined type entity. """ return type_cache.get_type_uri_subtype_uris(self, type_uri) def cache_get_supertype_uris(self, type_uri): """ Return supertype URIs of supplied type URI. This returns all supertype URIs declared by the supertypes, even when there is not corresponding type entity defined. """ return type_cache.get_type_uri_supertype_uris(self, type_uri) def add_type(self, type_id, type_meta): """ Add a new or updated record type to the current collection type_id identifier for the new type, as a string with a form that is valid as URI path segment. type_meta a dictionary providing additional information about the type to be created. Returns a RecordType object for the newly created type. 
""" log.debug("Collection.add_type %s in %s"%(type_id, self.get_id())) t = RecordType.create(self, type_id, type_meta) return t def get_type(self, type_id): """ Retrieve identified type description type_id local identifier for the type to retrieve. returns a RecordType object for the identified type, or None. """ if not valid_id(type_id): msg = "Collection %s get_type(%s) invalid id"%(self.get_id(), type_id) log.error(msg) raise ValueError(msg, type_id) return self.cache_get_type(type_id) def get_uri_type(self, type_uri): """ Return type entity corresponding to the supplied type URI, or None if not found. """ t = type_cache.get_type_from_uri(self, type_uri) return t def remove_type(self, type_id): """ Remove identified type description type_id local identifier for the type to remove. Returns None on success, or a non-False status code if the type is not removed. """ s = RecordType.remove(self, type_id) return s # Record views def views(self, altscope="all"): """ Generator enumerates and returns record views that may be stored """ for f in self._children(RecordView, altscope=altscope): v = self.get_view(f) if v and v.get_id() != layout.INITIAL_VALUES_ID: yield v return def add_view(self, view_id, view_meta): """ Add a new record view to the current collection view_id identifier for the new view, as a string with a form that is valid as URI path segment. view_meta a dictionary providing additional information about the view to be created. returns a RecordView object for the newly created view. """ v = RecordView.create(self, view_id, view_meta) return v def get_view(self, view_id): """ Retrieve identified view description view_id local identifier for the view to retrieve. returns a RecordView object for the identified view, or None. """ v = RecordView.load(self, view_id, altscope="all") return v def remove_view(self, view_id): """ Remove identified view description view_id local identifier for the view to remove. 
Returns None on success, or a non-False status code if the view is not removed. """ s = RecordView.remove(self, view_id) return s def set_default_view(self, view_id, type_id, entity_id): """ Set and save the default list to be displayed for the current collection. """ self[ANNAL.CURIE.default_view_id] = view_id self[ANNAL.CURIE.default_view_type] = type_id self[ANNAL.CURIE.default_view_entity] = entity_id self._save() return def get_default_view(self): """ Return the default view id, type and entity to be displayed for the current collection. """ view_id = self.get(ANNAL.CURIE.default_view_id, None) type_id = self.get(ANNAL.CURIE.default_view_type, None) entity_id = self.get(ANNAL.CURIE.default_view_entity, None) # log.info("Collection.get_default_view: %s/%s/%s"%(view_id, type_id, entity_id)) return (view_id, type_id, entity_id) # Record lists def lists(self, altscope="all"): """ Generator enumerates and returns record lists that may be stored """ for f in self._children(RecordList, altscope=altscope): l = self.get_list(f) if l and l.get_id() != layout.INITIAL_VALUES_ID: yield l return def add_list(self, list_id, list_meta): """ Add a new record list to the current collection list_id identifier for the new list, as a string with a form that is valid as URI path segment. list_meta a dictionary providing additional information about the list to be created. returns a RecordList object for the newly created list. """ l = RecordList.create(self, list_id, list_meta) return l def get_list(self, list_id): """ Retrieve identified list description list_id local identifier for the list to retrieve. returns a RecordList object for the identified list, or None. """ l = RecordList.load(self, list_id, altscope="all") return l def remove_list(self, list_id): """ Remove identified list description list_id local identifier for the list to remove. Returns None on success, or a non-False status code if the list is not removed. 
""" s = RecordList.remove(self, list_id) return s def set_default_list(self, list_id): """ Set and save the default list to be displayed for the current collection. """ self[ANNAL.CURIE.default_list] = list_id self[ANNAL.CURIE.default_view_id] = None self[ANNAL.CURIE.default_view_type] = None self[ANNAL.CURIE.default_view_entity] = None self._save() return def get_default_list(self): """ Return the default list to be displayed for the current collection. """ list_id = self.get(ANNAL.CURIE.default_list, None) if list_id and not RecordList.exists(self, list_id, altscope="all"): log.warning( "Default list %s for collection %s does not exist"% (list_id, self.get_id()) ) list_id = None return list_id # View (and list) fields and properties def fields(self, altscope="all"): """ Iterator over view fields stored in the current collection. """ return field_cache.get_all_fields(self, altscope=altscope) def cache_add_field(self, field_entity): """ Add or update field information in field cache. """ log.debug("Collection.cache_add_field %s in %s"%(field_entity.get_id(), self.get_id())) field_cache.remove_field(self, field_entity.get_id()) field_cache.set_field(self, field_entity) return def cache_get_field(self, field_id): """ Retrieve field from cache. Returns field entity if found, otherwise None. """ t = field_cache.get_field(self, field_id) # Was it previously created but not cached? if not t and RecordField.exists(self, field_id, altscope="all"): msg = ( "Collection.get_field %s present but not cached for collection %s"% (field_id, self.get_id()) ) log.warning(msg) t = RecordField.load(self, field_id, altscope="all") field_cache.set_field(self, t) return t def cache_remove_field(self, field_id): """ Remove field from field cache. """ field_cache.remove_field(self, field_id) return def cache_get_all_field_ids(self, altscope="all"): """ Iterator over field ids of fields stored in the current collection. 
""" return field_cache.get_all_field_ids(self, altscope=altscope) def cache_get_subproperty_fields(self, field_entity): """ Return fields that use subproperties of supplied field's property URI. """ property_uri = field_entity.get_property_uri() return field_cache.get_subproperty_fields(self, property_uri) def cache_get_superproperty_fields(self, field_entity): """ Return fields that use superproperties of supplied field's property URI. """ property_uri = field_entity.get_property_uri() return field_cache.get_superproperty_fields(self, property_uri) def cache_get_subproperty_uris(self, property_uri): """ Return subproperty URIs of supplied property URI. The suplied URI is not itself required to be declared as used by a defined field entity. """ return field_cache.get_subproperty_uris(self, property_uri) def cache_get_superproperty_uris(self, property_uri): """ Return superproperty URIs of supplied property URI. This returns all superproperty URIs declared by the field, and any fields that use the superproperty URIs, even when there is no corresponding field definition. """ return field_cache.get_superproperty_uris(self, property_uri) def add_field(self, field_id, field_meta): """ Add a new or updated record field to the current collection field_id identifier for the new field, as a string with a form that is valid as URI path segment. field_meta a dictionary providing additional information about the field to be created. Returns a RecordField object for the newly created field. """ log.debug("Collection.add_field %s in %s"%(field_id, self.get_id())) f = RecordField.create(self, field_id, field_meta) return f def get_field(self, field_id): """ Retrieve identified field description field_id local identifier for the field to retrieve. returns a RecordField object for the identified field, or None. 
""" if not valid_id(field_id, reserved_ok=True): msg = "Collection %s get_field(%s) invalid id"%(self.get_id(), field_id) log.error(msg) # Construct and return a placeholder field ph_meta = ( { RDFS.CURIE.label: "(field error)" , ANNAL.CURIE.field_render_type: "_enum_render_type/Placeholder" , ANNAL.CURIE.field_value_mode: "_enum_value_mode/Value_direct" , ANNAL.CURIE.field_placement: "small:0,12" , ANNAL.CURIE.placeholder: "(Invalid field id: '%s')"%(field_id,) }) f = RecordField._child_init(self, "_placeholder") f.set_values(ph_meta) return f return self.cache_get_field(field_id) def get_uri_field(self, property_uri): """ Return field entity corresponding to the supplied property URI """ t = field_cache.get_field_from_property_uri(self, property_uri) return t def remove_field(self, field_id): """ Remove identified field description field_id local identifier for the field to remove. Returns None on success, or a non-False status code if the field is not removed. """ s = RecordField.remove(self, field_id) return s # JSON-LD context data def generate_coll_jsonld_context(self, flags=None): """ (Re)generate JSON-LD context description for the current collection. Returns list of errors, or empty list. """ errs = [] if flags and ("nocontext" in flags): # Skip processing if "nocontext" flag provided return # log.info("Generating context for collection %s"%(self.get_id())) # Build context data context = self.get_coll_jsonld_context() datetime_now = datetime.datetime.today().replace(microsecond=0) datetime_str = isoformat_space(datetime_now) # Assemble and write out context description with self._metaobj( layout.META_COLL_BASE_REF, layout.COLL_CONTEXT_FILE, "wt" ) as context_io: json.dump( { "_comment": "Generated by generate_coll_jsonld_context on %s"%datetime_str , "@context": context }, context_io, indent=2, separators=(',', ': '), sort_keys=True ) # Create collection README.md for human context... 
if self._values: README_vals = ( { "id": self.get_id() , "label": self._values.get("rdfs:label", self.get_id()) , "heading": "" , "comment": self._values.get("rdfs:comment", "") }) if not README_vals["comment"].startswith("#"): README_vals["heading"] = message.COLL_README_HEAD%README_vals README_text = message.COLL_README%README_vals with self._metaobj( layout.META_COLL_REF, "README.md", "wt" ) as readme_io: readme_io.write(README_text) return errs def get_coll_jsonld_context(self): """ Return dictionary containing context structure for collection. Entry '@errs' is set to a list of errors encountered, or an empty list. """ # Use OrderedDict to allow some control over ordering of context file contents: # this is for humane purposes only, and is not technically critical. errs = [] context = OrderedDict( # { "@base": self.get_url() + layout.META_COLL_BASE_REF { ANNAL.CURIE.type: { "@type": "@id" } , ANNAL.CURIE.entity_list: { "@container": "@list" } }) # Collection-local URI prefix context.update( { '_site_': self.get_site().get_url() , '_coll_': self.get_url() , '_base_': self.get_url() + layout.COLL_BASE_REF }) # Common import/upload fields context.update( { 'resource_name': "annal:resource_name" , 'resource_type': "annal:resource_type" }) # upload-file fields context.update( { 'upload_name': "annal:upload_name" , 'uploaded_file': "annal:uploaded_file" , 'uploaded_size': "annal:uploaded_size" }) # import-resource fields context.update( { 'import_name': "annal:import_name" , 'import_url': { "@id": "annal:import_url" , "@type": "@id" } }) # Scan types, generate prefix data for t in self.child_entities(RecordType, altscope="all"): tid = t.get_id() if tid != layout.INITIAL_VALUES_ID: tns = t.get(ANNAL.CURIE.ns_prefix, "") if tns != "": context[tns] = self.get_url() + layout.COLL_TYPEDATA_VIEW%({"id": tid}) # Scan vocabs, generate prefix data (possibly overriding type-derived data) for v in self.child_entities(RecordVocab, altscope="all"): vid = v.get_id() if vid != 
layout.INITIAL_VALUES_ID: if ANNAL.CURIE.uri in v: vuri = v[ANNAL.CURIE.uri] if vuri[-1] not in {":", "/", "?", "#"}: msg = ( "Vocabulary %s namespace URI %s does not end with an expected delimiter"% (vid, vuri) ) log.warning(msg) errs.append(msg) context[vid] = v[ANNAL.CURIE.uri] # Scan view fields and generate context data for property URIs used for v in self.child_entities(RecordView, altscope="all"): view_fields = v.get(ANNAL.CURIE.view_fields, []) for fref in view_fields: fid = extract_entity_id(fref[ANNAL.CURIE.field_id]) vuri = fref.get(ANNAL.CURIE.property_uri, None) furi, fcontext, field_list = self.get_field_uri_jsonld_context( fid, self.get_field_jsonld_context ) if fcontext is not None: fcontext['vid'] = v.get_id() fcontext['fid'] = fid e = self.set_field_uri_jsonld_context(vuri or furi, fid, fcontext, context) errs.extend(e) # If this field contains a list of subfields, scan those # NOTE: current implementation handles only a single level of field nesting if field_list: for subfref in field_list: subfid = extract_entity_id(subfref[ANNAL.CURIE.field_id]) subfuri = subfref.get(ANNAL.CURIE.property_uri, None) furi, fcontext, field_list = self.get_field_uri_jsonld_context( subfid, self.get_field_jsonld_context ) if fcontext is not None: fcontext['fid'] = fid fcontext['subfid'] = subfid e = self.set_field_uri_jsonld_context(subfuri or furi, subfid, fcontext, context) errs.extend(e) # Scan group fields and generate context data for property URIs used #@@TODO - to be deprecated when RecordGroup is removed from codebase # (during 1.0 release cycle?) # In due course, field groups will replaced by inline field lists. # This code does not process field lists for fields referenced by a group. 
#@@ for g in self.child_entities(RecordGroup_migration, altscope="all"): for gref in g[ANNAL.CURIE.group_fields]: fid = extract_entity_id(gref[ANNAL.CURIE.field_id]) guri = gref.get(ANNAL.CURIE.property_uri, None) furi, fcontext, field_list = self.get_field_uri_jsonld_context( fid, self.get_field_jsonld_context ) if fcontext is not None: fcontext['gid'] = g.get_id() fcontext['fid'] = fid e = self.set_field_uri_jsonld_context(guri or furi, fid, fcontext, context) errs.extend(e) if errs: context['@errs'] = errs return context def get_field_uri_jsonld_context(self, fid, get_field_context): """ Access field description, and return field property URI and appropriate property description for JSON-LD context. Returns a triple consisting of the field property URI, the context information to be generated for the field, and a list of any field references contained directly within the field definition (as opposed to a field group reference) If there is no corresponding field description, returns (None, None, None) If no context should be generated for the field URI, returns (uri, None, field_list) The field list returned is 'None' if there is no contained list of fields. """ f = RecordField.load(self, fid, altscope="all") if f is None: return (None, None, None) field_list = f.get(ANNAL.CURIE.field_fields, None) return (f[ANNAL.CURIE.property_uri], get_field_context(f), field_list) def set_field_uri_jsonld_context(self, puri, field_id, fcontext, property_contexts): """ Save property context description into supplied property_contexts dictionary. If the context is already defined, generate warning if there is a compatibility problem. Returns list of errors, or empty list. """ errs = [] if puri: uri_parts = puri.split(":") if len(uri_parts) > 1: # Ignore URIs without ':' if not fcontext: # For diagnostics to locate incompatible use... 
fcontext = {'fid': field_id} if (puri in property_contexts): pcontext = property_contexts[puri] pcontext.pop('err', None) # Drop pevious error(s) from report p_type = pcontext.get("@type", None) f_type = fcontext.get("@type", None) if (p_type != f_type): msg = ( "Incompatible value type for property %s in field %s (new %r; was %r)"% (puri, field_id, fcontext, pcontext) ) log.warning(msg) property_contexts[puri]['err'] = msg errs.append(msg) p_container = pcontext.get("@container", None) f_container = fcontext.get("@container", None) if ( (p_container != f_container) and ( (p_container == "@list") or (f_container == "@list") ) ): msg = ( "Incompatible container type for property %s in field %s (new %r; was %r)"% (puri, field_id, fcontext, pcontext) ) # msgp = "pcontext @type %s, @container %s"%(p_type, p_container) # msgf = "fcontext @type %s, @container %s"%(f_type, f_container) log.warning(msg) # print "@@ "+msg # print "@@ pcontext @type %s, @container %s"%(p_type, p_container) # print "@@ fcontext @type %s, @container %s"%(f_type, f_container) property_contexts[puri]['err'] = msg errs.append(msg) # errs.append(msgp) # errs.append(msgf) elif ( fcontext and ( uri_parts[0] in property_contexts ) or # Prefix defined vocab? ( uri_parts[0] in ["http", "https", "file"] ) ): # Full URI? property_contexts[puri] = fcontext # msg = "Save context info for %s in field %s (new %r)"% (puri, field_id, fcontext) # print "@@ "+msg return errs # @@TODO: move this away from model logic, as it represents a dependency on view logic? @staticmethod def get_field_jsonld_context(fdesc): """ Returns a context description for the supplied field description. Returns None if no property context information is needed for the supplied field. 
""" rtype = extract_entity_id(fdesc[ANNAL.CURIE.field_render_type]) vmode = extract_entity_id(fdesc[ANNAL.CURIE.field_value_mode]) if vmode == "Value_entity": rtype = "Enum" elif vmode == "Value_import": rtype = "URIImport" elif vmode == "Value_upload": rtype = "FileUpload" if is_render_type_literal(rtype): fcontext = {} # { "@type": "xsd:string" } elif is_render_type_id(rtype): fcontext = { "@type": "@id" } # Add type from field descr? elif is_render_type_object(rtype): fcontext = {} else: msg = "Unexpected value mode or render type (%s, %s)"%(vmode, rtype) log.error(msg) raise ValueError(msg) if is_render_type_set(rtype): fcontext["@container"] = "@set" elif is_render_type_list(rtype): fcontext["@container"] = "@list" return fcontext # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/collection.py
collection.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module is used to cache per-collection field information. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2018, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL, RDFS from annalist.models.collectionentitycache import ( Cache_Error, CollectionEntityCacheObject, CollectionEntityCache ) from annalist.models.closurecache import ClosureCache from annalist.models.recordfield import RecordField # --------------------------------------------------------------------------- # # Field-cache object class # # --------------------------------------------------------------------------- class CollectionFieldCacheObject(CollectionEntityCacheObject): """ This class is a field definition cache for a specified collection. It extends class CollectionEntityCacheObject with field-specific logic; notably overriding method _load_entity with additional logic to maintain a superproperty closure cache, and methods to access that cache. """ def __init__(self, coll_id, entity_cls=RecordField): """ Initialize a cache object for a specified collection. coll_id Collection id with which the field cache is associated. """ super(CollectionFieldCacheObject, self).__init__(coll_id, entity_cls) self._superproperty_closure = ClosureCache(coll_id, ANNAL.CURIE.superproperty_uri) return def _load_entity(self, coll, field_entity): """ Internal helper method loads field data to cache. Also updates superproperty closure cache. Returns True if new field was added. 
""" field_id = field_entity.get_id() property_uri = field_entity.get_property_uri() field_parent = field_entity.get_parent().get_id() field_data = field_entity.get_save_values() add_field = super(CollectionFieldCacheObject, self)._load_entity( coll, field_entity, entity_uri=property_uri ) if add_field: # Add relations for superproperty references from the new property URI for superproperty_obj in field_data.get(ANNAL.CURIE.superproperty_uri, []): superproperty_uri = superproperty_obj["@id"] self._superproperty_closure.add_rel(property_uri, superproperty_uri) # Also add relations for references *to* the new property URI for try_subproperty_obj in self.get_all_entities(coll): sub_superp_objs = try_subproperty_obj.get(ANNAL.CURIE.superproperty_uri, []) sub_superp_uris = ( [ sub_superp_obj["@id"] for sub_superp_obj in sub_superp_objs ] ) if property_uri in sub_superp_uris: subproperty_uri = try_subproperty_obj.get(ANNAL.CURIE.property_uri, None) if subproperty_uri: self._superproperty_closure.add_rel(subproperty_uri, property_uri) return add_field def _drop_entity(self, coll, field_id): """ Override method that drops an entity from the cache, to also remove references from the superproperty closure cache. Returns the field entity removed, or None if not found. """ field_entity = super(CollectionFieldCacheObject, self)._drop_entity(coll, field_id) if field_entity: property_uri = field_entity.get_property_uri() self._superproperty_closure.remove_val(property_uri) return field_entity def get_superproperty_uris(self, property_uri): """ Returns all superproperty URIs for a specified property URI. Returns all superproperty URIs, even those for which there is no defined field entity. """ return self._superproperty_closure.fwd_closure(property_uri) def get_subproperty_uris(self, property_uri): """ Returns all subproperty URIs for a specified property URI. Returns all subproperty URIs, even those for which there is no defined field entity. 
""" return self._superproperty_closure.rev_closure(property_uri) def get_superproperty_fields(self, coll, property_uri): """ Returns all superproperties for a specified property URI. This method returns only those superproperties that are defined as entities. """ self._load_entities(coll) for st_uri in self.get_superproperty_uris(property_uri): st = self.get_entity_from_uri(coll, st_uri) if st: yield st return def get_subproperty_fields(self, coll, property_uri): """ Returns all subproperties for a specified property URI. This method returns only those subproperties that are defined as entities. """ self._load_entities(coll) for st_uri in self.get_subproperty_uris(property_uri): st = self.get_entity_from_uri(coll, st_uri) if st: yield st return def remove_cache(self): """ Close down and release all collection field cache data """ # log.debug("@@@@remove field cache %r"%(self.get_coll_id(),)) super(CollectionFieldCacheObject, self).remove_cache() self._superproperty_closure.remove_cache() self._superproperty_closure = None return # --------------------------------------------------------------------------- # # Collection field-cache class # # --------------------------------------------------------------------------- class CollectionFieldCache(CollectionEntityCache): """ This class manages field cache objects over multiple collections """ def __init__(self): """ Initialize. Initializes a value cache cache with no per-collection data. """ super(CollectionFieldCache, self).__init__(CollectionFieldCacheObject, RecordField) return # Collection field cache allocation and access methods def set_field(self, coll, field_entity): """ Save a new or updated field definition """ return self.set_entity(coll, field_entity) def remove_field(self, coll, field_id): """ Remove field from collection field cache. Returns the field entity removed if found, or None if not defined. 
""" return self.remove_entity(coll, field_id) def get_field(self, coll, field_id): """ Retrieve a field description for a given field Id. Returns a field object for the specified collection and field Id. """ return self.get_entity(coll, field_id) def get_field_from_uri(self, coll, field_uri): """ Retrieve a field description for a given property URI. Returns a field object for the specified collection and property URI. """ return self.get_entity_from_uri(coll, field_uri) def get_all_field_ids(self, coll, altscope=None): """ Returns all fields currently available for a collection in the indicated scope. Default scope is fields defined directly in the indicated collection. """ return self.get_all_entity_ids(coll, altscope=altscope) def get_all_fields(self, coll, altscope=None): """ Returns all fields currently available for a collection in the indicated scope. Default scope is fields defined directly in the indicated collection. """ return self.get_all_entities(coll, altscope=altscope) def get_superproperty_fields(self, coll, field_uri): """ Returns all superproperties for a specieid property URI. """ field_cache = self._get_cache(coll) return field_cache.get_superproperty_fields(coll, field_uri) def get_subproperty_fields(self, coll, field_uri): """ Returns all subproperties for a specieid property URI. """ field_cache = self._get_cache(coll) return field_cache.get_subproperty_fields(coll, field_uri) def get_superproperty_uris(self, coll, field_uri): """ Returns all superproperties for a specieid property URI. """ field_cache = self._get_cache(coll) return field_cache.get_superproperty_uris(field_uri) def get_subproperty_uris(self, coll, field_uri): """ Returns all subproperties for a specieid property URI. """ field_cache = self._get_cache(coll) return field_cache.get_subproperty_uris(field_uri) # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/collectionfieldcache.py
collectionfieldcache.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import shutil

from django.conf import settings

from annalist                   import layout
from annalist.exceptions        import Annalist_Error
from annalist.identifiers       import ANNAL
from annalist                   import util

from annalist.models.entity     import Entity

class EntityData(Entity):
    """
    Entity class for user data records stored within a RecordTypeData parent.

    Class attributes below configure the storage layout inherited from Entity.
    """

    _entitytype     = ANNAL.CURIE.EntityData        # RDF type CURIE for this entity class
    _entitytypeid   = None                          # Filled in per-instance from parent type id
    _entityroot     = layout.TYPEDATA_ENTITY_PATH   # Path template for entity storage
    _entityview     = layout.TYPEDATA_ENTITY_VIEW   # URL template for entity view
    _entitybase     = layout.ENTITY_BASE_REF        # Base reference for entity resources
    _entityfile     = layout.ENTITY_DATA_FILE       # Filename for stored entity data
    _contextbase    = layout.ENTITY_COLL_BASE_REF   # Base reference for JSON-LD context
    _contextref     = layout.ENTITY_CONTEXT_FILE    # JSON-LD context file reference

    def __init__(self, parent, entity_id):
        """
        Initialize a new Entity Data object, without metadata.

        EntityData objects sit in this entity storage hierarchy:
            Site
                Collection
                    RecordTypeData
                        EntityData
        This arrangement allows entities for different record types
        to be saved into separate directories.

        parent      is the parent collection (RecordTypeData) from which
                    the entity is descended.
        entity_id   the local identifier (slug) for the data record
        """
        # print "@@ EntityData.__init__ id %s, _entitytypeid %s, parent_id %s"%(entity_id, self._entitytypeid, parent.get_id())
        # Default the type id from the parent RecordTypeData's id unless a
        # subclass has fixed it (e.g. built-in types set _entitytypeid).
        self._entitytypeid  = self._entitytypeid or parent.get_id()
        super(EntityData, self).__init__(parent, entity_id)
        # Substitution values used to expand the path/view URL templates
        self._paramdict     = { 'type_id': self._entitytypeid, 'id': entity_id }
        self._entityref     = layout.COLL_BASE_ENTITY_REF%self._paramdict
        self._entityviewuri = parent._entityurl+self._entityview%self._paramdict
        # log.debug("EntityData: _entityviewuri %s"%(self._entityviewuri))
        return

    def _migrate_filenames(self):
        """
        Return filename migration list for entity data

        Returns a list of filenames used for the current entity type in
        previous versions of Annalist software.  If the expected filename
        is not found when attempting to read a file, the _load_values()
        method calls this function to look for any of the filenames returned.
        If found, the file is renamed to the current version filename.
        """
        return [layout.ENTITY_OLD_DATA_FILE]

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/entitydata.py
entitydata.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import shutil

from django.conf import settings

from annalist                   import layout
from annalist.exceptions        import Annalist_Error
from annalist.identifiers       import ANNAL
from annalist                   import util
from annalist.models.entity     import Entity
from annalist.models.entitydata import EntityData
from annalist.util              import extract_entity_id

class RecordView(EntityData):
    """
    Entity class for a record view definition stored in a collection.
    """

    _entitytype     = ANNAL.CURIE.View          # RDF type CURIE for views
    _entitytypeid   = layout.VIEW_TYPEID        # Fixed built-in type id
    _entityroot     = layout.COLL_VIEW_PATH     # Path template for view storage
    _entityview     = layout.COLL_VIEW_VIEW     # URL template for view display
    _entityfile     = layout.VIEW_META_FILE     # Filename for stored view metadata

    def __init__(self, parent, view_id):
        """
        Initialize a new RecordView object, without metadata (yet).

        parent      is the parent collection in which the view is defined.
        view_id     the local identifier for the record view
        """
        super(RecordView, self).__init__(parent, view_id)
        self._parent = parent
        # log.debug("RecordView %s: dir %s"%(view_id, self._entitydir))
        return

    def _migrate_filenames(self):
        """
        Override EntityData method
        """
        # No legacy filenames to migrate for view definitions.
        return None

    def _migrate_values(self, entitydata):
        """
        View description entity format migration method.

        The specification for this method is that it returns an entitydata
        value which is a copy of the supplied entitydata with format
        migrations applied.

        NOTE:  implementations are free to apply migrations in-place.  The
        resulting entitydata should be exactly as the supplied data *should*
        appear in storage to conform to the current format of the data.
        The migration function should be idempotent; i.e.
            x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)
        """
        # Rename legacy property keys to their current equivalents.
        migration_map = (
            [ (ANNAL.CURIE.record_type, ANNAL.CURIE.view_entity_type)
            ])
        entitydata = self._migrate_values_map_field_names(migration_map, entitydata)
        # Rename legacy field ids referenced by the view definition.
        if ANNAL.CURIE.view_fields in entitydata:
            for f in entitydata[ANNAL.CURIE.view_fields]:
                field_id = extract_entity_id(f[ANNAL.CURIE.field_id])
                if field_id == "Field_render":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/Field_render_type"
                if field_id == "Field_type":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/Field_value_type"
                if field_id == "View_target_type":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/View_entity_type"
                if field_id == "List_target_type":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/List_entity_type"
        # Return result
        return entitydata

    def _post_update_processing(self, entitydata, post_update_flags):
        """
        Default post-update processing.

        This method is called when a RecordView entity has been updated.

        It invokes the containing collection method to regenerate the JSON LD
        context for the collection to which the entity belongs.
        """
        self._parent.generate_coll_jsonld_context(flags=post_update_flags)
        return entitydata

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordview.py
recordview.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import os import os.path import shutil from django.conf import settings from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL from annalist import util from annalist.models.entity import Entity from annalist.models.entitydata import EntityData class RecordType(EntityData): _entitytype = ANNAL.CURIE.Type _entitytypeid = layout.TYPE_TYPEID _entityroot = layout.COLL_TYPE_PATH _entityview = layout.COLL_TYPE_VIEW _entityfile = layout.TYPE_META_FILE def __init__(self, parent, type_id): """ Initialize a new RecordType object, without metadta (yet). parent is the parent collection in which the type is defined. type_id the local identifier for the record type """ super(RecordType, self).__init__(parent, type_id) self._parent = parent # log.debug("RecordType %s: dir %s"%(type_id, self._entitydir)) # log.debug("RecordType %s: uri %s"%(type_id, self._entityurl)) return def _migrate_values(self, entitydata): """ Type definition entity format migration method. The specification for this method is that it returns an entitydata value which is a copy of the supplied entitydata with format migrations applied. NOTE: implementations are free to apply migrations in-place. The resulting entitydata should be exctly as the supplied data *should* appear in storage to conform to the current format of the data. The migration function should be idempotent; i.e. x._migrate_values(x._migrate_values(e)) == x._migrate_values(e) """ # Convert representation of supertype URIs to use repeated property instead of # reference to an RDF list. 
if ANNAL.CURIE.supertype_uris in entitydata: if isinstance(entitydata[ANNAL.CURIE.supertype_uris], list): entitydata[ANNAL.CURIE.supertype_uri] = ( [ {'@id': st[ANNAL.CURIE.supertype_uri] } for st in entitydata[ANNAL.CURIE.supertype_uris] ]) del entitydata[ANNAL.CURIE.supertype_uris] # Return result return entitydata def _migrate_filenames(self): """ Override EntityData method """ return None def _post_update_processing(self, entitydata, post_update_flags): """ Post-update processing. This method is called when an entity has been created or updated. """ self._parent.cache_add_type(self) return entitydata def _post_remove_processing(self, post_update_flags): """ Post-remove processing. This method is called when an entity has been removed. """ self._parent.cache_remove_type(self.get_id()) return # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordtype.py
recordtype.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import itertools
import json
import errno
import traceback

from django.conf import settings

from utils.py3porting import is_string, urljoin

from annalist                   import layout
from annalist                   import util
from annalist                   import message
from annalist.exceptions        import Annalist_Error
from annalist.identifiers       import ANNAL
from annalist.models.entityroot import EntityRoot

# -------------------------------------------------------------------------------------------
#
# Helpers
#
# -------------------------------------------------------------------------------------------

def test_not_none(v):
    """
    Helper function tests for a non-None value
    """
    return v is not None

def test_is_true(v):
    """
    Helper function tests for a value that evaluates to Boolean True
    """
    return bool(v)

# -------------------------------------------------------------------------------------------
#
# Entity
#
# -------------------------------------------------------------------------------------------

class Entity(EntityRoot):
    """
    This is the base class for all entities managed by Annalist as
    descendents of some other entity.

    ## Entity access paths

    There are several key reference positions in a hierarchical entity structure:

    root
        is the root URL for the entity, without a trailing "/", also used to
        identify the entity.
    view
        is a URL used to view or retrieve default entity (meta-)data.
    base
        is a base URL for resolving relative references in entity data, and
        corresponds to the container where entity data is stored.
    name
        is the URL of the entity metadata resource (filename)
    prov
        is the URL of the entity provenance resource (filename)

    In many cases, the root and base URLs are the same, but they may differ
    (e.g. for collections).

    These various URLs are constructed from the base URL of the parent entity
    using the following class variables, which must be defined by all subclasses
    of "Entity".  Constant strings for the actual values or value formatting
    templates used are defined in module layout.py.

    _entityroot
        is a relative reference from the parent entity base URL to the
        entity root URL.
    _entityview
        is a relative reference from the parent entity base URL to the
        entity view URL.  This is generally _entityroot with a trailing "/" added.
    _entitybase
        is a relative reference from the entity root URL to the entity base URL.
        This is an empty string if the root is also the base URI, otherwise it
        indicates a sub-container (sub-directory) where entity data is stored.
    _entityfile
        is the resource name (filename) that can be used as a relative reference
        from the entity base URL to access the entity metadata file.
    _entityprov
        is the resource name (filename) that can be used as a relative reference
        from the entity base URL to access the entity provenance file.
    _entityref
        is a reverse relative reference from the base URL to the entity root URL
    _contextbase
        is a relative reference from the entity base URL to the corresponding
        JSON-LD context base URL for the containing collection.
    _contextref
        is a relative reference from the entity base URL to the corresponding
        JSON-LD context file URL for the containing collection.

    Other resources attached to an entity are identified by names in the entity
    metadata, which are resolved against the entity base URL.

    The following methods are provided to access various entity and content URLs:

    entity.get_root_url()               returns the root URL for 'entity'
    entity.get_base_url()               returns the base URL for 'entity'
    entity.get_data_url()               returns the URL for entity metadata as JSON-LD
    entity.get_data_url(resource_name)  returns the URL for specified resource data
    entityroot.get_view_url()           returns the view URL for 'entity'
    """

    _last_id        = None          # Last ID allocated

    _entitytype     = ANNAL.CURIE.Entity
    _entitytypeid   = None
    _entityroot     = "%(id)s"
    _entityview     = "%(id)s/"
    _entitybase     = ""
    _entityfile     = None
    _entityprov     = None
    _entityref      = None
    _contextbase    = None
    _contextref     = None

    # _entitypath   = None          # Relative path from parent to entity (template)
    # _entityview   = "%(id)s/"     # Placeholder for testing
    # _entityfile   = None          # Relative reference to body file from entity base

    def __init__(self, parent, entityid, altparent=None):
        """
        Initialize a new Entity object, possibly without values.  The created
        entity is not saved to disk at this stage - see ._save() method.

        parent      is the parent entity from which the new entity is descended.
        entityid    the local identifier (slug) for the new entity.
        altparent   is an alternative parent entity to search for this entity,
                    using the alternative path for the entity type: this is used
                    to augment explicitly created entities in a collection with
                    site-wide installed metadata entites (i.e. types, views, etc.)
        """
        if not util.valid_id(entityid, reserved_ok=True):
            msg = "Invalid entity identifier: %s"%(entityid)
            log.error(msg)
            raise ValueError(msg)
        relpath = self.relpath(entityid)
        entity_url  = urljoin(parent._entityurl, relpath)
        entity_dir  = os.path.normpath(os.path.join(parent._entitydir, relpath))
        entity_base = parent._entitydir
        # Used as safety check when removing data: fall back to the parent's base
        # directory when the computed entity directory is outside the parent's own.
        if not entity_dir.startswith(entity_base):
            entity_base = parent._entitybasedir
        entityviewurl = urljoin(
            parent._entityviewurl,
            self._entityview%{'id': entityid, 'type_id': self._entitytypeid}
            )
        super(Entity, self).__init__(entity_url, entityviewurl, entity_dir, entity_base)
        self._entityid   = entityid
        self._parent     = parent
        self._ancestorid = entityid     # May be changed by subclass _local_find_alt_parents
        self._altparent  = altparent    # Alternative to current entity to search
        return

    def _get_ref_url(self, baseurl, entityurl, urlref):
        """
        Assemble a URL from supplied base URL and reference.

        The entity type id and id may be interpolated as if the supplied
        reference includes '%(type_id)s' and/or '%(id)s' respectively.

        Returns None when no reference is supplied.
        """
        if urlref is None:
            return None
        rooturl = urljoin(baseurl, entityurl)
        return urljoin(
            rooturl,
            urlref%({"type_id": self._entitytypeid, "id": self._entityid})
            )

    def get_root_url(self, baseurl=""):
        """
        Return entity root URL.
        """
        return self._get_ref_url(baseurl, self._parent._entityurl, self._entityroot)

    def get_base_url(self, baseurl=""):
        """
        Return entity base URL.
        """
        return self._get_ref_url(baseurl, self._entityurl, self._entitybase)

    def get_data_url(self, resource_ref=None, baseurl=""):
        """
        Return entity data URL: the metadata resource by default, or the
        named resource when 'resource_ref' is supplied.
        """
        dataref = resource_ref or self._entityfile
        return self._get_ref_url(baseurl, self.get_base_url(baseurl=baseurl), dataref)

    def get_parent(self):
        """
        Return parent entity
        """
        return self._parent

    def set_alt_entities(self, altparent):
        """
        Update the alternative parent for the current entity.

        Returns a list of parents accessible from the supplied altparent
        (including itself).
        """
        # Set new alternative parent
        self._altparent = altparent
        # Build list of accessible parents, check for recursion
        parents = [self] + self._find_alt_parents(altscope="all")
        return parents

    def _local_find_alt_parents(self):
        """
        Returns a list of alternative parents for the current inheritance branch
        only; i.e. does not attempt to follow altparent chains in referenced
        trees.  (That is handled by `_find_alt_parents` below.)

        This method may be overridden by classes that need to look higher in the
        class inheritance tree to find alternative parent branches.
        """
        return [self._altparent] if self._altparent else []

    def _find_alt_parents(self, altscope=None, parents_seen=[]):
        """
        Local helper function returns a list of entities, not including the
        current entity, that are potentially in scope for containing entities
        considered to belong to the current entity.

        This function also checks for recursive references and raises an error
        if recursion is detected.

        See spike/tree_scan/tree_scan.lhs for algorithm.

        > parentlist e@(Entity {altparents=alts}) = e:altpath         -- req (a)
        >   where
        >     altpath = mergealts e [ parentlist p | p <- alts ]
        >
        > mergealts :: Entity -> [[Entity]] -> [Entity]
        > mergealts _      []        = []
        > mergealts parent [altpath]
        >   | parent `elem` altpath  = error ("Entity "++(show parent)++" has recursive altparent path")
        >   | otherwise              = altpath
        > mergealts parent (alt1:alt2:morealts) =
        >     mergealts parent ((mergealtpair alt1 alt2):morealts)

        altscope    if supplied, indicates a scope other than the current entity
                    to search for children.  Currently defined values are:
                    "none" or None - search current entity only
                    "all"    - search current entity and all alternative parent
                               entities, including their parents and alternatives.
                    "select" - same as "all" - used for generating a list of
                               options for a select/choice field.
                    "user"   - search current entity and site entity if it is on
                               the alternatives list; skips intervening entities.
                               Used to avoid inheriting user permissions with
                               other configuration data.
                    "site"   - site-level only: used for listing collections; by
                               default, collections are not included in
                               enumerations of entities.
                               (See EntityRoot. and Site._children method)
                    "nosite" - collection-level only: used for listing entities
                               from just collections.  Used when cacheing data,
                               where site data is assumed to be invariant, hence
                               no need to re-load.
        """
        # NOTE(review): mutable default `parents_seen=[]` is safe as written —
        # it is only read here, and recursive calls pass a fresh list
        # (parents_seen+[p]); it is never mutated in place.
        altparents = self._local_find_alt_parents()  # Class-specific local alternative discovery
        altparent_lists = []
        if altscope:
            for p in altparents:
                altp = []
                if p:
                    if p in parents_seen:
                        msg = (
                            "Entity._find_alt_parents %r contains recursive altparent reference)"%
                            (self.get_id(),)
                            )
                        log.error(msg)
                        raise ValueError(msg)
                    elif ( (altscope == "all") or
                           (altscope == "select") or
                           ((altscope == "user")   and (p.get_id() == layout.SITEDATA_ID)) or
                           ((altscope == "nosite") and (p.get_id() != layout.SITEDATA_ID)) ):
                        # (altscope == "user") and (self._altparent.get_id() == layout.SITEDATA_ID)):
                        altp.append(p)
                        altp.extend(p._find_alt_parents(altscope=altscope, parents_seen=parents_seen+[p]))
                altparent_lists.append(altp)
        parents = []
        for alt_list in altparent_lists:
            if self in alt_list:
                # Is this test redundant??  Keeping it for safety.
                msg = (
                    "Entity._find_alt_parents %r generates recursive altparent reference)"%
                    (self.get_id(),)
                    )
                log.error(msg)
                raise ValueError(msg)
            parents = self._merge_alt_parent_lists(parents, alt_list)
        return parents

    def _merge_alt_parent_lists(self, list1, list2):
        """
        Merge a pair of alternative parent lists, preserving depth ordering
        and where possible placing entries from the first list ahead of entries
        from the second list.

        See spike/tree_scan/tree_scan.lhs for algorithm.

        > mergealtpair :: [Entity] -> [Entity] -> [Entity]
        > mergealtpair [] alt2 = alt2
        > mergealtpair alt1 [] = alt1
        > mergealtpair alt1@(h1:t1) alt2@(h2:t2)
        >   | h1 == h2         = h1:mergealtpair t1 t2      -- req (b) (part)
        >   | h1 `notElem` t2  = h1:mergealtpair t1 alt2    -- req (d)
        >   | h2 `notElem` t1  = h2:mergealtpair alt1 t2    -- req (d)
        >   | otherwise        = error ("Cannot preserve depth ordering of "++(show h1)++" and "++(show h2))

        Raises ValueError when no depth-preserving merge exists.
        """
        if not list1:
            return list2
        if not list2:
            return list1
        if list1[0] == list2[0]:
            return [list1[0]] + self._merge_alt_parent_lists(list1[1:], list2[1:])
        if list1[0] not in list2:
            return [list1[0]] + self._merge_alt_parent_lists(list1[1:], list2)
        if list2[0] not in list1:
            return [list2[0]] + self._merge_alt_parent_lists(list1, list2[1:])
        msg = (
            "Entity._merge_alt_parent_lists: Cannot preserve depth ordering of %r and %r"%
            (list1[0].get_id(), list2[0].get_id())
            )
        log.error(msg)
        raise ValueError(msg)

    def get_alt_entities(self, altscope=None):
        """
        Returns a list of alternative entities to the current entity to search
        for possible child entities.  The supplied altscope parameter indicates
        the scope to be searched.

        Currently, only one alternative may be declared, but a list is returned
        that includes alternatives to the alternatives available, and to
        facilitate future developments supporting multiple inheritance paths.

        altscope    if supplied, indicates a scope other than the current
                    entity to search for children.  See method
                    `_find_alt_parents` for more details.

        Raises ValueError when altscope is supplied but is not a string.
        """
        if altscope is not None:
            if not is_string(altscope):
                log.error("altscope must be string (%r supplied)"%(altscope))
                log.error("".join(traceback.format_stack()))
                raise ValueError("altscope must be string (%r supplied)"%(altscope))
        alt_parents = [self] + self._find_alt_parents(altscope=altscope)
        return alt_parents

    def try_alt_entities(self, func, test=test_is_true, altscope=None):
        """
        Try applying the supplied function to the current entity and then any
        alternatives of the current entity, until a result is obtained that
        satisfies the supplied test.  By default, looks for a result that
        evaluates as Boolean True.

        If no satisfying value is found, returns the result from the last
        function executed (i.e. with the default test, returns None).

        The supplied function should operate on a single supplied entity,
        without attempting to evaluate alternatives: this function will
        enumerate the alternatives and make additional calls as needed.
        """
        v = func(self)
        if test(v):
            return v
        # Note: alternatives are discovered via the *parent* entity.
        alt_parents = self._parent.get_alt_entities(altscope=altscope)
        for altparent in alt_parents:
            v = func(altparent)
            if test(v):
                return v
        return v

    @classmethod
    def try_alt_parentage(cls, parent, entityid, func, test=test_is_true, altscope=None):
        """
        Try applying the supplied function to an entity descended from the
        supplied parent, then any alternative parents to that parent, until a
        result is obtained that satisfies the supplied test.  By default, looks
        for a result that evaluates as Boolean True.

        Returns a pair consisting of the satisfied entity and the corresponding
        value.  If no satisfying value is found, returns the result for the last
        entity tried; i.e., with the default test, returns (None,None).

        The supplied function should operate on a single supplied entity,
        without attempting to evaluate alternatives: this function will
        enumerate the alternatives and make additional calls as needed.
        """
        e  = cls._child_init(parent, entityid)
        # Preserve the first candidate's view URL so alternatives are
        # presented under the original parentage.
        uv = e._entityviewurl
        v  = func(e)
        if test(v):
            return (e, v)
        alt_parents = parent.get_alt_entities(altscope=altscope)
        for altparent in alt_parents:
            e = cls._child_init(altparent, entityid, entityviewurl=uv)
            v = func(e)
            if test(v):
                return (e, v)
        return (None, v)

    # Class helper methods

    @classmethod
    def allocate_new_id(cls, parent, base_id=None):
        """
        Allocate and return an as-yet-unused entity id for a member of the
        indicated entity class as an offspring of the indicated parent.

        If "base_id" is specified, it is used as part of the new Id allocated
        (used when copying an entity).
        """
        if base_id and util.valid_id(base_id):
            last_id     = 0
            name_format = base_id+"_%02d"
        else:
            last_id     = cls._last_id or 0
            name_format = "%08d"
        while True:
            last_id += 1
            new_id   = name_format%last_id
            if not cls.exists(parent, new_id):
                break
        if not base_id:
            # Remember high-water mark so subsequent allocations do not rescan.
            cls._last_id = last_id
        return new_id

    @classmethod
    def relpath(cls, entityid):
        """
        Returns parent-relative path string for an identified entity of the
        given class.

        cls         is the class of the entity whose relative path is returned.
        entityid    is the local identifier (slug) for the entity.
        """
        relpath = (cls._entityroot or "%(id)s")%{'id': entityid, 'type_id': cls._entitytypeid}
        return relpath

    @classmethod
    def path(cls, parent, entityid):
        """
        Returns path string for accessing the body of the indicated entity.

        cls         is the class of the entity whose path is returned.
        parent      is the parent from which the entity is descended.
        entityid    is the local identifier (slug) for the entity.
        """
        assert cls._entityfile is not None
        p = util.entity_path(
            parent._entitydir,
            [cls.relpath(entityid), cls._entitybase],
            cls._entityfile
            )
        log.debug("Entity.path: %s"%(p))
        return p

    @classmethod
    def meta_resource_name(cls, name_ext=".jsonld"):
        """
        Returns a metadata resource (file) name.  By default, returns the name
        for JSON-LD data, but if an alternative name extension is provided
        returns a name with that extension instead.
        """
        #@@TODO: type selection should use type identifier rather than extension string?
        resource_name = cls._entityfile
        assert resource_name.endswith(".jsonld")
        if resource_name.endswith(".jsonld") and name_ext != ".jsonld":
            resource_name = resource_name[0:-7]+name_ext    # len(".jsonld") == 7
        return resource_name

    # I/O helper functions (copied from or overriding EntityRoot)

    def _children(self, cls, altscope=None):
        """
        Iterates over candidate child identifiers that are possible instances
        of an indicated class.  The supplied class is used to determine a
        subdirectory to be scanned.

        cls         is a subclass of Entity indicating the type of children to
                    iterate over.
        altscope    if supplied, indicates a scope other than the current entity
                    to search for children.

        @@NOTE: The logic in this method is intended to return inherited values
        before values defined in the current collection.  It should probably be
        re-worked to return entries in order from all inherited definitions.
        The logic here could possibly be simplified to extract all values in the
        "alt parents" loop (though the ordering might be tricky to preserve that
        way).
        """
        coll_entity_ids = list(super(Entity, self)._children(cls, altscope=None))
        alt_parents = self.get_alt_entities(altscope=altscope)
        # See https://docs.python.org/2/library/itertools.html#itertools.chain
        parent_entity_ids = []
        for alt in alt_parents:
            for eid in super(Entity, alt)._children(cls, altscope=altscope):
                # Filter out duplicates
                if eid not in parent_entity_ids:
                    parent_entity_ids.append(eid)
        # Inherited ids first, then ids defined in the current collection.
        for entity_id in [f for f in parent_entity_ids if f not in coll_entity_ids] + coll_entity_ids:
            if util.valid_id(entity_id, reserved_ok=True):
                yield entity_id
        return

    def resource_file(self, resource_ref):
        """
        Returns a file object value for a resource associated with the current
        entity, or with a corresponding entity with the same id descended from
        an alternative parent, or None if the resource is not present.
        """
        file_obj = self.try_alt_entities(
            lambda e: super(Entity,e).resource_file(resource_ref),
            altscope="all"
            )
        return file_obj

    # Create and access functions

    def child_entities(self, cls, altscope=None):
        """
        Iterates over child entities of an indicated class.  The supplied class
        is used to determine a subdirectory to be scanned, and to instantiate
        and load data for the entities found.

        cls         is a subclass of Entity indicating the type of children to
                    iterate over.
        altscope    if supplied, indicates a scope other than the current entity
                    to search for children.  See `_find_alt_parents` for more
                    details.
        """
        for i in self._children(cls, altscope=altscope):
            e = cls.load(self, i, altscope=altscope)
            if e:
                yield e
        return

    @classmethod
    def _child_init(cls, parent, entityid, entityviewurl=None):
        """
        Instantiate a child entity (e.g. for create and load methods) of a
        specified parent entity.

        parent          is the parent entity for which a child is instantiated.
        entityid        is the entity id of the child to be instantiated.
        entityviewurl   if supplied, indicates an alternative URL to be used as
                        the view URL for the initialized entity.
        """
        e = cls(parent, entityid)
        if entityviewurl is not None:
            e._entityviewurl = entityviewurl
        return e

    @classmethod
    def create(cls, parent, entityid, entitybody):
        """
        Method creates a new entity or rewrites an existing entity.

        cls         is a class value used to construct the new entity value
        parent      is the parent entity from which the new entity is descended.
        entityid    is the local identifier (slug) for the new entity - this is
                    required to be unique among descendents of a common parent.
        entitybody  is a dictionary of values that are stored for the created
                    entity.

        Returns the created entity as an instance of the supplied class object.
        """
        log.debug("Entity.create: entityid %s, parentid %s"%(entityid, parent.get_id()))
        e = cls._child_init(parent, entityid)
        e.set_values(entitybody)
        e._save()
        return e

    @classmethod
    def remove(cls, parent, entityid):
        """
        Method removes an entity, deleting its details, data and descendents
        from Annalist storage.

        cls         is the class of the entity to be removed
        parent      is the parent from which the entity is descended.
        entityid    is the local identifier (slug) for the entity.

        Returns None on success, or a status value indicating a reason for
        failure.
        """
        log.debug("Entity.remove: id %s"%(entityid))
        e = cls.load(parent, entityid)
        if e:
            if "@error" in e:
                # Entity body failed to load: report rather than remove blindly.
                return Annalist_Error(
                    message.ENTITY_LOAD_ERROR%(
                        { 'id':      entityid
                        , 'file':    e["@error"]
                        , 'message': e["@message"]
                        })
                    )
            e._remove(cls._entitytype)
        else:
            return Annalist_Error("Entity %s not found"%(entityid))
        return None

    @classmethod
    def load(cls, parent, entityid, altscope=None):
        """
        Return an entity with given identifier belonging to some given parent,
        or None if there is no such identity.

        cls         is the class of the entity to be loaded
        parent      is the parent from which the entity is descended.
        entityid    is the local identifier (slug) for the entity.
        altscope    if supplied, indicates a scope other than the current entity
                    to search for children.  See `_find_alt_parents` for more
                    details.

        Returns an instance of the indicated class with data loaded from the
        corresponding Annalist storage, or None if there is no such entity.
        """
        entity = None
        if util.valid_id(entityid, reserved_ok=True):
            (e, v) = cls.try_alt_parentage(
                parent, entityid,
                (lambda e: e._load_values()),
                altscope=altscope
                )
            if v:
                # Apply any format migrations before installing loaded values.
                v = e._migrate_values(v)
                e.set_values(v)
                entity = e
        else:
            log.debug("Entity.load: invalid id %s"%entityid)
        return entity

    @classmethod
    def exists(cls, parent, entityid, altscope=None):
        """
        Method tests for existence of identified entity descended from given
        parent.

        cls         is the class of the entity to be tested
        parent      is the parent from which the entity is descended.
        entityid    is the local identifier (slug) for the entity.
        altscope    if supplied, indicates a scope other than the current entity
                    to search for children.  See `_find_alt_parents` for more
                    details.

        Returns True if the entity exists, as determined by existence of the
        entity description metadata file.
        """
        (e, v) = cls.try_alt_parentage(
            parent, entityid,
            (lambda e: e._exists()),
            altscope=altscope
            )
        return v

    @classmethod
    def fileobj(cls, parent, entityid, filename, filetypeuri, mimetype, mode, altscope=None):
        """
        Method returns a file object value (like `open`) for accessing an
        imported resource associated with an entity (e.g. image, binary blob,
        etc.)

        cls         is the class of the entity to be tested
        parent      is the parent from which the entity is descended.
        entityid    is the local identifier (slug) for the entity.
        filename    is the local name for the file object to be created or
                    accessed.
        filetypeuri is a URI or CURIE indicating the type of resource for which
                    a file object is created.  This is used to determine details
                    such as file extension used when creating a new file.
        mimetype    is a MIME content-type string for the resource
                    representation used.
        mode        indicates how the resource is to be opened, with the same
                    options that are used with the standard `open` method (as
                    far as they are applicable).  E.g. "wb" to create a new
                    resource, and "r" to read an existing one.
        altscope    if supplied, indicates a scope other than the current entity
                    to search for children.  See `_find_alt_parents` for more
                    details.

        Returns a file object value, or None.
        """
        log.debug("Entity.fileobj: entitytype %s, parentdir %s, entityid %s"%
            (cls._entitytype, parent._entitydir, entityid)
            )
        if altscope is not None:
            if not is_string(altscope):
                log.error("altscope must be string (%r supplied)"%(altscope))
                log.error("".join(traceback.format_stack()))
                raise ValueError("altscope must be string (%r supplied)"%(altscope))
        (e, v) = cls.try_alt_parentage(
            parent, entityid,
            (lambda e: e._fileobj(filename, filetypeuri, mimetype, mode)),
            altscope=altscope
            )
        return v

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/entity.py
entity.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module is used to cache information about relations for which transitive closure computations are frequently required (e.g. rdfs:superClassOf, rdfs:superpropertyOf, etc.). It is intended to concentrate transitive closure computations (and optimizations) that would otherwise be scattered across the codebase. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2018, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) from annalist.exceptions import Annalist_Error from annalist.models.objectcache import get_cache, remove_cache # --------------------------------------------------------------------------- # # Local helper functions # # --------------------------------------------------------------------------- def make_cache_key(cache_type, coll_id, rel_id): return (cache_type, coll_id, rel_id) # --------------------------------------------------------------------------- # # Local helper functions to manage direct relations between values # # --------------------------------------------------------------------------- def add_direct_rel(rel, v1, v2): """ Local helper to add a direct relation """ if v1 not in rel: rel[v1] = set() rel[v1].add(v2) return def remove_direct_rel(rel, v1, v2): """ Local helper to remove a direct relation """ rel[v1].remove(v2) if rel[v1] == set(): del rel[v1] # Maintains consistency of get_values() method return def get_closure(rel, v): """ Return transitive closure of values v1 such that "v rel v1", given a dictionary of direct relations. Termination depends on directed relation; i.e. v1 in rel(v) => v not in get_closure(rel, v1) Thus the get_closure recursive call can never include v, as all values (v1) for which get_closure is called recursively are values that have been added to the closure result (cv). 
""" if v not in rel: return set() v1s = rel[v] cv = v1s.union(*[get_closure(rel, v1) for v1 in v1s]) return cv # --------------------------------------------------------------------------- # # Error class # # --------------------------------------------------------------------------- class Closure_Error(Annalist_Error): """ Class for errors raised by closure calculations. """ def __init__(self, value=None, msg="Closure_error"): super(Closure_Error, self).__init__(value, msg) return # --------------------------------------------------------------------------- # # Closure cache class # # --------------------------------------------------------------------------- class ClosureCache(object): """ This class saves information used to calculate transitive closures of a relation in a specified collection. In the following descriptions, the relation is taken to be defined over some set values vals from a domain Val. Core methods: add_rel :: Val, Val -> Bool | error remove_val :: Val -> Bool fwd_closure :: Val -> Val* rev_closure :: Val -> Val* Invariants: FORALL v, v1, v2 in vals: D (directed relation graph): D1: v not in fwd_closure(v) D2: v not in rev_closure(v) D3: v1 not in fwd_closure(v) or v1 not in rev_closure(v) S (symmetry): v2 in fwd_closure(v1) <=> v1 in rev_closure(v2) T (transitivity): v2 in fwd_closure(v1) and v3 in fwd_closure(v2) => v3 in fwd_closure(v1) """ def __init__(self, coll_id, rel_uri): """ Initialize. coll_id Id of collection with which relation closure is scoped rel URI of relation over which closure is calculated The parameters are provided for information about the scope of the closure, and are used to access saved cache values, but do not of themselves affect the actual closure calculations. The set of values over which the relation is defined is represented by dictionaries of direct forward and reverse mappings from members of the set. Initializes these to empty dictionaries, corresponding to an empty set of values. 
The invariants are all trivially true for an emty value set. """ super(ClosureCache, self).__init__() self._coll_id = coll_id self._rel_uri = rel_uri self._key = make_cache_key("ClosureCache", coll_id, rel_uri) self._cache = get_cache(self._key) self._cache.set("fwd", {}) self._cache.set("rev", {}) return def remove_cache(self): self._cache = None remove_cache(self._key) return def get_collection_id(self): return self._coll_id def get_relation_uri(self): return self._rel_uri def add_rel(self, v1, v2): """ Add a forward relation between v1 and v2. Returns True if a new relation is added, False if the relation is already defined or raises an error and leaves the ClosureCache unchanged if the new relation would violate one of the invariants. """ with self._cache.access("fwd", "rev") as rel: if v1 == v2: # Preserve invariant D1 (hence D2 by symmetry) msg = "Attempt to define relation with itself" raise Closure_Error(value=v1, msg=msg) if v2 in get_closure(rel["rev"], v1): # Preserve invariant D3 msg="Attempt to define forward relation for which closure contains reverse relation" raise Closure_Error(value=(v1,v2), msg=msg) if (v1 in rel["fwd"]) and (v2 in rel["fwd"][v1]): # Already defined - no change hence invariants preserved return False # Add new relation: these assignments occur together, so symmetry of direct relations is preserved add_direct_rel(rel["fwd"], v1, v2) add_direct_rel(rel["rev"], v2, v1) return True def remove_val(self, v): """ Remove value from set over which relation is defined. Operates by removing all direct relations that mention the value. 
""" updated = False with self._cache.access("fwd", "rev") as rel: if v in rel["fwd"]: for v2 in rel["fwd"][v]: # Remove reverse relations referencing this value (which must exist by symmetry) remove_direct_rel(rel["rev"], v2, v) # Restore symmetry del rel["fwd"][v] updated = True if v in rel["rev"]: for v1 in rel["rev"][v]: # Remove forward relations referencing this value (which must exist by symmetry) remove_direct_rel(rel["fwd"], v1, v) # Restore symmetry del rel["rev"][v] updated = True return updated def fwd_closure(self, v): """ Return transitive closure of values v1 for which "v rel v1" """ with self._cache.access("fwd") as rel: return get_closure(rel["fwd"], v) def rev_closure(self, v): with self._cache.access("rev") as rel: return get_closure(rel["rev"], v) def get_values(self): """ Returns the set of values over which the closure is currently defined. Also performs some consistency checks. (Defined mainly for testing.) """ with self._cache.access("fwd", "rev") as rel: vf1s = frozenset(rel["fwd"].keys()) # Forward relation first values (v1: v1 rel v2) vr1s = frozenset(rel["rev"].keys()) # Reverse relation first values (v2: v1 rel v2) vf2s = frozenset().union(*[ rel["fwd"][vf1] for vf1 in vf1s ]) vr2s = frozenset().union(*[ rel["rev"][vr1] for vr1 in vr1s ]) assert vf1s == vr2s assert vr1s == vf2s return vf1s | vr1s # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/closurecache.py
closurecache.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2014, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import shutil

from django.conf import settings

from annalist import layout
from annalist.identifiers import ANNAL, RDFS
from annalist.models.entity import Entity
from annalist.models.entitydata import EntityData
from annalist.models.recordgroup import RecordGroup, RecordGroup_migration
from annalist.util import (
    split_type_entity_id, extract_entity_id, make_type_entity_id
    )
from annalist.exceptions import Annalist_Error

class RecordField(EntityData):
    """
    Represents a field description record belonging to a collection.

    Class attributes below configure the entity framework (EntityData)
    with the type, id, storage path, view and metadata file for fields.
    """

    _entitytype = ANNAL.CURIE.Field         # Entity type URI/CURIE
    _entitytypeid = layout.FIELD_TYPEID     # Entity type id
    _entityroot = layout.COLL_FIELD_PATH    # Storage path within collection
    _entityview = layout.COLL_FIELD_VIEW    # View URI template
    _entityfile = layout.FIELD_META_FILE    # Metadata file name

    def __init__(self, parent, field_id):
        """
        Initialize a new RecordField object, without metadata (yet).

        parent      is the parent collection to which the field belongs.
        field_id    the local identifier for the record field
        """
        # assert altparent, "RecordField instantiated with no altparent"
        super(RecordField, self).__init__(parent, field_id)
        self._parent = parent
        # log.debug("RecordField %s"%(field_id))
        return

    def get_property_uri(self):
        """
        Return field's property URI (or a recognizable placeholder string
        if no property URI is defined).
        """
        return self.get(ANNAL.CURIE.property_uri, "@@undefined_property_uri@@")

    def _migrate_filenames(self):
        """
        Override EntityData method.

        Returns None: no filename migration is performed for field records.
        """
        return None

    def _map_entity_field_enum_val(self, entitydata, key, type_id, old_enum_val, new_enum_val):
        """
        Map enumerated value of specified type.

        If entitydata[key] holds an enumerated value of type `type_id` equal
        to `old_enum_val`, replace it by `new_enum_val` (of the same type).
        Returns the (possibly updated) entitydata value.
        """
        if key in entitydata:
            type_id_here, enum_val_here = split_type_entity_id(entitydata[key])
            if type_id_here == type_id and enum_val_here == old_enum_val:
                entitydata[key] = make_type_entity_id(type_id, new_enum_val)
        return entitydata

    def _migrate_values(self, entitydata):
        """
        Field description entity format migration method.

        The specification for this method is that it returns an entitydata
        value which is a copy of the supplied entitydata with format
        migrations applied.

        NOTE: implementations are free to apply migrations in-place.  The
        resulting entitydata should be exactly as the supplied data *should*
        appear in storage to conform to the current format of the data.  The
        migration function should be idempotent; i.e.
            x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)
        """
        field_id = entitydata[ANNAL.CURIE.id]
        # Rename properties that have changed between versions
        migration_map = (
            [ (ANNAL.CURIE.options_typeref,   ANNAL.CURIE.field_ref_type       )
            , (ANNAL.CURIE.restrict_values,   ANNAL.CURIE.field_ref_restriction)
            , (ANNAL.CURIE.target_field,      ANNAL.CURIE.field_ref_field      )
            , (ANNAL.CURIE.field_target_type, ANNAL.CURIE.field_value_type     )
            ])
        entitydata = self._migrate_values_map_field_names(migration_map, entitydata)
        # Fix up enumerated values to use new enumeration type names
        field_enum_types = (
            [ (ANNAL.CURIE.field_render_type, "_enum_render_type")
            , (ANNAL.CURIE.field_value_mode,  "_enum_value_mode")
            ])
        for fkey, ftype in field_enum_types:
            if fkey in entitydata and entitydata[fkey]:
                entitydata[fkey] = make_type_entity_id(
                    ftype, extract_entity_id(entitydata[fkey])
                    )
        # If comment and no tooltip, create tooltip and update comment
        if (RDFS.CURIE.comment in entitydata) and (ANNAL.CURIE.tooltip not in entitydata):
            label = entitydata.get(RDFS.CURIE.label, "Field '%s'"%field_id)
            comment = entitydata[RDFS.CURIE.comment]
            entitydata[ANNAL.CURIE.tooltip] = comment
            # Old comment becomes a Markdown-headed comment
            entitydata[RDFS.CURIE.comment] = "# %s\r\n\r\n%s"%(label, comment)
        # If reference to field group, copy group field list inline
        if ANNAL.CURIE.group_ref in entitydata:
            group_type_id, group_id = split_type_entity_id(
                entitydata[ANNAL.CURIE.group_ref], default_type_id=layout.GROUP_TYPEID
                )
            if group_id != "":
                log.info("Migrating group reference %s in field %s"%(group_id, field_id))
                group_obj = RecordGroup_migration.load(self._parent, group_id)
                if not group_obj:
                    # Referenced group cannot be loaded: record error but continue
                    msg = (
                        "Failed to load group '%s' for field '%s' in collection '%s'"%
                        (group_id, field_id, self._parent.get_id())
                        )
                    log.warning(msg)
                    self.set_error(msg)
                    # raise Annalist_Error(msg)
                else:
                    field_value_type = entitydata[ANNAL.CURIE.field_value_type]
                    group_entity_type = group_obj[ANNAL.CURIE.group_entity_type]
                    if field_value_type and group_entity_type and field_value_type != group_entity_type:
                        # NOTE(review): message text reconstructed across a
                        # damaged source-line break - confirm against upstream
                        log.warning(
                            "Group %s entity type %s differs from field %s value type %s"%
                            (group_id, group_entity_type, field_id, field_value_type)
                            )
                    # Inline the group's field list, then drop the group reference
                    entitydata[ANNAL.CURIE.field_fields] = group_obj[ANNAL.CURIE.group_fields]
                    del entitydata[ANNAL.CURIE.group_ref]
        # Default render type to "Text"
        if ANNAL.CURIE.field_render_type not in entitydata:
            entitydata[ANNAL.CURIE.field_render_type] = "_enum_render_type/Text"
        # Migrate changed render type names
        entitydata = self._map_entity_field_enum_val(
            entitydata, ANNAL.CURIE.field_render_type, "_enum_render_type",
            "RepeatGroup", "Group_Seq"
            )
        entitydata = self._map_entity_field_enum_val(
            entitydata, ANNAL.CURIE.field_render_type, "_enum_render_type",
            "RepeatGroupRow", "Group_Seq_Row"
            )
        entitydata = self._map_entity_field_enum_val(
            entitydata, ANNAL.CURIE.field_render_type, "_enum_render_type",
            "Slug", "EntityRef"
            )
        # Calculate mode from other fields if not defined
        val_render = entitydata[ANNAL.CURIE.field_render_type]
        ref_type = entitydata.get(ANNAL.CURIE.field_ref_type, None)
        ref_field = entitydata.get(ANNAL.CURIE.field_ref_field, None)
        if ANNAL.CURIE.field_value_mode in entitydata:
            val_mode = entitydata[ANNAL.CURIE.field_value_mode]
        else:
            # Infer value mode from render type
            val_mode = "Value_direct"
            if val_render == "RefMultifield":
                val_mode = "Value_entity"
            elif val_render == "URIImport":
                val_mode = "Value_import"
            elif val_render == "FileUpload":
                val_mode = "Value_upload"
        entitydata[ANNAL.CURIE.field_value_mode] = val_mode
        # Consistency checks
        if ref_field:
            # 0.5.17: Tried log.warning, but found that some test cases still use
            # entity field reference.  So downgrading to DEBUG report for now.
            # @@TODO: review this
            log.debug(
                "RecordField %s: value given for deprecated property %s"%
                (field_id, ANNAL.CURIE.field_ref_field)
                )
        if val_mode == "Value_field":
            log.warning(
                "RecordField %s: value 'Value_field' for property %s is deprecated"%
                (field_id, ANNAL.CURIE.field_value_mode)
                )
        elif val_mode == "Value_entity":
            if not ref_type:
                log.warning(
                    "RecordField %s: val_mode 'Value_entity' requires value for %s"%
                    (field_id, ANNAL.CURIE.field_ref_type)
                    )
            if ref_field:
                log.warning(
                    "RecordField %s: val_mode 'Value_entity' should not define value for %s"%
                    (field_id, ANNAL.CURIE.field_ref_field)
                    )
        # Return result
        return entitydata

    def _pre_save_processing(self, entitydata):
        """
        Pre-save value processing.

        This method is called just before a value is saved to fill in or
        update any values that were not specified in the form input.

        The specification for this method is that it returns an entitydata
        value which is a copy of the supplied entitydata with any data
        updates applied.

        NOTE: implementations are free to apply updates in-place.  The
        resulting entitydata should be exactly as the supplied data *should*
        appear in storage.  The update function should be idempotent; i.e.
            x._pre_save_processing(x._pre_save_processing(e)) == x._pre_save_processing(e)
        """
        if not entitydata.get(ANNAL.CURIE.property_uri, None):
            # Default the property URI to the field id
            entitydata[ANNAL.CURIE.property_uri] = entitydata[ANNAL.CURIE.id]
        return entitydata

    def _post_update_processing(self, entitydata, post_update_flags):
        """
        Post-update processing.

        This method is called when a RecordField entity has been created or
        updated.  It invokes the containing collection method to regenerate
        the JSON LD context for the collection to which the field belongs.
        """
        self._parent.cache_add_field(self)
        self._parent.generate_coll_jsonld_context(flags=post_update_flags)
        return entitydata

    def _post_remove_processing(self, post_update_flags):
        """
        Post-remove processing.

        This method is called when a RecordField entity has been removed.
        It removes the field from the containing collection's field cache.
        """
        self._parent.cache_remove_field(self.get_id())
        return

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordfield.py
recordfield.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module provides an object cacheing framework for arbitrary Python values. The intent is that all cacghe logic can be isolated, and may be re-implemented using a network cache faclity such as MemCache or Redis. The present implementation assumes a single-process, multi-threaded environment and interlocks cache accesses to avoid possible cache-related race conditions. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2019, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import sys import traceback import threading import contextlib from annalist.exceptions import Annalist_Error # =================================================================== # # Error class # # =================================================================== class Cache_Error(Annalist_Error): """ Class for errors raised by cache methods. """ def __init__(self, value=None, msg="Cache_error (objectcache)"): super(Cache_Error, self).__init__(value, msg) return # =================================================================== # # Cache creation and discovery # # =================================================================== globalcachelock = threading.Lock() # Used to interlock creation/deletion of caches objectcache_dict = {} # Initial empty set of object caches objectcache_tb = {} def get_cache(cachekey): """ This function locates or creates an object cache. cachekey is a hashable value that uniquely identifies the required cache (e.g. a string or URI). Returns the requested cache object, which may be created on-the-fly. 
""" with globalcachelock: if cachekey not in objectcache_dict: objectcache_dict[cachekey] = ObjectCache(cachekey) objectcache = objectcache_dict[cachekey] # Copy value while lock acquired return objectcache def remove_cache(cachekey): """ This function removes a cache from the set of object caches cachekey is a hashable value that uniquely identifies the required cache (e.g. a string or URI). """ # log.debug("objectcache.remove_cache %r"%(cachekey,)) objectcache = None with globalcachelock: if cachekey in objectcache_dict: objectcache = objectcache_dict[cachekey] del objectcache_dict[cachekey] # Defer operations that acquire the cache local lock until # the global lock is released if objectcache: objectcache.close() return # =================================================================== # # Object cache class # # =================================================================== class ObjectCache(object): """ A class for caching objects of some type. The cache is identified by is cache key value that is used to distinguish a particular object cache from all others (see also `getCache`) """ def __init__(self, cachekey): # log.debug("ObjectCache.__init__: cachekey %r"%(cachekey,)) self._cachekey = cachekey self._cachelock = threading.Lock() # Allocate a lock object for this cache self._cache = {} # Initial empty set of values self._opened = traceback.extract_stack() self._closed = None return def cache_key(self): """ Return cache key (e.g. for use with 'remove_cache') """ return self._cachekey def flush(self): """ Remove all objects from cache. """ # log.debug("ObjectCache.flush: cachekey %r"%(self._cachekey,)) with self._cachelock: for key in list(self._cache.keys()): del self._cache[key] return self def close(self): """ Close down this cache object. Once closed, it cannot be used again. 
""" # log.debug("ObjectCache.close: cachekey %r"%(self._cachekey,)) self.flush() self._cachelock = None # Discard lock object self._closed = traceback.extract_stack() return def set(self, key, value): """ Save object value in cache (overwriting any existing value for the key). key is a hashable value that uniquely identifies the required cache (e.g. a string or URI). value is a (new) value that is to be associated with the key. """ with self._cachelock: self._cache[key] = value return value def get(self, key, default=None): """ Retrieve object value from cache, or return default value """ if self._cachelock is None: msg = "Access after cache closed (%r, %s)"%(self._cachekey, key) log.error(msg) log.debug("---- closed at:") log.debug("".join(traceback.format_list(self._closed))) log.debug("----") raise Exception(msg) # print("@@@@ self._cachelock %r, self._cachekey %r"%(self._cachelock, self._cachekey)) with self._cachelock: value = self._cache.get(key, default) return value def pop(self, key, default=None): """ Remove object value from cache, return that or default value """ with self._cachelock: value = self._cache.pop(key, default) return value @contextlib.contextmanager def access(self, *keys): """ A context manager for interlocked access to a cached value. The value bound by the context manager (for a 'with ... as' assignment) is a dictionary that has entries for each of the values in the supplied key values for which there is a previously cached value. On exit from the context manager, if the value under any of the given keys has been changed, or if any new entries have been added, they are used to update the cached values before the interlock is released. Use like this: with cacheobject.access("key1", "key2", ...) 
as value: # value is dict of cached values for given keys # interlocked processing code here # updates to value are written back to cache on leavinbg context See: https://docs.python.org/2/library/contextlib.html """ with self._cachelock: value_dict = {} for key in keys: if key in self._cache: value_dict[key] = self._cache[key] yield value_dict for key in value_dict: self._cache[key] = value_dict[key] # If needed: this logic removes keys deleted by yield code... # for key in keys: # if key not in value_dict: # del self._cache[key] return # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/objectcache.py
objectcache.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module contains (and isolates) logic used to find entities based on entity type, list selection criteria and search terms. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import re from pyparsing import Word, QuotedString, Literal, Group, Empty, StringEnd, ParseException from pyparsing import alphas, alphanums from utils.py3porting import is_string, to_unicode from annalist import layout from annalist.util import valid_id, extract_entity_id from annalist.models.recordtype import RecordType from annalist.models.recordtypedata import RecordTypeData from annalist.models.entitytypeinfo import EntityTypeInfo # ------------------------------------------------------------------- # Auxilliary functions # ------------------------------------------------------------------- def order_entity_key(entity): """ Function returns sort key for ordering entities by type and entity id Use with `sorted`, thus: sorted(entities, order_entity_key) """ type_id = entity.get_type_id() entity_id = entity.get_id() key = ( 0 if type_id.startswith('_') else 1, type_id, 0 if entity_id.startswith('_') else 1, entity_id ) return key # ------------------------------------------------------------------- # EntityFinder # ------------------------------------------------------------------- class EntityFinder(object): """ Logic for enumerating entities matching a supplied type, selector and/or search string. """ def __init__(self, coll, selector=None): """ Initialize entity finder for collection and selector. 
""" super(EntityFinder, self).__init__() self._coll = coll self._site = coll.get_site() self._selector = EntitySelector(selector, FieldComparison(coll)) # self._subtypes = None return def get_collection_type_ids(self, altscope): """ Returns iterator over possible type ids in current collection. Each type is returned as a candidate type identifier string """ return self._coll.cache_get_all_type_ids(altscope=altscope) def get_collection_subtype_ids(self, supertype_id, altscope): """ Returns a iterator of type ids for all subtypes of the supplied type accessible in the indicated scope from the current collection, including the identified type itself. """ if not valid_id(supertype_id): log.warning("EntityFinder.get_collection_subtype_ids: invalid type_id %s"%(supertype_id,)) return supertype_info = EntityTypeInfo(self._coll, supertype_id) supertype_uri = supertype_info.get_type_uri() if supertype_uri is not None: for try_subtype_id in self.get_collection_type_ids(altscope): try_subtype = self._coll.cache_get_type(try_subtype_id) if try_subtype: try_subtype_uri = try_subtype.get_uri() if ( ( supertype_uri == try_subtype_uri ) or ( supertype_uri in self._coll.cache_get_supertype_uris(try_subtype_uri) ) ): yield try_subtype_id else: log.warning("EntityFinder.get_collection_subtype_ids: no type_uri for %s"%(supertype_id,)) def get_type_entities(self, type_id, user_permissions, altscope): """ Iterate over entities from collection matching the supplied type. 'altscope' is used to determine the extent of data to be included in the listing: a value of 'all' means that site-wide entyities are icnluded in the listing. Otherwise only collection entities are included. 
""" #@@ # log.info("get_type_entities: type_id %s, user_permissions %r"%(type_id,user_permissions)) #@@ entitytypeinfo = EntityTypeInfo(self._coll, type_id) for e in entitytypeinfo.enum_entities_with_implied_values( user_permissions, altscope=altscope ): if e.get_id() != layout.INITIAL_VALUES_ID: #@@ # log.info(" yield: %s"%(e.get_id(),)) #@@ yield e return def get_subtype_entities(self, type_id, user_permissions, altscope): """ Iterate over entities from collection that are of the indicated type or any of its subtypes. 'altscope' is used to determine the extent of data to be included in the listing: a value of 'all' means that site-wide entities are included in the listing. Otherwise only collection entities are included. """ for subtype_id in self.get_collection_subtype_ids(type_id, "all"): subtype_info = EntityTypeInfo(self._coll, subtype_id) es = subtype_info.enum_entities_with_implied_values( user_permissions, altscope=altscope ) #@@ # es = list(es) #@@ Force strict eval # log.info("get_subtype_entities: %r"%([e.get_id() for e in es],)) #@@ for e in es: if e.get_id() != layout.INITIAL_VALUES_ID: yield e return def get_all_types_entities(self, types, user_permissions, altscope): """ Iterate over all entities of all types from a supplied type iterator """ #@@ # log.info("@@@@ get_all_types_entities") #@@ for t in types: for e in self.get_type_entities(t, user_permissions, altscope): #@@ # log.info("get_all_types_entities: type %s/%s"%(t,e.get_id())) #@@ yield e return def get_base_entities(self, type_id=None, user_permissions=None, altscope=None): """ Iterate over base entities from collection, matching the supplied type id if supplied. If a type_id is supplied, site data values are included. 
""" entities = None if type_id: entities = self.get_subtype_entities(type_id, user_permissions, altscope) # return self.get_type_entities(type_id, user_permissions, scope) else: entities = self.get_all_types_entities( self.get_collection_type_ids(altscope="all"), user_permissions, altscope ) #@@ # entities = list(entities) #@@ Force strict eval # log.info("get_base_entities: %r"%([(e.get_type_id(), e.get_id()) for e in entities],)) #@@ return entities def search_entities(self, entities, search): """ Iterate over entities from supplied iterator containing supplied search term. """ for e in entities: if self.entity_contains(e, search): yield e return def get_entities(self, user_permissions=None, type_id=None, altscope=None, context=None, search=None ): """ Iterates over entities of the specified type, matching search term and visible to supplied user permissions. """ entities = self._selector.filter( self.get_base_entities(type_id, user_permissions, altscope), context=context ) if search: entities = self.search_entities(entities, search) return entities def get_entities_sorted(self, user_permissions=None, type_id=None, altscope=None, context={}, search=None ): """ Get sorted list of entities of the specified type, matching search term and visible to supplied user permissions. """ entities = self.get_entities( user_permissions, type_id=type_id, altscope=altscope, context=context, search=search ) #@@ # entities = list(entities) #@@ Force strict eval # log.info("get_entities_sorted: %r"%([e.get_id() for e in entities],)) #@@ return sorted(entities, key=order_entity_key) @classmethod def entity_contains(cls, e, search): """ Returns True if entity contains/matches search term, else False. Search term None (or blank) matches all entities. 
>>> e1 = { 'p:a': '1', 'p:b': '2', 'p:c': '3', 'annal:property_uri': 'annal:member' } >>> EntityFinder.entity_contains(e1, "1") True >>> EntityFinder.entity_contains(e1, "3") True >>> EntityFinder.entity_contains(e1, "nothere") False >>> EntityFinder.entity_contains(e1, "annal:member") True >>> e2 = { 'list': ['l1', 'l2', 'l3'] \ , 'dict': {'p:a': 'd1', 'p:b': 'd2', 'p:c': 'd3'} \ } >>> EntityFinder.entity_contains(e2, "l1") True >>> EntityFinder.entity_contains(e2, "d3") True >>> EntityFinder.entity_contains(e2, "nothere") False """ if search: # Entity is not a dict, so scan entity keys for search for key in e: val = e[key] if cls.value_contains(val, search): return True return False return True @classmethod def value_contains(cls, val, search): """ Helper function tests for search term in dictionary, list or string values. Other values are not searched. """ if isinstance(val, dict): for k in val: if cls.value_contains(val[k], search): return True elif isinstance(val, list): for e in val: if cls.value_contains(e, search): return True elif is_string(val): return search in val return False # ------------------------------------------------------------------- # EntitySelector # ------------------------------------------------------------------- class EntitySelector(object): """ This class implements a selector filter. It is initialized with a selector expression, and may be invoked as a filter applied to an entity generator, or as a predicate applied to a single entity. 
    >>> e = { 'p:a': '1', 'p:b': '2', 'p:c': '3', '@type': ["http://example.com/type", "foo:bar"] }
    >>> c = { 'view': { 'v:a': '1', 'v:b': ['2', '3'] } }
    >>> f1 = "'1' == [p:a]"
    >>> f2 = "[p:a]=='2'"
    >>> f3 = ""
    >>> f4 = "'http://example.com/type' in [@type]"
    >>> f5 = "'foo:bar' in [@type]"
    >>> f6 = "'bar:foo' in [@type]"
    >>> f7 = "[p:a] in view[v:a]"
    >>> f8 = "[p:b] in view[v:b]"
    >>> f9 = "[p:a] in view[v:b]"
    >>> f10 = "[annal:field_entity_type] in view[annal:view_entity_type]"
    >>> f11 = "foo:bar in [@type]"
    >>> f12 = "bar:foo in [@type]"
    >>> EntitySelector(f1).select_entity(e, c)
    True
    >>> EntitySelector(f2).select_entity(e, c)
    False
    >>> EntitySelector(f3).select_entity(e, c)
    True
    >>> EntitySelector(f4).select_entity(e, c)
    True
    >>> EntitySelector(f5).select_entity(e, c)
    True
    >>> EntitySelector(f6).select_entity(e, c)
    False
    >>> EntitySelector(f7).select_entity(e, c)
    True
    >>> EntitySelector(f8).select_entity(e, c)
    True
    >>> EntitySelector(f9).select_entity(e, c)
    False
    >>> EntitySelector(f10).select_entity(e, c)
    True
    >>> EntitySelector(f11).select_entity(e, c)
    True
    >>> EntitySelector(f12).select_entity(e, c)
    False
    """

    def __init__(self, selector, fieldcomp=None):
        """
        Compile the supplied selector expression for later use.

        selector    selector expression string (see `parse_selector` for syntax).
        fieldcomp   optional object providing additional named comparison methods
                    (e.g. `subtype` — see `FieldComparison`) that selectors may
                    invoke by name.
        """
        self._fieldcomp = fieldcomp
        # Returns None if no filter is applied, otherwise a predicate function
        self._selector  = self.compile_selector_filter(selector)
        return

    def filter(self, entities, context=None):
        """
        Iterate over selection of entities from supplied iterator, using the
        selection specification supplied to the constructor of the current object.

        entities    is an iterator over entities from which selection is made
        context     is a dictionary of context values that may be referenced by
                    the selector in choosing entities to be returned.

        If no filtering is applied, the supplied iterator is returned as-is.
        """
        if self._selector:
            entities = self._filter(entities, context)
        return entities

    def _filter(self, entities, context):
        """
        Internal helper applies selector to entity iterator, returning a new iterator.
        """
        for e in entities:
            if self._selector(e, context):
                yield e
        return

    def select_entity(self, entity, context={}):
        """
        Apply selector to an entity, and returns True if the entity is selected
        """
        # NOTE: the mutable default for `context` is safe here because the
        # compiled selector functions only read from it, never mutate it.
        if self._selector:
            return self._selector(entity, context)
        return True

    @classmethod    #@@ @staticmethod, no cls?
    def parse_selector(cls, selector):
        """
        Parse a selector and return list of tokens

        Selector formats:
            ALL (or blank)              match any entity
            <val1> == <val2>            values are same
            <val1> in <val2>            second value is list containing 1st value,
                                        or values are same, or val1 is None.
            <val1> <name> <val2>        invoke comparison method from supplied
                                        FieldComparison object

        <val1> and <val2> may be:
            [<field-id>]                refers to field in entity under test
            <name>[<field-id>]          refers to field of context value, or None if the
                                        indicated context value or field is not defined.
            "<string>"                  literal string value.  Quotes within are escaped.

        <field_id> values are URIs or CURIEs, using characters defined by RFC3986,
        except "[" and "]"

        RFC3986:
            unreserved    = ALPHA / DIGIT / "-" / "." / "_" / "~"
            reserved      = gen-delims / sub-delims
            gen-delims    = ":" / "/" / "?" / "#" / "[" / "]" / "@"
            sub-delims    = "!" / "$" / "&" / "'" / "(" / ")"
                          / "*" / "+" / "," / ";" / "="

        Parser uses pyparsing combinators (cf. http://pyparsing.wikispaces.com).
        """
        def get_value(val_list):
            # Map a parsed token group to a value-descriptor dict with keys
            # 'type', 'name', 'field_id' and 'value'; 'type' selects the
            # accessor built later by `compile_selector_filter`.
            if len(val_list) == 1:
                return { 'type': 'literal', 'name': None, 'field_id': None, 'value': val_list[0] }
            elif val_list[0] == '[':
                return { 'type': 'entity', 'name': None, 'field_id': val_list[1], 'value': None }
            elif val_list[1] == '[':
                return { 'type': 'context', 'name': val_list[0], 'field_id': val_list[2], 'value': None }
            else:
                return { 'type': 'unknown', 'name': None, 'field_id': None, 'value': None }
        p_name = Word(alphas+"_", alphanums+"_")
        # NOTE(review): the final ")" in the p_id body charset duplicates the
        # ")" already present via "'()*+" — harmless to pyparsing Word, but
        # confirm it is not a typo for some other intended character.
        p_id   = Word(alphas+"_@", alphanums+"_-.~:/?#@!$&'()*+,;=)")
        p_val  = ( Group( Literal("[") + p_id + Literal("]") )
                 | Group( p_name + Literal("[") + p_id + Literal("]") )
                 | Group( QuotedString('"', "\\") )
                 | Group( QuotedString("'", "\\") )
                 | Group( p_id )
                 )
        p_comp = ( Literal("==") | Literal("in") | p_name )
        p_selector = ( p_val + p_comp + p_val + StringEnd() )
        try:
            resultlist = p_selector.parseString(selector).asList()
        except ParseException:
            # Unparseable selector: signalled to caller as None
            return None
        resultdict = {}
        if resultlist:
            resultdict['val1'] = get_value(resultlist[0])
            resultdict['comp'] = resultlist[1]
            resultdict['val2'] = get_value(resultlist[2])
        return resultdict

    def compile_selector_filter(self, selector):
        """
        Return filter for testing entities matching a supplied selector.

        Returns None if no selection is performed; i.e. all possible entities
        are selected.

        Selector formats: see `parse_selector` above.

        This function returns a filter function compiled from the supplied
        selector.
        """
        def get_entity(field_id):
            "Get field from entity tested by filter"
            def get_entity_f(e, c):
                return e.get(field_id, None)
            return get_entity_f
        #
        def get_context(name, field_id):
            "Get field from named value in current display context"
            def get_context_f(e, c):
                if name in c and c[name]:
                    return c[name].get(field_id, None)
                return None
            return get_context_f
        #
        def get_literal(value):
            "Get literal value specified directly in selector string"
            def get_literal_f(e, c):
                return value
            return get_literal_f
        #
        def get_val_f(selval):
            # Choose a value-accessor constructor based on the parsed value type
            if selval['type'] == "entity":
                return get_entity(selval['field_id'])
            elif selval['type'] == "context":
                return get_context(selval['name'], selval['field_id'])
            elif selval['type'] == "literal":
                return get_literal(selval['value'])
            else:
                msg = "Unrecognized value type from selector (%s)"%selval['type']
                raise ValueError(msg)
            # NOTE: unreachable — every branch above returns or raises
            assert False, "Unrecognized value type from selector"
        #
        def match_eq(v1f, v2f):
            # Predicate: the two resolved values compare equal
            def match_eq_f(e, c):
                return v1f(e, c) == v2f(e, c)
            return match_eq_f
        #
        def match_in(v1f, v2f):
            # Predicate: first value is contained in (or equals) the second.
            # NOTE: any falsy first value (None, "", []) imposes no restriction
            # and matches — slightly broader than the "val1 is None" wording in
            # the parse_selector docstring.
            def match_in_f(e, c):
                v1 = v1f(e, c)
                if not v1: return True
                v2 = v2f(e, c)
                if isinstance(v2, list):
                    return v1 in v2
                return v1 == v2
            return match_in_f
        #
        def match_subtype(v1f, v2f):
            # Predicate: delegate to the supplied field-comparison object
            # (raises AttributeError at call time if no fieldcomp was given)
            def match_subtype_f(e, c):
                return self._fieldcomp.subtype(v1f(e, c), v2f(e, c))
            return match_subtype_f
        #
        if selector in {None, "", "ALL"}:
            # No filtering requested
            return None
        sel = self.parse_selector(selector)
        if not sel:
            msg = "Unrecognized selector syntax (%s)"%selector
            raise ValueError(msg)
        v1f = get_val_f(sel['val1'])
        v2f = get_val_f(sel['val2'])
        if sel['comp'] == "==":
            return match_eq(v1f, v2f)
        if sel['comp'] == "in":
            return match_in(v1f, v2f)
        if sel['comp'] == "subtype":
            return match_subtype(v1f, v2f)
        # Drop through: raise error
        msg = "Unrecognized entity selector (%s)"%selector
        raise ValueError(msg)

# -------------------------------------------------------------------
#   FieldComparison
# -------------------------------------------------------------------

class FieldComparison(object):
    """
    Logic for comparing fields using additional context information not
    available directly to 'EntitySelector'
    """
    def __init__(self, coll):
        """
        coll    collection object providing access to type information
                (must support `get_site` and `get_uri_type`).
        """
        super(FieldComparison, self).__init__()
        self._coll = coll
        self._site = coll.get_site()
        return

    def get_uri_type_info(self, type_uri):
        """
        Return typeinfo corresponding to the supplied type URI,
        or None if the URI does not resolve to a known type.
        """
        t = self._coll.get_uri_type(type_uri)
        return t and EntityTypeInfo(self._coll, t.get_id())

    def subtype(self, type1_uri, type2_uri):
        """
        Returns True if the first type is a subtype of the second type, where
        both types are supplied as type URIs.  Returns True if both URIs are
        the same.

        If type2_uri is not specified, assume no restriction (returns True).

        If type1_uri is not specified (and type2_uri is), it does not satisfy
        the restriction (returns False).
        """
        # log.info("FieldComparison.subtype(%s, %s)"%(type1_uri, type2_uri))
        if not type2_uri or (type1_uri == type2_uri):
            return True
        if not type1_uri:
            return False
        # Test whether type2_uri appears among type1's supertype URIs
        type1_info           = self.get_uri_type_info(type1_uri)
        type1_supertype_uris = (type1_info and type1_info.get_all_type_uris()) or []
        # log.info("FieldComparison.subtype: type1_uris (supertypes) %r"%(type1_uris,))
        return type2_uri in type1_supertype_uris

if __name__ == "__main__":
    import doctest
    doctest.testmod()

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/entityfinder.py
entityfinder.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

"""
This module is used to cache per-collection information about entities of
some designated type.
"""
# NOTE: the string above follows the `from __future__` imports, so Python
# does not treat it as the module docstring; it is in-source documentation only.

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2018, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

from annalist                   import layout
from annalist.exceptions        import Annalist_Error
from annalist.identifiers       import ANNAL, RDFS
from annalist.models.objectcache import get_cache, remove_cache # , remove_matching_caches

#   ---------------------------------------------------------------------------
#
#   Local helper functions
#
#   ---------------------------------------------------------------------------

def make_cache_key(cache_type, entity_type_id, coll_id):
    # Build the (cache-type, entity-type-id, collection-id) tuple used to key
    # the shared caches managed by annalist.models.objectcache.
    return (cache_type, entity_type_id, coll_id)

def match_cache_key_unused_(cache_types, entity_cls):
    # Currently unused (note the trailing "_" in the name): returns a predicate
    # that matches cache keys whose type is in `cache_types` and whose entity
    # type id matches the supplied entity class.
    def match_fn(cachekey):
        return (cachekey[0] in cache_types) and (cachekey[1] == entity_cls._entitytypeid)
    return match_fn

#   ---------------------------------------------------------------------------
#
#   Error class
#
#   ---------------------------------------------------------------------------

class Cache_Error(Annalist_Error):
    """
    Class for errors raised by closure calculations.
    """
    # NOTE(review): "collectionentityache" in the default message looks like a
    # typo for "collectionentitycache" — confirm nothing matches on the exact
    # message text before correcting it.
    def __init__(self, value=None, msg="Cache_error (collectionentityache)"):
        super(Cache_Error, self).__init__(value, msg)
        return

#   ---------------------------------------------------------------------------
#
#   Entity-cache object class
#
#   ---------------------------------------------------------------------------

class CollectionEntityCacheObject(object):
    """
    This class is an entity cache for a specified collection and entity type.

    NOTE: entities are instantiated with respect to a specified collection,
    but the collection objects are transient (regenerated for each request),
    so the cache stores the entity values but not the instantiated entities.

    Two kinds of information are cached:

    1.  entity cache: details of all entities that are visible in
        this class, indexed by entity id and entity URI

    2.  scope cache: lists of entity ids that are visible in different
        scopes: used when returning entity enumerations (see method
        "get_all_entities").

    The scope cache is populated by calls to "get_all_entities".  When a
    entity is added to or removed from the entity cache, lacking information
    about the scopes where it is visible, the scope cache is cleared.

    Scope values currently include "user", "all", "site";
    None => "coll".

    Apart from treating None as collection local scope, the logic in this
    class treats scope names as opaque identifiers.  The scope logic is
    embedded mainly in the Entity and EntityRoot class methods "_children".
    """

    # Names of the three object caches managed per (type, collection) pair
    _cache_types = {"entities_by_id", "entity_ids_by_uri", "entity_ids_by_scope"}

    def __init__(self, coll_id, entity_cls):
        """
        Initialize a cache object for a specified collection.

        coll_id     Collection id with which the entity cache is associated.
        """
        super(CollectionEntityCacheObject, self).__init__()
        self._coll_id              = coll_id
        self._entity_cls           = entity_cls
        self._type_id              = entity_cls._entitytypeid
        # The three caches are created lazily by `_load_entities`
        self._entities_by_id       = None
        self._entity_ids_by_uri    = None
        self._entity_ids_by_scope  = None
        # Optional fallback cache for site-wide entities (see `set_site_cache`)
        self._site_cache           = None
        return

    def _make_cache_key(self, cache_type):
        # Cache key scoped to this object's entity type and collection
        return make_cache_key(cache_type, self._type_id, self._coll_id)

    def _make_entity(self, coll, entity_id, entity_values):
        """
        Internal helper method to construct an entity given its Id and values.

        coll            is collection entity to which the new identity will belong
        entity_id       is the new entity id
        entity_values   is a dictionary containing:
                        ["parent_id"] is the id of the parent entity
                        ["data"] is a dictionary of values for the new entity

        Returns None if either Id or values evaluate as Boolean False (i.e. are
        None or empty), or if the parent collection is no longer accessible.
        """
        entity = None
        if entity_id and entity_values:
            parent_id = entity_values["parent_id"]
            parent    = coll
            if coll.get_id() != parent_id:
                # Saved parent is not the supplied collection: search its
                # alternative (inherited) parents for the recorded parent id
                for parent in coll.get_alt_entities(altscope="all"):
                    if parent.get_id() == parent_id:
                        break
                else:
                    # for/else: no alternative parent matched the saved id
                    msg = (
                        "Saved parent id %s not found for entity %s/%s in collection %s"%
                        (parent_id, self._type_id, entity_id, coll.get_id())
                        )
                    log.error(msg)
                    return None
                    # raise ValueError(msg)
            entity = self._entity_cls._child_init(parent, entity_id)
            entity.set_values(entity_values["data"])
        return entity

    def _load_entity(self, coll, entity, entity_uri=None):
        """
        Internal helper method saves entity data to cache.

        This function does not actually read entity data.

        Returns True if new entity data is added, otherwise False.
        """
        entity_id     = entity.get_id()
        if not entity_uri:
            entity_uri = entity.get_uri()
        entity_parent = entity.get_parent().get_id()
        entity_data   = entity.get_save_values()
        add_entity    = False
        with self._entities_by_id.access(entity_id) as es:
            if entity_id not in es:
                # Update cache via context handler
                es[entity_id] = {"parent_id": entity_parent, "data": entity_data}
                # Update other caches while _entities_by_id lock is acquired
                self._entity_ids_by_uri.set(entity_uri, entity_id)
                # Scope visibility of the new entity is unknown: drop scope cache
                self._entity_ids_by_scope.flush()
                add_entity = True
        return add_entity

    def _load_entities(self, coll):
        """
        Initialize cache of entities, if not already done.

        NOTE: site level entitites are cached separately by the collection
        cache manager, and merged separately.  Hence "nosite" scope here.

        From entity.py:
            "nosite" - collection-level only: used for listing entities from just
                collections.  Used when cacheing data, where site data is assumed
                to be invariant, hence no need to re-load.
        """
        # When a site cache is attached, site data is cached there and we only
        # load collection-level entities here
        scope_name = "nosite" if self._site_cache else "all"
        if self._entities_by_id is None:
            self._entities_by_id      = get_cache(self._make_cache_key("entities_by_id"))
            self._entity_ids_by_uri   = get_cache(self._make_cache_key("entity_ids_by_uri"))
            self._entity_ids_by_scope = get_cache(self._make_cache_key("entity_ids_by_scope"))
            for entity_id in coll._children(self._entity_cls, altscope=scope_name):
                t = self._entity_cls.load(coll, entity_id, altscope=scope_name)
                self._load_entity(coll, t)
        return

    def _drop_entity(self, coll, entity_id):
        """
        Drop entity from collection cache.

        Returns the entity removed, or None if not found.
        """
        entity_values = self._entities_by_id.get(entity_id, None)
        entity        = self._make_entity(coll, entity_id, entity_values)
        if entity:
            entity_uri = entity.get_uri()
            self._entities_by_id.pop(entity_id, None)
            self._entity_ids_by_uri.pop(entity_uri, None)
            # Scope membership has changed in an unknown way: drop scope cache
            self._entity_ids_by_scope.flush()
        return entity

    def set_site_cache(self, site_cache):
        # Attach a fallback cache used for site-wide entities (see get_entity)
        self._site_cache = site_cache
        return

    def get_coll_id(self):
        # Id of the collection this cache object serves
        return self._coll_id

    def set_entity(self, coll, entity):
        """
        Save a new or updated entity definition.
        """
        self._load_entities(coll)
        self._load_entity(coll, entity)
        return

    def remove_entity(self, coll, entity_id):
        """
        Remove entity from collection cache.

        Returns the entity removed, or None if not found.
        """
        self._load_entities(coll)    # @@TODO: is this needed?
        return self._drop_entity(coll, entity_id)

    def get_entity(self, coll, entity_id):
        """
        Retrieve the entity for a given entity id.

        Returns an entity for the supplied entity Id, or None if not defined
        for the current collection.
        """
        self._load_entities(coll)
        entity_values = self._entities_by_id.get(entity_id, None)
        if entity_values:
            return self._make_entity(coll, entity_id, entity_values)
        # If not in collection cache, look for value in site cache:
        if self._site_cache:
            return self._site_cache.get_entity(coll.get_site_data(), entity_id)
        return None

    def get_entity_from_uri(self, coll, entity_uri):
        """
        Retrieve an entity for a given entity URI.

        Returns an entity for the specified collecion and entuty URI,
        or None if the entity URI does not exist
        """
        self._load_entities(coll)
        entity_id = self._entity_ids_by_uri.get(entity_uri, None)
        if entity_id:
            entity = self.get_entity(coll, entity_id)
            return entity
        # If not in collection cache, look for value in site cache:
        if self._site_cache:
            return self._site_cache.get_entity_from_uri(
                coll.get_site_data(), entity_uri
                )
        return None

    def get_all_entity_ids(self, coll, altscope=None):
        """
        Returns an iterator over all entity ids currently defined for a collection,
        which may be qualified by a specified scope.

        NOTE: this method returns only those entity ids for which a record has
        been saved to the collection data storage.
        """
        self._load_entities(coll)
        scope_name       = altscope or "coll"   # 'None' designates collection-local scope
        scope_entity_ids = []
        with self._entity_ids_by_scope.access(scope_name) as eids:
            if scope_name in eids:
                # Scope previously enumerated: reuse cached id list
                scope_entity_ids = eids[scope_name]
            else:
                # Collect entity ids for named scope
                for entity_id in coll._children(self._entity_cls, altscope=altscope):
                    if entity_id != layout.INITIAL_VALUES_ID:
                        scope_entity_ids.append(entity_id)
                # Update cache via context manager
                eids[scope_name] = scope_entity_ids
        return scope_entity_ids

    def get_all_entities(self, coll, altscope=None):
        """
        Returns a generator of all entities currently defined for a collection,
        which may be qualified by a specified scope.

        NOTE: this method returns only those records that have actually been
        saved to the collection data storage.
        """
        scope_entity_ids = self.get_all_entity_ids(coll, altscope=altscope)
        for entity_id in scope_entity_ids:
            t = self.get_entity(coll, entity_id)
            if t:
                yield t
        return

    def remove_cache(self):
        """
        Close down and release all entity cache data
        """
        # NOTE: the bare name `remove_cache` below resolves to the module-level
        # function imported from annalist.models.objectcache, not this method.
        if self._entities_by_id:
            remove_cache(self._entities_by_id.cache_key())
            self._entities_by_id = None
        if self._entity_ids_by_uri:
            remove_cache(self._entity_ids_by_uri.cache_key())
            self._entity_ids_by_uri = None
        if self._entity_ids_by_scope:
            remove_cache(self._entity_ids_by_scope.cache_key())
            self._entity_ids_by_scope = None
        return

#   ---------------------------------------------------------------------------
#
#   Collection entity-cache class
#
#   ---------------------------------------------------------------------------

# Global registry of cache objects: {type_id: {coll_id: cache_object}}
coll_cache_by_type_id_coll_id = {}

class CollectionEntityCache(object):
    """
    This class manages multiple-collection cache objects
    """
    def __init__(self, cache_cls, entity_cls):
        """
        Initializes a value cache with no per-collection data.

        cache_cls   is a class object for the collaction cache objects to be used.
                    The constructor is called with collection id and entity class
                    as parameters (see method `_get_cache`).
        entity_cls  is a class object for the type of entity to be cached.
        """
        super(CollectionEntityCache, self).__init__()
        self._cache_cls  = cache_cls
        self._entity_cls = entity_cls
        self._type_id    = entity_cls._entitytypeid
        # NOTE: constructing a manager resets any previously registered
        # per-collection caches for this entity type id
        coll_cache_by_type_id_coll_id[self._type_id] = {}
        return

    # Generic collection cache alllocation and access methods

    def _get_site_cache(self):
        """
        Local helper returns a cache object for the site-wide entities
        """
        if layout.SITEDATA_ID not in coll_cache_by_type_id_coll_id[self._type_id]:
            # Create and save new cache object
            site_cache = self._cache_cls(layout.SITEDATA_ID, self._entity_cls)
            coll_cache_by_type_id_coll_id[self._type_id][layout.SITEDATA_ID] = site_cache
        return coll_cache_by_type_id_coll_id[self._type_id][layout.SITEDATA_ID]

    def _get_cache(self, coll):
        """
        Local helper returns a cache object for a specified collection.

        Creates a new cache object if needed.

        coll        is a collection object for which a cache object is obtained
        """
        coll_id = coll.get_id()
        if coll_id not in coll_cache_by_type_id_coll_id[self._type_id]:
            # Create and save new cache object, chained to the site-wide cache
            coll_cache = self._cache_cls(coll_id, self._entity_cls)
            coll_cache.set_site_cache(self._get_site_cache())
            coll_cache_by_type_id_coll_id[self._type_id][coll_id] = coll_cache
        return coll_cache_by_type_id_coll_id[self._type_id][coll_id]

    def flush_cache(self, coll):
        """
        Remove all cached data for a specified collection.

        Returns True if the cache object was defined, otherwise False.

        coll        is a collection object for which a cache is removed.
        """
        coll_id = coll.get_id()
        cache   = coll_cache_by_type_id_coll_id[self._type_id].pop(coll_id, None)
        if cache:
            cache.remove_cache()
            return True
        return False

    def flush_all(self):
        """
        Remove all cached data for all collections.
        """
        # remove_cache_types = CollectionEntityCacheObject._cache_types
        # find_matching_caches(
        #     match_cache_key(remove_cache_types, entity_cls),
        #     lambda cache: cache.close()
        #     )
        # Detach the registry entry first, then release each cache object
        caches = coll_cache_by_type_id_coll_id[self._type_id]
        coll_cache_by_type_id_coll_id[self._type_id] = {}
        for coll_id in caches:
            caches[coll_id].remove_cache()
        return

    # Collection cache allocation and access methods

    def set_entity(self, coll, entity):
        """
        Save a new or updated type definition
        """
        entity_cache = self._get_cache(coll)
        return entity_cache.set_entity(coll, entity)

    def remove_entity(self, coll, entity_id):
        """
        Remove entity from collection cache.

        Returns the entity removed if found, or None if not defined.
        """
        entity_cache = self._get_cache(coll)
        return entity_cache.remove_entity(coll, entity_id)

    def get_entity(self, coll, entity_id):
        """
        Retrieve an entity for a given entity id.

        Returns an entity object for the specified collection and entity id.
        """
        entity_cache = self._get_cache(coll)
        return entity_cache.get_entity(coll, entity_id)

    def get_entity_from_uri(self, coll, entity_uri):
        """
        Retrieve en entity for a given collection and entity URI.

        Returns an entity object for the specified collection and entity URI.
        """
        entity_cache = self._get_cache(coll)
        return entity_cache.get_entity_from_uri(coll, entity_uri)

    def get_all_entity_ids(self, coll, altscope=None):
        """
        Returns all entities currently available for a collection in the
        indicated scope.  Default scope is entities defined directly in
        the indicated collection.
        """
        entity_cache = self._get_cache(coll)
        return entity_cache.get_all_entity_ids(coll, altscope=altscope)

    def get_all_entities(self, coll, altscope=None):
        """
        Returns all entities currently available for a collection in the
        indicated scope.  Default scope is entities defined directly in
        the indicated collection.
        """
        entity_cache = self._get_cache(coll)
        return entity_cache.get_all_entities(coll, altscope=altscope)

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/collectionentitycache.py
collectionentitycache.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2014, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import shutil

from django.conf                import settings

from annalist                   import layout
from annalist.exceptions        import Annalist_Error
from annalist.identifiers       import ANNAL
from annalist                   import util
from annalist.models.entity     import Entity
from annalist.models.entitydata import EntityData
from annalist.util              import extract_entity_id, make_type_entity_id

class RecordList(EntityData):
    """
    Entity class for Annalist list description records stored within a
    collection.
    """

    _entitytype     = ANNAL.CURIE.List
    _entitytypeid   = layout.LIST_TYPEID
    _entityroot     = layout.COLL_LIST_PATH
    _entityview     = layout.COLL_LIST_VIEW
    _entityfile     = layout.LIST_META_FILE

    def __init__(self, parent, list_id):
        """
        Initialize a new RecordList object, without metadata (yet).

        parent      is the parent collection in which the list is defined.
        list_id     the local identifier for the record list
        """
        super(RecordList, self).__init__(parent, list_id)
        self._parent = parent
        # log.debug("RecordList %s: dir %s"%(list_id, self._entitydir))
        return

    def _migrate_filenames(self):
        """
        Override EntityData method
        """
        # No filename migrations for list descriptions
        return None

    def _migrate_values(self, entitydata):
        """
        List description entity format migration method.

        The specification for this method is that it returns an entitydata value
        which is a copy of the supplied entitydata with format migrations applied.

        NOTE:  implementations are free to apply migrations in-place.  The resulting
        entitydata should be exactly as the supplied data *should* appear in storage
        to conform to the current format of the data.  The migration function should
        be idempotent; i.e.
            x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)
        """
        # Normalize the display-type value to a type+entity id reference
        for fkey, ftype in [(ANNAL.CURIE.display_type, "_enum_list_type")]:
            # NOTE(review): entitydata[fkey] is accessed unconditionally; if a
            # legacy record lacks ANNAL.CURIE.display_type this raises KeyError.
            # Confirm whether the key is guaranteed present by earlier processing.
            entitydata[fkey] = make_type_entity_id(
                ftype, extract_entity_id(entitydata[fkey])
                )
        # Rename obsolete property keys to their current equivalents
        migration_map = (
            [ (ANNAL.CURIE.record_type, ANNAL.CURIE.list_entity_type)
            ])
        entitydata = self._migrate_values_map_field_names(migration_map, entitydata)
        # Update renamed field ids referenced by the list's field descriptions
        if ANNAL.CURIE.list_fields in entitydata:
            for f in entitydata[ANNAL.CURIE.list_fields]:
                field_id = extract_entity_id(f[ANNAL.CURIE.field_id])
                if field_id == "Field_render":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/Field_render_type"
                if field_id == "Field_type":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/Field_value_type"
                if field_id == "View_target_type":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/View_entity_type"
                if field_id == "List_target_type":
                    f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/List_entity_type"
        # Return result
        return entitydata

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordlist.py
recordlist.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__      = "Graham Klyne ([email protected])"
__copyright__   = "Copyright 2017, G. Klyne"
__license__     = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import sys
import os
import json
import io

from rdflib                     import Graph, URIRef, Literal

# Used by `json_resource_file` below.
# See: https://stackoverflow.com/questions/51981089
# NOTE(review): `write_bytes` is actually referenced only in
# `turtle_resource_file` — confirm which function this note was meant for.
from utils.py3porting           import write_bytes

from annalist                   import message
from annalist                   import layout
from annalist.models.entitytypeinfo import EntityTypeInfo

#   Resource info data for built-in entity data

# Fixed (built-in) JSON-LD resources at site level
site_fixed_json_resources = (
    [ { "resource_name": layout.SITE_META_FILE,    "resource_dir": layout.SITE_META_REF, "resource_type": "application/ld+json" }
    , { "resource_name": layout.SITE_CONTEXT_FILE, "resource_dir": layout.SITE_META_REF, "resource_type": "application/ld+json" }
    ])

# Fixed (built-in) JSON-LD resources at collection level
collection_fixed_json_resources = (
    [ { "resource_name": layout.COLL_META_FILE,    "resource_dir": layout.COLL_BASE_DIR, "resource_type": "application/ld+json" }
    , { "resource_name": layout.COLL_PROV_FILE,    "resource_dir": layout.COLL_BASE_DIR, "resource_type": "application/ld+json" }
    , { "resource_name": layout.COLL_CONTEXT_FILE, "resource_dir": layout.COLL_BASE_DIR, "resource_type": "application/ld+json" }
    ])

# Fixed (built-in) JSON-LD resources at entity level
entity_fixed_json_resources = (
    [ { "resource_name": layout.COLL_META_FILE,     "resource_dir": layout.COLL_BASE_DIR, "resource_type": "application/ld+json" }
    , { "resource_name": layout.COLL_PROV_FILE,     "resource_dir": layout.COLL_BASE_DIR, "resource_type": "application/ld+json" }
    , { "resource_name": layout.TYPE_META_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.TYPE_PROV_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.LIST_META_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.LIST_PROV_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.VIEW_META_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.VIEW_PROV_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.GROUP_META_FILE,    "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.GROUP_PROV_FILE,    "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.FIELD_META_FILE,    "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.FIELD_PROV_FILE,    "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.VOCAB_META_FILE,    "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.VOCAB_PROV_FILE,    "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.USER_META_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.USER_PROV_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.INFO_META_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.INFO_PROV_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.ENUM_META_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.ENUM_PROV_FILE,     "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.TYPEDATA_META_FILE, "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.ENTITY_DATA_FILE,   "resource_dir": ".", "resource_type": "application/ld+json" }
    , { "resource_name": layout.ENTITY_PROV_FILE,   "resource_dir": ".", "resource_type": "application/ld+json" }
    ])

# Fixed (built-in) JSON-LD resources for entity list displays
entity_list_json_resources = (
    # [ { "resource_name": layout.COLL_META_FILE,    "resource_dir": layout.COLL_BASE_DIR,
    #     "resource_type": "application/ld+json" }
    # , { "resource_name": layout.COLL_PROV_FILE,    "resource_dir": layout.COLL_BASE_DIR,
    #     "resource_type": "application/ld+json" }
    # , { "resource_name": layout.COLL_CONTEXT_FILE, "resource_dir": layout.COLL_BASE_DIR,
    #     "resource_type": "application/ld+json" }
    [ { "resource_name": layout.ENTITY_LIST_FILE, "resource_dir": ".", "resource_type": "application/ld+json" }
    ])

#   Resource access functions

def entity_resource_file(entity, resource_info):
    """
    Return a file object that reads out the content of a resource attached to
    a specified entity.
    """
    return entity.resource_file(resource_info["resource_path"])

def json_resource_file(baseurl, jsondata, resource_info):
    """
    Return a file object that reads out a JSON version of the supplied entity
    values data.

    baseurl         base URL for resolving relative URI references.
                    (Unused except for diagnostic purposes.)
    jsondata        is the data to be formatted and returned.
    resource_info   is a dictionary of values about the resource to be serialized.
                    (Unused except for diagnostic purposes.)
    """
    response_file = io.StringIO()
    json.dump(jsondata, response_file, indent=2, separators=(',', ': '), sort_keys=True)
    # Rewind so the caller reads from the start of the serialized data
    response_file.seek(0)
    return response_file

def turtle_resource_file(baseurl, jsondata, resource_info):
    """
    Return a file object that reads out a Turtle version of the supplied entity
    values data.  The file object returns a byte stream, as this is what rdflib
    expects.

    baseurl         base URL for resolving relative URI references.
                    (Unused except for diagnostic purposes.)
    jsondata        is the data to be formatted and returned.
    resource_info   is a dictionary of values about the resource to be serialized.
                    (Unused except for diagnostic purposes.)
    """
    # NOTE: under Python 2, "BytesIO" is implemented by "StringIO", which does
    #       not handle well a combination of str and unicode values, and may
    #       raise an exception if the Turtle data contains non-ASCII characters.
    #       The problem manifests when an error occurs, and manifests as a 500
    #       server error response.
    #
    #       On reflection, I think the problem arises because the `message.*`
    #       values are unicode (per `from __future__ import unicode_literals`),
    #       and are getting joined with UTF-encoded bytestring values, which
    #       results in the error noted.
    #
    #       The fix here is to encode everything as bytes before writing.
    # Serialize values as JSON-LD, then parse into an RDF graph
    jsondata_file = json_resource_file(baseurl, jsondata, resource_info)
    response_file = io.BytesIO()
    g = Graph()
    try:
        g = g.parse(source=jsondata_file, publicID=baseurl, format="json-ld")
    except Exception as e:
        # Parse failure: report the error in the returned stream rather than
        # failing the whole response
        reason = str(e)
        log.warning(message.JSONLD_PARSE_ERROR)
        log.info(reason)
        log.info("baseurl %s, resourceinfo %r"%(baseurl, resource_info))
        write_bytes(response_file, "\n\n***** ERROR ****\n")
        write_bytes(response_file, "%s"%message.JSONLD_PARSE_ERROR)
        write_bytes(response_file, "\n%s:\n"%message.JSONLD_PARSE_REASON)
        write_bytes(response_file, reason)
        write_bytes(response_file, "\n\n")
    try:
        g.serialize(destination=response_file, format='turtle', indent=4)
    except Exception as e:
        # Serialization failure: likewise appended to the returned stream
        reason = str(e)
        log.warning(message.TURTLE_SERIALIZE_ERROR)
        log.info(reason)
        write_bytes(response_file, "\n\n***** ERROR ****\n")
        write_bytes(response_file, "%s"%message.TURTLE_SERIALIZE_ERROR)
        write_bytes(response_file, "\n%s:\n"%message.TURTLE_SERIALIZE_REASON)
        write_bytes(response_file, reason)
        write_bytes(response_file, "\n\n")
    response_file.seek(0)
    return response_file

def make_turtle_resource_info(json_resource):
    """
    Return Turtle resource description for fixed JSON resource
    """
    # NOTE(review): [0:-7] assumes every fixed resource name ends with a
    # 7-character suffix (e.g. ".jsonld") — confirm for all table entries.
    turtle_resource = (
        { "resource_name":   json_resource["resource_name"][0:-7]+".ttl"
        , "resource_dir":    json_resource["resource_dir"]
        , "resource_type":   "text/turtle"
        , "resource_access": turtle_resource_file
        })
    return turtle_resource

def find_fixed_resource(fixed_json_resources, resource_ref):
    """
    Return a description for the indicated fixed (built-in) resource from a
    supplied table, or None

    resource_ref    is the local name of the desired resource relative to the
                    base location of the entity (or collection or site data)
                    to which it belongs.

    The description returned is a dictionary with the following keys:

    resource_name:      filename of resource (i.e. part of URI path after final "/")
    resource_dir:       directory of resource (i.e. part of URI path up to final "/")
    resource_path:      a file or URI path to the resource data relative to the
                        URI of the entity to which it belongs.
    resource_type:      content-type of resource
    resource_access:    optional: if present, specifies a function that returns
                        an alternative representation of a JSON-LD data resource.
                        (e.g. see `turtle_resource_file`)
    """
    # log.debug("CollectionResourceAccess.find_resource %s/d/%s"%(coll.get_id(), resource_ref))
    for fj in fixed_json_resources:
        if fj["resource_name"] == resource_ref:
            # Direct match on the JSON resource name
            fr = dict(fj, resource_path=os.path.join(fj["resource_dir"]+"/", resource_ref))
            return fr
        # Also match the derived Turtle variant of the JSON resource
        ft = make_turtle_resource_info(fj)
        if ft["resource_name"] == resource_ref:
            fr = dict(ft, resource_path=os.path.join(ft["resource_dir"]+"/", resource_ref))
            return fr
    log.debug("EntityResourceAccess.find_fixed_resource: %s not found"%(resource_ref))
    return None

def find_entity_resource(entity, resource_ref, fixed_resources=entity_fixed_json_resources):
    """
    Return a description for the indicated entity resource, or None

    resource_ref    is the local name of the desired resource relative to the
                    entity to which it belongs.
    fixed_resources is a table of fixed resource information, not necessarily
                    referenced by the entity itself.

    The description returned is a dictionary with the following keys:

    resource_name:      filename of resource (i.e. part of URI path after final "/")
    resource_dir:       directory of resource (i.e. part of URI path up to final "/")
    resource_path:      a file or URI path to the resource data relative to the
                        URI of the entity to which it belongs.
    resource_type:      content-type of resource
    resource_access:    optional: if present, specifies a function that returns
                        an alternative representation of a JSON-LD data resource.
                        (e.g. see `turtle_resource_file`)
    """
    log.debug(
        "EntityResourceAccess.find_entity_resource %s/%s/%s"%
        (entity.get_type_id(), entity.get_id(), resource_ref)
        )
    # Fixed (built-in) resources take precedence
    fr = find_fixed_resource(fixed_resources, resource_ref)
    if fr:
        return fr
    # Look for resource description in entity data
    # @@TESTME
    for t, f in entity.enum_fields():
        log.debug("find_resource: t %s, f %r"%(t,f))
        if isinstance(f, dict):
            if f.get("resource_name", None) == resource_ref:
                f = dict(f, resource_path=resource_ref)
                return f
    return None

def find_list_resource(type_id, list_id, list_ref):
    """
    Return a description for the indicated entity resource, or None
    """
    log.debug(
        "EntityResourceAccess.find_list_resource %s/%s/%s"%
        (list_id, type_id, list_ref)
        )
    return find_fixed_resource(entity_list_json_resources, list_ref)

def get_resource_file(entity, resource_info, base_url):
    """
    Create a file object from which resource data can be read.

    resource_info   is a value returned by `find_fixed_resource` or
                    `find_entity_resource`
    base_url        is a base URL that may be used to resolving relative
                    references in the JSON-LD data.

    Returns a pair of values: the file object, and a content-type string.
    """
    if "resource_access" in resource_info:
        # Use indicated resource access renderer
        jsondata      = entity.get_values()
        resource_file = resource_info["resource_access"](base_url, jsondata, resource_info)
    else:
        # Return resource data direct from storage
        resource_file = entity.resource_file(resource_info["resource_path"])
    return (resource_file, resource_info["resource_type"])

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/entityresourceaccess.py
entityresourceaccess.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2014, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import shutil

from django.conf import settings

from utils.py3porting import isoformat_space, text_to_str

from annalist import layout
from annalist.exceptions import Annalist_Error
from annalist.identifiers import ANNAL
from annalist import util
from annalist.models.entity import Entity
from annalist.models.entitydata import EntityData

class RecordEnumBase(EntityData):
    """
    Base class for Annalist enumerated-value entities.

    Concrete enumeration classes are created dynamically by
    `RecordEnumFactory` below, which supplies a per-class type id;
    `_entitytypeid` here is a placeholder that is overridden per instance
    in `__init__`.
    """
    _entitytypeid = "_enum_base_id"
    _entitytype = ANNAL.CURIE.Enum
    _entityroot = layout.COLL_ENUM_PATH
    _entityview = layout.COLL_ENUM_VIEW
    _entityfile = layout.ENUM_META_FILE
    _entityprov = layout.ENUM_PROV_FILE

    def __init__(self, parent, entity_id, type_id):
        """
        Initialize a new enumerated-value entity.

        parent      is the parent entity (collection) containing this value.
        entity_id   local identifier of the enumerated value.
        type_id     type id used for this instance (overrides the class value).
        """
        # print("@@ RecordEnumBase.__init__ parentid %s, entityid %s"%(parent.get_id(), entity_id))
        self._entitytypeid = type_id
        super(RecordEnumBase, self).__init__(parent, entity_id)
        return

    def _migrate_filenames(self):
        """
        Override EntityData method: no filename migrations for enum entities.
        """
        return None

def RecordEnumFactory(name, type_id):
    """
    Returns a dynamically-created subclass of RecordEnumBase, using the
    supplied class name and type_id for all created instances.

    name        name of the generated class.
    type_id     type id baked into the generated class (used both as
                `_entitytypeid` and to instantiate the `_entityroot` path
                template).
    """
    # print("@@ RecordEnumFactory name %s, type_id %s"%(name, type_id))
    def RecordEnumInit(self, parent, entity_id):
        # Generated-class __init__: type id comes from the class attribute,
        # so the three-argument RecordEnumBase.__init__ is bypassed.
        # print("@@ RecordEnumInit parentid %s, entityid %s"%(parent.get_id(), entity_id))
        super(RecordEnumBase, self).__init__(parent, entity_id)
        return
    # `type_id` is substituted into the path template now; the entity id slot
    # is re-escaped ("%(id)s") for later substitution.
    return type(text_to_str(name), (RecordEnumBase,),
        { '_entitytypeid': type_id
        , '_entityroot': layout.COLL_ENUM_PATH%{'id': "%(id)s", 'type_id': type_id}
        , '__init__': RecordEnumInit}
        )

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordenum.py
recordenum.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

"""
Annalist site data

Site data is an alternative location for generic Annalist metadata
(e.g. type and view definitions, etc.) that are common across all
collections (and even installations).  It is implemented as a specially-
named Collection object.
"""

__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2014 G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

from annalist import layout
from annalist.identifiers import ANNAL
from annalist import util

from annalist.models.collection import Collection

class SiteData(Collection):
    """
    Specially-named Collection holding site-wide Annalist metadata.
    """

    _entitytype = ANNAL.CURIE.SiteData
    # _entitytypeid = layout.SITEDATA_TYPEID

    def __init__(self, parentsite, entityid=layout.SITEDATA_ID):
        """
        Initialize a new SiteData object, without metadata (yet).

        parentsite  is the parent site from which the new collection is descended.
        entityid    must be the reserved site-data collection id; any other
                    value raises ValueError.
        """
        if entityid != layout.SITEDATA_ID:
            raise ValueError("Site data initialized with incorrect entity id (%s)"%entityid)
        super(SiteData, self).__init__(parentsite, entityid)
        return

    @classmethod
    def create_sitedata(cls, parent, sitedata):
        """
        Method creates a site data entity.

        cls         is a class value used to construct the new entity value
        parent      is the parent site from which the new SiteData entity is descended.
        sitedata    is a dictionary of values that are stored for the created site data.

        Returns the site data collection as an instance of the supplied SiteData class.
        """
        log.debug("SiteData.create_sitedata: entityid %s"%(layout.SITEDATA_ID))
        return cls.create(parent, layout.SITEDATA_ID, sitedata)

    @classmethod
    def load_sitedata(cls, parent, test_exists=True):
        """
        Method loads a site data entity.

        cls         is a class value used to construct the new entity value
        parent      is the parent site from which the new SiteData entity is descended.
        test_exists unless this is supplied as False, generates an error if the
                    site metadata does not exist.

        Returns the site data collection as an instance of the supplied SiteData
        class, with data loaded from the corresponding Annalist storage, or None
        if there is no such collection data.
        """
        log.debug("SiteData.load_sitedata: entityid %s"%(layout.SITEDATA_ID))
        d = cls.load(parent, layout.SITEDATA_ID)
        if test_exists:
            # NOTE(review): existence check uses `assert`, which is stripped
            # under `python -O`; an explicit raise would be more robust.
            assert d, "Site data for %r not found"%(parent,)
        return d

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/sitedata.py
sitedata.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2014, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import json
import datetime
import traceback
from collections import OrderedDict

from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.conf import settings
from django.urls import resolve, reverse

from utils.py3porting import isoformat_space, urljoin

import annalist
from annalist.identifiers import RDF, RDFS, ANNAL
from annalist.exceptions import Annalist_Error, EntityNotFound_Error
from annalist import layout
from annalist import message
from annalist.util import (
    valid_id, extract_entity_id,
    replacetree, updatetree, expandtree
    )

from annalist.models.annalistuser import AnnalistUser
from annalist.models.entityroot import EntityRoot
from annalist.models.sitedata import SiteData
from annalist.models.collection import Collection
from annalist.models.recordvocab import RecordVocab
from annalist.models.recordview import RecordView
from annalist.models.recordfield import RecordField
from annalist.models.rendertypeinfo import (
    is_render_type_literal,
    is_render_type_id,
    is_render_type_set,
    is_render_type_list,
    is_render_type_object,
    )

class Site(EntityRoot):
    """
    Root entity for an Annalist site: holds site-wide configuration and
    provides access to the collections (including the special site-data
    collection) stored under the site base directory.
    """

    _entitytype = ANNAL.CURIE.Site
    _entitytypeid = layout.SITE_TYPEID
    _entitybase = layout.SITE_META_PATH
    _entityfile = layout.SITE_META_FILE
    _entityref = layout.META_SITE_REF
    _contextbase = layout.META_SITE_REF
    _contextref = layout.SITE_CONTEXT_FILE

    def __init__(self, sitebaseuri, sitebasedir, host=""):
        """
        Initialize a Site object

        sitebaseuri     the base URI of the site
        sitebasedir     the base directory for site information
        host            optional host prefix prepended to the site URI path
        """
        log.debug("Site.__init__: sitebaseuri %s, sitebasedir %s"%(sitebaseuri, sitebasedir))
        # Normalize base URI and directory to end with "/" so joins behave.
        sitebaseuri = sitebaseuri if sitebaseuri.endswith("/") else sitebaseuri + "/"
        sitebasedir = sitebasedir if sitebasedir.endswith("/") else sitebasedir + "/"
        sitepath = layout.SITE_META_PATH
        siteuripath = urljoin(sitebaseuri, sitepath)
        sitedir = os.path.join(sitebasedir, sitepath)
        self._sitedata = None   # Lazily-loaded site data collection (see site_data_collection)
        super(Site, self).__init__(host+siteuripath, siteuripath, sitedir, sitebasedir)
        self.set_id(layout.SITEDATA_ID)
        return

    def _exists(self):
        """
        The site entity has no explicit data, so always respond with 'True' to
        an _exists() query
        """
        return True

    def _children(self, cls, altscope=None):
        """
        Iterates over candidate child identifiers that are possible instances
        of an indicated class.  The supplied class is used to determine a
        subdirectory to be scanned.

        As a special case, the children are iterated only in a special
        `altscope` called "site".

        cls         is a subclass of Entity indicating the type of children to
                    iterate over.
        altscope    children are returned only when this is "site"; any other
                    value yields an empty iterator.
        """
        if altscope == "site":
            return self._base_children(cls)
        return iter(())  # Empty iterator

    def child_entity_ids(self, cls, altscope=None):
        """
        Iterates over child entity identifiers of an indicated class.

        If the altscope is "select", the altscope value used is "site"
        (so selection lists draw on site-wide definitions).

        cls         is a subclass of Entity indicating the type of children to
                    iterate over.
        altscope    if supplied, indicates a scope other than the current
                    entity to search for children.  See method
                    `get_alt_entities` for more details.
        """
        if altscope == "select":
            altscope = "site"
        return super(Site, self).child_entity_ids(cls, altscope=altscope)

    def site_data_collection(self, test_exists=True):
        """
        Return collection entity that contains the site data.

        test_exists unless this is supplied as False, generates an error if
                    the site metadata does not exist.
        """
        if self._sitedata is None:
            self._sitedata = SiteData.load_sitedata(self, test_exists=test_exists)
        return self._sitedata

    def site_data_stream(self):
        """
        Return stream containing the raw site data.
        """
        return self.site_data_collection()._read_stream()

    def site_data(self):
        """
        Return dictionary of site data
        """
        # @@TODO: consider using generic view logic for this mapping (and elsewhere?)
        # This is currently a bit of a kludge, designed to match the site
        # view template.  In due course, it may be reviewed and implemented
        # using the generic Annalist form generating framework
        site_data = self.site_data_collection().get_values()
        if not site_data:
            return None
        site_data["title"] = site_data.get(RDFS.CURIE.label, message.SITE_NAME_DEFAULT)
        log.debug("site.site_data: site_data %r"%(site_data))
        colls = OrderedDict()
        for k, v in self.collections_dict().items():
            # log.info("site.site_data: colls[%s] %r"%(k, v))
            colls[k] = dict(v.items(), id=k, url=v[ANNAL.CURIE.url], title=v[RDFS.CURIE.label])
        site_data["collections"] = colls
        return site_data

    def get_user_permissions(self, user_id, user_uri):
        """
        Get a site-wide user permissions record (AnnalistUser).

        To return a value, both the user_id and the user_uri (typically a
        mailto: URI, but may be any *authenticated* identifier) must match.
        This is to prevent access to records of a deleted account being
        granted to a new account created with the same user_id (username).

        user_id         local identifier for the type to retrieve.
        user_uri        authenticated identifier associated with the user_id.
                        That is, the authentication service used is presumed
                        to confirm that the identifier belongs to the user
                        currently logged in with the supplied username.

        returns an AnnalistUser object for the identified user, or None.
        This object contains information about permissions granted to the
        user in the current collection.
        """
        return self.site_data_collection().get_user_permissions(user_id, user_uri)

    def collections(self):
        """
        Generator enumerates and returns collection descriptions that are part
        of a site.  Yielded values are collection objects.
        """
        log.debug("site.collections: basedir: %s"%(self._entitydir))
        for f in self._base_children(Collection):
            c = Collection.load(self, f)
            # log.info("Site.colections: Collection.load %s %r"%(f, c.get_values()))
            if c:
                yield c
        return

    def collections_dict(self):
        """
        Return an ordered dictionary of collections indexed by collection id
        """
        coll = [ (c.get_id(), c) for c in self.collections() ]
        return OrderedDict(sorted(coll))

    def add_collection(self, coll_id, coll_meta, annal_ver=annalist.__version_data__):
        """
        Add a new collection to the current site

        coll_id     identifier for the new collection, as a string with a form
                    that is valid as URI path segment.
        coll_meta   a dictionary providing additional information about the
                    collection to be created.
        annal_ver   Override annalist version stored in collection metadata
                    (parameter provided for testing)

        returns a Collection object for the newly created collection.
        """
        d = dict(coll_meta)
        d[ANNAL.CURIE.software_version] = annal_ver
        c = Collection.create(self, coll_id, d)
        return c

    def remove_collection(self, coll_id):
        """
        Remove a collection from the site data.

        coll_id     identifier for the collection to remove.

        Returns a non-False status code if the collection is not removed.
        """
        log.debug("remove_collection: %s"%(coll_id))
        # The site data collection is protected against removal.
        if coll_id == layout.SITEDATA_ID:
            raise ValueError("Attempt to remove site data collection (%s)"%coll_id)
        return Collection.remove(self, coll_id)

    # JSON-LD context data

    def generate_site_jsonld_context(self):
        """
        (Re)generate JSON-LD context description for the current collection.
        """
        # Build context data
        context = self.site_data_collection().get_coll_jsonld_context()
        # Assemble and write out context description
        datetime_now = datetime.datetime.today().replace(microsecond=0)
        datetime_str = isoformat_space(datetime_now)
        with self._metaobj(
                layout.SITEDATA_CONTEXT_PATH,
                layout.SITE_CONTEXT_FILE,
                "wt"
                ) as context_io:
            json.dump(
                { "_comment": "Generated by generate_site_jsonld_context on %s"%datetime_str
                , "@context": context
                },
                context_io, indent=2, separators=(',', ': '), sort_keys=True
                )
        return

    # Site data
    #
    # These methods are used by test_createsitedata and annalist-manager to initialize
    # or update Annalist site data.  Tests are run using data copied from sampledata/init
    # to sampledata/data, allowing for additional test fixture files to be included.
    #

    @staticmethod
    def create_site_metadata(site_base_uri, site_base_dir, label=None, description=None):
        """
        Create new site metadata record for a new site, and return the Site object.

        This resets the site label and description that may have been updated
        by a site administrator.

        site_base_uri   base URI of the new site.
        site_base_dir   base directory of the new site.
        label           optional site label (default supplied if None).
        description     optional site description (default supplied if None).
        """
        datetime_now = datetime.datetime.today().replace(microsecond=0)
        if label is None:
            label = "Annalist linked data notebook site"
        if description is None:
            description = "Annalist site metadata and site-wide values."
        annal_comment = (
            "Initialized by annalist.models.site.create_site_metadata at "+
            isoformat_space(datetime_now)+" (UTC)"
            )
        site = Site(site_base_uri, site_base_dir)
        sitedata_values = (
            { RDFS.CURIE.label: label
            , RDFS.CURIE.comment: description
            , ANNAL.CURIE.meta_comment: annal_comment
            , ANNAL.CURIE.software_version: annalist.__version_data__
            })
        sitedata = SiteData.create_sitedata(site, sitedata_values)
        return site

    @staticmethod
    def create_site_readme(site):
        """
        Create new site README.md describing the site directory layout.

        site        the Site object for which the README is written.
        """
        datetime_now = datetime.datetime.today().replace(microsecond=0)
        README = ((
            """%(site_base_dir)s\n"""+
            """\n"""+
            """This directory contains Annalist site data for %(site_base_uri)s.\n"""+
            """\n"""+
            """Directory layout:\n"""+
            """\n"""+
            """ %(site_base_dir)s\n"""+
            """ c/\n"""+
            """ _annalist_site/ (site-wide definitions)\n"""+
            """ d/\n"""+
            """ coll_meta.jsonld (site metadata)\n"""+
            """ coll_context.jsonld (JSON-LD context for site definitions)\n"""+
            """ %(enum_field_placement_dir)s/\n"""+
            """ (field-placement-value)/\n"""+
            """ enum_meta.jsonld\n"""+
            """ :\n"""+
            """ %(enum_list_type_dir)s/\n"""+
            """ (list-type-id)/\n"""+
            """ enum_meta.jsonld\n"""+
            """ :\n"""+
            """ %(enum_render_type_dir)s/\n"""+
            """ (render-type-id)/\n"""+
            """ enum_meta.jsonld\n"""+
            """ :\n"""+
            """ %(enum_value_type_dir)s/\n"""+
            """ (value-type-id)/\n"""+
            """ enum_meta.jsonld\n"""+
            """ :\n"""+
            """ %(enum_value_mode_dir)s/\n"""+
            """ (value-mode-id)/\n"""+
            """ enum_meta.jsonld\n"""+
            """ :\n"""+
            """ %(field_dir)s/\n"""+
            """ (view-field definitions)\n"""+
            """ :\n"""+
            """ %(list_dir)s/\n"""+
            """ (entity list definitions)\n"""+
            """ :\n"""+
            """ %(type_dir)s/\n"""+
            """ (type definitions)\n"""+
            """ :\n"""+
            """ %(user_dir)s/\n"""+
            """ (user permissions)\n"""+
            """ :\n"""+
            """ %(view_dir)s/\n"""+
            """ (entity view definitions)\n"""+
            """ :\n"""+
            """ %(vocab_dir)s/\n"""+
            """ (vocabulary namespace definitions)\n"""+
            """ :\n"""+
            """ (collection-id)/ (user-created data collection)\n"""+
            """ d/\n"""+
            """ coll_meta.jsonld (collection metadata)\n"""+
            """ coll_context.jsonld (JSON-LD context for collection data)\n"""+
            """ %(type_dir)s/ (collection type definitions)\n"""+
            """ (type-id)/\n"""+
            """ type_meta.jsonld\n"""+
            """ :\n"""+
            """ %(list_dir)s/ (collection list definitions)\n"""+
            """ (list-id)/\n"""+
            """ list_meta.jsonld\n"""+
            """ :\n"""+
            """ %(view_dir)s/ (collection view definitions)\n"""+
            """ (view-id)/\n"""+
            """ view_meta.jsonld\n"""+
            """ :\n"""+
            """ %(field_dir)s/ (collection field definitions)\n"""+
            """ (field-id)/\n"""+
            """ field_meta.jsonld\n"""+
            """ :\n"""+
            """ %(group_dir)s/ (collection field group definitions)\n"""+
            """ (group-id)/\n"""+
            """ group_meta.jsonld\n"""+
            """ :\n"""+
            """ %(user_dir)s/ (collection user permissions)\n"""+
            """ (user-id)/\n"""+
            """ user_meta.jsonld\n"""+
            """ :\n"""+
            """ (type-id)/ (contains all entity data for identified type)\n"""+
            """ (entity-id)/ (contains data for identified type/entity)\n"""+
            """ entity_data.jsonld (entity data)\n"""+
            """ entity_prov.jsonld (entity provenance @@TODO)\n"""+
            """ (attachment files) (uploaded/imported attachments)\n"""+
            """\n"""+
            """ : (repeat for entities of this type)\n"""+
            """\n"""+
            """ : (repeat for types in collection)\n"""+
            """\n"""+
            """ : (repeat for collections in site)\n"""+
            """\n"""+
            """Created by annalist.models.site.py\n"""+
            """for Annalist %(version)s at %(datetime)s (UTC)\n"""+
            """\n"""+
            """\n""")%
            { 'site_base_dir': site._entitydir
            , 'site_base_uri': site._entityurl
            , 'datetime': isoformat_space(datetime_now)
            , 'version': annalist.__version__
            , 'enum_field_placement_dir': layout.ENUM_FIELD_PLACEMENT_DIR
            , 'enum_list_type_dir': layout.ENUM_LIST_TYPE_DIR
            , 'enum_render_type_dir': layout.ENUM_RENDER_TYPE_DIR
            , 'enum_value_type_dir': layout.ENUM_VALUE_TYPE_DIR
            , 'enum_value_mode_dir': layout.ENUM_VALUE_MODE_DIR
            , 'field_dir': layout.FIELD_DIR
            , 'group_dir': layout.GROUP_DIR
            , 'list_dir': layout.LIST_DIR
            , 'type_dir': layout.TYPE_DIR
            , 'user_dir': layout.USER_DIR
            , 'view_dir': layout.VIEW_DIR
            , 'vocab_dir': layout.VOCAB_DIR
            })
        with site._fileobj("README", ANNAL.CURIE.Richtext, "text/markdown", "wt") as readme:
            readme.write(README)
        return

    @staticmethod
    def create_empty_coll_data(
            site, coll_id,
            label=None, description=None):
        """
        Create empty collection, and returns the Collection object.

        site        the Site in which the collection is created.
        coll_id     identifier for the new collection.
        label       optional collection label (default supplied if None).
        description optional collection description (default supplied if None).
        """
        # @@TESTME
        datetime_now = datetime.datetime.today().replace(microsecond=0)
        if label is None:
            label = "Collection %s"%coll_id
        if description is None:
            description = "Annalist data collection %s"%coll_id
        annal_comment = (
            "Initialized by annalist.models.site.create_empty_coll_data at "+
            isoformat_space(datetime_now)+" (UTC)"
            )
        coll_values = (
            { RDFS.CURIE.label: label
            , RDFS.CURIE.comment: description
            , ANNAL.CURIE.meta_comment: annal_comment
            , ANNAL.CURIE.software_version: annalist.__version_data__
            })
        coll = Collection.create(site, coll_id, coll_values)
        return coll

    @staticmethod
    def replace_site_data_dir(sitedata, sdir, site_data_src):
        """
        Replace indicated sitedata directory data from source:
        old data for the directory is removed.
        """
        site_data_tgt, site_data_file = sitedata._dir_path()
        s = os.path.join(site_data_src, sdir)
        d = os.path.join(site_data_tgt, sdir)
        if os.path.isdir(s):
            replacetree(s, d)
        return

    @staticmethod
    def update_site_data_dir(sitedata, sdir, site_data_src):
        """
        Update indicated sitedata directory data from source:
        old data for the directory that is not updated is left as-is.
        """
        site_data_tgt, site_data_file = sitedata._dir_path()
        s = os.path.join(site_data_src, sdir)
        d = os.path.join(site_data_tgt, sdir)
        if os.path.isdir(s):
            updatetree(s, d)
        return

    @staticmethod
    def expand_site_data_dir(sitedata, sdir, site_data_src):
        """
        Expand indicated sitedata directory data from source:
        existing data is left as-is, even when a file of the same name
        exists in the source tree.
        """
        site_data_tgt, site_data_file = sitedata._dir_path()
        s = os.path.join(site_data_src, sdir)
        d = os.path.join(site_data_tgt, sdir)
        if os.path.isdir(s):
            expandtree(s, d)
        return

    @staticmethod
    def initialize_site_data(
            site_base_uri, site_base_dir, site_data_src,
            label=None, description=None):
        """
        Initializes site data for a new site for testing.

        Creates a README.md file in the site base directory, and creates a
        collection _annalist_site containing built-in types, views, etc.
        """
        site = Site.create_site_metadata(
            site_base_uri, site_base_dir,
            label=label, description=description
            )
        sitedata = site.site_data_collection()
        Site.create_site_readme(site)
        site_data_tgt, site_data_file = sitedata._dir_path()
        log.info("Copy Annalist site data from %s to %s"%(site_data_src, site_data_tgt))
        for sdir in layout.COLL_DIRS:
            log.info("- %s -> %s"%(sdir, site_data_tgt))
            Site.replace_site_data_dir(sitedata, sdir, site_data_src)
        sitedata.generate_coll_jsonld_context()
        return site

    @staticmethod
    def initialize_bib_data(site, bib_data_src ): # label=None, description=None
        """
        Initializes bibliography definitions data for a new site for testing.
        """
        bibdatacoll = site.create_empty_coll_data(site, layout.BIBDATA_ID,
            label="Bibliographic record definitions",
            description=
                "Definitions for bibliographic records, broadly following BibJSON. "+
                "Used for some Annalist test cases."
            )
        bib_data_tgt, bib_data_file = bibdatacoll._dir_path()
        log.info("Copy Annalist bibliographic definitions data from %s to %s"%(bib_data_src, bib_data_tgt))
        for sdir in layout.DATA_DIRS:
            log.info("- %s -> %s"%(sdir, bib_data_tgt))
            Site.replace_site_data_dir(bibdatacoll, sdir, bib_data_src)
        bibdatacoll.generate_coll_jsonld_context()
        return bibdatacoll

    # @staticmethod
    # def initialize_named_coll(
    #     site, coll_id, coll_data_src,
    #     label=None, description=None):
    #     """
    #     Initializes bibliography definitions data for a new site for testing.
    #     """
    #     namedcoll = site.create_empty_coll_data(
    #         site, coll_id,
    #         label=label, description=description
    #         )
    #     coll_data_tgt, coll_data_file = namedcoll._dir_path()
    #     log.info("Copy Annalist %s definitions data from %s to %s"%(coll_id, coll_data_src, coll_data_tgt))
    #     for sdir in layout.DATA_DIRS:
    #         log.info("- %s -> %s"%(sdir, coll_data_tgt))
    #         Site.replace_site_data_dir(namedcoll, sdir, coll_data_src)
    #     namedcoll.generate_coll_jsonld_context()
    #     return namedcoll

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/site.py
site.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2014, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

import os
import os.path
import shutil
import traceback

from django.conf import settings

from annalist import layout
from annalist.exceptions import Annalist_Error
from annalist.identifiers import ANNAL
from annalist import util
from annalist.models.entity import Entity
from annalist.models.entitydata import EntityData
from annalist.util import extract_entity_id

class RecordGroup(EntityData):
    """
    Deprecated field-group entity: use and loading of `_group` records is
    logged as a warning unless `_deprecation_warning` is overridden
    (see RecordGroup_migration).
    """

    _entitytype = ANNAL.CURIE.Field_group
    _entitytypeid = layout.GROUP_TYPEID
    _entityroot = layout.COLL_GROUP_PATH
    _entityview = layout.COLL_GROUP_VIEW
    _entityfile = layout.GROUP_META_FILE

    # When True, instantiation/loading emits a deprecation warning in the log.
    _deprecation_warning = True

    def __init__(self, parent, group_id):
        """
        Initialize a new RecordGroup object, without metadata (yet).

        parent      is the parent collection in which the group is defined.
        group_id    the local identifier for the field group
        """
        # print("@@ RecordGroup.__init__ parentid %s, entityid %s"%(parent.get_id(), group_id))
        if self._deprecation_warning:
            # Fix: use `log.warning` - `log.warn` is a deprecated alias,
            # removed in Python 3.13.
            log.warning("Instantiating _group/%s for collection %s"%(group_id, parent.get_id()))
            # log.debug("".join(traceback.format_stack()))
        super(RecordGroup, self).__init__(parent, group_id)
        self._parent = parent
        # log.debug("RecordGroup %s: dir %s"%(group_id, self._entitydir))
        return

    @classmethod
    def load(cls, parent, entityid, altscope=None, deprecation_warning=False):
        """
        Overloaded load method with default deprecation warning

        parent      is the parent collection from which the group is loaded.
        entityid    the local identifier for the field group.
        altscope    if supplied, indicates a scope other than the current
                    collection to search for the group.
        """
        # NOTE(review): the `deprecation_warning` parameter is accepted but not
        # consulted; warning output is controlled solely by the class attribute
        # `_deprecation_warning`.  Preserved as-is pending confirmation of intent.
        if cls._deprecation_warning:
            # Fix: `log.warning` instead of deprecated `log.warn` (see __init__).
            log.warning("Loading _group/%s for collection %s"%(entityid, parent.get_id()))
            # log.debug("".join(traceback.format_stack()))
        return super(RecordGroup, cls).load(parent, entityid, altscope=altscope)

    def _migrate_filenames(self):
        """
        Override EntityData method: no filename migrations for group entities.
        """
        return None

    def _migrate_values(self, entitydata):
        """
        Group description entity format migration method.

        The specification for this method is that it returns an entitydata
        value which is a copy of the supplied entitydata with format
        migrations applied.

        NOTE: implementations are free to apply migrations in-place.  The
        resulting entitydata should be exactly as the supplied data *should*
        appear in storage to conform to the current format of the data.  The
        migration function should be idempotent; i.e.
            x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)
        """
        # Rename deprecated property URI keys.
        migration_map = (
            [ (ANNAL.CURIE.record_type, ANNAL.CURIE.group_entity_type)
            ])
        entitydata = self._migrate_values_map_field_names(migration_map, entitydata)
        # Map renamed field ids referenced by the group's field list.
        for f in entitydata.get(ANNAL.CURIE.group_fields, []):
            field_id = extract_entity_id(f[ANNAL.CURIE.field_id])
            if field_id == "Field_render":
                f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/Field_render_type"
            if field_id == "Field_type":
                f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/Field_value_type"
            if field_id == "View_target_type":
                f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/View_entity_type"
            if field_id == "List_target_type":
                f[ANNAL.CURIE.field_id] = layout.FIELD_TYPEID+"/List_entity_type"
        # Return result
        return entitydata

    def _post_update_processing(self, entitydata, post_update_flags):
        """
        Post-update processing.

        This method is called when a RecordGroup entity has been updated.

        It invokes the containing collection method to regenerate the JSON LD
        context for the collection to which the group belongs.
        """
        self._parent.generate_coll_jsonld_context(flags=post_update_flags)
        return entitydata

class RecordGroup_migration(RecordGroup):
    """
    Variation of RecordGroup with suppressed instantiation warning,
    used for migrating old data.
    """

    _deprecation_warning = False

    def __init__(self, parent, group_id):
        super(RecordGroup_migration, self).__init__(parent, group_id)
        return

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordgroup.py
recordgroup.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import os import os.path import shutil import json import errno from django.conf import settings from utils.py3porting import is_string, urljoin from annalist import layout from annalist import message from annalist import util from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL, RDF, RDFS from annalist.resourcetypes import file_extension, file_extension_for_content_type from annalist.util import valid_id, make_type_entity_id, make_entity_base_url # ------------------------------------------------------------------------------------------- # # EntityRoot # # ------------------------------------------------------------------------------------------- class EntityRoot(object): """ This is the base class for entities that do not have any parent entity (e.g. Annalist Site objects). An entity presents at least the following interface: cls._entitytype type of entity (CURIE or URI) cls._entitytypeid local type id (slug) used in local URI construction cls._entityfile name of file where the entity data is stored cls._entityref relative reference to entity from body file cls._contextref relative reference to context from body file self._entityid ID of entity; may be None for "root" entities (e.g. site?) self._entityurl URI at which entity is accessed self._entitydir directory where entity is stored self._entitybasedir base directory where all data is stored self._values dictionary of values in entity body See also 'Entity' """ _entitytype = ANNAL.CURIE.EntityRoot _entitytypeid = None # To be overridden _entitybase = None # To be overridden _entityfile = None # To be overriden by entity subclasses.. 
_entityref = None # Relative ref to entity from body file _contextbase = None # Relative ref to collection base URI from body file _contextref = None # Relative ref to context file from body file def __init__(self, entityurl, entityviewurl, entitydir, entitybasedir): """ Initialize a new Entity object, possibly without values. The created entity is not saved to disk at this stage - see .save() method. entityurl is the base URI at which the entity is accessed entityviewurl is a URI reference that indicates the preferred URI path for accessing the current entity, where this may be the same entitydir is the base directory containing the entity entitybasedir is a directory that contains all data, directly or indirectly, associated with this entity or any possible descendents. The value is used as a safety check to ensure that data is not created or deleted outside an area that is known to contain only Annalist data. """ self._entityid = None self._ancestorid = None self._entityurl = make_entity_base_url(entityurl) self._entityviewurl = make_entity_base_url(entityviewurl) self._entitydir = make_entity_base_url(entitydir) self._entitybasedir = entitybasedir self._values = None self._errors = [] # log.debug("EntityRoot.__init__: entity URI %s, entity dir %s"%(self._entityurl, self._entitydir)) return def __repr__(self): return ( "EntityRoot: entitytypeid %s, entityid %s, entitydir %s, values %r"% (self._entitytypeid, self._entityid, self._entitydir, self._values) ) # General entity access methods def set_id(self, entityid): self._entityid = entityid return def get_id(self): return self._entityid def get_type_id(self): return self._entitytypeid def get_type_entity_id(self): """ Return type+entity Id that is unique within collection """ return make_type_entity_id(self._entitytypeid, self._entityid) def get_label(self): """ Return label string for the current entity. 
""" return self._values.get(RDFS.CURIE.label, self._entityid) def get_url(self, baseurl=""): """ Get fully qualified URL referred to supplied base. NOTE: entities are stored, as far as possible, using relative references. But when an absolute reference is required, the current context URL must be taken into account. If the URL returned by this function is stored, subsequent references to that value will be fixed, not relative, so the value should only be stored where they may be used as identifiers or "permalink" style locators, so the data can continue to be used when moved to a new location. NOTE: this function always returns the primary URL associated with the entity. Where the entity is accessed at a secondary location, that is handled internally and not exposed through this function. E.g. site-wide metadata entities are presented as belonging to a collection. This allows for collection-specific specializations to be created without changing the URI used. """ # log.debug("EntityRoot.get_url: baseurl %s, _entityurl %s"%(baseurl, self._entityurl)) return urljoin(baseurl, self._entityurl) def get_view_url(self, baseurl=""): """ Return URL used to view entity data. For metadata entities, this may be different from the URL at which the resource is located, per get_url(). The intent is to provide a URL that works regardless of whether the metadata is stored as site-wide or collection-specific data. This implementation just uses get_url(), but for entities that belong to a collection the URL is mapped via the web application to the underlying storage location. """ # log.debug("EntityRoot.get_view_url: baseurl %s, _entityurl %s"%(baseurl, self._entityurl)) return urljoin(baseurl, self._entityviewurl) def get_view_url_path(self, baseurl=""): """ Return URL path used to view entity data. 
This is the URI-path of the URL returned by get_view_url (above) """ # log.debug("EntityRoot.get_view_url_path: baseurl %s, _entityurl %s"%(baseurl, self._entityurl)) return util.entity_url_path(self.get_view_url(), "") def get_alt_entities(self, altscope=None): """ Returns a list of alternative entities to the current entity to search for possible child entities. The root entity has no such alternatives. """ return [] def set_values(self, values): """ Set or update values for a collection """ self._values = values.copy() self._values[ANNAL.CURIE.id] = self._values.get(ANNAL.CURIE.id, self._entityid) self._values[ANNAL.CURIE.type_id] = self._values.get(ANNAL.CURIE.type_id, self._entitytypeid) self._values[ANNAL.CURIE.type] = self._values.get(ANNAL.CURIE.type, self._entitytype) if ANNAL.CURIE.url not in self._values: self._values[ANNAL.CURIE.url] = self.get_view_url_path() # log.info("set_values %r"%(self._values,)) return self._values def get_values(self): """ Return collection metadata values """ return self._values def get_save_values(self): """ Return values that are or will be recorded when entity is saved. Also used for cache values. """ values = self._values.copy() if self._entityid != layout.INITIAL_VALUES_ID: values = self._pre_save_processing(values) values['@id'] = self._entityref values['@type'] = self._get_types(values.get('@type', None)) values['@context'] = ( [ { '@base': self._contextbase } , self._contextref ]) #@@TODO: is this next needed? Put logic in set_values? # if self._entityid: # values[ANNAL.CURIE.id] = self._entityid #@@ values.pop(ANNAL.CURIE.url, None) return values def set_error(self, msg): """ Records error/diagnostic information in an entity """ self._errors.append(msg) return def get_errors(self): """ Returns a list of error/diagnostic entries associated with an entity, or an empty list """ return self._errors def get_uri(self): """ Return URI for current entity, which may be set explicitly or derived from its present URL. 
""" return self._values.get(ANNAL.CURIE.uri, self._values[ANNAL.CURIE.url]) def enum_fields(self): """ Enumerate fields in entity. Recurses into fields that are lists or sequences of dictionaries, this being the structure used for repeated field groups. Yields `(path, value)` pairs, where `path` is a list of index values applied successively to access the corresponding value. """ def is_dict(e): return isinstance(e, dict) def enum_f(p, d): for k in d: if isinstance(d[k], (list, tuple)) and all([is_dict(e) for e in d[k]]): for i in range(len(d[k])): for f in enum_f(p+[k,i], d[k][i]): yield f else: yield (p+[k], d[k]) return if self._values: for f in enum_f([], self._values): yield f else: log.error("EntityRoot.enum_fields: no entity values present") return def resource_file(self, resource_ref): """ Returns a file object value for a resource associated with an entity, or None if the resource is not present. """ if self._exists_path(): # (body_dir, _) = self._dir_path() body_dir = self._entitydir log.debug("EntityRoot.resource_file: dir %s, resource_ref %s"%(body_dir, resource_ref)) file_name = os.path.join(body_dir, resource_ref) if os.path.isfile(file_name): return open(file_name, "rb") return None def get_field(self, path): """ Returns a field value corresponding to a path returned by enum_fields. """ def get_f(p, v): if p == []: return v else: return get_f(p[1:], v[p[0]]) return get_f(path, self._values) def child_entity_ids(self, cls, altscope=None): """ Iterates over child entity identifiers of an indicated class. The supplied class is used to determine a subdirectory to be scanned. cls is a subclass of Entity indicating the type of children to iterate over. altscope if supplied, indicates a scope other than the current entity to search for children. See method `get_alt_entities` for more details. 
""" if altscope == "select": altscope = "all" for i in self._children(cls, altscope=altscope): if cls.exists(self, i, altscope=altscope): yield i return # I/O helper functions def _dir_path(self): """ Return directory and path for current entity body file """ if self._entitybase is None: msg = ( "EntityRoot._dir_path without defined entity base reference (%s/%s)"% (self._entitytypeid, self._entityid,) ) log.error(msg) raise ValueError(msg) if self._entityfile is None: msg = ( "EntityRoot._dir_path without defined entity file path (%s/%s)"% (self._entitytypeid, self._entityid,) ) log.error(msg) raise ValueError(msg) # log.info(" _ EntityRoot._dir_path _entitydir %s"%(self._entitydir,)) # log.info(" _ EntityRoot._dir_path _entitybase %s"%(self._entitybase,)) # log.info(" _ EntityRoot._dir_path _entityfile %s"%(self._entityfile,)) (basedir, filepath) = util.entity_dir_path(self._entitydir, [self._entitybase], self._entityfile) # log.info(" _ EntityRoot._dir_path basedir, filepath %s, %s"%(basedir, filepath)) return (basedir, filepath) def _dir_path_uri(self): (d, p) = self._dir_path() return (d, p, self._entityurl) def _exists_path(self): """ Test if the entity denoted by the current object has been created. If found, also sets the entity in-use URL value for .get_url() returns path of of object body, or None """ (d, p, u) = self._dir_path_uri() # log.info("EntityRoot._exists_path %s"%(p)) if d and os.path.isdir(d): if p and os.path.isfile(p): # log.info("EntityRoot._exists_path %s: OK"%(p)) return p mp = self._migrate_path() if mp and os.path.isfile(mp): assert mp == p, "EntityRoot._exists_path: Migrated filename %s, expected %s"%(mp, p) # log.info("EntityRoot._exists_path %s: Migrated from %s"%(mp, p)) return mp # log.info("EntityRoot._exists_path %s: not present"%(p)) return None def _exists(self): """ Test if the entity denoted by the current object has been created. returns True or False. 
""" return self._exists_path() is not None def _get_types(self, types): """ Processes a supplied type value and returns a list of types to be stored. 1. None is converted to an empty list 2. A simple string is wrapped in a list 3. A tuple is converted to a list 4. If not already present, the current entity type is added to the list """ if types is None: types = [] elif isinstance(types, (tuple, list)): types = list(types) # Make mutable copy else: types = [types] if self._entitytype not in types: types.append(self._entitytype) return types def _save(self, post_update_flags=None): """ Save current entity to Annalist storage """ # @@TODO: think about capturing provenance metadata too. if not self._entityref: msg = "Entity._save without defined entity reference" log.error(msg) raise ValueError(msg) if not self._contextbase: msg = "Entity._save without defined context base" log.error(msg) raise ValueError(msg) if not self._contextref: msg = "Entity._save without defined context reference" log.error(msg) raise ValueError(msg) if not self._values: msg = "Entity._save without defined entity values" log.error(msg) raise ValueError(msg) (body_dir, body_file) = self._dir_path() # log.debug("EntityRoot._save: dir %s, file %s"%(body_dir, body_file)) fullpath = os.path.join(settings.BASE_SITE_DIR, body_file) # Next is partial protection against code errors if not fullpath.startswith(settings.BASE_SITE_DIR): log.error("EntityRoot._save: Failing to create entity at %s"%(fullpath,)) log.info("EntityRoot._save: dir %s, file %s"%(body_dir, body_file)) log.info("EntityRoot._save: settings.BASE_DATA_DIR %s"%(settings.BASE_DATA_DIR,)) msg = ( "Attempt to create entity file outside Annalist site tree (%s)"% fullpath ) log.error(msg) raise ValueError(msg) # Create directory (if needed) and save data util.ensure_dir(body_dir) values = self.get_save_values() with open(fullpath, "wt") as entity_io: json.dump(values, entity_io, indent=2, separators=(',', ': '), sort_keys=True) 
self._post_update_processing(values, post_update_flags) return def _remove(self, type_uri, post_remove_flags=None): """ Remove current entity from Annalist storage. Requires type_uri supplied as a double-check that the expected entity is being removed. """ d = self._entitydir # Extra check to guard against accidentally deleting wrong thing if type_uri in self._values['@type'] and d.startswith(self._entitybasedir): shutil.rmtree(d) else: log.error("Expected type_uri: %r, got %r"%(type_uri, e[ANNAL.CURIE.type])) log.error("Expected dirbase: %r, got %r"%(parent._entitydir, d)) raise Annalist_Error("Entity %s unexpected type %s or path %s"%(entityid, e[ANNAL.CURIE.type_id], d)) self._post_remove_processing(post_remove_flags) return def _load_values(self): """ Read current entity from Annalist storage, and return entity body. Adds value for 'annal:url' to the entity data returned. """ # log.debug("EntityRoot._load_values %s/%s"%(self.get_type_id(), self.get_id())) body_file = self._exists_path() if body_file: # log.debug("EntityRoot._load_values body_file %r"%(body_file,)) try: # @@TODO: rework name access to support different underlays with self._read_stream() as f: entitydata = json.load(util.strip_comments(f)) # log.debug("EntityRoot._load_values: url_path %s"%(self.get_view_url_path())) entitydata[ANNAL.CURIE.url] = self.get_view_url_path() return entitydata except IOError as e: if e.errno != errno.ENOENT: raise log.error("EntityRoot._load_values: no file %s"%(body_file)) except ValueError as e: log.error("EntityRoot._load_values: error loading %s"%(body_file)) log.error(e) return ( { "@error": body_file , "@message": "Error loading entity values %r"%(e,) }) return None def _ensure_values_loaded(self): """ If values are not loaded and present for the current entity, read and store them. Returns the values loaded, or None. 
""" if self._values is None: # log.debug( # "_ensure_values_loaded: _entitydir %s, _entityfile %s"% # (self._entitydir, self._entityfile) # ) vals = self._load_values() if vals: vals = self._migrate_values(vals) self.set_values(vals) return self._values def _migrate_path(self): """ Migrate entity data filenames from those used in older software versions. Returns name of migrated file if migration performed, otherwise None """ # log.debug("EntityRoot._migrate_path (%r)"%(self._migrate_filenames(),)) if self._migrate_filenames() is None: # log.debug("EntityRoot._migrate_path (skip)") return for old_data_filename in self._migrate_filenames(): # This logic migrates data from previous filenames (basedir, old_data_filepath) = util.entity_dir_path(self._entitydir, [], old_data_filename) if basedir and os.path.isdir(basedir): if old_data_filepath and os.path.isfile(old_data_filepath): # Old body file found here (d, new_data_filepath) = self._dir_path() log.info( "EntityRoot._migrate_path: Migrate file %s to %s"% (old_data_filepath, new_data_filepath) ) os.rename(old_data_filepath, new_data_filepath) return new_data_filepath # log.debug("EntityRoot._migrate_path (not found)") return None def _migrate_filenames(self): """ Default method for filename migration. Returns a list of filenames used for the current entity type in previous versions of Annalist software. If the expected filename is not found when attempting to read a file, the _load_values() method calls this function to and looks for any of the filenames returned. If found, the file is renamed to the current version filename. Default method returns None, which signals that no migration is to be performed. """ return None def _migrate_values(self, entitydata): """ Default method for entity format migration hook. The specification for this method is that it returns an entitydata value which is a copy of the supplied entitydata with format migrations applied. 
This default implementation applies no migrations, and simply returns the supplied value. The method may be overridden for entity types and instances for which migrations are to be applied. NOTE: implementations are free to apply migrations in-place. The resulting entitydata should be exctly as the supplied data *should* appear in storage to conform to the current format of the data. The migration function should be idempotent; i.e. x._migrate_values(x._migrate_values(e)) == x._migrate_values(e) """ return entitydata @classmethod def _migrate_values_map_field_names(cls, migration_map, entitydata): """ Support function to map field names using a supplied map. The map is a list of pairs (old_uri, new_uri), where occurrences of the old property URI are replaced with the same value using the new URI. The migrations are applied in-place, and the resulting updated entity data is returned. """ for old_key, new_key in migration_map: if old_key in entitydata: entitydata[new_key] = entitydata.pop(old_key) # Return result return entitydata @classmethod def _pre_save_validation(cls, type_id, entity_id, entitydata): """ Pre-save value validation. This method is called before a value is saved, to validate user- entered data. Individual entity classes may provide their own override methods for this (e.g., to perform checks that are specific to a class. The intent is that this will eventually be used to hook into a user-definable validation framework). Returns a list of strings describing any errors detected, or an empty list if no problems are found. 
""" errs = [] if not valid_id(entity_id): log.debug("_pre_save_validation: entity_id not valid ('%s')"%entity_id) errs.append((message.INPUT_VALIDATION_ERROR, message.ENTITY_DATA_ID_INVALID)) if not valid_id(type_id): log.debug("_pre_save_validation: type_id not valid_id('%s')"%type_id) errs.append((message.INPUT_VALIDATION_ERROR, message.ENTITY_TYPE_ID_INVALID)) return errs def _pre_save_processing(self, entitydata): """ Pre-save value processing. This method is called just before a value is saved to fill in or update any values that were not specified in the form input. The specification for this method is that it returns an entitydata value which is a copy of the supplied entitydata with any data updates applied. NOTE: implementations are free to apply updates in-place. The resulting entitydata should be exactly as the supplied data *should* appear in storage. The update function should be idempotent; i.e. x._pre_save_processing(x._pre_save_processing(e)) == x._pre_save_processing(e) Individual entity classes may provide their own override methods for this (e.g., to fill in values that have not been otherwise specified). """ return entitydata def _post_update_processing(self, entitydata, post_update_flags): """ Default method for post-update processing. This method is called just after an entity has been ceated or updated. Individual entity classes may provide their own override methods for this. (e.g. to trigger regeneration of context data when groups, views, fields or vocabulary descriptions are updated.) """ return entitydata def _post_remove_processing(self, post_update_flags): """ Default method for post-remove processing. This method is called when an entity has been removed. Individual entity classes may provide their own override methods for this. """ return def _base_children(self, cls): """ Iterates over candidate child identifiers that are possible instances of an indicated class. The supplied class is used to determine a subdirectory to be scanned. 
cls is a subclass of Entity indicating the type of children to iterate over. """ parent_dir = os.path.dirname(os.path.join(self._entitydir, cls._entityroot or "")) assert "%" not in parent_dir, "_entityroot template variable interpolation may be in filename part only" child_files = [] if os.path.isdir(parent_dir): child_files = os.listdir(parent_dir) for fil in child_files: if util.valid_id(fil, reserved_ok=True): yield fil return def _children(self, cls, altscope=None): """ Iterates over candidate child identifiers that are possible instances of an indicated class. The supplied class is used to determine a subdirectory to be scanned. cls is a subclass of Entity indicating the type of children to iterate over. altscope if the supplied value is "site", returns an empty iterator, otherwise an iterator over child entities. NOTE: `Site` class overrides this. """ # log.info("@@ EntityRoot._children: parent %s, altscope %s"%(self.get_id(), altscope)) if altscope != "site": return self._base_children(cls) return iter(()) # Empty iterator def _entity_files(self): #@@TODO: abstract logic to work with non-file storage # Used by 'entitytypeinfo' """ Iterates over files/resources (not subdirectories) that are part of the current entity. Returns pairs (p,f), where 'p' is a full path name, and 'f' is a filename within the current entity directory. """ for f in os.listdir(self._entitydir): p = os.path.join(self._entitydir, f) if os.path.isfile(p): yield (p, f) return def _copy_entity_files(self, src_entity): #@@TODO: abstract logic to work with non-file storage # Used by 'entityedit', 'am_managecollections' """ Copy metadata abnd attached resources from the supplied `src_entity` to the current entity. Resources that already exist for the current entty are not copied. returns list of error messages; an empty list indicates success. 
""" msgs = [] for p, f in src_entity._entity_files(): if not self._exists_file(f): p_new = self._copy_file(p, f) if not p_new: msg_vals = ( { 'id': self.get_id() , 'src_id': src_entity.get_id() , 'file': f }) log.warning( "EntityRoot._copy_entity_files: error copying file %(file)s from %(src_id)s to %(id)s"% msg_vals ) msgs.APPEND(message.ENTITY_COPY_FILE_ERROR%msg_vals) return msgs def _exists_file(self, f): #@@TODO: abstract logic to work with non-file storage # Used by 'entitytypeinfo' """ Test if a file named 'f' exists in the current entity directory """ return os.path.isfile(os.path.join(self._entitydir, f)) def _copy_file(self, p, f): #@@TODO: abstract logic to work with non-file storage # Used by 'entitytypeinfo' """ Copy file with path 'p' to a new file 'f' in the current entity directory """ new_p = os.path.join(self._entitydir, f) try: shutil.copy(p, new_p) except shutil.Error as e: log.error('shutil.copy error: %s' % e) return None except IOError as e: log.error('shutil.copy IOError: %s' % e.strerror) return None return new_p def _rename_files(self, old_entity): #@@TODO: abstract logic to work with non-file storage # Used by 'entitytypeinfo' """ Rename old entity files to path of current entity (which must not exist), and return path to resulting entity, otherwise None. 
""" new_p = None if os.path.exists(self._entitydir): log.error("EntityRoot._rename_files: destination %s already exists"%(self._entitydir,)) elif not self._entitydir.startswith(self._entitybasedir): log.error( "EntityRoot._rename_files: new expected dirbase: %r, got %r"% (self._entitybasedir, self._entitydir) ) elif not old_entity._entitydir.startswith(self._entitybasedir): log.error( "EntityRoot._rename_files: old expected dirbase: %r, got %r"% (self._entitybasedir, old_entity._entitydir) ) else: try: os.rename(old_entity._entitydir, self._entitydir) new_p = self._entitydir except IOError as e: log.error("EntityRoot._rename_files: os.rename IOError: %s" % e.strerror) return new_p def _fileobj(self, localname, filetypeuri, mimetype, mode): #@@TODO: abstract logic to work with non-file storage # Used by 'entity', 'entitytypeinfo', 'site', 'entityedit', 'util', # 'test_import_resource', 'test_render_ref_multifields', 'test_upload_file' """ Returns a file object for accessing a blob associated with the current entity. localname is the local name used to identify the file/resource among all those associated with the current entity. filetypeuri is a URI/CURIE that identifies the type of data stored in the blob. mimetype is a MIME content-type string for the resource representation used, used in selecting the file extension to be used, or None in which case a default file extension for the type is used. mode is a string defining how the resource is opened (using the same values as the built-in `open` function). 
""" (body_dir, body_file) = self._dir_path() # Same as `_save` file_ext = ( file_extension_for_content_type(filetypeuri, mimetype) or file_extension(filetypeuri) ) file_name = os.path.join(body_dir, localname+"."+file_ext) return open(file_name, mode) def _metaobj(self, localpath, localname, mode): #@@TODO: abstract logic to work with non-file storage # Used by 'collection', 'site' """ Returns a file object for accessing a metadata resource associated with the current entity. localpath is the local directory path (relative to the current entity's data) where the metadata resource will be accessed. localname is the local name used to identify the file/resource among all those associated with the current entity. mode is a string defining how the resource is opened (using the same values as the built-in `open` function). """ (body_dir, body_file) = self._dir_path() # Same as `_save` local_dir = os.path.join(body_dir, localpath) util.ensure_dir(local_dir) filename = os.path.join(local_dir, localname) # log.debug("entityroot._metaobj: self._entitydir %s"%(self._entitydir,)) # log.debug("entityroot._metaobj: body_dir %s, body_file %s"%(body_dir, body_file)) # log.debug("entityroot._metaobj: filename %s"%(filename,)) return open(filename, mode) def _read_stream(self): """ Opens a (file-like) stream to read entity data. Returns the stream object, which implements the context protocol to close the stream on exit from a containing with block; e.g. 
with e._read_stream() as f: // read data from f // f is closed here """ # @@TODO: factor out logic in common with _metaobj/_fileobj f_stream = None body_file = self._exists_path() if body_file: try: f_stream = open(body_file, "rt") except IOError as e: if e.errno != errno.ENOENT: raise log.error("EntityRoot._read_stream: no file %s"%(body_file)) return f_stream # Special methods to facilitate access to entity values by dictionary operations # on the Entity object def __iter__(self): """ Return entity value keys """ if self._values: for k in self._values: yield k return def keys(self): """ Return collection metadata value keys """ return self._values.keys() def items(self): """ Return collection metadata value fields """ return self._values.items() def get(self, key, default): """ Equivalent to dict.get() function """ # log.info("entityroot.get key %r, self._values %r"%(key, self._values)) return self[key] if self._values and key in self._values else default def setdefault(self, key, default): """ Equivalent to dict.setdefault() function, except that blank values also are overridden """ if self._values and key in self._values and self._values[key]: result = self._values[key] else: self._values[key] = default result = default return result def __getitem__(self, k): """ Allow direct indexing to access collection metadata value fields """ return self._values[k] def __setitem__(self, k, v): """ Allow direct indexing to update collection metadata value fields """ self._values[k] = v # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/entityroot.py
entityroot.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2015, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import os import os.path import shutil from django.conf import settings from annalist import message from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL, RDFS, OWL from annalist import util from annalist.models.entity import Entity from annalist.models.entitydata import EntityData class RecordVocab(EntityData): _entitytype = ANNAL.CURIE.Vocabulary _entitytypeid = layout.VOCAB_TYPEID _entityroot = layout.COLL_VOCAB_PATH _entityview = layout.COLL_VOCAB_VIEW _entityfile = layout.VOCAB_META_FILE def __init__(self, parent, vocab_id): """ Initialize a new RecordVocab object, without metadata (yet). parent is the parent collection in which the namespace is defined. vocab_id the local identifier for the vocabulary; also used as namespace prefix. """ super(RecordVocab, self).__init__(parent, vocab_id) self._parent = parent # log.debug("RecordVocab %s: dir %s"%(vocab_id, self._entitydir)) return @classmethod def _pre_save_validation(cls, type_id, entity_id, entitydata): """ Pre-save value validation. Override EntityRoot method Returns a list of strings describing any errors detected, or an empty list if no problems are found. """ errs = super(RecordVocab, cls)._pre_save_validation(type_id, entity_id, entitydata) if ANNAL.CURIE.uri in entitydata: vuri = entitydata[ANNAL.CURIE.uri] if vuri[-1] not in {":", "/", "?", "#"}: msg = message.RECORD_VOCAB_URI_TERM%({"id": entity_id, "uri": vuri}) log.info(msg) errs.append(msg) return errs def _migrate_filenames(self): """ Override EntityData method """ return None def _migrate_values(self, entitydata): """ Vocabulary namespace definition entity format migration method. 
The specification for this method is that it returns an entitydata value which is a copy of the supplied entitydata with format migrations applied. NOTE: implementations are free to apply migrations in-place. The resulting entitydata should be exctly as the supplied data *should* appear in storage to conform to the current format of the data. The migration function should be idempotent; i.e. x._migrate_values(x._migrate_values(e)) == x._migrate_values(e) """ # Migrate # rdfs:seeAlso [ { 'owl:sameAs': <foo> }, ... ] # to: # rdfs:seeAlso [ { '@id': <foo> }, ... ] seeAlso = entitydata.get(RDFS.CURIE.seeAlso, []) for i in range(len(seeAlso)): if OWL.CURIE.sameAs in seeAlso[i]: seeAlso[i]['@id'] = seeAlso[i].pop(OWL.CURIE.sameAs) # Return result return entitydata def _post_update_processing(self, entitydata, post_update_flags): """ Post-update processing. This method is called when a RecordVocab entity has been updated. It invokes the containing collection method to regenerate the JSON LD context for the collection to which the entity belongs. """ self._parent.flush_collection_caches() self._parent.generate_coll_jsonld_context(flags=post_update_flags) return entitydata def _post_remove_processing(self, post_update_flags): """ Post-remove processing. This method is called when an entity has been removed. """ self._parent.flush_collection_caches() return # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordvocab.py
recordvocab.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module is used to cache per-collection type information. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2017, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL, RDFS from annalist.models.collectionentitycache import ( Cache_Error, CollectionEntityCacheObject, CollectionEntityCache ) from annalist.models.closurecache import ClosureCache from annalist.models.recordtype import RecordType # --------------------------------------------------------------------------- # # Type-cache object class # # --------------------------------------------------------------------------- class CollectionTypeCacheObject(CollectionEntityCacheObject): """ This class is a type cache for a specified collection. It extends class CollectionEntityCacheObject with type-specific logic; notably overriding method _load_entity with additional logic to maintain a supertype closure cache, and methods to access that cache. """ def __init__(self, coll_id, entity_cls=RecordType): """ Initialize a cache object for a specified collection. coll_id Collection id with which the type cache is associated. """ super(CollectionTypeCacheObject, self).__init__(coll_id, entity_cls) self._supertype_closure_cache = ClosureCache(coll_id, ANNAL.CURIE.supertype_uri) return def _gsupertype_cache(self): return self._supertype_closure_cache def _load_entity(self, coll, type_entity): """ Internal helper method loads type data to cache. Also updates supertype closure cache. Returns True if new type was added. 
""" type_id = type_entity.get_id() type_uri = type_entity.get_uri() type_parent = type_entity.get_parent().get_id() type_data = type_entity.get_save_values() add_type = super(CollectionTypeCacheObject, self)._load_entity(coll, type_entity) if add_type: # Add relations for supertype references from the new type URI for supertype_obj in type_data.get(ANNAL.CURIE.supertype_uri, []): supertype_uri = supertype_obj["@id"] self._gsupertype_cache().add_rel(type_uri, supertype_uri) # Also add relations for references *to* the new type URI for try_subtype in self.get_all_entities(coll): sub_st_objs = try_subtype.get(ANNAL.CURIE.supertype_uri, []) sub_st_uris = [ sub_st_obj["@id"] for sub_st_obj in sub_st_objs ] if type_uri in sub_st_uris: subtype_uri = try_subtype.get(ANNAL.CURIE.uri, None) if subtype_uri: self._gsupertype_cache().add_rel(subtype_uri, type_uri) return add_type def _drop_entity(self, coll, type_id): """ Override method that drops entity from cache, to also remove references from the supertype closure cache. Returns the type entity removed, or None if not found. """ type_entity = super(CollectionTypeCacheObject, self)._drop_entity(coll, type_id) if type_entity: type_uri = type_entity.get_uri() self._gsupertype_cache().remove_val(type_uri) return type_entity def get_type_uri_supertype_uris(self, type_uri): """ Returns all supertype URIs for a specified type URI. Returns all supertype URIs, even those for which there is no defined type entity. """ return self._gsupertype_cache().fwd_closure(type_uri) def get_type_uri_subtype_uris(self, type_uri): """ Returns all subtype URIs for a specified type URI. Returns all subtype URIs, even those for which there is no defined type entity. """ return self._gsupertype_cache().rev_closure(type_uri) def get_type_uri_supertypes(self, coll, type_uri): """ Returns all supertypes for a specified type URI. This method returns only those supertypes that are defined as entities. 
""" self._load_entities(coll) for st_uri in self.get_type_uri_supertype_uris(type_uri): st = self.get_entity_from_uri(coll, st_uri) if st: yield st return def get_type_uri_subtypes(self, coll, type_uri): """ Returns all subtypes for a specified type URI. This method returns only those subtypes that are defined as entities. """ self._load_entities(coll) for st_uri in self.get_type_uri_subtype_uris(type_uri): st = self.get_entity_from_uri(coll, st_uri) if st: yield st return def remove_cache(self): """ Close down and release all type cache data """ # log.debug("@@remove type cache %r"%(self.get_coll_id(),)) super(CollectionTypeCacheObject, self).remove_cache() self._supertype_closure_cache.remove_cache() self._supertype_closure_cache = None return # --------------------------------------------------------------------------- # # Collection type-cache class # # --------------------------------------------------------------------------- class CollectionTypeCache(CollectionEntityCache): """ This class manages type cache objects over multiple collections """ def __init__(self): """ Initialize. Initializes a value cache cache with no per-collection data. """ super(CollectionTypeCache, self).__init__(CollectionTypeCacheObject, RecordType) return # Collection type cache allocation and access methods def set_type(self, coll, type_entity): """ Save a new or updated type definition """ return self.set_entity(coll, type_entity) def remove_type(self, coll, type_id): """ Remove type from collection type cache. Returns the type entity removed if found, or None if not defined. """ return self.remove_entity(coll, type_id) def get_type(self, coll, type_id): """ Retrieve a type description for a given type Id. Returns a type object for the specified collection and type Id. """ return self.get_entity(coll, type_id) def get_type_from_uri(self, coll, type_uri): """ Retrieve a type description for a given type URI. Returns a type object for the specified collection and type URI. 
""" return self.get_entity_from_uri(coll, type_uri) def get_all_type_ids(self, coll, altscope=None): """ Returns all types currently available for a collection in the indicated scope. Default scope is types defined directly in the indicated collection. """ return self.get_all_entity_ids(coll, altscope=altscope) def get_all_types(self, coll, altscope=None): """ Returns all types currently available for a collection in the indicated scope. Default scope is types defined directly in the indicated collection. """ return self.get_all_entities(coll, altscope=altscope) def get_type_uri_supertypes(self, coll, type_uri): """ Returns all supertypes for a specieid type URI. """ type_cache = self._get_cache(coll) return type_cache.get_type_uri_supertypes(coll, type_uri) def get_type_uri_subtypes(self, coll, type_uri): """ Returns all subtypes for a specieid type URI. """ type_cache = self._get_cache(coll) return type_cache.get_type_uri_subtypes(coll, type_uri) def get_type_uri_supertype_uris(self, coll, type_uri): """ Returns all supertypes for a specieid type URI. """ type_cache = self._get_cache(coll) return type_cache.get_type_uri_supertype_uris(type_uri) def get_type_uri_subtype_uris(self, coll, type_uri): """ Returns all subtypes for a specieid type URI. """ type_cache = self._get_cache(coll) return type_cache.get_type_uri_subtype_uris(type_uri) # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/collectiontypecache.py
collectiontypecache.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ Annalist user record A user is represented in a collectionm by: - an ID (slug) - a URI (currently a mailto: URI) - a label (full name) - a description - a list of permissions applicable to the collection - ... The ID and URI must be matched by the authenticated issuer of an HTTP request for the permissionms to be applied. Other fields are cosmetic. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import os import os.path import shutil import logging log = logging.getLogger(__name__) from django.conf import settings from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL from annalist import util from annalist.models.entity import Entity from annalist.models.entitydata import EntityData site_default_user_id = "_site_default_user_perms" site_default_user_uri = "annal:User/_default_user_perms" default_user_id = "_default_user_perms" default_user_uri = "annal:User/_default_user_perms" unknown_user_id = "_unknown_user_perms" unknown_user_uri = "annal:User/_unknown_user_perms" class AnnalistUser(EntityData): _entitytype = ANNAL.CURIE.User _entitytypeid = layout.USER_TYPEID _entityroot = layout.COLL_USER_PATH _entityview = layout.COLL_USER_VIEW _entityfile = layout.USER_META_FILE def __init__(self, parent, user_id): """ Initialize a new AnnalistUser object, without metadata (yet). parent is the parent entity from which the type is descended. user_id the local identifier for the user altparent is a site object to search for this new entity, allowing site-wide AnnalistUser values to be found. 
""" super(AnnalistUser, self).__init__(parent, user_id) # log.debug("AnnalistUser %s: dir %s"%(user_id, self._entitydir)) # log.debug("AnnalistUser %s: url %s, viewurl %s"%(user_id, self._entityurl, self._entityviewurl)) return def _migrate_values(self, userpermissions): """ User permission data format migration method. """ migration_map = ( [ (ANNAL.CURIE.user_permissions, ANNAL.CURIE.user_permission) ]) userpermissions = self._migrate_values_map_field_names(migration_map, userpermissions) return userpermissions # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/annalistuser.py
annalistuser.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module is used to cache per-collection vocabulary namespace information. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2017, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL, RDFS from annalist.models.collectionentitycache import ( Cache_Error, CollectionEntityCacheObject, CollectionEntityCache ) from annalist.models.recordvocab import RecordVocab # --------------------------------------------------------------------------- # # Collection namespace vocabulary cache class # # --------------------------------------------------------------------------- class CollectionVocabCache(CollectionEntityCache): """ This class manages and accesses namespace vocabulary cache objects for multiple collections. Per-collection cacheing is implemented by CollectionEntityCacheObject. """ def __init__(self): """ Initialize. Initializes a namespace vocabulary cache cache with no per-collection data. """ super(CollectionVocabCache, self).__init__(CollectionEntityCacheObject, RecordVocab) return # Collection vocabulary namespace cache alllocation and access methods def set_vocab(self, coll, vocab_entity): """ Save a new or updated vocabulary namespace definition """ return self.set_entity(coll, vocab_entity) def remove_vocab(self, coll, vocab_id): """ Remove vocabulary namespace from collection cache. Returns the vocabulary namespace entity removed if found, or None if not defined. """ return self.remove_entity(coll, vocab_id) def get_vocab(self, coll, vocab_id): """ Retrieve a vocabulary namespace description for a given Id. Returns a vocabulary namespace object for the specified collecion and Id. 
""" return self.get_entity(coll, vocab_id) def get_vocab_from_uri(self, coll, vocab_uri): """ Retrieve a vocabulary namespace description for a given URI. Returns a vocabulary namespace object for the specified collecion and URI. """ return self.get_entity_from_uri(coll, vocab_uri) def get_all_vocab_ids(self, coll, altscope=None): """ Returns all vocabulary namespaces currently available for a collection in the indicated scope. Default scope is vocabularies defined directly in the collection. """ return self.get_all_entity_ids(coll, altscope=altscope) def get_all_vocabs(self, coll, altscope=None): """ Returns all vocabulary namespace currently available for a collection in the indicated scope. Default scope is vocabularies defined directly in the collection. """ return self.get_all_entities(coll, altscope=altscope) # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/collectionvocabcache.py
collectionvocabcache.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import os import os.path import shutil import logging log = logging.getLogger(__name__) from django.conf import settings from annalist import layout from annalist.exceptions import Annalist_Error from annalist.identifiers import ANNAL from annalist import util from annalist.models.entity import Entity from annalist.models.entitydata import EntityData class RecordTypeData(Entity): _entitytype = ANNAL.CURIE.Type_Data _entitytypeid = layout.TYPEDATA_TYPEID _entityroot = layout.COLL_TYPEDATA_PATH _entityview = layout.COLL_TYPEDATA_VIEW _entitybase = "" _entityfile = layout.TYPEDATA_META_FILE _contextbase = layout.TYPEDATA_COLL_BASE_REF _contextref = layout.TYPEDATA_CONTEXT_FILE def __init__(self, parent, type_id): """ Initialize a new RecordTypeData object, without metadata. parent is the parent collection from which the type data is descended. type_id the local identifier (slug) for the record type """ # @@ # log.info( # "@@ RecordTypeData.__init__ id %s, _entitytypeid %s, parent_id %s"% # (type_id, self._entitytypeid, parent.get_id()) # ) # @@ self._entityref = layout.COLL_BASE_TYPEDATA_REF%{'id': type_id} super(RecordTypeData, self).__init__(parent, type_id) self._ancestorid = parent._ancestorid return def remove_entity(self, entity_id): t = EntityData.remove(self, entity_id) return t def _local_find_alt_parents(self): """ Returns a list of alternative parents for the current inheritance branch only; i.e. does not attempt to follow altparent chains in referenced trees. (That is handled by `_find_alt_parents`.) This method overrides the method in Entity to take account of the need to look beyond the immediate RecordTypeData instance to follow links to collections from which data is inherited. 
""" type_id = self.get_id() altcolls = self._parent._local_find_alt_parents() # log.info("@@ RecordTypeData._local_find_alt_parents altcolls %r"%(altcolls)) altdatas = [ alt for alt in [ RecordTypeData.load(c, type_id) for c in altcolls ] if alt ] # log.info("@@ RecordTypeData._local_find_alt_parents %r"%([p.get_id for p in altdatas])) return altdatas # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/recordtypedata.py
recordtypedata.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import logging log = logging.getLogger(__name__) import os import os.path import shutil import json import datetime from django.conf import settings from annalist import layout from annalist import message from annalist.identifiers import ANNAL from annalist.util import replacetree, updatetree from annalist.exceptions import Annalist_Error from annalist.models.site import Site from annalist.models.entityfinder import EntityFinder from annalist.models.entitytypeinfo import EntityTypeInfo, TYPE_CLASS_MAP from annalist.models.recordtypedata import RecordTypeData def initialize_coll_data(src_data_dir, tgt_coll): """ Initialize data in the specified target collection using definitions in the specified source directory. This is used for copying installable data collections from the Annalist installed software into the site data area. returns list of error messages; an empty list indicates success. """ tgt_data_dir, data_file = tgt_coll._dir_path() log.info("Copy Annalist collection data from %s to %s"%(src_data_dir, tgt_data_dir)) for sdir in layout.DATA_VOCAB_DIRS: # Don't copy user permissions if os.path.isdir(os.path.join(src_data_dir, sdir)): log.info("- %s -> %s"%(sdir, tgt_data_dir)) Site.replace_site_data_dir(tgt_coll, sdir, src_data_dir) # Copy entity data to target collection. 
expand_entitydata = os.path.join(src_data_dir, "entitydata" ) if os.path.isdir(expand_entitydata): log.info("- Copy entitydata/...") for edir in os.listdir(expand_entitydata): if os.path.isdir(os.path.join(expand_entitydata, edir)): log.info("- %s -> %s"%(edir, tgt_data_dir)) Site.replace_site_data_dir(tgt_coll, edir, expand_entitydata) # Generate initial JSON-LD context data tgt_coll.flush_all_caches() tgt_coll.generate_coll_jsonld_context() return [] def copy_coll_data(src_coll, tgt_coll): """ Copy collection data from specified source to target collection. returns list of error messages; an empty list indicates success. """ # @@TESTME: Not tested by test suite log.info("Copying collection '%s' to '%s'"%(src_coll.get_id(), tgt_coll.get_id())) msgs = [] entityfinder = EntityFinder(src_coll) for e in entityfinder.get_entities(): entity_id = e.get_id() typeinfo = EntityTypeInfo( tgt_coll, e.get_type_id(), create_typedata=True ) new_entity = typeinfo.create_entity(entity_id, e.get_values()) if not typeinfo.entity_exists(entity_id): msg = ( "Collection.copy_coll_data: Failed to create entity %s/%s"% (typeinfo.type_id, entity_id) ) log.warning(msg) msgs.append(msg) msgs += new_entity._copy_entity_files(e) return msgs def migrate_collection_dir(coll, prev_dir, curr_dir): """ Migrate (rename) a single directory belonging to the indicated collection. Returns list of errors or empty list. 
""" errs = [] if not prev_dir: return errs coll_base_dir, coll_meta_file = coll._dir_path() # log.debug( # "collectiondata.migrate_collection_dir %s: %s -> %s"% # (coll_base_dir, prev_dir, curr_dir) # ) expand_prev_dir = os.path.join(coll_base_dir, prev_dir) expand_curr_dir = os.path.join(coll_base_dir, curr_dir) # log.debug(" prev %s"%(expand_prev_dir,)) # log.debug(" curr %s"%(expand_curr_dir,)) if (curr_dir != prev_dir) and os.path.isdir(expand_prev_dir): log.info("migrate_coll_base_dir: %s"%coll_base_dir) log.info(" rename: %s -> %s"%(prev_dir, curr_dir)) # print "@@ rename %s -> %s"%(expand_prev_dir, expand_curr_dir) try: os.rename(expand_prev_dir, expand_curr_dir) except Exception as e: msg = (message.COLL_MIGRATE_DIR_FAILED% { "id": coll.get_id() , "old_path": prev_dir, "new_path": curr_dir , "exc": e } ) log.error("migrate_collection_dir: "+msg) errs.append(msg) # Create type data container for site types (so it can be enumerated later) if curr_dir in TYPE_CLASS_MAP: if not RecordTypeData.exists(coll, curr_dir): log.info("Create RecordTypeData for %s"%curr_dir) typedata = RecordTypeData.create(coll, curr_dir, {}) return errs def migrate_coll_config_dirs(coll): """ Migrate (rename) collection configuration directories. Returns list of error message strings, or empty list. """ errs = [] for curr_dir, prev_dir in layout.COLL_DIRS_CURR_PREV: # print "@@ migrate coll dir %s -> %s"%(prev_dir, curr_dir) e = migrate_collection_dir(coll, prev_dir, curr_dir) if e: errs.extend(e) return errs def migrate_coll_data(coll): """ Migrate collection data for specified collection Returns list of error message strings, or empty list. 
""" log.info("Migrate Annalist collection data for %s"%(coll.get_id())) errs = migrate_coll_config_dirs(coll) if errs: return errs try: entityfinder = EntityFinder(coll) for e in entityfinder.get_entities(): log.info("migrate_coll_data: %s/%s"%(e[ANNAL.CURIE.type_id], e[ANNAL.CURIE.id])) typeinfo = EntityTypeInfo(coll, e[ANNAL.CURIE.type_id]) typeinfo.set_type_uris(e) typeinfo.set_entity_uri(e[ANNAL.CURIE.id], e) e._save(post_update_flags={"nocontext"}) if e.get_errors(): errs.extend(e.get_errors()) except Annalist_Error as err: errs.append(str(err)) if errs: return errs # Rename _group directory errs = migrate_collection_dir(coll, layout.GROUP_DIR, layout.GROUP_DIR+".migrated") if errs: return errs coll.generate_coll_jsonld_context() return errs # End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/collectiondata.py
collectiondata.py
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ Gather information about an entity/record type """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import traceback import logging log = logging.getLogger(__name__) import copy from annalist import message from annalist import layout from annalist.util import valid_id, extract_entity_id from annalist.identifiers import ANNAL, RDF, RDFS from annalist.models.collection import Collection from annalist.models.annalistuser import AnnalistUser, site_default_user_id, default_user_id, unknown_user_id from annalist.models.recordtype import RecordType from annalist.models.recordlist import RecordList from annalist.models.recordview import RecordView from annalist.models.recordgroup import RecordGroup, RecordGroup_migration from annalist.models.recordfield import RecordField from annalist.models.recordvocab import RecordVocab from annalist.models.recordinfo import RecordInfo from annalist.models.recordenum import RecordEnumFactory from annalist.models.recordtypedata import RecordTypeData from annalist.models.entitydata import EntityData COLL_ID = layout.COLL_TYPEID USER_ID = layout.USER_TYPEID TYPE_ID = layout.TYPE_TYPEID LIST_ID = layout.LIST_TYPEID VIEW_ID = layout.VIEW_TYPEID GROUP_ID = layout.GROUP_TYPEID FIELD_ID = layout.FIELD_TYPEID VOCAB_ID = layout.VOCAB_TYPEID INFO_ID = layout.INFO_TYPEID TASK_ID = layout.TASK_TYPEID COLL_MESSAGES = ( { 'parent_heading': "(@@ COLL_MESSAGES.parent_heading @@)" , 'parent_missing': "(@@ COLL_MESSAGES.parent_missing @@)" , 'entity_heading': message.COLLECTION_ID , 'entity_invalid_id': message.COLLECTION_ID_INVALID , 'entity_exists': message.COLLECTION_EXISTS , 'entity_not_exists': message.COLLECTION_NOT_EXISTS , 'entity_removed': message.COLLECTION_REMOVED , 'entity_type_heading': "(@@ COLL_MESSAGES.entity_type_heading @@)" , 
'entity_type_invalid': "(@@ COLL_MESSAGES.entity_type_invalid @@)" }) ENTITY_MESSAGES = ( { 'parent_heading': message.RECORD_TYPE_ID , 'parent_missing': message.RECORD_TYPE_NOT_EXISTS , 'entity_heading': message.ENTITY_DATA_ID , 'entity_invalid_id': message.ENTITY_DATA_ID_INVALID , 'entity_exists': message.ENTITY_DATA_EXISTS , 'entity_not_exists': message.ENTITY_DATA_NOT_EXISTS , 'entity_removed': message.ENTITY_DATA_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) USER_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.ANNALIST_USER_ID , 'entity_invalid_id': message.ANNALIST_USER_ID_INVALID , 'entity_exists': message.ANNALIST_USER_EXISTS , 'entity_not_exists': message.ANNALIST_USER_NOT_EXISTS , 'entity_removed': message.ANNALIST_USER_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) TYPE_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_TYPE_ID , 'entity_invalid_id': message.RECORD_TYPE_ID_INVALID , 'entity_exists': message.RECORD_TYPE_EXISTS , 'entity_not_exists': message.RECORD_TYPE_NOT_EXISTS , 'entity_removed': message.RECORD_TYPE_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) LIST_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_LIST_ID , 'entity_invalid_id': message.RECORD_LIST_ID_INVALID , 'entity_exists': message.RECORD_LIST_EXISTS , 'entity_not_exists': message.RECORD_LIST_NOT_EXISTS , 'entity_removed': message.RECORD_LIST_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) VIEW_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 
'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_VIEW_ID , 'entity_invalid_id': message.RECORD_VIEW_ID_INVALID , 'entity_exists': message.RECORD_VIEW_EXISTS , 'entity_not_exists': message.RECORD_VIEW_NOT_EXISTS , 'entity_removed': message.RECORD_VIEW_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) GROUP_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_GROUP_ID , 'entity_invalid_id': message.RECORD_GROUP_ID_INVALID , 'entity_exists': message.RECORD_GROUP_EXISTS , 'entity_not_exists': message.RECORD_GROUP_NOT_EXISTS , 'entity_removed': message.RECORD_GROUP_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) FIELD_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_FIELD_ID , 'entity_invalid_id': message.RECORD_FIELD_ID_INVALID , 'entity_exists': message.RECORD_FIELD_EXISTS , 'entity_not_exists': message.RECORD_FIELD_NOT_EXISTS , 'entity_removed': message.RECORD_FIELD_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) VOCAB_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_VOCAB_ID , 'entity_invalid_id': message.RECORD_VOCAB_ID_INVALID , 'entity_exists': message.RECORD_VOCAB_EXISTS , 'entity_not_exists': message.RECORD_VOCAB_NOT_EXISTS , 'entity_removed': message.RECORD_VOCAB_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) INFO_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_INFO_ID , 'entity_invalid_id': 
message.RECORD_INFO_ID_INVALID , 'entity_exists': message.RECORD_INFO_EXISTS , 'entity_not_exists': message.RECORD_INFO_NOT_EXISTS , 'entity_removed': message.RECORD_INFO_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) ENUM_MESSAGES = ( { 'parent_heading': message.COLLECTION_ID , 'parent_missing': message.COLLECTION_NOT_EXISTS , 'entity_heading': message.RECORD_ENUM_ID , 'entity_invalid_id': message.RECORD_ENUM_ID_INVALID , 'entity_exists': message.RECORD_ENUM_EXISTS , 'entity_not_exists': message.RECORD_ENUM_NOT_EXISTS , 'entity_removed': message.RECORD_ENUM_REMOVED , 'entity_type_heading': message.ENTITY_TYPE_ID , 'entity_type_invalid': message.ENTITY_TYPE_ID_INVALID }) SITE_PERMISSIONS = ( { "view": "VIEW" # View site config data , "list": "VIEW" # .. , "search": "VIEW" # .. , "new": "FORBIDDEN" # Create collection record , "copy": "FORBIDDEN" # .. , "edit": "FORBIDDEN" # Update collection record , "delete": "FORBIDDEN" # Delete collection record , "config": "FORBIDDEN" # Change collection configuration , "admin": "ADMIN" # Change users or permissions }) ADMIN_PERMISSIONS = ( { "view": "ADMIN" # View user record , "list": "ADMIN" # .. , "search": "ADMIN" # .. , "new": "ADMIN" # Create user record , "copy": "ADMIN" # .. , "edit": "ADMIN" # Update user record , "delete": "ADMIN" # Delete user record , "config": "CONFIG" # Change collection configuration , "admin": "ADMIN" # Change users or permissions }) ADMIN_VIEW_PERMISSIONS = ( { "view": "VIEW" # View site record , "list": "VIEW" # .. , "search": "VIEW" # .. , "new": "ADMIN" # Create site record , "copy": "ADMIN" # .. , "edit": "ADMIN" # Update site record , "delete": "ADMIN" # Delete site record , "config": "CONFIG" # Change collection configuration , "admin": "ADMIN" # Change users or permissions }) CONFIG_PERMISSIONS = ( { "view": "VIEW" # View config record , "list": "VIEW" # .. , "search": "VIEW" # .. 
, "new": "CONFIG" # Create config record , "copy": "CONFIG" # .. , "edit": "CONFIG" # Update config record , "delete": "CONFIG" # Delete config record , "config": "CONFIG" # Change collection configuration , "admin": "ADMIN" # Change users or permissions }) ENTITY_PERMISSIONS = ( { "view": "VIEW" # View data record , "list": "VIEW" # .. , "search": "VIEW" # .. , "new": "CREATE" # Create data record , "copy": "CREATE" # .. , "edit": "UPDATE" # Update data record , "delete": "DELETE" # Delete data record , "config": "CONFIG" # Change collection configuration , "admin": "ADMIN" # Change users or permissions }) TYPE_CLASS_MAP = ( { COLL_ID: Collection , USER_ID: AnnalistUser , TYPE_ID: RecordType , LIST_ID: RecordList , VIEW_ID: RecordView , GROUP_ID: RecordGroup_migration , FIELD_ID: RecordField , VOCAB_ID: RecordVocab , INFO_ID: RecordInfo , '_enum_field_placement': RecordEnumFactory('_enum_field_placement', '_enum_field_placement') , '_enum_list_type': RecordEnumFactory('_enum_list_type', '_enum_list_type') , '_enum_render_type': RecordEnumFactory('_enum_render_type', '_enum_render_type') , '_enum_value_mode': RecordEnumFactory('_enum_value_mode', '_enum_value_mode') , '_enum_value_type': RecordEnumFactory('_enum_value_type', '_enum_value_type') }) TYPE_MESSAGE_MAP = ( { COLL_ID: COLL_MESSAGES , USER_ID: USER_MESSAGES , TYPE_ID: TYPE_MESSAGES , LIST_ID: LIST_MESSAGES , VIEW_ID: VIEW_MESSAGES , GROUP_ID: GROUP_MESSAGES , FIELD_ID: FIELD_MESSAGES , VOCAB_ID: VOCAB_MESSAGES , INFO_ID: INFO_MESSAGES , '_enum_field_placement': ENUM_MESSAGES , '_enum_list_type': ENUM_MESSAGES , '_enum_render_type': ENUM_MESSAGES , '_enum_value_mode': ENUM_MESSAGES , '_enum_value_type': ENUM_MESSAGES }) SITE_PERMISSIONS_MAP = ( { COLL_ID: SITE_PERMISSIONS , USER_ID: ADMIN_PERMISSIONS , TYPE_ID: SITE_PERMISSIONS , LIST_ID: SITE_PERMISSIONS , VIEW_ID: SITE_PERMISSIONS , GROUP_ID: SITE_PERMISSIONS , FIELD_ID: SITE_PERMISSIONS , VOCAB_ID: SITE_PERMISSIONS , INFO_ID: ADMIN_VIEW_PERMISSIONS , 
'_enum_field_placement': SITE_PERMISSIONS , '_enum_list_type': SITE_PERMISSIONS , '_enum_render_type': SITE_PERMISSIONS , '_enum_value_mode': SITE_PERMISSIONS , '_enum_value_type': SITE_PERMISSIONS , 'EntityData': SITE_PERMISSIONS }) TYPE_PERMISSIONS_MAP = ( { COLL_ID: CONFIG_PERMISSIONS , USER_ID: ADMIN_PERMISSIONS , TYPE_ID: CONFIG_PERMISSIONS , LIST_ID: CONFIG_PERMISSIONS , VIEW_ID: CONFIG_PERMISSIONS , GROUP_ID: CONFIG_PERMISSIONS , FIELD_ID: CONFIG_PERMISSIONS , VOCAB_ID: CONFIG_PERMISSIONS , INFO_ID: CONFIG_PERMISSIONS , '_enum_field_placement': SITE_PERMISSIONS , '_enum_list_type': SITE_PERMISSIONS , '_enum_render_type': SITE_PERMISSIONS , '_enum_value_mode': SITE_PERMISSIONS , '_enum_value_type': SITE_PERMISSIONS , 'EntityData': ENTITY_PERMISSIONS }) def get_built_in_type_ids(): """ Returns an interator over the built-in types """ return iter(TYPE_CLASS_MAP) class EntityTypeInfo(object): """ Check a supplied type identifier, and access values for: Entity class Entity parent Entity alternative parent for site-wide values Type-dependent messages """ def __init__(self, coll, type_id, create_typedata=False): """ Set up type attribute values. coll collection object in which type is used type_id entity type id, which is a collection-defined value, or one of a number of special site-wide built-in types. create_typedata if true, requests that a RecordTypeData entity be created and saved on disk for user-defined types if it does not already exist. (Creating a RecordTypeData entity ensures that the corresponding data storage location is available for saving entity data.) 
        Attributes of type information object are:

        recordtype      type object describing the identified type
        entityparent    Parent entity for entities of this type, or None
                        if the type is not defined for the collection
        entityaltparent Alternative (site-wide) parent entity for built-in
                        types, or None
        entityclass     Python class object for entity
        entitymessages  a table of message strings for diagnostics relating
                        to operations on this type.

        and other values as initialized here.
        """
        self.entitycoll = coll
        self.recordtype = None
        self.entityparent = None
        self.coll_id = coll.get_id()
        self.type_id = type_id
        self.permissions_map = None
        # Three cases below: the collection pseudo-type, a built-in type,
        # or a user-defined (collection-local) type.
        if type_id == layout.COLL_TYPEID: # "_coll"
            # NOTE:
            #
            # This setup defaults to using site permissions for collection operations.
            # But there is some special-case code in views.displayinfo that uses the
            # collection itself if it exists.
            #
            # (See use of attribute DisplayInfo.coll_perms.)
            #
            self.recordtype = coll.get_site().site_data_collection().get_type(type_id)
            self.entityparent = coll.get_site()
            self.entityaltparent = None
            self.entityclass = Collection
            self.entitymessages = COLL_MESSAGES
            self.permissions_map = CONFIG_PERMISSIONS # unless entity is layout.SITEDATA_ID?
        elif type_id in TYPE_CLASS_MAP:
            # Built-in type: class/messages come from the module-level maps;
            # site data collections use the (stricter) site permission maps.
            self.recordtype = coll.get_type(type_id)
            self.entityparent = coll
            self.entityaltparent = coll.get_site()
            self.entityclass = TYPE_CLASS_MAP[type_id]
            self.entitymessages = TYPE_MESSAGE_MAP[type_id]
            if self.coll_id == layout.SITEDATA_ID:
                self.permissions_map = SITE_PERMISSIONS_MAP[type_id]
            else:
                self.permissions_map = TYPE_PERMISSIONS_MAP[type_id]
        else:
            # User-defined type: represented by EntityData, parented by a
            # RecordTypeData container (created on demand if requested).
            if not valid_id(type_id):
                raise ValueError("EntityTypeInfo invalid type_id (%s)"%(type_id,))
            if RecordType.exists(coll, type_id, altscope="all"):
                # log.info("@@ EntityTypeInfo: Type %s exists"%type_id)
                self.recordtype = coll.get_type(type_id)
            else:
                # log.info("@@ EntityTypeInfo: Type %s does not exist for collection %s"%(type_id,coll.get_id()))
                pass
            self.parent_typedata(create_typedata=create_typedata)
            self.entityaltparent = None
            self.entityclass = EntityData
            self.entitymessages = ENTITY_MESSAGES
            self.permissions_map = ENTITY_PERMISSIONS
        if not self.recordtype:
            # .recordtype is used by views.displayinfo to locate the default
            # view and/or list id for examining records of a particular type.
            # Also used in entityedit for getting @type URI/CURIE values.
            # Used in bound_field to get link to type record
            log.warning("EntityTypeInfo.__init__: RecordType %s not found"%type_id)
            # log.info("".join(traceback.format_stack()))
            # raise ValueError("Trace")
        return

    def get_type_id(self):
        """
        Return id for current type, or None if no type record is available.
        """
        if self.recordtype:
            return self.recordtype[ANNAL.CURIE.id]
        return None

    def get_type_uri(self):
        """
        Return identifying URI for the current type.

        Prefers the declared annal:uri value; falls back to the type
        record's annal:url.  Returns None if no type record is available.
        """
        typeuri = None
        if self.recordtype:
            if ANNAL.CURIE.uri in self.recordtype:
                typeuri = self.recordtype[ANNAL.CURIE.uri]
            if not typeuri:
                typeuri = self.recordtype[ANNAL.CURIE.url]
        return typeuri

    def get_all_type_uris(self):
        """
        Return list of all type URIs for this type (declared URI followed by
        cached supertype URIs), or None if no type URI can be determined.
        """
        type_uris = None
        type_uri = self.get_type_uri()
        if type_uri:
            type_uris = [type_uri] + list(self.entitycoll.cache_get_supertype_uris(type_uri))
        return type_uris

    def set_type_uris(self, entity_values):
        """
        Set URIs of current type in supplied entity values.

        Previous type URIs are overridden.
        """
        entity_values[ANNAL.CURIE.type] = self.get_type_uri()
        entity_values['@type'] = self.get_all_type_uris()
        return

    def make_entity_uri(self, entity_id):
        """
        Return a candidate entity URI (a CURIE "prefix:id") for an instance of
        the current type, based on a namespace prefix declared in the type
        definition.

        Returns None if no entity namespace prefix is declared for the type.

        Raises ValueError if the declared prefix is not a valid identifier.
        """
        if ANNAL.CURIE.ns_prefix in self.recordtype:
            ns_pref = self.recordtype[ANNAL.CURIE.ns_prefix]
            if ns_pref != "":
                if not valid_id(ns_pref):
                    raise ValueError(
                        "EntityTypeInfo invalid ns_prefix %s for type %s"%
                        (ns_pref, self.type_id)
                        )
                return ns_pref + ":" + entity_id
        return None

    def set_entity_uri(self, entity_id, entity_values):
        """
        Update entity URI in supplied entity values, if value is not already set.

        If current value is blank or same as URL then that value is discarded.
        """
        entity_perms_map = self.permissions_map
        # Check for entity-specific permissions
        #
        # The logic here is currently ad-hoc, but in due course could be replaced
        # by something more generic
        # log.info(
        #     "@@ get_entity_permissions_map: type_id %s, entity_id %s"%(self.type_id, entity_id)
        #     )
        if self.type_id == USER_ID:
            # Relax view access requirements for default and unknown user id
            # (Real users require admin rights to view)
            if entity_id in [site_default_user_id, default_user_id, unknown_user_id]:
                # Copy before mutating so the shared map is not modified.
                entity_perms_map = dict(entity_perms_map)
                entity_perms_map["view"] = CONFIG_PERMISSIONS["view"]
        return entity_perms_map

    def _new_entity(self, entity_id):
        """
        Returns a new, unsaved entity object of the current type with the given id.
        """
        return self.entityclass(self.entityparent, entity_id)

    def parent_exists(self):
        """
        Test for existence of parent entity for the current type.
        """
        return self.entityparent and self.entityparent._exists()

    def parent_typedata(self, create_typedata=False):
        """
        Get or create reference to parent RecordTypeData entity.

        Optionally, create actual RecordTypeData entity if it does not exist.
        """
        if create_typedata and not self.parent_exists():
            # Only RecordTypeData entities are created dynamically:
            # others (site, coll) must already exist.
            self.entityparent = RecordTypeData.create(self.entitycoll, self.type_id, {})
        elif not self.entityparent:
            self.entityparent = RecordTypeData(self.entitycoll, self.type_id)
        return self.entityparent

    def entity_exists(self, entity_id, altscope=None):
        """
        Test for existence of identified entity of the current type.
        """
        return self.entityclass.exists(self.entityparent, entity_id, altscope=altscope)

    def create_entity(self, entity_id, entity_values):
        """
        Creates and returns an entity for the current type, with the supplied values.
        """
        log.debug("create_entity: id %s, parent id %s"%(entity_id, self.entityparent.get_id()))
        # log.debug("create_entity: values %r"%(entity_values,))
        # Set type URI for entity; previous types are not carried forwards
        self.set_type_uris(entity_values)
        self.set_entity_uri(entity_id, entity_values)
        return self.entityclass.create(self.entityparent, entity_id, entity_values)

    def remove_entity(self, entity_id):
        """
        Remove identified entity for the current type.

        Raises ValueError if an attempt is made to remove a collection.
        """
        log.debug(
            "remove_entity id %s, parent %s"%
            (entity_id, self.entityparent)
            )
        if self.type_id == COLL_ID:
            # NOTE(review): message reads "EntitytypeInfo" (sic) — a code
            # change (not a doc change) would be needed to correct it.
            raise ValueError("EntitytypeInfo.remove_entity: Attempt to remove collection")
        return self.entityclass.remove(self.entityparent, entity_id)

    def get_entity(self, entity_id, action="view"):
        """
        Loads and returns an entity for the current type, or returns None
        if the entity does not exist.

        If `action` is "new" then a new entity is initialized (but not saved).
        """
        # log.debug(
        #     "EntityTypeInfo.get_entity id %s, parent %s, altparent %s, action %s"%
        #     (entity_id, self.entityparent, self.entityaltparent, action)
        #     )
        entity = None
        entity_id = extract_entity_id(entity_id)
        if valid_id(entity_id, reserved_ok=True):
            if action == "new":
                entity = self._new_entity(entity_id)
                entity_initial_values = self.get_initial_entity_values(entity_id)
                entity.set_values(entity_initial_values)
            elif self.entityclass.exists(self.entityparent, entity_id, altscope="all"):
                entity = self.entityclass.load(self.entityparent, entity_id, altscope="all")
            else:
                log.debug(
                    "EntityTypeInfo.get_entity %s/%s at %s not found"%
                    (self.type_id, entity_id, self.entityparent._entitydir)
                    )
        return entity

    def get_create_entity(self, entity_id):
        """
        Read or create an entity with the indicated entity_id.

        If the identified entity does not already exist, a new entity is
        created and returned, but not (yet) saved.
        """
        entity = self.get_entity(entity_id)
        if entity is None:
            entity = self.get_entity(entity_id, action="new")
        return entity

    def get_copy_entity(self, entity_id, copy_entity_id):
        """
        Read or create an entity with the indicated entity_id.

        If the identified entity does not already exist, a new entity is
        created but not (yet) saved.  The newly created entity is a copy
        of 'copy_entity_id'.
        """
        entity_id = extract_entity_id(entity_id)
        entity = self.get_entity(entity_id)
        if entity is None:
            entity = self._new_entity(entity_id)
            entity.set_values(
                self.get_initial_entity_values(entity_id, copy_entity_id=copy_entity_id)
                )
        return entity

    def get_entity_implied_values(self, entity):
        """
        Adds implied values to the supplied entity value (e.g. aliases), and
        returns a new value with the additional values.

        Implied values are determined by the type of the entity, and if type
        information is not present this function generates a failure.
        """
        if not self.recordtype:
            raise AssertionError(
                "EntityTypeInfo.get_entity_implied_values called with no type information available. "+
                "entity_id %s/%s, type_id %s"%(entity.get_type_id(), entity.get_id(), self.type_id)
                )
        implied_entity = entity
        if implied_entity and ANNAL.CURIE.field_aliases in self.recordtype:
            # Copy before filling in aliases so the original entity is untouched.
            implied_entity = copy.deepcopy(entity)
            for alias in self.recordtype[ANNAL.CURIE.field_aliases]:
                tgt = alias[ANNAL.CURIE.alias_target]
                src = alias[ANNAL.CURIE.alias_source]
                if implied_entity.get(tgt, None) in [None, ""]:
                    implied_entity[tgt] = implied_entity.get(src, "")
        return implied_entity

    def rename_entity(self, new_entity_id, old_typeinfo, old_entity_id):
        """
        Copy associated data files from specified entity to new.

        The calling program is expected to update data associated with the new entity.

        Subdirectories are copied as entire subtrees.
        """
        if old_typeinfo.entity_exists(old_entity_id):
            new_entity = self._new_entity(new_entity_id)
            old_entity = old_typeinfo._new_entity(old_entity_id)
            p_new = new_entity._rename_files(old_entity)
            if not p_new:
                log.warning(
                    "EntityTypeInfo.rename_entity: error renaming entity %s from %s to %s"%
                    (old_entity.get_url(), old_entity_id, new_entity_id)
                    )
        else:
            log.warning(
                "EntityTypeInfo.rename_entity: source entity not found %s/%s"%
                (old_typeinfo.type_id, old_entity_id)
                )
        return

    def enum_entity_ids(self, altscope=None):
        """
        Iterate over entity identifiers in collection with current type.
        """
        if self.entityparent:
            for eid in self.entityparent.child_entity_ids(
                    self.entityclass, altscope=altscope):
                yield eid
        else:
            log.warning("EntityTypeInfo.enum_entity_ids: missing entityparent; type_id %s"%(self.type_id))
        return

    def enum_entities(self, user_perms=None, altscope=None):
        """
        Iterate over entities in collection with current type.

        If user_perms is supplied and not None, yields entities only when the
        permissions include the 'list' permission for this type.
        """
        if (not user_perms or
            self.permissions_map['list'] in user_perms[ANNAL.CURIE.user_permission]):
            if not self.entityparent:
                log.warning("EntityTypeInfo.enum_entities: missing entityparent; type_id %s"%(self.type_id))
            else:
                for eid in self.entityparent.child_entity_ids(
                        self.entityclass, altscope=altscope):
                    yield self.get_entity(eid)
        return

    def enum_entities_with_implied_values(self, user_perms=None, altscope=None):
        """
        Iterate over entities in collection with current type.

        Returns entities with alias and inferred fields instantiated.

        If user_perms is supplied and not None, checks that they contain
        permission to list values of the appropriate type.
        """
        #@@
        # log.info(
        #     "@@ EntityTypeInfo.enum_entities_with_implied_values: parent %s, altscope %s"%
        #     (self.entityparent.get_id(), altscope)
        #     )
        #@@
        if (not user_perms or
            self.permissions_map['list'] in user_perms[ANNAL.CURIE.user_permission]):
            if not self.entityparent:
                log.warning(
                    "EntityTypeInfo.enum_entities_with_implied_values: missing entityparent; type_id %s"%
                    (self.type_id)
                    )
            elif not self.recordtype:
                log.warning(
                    "EntityTypeInfo.enum_entities_with_implied_values: missing recordtype; type_id %s"%
                    (self.type_id)
                    )
                # No record type info: return base entity without implied values
                for eid in self.entityparent.child_entity_ids(
                        self.entityclass, altscope=altscope):
                    yield self.get_entity(eid)
            else:
                #@@
                # log.info(
                #     "@@ enum_entities_with_implied_values: parent %s, altscope %s"%
                #     (self.entityparent.get_id(), altscope)
                #     )
                #@@
                for eid in self.entityparent.child_entity_ids(
                        self.entityclass, altscope=altscope):
                    yield self.get_entity_implied_values(self.get_entity(eid))
        return

    def get_initial_entity_values(self, entity_id, copy_entity_id=layout.INITIAL_VALUES_ID):
        """
        Returns an initial value dictionary for the indicated entity.

        Attempts to read initial values from the type parent directory.
        Failing that, returns system-wide default values.
        """
        values = (
            { '@type': [ANNAL.CURIE.EntityData]
            , ANNAL.CURIE.type_id: self.type_id
            , RDFS.CURIE.label: ""
            , RDFS.CURIE.comment: ""
            })
        init_entity = self.get_entity(copy_entity_id)
        if init_entity:
            values = init_entity.get_values()
            # Identity and location of the source record are not carried over.
            values.pop("@id", None)
            values.pop(ANNAL.CURIE.id, None)
            values.pop(ANNAL.CURIE.url, None)
        values[ANNAL.CURIE.id] = entity_id
        values[RDFS.CURIE.label] = ""
        values[RDFS.CURIE.comment] = ""
        return values

    def get_fileobj(self, entity_id, name, typeuri, mimetype, mode):
        """
        Returns a file object to access a file stored with the named entity
        with the designated type URI (typically obtained from a field
        description).

        The `mode` string value is interpreted like the `mode` parameter to the
        Python `open` function, to the extent applicable.
        """
        fileobj = None
        if self.entityparent:
            fileobj = self.entityclass.fileobj(
                self.entityparent, entity_id, name, typeuri, mimetype, mode
                )
        else:
            log.warning("EntityTypeInfo.get_fileobj: missing entityparent; type_id %s"%(self.type_id))
        return fileobj

    def get_ancestor_id(self, entity):
        """
        Returns the ancestor collection id for the supplied entity
        (which is assumed to be of the current type).
        """
        if self.type_id == COLL_ID:
            return layout.SITEDATA_ID
        return entity._parent._ancestorid

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/entitytypeinfo.py
entitytypeinfo.py
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function

"""
Render type classification predicates.

Used for generating appropriate JSON-LD context values.  Separated from
annalist.views.fields.find_renderers to avoid model dependency on views.
"""

__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2015, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"

import logging
log = logging.getLogger(__name__)

# Render type classification, used for generating appropriate JSON-LD context values
# Separated from annalist.views.fields.find_renderers to avoid model dependency on views

# Render types whose stored value is a literal (string).
# NOTE: "TokenSet" also appears in _render_type_set below; a render type may
# belong to more than one classification.
_render_type_literal = set(
    [ "Text", "Textarea", "Codearea", "Showtext", "LangText"
    , "Placement", "CheckBox", "Markdown", "ShowMarkdown"
    , "EntityId", "EntityTypeId"
    , "TokenSet"
    # , "RefMultifield"
    ])

# Render types whose stored value is an identifier (URI reference).
_render_type_id = set(
    [ "Identifier"
    , "EntityRef"
    , "RefAudio", "RefImage", "URILink", "URIImage"
    , "RefMultifield"
    , "Group_Set", "Group_Set_Row"
    , "Enum", "Enum_optional", "Enum_choice", "Enum_choice_opt"
    , "View_choice"
    , "Type", "View", "List", "Field"
    ])

# Render types whose stored value is a complex (object) value.
_render_type_object = set(
    [ "URIImport", "FileUpload"
    , "RepeatGroup", "RepeatGroupRow", "Group_Seq", "Group_Seq_Row"
    ])

# Render types whose stored value is a list interpreted as a set.
_render_type_set = set(
    [ "TokenSet", "Group_Set", "Group_Set_Row"
    ])

# Render types whose stored value is an ordered list.
_render_type_list = set(
    [ "RepeatGroup", "RepeatGroupRow", "Group_Seq", "Group_Seq_Row"
    ])

def is_render_type_literal(field_render_type):
    """
    Returns True if the supplied render type expects a literal (string)
    value to be stored in a corresponding entity field.
    """
    return field_render_type in _render_type_literal

def is_render_type_id(field_render_type):
    """
    Returns True if the supplied render type expects an id (URI reference)
    value to be stored in a corresponding entity field.
    """
    return field_render_type in _render_type_id

def is_render_type_set(field_render_type):
    """
    Returns True if the supplied render type expects a list value, which is
    interpreted as a set, to be stored in a corresponding entity field.
    """
    return field_render_type in _render_type_set

def is_render_type_list(field_render_type):
    """
    Returns True if the supplied render type expects a list value, which is
    interpreted as an ordered list, to be stored in a corresponding entity field.
    """
    return field_render_type in _render_type_list

def is_render_type_object(field_render_type):
    """
    Returns True if the supplied render type expects a complex (object)
    value to be stored in a corresponding entity field.
    """
    return field_render_type in _render_type_object

# End.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/models/rendertypeinfo.py
rendertypeinfo.py
This directory subtree contains Annalist data that is common to all Annalist installations, in the form of annalist data records.

These records are somewhat self-referential, as they constitute data that is used to bootstrap the Annalist record description capabilities, describing the views and lists that are used for rendering record views and lists. The definitions also include default values (e.g. default record types and views, etc.).

The intent is that the base definitions may be copied and modified for individual collections. When looking for a view or list definition, the Annalist software will first look in the collection's own metadata area (which can be locally modified), and then in the overall site metadata area (which cannot be locally modified, and provides baseline compatibility between Annalist installations).

NOTE: do not delete _annalist_site/site_meta.jsonld from the target site when updating site data: that file is site-specific.
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/sitedata/README.md
README.md
# Values of annal:Enum_render_type property Obtained by: grep -rh annal:Enum_render_type ./annalist | awk '{print $3}' | sort | uniq Results (reorganized and "attic" values removed) "annal:Enum_render_type/EntityId" "annal:Enum_render_type/EntityTypeId" "annal:Enum_render_type/EntityRef" "annal:Enum_render_type/Text" "annal:Enum_render_type/Textarea" "annal:Enum_render_type/Identifier" "annal:Enum_render_type/Enum" "annal:Enum_render_type/Field" "annal:Enum_render_type/List" "annal:Enum_render_type/Type" "annal:Enum_render_type/View" "annal:Enum_render_type/Placement" "annal:Enum_render_type/View_sel"
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/sitedata/_enum_render_type/Render-types.md
Render-types.md
(function(e,t,n,r){"use strict";function l(e){if(typeof e=="string"||e instanceof String)e=e.replace(/^['\\/"]+|(;\s?})+|['\\/"]+$/g,"");return e}var i=function(t){var n=t.length,r=e("head");while(n--)e("head").has("."+t[n]).length===0&&e("head").append('<meta class="'+t[n]+'" />')};i(["foundation-mq-small","foundation-mq-medium","foundation-mq-large","foundation-mq-xlarge","foundation-mq-xxlarge","foundation-data-attribute-namespace"]),e(function(){typeof FastClick!="undefined"&&typeof n.body!="undefined"&&FastClick.attach(n.body)});var s=function(t,r){if(typeof t=="string"){if(r){var i;if(r.jquery){i=r[0];if(!i)return r}else i=r;return e(i.querySelectorAll(t))}return e(n.querySelectorAll(t))}return e(t,r)},o=function(e){var t=[];return e||t.push("data"),this.namespace.length>0&&t.push(this.namespace),t.push(this.name),t.join("-")},u=function(e){var t=e.split("-"),n=t.length,r=[];while(n--)n!==0?r.push(t[n]):this.namespace.length>0?r.push(this.namespace,t[n]):r.push(t[n]);return r.reverse().join("-")},a=function(t,n){var r=this,i=!s(this).data(this.attr_name(!0));if(typeof t=="string")return this[t].call(this,n);s(this.scope).is("["+this.attr_name()+"]")?(s(this.scope).data(this.attr_name(!0)+"-init",e.extend({},this.settings,n||t,this.data_options(s(this.scope)))),i&&this.events(this.scope)):s("["+this.attr_name()+"]",this.scope).each(function(){var i=!s(this).data(r.attr_name(!0)+"-init");s(this).data(r.attr_name(!0)+"-init",e.extend({},r.settings,n||t,r.data_options(s(this)))),i&&r.events(this)})},f=function(e,t){function n(){t(e[0])}function r(){this.one("load",n);if(/MSIE (\d+\.\d+);/.test(navigator.userAgent)){var e=this.attr("src"),t=e.match(/\?/)?"&":"?";t+="random="+(new Date).getTime(),this.attr("src",e+t)}}if(!e.attr("src")){n();return}e[0].complete||e[0].readyState===4?n():r.call(e)};t.matchMedia=t.matchMedia||function(e,t){var n,r=e.documentElement,i=r.firstElementChild||r.firstChild,s=e.createElement("body"),o=e.createElement("div");return 
o.id="mq-test-1",o.style.cssText="position:absolute;top:-100em",s.style.background="none",s.appendChild(o),function(e){return o.innerHTML='&shy;<style media="'+e+'"> #mq-test-1 { width: 42px; }</style>',r.insertBefore(s,i),n=o.offsetWidth===42,r.removeChild(s),{matches:n,media:e}}}(n),function(e){function a(){n&&(s(a),u&&jQuery.fx.tick())}var n,r=0,i=["webkit","moz"],s=t.requestAnimationFrame,o=t.cancelAnimationFrame,u="undefined"!=typeof jQuery.fx;for(;r<i.length&&!s;r++)s=t[i[r]+"RequestAnimationFrame"],o=o||t[i[r]+"CancelAnimationFrame"]||t[i[r]+"CancelRequestAnimationFrame"];s?(t.requestAnimationFrame=s,t.cancelAnimationFrame=o,u&&(jQuery.fx.timer=function(e){e()&&jQuery.timers.push(e)&&!n&&(n=!0,a())},jQuery.fx.stop=function(){n=!1})):(t.requestAnimationFrame=function(e,n){var i=(new Date).getTime(),s=Math.max(0,16-(i-r)),o=t.setTimeout(function(){e(i+s)},s);return r=i+s,o},t.cancelAnimationFrame=function(e){clearTimeout(e)})}(jQuery),t.Foundation={name:"Foundation",version:"5.2.1",media_queries:{small:s(".foundation-mq-small").css("font-family").replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g,""),medium:s(".foundation-mq-medium").css("font-family").replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g,""),large:s(".foundation-mq-large").css("font-family").replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g,""),xlarge:s(".foundation-mq-xlarge").css("font-family").replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g,""),xxlarge:s(".foundation-mq-xxlarge").css("font-family").replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g,"")},stylesheet:e("<style></style>").appendTo("head")[0].sheet,global:{namespace:""},init:function(e,t,n,r,i){var o,u=[e,n,r,i],a=[];this.rtl=/rtl/i.test(s("html").attr("dir")),this.scope=e||this.scope,this.set_namespace();if(t&&typeof t=="string"&&!/reflow/i.test(t))this.libs.hasOwnProperty(t)&&a.push(this.init_lib(t,u));else for(var f in this.libs)a.push(this.init_lib(f,t));return e},init_lib:function(t,n){return 
this.libs.hasOwnProperty(t)?(this.patch(this.libs[t]),n&&n.hasOwnProperty(t)?(typeof this.libs[t].settings!="undefined"?e.extend(!0,this.libs[t].settings,n[t]):typeof this.libs[t].defaults!="undefined"&&e.extend(!0,this.libs[t].defaults,n[t]),this.libs[t].init.apply(this.libs[t],[this.scope,n[t]])):(n=n instanceof Array?n:Array(n),this.libs[t].init.apply(this.libs[t],n))):function(){}},patch:function(e){e.scope=this.scope,e.namespace=this.global.namespace,e.rtl=this.rtl,e.data_options=this.utils.data_options,e.attr_name=o,e.add_namespace=u,e.bindings=a,e.S=this.utils.S},inherit:function(e,t){var n=t.split(" "),r=n.length;while(r--)this.utils.hasOwnProperty(n[r])&&(e[n[r]]=this.utils[n[r]])},set_namespace:function(){var t=this.global.namespace||e(".foundation-data-attribute-namespace").css("font-family");if(/false/i.test(t))return;this.global.namespace=t},libs:{},utils:{S:s,throttle:function(e,t){var n=null;return function(){var r=this,i=arguments;clearTimeout(n),n=setTimeout(function(){e.apply(r,i)},t)}},debounce:function(e,t,n){var r,i;return function(){var s=this,o=arguments,u=function(){r=null,n||(i=e.apply(s,o))},a=n&&!r;return clearTimeout(r),r=setTimeout(u,t),a&&(i=e.apply(s,o)),i}},data_options:function(t){function a(e){return!isNaN(e-0)&&e!==null&&e!==""&&e!==!1&&e!==!0}function f(t){return typeof t=="string"?e.trim(t):t}var n={},r,i,s,o=function(e){var t=Foundation.global.namespace;return t.length>0?e.data(t+"-options"):e.data("options")},u=o(t);if(typeof u=="object")return u;s=(u||":").split(";"),r=s.length;while(r--)i=s[r].split(":"),/true/i.test(i[1])&&(i[1]=!0),/false/i.test(i[1])&&(i[1]=!1),a(i[1])&&(i[1].indexOf(".")===-1?i[1]=parseInt(i[1],10):i[1]=parseFloat(i[1],10)),i.length===2&&i[0].length>0&&(n[f(i[0])]=f(i[1]));return n},register_media:function(t,n){Foundation.media_queries[t]===r&&(e("head").append('<meta 
class="'+n+'">'),Foundation.media_queries[t]=l(e("."+n).css("font-family")))},add_custom_rule:function(e,t){if(t===r)Foundation.stylesheet.insertRule(e,Foundation.stylesheet.cssRules.length);else{var n=Foundation.media_queries[t];n!==r&&Foundation.stylesheet.insertRule("@media "+Foundation.media_queries[t]+"{ "+e+" }")}},image_loaded:function(e,t){var n=this,r=e.length;r===0&&t(e),e.each(function(){f(n.S(this),function(){r-=1,r===0&&t(e)})})},random_str:function(){return this.fidx||(this.fidx=0),this.prefix=this.prefix||[this.name||"F",(+(new Date)).toString(36)].join("-"),this.prefix+(this.fidx++).toString(36)}}},e.fn.foundation=function(){var e=Array.prototype.slice.call(arguments,0);return this.each(function(){return Foundation.init.apply(Foundation,[this].concat(e)),this})}})(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.slider={name:"slider",version:"5.2.1",settings:{start:0,end:100,step:1,initial:null,display_selector:"",on_change:function(){}},cache:{},init:function(e,t,n){Foundation.inherit(this,"throttle"),this.bindings(t,n),this.reflow()},events:function(){var n=this;e(this.scope).off(".slider").on("mousedown.fndtn.slider touchstart.fndtn.slider pointerdown.fndtn.slider","["+n.attr_name()+"] .range-slider-handle",function(t){n.cache.active||n.set_active_slider(e(t.target))}).on("mousemove.fndtn.slider touchmove.fndtn.slider pointermove.fndtn.slider",function(e){!n.cache.active||(e.preventDefault(),n.calculate_position(n.cache.active,e.pageX||e.originalEvent.touches[0].clientX||e.currentPoint.x))}).on("mouseup.fndtn.slider touchend.fndtn.slider pointerup.fndtn.slider",function(e){n.remove_active_slider()}).on("change.fndtn.slider",function(e){n.settings.on_change}),n.S(t).on("resize.fndtn.slider",n.throttle(function(e){n.reflow()},300))},set_active_slider:function(e){this.cache.active=e},remove_active_slider:function(){this.cache.active=null},calculate_position:function(t,n){var 
r=this,i=e.extend({},r.settings,r.data_options(t.parent())),s=e.data(t[0],"handle_w"),o=e.data(t[0],"handle_o"),u=e.data(t[0],"bar_w"),a=e.data(t[0],"bar_o");requestAnimationFrame(function(){var e=r.limit_to((n-a)/u,0,1),s=r.normalized_value(e,i.start,i.end,i.step);r.set_ui(t,s)})},set_ui:function(t,n){var r=e.extend({},this.settings,this.data_options(t.parent())),i=e.data(t[0],"handle_w"),s=e.data(t[0],"bar_w"),o=this.normalized_percentage(n,r.start,r.end),u=o*(s-i)-1,a=o*100;this.set_translate(t,u),t.siblings(".range-slider-active-segment").css("width",a+"%"),t.parent().attr(this.attr_name(),n),t.parent().trigger("change"),t.parent().children("input[type=hidden]").val(n),r.input_id!=""&&e(r.display_selector).each(function(){this.hasOwnProperty("value")?e(this).val(n):e(this).text(n)})},normalized_percentage:function(e,t,n){return e/(n-t)},normalized_value:function(e,t,n,r){var i=n-t,r=r,s=e*i,o=(s-s%r)/r,u=s%r,a=u>=r*.5?r:0;return o*r+a},set_translate:function(t,n,r){r?e(t).css("-webkit-transform","translateY("+n+"px)").css("-moz-transform","translateY("+n+"px)").css("-ms-transform","translateY("+n+"px)").css("-o-transform","translateY("+n+"px)").css("transform","translateY("+n+"px)"):e(t).css("-webkit-transform","translateX("+n+"px)").css("-moz-transform","translateX("+n+"px)").css("-ms-transform","translateX("+n+"px)").css("-o-transform","translateX("+n+"px)").css("transform","translateX("+n+"px)")},limit_to:function(e,t,n){return Math.min(Math.max(e,t),n)},initialize_settings:function(t){e.data(t,"bar",e(t).parent()),e.data(t,"bar_o",e(t).parent().offset().left),e.data(t,"bar_w",e(t).parent().outerWidth()),e.data(t,"handle_o",e(t).offset().left),e.data(t,"handle_w",e(t).outerWidth()),e.data(t,"settings",e.extend({},this.settings,this.data_options(e(t).parent())))},set_initial_position:function(t){var 
n=e.data(t.children(".range-slider-handle")[0],"settings"),r=n.initial?n.initial:Math.floor((n.end-n.start)*.5/n.step)*n.step,i=t.children(".range-slider-handle");this.set_ui(i,r)},set_value:function(t){var n=this;e("["+n.attr_name()+"]",this.scope).each(function(){e(this).attr(n.attr_name(),t)}),!e(this.scope).attr(n.attr_name())||e(this.scope).attr(n.attr_name(),t),n.reflow()},reflow:function(){var t=this;t.S("["+this.attr_name()+"]").each(function(){var n=e(this).children(".range-slider-handle")[0],r=e(this).attr(t.attr_name());t.initialize_settings(n),r?t.set_ui(e(n),parseInt(r)):t.set_initial_position(e(this))})}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";var i=i||!1;Foundation.libs.joyride={name:"joyride",version:"5.2.1",defaults:{expose:!1,modal:!0,tip_location:"bottom",nub_position:"auto",scroll_speed:1500,scroll_animation:"linear",timer:0,start_timer_on_click:!0,start_offset:0,next_button:!0,tip_animation:"fade",pause_after:[],exposed:[],tip_animation_fade_speed:300,cookie_monster:!1,cookie_name:"joyride",cookie_domain:!1,cookie_expires:365,tip_container:"body",tip_location_patterns:{top:["bottom"],bottom:[],left:["right","top","bottom"],right:["left","top","bottom"]},post_ride_callback:function(){},post_step_callback:function(){},pre_step_callback:function(){},pre_ride_callback:function(){},post_expose_callback:function(){},template:{link:'<a href="#close" class="joyride-close-tip">&times;</a>',timer:'<div class="joyride-timer-indicator-wrap"><span class="joyride-timer-indicator"></span></div>',tip:'<div class="joyride-tip-guide"><span class="joyride-nub"></span></div>',wrapper:'<div class="joyride-content-wrapper"></div>',button:'<a href="#" class="small button joyride-next-tip"></a>',modal:'<div class="joyride-modal-bg"></div>',expose:'<div class="joyride-expose-wrapper"></div>',expose_cover:'<div class="joyride-expose-cover"></div>'},expose_add_class:""},init:function(t,n,r){Foundation.inherit(this,"throttle 
random_str"),this.settings=this.settings||e.extend({},this.defaults,r||n),this.bindings(n,r)},events:function(){var n=this;e(this.scope).off(".joyride").on("click.fndtn.joyride",".joyride-next-tip, .joyride-modal-bg",function(e){e.preventDefault(),this.settings.$li.next().length<1?this.end():this.settings.timer>0?(clearTimeout(this.settings.automate),this.hide(),this.show(),this.startTimer()):(this.hide(),this.show())}.bind(this)).on("click.fndtn.joyride",".joyride-close-tip",function(e){e.preventDefault(),this.end()}.bind(this)),e(t).off(".joyride").on("resize.fndtn.joyride",n.throttle(function(){if(e("["+n.attr_name()+"]").length>0&&n.settings.$next_tip){if(n.settings.exposed.length>0){var t=e(n.settings.exposed);t.each(function(){var t=e(this);n.un_expose(t),n.expose(t)})}n.is_phone()?n.pos_phone():n.pos_default(!1,!0)}},100))},start:function(){var t=this,n=e("["+this.attr_name()+"]",this.scope),r=["timer","scrollSpeed","startOffset","tipAnimationFadeSpeed","cookieExpires"],i=r.length;if(!n.length>0)return;this.settings.init||this.events(),this.settings=n.data(this.attr_name(!0)+"-init"),this.settings.$content_el=n,this.settings.$body=e(this.settings.tip_container),this.settings.body_offset=e(this.settings.tip_container).position(),this.settings.$tip_content=this.settings.$content_el.find("> li"),this.settings.paused=!1,this.settings.attempts=0,typeof e.cookie!="function"&&(this.settings.cookie_monster=!1);if(!this.settings.cookie_monster||this.settings.cookie_monster&&!e.cookie(this.settings.cookie_name))this.settings.$tip_content.each(function(n){var s=e(this);this.settings=e.extend({},t.defaults,t.data_options(s));var o=i;while(o--)t.settings[r[o]]=parseInt(t.settings[r[o]],10);t.create({$li:s,index:n})}),!this.settings.start_timer_on_click&&this.settings.timer>0?(this.show("init"),this.startTimer()):this.show("init")},resume:function(){this.set_li(),this.show()},tip_template:function(t){var n,r;return 
t.tip_class=t.tip_class||"",n=e(this.settings.template.tip).addClass(t.tip_class),r=e.trim(e(t.li).html())+this.button_text(t.button_text)+this.settings.template.link+this.timer_instance(t.index),n.append(e(this.settings.template.wrapper)),n.first().attr(this.add_namespace("data-index"),t.index),e(".joyride-content-wrapper",n).append(r),n[0]},timer_instance:function(t){var n;return t===0&&this.settings.start_timer_on_click&&this.settings.timer>0||this.settings.timer===0?n="":n=e(this.settings.template.timer)[0].outerHTML,n},button_text:function(t){return this.settings.next_button?(t=e.trim(t)||"Next",t=e(this.settings.template.button).append(t)[0].outerHTML):t="",t},create:function(t){var n=t.$li.attr(this.add_namespace("data-button"))||t.$li.attr(this.add_namespace("data-text")),r=t.$li.attr("class"),i=e(this.tip_template({tip_class:r,index:t.index,button_text:n,li:t.$li}));e(this.settings.tip_container).append(i)},show:function(t){var n=null;this.settings.$li===r||e.inArray(this.settings.$li.index(),this.settings.pause_after)===-1?(this.settings.paused?this.settings.paused=!1:this.set_li(t),this.settings.attempts=0,this.settings.$li.length&&this.settings.$target.length>0?(t&&(this.settings.pre_ride_callback(this.settings.$li.index(),this.settings.$next_tip),this.settings.modal&&this.show_modal()),this.settings.pre_step_callback(this.settings.$li.index(),this.settings.$next_tip),this.settings.modal&&this.settings.expose&&this.expose(),this.settings.tip_settings=e.extend({},this.settings,this.data_options(this.settings.$li)),this.settings.timer=parseInt(this.settings.timer,10),this.settings.tip_settings.tip_location_pattern=this.settings.tip_location_patterns[this.settings.tip_settings.tip_location],/body/i.test(this.settings.$target.selector)||this.scroll_to(),this.is_phone()?this.pos_phone(!0):this.pos_default(!0),n=this.settings.$next_tip.find(".joyride-timer-indicator"),/pop/i.test(this.settings.tip_animation)?(n.width(0),this.settings.timer>0?(this.settings.$ne
xt_tip.show(),setTimeout(function(){n.animate({width:n.parent().width()},this.settings.timer,"linear")}.bind(this),this.settings.tip_animation_fade_speed)):this.settings.$next_tip.show()):/fade/i.test(this.settings.tip_animation)&&(n.width(0),this.settings.timer>0?(this.settings.$next_tip.fadeIn(this.settings.tip_animation_fade_speed).show(),setTimeout(function(){n.animate({width:n.parent().width()},this.settings.timer,"linear")}.bind(this),this.settings.tip_animation_fadeSpeed)):this.settings.$next_tip.fadeIn(this.settings.tip_animation_fade_speed)),this.settings.$current_tip=this.settings.$next_tip):this.settings.$li&&this.settings.$target.length<1?this.show():this.end()):this.settings.paused=!0},is_phone:function(){return matchMedia(Foundation.media_queries.small).matches&&!matchMedia(Foundation.media_queries.medium).matches},hide:function(){this.settings.modal&&this.settings.expose&&this.un_expose(),this.settings.modal||e(".joyride-modal-bg").hide(),this.settings.$current_tip.css("visibility","hidden"),setTimeout(e.proxy(function(){this.hide(),this.css("visibility","visible")},this.settings.$current_tip),0),this.settings.post_step_callback(this.settings.$li.index(),this.settings.$current_tip)},set_li:function(e){e?(this.settings.$li=this.settings.$tip_content.eq(this.settings.start_offset),this.set_next_tip(),this.settings.$current_tip=this.settings.$next_tip):(this.settings.$li=this.settings.$li.next(),this.set_next_tip()),this.set_target()},set_next_tip:function(){this.settings.$next_tip=e(".joyride-tip-guide").eq(this.settings.$li.index()),this.settings.$next_tip.data("closed","")},set_target:function(){var t=this.settings.$li.attr(this.add_namespace("data-class")),r=this.settings.$li.attr(this.add_namespace("data-id")),i=function(){return r?e(n.getElementById(r)):t?e("."+t).first():e("body")};this.settings.$target=i()},scroll_to:function(){var 
n,r;n=e(t).height()/2,r=Math.ceil(this.settings.$target.offset().top-n+this.settings.$next_tip.outerHeight()),r!=0&&e("html, body").animate({scrollTop:r},this.settings.scroll_speed,"swing")},paused:function(){return e.inArray(this.settings.$li.index()+1,this.settings.pause_after)===-1},restart:function(){this.hide(),this.settings.$li=r,this.show("init")},pos_default:function(n,r){var i=Math.ceil(e(t).height()/2),s=this.settings.$next_tip.offset(),o=this.settings.$next_tip.find(".joyride-nub"),u=Math.ceil(o.outerWidth()/2),a=Math.ceil(o.outerHeight()/2),f=n||!1;f&&(this.settings.$next_tip.css("visibility","hidden"),this.settings.$next_tip.show()),typeof r=="undefined"&&(r=!1),/body/i.test(this.settings.$target.selector)?this.settings.$li.length&&this.pos_modal(o):(this.bottom()?(this.rtl?this.settings.$next_tip.css({top:this.settings.$target.offset().top+a+this.settings.$target.outerHeight(),left:this.settings.$target.offset().left+this.settings.$target.outerWidth()-this.settings.$next_tip.outerWidth()}):this.settings.$next_tip.css({top:this.settings.$target.offset().top+a+this.settings.$target.outerHeight(),left:this.settings.$target.offset().left}),this.nub_position(o,this.settings.tip_settings.nub_position,"top")):this.top()?(this.rtl?this.settings.$next_tip.css({top:this.settings.$target.offset().top-this.settings.$next_tip.outerHeight()-a,left:this.settings.$target.offset().left+this.settings.$target.outerWidth()-this.settings.$next_tip.outerWidth()}):this.settings.$next_tip.css({top:this.settings.$target.offset().top-this.settings.$next_tip.outerHeight()-a,left:this.settings.$target.offset().left}),this.nub_position(o,this.settings.tip_settings.nub_position,"bottom")):this.right()?(this.settings.$next_tip.css({top:this.settings.$target.offset().top,left:this.settings.$target.outerWidth()+this.settings.$target.offset().left+u}),this.nub_position(o,this.settings.tip_settings.nub_position,"left")):this.left()&&(this.settings.$next_tip.css({top:this.settings.$targe
t.offset().top,left:this.settings.$target.offset().left-this.settings.$next_tip.outerWidth()-u}),this.nub_position(o,this.settings.tip_settings.nub_position,"right")),!this.visible(this.corners(this.settings.$next_tip))&&this.settings.attempts<this.settings.tip_settings.tip_location_pattern.length&&(o.removeClass("bottom").removeClass("top").removeClass("right").removeClass("left"),this.settings.tip_settings.tip_location=this.settings.tip_settings.tip_location_pattern[this.settings.attempts],this.settings.attempts++,this.pos_default())),f&&(this.settings.$next_tip.hide(),this.settings.$next_tip.css("visibility","visible"))},pos_phone:function(t){var n=this.settings.$next_tip.outerHeight(),r=this.settings.$next_tip.offset(),i=this.settings.$target.outerHeight(),s=e(".joyride-nub",this.settings.$next_tip),o=Math.ceil(s.outerHeight()/2),u=t||!1;s.removeClass("bottom").removeClass("top").removeClass("right").removeClass("left"),u&&(this.settings.$next_tip.css("visibility","hidden"),this.settings.$next_tip.show()),/body/i.test(this.settings.$target.selector)?this.settings.$li.length&&this.pos_modal(s):this.top()?(this.settings.$next_tip.offset({top:this.settings.$target.offset().top-n-o}),s.addClass("bottom")):(this.settings.$next_tip.offset({top:this.settings.$target.offset().top+i+o}),s.addClass("top")),u&&(this.settings.$next_tip.hide(),this.settings.$next_tip.css("visibility","visible"))},pos_modal:function(e){this.center(),e.hide(),this.show_modal()},show_modal:function(){if(!this.settings.$next_tip.data("closed")){var t=e(".joyride-modal-bg");t.length<1&&e("body").append(this.settings.template.modal).show(),/pop/i.test(this.settings.tip_animation)?t.show():t.fadeIn(this.settings.tip_animation_fade_speed)}},expose:function(){var n,r,i,s,o,u="expose-"+this.random_str(6);if(arguments.length>0&&arguments[0]instanceof e)i=arguments[0];else{if(!this.settings.$target||!!/body/i.test(this.settings.$target.selector))return!1;i=this.settings.$target}if(i.length<1)return 
t.console&&console.error("element not valid",i),!1;n=e(this.settings.template.expose),this.settings.$body.append(n),n.css({top:i.offset().top,left:i.offset().left,width:i.outerWidth(!0),height:i.outerHeight(!0)}),r=e(this.settings.template.expose_cover),s={zIndex:i.css("z-index"),position:i.css("position")},o=i.attr("class")==null?"":i.attr("class"),i.css("z-index",parseInt(n.css("z-index"))+1),s.position=="static"&&i.css("position","relative"),i.data("expose-css",s),i.data("orig-class",o),i.attr("class",o+" "+this.settings.expose_add_class),r.css({top:i.offset().top,left:i.offset().left,width:i.outerWidth(!0),height:i.outerHeight(!0)}),this.settings.modal&&this.show_modal(),this.settings.$body.append(r),n.addClass(u),r.addClass(u),i.data("expose",u),this.settings.post_expose_callback(this.settings.$li.index(),this.settings.$next_tip,i),this.add_exposed(i)},un_expose:function(){var n,r,i,s,o,u=!1;if(arguments.length>0&&arguments[0]instanceof e)r=arguments[0];else{if(!this.settings.$target||!!/body/i.test(this.settings.$target.selector))return!1;r=this.settings.$target}if(r.length<1)return t.console&&console.error("element not valid",r),!1;n=r.data("expose"),i=e("."+n),arguments.length>1&&(u=arguments[1]),u===!0?e(".joyride-expose-wrapper,.joyride-expose-cover").remove():i.remove(),s=r.data("expose-css"),s.zIndex=="auto"?r.css("z-index",""):r.css("z-index",s.zIndex),s.position!=r.css("position")&&(s.position=="static"?r.css("position",""):r.css("position",s.position)),o=r.data("orig-class"),r.attr("class",o),r.removeData("orig-classes"),r.removeData("expose"),r.removeData("expose-z-index"),this.remove_exposed(r)},add_exposed:function(t){this.settings.exposed=this.settings.exposed||[],t instanceof e||typeof t=="object"?this.settings.exposed.push(t[0]):typeof t=="string"&&this.settings.exposed.push(t)},remove_exposed:function(t){var n,r;t instanceof e?n=t[0]:typeof 
t=="string"&&(n=t),this.settings.exposed=this.settings.exposed||[],r=this.settings.exposed.length;while(r--)if(this.settings.exposed[r]==n){this.settings.exposed.splice(r,1);return}},center:function(){var n=e(t);return this.settings.$next_tip.css({top:(n.height()-this.settings.$next_tip.outerHeight())/2+n.scrollTop(),left:(n.width()-this.settings.$next_tip.outerWidth())/2+n.scrollLeft()}),!0},bottom:function(){return/bottom/i.test(this.settings.tip_settings.tip_location)},top:function(){return/top/i.test(this.settings.tip_settings.tip_location)},right:function(){return/right/i.test(this.settings.tip_settings.tip_location)},left:function(){return/left/i.test(this.settings.tip_settings.tip_location)},corners:function(n){var r=e(t),i=r.height()/2,s=Math.ceil(this.settings.$target.offset().top-i+this.settings.$next_tip.outerHeight()),o=r.width()+r.scrollLeft(),u=r.height()+s,a=r.height()+r.scrollTop(),f=r.scrollTop();return s<f&&(s<0?f=0:f=s),u>a&&(a=u),[n.offset().top<f,o<n.offset().left+n.outerWidth(),a<n.offset().top+n.outerHeight(),r.scrollLeft()>n.offset().left]},visible:function(e){var 
t=e.length;while(t--)if(e[t])return!1;return!0},nub_position:function(e,t,n){t==="auto"?e.addClass(n):e.addClass(t)},startTimer:function(){this.settings.$li.length?this.settings.automate=setTimeout(function(){this.hide(),this.show(),this.startTimer()}.bind(this),this.settings.timer):clearTimeout(this.settings.automate)},end:function(){this.settings.cookie_monster&&e.cookie(this.settings.cookie_name,"ridden",{expires:this.settings.cookie_expires,domain:this.settings.cookie_domain}),this.settings.timer>0&&clearTimeout(this.settings.automate),this.settings.modal&&this.settings.expose&&this.un_expose(),this.settings.$next_tip.data("closed",!0),e(".joyride-modal-bg").hide(),this.settings.$current_tip.hide(),this.settings.post_step_callback(this.settings.$li.index(),this.settings.$current_tip),this.settings.post_ride_callback(this.settings.$li.index(),this.settings.$current_tip),e(".joyride-tip-guide").remove()},off:function(){e(this.scope).off(".joyride"),e(t).off(".joyride"),e(".joyride-close-tip, .joyride-next-tip, .joyride-modal-bg").off(".joyride"),e(".joyride-tip-guide, .joyride-modal-bg").remove(),clearTimeout(this.settings.automate),this.settings={}},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.equalizer={name:"equalizer",version:"5.2.1",settings:{use_tallest:!0,before_height_change:e.noop,after_height_change:e.noop},init:function(e,t,n){this.bindings(t,n),this.reflow()},events:function(){this.S(t).off(".equalizer").on("resize.fndtn.equalizer",function(e){this.reflow()}.bind(this))},equalize:function(t){var n=!1,r=t.find("["+this.attr_name()+"-watch]"),i=r.first().offset().top,s=t.data(this.attr_name(!0)+"-init");if(r.length===0)return;s.before_height_change(),t.trigger("before-height-change"),r.height("inherit"),r.each(function(){var t=e(this);t.offset().top!==i&&(n=!0)});if(n)return;var o=r.map(function(){return e(this).outerHeight()}).get();if(s.use_tallest){var 
u=Math.max.apply(null,o);r.css("height",u)}else{var a=Math.min.apply(null,o);r.css("height",a)}s.after_height_change(),t.trigger("after-height-change")},reflow:function(){var t=this;this.S("["+this.attr_name()+"]",this.scope).each(function(){t.equalize(e(this))})}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.dropdown={name:"dropdown",version:"5.2.1",settings:{active_class:"open",align:"bottom",is_hover:!1,opened:function(){},closed:function(){}},init:function(e,t,n){Foundation.inherit(this,"throttle"),this.bindings(t,n)},events:function(n){var r=this,i=r.S;i(this.scope).off(".dropdown").on("click.fndtn.dropdown","["+this.attr_name()+"]",function(e){var t=i(this).data(r.attr_name(!0)+"-init")||r.settings;e.preventDefault(),(!t.is_hover||Modernizr.touch)&&r.toggle(i(this))}).on("mouseenter.fndtn.dropdown","["+this.attr_name()+"], ["+this.attr_name()+"-content]",function(e){var t=i(this);clearTimeout(r.timeout);if(t.data(r.data_attr()))var n=i("#"+t.data(r.data_attr())),s=t;else{var n=t;s=i("["+r.attr_name()+"='"+n.attr("id")+"']")}var o=s.data(r.attr_name(!0)+"-init")||r.settings;i(e.target).data(r.data_attr())&&o.is_hover&&r.closeall.call(r),o.is_hover&&r.open.apply(r,[n,s])}).on("mouseleave.fndtn.dropdown","["+this.attr_name()+"], ["+this.attr_name()+"-content]",function(e){var t=i(this);r.timeout=setTimeout(function(){if(t.data(r.data_attr())){var e=t.data(r.data_attr(!0)+"-init")||r.settings;e.is_hover&&r.close.call(r,i("#"+t.data(r.data_attr())))}else{var n=i("["+r.attr_name()+'="'+i(this).attr("id")+'"]'),e=n.data(r.attr_name(!0)+"-init")||r.settings;e.is_hover&&r.close.call(r,t)}}.bind(this),150)}).on("click.fndtn.dropdown",function(t){var 
n=i(t.target).closest("["+r.attr_name()+"-content]");if(i(t.target).data(r.data_attr())||i(t.target).parent().data(r.data_attr()))return;if(!i(t.target).data("revealId")&&n.length>0&&(i(t.target).is("["+r.attr_name()+"-content]")||e.contains(n.first()[0],t.target))){t.stopPropagation();return}r.close.call(r,i("["+r.attr_name()+"-content]"))}).on("opened.fndtn.dropdown","["+r.attr_name()+"-content]",function(){r.settings.opened.call(this)}).on("closed.fndtn.dropdown","["+r.attr_name()+"-content]",function(){r.settings.closed.call(this)}),i(t).off(".dropdown").on("resize.fndtn.dropdown",r.throttle(function(){r.resize.call(r)},50)),this.resize()},close:function(e){var t=this;e.each(function(){t.S(this).hasClass(t.settings.active_class)&&(t.S(this).css(Foundation.rtl?"right":"left","-99999px").removeClass(t.settings.active_class),t.S(this).trigger("closed",[e]))})},closeall:function(){var t=this;e.each(t.S("["+this.attr_name()+"-content]"),function(){t.close.call(t,t.S(this))})},open:function(e,t){this.css(e.addClass(this.settings.active_class),t),e.trigger("opened",[e,t])},data_attr:function(){return this.namespace.length>0?this.namespace+"-"+this.name:this.name},toggle:function(e){var t=this.S("#"+e.data(this.data_attr()));if(t.length===0)return;this.close.call(this,this.S("["+this.attr_name()+"-content]").not(t)),t.hasClass(this.settings.active_class)?this.close.call(this,t):(this.close.call(this,this.S("["+this.attr_name()+"-content]")),this.open.call(this,t,e))},resize:function(){var e=this.S("["+this.attr_name()+"-content].open"),t=this.S("["+this.attr_name()+"='"+e.attr("id")+"']");e.length&&t.length&&this.css(e,t)},css:function(e,t){this.clear_idx();if(this.small()){var n=this.dirs.bottom.call(e,t);e.attr("style","").removeClass("drop-left drop-right drop-top").css({position:"absolute",width:"95%","max-width":"none",top:n.top}),e.css(Foundation.rtl?"right":"left","2.5%")}else{var r=t.data(this.attr_name(!0)+"-init")||this.settings;this.style(e,t,r)}return 
e},style:function(t,n,r){var i=e.extend({position:"absolute"},this.dirs[r.align].call(t,n,r));t.attr("style","").css(i)},dirs:{_base:function(e){var t=this.offsetParent(),n=t.offset(),r=e.offset();return r.top-=n.top,r.left-=n.left,r},top:function(e,t){var n=Foundation.libs.dropdown,r=n.dirs._base.call(this,e),i=e.outerWidth()/2-8;return this.addClass("drop-top"),(e.outerWidth()<this.outerWidth()||n.small())&&n.adjust_pip(i,r),Foundation.rtl?{left:r.left-this.outerWidth()+e.outerWidth(),top:r.top-this.outerHeight()}:{left:r.left,top:r.top-this.outerHeight()}},bottom:function(e,t){var n=Foundation.libs.dropdown,r=n.dirs._base.call(this,e),i=e.outerWidth()/2-8;return(e.outerWidth()<this.outerWidth()||n.small())&&n.adjust_pip(i,r),n.rtl?{left:r.left-this.outerWidth()+e.outerWidth(),top:r.top+e.outerHeight()}:{left:r.left,top:r.top+e.outerHeight()}},left:function(e,t){var n=Foundation.libs.dropdown.dirs._base.call(this,e);return this.addClass("drop-left"),{left:n.left-this.outerWidth(),top:n.top}},right:function(e,t){var n=Foundation.libs.dropdown.dirs._base.call(this,e);return this.addClass("drop-right"),{left:n.left+e.outerWidth(),top:n.top}}},adjust_pip:function(e,t){var n=Foundation.stylesheet;this.small()&&(e+=t.left-8),this.rule_idx=n.cssRules.length;var r=".f-dropdown.open:before",i=".f-dropdown.open:after",s="left: "+e+"px;",o="left: "+(e-1)+"px;";n.insertRule?(n.insertRule([r,"{",s,"}"].join(" "),this.rule_idx),n.insertRule([i,"{",o,"}"].join(" "),this.rule_idx+1)):(n.addRule(r,s,this.rule_idx),n.addRule(i,o,this.rule_idx+1))},clear_idx:function(){var e=Foundation.stylesheet;this.rule_idx&&(e.deleteRule(this.rule_idx),e.deleteRule(this.rule_idx),delete this.rule_idx)},small:function(){return matchMedia(Foundation.media_queries.small).matches&&!matchMedia(Foundation.media_queries.medium).matches},off:function(){this.S(this.scope).off(".fndtn.dropdown"),this.S("html, 
body").off(".fndtn.dropdown"),this.S(t).off(".fndtn.dropdown"),this.S("[data-dropdown-content]").off(".fndtn.dropdown")},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.clearing={name:"clearing",version:"5.2.1",settings:{templates:{viewing:'<a href="#" class="clearing-close">&times;</a><div class="visible-img" style="display: none"><div class="clearing-touch-label"></div><img src="data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs%3D" alt="" /><p class="clearing-caption"></p><a href="#" class="clearing-main-prev"><span></span></a><a href="#" class="clearing-main-next"><span></span></a></div>'},close_selectors:".clearing-close" ,touch_label:"&larr;&nbsp;Swipe to Advance&nbsp;&rarr;",init:!1,locked:!1},init:function(e,t,n){var r=this;Foundation.inherit(this,"throttle image_loaded"),this.bindings(t,n),r.S(this.scope).is("["+this.attr_name()+"]")?this.assemble(r.S("li",this.scope)):r.S("["+this.attr_name()+"]",this.scope).each(function(){r.assemble(r.S("li",this))})},events:function(r){var i=this,s=i.S;e(".scroll-container").length>0&&(this.scope=e(".scroll-container")),s(this.scope).off(".clearing").on("click.fndtn.clearing","ul["+this.attr_name()+"] li",function(e,t,n){var 
t=t||s(this),n=n||t,r=t.next("li"),o=t.closest("["+i.attr_name()+"]").data(i.attr_name(!0)+"-init"),u=s(e.target);e.preventDefault(),o||(i.init(),o=t.closest("["+i.attr_name()+"]").data(i.attr_name(!0)+"-init")),n.hasClass("visible")&&t[0]===n[0]&&r.length>0&&i.is_open(t)&&(n=r,u=s("img",n)),i.open(u,t,n),i.update_paddles(n)}).on("click.fndtn.clearing",".clearing-main-next",function(e){i.nav(e,"next")}).on("click.fndtn.clearing",".clearing-main-prev",function(e){i.nav(e,"prev")}).on("click.fndtn.clearing",this.settings.close_selectors,function(e){Foundation.libs.clearing.close(e,this)}),e(n).on("keydown.fndtn.clearing",function(e){i.keydown(e)}),s(t).off(".clearing").on("resize.fndtn.clearing",function(){i.resize()}),this.swipe_events(r)},swipe_events:function(e){var t=this,n=t.S;n(this.scope).on("touchstart.fndtn.clearing",".visible-img",function(e){e.touches||(e=e.originalEvent);var t={start_page_x:e.touches[0].pageX,start_page_y:e.touches[0].pageY,start_time:(new Date).getTime(),delta_x:0,is_scrolling:r};n(this).data("swipe-transition",t),e.stopPropagation()}).on("touchmove.fndtn.clearing",".visible-img",function(e){e.touches||(e=e.originalEvent);if(e.touches.length>1||e.scale&&e.scale!==1)return;var r=n(this).data("swipe-transition");typeof r=="undefined"&&(r={}),r.delta_x=e.touches[0].pageX-r.start_page_x,typeof r.is_scrolling=="undefined"&&(r.is_scrolling=!!(r.is_scrolling||Math.abs(r.delta_x)<Math.abs(e.touches[0].pageY-r.start_page_y)));if(!r.is_scrolling&&!r.active){e.preventDefault();var i=r.delta_x<0?"next":"prev";r.active=!0,t.nav(e,i)}}).on("touchend.fndtn.clearing",".visible-img",function(e){n(this).data("swipe-transition",{}),e.stopPropagation()})},assemble:function(t){var n=t.parent();if(n.parent().hasClass("carousel"))return;n.after('<div id="foundationClearingHolder"></div>');var r=this.S("#foundationClearingHolder"),i=n.data(this.attr_name(!0)+"-init"),s=n.detach(),o={grid:'<div 
class="carousel">'+s[0].outerHTML+"</div>",viewing:i.templates.viewing},u='<div class="clearing-assembled"><div>'+o.viewing+o.grid+"</div></div>",a=this.settings.touch_label;Modernizr.touch&&(u=e(u).find(".clearing-touch-label").html(a).end()),r.after(u).remove()},open:function(t,r,i){function p(){setTimeout(function(){this.image_loaded(l,function(){l.outerWidth()===1&&!h?p.call(this):d.call(this,l)}.bind(this))}.bind(this),50)}function d(t){var n=e(t);t.css("visibility","visible"),o.css("overflow","hidden"),u.addClass("clearing-blackout"),a.addClass("clearing-container"),f.show(),this.fix_height(i).caption(s.S(".clearing-caption",f),s.S("img",i)).center_and_label(t,c).shift(r,i,function(){i.siblings().removeClass("visible"),i.addClass("visible")})}var s=this,o=e(n.body),u=i.closest(".clearing-assembled"),a=e("div",u).first(),f=e(".visible-img",a),l=e("img",f).not(t),c=e(".clearing-touch-label",".clearing-blackout"),h=!1;l.error(function(){h=!0}),this.locked()||(l.attr("src",this.load(t)).css("visibility","hidden"),p.call(this))},close:function(t,r){t.preventDefault();var i=function(e){return/blackout/.test(e.selector)?e:e.closest(".clearing-blackout")}(e(r)),s=e(n.body),o,u;return r===t.target&&i&&(s.css("overflow",""),o=e("div",i).first(),u=e(".visible-img",o),this.settings.prev_index=0,e("ul["+this.attr_name()+"]",i).attr("style","").closest(".clearing-blackout").removeClass("clearing-blackout"),o.removeClass("clearing-container"),u.hide()),!1},is_open:function(e){return e.parent().prop("style").length>0},keydown:function(t){var n=e(".clearing-blackout ul["+this.attr_name()+"]"),r=this.rtl?37:39,i=this.rtl?39:37,s=27;t.which===r&&this.go(n,"next"),t.which===i&&this.go(n,"prev"),t.which===s&&this.S("a.clearing-close").trigger("click")},nav:function(t,n){var r=e("ul["+this.attr_name()+"]",".clearing-blackout");t.preventDefault(),this.go(r,n)},resize:function(){var t=e("img",".clearing-blackout 
.visible-img"),n=e(".clearing-touch-label",".clearing-blackout");t.length&&this.center_and_label(t,n)},fix_height:function(e){var t=e.parent().children(),n=this;return t.each(function(){var e=n.S(this),t=e.find("img");e.height()>t.outerHeight()&&e.addClass("fix-height")}).closest("ul").width(t.length*100+"%"),this},update_paddles:function(e){var t=e.closest(".carousel").siblings(".visible-img");e.next().length>0?this.S(".clearing-main-next",t).removeClass("disabled"):this.S(".clearing-main-next",t).addClass("disabled"),e.prev().length>0?this.S(".clearing-main-prev",t).removeClass("disabled"):this.S(".clearing-main-prev",t).addClass("disabled")},center_and_label:function(e,t){return this.rtl?(e.css({marginRight:-(e.outerWidth()/2),marginTop:-(e.outerHeight()/2),left:"auto",right:"50%"}),t.length>0&&t.css({marginRight:-(t.outerWidth()/2),marginTop:-(e.outerHeight()/2)-t.outerHeight()-10,left:"auto",right:"50%"})):(e.css({marginLeft:-(e.outerWidth()/2),marginTop:-(e.outerHeight()/2)}),t.length>0&&t.css({marginLeft:-(t.outerWidth()/2),marginTop:-(e.outerHeight()/2)-t.outerHeight()-10})),this},load:function(e){if(e[0].nodeName==="A")var t=e.attr("href");else var t=e.parent().attr("href");return this.preload(e),t?t:e.attr("src")},preload:function(e){this.img(e.closest("li").next()).img(e.closest("li").prev())},img:function(e){if(e.length){var t=new Image,n=this.S("a",e);n.length?t.src=n.attr("href"):t.src=this.S("img",e).attr("src")}return this},caption:function(e,t){var n=t.attr("data-caption");return n?e.html(n).show():e.text("").hide(),this},go:function(e,t){var n=this.S(".visible",e),r=n[t]();r.length&&this.S("img",r).trigger("click",[n,r])},shift:function(e,t,n){var 
r=t.parent(),i=this.settings.prev_index||t.index(),s=this.direction(r,e,t),o=this.rtl?"right":"left",u=parseInt(r.css("left"),10),a=t.outerWidth(),f,l={};t.index()!==i&&!/skip/.test(s)?/left/.test(s)?(this.lock(),l[o]=u+a,r.animate(l,300,this.unlock())):/right/.test(s)&&(this.lock(),l[o]=u-a,r.animate(l,300,this.unlock())):/skip/.test(s)&&(f=t.index()-this.settings.up_count,this.lock(),f>0?(l[o]=-(f*a),r.animate(l,300,this.unlock())):(l[o]=0,r.animate(l,300,this.unlock()))),n()},direction:function(e,t,n){var r=this.S("li",e),i=r.outerWidth()+r.outerWidth()/4,s=Math.floor(this.S(".clearing-container").outerWidth()/i)-1,o=r.index(n),u;return this.settings.up_count=s,this.adjacent(this.settings.prev_index,o)?o>s&&o>this.settings.prev_index?u="right":o>s-1&&o<=this.settings.prev_index?u="left":u=!1:u="skip",this.settings.prev_index=o,u},adjacent:function(e,t){for(var n=t+1;n>=t-1;n--)if(n===e)return!0;return!1},lock:function(){this.settings.locked=!0},unlock:function(){this.settings.locked=!1},locked:function(){return this.settings.locked},off:function(){this.S(this.scope).off(".fndtn.clearing"),this.S(t).off(".fndtn.clearing")},reflow:function(){this.init()}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";var i=function(){},s=function(r,i){if(r.hasClass(i.slides_container_class))return this;var s=this,a,f=r,l,c,h,p=0,d,v=!1;s.cache={},s.slides=function(){return f.children(i.slide_selector)},s.slides().first().addClass(i.active_slide_class),s.update_slide_number=function(t){i.slide_number&&(l.find("span:first").text(parseInt(t)+1),l.find("span:last").text(s.slides().length)),i.bullets&&(c.children().removeClass(i.bullets_active_class),e(c.children().get(t)).addClass(i.bullets_active_class))},s.update_active_link=function(t){var n=e('[data-orbit-link="'+s.slides().eq(t).attr("data-orbit-slide")+'"]');n.siblings().removeClass(i.bullets_active_class),n.addClass(i.bullets_active_class)},s.build_markup=function(){f.wrap('<div 
class="'+i.container_class+'"></div>'),a=f.parent(),f.addClass(i.slides_container_class),i.navigation_arrows&&(a.append(e('<a href="#"><span></span></a>').addClass(i.prev_class)),a.append(e('<a href="#"><span></span></a>').addClass(i.next_class))),i.timer&&(h=e("<div>").addClass(i.timer_container_class),h.append("<span>"),h.append(e("<div>").addClass(i.timer_progress_class)),h.addClass(i.timer_paused_class),a.append(h)),i.slide_number&&(l=e("<div>").addClass(i.slide_number_class),l.append("<span></span> "+i.slide_number_text+" <span></span>"),a.append(l)),i.bullets&&(c=e("<ol>").addClass(i.bullets_container_class),a.append(c),c.wrap('<div class="orbit-bullets-container"></div>'),s.slides().each(function(t,n){var r=e("<li>").attr("data-orbit-slide",t);c.append(r)})),i.stack_on_small&&a.addClass(i.stack_on_small_class)},s._prepare_direction=function(t,n){var r="next";t<=p&&(r="prev"),i.animation==="slide"&&setTimeout(function(){f.removeClass("swipe-prev swipe-next"),r==="next"?f.addClass("swipe-next"):r==="prev"&&f.addClass("swipe-prev")},0);var o=s.slides();if(t>=o.length){if(!i.circular)return!1;t=0}else if(t<0){if(!i.circular)return!1;t=o.length-1}var u=e(o.get(p)),a=e(o.get(t));return[r,u,a,t]},s._goto=function(e,t){if(e===null)return!1;if(s.cache.animating)return!1;if(e===p)return!1;typeof s.cache.timer=="object"&&s.cache.timer.restart();var n=s.slides();s.cache.animating=!0;var r=s._prepare_direction(e),o=r[0],u=r[1],a=r[2],e=r[3];f.trigger("before-slide-change.fndtn.orbit"),i.before_slide_change(),p=e,u.css("transitionDuration",i.animation_speed+"ms"),a.css("transitionDuration",i.animation_speed+"ms");var l=function(){var 
r=function(){t===!0&&s.cache.timer.restart(),s.update_slide_number(p),a.addClass(i.active_slide_class),s.update_active_link(e),f.trigger("after-slide-change.fndtn.orbit",[{slide_number:p,total_slides:n.length}]),i.after_slide_change(p,n.length),setTimeout(function(){s.cache.animating=!1},100)};f.height()!=a.height()&&i.variable_height?f.animate({height:a.height()},250,"linear",r):r()};if(n.length===1)return l(),!1;var c=function(){o==="next"&&d.next(u,a,l),o==="prev"&&d.prev(u,a,l)};a.height()>f.height()&&i.variable_height?f.animate({height:a.height()},250,"linear",c):c()},s.next=function(e){e.stopImmediatePropagation(),e.preventDefault(),s._prepare_direction(p+1),setTimeout(function(){s._goto(p+1)},100)},s.prev=function(e){e.stopImmediatePropagation(),e.preventDefault(),s._prepare_direction(p-1),setTimeout(function(){s._goto(p-1)},100)},s.link_custom=function(t){t.preventDefault();var n=e(this).attr("data-orbit-link");if(typeof n=="string"&&(n=e.trim(n))!=""){var r=a.find("[data-orbit-slide="+n+"]");r.index()!=-1&&setTimeout(function(){s._goto(r.index())},100)}},s.link_bullet=function(t){var n=e(this).attr("data-orbit-slide");if(typeof n=="string"&&(n=e.trim(n))!="")if(isNaN(parseInt(n))){var r=a.find("[data-orbit-slide="+n+"]");r.index()!=-1&&setTimeout(function(){s._goto(r.index()+1)},100)}else setTimeout(function(){s._goto(parseInt(n))},100)},s.timer_callback=function(){s._goto(p+1,!0)},s.compute_dimensions=function(){var t=e(s.slides().get(p)),n=t.height();i.variable_height||s.slides().each(function(){e(this).height()>n&&(n=e(this).height())}),f.height(n)},s.create_timer=function(){var e=new o(a.find("."+i.timer_container_class),i,s.timer_callback);return e},s.stop_timer=function(){typeof s.cache.timer=="object"&&s.cache.timer.stop()},s.toggle_timer=function(){var e=a.find("."+i.timer_container_class);e.hasClass(i.timer_paused_class)?(typeof s.cache.timer=="undefined"&&(s.cache.timer=s.create_timer()),s.cache.timer.start()):typeof 
s.cache.timer=="object"&&s.cache.timer.stop()},s.init=function(){s.build_markup(),i.timer&&(s.cache.timer=s.create_timer(),Foundation.utils.image_loaded(this.slides().children("img"),s.cache.timer.start)),i.animation==="fade"&&f.addClass("fade"),d=new u(i,f),a.on("click","."+i.next_class,s.next),a.on("click","."+i.prev_class,s.prev),a.on("click","[data-orbit-slide]",s.link_bullet),a.on("click",s.toggle_timer),i.swipe&&f.on("touchstart.fndtn.orbit",function(e){if(s.cache.animating)return;e.touches||(e=e.originalEvent),s.cache.start_page_x=e.touches[0].pageX,s.cache.start_page_y=e.touches[0].pageY,s.cache.start_time=(new Date).getTime(),s.cache.delta_x=0,s.cache.is_scrolling=null,s.cache.direction=null,s.stop_timer()}).on("touchmove.fndtn.orbit",function(e){Math.abs(s.cache.delta_x)>5&&(e.preventDefault(),e.stopPropagation());if(s.cache.animating)return;requestAnimationFrame(function(){e.touches||(e=e.originalEvent);if(e.touches.length>1||e.scale&&e.scale!==1)return;s.cache.delta_x=e.touches[0].pageX-s.cache.start_page_x,s.cache.is_scrolling===null&&(s.cache.is_scrolling=!!(s.cache.is_scrolling||Math.abs(s.cache.delta_x)<Math.abs(e.touches[0].pageY-s.cache.start_page_y)));if(s.cache.is_scrolling)return;var t=s.cache.delta_x<0?p+1:p-1;if(s.cache.direction!==t){var n=s._prepare_direction(t);s.cache.direction=t,s.cache.dir=n[0],s.cache.current=n[1],s.cache.next=n[2]}if(i.animation==="slide"){var 
r,o;r=s.cache.delta_x/a.width()*100,r>=0?o=-(100-r):o=100+r,s.cache.current.css("transform","translate3d("+r+"%,0,0)"),s.cache.next.css("transform","translate3d("+o+"%,0,0)")}})}).on("touchend.fndtn.orbit",function(e){if(s.cache.animating)return;e.preventDefault(),e.stopPropagation(),setTimeout(function(){s._goto(s.cache.direction)},50)}),a.on("mouseenter.fndtn.orbit",function(e){i.timer&&i.pause_on_hover&&s.stop_timer()}).on("mouseleave.fndtn.orbit",function(e){i.timer&&i.resume_on_mouseout&&s.cache.timer.start()}),e(n).on("click","[data-orbit-link]",s.link_custom),e(t).on("resize",s.compute_dimensions),Foundation.utils.image_loaded(this.slides().children("img"),s.compute_dimensions),Foundation.utils.image_loaded(this.slides().children("img"),function(){a.prev(".preloader").css("display","none"),s.update_slide_number(0),s.update_active_link(0),f.trigger("ready.fndtn.orbit")})},s.init()},o=function(e,t,n){var r=this,i=t.timer_speed,s=e.find("."+t.timer_progress_class),o,u,a=-1;this.update_progress=function(e){var t=s.clone();t.attr("style",""),t.css("width",e+"%"),s.replaceWith(t),s=t},this.restart=function(){clearTimeout(u),e.addClass(t.timer_paused_class),a=-1,r.update_progress(0),r.start()},this.start=function(){if(!e.hasClass(t.timer_paused_class))return!0;a=a===-1?i:a,e.removeClass(t.timer_paused_class),o=(new Date).getTime(),s.animate({width:"100%"},a,"linear"),u=setTimeout(function(){r.restart(),n()},a),e.trigger("timer-started.fndtn.orbit")},this.stop=function(){if(e.hasClass(t.timer_paused_class))return!0;clearTimeout(u),e.addClass(t.timer_paused_class);var n=(new Date).getTime();a-=n-o;var s=100-a/i*100;r.update_progress(s),e.trigger("timer-stopped.fndtn.orbit")}},u=function(e,t){var n="webkitTransitionEnd otransitionend oTransitionEnd msTransitionEnd transitionend";this.next=function(e,r,i){r.on(n,function(t){r.unbind(n),e.removeClass("active 
animate-out"),r.removeClass("animate-in"),i()}),t.children().css({transform:"",transitionDuration:""}),e.addClass("animate-out"),r.addClass("animate-in")},this.prev=function(e,t,r){t.on(n,function(i){t.unbind(n),e.removeClass("active animate-out"),t.removeClass("animate-in"),r()}),e.css({transform:"",transitionDuration:""}).addClass("animate-out"),t.css({transform:"",transitionDuration:""}).addClass("animate-in")}};Foundation.libs=Foundation.libs||{},Foundation.libs.orbit={name:"orbit",version:"5.2.1",settings:{animation:"slide",timer_speed:1e4,pause_on_hover:!0,resume_on_mouseout:!1,animation_speed:500,stack_on_small:!1,navigation_arrows:!0,slide_number:!0,slide_number_text:"of",container_class:"orbit-container",stack_on_small_class:"orbit-stack-on-small",next_class:"orbit-next",prev_class:"orbit-prev",timer_container_class:"orbit-timer",timer_paused_class:"paused",timer_progress_class:"orbit-progress",slides_container_class:"orbit-slides-container",slide_selector:"*",bullets_container_class:"orbit-bullets",bullets_active_class:"active",slide_number_class:"orbit-slide-number",caption_class:"orbit-caption",active_slide_class:"active",orbit_transition_class:"orbit-transitioning",bullets:!0,circular:!0,timer:!0,variable_height:!1,swipe:!0,before_slide_change:i,after_slide_change:i},init:function(e,t,n){var r=this;this.bindings(t,n)},events:function(e){var t=new s(this.S(e),this.S(e).data("orbit-init"));this.S(e).data(self.name+"-instance",t)},reflow:function(){var e=this;if(e.S(e.scope).is("[data-orbit]")){var t=e.S(e.scope),n=t.data(e.name+"-instance");n.compute_dimensions()}else e.S("[data-orbit]",e.scope).each(function(t,n){var r=e.S(n),i=e.data_options(r),s=r.data(e.name+"-instance");s.compute_dimensions()})}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.offcanvas={name:"offcanvas",version:"5.2.1",settings:{},init:function(e,t,n){this.events()},events:function(){var 
n=this.S;n(this.scope).off(".offcanvas").on("click.fndtn.offcanvas",".left-off-canvas-toggle",function(e){e.preventDefault(),n(this).closest(".off-canvas-wrap").toggleClass("move-right")}).on("click.fndtn.offcanvas",".exit-off-canvas",function(e){e.preventDefault(),n(".off-canvas-wrap").removeClass("move-right")}).on("click.fndtn.offcanvas",".left-off-canvas-menu a",function(r){r.preventDefault();var i=e(this).attr("href");n(".off-canvas-wrap").on("transitionend webkitTransitionEnd oTransitionEnd",function(e){t.location=i,n(".off-canvas-wrap").off("transitionend webkitTransitionEnd oTransitionEnd")}),n(".off-canvas-wrap").removeClass("move-right")}).on("click.fndtn.offcanvas",".right-off-canvas-toggle",function(e){e.preventDefault(),n(this).closest(".off-canvas-wrap").toggleClass("move-left")}).on("click.fndtn.offcanvas",".exit-off-canvas",function(e){e.preventDefault(),n(".off-canvas-wrap").removeClass("move-left")}).on("click.fndtn.offcanvas",".right-off-canvas-menu a",function(r){r.preventDefault();var i=e(this).attr("href");n(".off-canvas-wrap").on("transitionend webkitTransitionEnd oTransitionEnd",function(e){t.location=i,n(".off-canvas-wrap").off("transitionend webkitTransitionEnd oTransitionEnd")}),n(".off-canvas-wrap").removeClass("move-left")})},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.alert={name:"alert",version:"5.2.1",settings:{animation:"fadeOut",speed:300,callback:function(){}},init:function(e,t,n){this.bindings(t,n)},events:function(){var t=this,n=this.S;e(this.scope).off(".alert").on("click.fndtn.alert","["+this.attr_name()+"] a.close",function(e){var r=n(this).closest("["+t.attr_name()+"]"),i=r.data(t.attr_name(!0)+"-init")||t.settings;e.preventDefault(),r[i.animation](i.speed,function(){n(this).trigger("close").remove(),i.callback()})})},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";function i(e){var 
t=/fade/i.test(e),n=/pop/i.test(e);return{animate:t||n,pop:n,fade:t}}Foundation.libs.reveal={name:"reveal",version:"5.2.1",locked:!1,settings:{animation:"fadeAndPop",animation_speed:250,close_on_background_click:!0,close_on_esc:!0,dismiss_modal_class:"close-reveal-modal",bg_class:"reveal-modal-bg",open:function(){},opened:function(){},close:function(){},closed:function(){},bg:e(".reveal-modal-bg"),css:{open:{opacity:0,visibility:"visible",display:"block"},close:{opacity:1,visibility:"hidden",display:"none"}}},init:function(t,n,r){e.extend(!0,this.settings,n,r),this.bindings(n,r)},events:function(e){var t=this,r=t.S;return r(this.scope).off(".reveal").on("click.fndtn.reveal","["+this.add_namespace("data-reveal-id")+"]",function(e){e.preventDefault();if(!t.locked){var n=r(this),i=n.data(t.data_attr("reveal-ajax"));t.locked=!0;if(typeof i=="undefined")t.open.call(t,n);else{var s=i===!0?n.attr("href"):i;t.open.call(t,n,{url:s})}}}),r(n).on("touchend.fndtn.reveal click.fndtn.reveal",this.close_targets(),function(e){e.preventDefault();if(!t.locked){var 
n=r("["+t.attr_name()+"].open").data(t.attr_name(!0)+"-init"),i=r(e.target)[0]===r("."+n.bg_class)[0];if(i){if(!n.close_on_background_click)return;e.stopPropagation()}t.locked=!0,t.close.call(t,i?r("["+t.attr_name()+"].open"):r(this).closest("["+t.attr_name()+"]"))}}),r("["+t.attr_name()+"]",this.scope).length>0?r(this.scope).on("open.fndtn.reveal",this.settings.open).on("opened.fndtn.reveal",this.settings.opened).on("opened.fndtn.reveal",this.open_video).on("close.fndtn.reveal",this.settings.close).on("closed.fndtn.reveal",this.settings.closed).on("closed.fndtn.reveal",this.close_video):r(this.scope).on("open.fndtn.reveal","["+t.attr_name()+"]",this.settings.open).on("opened.fndtn.reveal","["+t.attr_name()+"]",this.settings.opened).on("opened.fndtn.reveal","["+t.attr_name()+"]",this.open_video).on("close.fndtn.reveal","["+t.attr_name()+"]",this.settings.close).on("closed.fndtn.reveal","["+t.attr_name()+"]",this.settings.closed).on("closed.fndtn.reveal","["+t.attr_name()+"]",this.close_video),!0},key_up_on:function(e){var t=this;return t.S("body").off("keyup.fndtn.reveal").on("keyup.fndtn.reveal",function(e){var n=t.S("["+t.attr_name()+"].open"),r=n.data(t.attr_name(!0)+"-init");r&&e.which===27&&r.close_on_esc&&!t.locked&&t.close.call(t,n)}),!0},key_up_off:function(e){return this.S("body").off("keyup.fndtn.reveal"),!0},open:function(t,n){var r=this;if(t)if(typeof t.selector!="undefined")var i=r.S("#"+t.data(r.data_attr("reveal-id")));else{var i=r.S(this.scope);n=t}else var i=r.S(this.scope);var s=i.data(r.attr_name(!0)+"-init");if(!i.hasClass("open")){var o=r.S("["+r.attr_name()+"].open");typeof i.data("css-top")=="undefined"&&i.data("css-top",parseInt(i.css("top"),10)).data("offset",this.cache_offset(i)),this.key_up_on(i),i.trigger("open"),o.length<1&&this.toggle_bg(i),typeof n=="string"&&(n={url:n});if(typeof n=="undefined"||!n.url)o.length>0&&this.hide(o,s.css.close),this.show(i,s.css.open);else{var u=typeof 
n.success!="undefined"?n.success:null;e.extend(n,{success:function(t,n,a){e.isFunction(u)&&u(t,n,a),i.html(t),r.S(i).foundation("section","reflow"),o.length>0&&r.hide(o,s.css.close),r.show(i,s.css.open)}}),e.ajax(n)}}},close:function(e){var e=e&&e.length?e:this.S(this.scope),t=this.S("["+this.attr_name()+"].open"),n=e.data(this.attr_name(!0)+"-init");t.length>0&&(this.locked=!0,this.key_up_off(e),e.trigger("close"),this.toggle_bg(e),this.hide(t,n.css.close,n))},close_targets:function(){var e="."+this.settings.dismiss_modal_class;return this.settings.close_on_background_click?e+", ."+this.settings.bg_class:e},toggle_bg:function(t){var n=t.data(this.attr_name(!0));this.S("."+this.settings.bg_class).length===0&&(this.settings.bg=e("<div />",{"class":this.settings.bg_class}).appendTo("body").hide()),this.settings.bg.filter(":visible").length>0?this.hide(this.settings.bg):this.show(this.settings.bg)},show:function(n,r){if(r){var s=n.data(this.attr_name(!0)+"-init");if(n.parent("body").length===0){var o=n.wrap('<div style="display: none;" />').parent(),u=this.settings.rootElement||"body";n.on("closed.fndtn.reveal.wrapped",function(){n.detach().appendTo(o),n.unwrap().unbind("closed.fndtn.reveal.wrapped")}),n.detach().appendTo(u)}var a=i(s.animation);a.animate||(this.locked=!1);if(a.pop){r.top=e(t).scrollTop()-n.data("offset")+"px";var f={top:e(t).scrollTop()+n.data("css-top")+"px",opacity:1};return setTimeout(function(){return n.css(r).animate(f,s.animation_speed,"linear",function(){this.locked=!1,n.trigger("opened")}.bind(this)).addClass("open")}.bind(this),s.animation_speed/2)}if(a.fade){r.top=e(t).scrollTop()+n.data("css-top")+"px";var f={opacity:1};return setTimeout(function(){return n.css(r).animate(f,s.animation_speed,"linear",function(){this.locked=!1,n.trigger("opened")}.bind(this)).addClass("open")}.bind(this),s.animation_speed/2)}return n.css(r).show().css({opacity:1}).addClass("open").trigger("opened")}var s=this.settings;return 
i(s.animation).fade?n.fadeIn(s.animation_speed/2):(this.locked=!1,n.show())},hide:function(n,r){if(r){var s=n.data(this.attr_name(!0)+"-init"),o=i(s.animation);o.animate||(this.locked=!1);if(o.pop){var u={top:-e(t).scrollTop()-n.data("offset")+"px",opacity:0};return setTimeout(function(){return n.animate(u,s.animation_speed,"linear",function(){this.locked=!1,n.css(r).trigger("closed")}.bind(this)).removeClass("open")}.bind(this),s.animation_speed/2)}if(o.fade){var u={opacity:0};return setTimeout(function(){return n.animate(u,s.animation_speed,"linear",function(){this.locked=!1,n.css(r).trigger("closed")}.bind(this)).removeClass("open")}.bind(this),s.animation_speed/2)}return n.hide().css(r).removeClass("open").trigger("closed")}var s=this.settings;return i(s.animation).fade?n.fadeOut(s.animation_speed/2):n.hide()},close_video:function(t){var n=e(".flex-video",t.target),r=e("iframe",n);r.length>0&&(r.attr("data-src",r[0].src),r.attr("src","about:blank"),n.hide())},open_video:function(t){var n=e(".flex-video",t.target),i=n.find("iframe");if(i.length>0){var s=i.attr("data-src");if(typeof s=="string")i[0].src=i.attr("data-src");else{var o=i[0].src;i[0].src=r,i[0].src=o}n.show()}},data_attr:function(e){return this.namespace.length>0?this.namespace+"-"+e:e},cache_offset:function(e){var t=e.show().height()+parseInt(e.css("top"),10);return e.hide(),t},off:function(){e(this.scope).off(".fndtn.reveal")},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.interchange={name:"interchange",version:"5.2.1",cache:{},images_loaded:!1,nodes_loaded:!1,settings:{load_attr:"interchange",named_queries:{"default":"only screen",small:Foundation.media_queries.small,medium:Foundation.media_queries.medium,large:Foundation.media_queries.large,xlarge:Foundation.media_queries.xlarge,xxlarge:Foundation.media_queries.xxlarge,landscape:"only screen and (orientation: landscape)",portrait:"only screen and (orientation: portrait)",retina:"only screen and 
(-webkit-min-device-pixel-ratio: 2),only screen and (min--moz-device-pixel-ratio: 2),only screen and (-o-min-device-pixel-ratio: 2/1),only screen and (min-device-pixel-ratio: 2),only screen and (min-resolution: 192dpi),only screen and (min-resolution: 2dppx)"},directives:{replace:function(t,n,r){if(/IMG/.test(t[0].nodeName)){var i=t[0].src;if((new RegExp(n,"i")).test(i))return;return t[0].src=n,r(t[0].src)}var s=t.data(this.data_attr+"-last-path");if(s==n)return;var o="/^.(.jpg|.jpeg|.png|.gif|.tiff|.bmp)??|#?./";return(new RegExp(o,"i")).test(n)?(e(t).css("background-image","url("+n+")"),t.data("interchange-last-path",n),r(n)):e.get(n,function(e){t.html(e),t.data(this.data_attr+"-last-path",n),r()})}}},init:function(t,n,r){Foundation.inherit(this,"throttle random_str"),this.data_attr=this.set_data_attr(),e.extend(!0,this.settings,n,r),this.bindings(n,r),this.load("images"),this.load("nodes")},get_media_hash:function(){var e="";for(var t in this.settings.named_queries)e+=matchMedia(this.settings.named_queries[t]).matches.toString();return e},events:function(){var n=this,r;return e(t).off(".interchange").on("resize.fndtn.interchange",n.throttle(function(){var e=n.get_media_hash();e!==r&&n.resize(),r=e},50)),this},resize:function(){var t=this.cache;if(!this.images_loaded||!this.nodes_loaded){setTimeout(e.proxy(this.resize,this),50);return}for(var n in t)if(t.hasOwnProperty(n)){var r=this.results(n,t[n]);r&&this.settings.directives[r.scenario[1]].call(this,r.el,r.scenario[0],function(){if(arguments[0]instanceof Array)var e=arguments[0];else var e=Array.prototype.slice.call(arguments,0);r.el.trigger(r.scenario[1],e)})}},results:function(e,t){var n=t.length;if(n>0){var r=this.S("["+this.add_namespace("data-uuid")+'="'+e+'"]');while(n--){var i,s=t[n][2];this.settings.named_queries.hasOwnProperty(s)?i=matchMedia(this.settings.named_queries[s]):i=matchMedia(s);if(i.matches)return{el:r,scenario:t[n]}}}return!1},load:function(e,t){return(typeof 
this["cached_"+e]=="undefined"||t)&&this["update_"+e](),this["cached_"+e]},update_images:function(){var e=this.S("img["+this.data_attr+"]"),t=e.length,n=t,r=0,i=this.data_attr;this.cache={},this.cached_images=[],this.images_loaded=t===0;while(n--){r++;if(e[n]){var s=e[n].getAttribute(i)||"";s.length>0&&this.cached_images.push(e[n])}r===t&&(this.images_loaded=!0,this.enhance("images"))}return this},update_nodes:function(){var e=this.S("["+this.data_attr+"]").not("img"),t=e.length,n=t,r=0,i=this.data_attr;this.cached_nodes=[],this.nodes_loaded=t===0;while(n--){r++;var s=e[n].getAttribute(i)||"";s.length>0&&this.cached_nodes.push(e[n]),r===t&&(this.nodes_loaded=!0,this.enhance("nodes"))}return this},enhance:function(n){var r=this["cached_"+n].length;while(r--)this.object(e(this["cached_"+n][r]));return e(t).trigger("resize")},parse_params:function(e,t,n){return[this.trim(e),this.convert_directive(t),this.trim(n)]},convert_directive:function(e){var t=this.trim(e);return t.length>0?t:"replace"},object:function(e){var t=this.parse_data_attr(e),n=[],r=t.length;if(r>0)while(r--){var i=t[r].split(/\((.*?)(\))$/);if(i.length>1){var s=i[0].split(","),o=this.parse_params(s[0],s[1],i[1]);n.push(o)}}return this.store(e,n)},store:function(e,t){var n=this.random_str(),r=e.data(this.add_namespace("uuid",!0));return this.cache[r]?this.cache[r]:(e.attr(this.add_namespace("data-uuid"),n),this.cache[n]=t)},trim:function(t){return typeof t=="string"?e.trim(t):t},set_data_attr:function(e){return e?this.namespace.length>0?this.namespace+"-"+this.settings.load_attr:this.settings.load_attr:this.namespace.length>0?"data-"+this.namespace+"-"+this.settings.load_attr:"data-"+this.settings.load_attr},parse_data_attr:function(e){var t=e.attr(this.attr_name()).split(/\[(.*?)\]/),n=t.length,r=[];while(n--)t[n].replace(/[\W\d]+/,"").length>4&&r.push(t[n]);return r},reflow:function(){this.load("images",!0),this.load("nodes",!0)}}}(jQuery,this,this.document),function(e,t,n,r){"use 
strict";Foundation.libs["magellan-expedition"]={name:"magellan-expedition",version:"5.2.1",settings:{active_class:"active",threshold:0,destination_threshold:20,throttle_delay:30},init:function(e,t,n){Foundation.inherit(this,"throttle"),this.bindings(t,n)},events:function(){var n=this,r=n.S,i=n.settings;n.set_expedition_position(),r(n.scope).off(".magellan").on("click.fndtn.magellan","["+n.add_namespace("data-magellan-arrival")+'] a[href^="#"]',function(r){r.preventDefault();var i=e(this).closest("["+n.attr_name()+"]"),s=i.data("magellan-expedition-init"),o=this.hash.split("#").join(""),u=e("a[name="+o+"]");u.length===0&&(u=e("#"+o));var a=u.offset().top;i.css("position")==="fixed"&&(a-=i.outerHeight()),e("html, body").stop().animate({scrollTop:a},700,"swing",function(){t.location.hash="#"+o})}).on("scroll.fndtn.magellan",n.throttle(this.check_for_arrivals.bind(this),i.throttle_delay)),e(t).on("resize.fndtn.magellan",n.throttle(this.set_expedition_position.bind(this),i.throttle_delay))},check_for_arrivals:function(){var e=this;e.update_arrivals(),e.update_expedition_positions()},set_expedition_position:function(){var t=this;e("["+this.attr_name()+"=fixed]",t.scope).each(function(n,r){var i=e(this),s=i.attr("styles"),o;i.attr("style",""),o=i.offset().top,i.data(t.data_attr("magellan-top-offset"),o),i.attr("style",s)})},update_expedition_positions:function(){var n=this,r=e(t).scrollTop();e("["+this.attr_name()+"=fixed]",n.scope).each(function(){var t=e(this),i=t.data("magellan-top-offset");if(r>=i){var s=t.prev("["+n.add_namespace("data-magellan-expedition-clone")+"]");s.length===0&&(s=t.clone(),s.removeAttr(n.attr_name()),s.attr(n.add_namespace("data-magellan-expedition-clone"),""),t.before(s)),t.css({position:"fixed",top:0})}else t.prev("["+n.add_namespace("data-magellan-expedition-clone")+"]").remove(),t.attr("style","")})},update_arrivals:function(){var n=this,r=e(t).scrollTop();e("["+this.attr_name()+"]",n.scope).each(function(){var 
t=e(this),i=i=t.data(n.attr_name(!0)+"-init"),s=n.offsets(t,r),o=t.find("["+n.add_namespace("data-magellan-arrival")+"]"),u=!1;s.each(function(e,r){if(r.viewport_offset>=r.top_offset){var s=t.find("["+n.add_namespace("data-magellan-arrival")+"]");return s.not(r.arrival).removeClass(i.active_class),r.arrival.addClass(i.active_class),u=!0,!0}}),u||o.removeClass(i.active_class)})},offsets:function(t,n){var r=this,i=t.data(r.attr_name(!0)+"-init"),s=n+i.destination_threshold;return t.find("["+r.add_namespace("data-magellan-arrival")+"]").map(function(t,n){var i=e(this).data(r.data_attr("magellan-arrival")),o=e("["+r.add_namespace("data-magellan-destination")+"="+i+"]");if(o.length>0){var u=o.offset().top;return{destination:o,arrival:e(this),top_offset:u,viewport_offset:s}}}).sort(function(e,t){return e.top_offset<t.top_offset?-1:e.top_offset>t.top_offset?1:0})},data_attr:function(e){return this.namespace.length>0?this.namespace+"-"+e:e},off:function(){this.S(this.scope).off(".magellan"),this.S(t).off(".magellan" )},reflow:function(){var t=this;e("["+t.add_namespace("data-magellan-expedition-clone")+"]",t.scope).remove()}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.accordion={name:"accordion",version:"5.2.1",settings:{active_class:"active",toggleable:!0},init:function(e,t,n){this.bindings(t,n)},events:function(){var t=this,n=this.S;n(this.scope).off(".fndtn.accordion").on("click.fndtn.accordion","["+this.attr_name()+"] dd > a",function(r){var i=n(this).closest("["+t.attr_name()+"]"),s=n("#"+this.href.split("#")[1]),o=n("dd > .content",i),u=e("> dd",i),a=i.data(t.attr_name(!0)+"-init"),f=n("dd > .content."+a.active_class,i),l=n("dd."+a.active_class,i);r.preventDefault();if(!n(this).closest("dl").is(i))return;if(f[0]==s[0]&&a.toggleable)return 
l.toggleClass(a.active_class,!1),s.toggleClass(a.active_class,!1);o.removeClass(a.active_class),u.removeClass(a.active_class),s.addClass(a.active_class).parent().addClass(a.active_class)})},off:function(){},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.topbar={name:"topbar",version:"5.2.1",settings:{index:0,sticky_class:"sticky",custom_back_text:!0,back_text:"Back",is_hover:!0,mobile_show_parent_link:!1,scrolltop:!0,sticky_on:"all"},init:function(t,n,r){Foundation.inherit(this,"add_custom_rule register_media throttle");var i=this;i.register_media("topbar","foundation-mq-topbar"),this.bindings(n,r),i.S("["+this.attr_name()+"]",this.scope).each(function(){var t=e(this),n=t.data(i.attr_name(!0)+"-init"),r=i.S("section",this),s=t.children().filter("ul").first();t.data("index",0);var o=t.parent();o.hasClass("fixed")||i.is_sticky(t,o,n)?(i.settings.sticky_class=n.sticky_class,i.settings.sticky_topbar=t,t.data("height",o.outerHeight()),t.data("stickyoffset",o.offset().top)):t.data("height",t.outerHeight()),n.assembled||i.assemble(t),n.is_hover?i.S(".has-dropdown",t).addClass("not-click"):i.S(".has-dropdown",t).removeClass("not-click"),i.add_custom_rule(".f-topbar-fixed { padding-top: "+t.data("height")+"px }"),o.hasClass("fixed")&&i.S("body").addClass("f-topbar-fixed")})},is_sticky:function(e,t,n){var r=t.hasClass(n.sticky_class);return r&&n.sticky_on==="all"?!0:r&&this.small()&&n.sticky_on==="small"?!0:r&&this.medium()&&n.sticky_on==="medium"?!0:r&&this.large()&&n.sticky_on==="large"?!0:!1},toggle:function(n){var r=this;if(n)var i=r.S(n).closest("["+this.attr_name()+"]");else var i=r.S("["+this.attr_name()+"]");var s=i.data(this.attr_name(!0)+"-init"),o=r.S("section, 
.section",i);r.breakpoint()&&(r.rtl?(o.css({right:"0%"}),e(">.name",o).css({right:"100%"})):(o.css({left:"0%"}),e(">.name",o).css({left:"100%"})),r.S("li.moved",o).removeClass("moved"),i.data("index",0),i.toggleClass("expanded").css("height","")),s.scrolltop?i.hasClass("expanded")?i.parent().hasClass("fixed")&&(s.scrolltop?(i.parent().removeClass("fixed"),i.addClass("fixed"),r.S("body").removeClass("f-topbar-fixed"),t.scrollTo(0,0)):i.parent().removeClass("expanded")):i.hasClass("fixed")&&(i.parent().addClass("fixed"),i.removeClass("fixed"),r.S("body").addClass("f-topbar-fixed")):(r.is_sticky(i,i.parent(),s)&&i.parent().addClass("fixed"),i.parent().hasClass("fixed")&&(i.hasClass("expanded")?(i.addClass("fixed"),i.parent().addClass("expanded"),r.S("body").addClass("f-topbar-fixed")):(i.removeClass("fixed"),i.parent().removeClass("expanded"),r.update_sticky_positioning())))},timer:null,events:function(n){var r=this,i=this.S;i(this.scope).off(".topbar").on("click.fndtn.topbar","["+this.attr_name()+"] .toggle-topbar",function(e){e.preventDefault(),r.toggle(this)}).on("click.fndtn.topbar",'.top-bar .top-bar-section li a[href^="#"],['+this.attr_name()+'] .top-bar-section li a[href^="#"]',function(t){var n=e(this).closest("li");r.breakpoint()&&!n.hasClass("back")&&!n.hasClass("has-dropdown")&&r.toggle()}).on("click.fndtn.topbar","["+this.attr_name()+"] li.has-dropdown",function(e){var t=i(this),n=i(e.target),s=t.closest("["+r.attr_name()+"]"),o=s.data(r.attr_name(!0)+"-init");if(n.data("revealId")){r.toggle();return}if(r.breakpoint())return;if(o.is_hover&&!Modernizr.touch)return;e.stopImmediatePropagation(),t.hasClass("hover")?(t.removeClass("hover").find("li").removeClass("hover"),t.parents("li.hover").removeClass("hover")):(t.addClass("hover"),n[0].nodeName==="A"&&n.parent().hasClass("has-dropdown")&&e.preventDefault())}).on("click.fndtn.topbar","["+this.attr_name()+"] .has-dropdown>a",function(e){if(r.breakpoint()){e.preventDefault();var 
t=i(this),n=t.closest("["+r.attr_name()+"]"),s=n.find("section, .section"),o=t.next(".dropdown").outerHeight(),u=t.closest("li");n.data("index",n.data("index")+1),u.addClass("moved"),r.rtl?(s.css({right:-(100*n.data("index"))+"%"}),s.find(">.name").css({right:100*n.data("index")+"%"})):(s.css({left:-(100*n.data("index"))+"%"}),s.find(">.name").css({left:100*n.data("index")+"%"})),n.css("height",t.siblings("ul").outerHeight(!0)+n.data("height"))}}),i(t).off(".topbar").on("resize.fndtn.topbar",r.throttle(function(){r.resize.call(r)},50)).trigger("resize"),i("body").off(".topbar").on("click.fndtn.topbar touchstart.fndtn.topbar",function(e){var t=i(e.target).closest("li").closest("li.hover");if(t.length>0)return;i("["+r.attr_name()+"] li").removeClass("hover")}),i(this.scope).on("click.fndtn.topbar","["+this.attr_name()+"] .has-dropdown .back",function(e){e.preventDefault();var t=i(this),n=t.closest("["+r.attr_name()+"]"),s=n.find("section, .section"),o=n.data(r.attr_name(!0)+"-init"),u=t.closest("li.moved"),a=u.parent();n.data("index",n.data("index")-1),r.rtl?(s.css({right:-(100*n.data("index"))+"%"}),s.find(">.name").css({right:100*n.data("index")+"%"})):(s.css({left:-(100*n.data("index"))+"%"}),s.find(">.name").css({left:100*n.data("index")+"%"})),n.data("index")===0?n.css("height",""):n.css("height",a.outerHeight(!0)+n.data("height")),setTimeout(function(){u.removeClass("moved")},300)})},resize:function(){var e=this;e.S("["+this.attr_name()+"]").each(function(){var t=e.S(this),r=t.data(e.attr_name(!0)+"-init"),i=t.parent("."+e.settings.sticky_class),s;if(!e.breakpoint()){var 
o=t.hasClass("expanded");t.css("height","").removeClass("expanded").find("li").removeClass("hover"),o&&e.toggle(t)}e.is_sticky(t,i,r)&&(i.hasClass("fixed")?(i.removeClass("fixed"),s=i.offset().top,e.S(n.body).hasClass("f-topbar-fixed")&&(s-=t.data("height")),t.data("stickyoffset",s),i.addClass("fixed")):(s=i.offset().top,t.data("stickyoffset",s)))})},breakpoint:function(){return!matchMedia(Foundation.media_queries.topbar).matches},small:function(){return matchMedia(Foundation.media_queries.small).matches},medium:function(){return matchMedia(Foundation.media_queries.medium).matches},large:function(){return matchMedia(Foundation.media_queries.large).matches},assemble:function(t){var n=this,r=t.data(this.attr_name(!0)+"-init"),i=n.S("section",t),s=e(this).children().filter("ul").first();i.detach(),n.S(".has-dropdown>a",i).each(function(){var t=n.S(this),i=t.siblings(".dropdown"),s=t.attr("href");if(!i.find(".title.back").length){if(r.mobile_show_parent_link&&s&&s.length>1)var o=e('<li class="title back js-generated"><h5><a href="javascript:void(0)"></a></h5></li><li><a class="parent-link js-generated" href="'+s+'">'+t.text()+"</a></li>");else var o=e('<li class="title back js-generated"><h5><a href="javascript:void(0)"></a></h5></li>');r.custom_back_text==1?e("h5>a",o).html(r.back_text):e("h5>a",o).html("&laquo; "+t.html()),i.prepend(o)}}),i.appendTo(t),this.sticky(),this.assembled(t)},assembled:function(t){t.data(this.attr_name(!0),e.extend({},t.data(this.attr_name(!0)),{assembled:!0}))},height:function(t){var n=0,r=this;return e("> li",t).each(function(){n+=r.S(this).outerHeight(!0)}),n},sticky:function(){var e=this.S(t),n=this;this.S(t).on("scroll",function(){n.update_sticky_positioning()})},update_sticky_positioning:function(){var e="."+this.settings.sticky_class,n=this.S(t),r=this;if(r.settings.sticky_topbar&&r.is_sticky(this.settings.sticky_topbar,this.settings.sticky_topbar.parent(),this.settings)){var 
i=this.settings.sticky_topbar.data("stickyoffset");r.S(e).hasClass("expanded")||(n.scrollTop()>i?r.S(e).hasClass("fixed")||(r.S(e).addClass("fixed"),r.S("body").addClass("f-topbar-fixed")):n.scrollTop()<=i&&r.S(e).hasClass("fixed")&&(r.S(e).removeClass("fixed"),r.S("body").removeClass("f-topbar-fixed")))}},off:function(){this.S(this.scope).off(".fndtn.topbar"),this.S(t).off(".fndtn.topbar")},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.tab={name:"tab",version:"5.2.1",settings:{active_class:"active",callback:function(){},deep_linking:!1,scroll_to_content:!0},default_tab_hashes:[],init:function(e,t,n){var r=this,i=this.S;this.bindings(t,n),this.handle_location_hash_change(),i("["+this.attr_name()+"] > dd.active > a",this.scope).each(function(){r.default_tab_hashes.push(this.hash)})},events:function(){var e=this,n=this.S;n(this.scope).off(".tab").on("click.fndtn.tab","["+this.attr_name()+"] > dd > a",function(t){t.preventDefault(),t.stopPropagation(),e.toggle_active_tab(n(this).parent())}),n(t).on("hashchange.fndtn.tab",function(t){t.preventDefault(),e.handle_location_hash_change()})},handle_location_hash_change:function(){var t=this,n=this.S;n("["+this.attr_name()+"]",this.scope).each(function(){var i=n(this).data(t.attr_name(!0)+"-init");if(i.deep_linking){var s=t.scope.location.hash;if(s!=""){var o=n(s);if(o.hasClass("content")&&o.parent().hasClass("tab-content"))t.toggle_active_tab(e("["+t.attr_name()+"] > dd > a[href="+s+"]").parent());else{var u=o.closest(".content").attr("id");u!=r&&t.toggle_active_tab(e("["+t.attr_name()+"] > dd > a[href=#"+u+"]").parent(),s)}}else for(var a in t.default_tab_hashes)t.toggle_active_tab(e("["+t.attr_name()+"] > dd > a[href="+t.default_tab_hashes[a]+"]").parent())}})},toggle_active_tab:function(n,i){var 
s=this.S,o=n.closest("["+this.attr_name()+"]"),u=n.children("a").first(),a="#"+u.attr("href").split("#")[1],f=s(a),l=n.siblings(),c=o.data(this.attr_name(!0)+"-init");s(this).data(this.data_attr("tab-content"))&&(a="#"+s(this).data(this.data_attr("tab-content")).split("#")[1],f=s(a));if(c.deep_linking){var h=e("body,html").scrollTop();i!=r?t.location.hash=i:t.location.hash=a,c.scroll_to_content?i==r||i==a?n.parent()[0].scrollIntoView():s(a)[0].scrollIntoView():(i==r||i==a)&&e("body,html").scrollTop(h)}n.addClass(c.active_class).triggerHandler("opened"),l.removeClass(c.active_class),f.siblings().removeClass(c.active_class).end().addClass(c.active_class),c.callback(n),f.triggerHandler("toggled",[n]),o.triggerHandler("toggled",[f])},data_attr:function(e){return this.namespace.length>0?this.namespace+"-"+e:e},off:function(){},reflow:function(){}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.abide={name:"abide",version:"5.2.1",settings:{live_validate:!0,focus_on_invalid:!0,error_labels:!0,timeout:1e3,patterns:{alpha:/^[a-zA-Z]+$/,alpha_numeric:/^[a-zA-Z0-9]+$/,integer:/^\d+$/,number:/-?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d+)?/,card:/^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11})$/,cvv:/^([0-9]){3,4}$/,email:/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/,url:/(https?|ftp|file|ssh):\/\/(((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:)*@)?(((\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5]))|((([a-zA-Z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-zA-Z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-zA-Z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0
-\uFFEF])))\.)+(([a-zA-Z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-zA-Z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-zA-Z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?)(:\d*)?)(\/((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)+(\/(([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)*)*)?)?(\?((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|[\uE000-\uF8FF]|\/|\?)*)?(\#((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|\/|\?)*)?/,domain:/^([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}$/,datetime:/([0-2][0-9]{3})\-([0-1][0-9])\-([0-3][0-9])T([0-5][0-9])\:([0-5][0-9])\:([0-5][0-9])(Z|([\-\+]([0-1][0-9])\:00))/,date:/(?:19|20)[0-9]{2}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1[0-9]|2[0-9])|(?:(?!02)(?:0[1-9]|1[0-2])-(?:30))|(?:(?:0[13578]|1[02])-31))/,time:/(0[0-9]|1[0-9]|2[0-3])(:[0-5][0-9]){2}/,dateISO:/\d{4}[\/\-]\d{1,2}[\/\-]\d{1,2}/,month_day_year:/(0[1-9]|1[012])[- \/.](0[1-9]|[12][0-9]|3[01])[- \/.](19|20)\d\d/,color:/^#?([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$/},validators:{equalTo:function(e,t,r){var i=n.getElementById(e.getAttribute(this.add_namespace("data-equalto"))).value,s=e.value,o=i===s;return o}}},timer:null,init:function(e,t,n){this.bindings(t,n)},events:function(t){var n=this,r=n.S(t).attr("novalidate","novalidate"),i=r.data(this.attr_name(!0)+"-init");this.invalid_attr=this.add_namespace("data-invalid"),r.off(".abide").on("submit.fndtn.abide validate.fndtn.abide",function(e){var t=/ajax/i.test(n.S(this).attr(n.attr_name()));return n.validate(n.S(this).find("input, textarea, select").get(),e,t)}).on("reset",function(){return n.reset(e(this))}).find("input, textarea, select").off(".abide").on("blur.fndtn.abide 
change.fndtn.abide",function(e){n.validate([this],e)}).on("keydown.fndtn.abide",function(t){var r=e(this).closest("form").data(n.attr_name(!0)+"-init");r.live_validate===!0&&(clearTimeout(n.timer),n.timer=setTimeout(function(){n.validate([this],t)}.bind(this),r.timeout))})},reset:function(t){t.removeAttr(this.invalid_attr),e(this.invalid_attr,t).removeAttr(this.invalid_attr),e(".error",t).not("small").removeClass("error")},validate:function(e,t,n){var r=this.parse_patterns(e),i=r.length,s=this.S(e[0]).closest("form"),o=/submit/.test(t.type);for(var u=0;u<i;u++)if(!r[u]&&(o||n))return this.settings.focus_on_invalid&&e[u].focus(),s.trigger("invalid"),this.S(e[u]).closest("form").attr(this.invalid_attr,""),!1;return(o||n)&&s.trigger("valid"),s.removeAttr(this.invalid_attr),n?!1:!0},parse_patterns:function(e){var t=e.length,n=[];while(t--)n.push(this.pattern(e[t]));return this.check_validation_and_apply_styles(n)},pattern:function(e){var t=e.getAttribute("type"),n=typeof e.getAttribute("required")=="string",r=e.getAttribute("pattern")||"";return this.settings.patterns.hasOwnProperty(r)&&r.length>0?[e,this.settings.patterns[r],n]:r.length>0?[e,new RegExp(r),n]:this.settings.patterns.hasOwnProperty(t)?[e,this.settings.patterns[t],n]:(r=/.*/,[e,r,n])},check_validation_and_apply_styles:function(t){var n=t.length,r=[];while(n--){var 
i=t[n][0],s=t[n][2],o=i.value,u=this.S(i).parent(),a=i.getAttribute(this.add_namespace("data-abide-validator")),f=i.type==="radio",l=i.type==="checkbox",c=this.S('label[for="'+i.getAttribute("id")+'"]'),h=s?i.value.length>0:!0,p,d;i.getAttribute(this.add_namespace("data-equalto"))&&(a="equalTo"),u.is("label")?p=u.parent():p=u,f&&s?r.push(this.valid_radio(i,s)):l&&s?r.push(this.valid_checkbox(i,s)):a?(d=this.settings.validators[a].apply(this,[i,s,p]),r.push(d),d?(this.S(i).removeAttr(this.invalid_attr),p.removeClass("error")):(this.S(i).attr(this.invalid_attr,""),p.addClass("error"))):t[n][1].test(o)&&h||!s&&i.value.length<1||e(i).attr("disabled")?(this.S(i).removeAttr(this.invalid_attr),p.removeClass("error"),c.length>0&&this.settings.error_labels&&c.removeClass("error"),r.push(!0),e(i).triggerHandler("valid")):(this.S(i).attr(this.invalid_attr,""),p.addClass("error"),c.length>0&&this.settings.error_labels&&c.addClass("error"),r.push(!1),e(i).triggerHandler("invalid"))}return r},valid_checkbox:function(e,t){var e=this.S(e),n=e.is(":checked")||!t;return n?e.removeAttr(this.invalid_attr).parent().removeClass("error"):e.attr(this.invalid_attr,"").parent().addClass("error"),n},valid_radio:function(e,t){var r=e.getAttribute("name"),i=n.getElementsByName(r),s=i.length,o=!1;for(var u=0;u<s;u++)i[u].checked&&(o=!0);for(var u=0;u<s;u++)o?this.S(i[u]).removeAttr(this.invalid_attr).parent().removeClass("error"):this.S(i[u]).attr(this.invalid_attr,"").parent().addClass("error");return o}}}(jQuery,this,this.document),function(e,t,n,r){"use strict";Foundation.libs.tooltip={name:"tooltip",version:"5.2.1",settings:{additional_inheritable_classes:[],tooltip_class:".tooltip",append_to:"body",touch_close_text:"Tap To Close",disable_for_touch:!1,hover_delay:200,tip_template:function(e,t){return'<span data-selector="'+e+'" class="'+Foundation.libs.tooltip.settings.tooltip_class.substring(1)+'">'+t+'<span 
class="nub"></span></span>'}},cache:{},init:function(e,t,n){Foundation.inherit(this,"random_str"),this.bindings(t,n)},events:function(t){var n=this,r=n.S;n.create(this.S(t)),e(this.scope).off(".tooltip").on("mouseenter.fndtn.tooltip mouseleave.fndtn.tooltip touchstart.fndtn.tooltip MSPointerDown.fndtn.tooltip","["+this.attr_name()+"]:not(a)",function(t){var i=r(this),s=e.extend({},n.settings,n.data_options(i)),o=!1;if(/mouse/i.test(t.type)&&n.ie_touch(t))return!1;if(i.hasClass("open"))Modernizr.touch&&/touchstart|MSPointerDown/i.test(t.type)&&t.preventDefault(),n.hide(i);else{if(s.disable_for_touch&&Modernizr.touch&&/touchstart|MSPointerDown/i.test(t.type))return;!s.disable_for_touch&&Modernizr.touch&&/touchstart|MSPointerDown/i.test(t.type)&&(t.preventDefault(),r(s.tooltip_class+".open").hide(),o=!0),/enter|over/i.test(t.type)?this.timer=setTimeout(function(){var e=n.showTip(i)}.bind(this),n.settings.hover_delay):t.type==="mouseout"||t.type==="mouseleave"?(clearTimeout(this.timer),n.hide(i)):n.showTip(i)}}).on("mouseleave.fndtn.tooltip touchstart.fndtn.tooltip MSPointerDown.fndtn.tooltip","["+this.attr_name()+"].open",function(t){if(/mouse/i.test(t.type)&&n.ie_touch(t))return!1;if(e(this).data("tooltip-open-event-type")=="touch"&&t.type=="mouseleave")return;e(this).data("tooltip-open-event-type")=="mouse"&&/MSPointerDown|touchstart/i.test(t.type)?n.convert_to_touch(e(this)):n.hide(e(this))}).on("DOMNodeRemoved DOMAttrModified","["+this.attr_name()+"]:not(a)",function(e){n.hide(r(this))})},ie_touch:function(e){return!1},showTip:function(e){var t=this.getTip(e);return this.show(e)},getTip:function(t){var n=this.selector(t),r=e.extend({},this.settings,this.data_options(t)),i=null;return n&&(i=this.S('span[data-selector="'+n+'"]'+r.tooltip_class)),typeof i=="object"?i:!1},selector:function(e){var t=e.attr("id"),n=e.attr(this.attr_name())||e.attr("data-selector");return(t&&t.length<1||!t)&&typeof 
n!="string"&&(n=this.random_str(6),e.attr("data-selector",n)),t&&t.length>0?t:n},create:function(n){var r=this,i=e.extend({},this.settings,this.data_options(n)),s=this.settings.tip_template;typeof i.tip_template=="string"&&t.hasOwnProperty(i.tip_template)&&(s=t[i.tip_template]);var o=e(s(this.selector(n),e("<div></div>").html(n.attr("title")).html())),u=this.inheritable_classes(n);o.addClass(u).appendTo(i.append_to),Modernizr.touch&&(o.append('<span class="tap-to-close">'+i.touch_close_text+"</span>"),o.on("touchstart.fndtn.tooltip MSPointerDown.fndtn.tooltip",function(e){r.hide(n)})),n.removeAttr("title").attr("title","")},reposition:function(t,n,r){var i,s,o,u,a,f;n.css("visibility","hidden").show(),i=t.data("width"),s=n.children(".nub"),o=s.outerHeight(),u=s.outerHeight(),this.small()?n.css({width:"100%"}):n.css({width:i?i:"auto"}),f=function(e,t,n,r,i,s){return e.css({top:t?t:"auto",bottom:r?r:"auto",left:i?i:"auto",right:n?n:"auto"}).end()},f(n,t.offset().top+t.outerHeight()+10,"auto","auto",t.offset().left);if(this.small())f(n,t.offset().top+t.outerHeight()+10,"auto","auto",12.5,e(this.scope).width()),n.addClass("tip-override"),f(s,-o,"auto","auto",t.offset().left);else{var l=t.offset().left;Foundation.rtl&&(s.addClass("rtl"),l=t.offset().left+t.outerWidth()-n.outerWidth()),f(n,t.offset().top+t.outerHeight()+10,"auto","auto",l),n.removeClass("tip-override"),r&&r.indexOf("tip-top")>-1?(Foundation.rtl&&s.addClass("rtl"),f(n,t.offset().top-n.outerHeight(),"auto","auto",l).removeClass("tip-override")):r&&r.indexOf("tip-left")>-1?(f(n,t.offset().top+t.outerHeight()/2-n.outerHeight()/2,"auto","auto",t.offset().left-n.outerWidth()-o).removeClass("tip-override"),s.removeClass("rtl")):r&&r.indexOf("tip-right")>-1&&(f(n,t.offset().top+t.outerHeight()/2-n.outerHeight()/2,"auto","auto",t.offset().left+t.outerWidth()+o).removeClass("tip-override"),s.removeClass("rtl"))}n.css("visibility","visible").hide()},small:function(){return 
matchMedia(Foundation.media_queries.small).matches},inheritable_classes:function(t){var n=e.extend({},this.settings,this.data_options(t)),r=["tip-top","tip-left","tip-bottom","tip-right","radius","round"].concat(n.additional_inheritable_classes),i=t.attr("class"),s=i?e.map(i.split(" "),function(t,n){if(e.inArray(t,r)!==-1)return t}).join(" "):"";return e.trim(s)},convert_to_touch:function(t){var n=this,r=n.getTip(t),i=e.extend({},n.settings,n.data_options(t));r.find(".tap-to-close").length===0&&(r.append('<span class="tap-to-close">'+i.touch_close_text+"</span>"),r.on("click.fndtn.tooltip.tapclose touchstart.fndtn.tooltip.tapclose MSPointerDown.fndtn.tooltip.tapclose",function(e){n.hide(t)})),t.data("tooltip-open-event-type","touch")},show:function(e){var t=this.getTip(e);e.data("tooltip-open-event-type")=="touch"&&this.convert_to_touch(e),this.reposition(e,t,e.attr("class")),e.addClass("open"),t.fadeIn(150)},hide:function(e){var t=this.getTip(e);t.fadeOut(150,function(){t.find(".tap-to-close").remove(),t.off("click.fndtn.tooltip.tapclose touchstart.fndtn.tooltip.tapclose MSPointerDown.fndtn.tapclose"),e.removeClass("open")})},off:function(){var t=this;this.S(this.scope).off(".fndtn.tooltip"),this.S(this.settings.tooltip_class).each(function(n){e("["+t.attr_name()+"]").get(n).attr("title",e(this).text())}).remove()},reflow:function(){}}}(jQuery,this,this.document);
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation.min.js
foundation.min.js
// Foundation 5.2.1 "Abide" — HTML5-style client-side form validation plugin.
// Registers itself on the global Foundation object; validates inputs against
// named regex patterns (type / pattern attributes) or custom validators, and
// toggles `data-invalid` attributes and `error` classes on failure.
// NOTE(review): relies on helpers mixed in elsewhere by the Foundation core
// (this.S, this.bindings, this.attr_name, this.add_namespace) — not visible here.
;(function ($, window, document, undefined) {
  'use strict';

  Foundation.libs.abide = {
    name : 'abide',

    version : '5.2.1',

    settings : {
      // re-validate on blur/change/keydown, not only on submit
      live_validate : true,
      // focus the first invalid field when a submit fails
      focus_on_invalid : true,
      error_labels: true, // labels with a for="inputId" will recieve an `error` class
      // debounce (ms) for keydown live validation
      timeout : 1000,
      // named regex patterns, matched by input `pattern` name or `type`
      patterns : {
        alpha: /^[a-zA-Z]+$/,
        alpha_numeric : /^[a-zA-Z0-9]+$/,
        integer: /^\d+$/,
        number: /-?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d+)?/,

        // amex, visa, diners
        card : /^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11})$/,
        cvv : /^([0-9]){3,4}$/,

        // http://www.whatwg.org/specs/web-apps/current-work/multipage/states-of-the-type-attribute.html#valid-e-mail-address
        email : /^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/,

        url: /(https?|ftp|file|ssh):\/\/(((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:)*@)?(((\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5]))|((([a-zA-Z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-zA-Z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-zA-Z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-zA-Z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-zA-Z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-zA-Z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?)(:\d*)?)(\/((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)+(\/(([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)*)*)?)?(\?((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|[\uE000-\uF8FF]|\/|\?)*)?(\#((([a-zA-Z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|\/|\?)*)?/,
        // abc.de
        domain: /^([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}$/,

        datetime: /([0-2][0-9]{3})\-([0-1][0-9])\-([0-3][0-9])T([0-5][0-9])\:([0-5][0-9])\:([0-5][0-9])(Z|([\-\+]([0-1][0-9])\:00))/,
        // YYYY-MM-DD
        date: /(?:19|20)[0-9]{2}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1[0-9]|2[0-9])|(?:(?!02)(?:0[1-9]|1[0-2])-(?:30))|(?:(?:0[13578]|1[02])-31))/,
        // HH:MM:SS
        time : /(0[0-9]|1[0-9]|2[0-3])(:[0-5][0-9]){2}/,
        dateISO: /\d{4}[\/\-]\d{1,2}[\/\-]\d{1,2}/,
        // MM/DD/YYYY
        month_day_year : /(0[1-9]|1[012])[- \/.](0[1-9]|[12][0-9]|3[01])[- \/.](19|20)\d\d/,

        // #FFF or #FFFFFF
        color: /^#?([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$/
      },
      // custom validators, selected via the data-abide-validator attribute
      validators : {
        // field must equal the value of the element named by data-equalto
        equalTo: function (el, required, parent) {
          var from  = document.getElementById(el.getAttribute(this.add_namespace('data-equalto'))).value,
              to    = el.value,
              valid = (from === to);

          return valid;
        }
      }
    },

    // shared debounce handle for live validation keydown events
    timer : null,

    init : function (scope, method, options) {
      this.bindings(method, options);
    },

    // Wire form-level (submit/reset) and field-level (blur/change/keydown)
    // validation handlers onto the scoped form.
    events : function (scope) {
      var self = this,
          form = self.S(scope).attr('novalidate', 'novalidate'),
          settings = form.data(this.attr_name(true) + '-init');

      this.invalid_attr = this.add_namespace('data-invalid');

      form
        .off('.abide')
        .on('submit.fndtn.abide validate.fndtn.abide', function (e) {
          // forms marked "ajax" validate without allowing native submission
          var is_ajax = /ajax/i.test(self.S(this).attr(self.attr_name()));
          return self.validate(self.S(this).find('input, textarea, select').get(), e, is_ajax);
        })
        .on('reset', function () {
          return self.reset($(this));
        })
        .find('input, textarea, select')
          .off('.abide')
          .on('blur.fndtn.abide change.fndtn.abide', function (e) {
            self.validate([this], e);
          })
          .on('keydown.fndtn.abide', function (e) {
            var settings = $(this).closest('form').data(self.attr_name(true) + '-init');
            if (settings.live_validate === true) {
              // debounce so validation fires settings.timeout ms after typing stops
              clearTimeout(self.timer);
              self.timer = setTimeout(function () {
                self.validate([this], e);
              }.bind(this), settings.timeout);
            }
          });
    },

    // Clear all invalid markers and error classes from a form.
    reset : function (form) {
      form.removeAttr(this.invalid_attr);
      $(this.invalid_attr, form).removeAttr(this.invalid_attr);
      $('.error', form).not('small').removeClass('error');
    },

    // Validate a set of elements. Returns false (blocking submission) when
    // any element fails during a submit/ajax validation, true otherwise;
    // ajax-mode forms always return false so submission stays in JS hands.
    validate : function (els, e, is_ajax) {
      var validations = this.parse_patterns(els),
          validation_count = validations.length,
          form = this.S(els[0]).closest('form'),
          submit_event = /submit/.test(e.type);

      // Has to count up to make sure the focus gets applied to the top error
      for (var i = 0; i < validation_count; i++) {
        if (!validations[i] && (submit_event || is_ajax)) {
          if (this.settings.focus_on_invalid) els[i].focus();
          form.trigger('invalid');
          this.S(els[i]).closest('form').attr(this.invalid_attr, '');
          return false;
        }
      }

      if (submit_event || is_ajax) {
        form.trigger('valid');
      }

      form.removeAttr(this.invalid_attr);

      if (is_ajax) return false;

      return true;
    },

    // Resolve each element's pattern triple, then apply pass/fail styling.
    parse_patterns : function (els) {
      var i = els.length,
          el_patterns = [];

      while (i--) {
        el_patterns.push(this.pattern(els[i]));
      }

      return this.check_validation_and_apply_styles(el_patterns);
    },

    // Build [element, regex, required] for one input. Precedence: named
    // pattern > literal pattern attribute > pattern named after the input
    // type > match-anything fallback.
    pattern : function (el) {
      var type = el.getAttribute('type'),
          required = typeof el.getAttribute('required') === 'string';

      var pattern = el.getAttribute('pattern') || '';

      if (this.settings.patterns.hasOwnProperty(pattern) && pattern.length > 0) {
        return [el, this.settings.patterns[pattern], required];
      } else if (pattern.length > 0) {
        return [el, new RegExp(pattern), required];
      }

      if (this.settings.patterns.hasOwnProperty(type)) {
        return [el, this.settings.patterns[type], required];
      }

      pattern = /.*/;

      return [el, pattern, required];
    },

    // Run each [el, regex, required] triple, toggling the invalid attribute
    // and error classes on the element, its parent, and its label.
    // Returns an array of booleans (note: iterates in reverse element order).
    check_validation_and_apply_styles : function (el_patterns) {
      var i = el_patterns.length,
          validations = [];

      while (i--) {
        var el = el_patterns[i][0],
            required = el_patterns[i][2],
            value = el.value,
            direct_parent = this.S(el).parent(),
            validator = el.getAttribute(this.add_namespace('data-abide-validator')),
            is_radio = el.type === "radio",
            is_checkbox = el.type === "checkbox",
            label = this.S('label[for="' + el.getAttribute('id') + '"]'),
            valid_length = (required) ? (el.value.length > 0) : true;

        var parent, valid;

        // support old way to do equalTo validations
        if(el.getAttribute(this.add_namespace('data-equalto'))) { validator = "equalTo" }

        // style the wrapping label's parent when the input lives inside a label
        if (!direct_parent.is('label')) {
          parent = direct_parent;
        } else {
          parent = direct_parent.parent();
        }

        if (is_radio && required) {
          validations.push(this.valid_radio(el, required));
        } else if (is_checkbox && required) {
          validations.push(this.valid_checkbox(el, required));
        } else if (validator) {
          valid = this.settings.validators[validator].apply(this, [el, required, parent])
          validations.push(valid);

          if (valid) {
            this.S(el).removeAttr(this.invalid_attr);
            parent.removeClass('error');
          } else {
            this.S(el).attr(this.invalid_attr, '');
            parent.addClass('error');
          }
        } else {
          // regex validation: pass when pattern matches (and non-empty if
          // required), or when an optional field is empty, or when disabled
          if (el_patterns[i][1].test(value) && valid_length ||
            !required && el.value.length < 1 || $(el).attr('disabled')) {
            this.S(el).removeAttr(this.invalid_attr);
            parent.removeClass('error');
            if (label.length > 0 && this.settings.error_labels) label.removeClass('error');
            validations.push(true);
            $(el).triggerHandler('valid');
          } else {
            this.S(el).attr(this.invalid_attr, '');
            parent.addClass('error');
            if (label.length > 0 && this.settings.error_labels) label.addClass('error');
            validations.push(false);
            $(el).triggerHandler('invalid');
          }
        }
      }

      return validations;
    },

    // A required checkbox is valid only when checked.
    valid_checkbox : function(el, required) {
      var el = this.S(el),
          valid = (el.is(':checked') || !required);

      if (valid) {
        el.removeAttr(this.invalid_attr).parent().removeClass('error');
      } else {
        el.attr(this.invalid_attr, '').parent().addClass('error');
      }

      return valid;
    },

    // A required radio group is valid when any member of the name group is
    // checked; styling is applied to every member of the group.
    valid_radio : function (el, required) {
      var name = el.getAttribute('name'),
          group = document.getElementsByName(name),
          count = group.length,
          valid = false;

      // Has to count up to make sure the focus gets applied to the top error
      for (var i = 0; i < count; i++) {
        if (group[i].checked) valid = true;
      }

      // Has to count up to make sure the focus gets applied to the top error
      for (var i = 0; i < count; i++) {
        if (valid) {
          this.S(group[i]).removeAttr(this.invalid_attr).parent().removeClass('error');
        } else {
          this.S(group[i]).attr(this.invalid_attr, '').parent().addClass('error');
        }
      }

      return valid;
    }
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.abide.js
foundation.abide.js
// Foundation 5.2.1 "Clearing" — responsive lightbox / image gallery plugin.
// Wraps a thumbnail <ul> in a carousel plus a full-size viewer, and handles
// click, keyboard, resize, and touch-swipe navigation between images.
// NOTE(review): relies on helpers mixed in elsewhere by the Foundation core
// (this.S, this.bindings, this.attr_name, Foundation.inherit's throttle /
// image_loaded) and on the global Modernizr — none visible here.
;(function ($, window, document, undefined) {
  'use strict';

  Foundation.libs.clearing = {
    name : 'clearing',

    version: '5.2.1',

    settings : {
      templates : {
        // markup for the full-size viewer injected next to the thumbnail grid
        viewing : '<a href="#" class="clearing-close">&times;</a>' +
          '<div class="visible-img" style="display: none"><div class="clearing-touch-label"></div><img src="data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs%3D" alt="" />' +
          '<p class="clearing-caption"></p><a href="#" class="clearing-main-prev"><span></span></a>' +
          '<a href="#" class="clearing-main-next"><span></span></a></div>'
      },

      // comma delimited list of selectors that, on click, will close clearing,
      // add 'div.clearing-blackout, div.visible-img' to close on background click
      close_selectors : '.clearing-close',

      touch_label : '&larr;&nbsp;Swipe to Advance&nbsp;&rarr;',

      // event initializers and locks
      init : false,
      locked : false
    },

    // Assemble the viewer markup for every gallery found in scope.
    init : function (scope, method, options) {
      var self = this;
      Foundation.inherit(this, 'throttle image_loaded');

      this.bindings(method, options);

      if (self.S(this.scope).is('[' + this.attr_name() + ']')) {
        this.assemble(self.S('li', this.scope));
      } else {
        self.S('[' + this.attr_name() + ']', this.scope).each(function () {
          self.assemble(self.S('li', this));
        });
      }
    },

    // Bind click (open/next/prev/close), keyboard, and resize handlers.
    events : function (scope) {
      var self = this,
          S = self.S;

      if ($('.scroll-container').length > 0) {
        this.scope = $('.scroll-container');
      }

      S(this.scope)
        .off('.clearing')
        .on('click.fndtn.clearing', 'ul[' + this.attr_name() + '] li',
          function (e, current, target) {
            var current = current || S(this),
                target = target || current,
                next = current.next('li'),
                settings = current.closest('[' + self.attr_name() + ']').data(self.attr_name(true) + '-init'),
                image = S(e.target);

            e.preventDefault();

            if (!settings) {
              self.init();
              settings = current.closest('[' + self.attr_name() + ']').data(self.attr_name(true) + '-init');
            }

            // if clearing is open and the current image is
            // clicked, go to the next image in sequence
            if (target.hasClass('visible') &&
              current[0] === target[0] &&
              next.length > 0 && self.is_open(current)) {
              target = next;
              image = S('img', target);
            }

            // set current and target to the clicked li if not otherwise defined.
            self.open(image, current, target);
            self.update_paddles(target);
          })

        .on('click.fndtn.clearing', '.clearing-main-next',
          function (e) { self.nav(e, 'next') })
        .on('click.fndtn.clearing', '.clearing-main-prev',
          function (e) { self.nav(e, 'prev') })
        .on('click.fndtn.clearing', this.settings.close_selectors,
          function (e) { Foundation.libs.clearing.close(e, this) });

      $(document).on('keydown.fndtn.clearing',
        function (e) { self.keydown(e) });

      S(window).off('.clearing').on('resize.fndtn.clearing',
        function () { self.resize() });

      this.swipe_events(scope);
    },

    // Bind touchstart/touchmove/touchend on the viewer to detect horizontal
    // swipes and navigate; vertical movement is treated as scrolling.
    swipe_events : function (scope) {
      var self = this,
          S = self.S;

      S(this.scope)
        .on('touchstart.fndtn.clearing', '.visible-img', function (e) {
          if (!e.touches) { e = e.originalEvent; }
          var data = {
                start_page_x: e.touches[0].pageX,
                start_page_y: e.touches[0].pageY,
                start_time: (new Date()).getTime(),
                delta_x: 0,
                is_scrolling: undefined
              };

          S(this).data('swipe-transition', data);
          e.stopPropagation();
        })
        .on('touchmove.fndtn.clearing', '.visible-img', function (e) {
          if (!e.touches) { e = e.originalEvent; }
          // Ignore pinch/zoom events
          if(e.touches.length > 1 || e.scale && e.scale !== 1) return;

          var data = S(this).data('swipe-transition');

          if (typeof data === 'undefined') {
            data = {};
          }

          data.delta_x = e.touches[0].pageX - data.start_page_x;

          if ( typeof data.is_scrolling === 'undefined') {
            data.is_scrolling = !!( data.is_scrolling || Math.abs(data.delta_x) < Math.abs(e.touches[0].pageY - data.start_page_y) );
          }

          if (!data.is_scrolling && !data.active) {
            e.preventDefault();
            var direction = (data.delta_x < 0) ? 'next' : 'prev';
            data.active = true;
            self.nav(e, direction);
          }
        })
        .on('touchend.fndtn.clearing', '.visible-img', function (e) {
          S(this).data('swipe-transition', {});
          e.stopPropagation();
        });
    },

    // Rebuild the gallery DOM: detach the thumbnail <ul>, wrap it in a
    // carousel alongside the viewer template, and reinsert the whole thing.
    assemble : function ($li) {
      var $el = $li.parent();

      // already assembled
      if ($el.parent().hasClass('carousel')) return;
      $el.after('<div id="foundationClearingHolder"></div>');

      var holder = this.S('#foundationClearingHolder'),
          settings = $el.data(this.attr_name(true) + '-init'),
          grid = $el.detach(),
          data = {
            grid: '<div class="carousel">' + grid[0].outerHTML + '</div>',
            viewing: settings.templates.viewing
          },
          wrapper = '<div class="clearing-assembled"><div>' + data.viewing +
            data.grid + '</div></div>',
          touch_label = this.settings.touch_label;

      if (Modernizr.touch) {
        wrapper = $(wrapper).find('.clearing-touch-label').html(touch_label).end();
      }

      holder.after(wrapper).remove();
    },

    // Open the viewer on the clicked thumbnail: load the full-size image
    // (polling until it has real dimensions), then black out the page and
    // shift the carousel to the target.
    open : function ($image, current, target) {
      var self = this,
          body = $(document.body),
          root = target.closest('.clearing-assembled'),
          container = $('div', root).first(),
          visible_image = $('.visible-img', container),
          image = $('img', visible_image).not($image),
          label = $('.clearing-touch-label', '.clearing-blackout'),
          error = false;

      image.error(function () {
        error = true;
      });

      // poll every 50ms until the image reports a real width (or errored)
      function startLoad() {
        setTimeout(function () {
          this.image_loaded(image, function () {
            if (image.outerWidth() === 1 && !error) {
              startLoad.call(this);
            } else {
              cb.call(this, image);
            }
          }.bind(this));
        }.bind(this), 50);
      }

      // once loaded: reveal the viewer and position/caption/shift the gallery
      function cb (image) {
        var $image = $(image);
        image.css('visibility', 'visible');
        // toggle the gallery
        body.css('overflow', 'hidden');
        root.addClass('clearing-blackout');
        container.addClass('clearing-container');
        visible_image.show();
        this.fix_height(target)
          .caption(self.S('.clearing-caption', visible_image), self.S('img', target))
          .center_and_label(image, label)
          .shift(current, target, function () {
            target.siblings().removeClass('visible');
            target.addClass('visible');
          });
      }

      if (!this.locked()) {
        // set the image to the selected thumbnail
        image
          .attr('src', this.load($image))
          .css('visibility', 'hidden');

        startLoad.call(this);
      }
    },

    // Close the viewer and restore page scrolling; only fires when the
    // click landed directly on a close selector / blackout element.
    close : function (e, el) {
      e.preventDefault();

      var root = (function (target) {
            if (/blackout/.test(target.selector)) {
              return target;
            } else {
              return target.closest('.clearing-blackout');
            }
          }($(el))),
          body = $(document.body), container, visible_image;

      if (el === e.target && root) {
        body.css('overflow', '');
        container = $('div', root).first();
        visible_image = $('.visible-img', container);
        this.settings.prev_index = 0;
        $('ul[' + this.attr_name() + ']', root)
          .attr('style', '').closest('.clearing-blackout')
          .removeClass('clearing-blackout');
        container.removeClass('clearing-container');
        visible_image.hide();
      }

      return false;
    },

    // The gallery <ul> gets an inline style while open; use that as the flag.
    is_open : function (current) {
      return current.parent().prop('style').length > 0;
    },

    // Arrow keys navigate (mirrored under RTL); Escape closes.
    keydown : function (e) {
      var clearing = $('.clearing-blackout ul[' + this.attr_name() + ']'),
          NEXT_KEY = this.rtl ? 37 : 39,
          PREV_KEY = this.rtl ? 39 : 37,
          ESC_KEY = 27;

      if (e.which === NEXT_KEY) this.go(clearing, 'next');
      if (e.which === PREV_KEY) this.go(clearing, 'prev');
      if (e.which === ESC_KEY) this.S('a.clearing-close').trigger('click');
    },

    nav : function (e, direction) {
      var clearing = $('ul[' + this.attr_name() + ']', '.clearing-blackout');

      e.preventDefault();
      this.go(clearing, direction);
    },

    // Re-center the visible image when the window is resized.
    resize : function () {
      var image = $('img', '.clearing-blackout .visible-img'),
          label = $('.clearing-touch-label', '.clearing-blackout');

      if (image.length) {
        this.center_and_label(image, label);
      }
    },

    // visual adjustments
    fix_height : function (target) {
      var lis = target.parent().children(),
          self = this;

      lis.each(function () {
        var li = self.S(this),
            image = li.find('img');

        if (li.height() > image.outerHeight()) {
          li.addClass('fix-height');
        }
      })
      .closest('ul')
      .width(lis.length * 100 + '%');

      return this;
    },

    // Enable/disable the next/prev paddles depending on neighbours.
    update_paddles : function (target) {
      var visible_image = target
        .closest('.carousel')
        .siblings('.visible-img');

      if (target.next().length > 0) {
        this.S('.clearing-main-next', visible_image)
          .removeClass('disabled');
      } else {
        this.S('.clearing-main-next', visible_image)
          .addClass('disabled');
      }

      if (target.prev().length > 0) {
        this.S('.clearing-main-prev', visible_image)
          .removeClass('disabled');
      } else {
        this.S('.clearing-main-prev', visible_image)
          .addClass('disabled');
      }
    },

    // Center the full-size image (and its swipe label) via negative margins;
    // mirrored layout under RTL.
    center_and_label : function (target, label) {
      if (!this.rtl) {
        target.css({
          marginLeft : -(target.outerWidth() / 2),
          marginTop : -(target.outerHeight() / 2)
        });

        if (label.length > 0) {
          label.css({
            marginLeft : -(label.outerWidth() / 2),
            marginTop : -(target.outerHeight() / 2)-label.outerHeight()-10
          });
        }
      } else {
        target.css({
          marginRight : -(target.outerWidth() / 2),
          marginTop : -(target.outerHeight() / 2),
          left: 'auto',
          right: '50%'
        });

        if (label.length > 0) {
          label.css({
            marginRight : -(label.outerWidth() / 2),
            marginTop : -(target.outerHeight() / 2)-label.outerHeight()-10,
            left: 'auto',
            right: '50%'
          });
        }
      }
      return this;
    },

    // image loading and preloading
    // Resolve the full-size URL for a thumbnail: the <a href> wins over the
    // <img src>. Also kicks off preloading of the neighbours.
    load : function ($image) {
      if ($image[0].nodeName === "A") {
        var href = $image.attr('href');
      } else {
        var href = $image.parent().attr('href');
      }

      this.preload($image);

      if (href) return href;
      return $image.attr('src');
    },

    preload : function ($image) {
      this
        .img($image.closest('li').next())
        .img($image.closest('li').prev());
    },

    // Warm the browser cache by constructing an off-DOM Image.
    img : function (img) {
      if (img.length) {
        var new_img = new Image(),
            new_a = this.S('a', img);

        if (new_a.length) {
          new_img.src = new_a.attr('href');
        } else {
          new_img.src = this.S('img', img).attr('src');
        }
      }
      return this;
    },

    // image caption
    caption : function (container, $image) {
      var caption = $image.attr('data-caption');

      if (caption) {
        container
          .html(caption)
          .show();
      } else {
        container
          .text('')
          .hide();
      }
      return this;
    },

    // directional methods
    // Trigger a click on the sibling thumbnail in the given direction
    // ('next'/'prev' jQuery traversal method name).
    go : function ($ul, direction) {
      var current = this.S('.visible', $ul),
          target = current[direction]();

      if (target.length) {
        this.S('img', target)
          .trigger('click', [current, target]);
      }
    },

    // Animate the carousel strip left/right (or jump for non-adjacent
    // targets), then run the callback that flips the 'visible' class.
    shift : function (current, target, callback) {
      var clearing = target.parent(),
          old_index = this.settings.prev_index || target.index(),
          direction = this.direction(clearing, current, target),
          dir = this.rtl ? 'right' : 'left',
          left = parseInt(clearing.css('left'), 10),
          width = target.outerWidth(),
          skip_shift;

      var dir_obj = {};

      // we use jQuery animate instead of CSS transitions because we
      // need a callback to unlock the next animation
      // needs support for RTL **
      if (target.index() !== old_index && !/skip/.test(direction)){
        if (/left/.test(direction)) {
          this.lock();
          dir_obj[dir] = left + width;
          clearing.animate(dir_obj, 300, this.unlock());
        } else if (/right/.test(direction)) {
          this.lock();
          dir_obj[dir] = left - width;
          clearing.animate(dir_obj, 300, this.unlock());
        }
      } else if (/skip/.test(direction)) {
        // the target image is not adjacent to the current image, so
        // do we scroll right or not
        skip_shift = target.index() - this.settings.up_count;
        this.lock();

        if (skip_shift > 0) {
          dir_obj[dir] = -(skip_shift * width);
          clearing.animate(dir_obj, 300, this.unlock());
        } else {
          dir_obj[dir] = 0;
          clearing.animate(dir_obj, 300, this.unlock());
        }
      }

      callback();
    },

    // Decide how to move the strip: 'right'/'left' for adjacent targets past
    // the visible window, false for no movement, 'skip' for far jumps.
    direction : function ($el, current, target) {
      var lis = this.S('li', $el),
          li_width = lis.outerWidth() + (lis.outerWidth() / 4),
          up_count = Math.floor(this.S('.clearing-container').outerWidth() / li_width) - 1,
          target_index = lis.index(target),
          response;

      this.settings.up_count = up_count;

      if (this.adjacent(this.settings.prev_index, target_index)) {
        if ((target_index > up_count) && target_index > this.settings.prev_index) {
          response = 'right';
        } else if ((target_index > up_count - 1) && target_index <= this.settings.prev_index) {
          response = 'left';
        } else {
          response = false;
        }
      } else {
        response = 'skip';
      }

      this.settings.prev_index = target_index;

      return response;
    },

    // True when target_index is within one position of current_index.
    adjacent : function (current_index, target_index) {
      for (var i = target_index + 1; i >= target_index - 1; i--) {
        if (i === current_index) return true;
      }
      return false;
    },

    // lock management
    lock : function () {
      this.settings.locked = true;
    },

    unlock : function () {
      this.settings.locked = false;
    },

    locked : function () {
      return this.settings.locked;
    },

    off : function () {
      this.S(this.scope).off('.fndtn.clearing');
      this.S(window).off('.fndtn.clearing');
    },

    reflow : function () {
      this.init();
    }
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.clearing.js
foundation.clearing.js
;(function ($, window, document, undefined) { 'use strict'; Foundation.libs.topbar = { name : 'topbar', version: '5.2.1', settings : { index : 0, sticky_class : 'sticky', custom_back_text: true, back_text: 'Back', is_hover: true, mobile_show_parent_link: false, scrolltop : true, // jump to top when sticky nav menu toggle is clicked sticky_on : 'all' }, init : function (section, method, options) { Foundation.inherit(this, 'add_custom_rule register_media throttle'); var self = this; self.register_media('topbar', 'foundation-mq-topbar'); this.bindings(method, options); self.S('[' + this.attr_name() + ']', this.scope).each(function () { var topbar = $(this), settings = topbar.data(self.attr_name(true) + '-init'), section = self.S('section', this), titlebar = topbar.children().filter('ul').first(); topbar.data('index', 0); var topbarContainer = topbar.parent(); if(topbarContainer.hasClass('fixed') || self.is_sticky(topbar, topbarContainer, settings) ) { self.settings.sticky_class = settings.sticky_class; self.settings.sticky_topbar = topbar; topbar.data('height', topbarContainer.outerHeight()); topbar.data('stickyoffset', topbarContainer.offset().top); } else { topbar.data('height', topbar.outerHeight()); } if (!settings.assembled) self.assemble(topbar); if (settings.is_hover) { self.S('.has-dropdown', topbar).addClass('not-click'); } else { self.S('.has-dropdown', topbar).removeClass('not-click'); } // Pad body when sticky (scrolled) or fixed. 
self.add_custom_rule('.f-topbar-fixed { padding-top: ' + topbar.data('height') + 'px }'); if (topbarContainer.hasClass('fixed')) { self.S('body').addClass('f-topbar-fixed'); } }); }, is_sticky: function (topbar, topbarContainer, settings) { var sticky = topbarContainer.hasClass(settings.sticky_class); if (sticky && settings.sticky_on === 'all') { return true; } else if (sticky && this.small() && settings.sticky_on === 'small') { return true; } else if (sticky && this.medium() && settings.sticky_on === 'medium') { return true; } else if (sticky && this.large() && settings.sticky_on === 'large') { return true; } return false; }, toggle: function (toggleEl) { var self = this; if (toggleEl) { var topbar = self.S(toggleEl).closest('[' + this.attr_name() + ']'); } else { var topbar = self.S('[' + this.attr_name() + ']'); } var settings = topbar.data(this.attr_name(true) + '-init'); var section = self.S('section, .section', topbar); if (self.breakpoint()) { if (!self.rtl) { section.css({left: '0%'}); $('>.name', section).css({left: '100%'}); } else { section.css({right: '0%'}); $('>.name', section).css({right: '100%'}); } self.S('li.moved', section).removeClass('moved'); topbar.data('index', 0); topbar .toggleClass('expanded') .css('height', ''); } if (settings.scrolltop) { if (!topbar.hasClass('expanded')) { if (topbar.hasClass('fixed')) { topbar.parent().addClass('fixed'); topbar.removeClass('fixed'); self.S('body').addClass('f-topbar-fixed'); } } else if (topbar.parent().hasClass('fixed')) { if (settings.scrolltop) { topbar.parent().removeClass('fixed'); topbar.addClass('fixed'); self.S('body').removeClass('f-topbar-fixed'); window.scrollTo(0,0); } else { topbar.parent().removeClass('expanded'); } } } else { if(self.is_sticky(topbar, topbar.parent(), settings)) { topbar.parent().addClass('fixed'); } if(topbar.parent().hasClass('fixed')) { if (!topbar.hasClass('expanded')) { topbar.removeClass('fixed'); topbar.parent().removeClass('expanded'); 
self.update_sticky_positioning(); } else { topbar.addClass('fixed'); topbar.parent().addClass('expanded'); self.S('body').addClass('f-topbar-fixed'); } } } }, timer : null, events : function (bar) { var self = this, S = this.S; S(this.scope) .off('.topbar') .on('click.fndtn.topbar', '[' + this.attr_name() + '] .toggle-topbar', function (e) { e.preventDefault(); self.toggle(this); }) .on('click.fndtn.topbar','.top-bar .top-bar-section li a[href^="#"],[' + this.attr_name() + '] .top-bar-section li a[href^="#"]',function (e) { var li = $(this).closest('li'); if(self.breakpoint() && !li.hasClass('back') && !li.hasClass('has-dropdown')) { self.toggle(); } }) .on('click.fndtn.topbar', '[' + this.attr_name() + '] li.has-dropdown', function (e) { var li = S(this), target = S(e.target), topbar = li.closest('[' + self.attr_name() + ']'), settings = topbar.data(self.attr_name(true) + '-init'); if(target.data('revealId')) { self.toggle(); return; } if (self.breakpoint()) return; if (settings.is_hover && !Modernizr.touch) return; e.stopImmediatePropagation(); if (li.hasClass('hover')) { li .removeClass('hover') .find('li') .removeClass('hover'); li.parents('li.hover') .removeClass('hover'); } else { li.addClass('hover'); if (target[0].nodeName === 'A' && target.parent().hasClass('has-dropdown')) { e.preventDefault(); } } }) .on('click.fndtn.topbar', '[' + this.attr_name() + '] .has-dropdown>a', function (e) { if (self.breakpoint()) { e.preventDefault(); var $this = S(this), topbar = $this.closest('[' + self.attr_name() + ']'), section = topbar.find('section, .section'), dropdownHeight = $this.next('.dropdown').outerHeight(), $selectedLi = $this.closest('li'); topbar.data('index', topbar.data('index') + 1); $selectedLi.addClass('moved'); if (!self.rtl) { section.css({left: -(100 * topbar.data('index')) + '%'}); section.find('>.name').css({left: 100 * topbar.data('index') + '%'}); } else { section.css({right: -(100 * topbar.data('index')) + '%'}); 
section.find('>.name').css({right: 100 * topbar.data('index') + '%'}); } topbar.css('height', $this.siblings('ul').outerHeight(true) + topbar.data('height')); } }); S(window).off('.topbar').on('resize.fndtn.topbar', self.throttle(function () { self.resize.call(self); }, 50)).trigger('resize'); S('body').off('.topbar').on('click.fndtn.topbar touchstart.fndtn.topbar', function (e) { var parent = S(e.target).closest('li').closest('li.hover'); if (parent.length > 0) { return; } S('[' + self.attr_name() + '] li').removeClass('hover'); }); // Go up a level on Click S(this.scope).on('click.fndtn.topbar', '[' + this.attr_name() + '] .has-dropdown .back', function (e) { e.preventDefault(); var $this = S(this), topbar = $this.closest('[' + self.attr_name() + ']'), section = topbar.find('section, .section'), settings = topbar.data(self.attr_name(true) + '-init'), $movedLi = $this.closest('li.moved'), $previousLevelUl = $movedLi.parent(); topbar.data('index', topbar.data('index') - 1); if (!self.rtl) { section.css({left: -(100 * topbar.data('index')) + '%'}); section.find('>.name').css({left: 100 * topbar.data('index') + '%'}); } else { section.css({right: -(100 * topbar.data('index')) + '%'}); section.find('>.name').css({right: 100 * topbar.data('index') + '%'}); } if (topbar.data('index') === 0) { topbar.css('height', ''); } else { topbar.css('height', $previousLevelUl.outerHeight(true) + topbar.data('height')); } setTimeout(function () { $movedLi.removeClass('moved'); }, 300); }); }, resize : function () { var self = this; self.S('[' + this.attr_name() + ']').each(function () { var topbar = self.S(this), settings = topbar.data(self.attr_name(true) + '-init'); var stickyContainer = topbar.parent('.' 
+ self.settings.sticky_class); var stickyOffset; if (!self.breakpoint()) { var doToggle = topbar.hasClass('expanded'); topbar .css('height', '') .removeClass('expanded') .find('li') .removeClass('hover'); if(doToggle) { self.toggle(topbar); } } if(self.is_sticky(topbar, stickyContainer, settings)) { if(stickyContainer.hasClass('fixed')) { // Remove the fixed to allow for correct calculation of the offset. stickyContainer.removeClass('fixed'); stickyOffset = stickyContainer.offset().top; if(self.S(document.body).hasClass('f-topbar-fixed')) { stickyOffset -= topbar.data('height'); } topbar.data('stickyoffset', stickyOffset); stickyContainer.addClass('fixed'); } else { stickyOffset = stickyContainer.offset().top; topbar.data('stickyoffset', stickyOffset); } } }); }, breakpoint : function () { return !matchMedia(Foundation.media_queries['topbar']).matches; }, small : function () { return matchMedia(Foundation.media_queries['small']).matches; }, medium : function () { return matchMedia(Foundation.media_queries['medium']).matches; }, large : function () { return matchMedia(Foundation.media_queries['large']).matches; }, assemble : function (topbar) { var self = this, settings = topbar.data(this.attr_name(true) + '-init'), section = self.S('section', topbar), titlebar = $(this).children().filter('ul').first(); // Pull element out of the DOM for manipulation section.detach(); self.S('.has-dropdown>a', section).each(function () { var $link = self.S(this), $dropdown = $link.siblings('.dropdown'), url = $link.attr('href'); if (!$dropdown.find('.title.back').length) { if (settings.mobile_show_parent_link && url && url.length > 1) { var $titleLi = $('<li class="title back js-generated"><h5><a href="javascript:void(0)"></a></h5></li><li><a class="parent-link js-generated" href="' + url + '">' + $link.text() +'</a></li>'); } else { var $titleLi = $('<li class="title back js-generated"><h5><a href="javascript:void(0)"></a></h5></li>'); } // Copy link to subnav if 
(settings.custom_back_text == true) { $('h5>a', $titleLi).html(settings.back_text); } else { $('h5>a', $titleLi).html('&laquo; ' + $link.html()); } $dropdown.prepend($titleLi); } }); // Put element back in the DOM section.appendTo(topbar); // check for sticky this.sticky(); this.assembled(topbar); }, assembled : function (topbar) { topbar.data(this.attr_name(true), $.extend({}, topbar.data(this.attr_name(true)), {assembled: true})); }, height : function (ul) { var total = 0, self = this; $('> li', ul).each(function () { total += self.S(this).outerHeight(true); }); return total; }, sticky : function () { var $window = this.S(window), self = this; this.S(window).on('scroll', function() { self.update_sticky_positioning(); }); }, update_sticky_positioning: function() { var klass = '.' + this.settings.sticky_class, $window = this.S(window), self = this; if (self.settings.sticky_topbar && self.is_sticky(this.settings.sticky_topbar,this.settings.sticky_topbar.parent(), this.settings)) { var distance = this.settings.sticky_topbar.data('stickyoffset'); if (!self.S(klass).hasClass('expanded')) { if ($window.scrollTop() > (distance)) { if (!self.S(klass).hasClass('fixed')) { self.S(klass).addClass('fixed'); self.S('body').addClass('f-topbar-fixed'); } } else if ($window.scrollTop() <= distance) { if (self.S(klass).hasClass('fixed')) { self.S(klass).removeClass('fixed'); self.S('body').removeClass('f-topbar-fixed'); } } } } }, off : function () { this.S(this.scope).off('.fndtn.topbar'); this.S(window).off('.fndtn.topbar'); }, reflow : function () {} }; }(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.topbar.js
foundation.topbar.js
(function ($, window, document, undefined) { 'use strict'; var header_helpers = function (class_array) { var i = class_array.length; var head = $('head'); while (i--) { if($('head').has('.' + class_array[i]).length === 0) { $('head').append('<meta class="' + class_array[i] + '" />'); } } }; header_helpers([ 'foundation-mq-small', 'foundation-mq-medium', 'foundation-mq-large', 'foundation-mq-xlarge', 'foundation-mq-xxlarge', 'foundation-data-attribute-namespace']); // Enable FastClick if present $(function() { if (typeof FastClick !== 'undefined') { // Don't attach to body if undefined if (typeof document.body !== 'undefined') { FastClick.attach(document.body); } } }); // private Fast Selector wrapper, // returns jQuery object. Only use where // getElementById is not available. var S = function (selector, context) { if (typeof selector === 'string') { if (context) { var cont; if (context.jquery) { cont = context[0]; if (!cont) return context; } else { cont = context; } return $(cont.querySelectorAll(selector)); } return $(document.querySelectorAll(selector)); } return $(selector, context); }; // Namespace functions. var attr_name = function (init) { var arr = []; if (!init) arr.push('data'); if (this.namespace.length > 0) arr.push(this.namespace); arr.push(this.name); return arr.join('-'); }; var add_namespace = function (str) { var parts = str.split('-'), i = parts.length, arr = []; while (i--) { if (i !== 0) { arr.push(parts[i]); } else { if (this.namespace.length > 0) { arr.push(this.namespace, parts[i]); } else { arr.push(parts[i]); } } } return arr.reverse().join('-'); }; // Event binding and data-options updating. 
var bindings = function (method, options) { var self = this, should_bind_events = !S(this).data(this.attr_name(true)); if (typeof method === 'string') { return this[method].call(this, options); } if (S(this.scope).is('[' + this.attr_name() +']')) { S(this.scope).data(this.attr_name(true) + '-init', $.extend({}, this.settings, (options || method), this.data_options(S(this.scope)))); if (should_bind_events) { this.events(this.scope); } } else { S('[' + this.attr_name() +']', this.scope).each(function () { var should_bind_events = !S(this).data(self.attr_name(true) + '-init'); S(this).data(self.attr_name(true) + '-init', $.extend({}, self.settings, (options || method), self.data_options(S(this)))); if (should_bind_events) { self.events(this); } }); } }; var single_image_loaded = function (image, callback) { function loaded () { callback(image[0]); } function bindLoad () { this.one('load', loaded); if (/MSIE (\d+\.\d+);/.test(navigator.userAgent)) { var src = this.attr( 'src' ), param = src.match( /\?/ ) ? 
'&' : '?'; param += 'random=' + (new Date()).getTime(); this.attr('src', src + param); } } if (!image.attr('src')) { loaded(); return; } if (image[0].complete || image[0].readyState === 4) { loaded(); } else { bindLoad.call(image); } }; /* https://github.com/paulirish/matchMedia.js */ window.matchMedia = window.matchMedia || (function( doc, undefined ) { "use strict"; var bool, docElem = doc.documentElement, refNode = docElem.firstElementChild || docElem.firstChild, // fakeBody required for <FF4 when executed in <head> fakeBody = doc.createElement( "body" ), div = doc.createElement( "div" ); div.id = "mq-test-1"; div.style.cssText = "position:absolute;top:-100em"; fakeBody.style.background = "none"; fakeBody.appendChild(div); return function (q) { div.innerHTML = "&shy;<style media=\"" + q + "\"> #mq-test-1 { width: 42px; }</style>"; docElem.insertBefore( fakeBody, refNode ); bool = div.offsetWidth === 42; docElem.removeChild( fakeBody ); return { matches: bool, media: q }; }; }( document )); /* * jquery.requestAnimationFrame * https://github.com/gnarf37/jquery-requestAnimationFrame * Requires jQuery 1.8+ * * Copyright (c) 2012 Corey Frang * Licensed under the MIT license. 
*/ (function($) { // requestAnimationFrame polyfill adapted from Erik Möller // fixes from Paul Irish and Tino Zijdel // http://paulirish.com/2011/requestanimationframe-for-smart-animating/ // http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating var animating, lastTime = 0, vendors = ['webkit', 'moz'], requestAnimationFrame = window.requestAnimationFrame, cancelAnimationFrame = window.cancelAnimationFrame, jqueryFxAvailable = 'undefined' !== typeof jQuery.fx; for (; lastTime < vendors.length && !requestAnimationFrame; lastTime++) { requestAnimationFrame = window[ vendors[lastTime] + "RequestAnimationFrame" ]; cancelAnimationFrame = cancelAnimationFrame || window[ vendors[lastTime] + "CancelAnimationFrame" ] || window[ vendors[lastTime] + "CancelRequestAnimationFrame" ]; } function raf() { if (animating) { requestAnimationFrame(raf); if (jqueryFxAvailable) { jQuery.fx.tick(); } } } if (requestAnimationFrame) { // use rAF window.requestAnimationFrame = requestAnimationFrame; window.cancelAnimationFrame = cancelAnimationFrame; if (jqueryFxAvailable) { jQuery.fx.timer = function (timer) { if (timer() && jQuery.timers.push(timer) && !animating) { animating = true; raf(); } }; jQuery.fx.stop = function () { animating = false; }; } } else { // polyfill window.requestAnimationFrame = function (callback, element) { var currTime = new Date().getTime(), timeToCall = Math.max(0, 16 - (currTime - lastTime)), id = window.setTimeout(function () { callback(currTime + timeToCall); }, timeToCall); lastTime = currTime + timeToCall; return id; }; window.cancelAnimationFrame = function (id) { clearTimeout(id); }; } }( jQuery )); function removeQuotes (string) { if (typeof string === 'string' || string instanceof String) { string = string.replace(/^['\\/"]+|(;\s?})+|['\\/"]+$/g, ''); } return string; } window.Foundation = { name : 'Foundation', version : '5.2.1', media_queries : { small : 
S('.foundation-mq-small').css('font-family').replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g, ''), medium : S('.foundation-mq-medium').css('font-family').replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g, ''), large : S('.foundation-mq-large').css('font-family').replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g, ''), xlarge: S('.foundation-mq-xlarge').css('font-family').replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g, ''), xxlarge: S('.foundation-mq-xxlarge').css('font-family').replace(/^[\/\\'"]+|(;\s?})+|[\/\\'"]+$/g, '') }, stylesheet : $('<style></style>').appendTo('head')[0].sheet, global: { namespace: '' }, init : function (scope, libraries, method, options, response) { var library_arr, args = [scope, method, options, response], responses = []; // check RTL this.rtl = /rtl/i.test(S('html').attr('dir')); // set foundation global scope this.scope = scope || this.scope; this.set_namespace(); if (libraries && typeof libraries === 'string' && !/reflow/i.test(libraries)) { if (this.libs.hasOwnProperty(libraries)) { responses.push(this.init_lib(libraries, args)); } } else { for (var lib in this.libs) { responses.push(this.init_lib(lib, libraries)); } } return scope; }, init_lib : function (lib, args) { if (this.libs.hasOwnProperty(lib)) { this.patch(this.libs[lib]); if (args && args.hasOwnProperty(lib)) { if (typeof this.libs[lib].settings !== 'undefined') { $.extend(true, this.libs[lib].settings, args[lib]); } else if (typeof this.libs[lib].defaults !== 'undefined') { $.extend(true, this.libs[lib].defaults, args[lib]); } return this.libs[lib].init.apply(this.libs[lib], [this.scope, args[lib]]); } args = args instanceof Array ? 
args : Array(args); // PATCH: added this line return this.libs[lib].init.apply(this.libs[lib], args); } return function () {}; }, patch : function (lib) { lib.scope = this.scope; lib.namespace = this.global.namespace; lib.rtl = this.rtl; lib['data_options'] = this.utils.data_options; lib['attr_name'] = attr_name; lib['add_namespace'] = add_namespace; lib['bindings'] = bindings; lib['S'] = this.utils.S; }, inherit : function (scope, methods) { var methods_arr = methods.split(' '), i = methods_arr.length; while (i--) { if (this.utils.hasOwnProperty(methods_arr[i])) { scope[methods_arr[i]] = this.utils[methods_arr[i]]; } } }, set_namespace: function () { // Don't bother reading the namespace out of the meta tag // if the namespace has been set globally in javascript // // Example: something like Foundation.global.namespace = 'my-namespace'; // // Otherwise, if the namespace hasn't been set globally, // read it out of the meta tag // var namespace = this.global.namespace || $('.foundation-data-attribute-namespace').css('font-family'); if (/false/i.test(namespace)) return; this.global.namespace = namespace; }, libs : {}, // methods that can be inherited in libraries utils : { // Description: // Fast Selector wrapper returns jQuery object. Only use where getElementById // is not available. // // Arguments: // Selector (String): CSS selector describing the element(s) to be // returned as a jQuery object. // // Scope (String): CSS selector describing the area to be searched. Default // is document. // // Returns: // Element (jQuery Object): jQuery object containing elements matching the // selector within the scope. S : S, // Description: // Executes a function a max of once every n milliseconds // // Arguments: // Func (Function): Function to be throttled. // // Delay (Integer): Function execution threshold in milliseconds. // // Returns: // Lazy_function (Function): Function with throttling applied. 
throttle : function (func, delay) { var timer = null; return function () { var context = this, args = arguments; clearTimeout(timer); timer = setTimeout(function () { func.apply(context, args); }, delay); }; }, // Description: // Executes a function when it stops being invoked for n seconds // Modified version of _.debounce() http://underscorejs.org // // Arguments: // Func (Function): Function to be debounced. // // Delay (Integer): Function execution threshold in milliseconds. // // Immediate (Bool): Whether the function should be called at the beginning // of the delay instead of the end. Default is false. // // Returns: // Lazy_function (Function): Function with debouncing applied. debounce : function (func, delay, immediate) { var timeout, result; return function () { var context = this, args = arguments; var later = function () { timeout = null; if (!immediate) result = func.apply(context, args); }; var callNow = immediate && !timeout; clearTimeout(timeout); timeout = setTimeout(later, delay); if (callNow) result = func.apply(context, args); return result; }; }, // Description: // Parses data-options attribute // // Arguments: // El (jQuery Object): Element to be parsed. // // Returns: // Options (Javascript Object): Contents of the element's data-options // attribute. data_options : function (el) { var opts = {}, ii, p, opts_arr, data_options = function (el) { var namespace = Foundation.global.namespace; if (namespace.length > 0) { return el.data(namespace + '-options'); } return el.data('options'); }; var cached_options = data_options(el); if (typeof cached_options === 'object') { return cached_options; } opts_arr = (cached_options || ':').split(';'), ii = opts_arr.length; function isNumber (o) { return ! 
isNaN (o-0) && o !== null && o !== "" && o !== false && o !== true; } function trim (str) { if (typeof str === 'string') return $.trim(str); return str; } while (ii--) { p = opts_arr[ii].split(':'); if (/true/i.test(p[1])) p[1] = true; if (/false/i.test(p[1])) p[1] = false; if (isNumber(p[1])) { if (p[1].indexOf('.') === -1) { p[1] = parseInt(p[1], 10); } else { p[1] = parseFloat(p[1], 10); } } if (p.length === 2 && p[0].length > 0) { opts[trim(p[0])] = trim(p[1]); } } return opts; }, // Description: // Adds JS-recognizable media queries // // Arguments: // Media (String): Key string for the media query to be stored as in // Foundation.media_queries // // Class (String): Class name for the generated <meta> tag register_media : function (media, media_class) { if(Foundation.media_queries[media] === undefined) { $('head').append('<meta class="' + media_class + '">'); Foundation.media_queries[media] = removeQuotes($('.' + media_class).css('font-family')); } }, // Description: // Add custom CSS within a JS-defined media query // // Arguments: // Rule (String): CSS rule to be appended to the document. // // Media (String): Optional media query string for the CSS rule to be // nested under. add_custom_rule : function (rule, media) { if (media === undefined) { Foundation.stylesheet.insertRule(rule, Foundation.stylesheet.cssRules.length); } else { var query = Foundation.media_queries[media]; if (query !== undefined) { Foundation.stylesheet.insertRule('@media ' + Foundation.media_queries[media] + '{ ' + rule + ' }'); } } }, // Description: // Performs a callback function when an image is fully loaded // // Arguments: // Image (jQuery Object): Image(s) to check if loaded. // // Callback (Function): Fundation to execute when image is fully loaded. 
image_loaded : function (images, callback) { var self = this, unloaded = images.length; if (unloaded === 0) { callback(images); } images.each(function () { single_image_loaded(self.S(this), function () { unloaded -= 1; if (unloaded === 0) { callback(images); } }); }); }, // Description: // Returns a random, alphanumeric string // // Arguments: // Length (Integer): Length of string to be generated. Defaults to random // integer. // // Returns: // Rand (String): Pseudo-random, alphanumeric string. random_str : function () { if (!this.fidx) this.fidx = 0; this.prefix = this.prefix || [(this.name || 'F'), (+new Date).toString(36)].join('-'); return this.prefix + (this.fidx++).toString(36); } } }; $.fn.foundation = function () { var args = Array.prototype.slice.call(arguments, 0); return this.each(function () { Foundation.init.apply(Foundation, [this].concat(args)); return this; }); }; }(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.js
foundation.js
;(function ($, window, document, undefined) { 'use strict'; var Modernizr = Modernizr || false; Foundation.libs.joyride = { name : 'joyride', version : '5.2.1', defaults : { expose : false, // turn on or off the expose feature modal : true, // Whether to cover page with modal during the tour tip_location : 'bottom', // 'top' or 'bottom' in relation to parent nub_position : 'auto', // override on a per tooltip bases scroll_speed : 1500, // Page scrolling speed in milliseconds, 0 = no scroll animation scroll_animation : 'linear', // supports 'swing' and 'linear', extend with jQuery UI. timer : 0, // 0 = no timer , all other numbers = timer in milliseconds start_timer_on_click : true, // true or false - true requires clicking the first button start the timer start_offset : 0, // the index of the tooltip you want to start on (index of the li) next_button : true, // true or false to control whether a next button is used tip_animation : 'fade', // 'pop' or 'fade' in each tip pause_after : [], // array of indexes where to pause the tour after exposed : [], // array of expose elements tip_animation_fade_speed : 300, // when tipAnimation = 'fade' this is speed in milliseconds for the transition cookie_monster : false, // true or false to control whether cookies are used cookie_name : 'joyride', // Name the cookie you'll use cookie_domain : false, // Will this cookie be attached to a domain, ie. '.notableapp.com' cookie_expires : 365, // set when you would like the cookie to expire. 
tip_container : 'body', // Where will the tip be attached tip_location_patterns : { top: ['bottom'], bottom: [], // bottom should not need to be repositioned left: ['right', 'top', 'bottom'], right: ['left', 'top', 'bottom'] }, post_ride_callback : function (){}, // A method to call once the tour closes (canceled or complete) post_step_callback : function (){}, // A method to call after each step pre_step_callback : function (){}, // A method to call before each step pre_ride_callback : function (){}, // A method to call before the tour starts (passed index, tip, and cloned exposed element) post_expose_callback : function (){}, // A method to call after an element has been exposed template : { // HTML segments for tip layout link : '<a href="#close" class="joyride-close-tip">&times;</a>', timer : '<div class="joyride-timer-indicator-wrap"><span class="joyride-timer-indicator"></span></div>', tip : '<div class="joyride-tip-guide"><span class="joyride-nub"></span></div>', wrapper : '<div class="joyride-content-wrapper"></div>', button : '<a href="#" class="small button joyride-next-tip"></a>', modal : '<div class="joyride-modal-bg"></div>', expose : '<div class="joyride-expose-wrapper"></div>', expose_cover: '<div class="joyride-expose-cover"></div>' }, expose_add_class : '' // One or more space-separated class names to be added to exposed element }, init : function (scope, method, options) { Foundation.inherit(this, 'throttle random_str'); this.settings = this.settings || $.extend({}, this.defaults, (options || method)); this.bindings(method, options) }, events : function () { var self = this; $(this.scope) .off('.joyride') .on('click.fndtn.joyride', '.joyride-next-tip, .joyride-modal-bg', function (e) { e.preventDefault(); if (this.settings.$li.next().length < 1) { this.end(); } else if (this.settings.timer > 0) { clearTimeout(this.settings.automate); this.hide(); this.show(); this.startTimer(); } else { this.hide(); this.show(); } }.bind(this)) 
.on('click.fndtn.joyride', '.joyride-close-tip', function (e) { e.preventDefault(); this.end(); }.bind(this)); $(window) .off('.joyride') .on('resize.fndtn.joyride', self.throttle(function () { if ($('[' + self.attr_name() + ']').length > 0 && self.settings.$next_tip) { if (self.settings.exposed.length > 0) { var $els = $(self.settings.exposed); $els.each(function () { var $this = $(this); self.un_expose($this); self.expose($this); }); } if (self.is_phone()) { self.pos_phone(); } else { self.pos_default(false, true); } } }, 100)); }, start : function () { var self = this, $this = $('[' + this.attr_name() + ']', this.scope), integer_settings = ['timer', 'scrollSpeed', 'startOffset', 'tipAnimationFadeSpeed', 'cookieExpires'], int_settings_count = integer_settings.length; if (!$this.length > 0) return; if (!this.settings.init) this.events(); this.settings = $this.data(this.attr_name(true) + '-init'); // non configureable settings this.settings.$content_el = $this; this.settings.$body = $(this.settings.tip_container); this.settings.body_offset = $(this.settings.tip_container).position(); this.settings.$tip_content = this.settings.$content_el.find('> li'); this.settings.paused = false; this.settings.attempts = 0; // can we create cookies? if (typeof $.cookie !== 'function') { this.settings.cookie_monster = false; } // generate the tips and insert into dom. 
if (!this.settings.cookie_monster || this.settings.cookie_monster && !$.cookie(this.settings.cookie_name)) { this.settings.$tip_content.each(function (index) { var $this = $(this); this.settings = $.extend({}, self.defaults, self.data_options($this)) // Make sure that settings parsed from data_options are integers where necessary var i = int_settings_count; while (i--) { self.settings[integer_settings[i]] = parseInt(self.settings[integer_settings[i]], 10); } self.create({$li : $this, index : index}); }); // show first tip if (!this.settings.start_timer_on_click && this.settings.timer > 0) { this.show('init'); this.startTimer(); } else { this.show('init'); } } }, resume : function () { this.set_li(); this.show(); }, tip_template : function (opts) { var $blank, content; opts.tip_class = opts.tip_class || ''; $blank = $(this.settings.template.tip).addClass(opts.tip_class); content = $.trim($(opts.li).html()) + this.button_text(opts.button_text) + this.settings.template.link + this.timer_instance(opts.index); $blank.append($(this.settings.template.wrapper)); $blank.first().attr(this.add_namespace('data-index'), opts.index); $('.joyride-content-wrapper', $blank).append(content); return $blank[0]; }, timer_instance : function (index) { var txt; if ((index === 0 && this.settings.start_timer_on_click && this.settings.timer > 0) || this.settings.timer === 0) { txt = ''; } else { txt = $(this.settings.template.timer)[0].outerHTML; } return txt; }, button_text : function (txt) { if (this.settings.next_button) { txt = $.trim(txt) || 'Next'; txt = $(this.settings.template.button).append(txt)[0].outerHTML; } else { txt = ''; } return txt; }, create : function (opts) { var buttonText = opts.$li.attr(this.add_namespace('data-button')) || opts.$li.attr(this.add_namespace('data-text')), tipClass = opts.$li.attr('class'), $tip_content = $(this.tip_template({ tip_class : tipClass, index : opts.index, button_text : buttonText, li : opts.$li })); 
$(this.settings.tip_container).append($tip_content); }, show : function (init) { var $timer = null; // are we paused? if (this.settings.$li === undefined || ($.inArray(this.settings.$li.index(), this.settings.pause_after) === -1)) { // don't go to the next li if the tour was paused if (this.settings.paused) { this.settings.paused = false; } else { this.set_li(init); } this.settings.attempts = 0; if (this.settings.$li.length && this.settings.$target.length > 0) { if (init) { //run when we first start this.settings.pre_ride_callback(this.settings.$li.index(), this.settings.$next_tip); if (this.settings.modal) { this.show_modal(); } } this.settings.pre_step_callback(this.settings.$li.index(), this.settings.$next_tip); if (this.settings.modal && this.settings.expose) { this.expose(); } this.settings.tip_settings = $.extend({}, this.settings, this.data_options(this.settings.$li)); this.settings.timer = parseInt(this.settings.timer, 10); this.settings.tip_settings.tip_location_pattern = this.settings.tip_location_patterns[this.settings.tip_settings.tip_location]; // scroll if not modal if (!/body/i.test(this.settings.$target.selector)) { this.scroll_to(); } if (this.is_phone()) { this.pos_phone(true); } else { this.pos_default(true); } $timer = this.settings.$next_tip.find('.joyride-timer-indicator'); if (/pop/i.test(this.settings.tip_animation)) { $timer.width(0); if (this.settings.timer > 0) { this.settings.$next_tip.show(); setTimeout(function () { $timer.animate({ width: $timer.parent().width() }, this.settings.timer, 'linear'); }.bind(this), this.settings.tip_animation_fade_speed); } else { this.settings.$next_tip.show(); } } else if (/fade/i.test(this.settings.tip_animation)) { $timer.width(0); if (this.settings.timer > 0) { this.settings.$next_tip .fadeIn(this.settings.tip_animation_fade_speed) .show(); setTimeout(function () { $timer.animate({ width: $timer.parent().width() }, this.settings.timer, 'linear'); }.bind(this), this.settings.tip_animation_fadeSpeed); } 
else { this.settings.$next_tip.fadeIn(this.settings.tip_animation_fade_speed); } } this.settings.$current_tip = this.settings.$next_tip; // skip non-existant targets } else if (this.settings.$li && this.settings.$target.length < 1) { this.show(); } else { this.end(); } } else { this.settings.paused = true; } }, is_phone : function () { return matchMedia(Foundation.media_queries.small).matches && !matchMedia(Foundation.media_queries.medium).matches; }, hide : function () { if (this.settings.modal && this.settings.expose) { this.un_expose(); } if (!this.settings.modal) { $('.joyride-modal-bg').hide(); } // Prevent scroll bouncing...wait to remove from layout this.settings.$current_tip.css('visibility', 'hidden'); setTimeout($.proxy(function() { this.hide(); this.css('visibility', 'visible'); }, this.settings.$current_tip), 0); this.settings.post_step_callback(this.settings.$li.index(), this.settings.$current_tip); }, set_li : function (init) { if (init) { this.settings.$li = this.settings.$tip_content.eq(this.settings.start_offset); this.set_next_tip(); this.settings.$current_tip = this.settings.$next_tip; } else { this.settings.$li = this.settings.$li.next(); this.set_next_tip(); } this.set_target(); }, set_next_tip : function () { this.settings.$next_tip = $(".joyride-tip-guide").eq(this.settings.$li.index()); this.settings.$next_tip.data('closed', ''); }, set_target : function () { var cl = this.settings.$li.attr(this.add_namespace('data-class')), id = this.settings.$li.attr(this.add_namespace('data-id')), $sel = function () { if (id) { return $(document.getElementById(id)); } else if (cl) { return $('.' 
+ cl).first(); } else { return $('body'); } }; this.settings.$target = $sel(); }, scroll_to : function () { var window_half, tipOffset; window_half = $(window).height() / 2; tipOffset = Math.ceil(this.settings.$target.offset().top - window_half + this.settings.$next_tip.outerHeight()); if (tipOffset != 0) { $('html, body').animate({ scrollTop: tipOffset }, this.settings.scroll_speed, 'swing'); } }, paused : function () { return ($.inArray((this.settings.$li.index() + 1), this.settings.pause_after) === -1); }, restart : function () { this.hide(); this.settings.$li = undefined; this.show('init'); }, pos_default : function (init, resizing) { var half_fold = Math.ceil($(window).height() / 2), tip_position = this.settings.$next_tip.offset(), $nub = this.settings.$next_tip.find('.joyride-nub'), nub_width = Math.ceil($nub.outerWidth() / 2), nub_height = Math.ceil($nub.outerHeight() / 2), toggle = init || false; // tip must not be "display: none" to calculate position if (toggle) { this.settings.$next_tip.css('visibility', 'hidden'); this.settings.$next_tip.show(); } if (typeof resizing === 'undefined') { resizing = false; } if (!/body/i.test(this.settings.$target.selector)) { if (this.bottom()) { if (this.rtl) { this.settings.$next_tip.css({ top: (this.settings.$target.offset().top + nub_height + this.settings.$target.outerHeight()), left: this.settings.$target.offset().left + this.settings.$target.outerWidth() - this.settings.$next_tip.outerWidth()}); } else { this.settings.$next_tip.css({ top: (this.settings.$target.offset().top + nub_height + this.settings.$target.outerHeight()), left: this.settings.$target.offset().left}); } this.nub_position($nub, this.settings.tip_settings.nub_position, 'top'); } else if (this.top()) { if (this.rtl) { this.settings.$next_tip.css({ top: (this.settings.$target.offset().top - this.settings.$next_tip.outerHeight() - nub_height), left: this.settings.$target.offset().left + this.settings.$target.outerWidth() - 
this.settings.$next_tip.outerWidth()}); } else { this.settings.$next_tip.css({ top: (this.settings.$target.offset().top - this.settings.$next_tip.outerHeight() - nub_height), left: this.settings.$target.offset().left}); } this.nub_position($nub, this.settings.tip_settings.nub_position, 'bottom'); } else if (this.right()) { this.settings.$next_tip.css({ top: this.settings.$target.offset().top, left: (this.settings.$target.outerWidth() + this.settings.$target.offset().left + nub_width)}); this.nub_position($nub, this.settings.tip_settings.nub_position, 'left'); } else if (this.left()) { this.settings.$next_tip.css({ top: this.settings.$target.offset().top, left: (this.settings.$target.offset().left - this.settings.$next_tip.outerWidth() - nub_width)}); this.nub_position($nub, this.settings.tip_settings.nub_position, 'right'); } if (!this.visible(this.corners(this.settings.$next_tip)) && this.settings.attempts < this.settings.tip_settings.tip_location_pattern.length) { $nub.removeClass('bottom') .removeClass('top') .removeClass('right') .removeClass('left'); this.settings.tip_settings.tip_location = this.settings.tip_settings.tip_location_pattern[this.settings.attempts]; this.settings.attempts++; this.pos_default(); } } else if (this.settings.$li.length) { this.pos_modal($nub); } if (toggle) { this.settings.$next_tip.hide(); this.settings.$next_tip.css('visibility', 'visible'); } }, pos_phone : function (init) { var tip_height = this.settings.$next_tip.outerHeight(), tip_offset = this.settings.$next_tip.offset(), target_height = this.settings.$target.outerHeight(), $nub = $('.joyride-nub', this.settings.$next_tip), nub_height = Math.ceil($nub.outerHeight() / 2), toggle = init || false; $nub.removeClass('bottom') .removeClass('top') .removeClass('right') .removeClass('left'); if (toggle) { this.settings.$next_tip.css('visibility', 'hidden'); this.settings.$next_tip.show(); } if (!/body/i.test(this.settings.$target.selector)) { if (this.top()) { 
this.settings.$next_tip.offset({top: this.settings.$target.offset().top - tip_height - nub_height}); $nub.addClass('bottom'); } else { this.settings.$next_tip.offset({top: this.settings.$target.offset().top + target_height + nub_height}); $nub.addClass('top'); } } else if (this.settings.$li.length) { this.pos_modal($nub); } if (toggle) { this.settings.$next_tip.hide(); this.settings.$next_tip.css('visibility', 'visible'); } }, pos_modal : function ($nub) { this.center(); $nub.hide(); this.show_modal(); }, show_modal : function () { if (!this.settings.$next_tip.data('closed')) { var joyridemodalbg = $('.joyride-modal-bg'); if (joyridemodalbg.length < 1) { $('body').append(this.settings.template.modal).show(); } if (/pop/i.test(this.settings.tip_animation)) { joyridemodalbg.show(); } else { joyridemodalbg.fadeIn(this.settings.tip_animation_fade_speed); } } }, expose : function () { var expose, exposeCover, el, origCSS, origClasses, randId = 'expose-' + this.random_str(6); if (arguments.length > 0 && arguments[0] instanceof $) { el = arguments[0]; } else if(this.settings.$target && !/body/i.test(this.settings.$target.selector)){ el = this.settings.$target; } else { return false; } if(el.length < 1){ if(window.console){ console.error('element not valid', el); } return false; } expose = $(this.settings.template.expose); this.settings.$body.append(expose); expose.css({ top: el.offset().top, left: el.offset().left, width: el.outerWidth(true), height: el.outerHeight(true) }); exposeCover = $(this.settings.template.expose_cover); origCSS = { zIndex: el.css('z-index'), position: el.css('position') }; origClasses = el.attr('class') == null ? 
'' : el.attr('class'); el.css('z-index',parseInt(expose.css('z-index'))+1); if (origCSS.position == 'static') { el.css('position','relative'); } el.data('expose-css',origCSS); el.data('orig-class', origClasses); el.attr('class', origClasses + ' ' + this.settings.expose_add_class); exposeCover.css({ top: el.offset().top, left: el.offset().left, width: el.outerWidth(true), height: el.outerHeight(true) }); if (this.settings.modal) this.show_modal(); this.settings.$body.append(exposeCover); expose.addClass(randId); exposeCover.addClass(randId); el.data('expose', randId); this.settings.post_expose_callback(this.settings.$li.index(), this.settings.$next_tip, el); this.add_exposed(el); }, un_expose : function () { var exposeId, el, expose , origCSS, origClasses, clearAll = false; if (arguments.length > 0 && arguments[0] instanceof $) { el = arguments[0]; } else if(this.settings.$target && !/body/i.test(this.settings.$target.selector)){ el = this.settings.$target; } else { return false; } if(el.length < 1){ if (window.console) { console.error('element not valid', el); } return false; } exposeId = el.data('expose'); expose = $('.' + exposeId); if (arguments.length > 1) { clearAll = arguments[1]; } if (clearAll === true) { $('.joyride-expose-wrapper,.joyride-expose-cover').remove(); } else { expose.remove(); } origCSS = el.data('expose-css'); if (origCSS.zIndex == 'auto') { el.css('z-index', ''); } else { el.css('z-index', origCSS.zIndex); } if (origCSS.position != el.css('position')) { if(origCSS.position == 'static') {// this is default, no need to set it. 
el.css('position', ''); } else { el.css('position', origCSS.position); } } origClasses = el.data('orig-class'); el.attr('class', origClasses); el.removeData('orig-classes'); el.removeData('expose'); el.removeData('expose-z-index'); this.remove_exposed(el); }, add_exposed: function(el){ this.settings.exposed = this.settings.exposed || []; if (el instanceof $ || typeof el === 'object') { this.settings.exposed.push(el[0]); } else if (typeof el == 'string') { this.settings.exposed.push(el); } }, remove_exposed: function(el){ var search, i; if (el instanceof $) { search = el[0] } else if (typeof el == 'string'){ search = el; } this.settings.exposed = this.settings.exposed || []; i = this.settings.exposed.length; while (i--) { if (this.settings.exposed[i] == search) { this.settings.exposed.splice(i, 1); return; } } }, center : function () { var $w = $(window); this.settings.$next_tip.css({ top : ((($w.height() - this.settings.$next_tip.outerHeight()) / 2) + $w.scrollTop()), left : ((($w.width() - this.settings.$next_tip.outerWidth()) / 2) + $w.scrollLeft()) }); return true; }, bottom : function () { return /bottom/i.test(this.settings.tip_settings.tip_location); }, top : function () { return /top/i.test(this.settings.tip_settings.tip_location); }, right : function () { return /right/i.test(this.settings.tip_settings.tip_location); }, left : function () { return /left/i.test(this.settings.tip_settings.tip_location); }, corners : function (el) { var w = $(window), window_half = w.height() / 2, //using this to calculate since scroll may not have finished yet. 
tipOffset = Math.ceil(this.settings.$target.offset().top - window_half + this.settings.$next_tip.outerHeight()), right = w.width() + w.scrollLeft(), offsetBottom = w.height() + tipOffset, bottom = w.height() + w.scrollTop(), top = w.scrollTop(); if (tipOffset < top) { if (tipOffset < 0) { top = 0; } else { top = tipOffset; } } if (offsetBottom > bottom) { bottom = offsetBottom; } return [ el.offset().top < top, right < el.offset().left + el.outerWidth(), bottom < el.offset().top + el.outerHeight(), w.scrollLeft() > el.offset().left ]; }, visible : function (hidden_corners) { var i = hidden_corners.length; while (i--) { if (hidden_corners[i]) return false; } return true; }, nub_position : function (nub, pos, def) { if (pos === 'auto') { nub.addClass(def); } else { nub.addClass(pos); } }, startTimer : function () { if (this.settings.$li.length) { this.settings.automate = setTimeout(function () { this.hide(); this.show(); this.startTimer(); }.bind(this), this.settings.timer); } else { clearTimeout(this.settings.automate); } }, end : function () { if (this.settings.cookie_monster) { $.cookie(this.settings.cookie_name, 'ridden', { expires: this.settings.cookie_expires, domain: this.settings.cookie_domain }); } if (this.settings.timer > 0) { clearTimeout(this.settings.automate); } if (this.settings.modal && this.settings.expose) { this.un_expose(); } this.settings.$next_tip.data('closed', true); $('.joyride-modal-bg').hide(); this.settings.$current_tip.hide(); this.settings.post_step_callback(this.settings.$li.index(), this.settings.$current_tip); this.settings.post_ride_callback(this.settings.$li.index(), this.settings.$current_tip); $('.joyride-tip-guide').remove(); }, off : function () { $(this.scope).off('.joyride'); $(window).off('.joyride'); $('.joyride-close-tip, .joyride-next-tip, .joyride-modal-bg').off('.joyride'); $('.joyride-tip-guide, .joyride-modal-bg').remove(); clearTimeout(this.settings.automate); this.settings = {}; }, reflow : function () {} }; 
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.joyride.js
foundation.joyride.js
;(function ($, window, document, undefined) {
  'use strict';

  // Magellan "expedition": an (optionally sticky) sub-navigation whose links
  // smooth-scroll to destinations and whose items receive an active class as
  // their destination scrolls into view.
  Foundation.libs['magellan-expedition'] = {
    name : 'magellan-expedition',
    version : '5.2.1',

    settings : {
      active_class: 'active',
      threshold: 0, // pixels from the top of the expedition for it to become fixed
      destination_threshold: 20, // pixels from the top of destination for it to be considered active
      throttle_delay: 30 // calculation throttling to increase framerate
    },

    // Standard Foundation plugin entry point; wires data-attribute bindings.
    init : function (scope, method, options) {
      Foundation.inherit(this, 'throttle');
      this.bindings(method, options);
    },

    // Bind click (smooth scroll to destination), scroll (arrival tracking)
    // and resize (recompute sticky offset) handlers.
    events : function () {
      var self = this,
          S = self.S,
          settings = self.settings;

      // initialize expedition offset
      self.set_expedition_position();

      S(self.scope)
        .off('.magellan')
        .on('click.fndtn.magellan', '[' + self.add_namespace('data-magellan-arrival') + '] a[href^="#"]', function (e) {
          e.preventDefault();
          var expedition = $(this).closest('[' + self.attr_name() + ']'),
              settings = expedition.data('magellan-expedition-init');

          // Destination may be an <a name> anchor or an element id.
          var hash = this.hash.split('#').join(''),
              target = $('a[name=' + hash + ']');
          if (target.length === 0) target = $('#' + hash);

          // Account for expedition height if fixed position
          var scroll_top = target.offset().top;
          if (expedition.css('position') === 'fixed') {
            scroll_top = scroll_top - expedition.outerHeight();
          }

          $('html, body').stop().animate({
            'scrollTop': scroll_top
          }, 700, 'swing', function () {
            // Update the fragment only after the animation completes so the
            // browser does not jump-scroll on its own.
            window.location.hash = '#' + hash;
          });
        })
        .on('scroll.fndtn.magellan', self.throttle(this.check_for_arrivals.bind(this), settings.throttle_delay));

      $(window)
        .on('resize.fndtn.magellan', self.throttle(this.set_expedition_position.bind(this), settings.throttle_delay));
    },

    // Scroll handler: refresh the active arrival and sticky positioning.
    check_for_arrivals : function () {
      var self = this;
      self.update_arrivals();
      self.update_expedition_positions();
    },

    // Measure and cache the natural (un-fixed) document offset of every
    // expedition marked as fixed.
    set_expedition_position : function () {
      var self = this;
      $('[' + this.attr_name() + '=fixed]', self.scope).each(function (idx, el) {
        var expedition = $(this),
            // FIX: was attr('styles') — a non-existent attribute that always
            // read undefined, so the saved inline style was lost on restore.
            styles = expedition.attr('style'),
            top_offset;

        // Temporarily clear inline styles so offset() reflects the natural
        // document position, then restore the saved style string.
        expedition.attr('style', '');
        top_offset = expedition.offset().top;
        expedition.data(self.data_attr('magellan-top-offset'), top_offset);
        // '' fallback keeps .attr() in setter mode when no style was saved
        // (jQuery treats an undefined value argument as a getter call).
        expedition.attr('style', styles || '');
      });
    },

    // Pin expeditions whose cached offset has been scrolled past, inserting a
    // clone placeholder so the document height stays constant.
    update_expedition_positions : function () {
      var self = this,
          window_top_offset = $(window).scrollTop();
      $('[' + this.attr_name() + '=fixed]', self.scope).each(function () {
        var expedition = $(this),
            top_offset = expedition.data('magellan-top-offset');

        if (window_top_offset >= top_offset) {
          // Placeholder allows height calculations to be consistent even when
          // appearing to switch between fixed/non-fixed placement
          var placeholder = expedition.prev('[' + self.add_namespace('data-magellan-expedition-clone') + ']');
          if (placeholder.length === 0) {
            placeholder = expedition.clone();
            placeholder.removeAttr(self.attr_name());
            placeholder.attr(self.add_namespace('data-magellan-expedition-clone'), '');
            expedition.before(placeholder);
          }
          expedition.css({position: 'fixed', top: 0});
        } else {
          expedition.prev('[' + self.add_namespace('data-magellan-expedition-clone') + ']').remove();
          expedition.attr('style', '');
        }
      });
    },

    // Toggle the active class on the arrival link whose destination is the
    // last one scrolled past (offsets() is sorted top-down, so later matches
    // supersede earlier ones).
    update_arrivals : function () {
      var self = this,
          window_top_offset = $(window).scrollTop();
      $('[' + this.attr_name() + ']', self.scope).each(function () {
        var expedition = $(this),
            // FIX: dropped redundant double assignment (settings = settings = ...)
            settings = expedition.data(self.attr_name(true) + '-init'),
            offsets = self.offsets(expedition, window_top_offset),
            arrivals = expedition.find('[' + self.add_namespace('data-magellan-arrival') + ']'),
            active_item = false;

        offsets.each(function (idx, item) {
          if (item.viewport_offset >= item.top_offset) {
            // (hoisted: re-querying arrivals here selected the same elements)
            arrivals.not(item.arrival).removeClass(settings.active_class);
            item.arrival.addClass(settings.active_class);
            active_item = true;
            return true; // keep iterating: a later (lower) destination may win
          }
        });

        if (!active_item) arrivals.removeClass(settings.active_class);
      });
    },

    // Build a top-sorted list of {destination, arrival, top_offset,
    // viewport_offset} records for every arrival link whose destination
    // exists (jQuery .map drops the undefined returns).
    offsets : function (expedition, window_offset) {
      var self = this,
          settings = expedition.data(self.attr_name(true) + '-init'),
          viewport_offset = (window_offset + settings.destination_threshold);

      return expedition.find('[' + self.add_namespace('data-magellan-arrival') + ']').map(function (idx, el) {
        var name = $(this).data(self.data_attr('magellan-arrival')),
            dest = $('[' + self.add_namespace('data-magellan-destination') + '=' + name + ']');
        if (dest.length > 0) {
          var top_offset = dest.offset().top;
          return {
            destination : dest,
            arrival : $(this),
            top_offset : top_offset,
            viewport_offset : viewport_offset
          };
        }
      }).sort(function (a, b) {
        if (a.top_offset < b.top_offset) return -1;
        if (a.top_offset > b.top_offset) return 1;
        return 0;
      });
    },

    // Data-attribute key for this plugin, namespaced when configured.
    data_attr: function (str) {
      if (this.namespace.length > 0) {
        return this.namespace + '-' + str;
      }
      return str;
    },

    // Unbind all magellan handlers.
    off : function () {
      this.S(this.scope).off('.magellan');
      this.S(window).off('.magellan');
    },

    reflow : function () {
      var self = this;
      // remove placeholder expeditions used for height calculation purposes
      $('[' + self.add_namespace('data-magellan-expedition-clone') + ']', self.scope).remove();
    }
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.magellan.js
foundation.magellan.js
;(function ($, window, document, undefined) {
  'use strict';

  // Interchange: responsive content swapping. Elements declare a list of
  // (path, media-query) scenarios in a data attribute; on resize the first
  // matching scenario's directive (by default "replace") is applied.
  Foundation.libs.interchange = {
    name : 'interchange',
    version : '5.2.1',
    cache : {},          // uuid -> parsed scenario list
    images_loaded : false,
    nodes_loaded : false,

    settings : {
      load_attr : 'interchange',
      // Shorthand names usable in place of full media-query strings.
      named_queries : {
        'default' : 'only screen',
        small : Foundation.media_queries.small,
        medium : Foundation.media_queries.medium,
        large : Foundation.media_queries.large,
        xlarge : Foundation.media_queries.xlarge,
        xxlarge: Foundation.media_queries.xxlarge,
        landscape : 'only screen and (orientation: landscape)',
        portrait : 'only screen and (orientation: portrait)',
        retina : 'only screen and (-webkit-min-device-pixel-ratio: 2),' +
          'only screen and (min--moz-device-pixel-ratio: 2),' +
          'only screen and (-o-min-device-pixel-ratio: 2/1),' +
          'only screen and (min-device-pixel-ratio: 2),' +
          'only screen and (min-resolution: 192dpi),' +
          'only screen and (min-resolution: 2dppx)'
      },

      directives : {
        // Swap the element's source/content for `path`. `trigger` fires an
        // event named after the directive on the element, forwarding any
        // parameters, e.g. trigger(), trigger([a, b, c]) or trigger(a, b, c):
        //   $('#interchangeContainer').on('replace', function (e, a, b, c) {
        //     console.log($(this).html(), a, b, c);
        //   });
        replace: function (el, path, trigger) {
          // <img>: retarget the src (skip when it already matches).
          if (/IMG/.test(el[0].nodeName)) {
            var orig_path = el[0].src;
            if (new RegExp(path, 'i').test(orig_path)) return;
            el[0].src = path;
            return trigger(el[0].src);
          }

          // Capture the key base up front: `this` is NOT the interchange
          // object inside the $.get callback below.
          var data_attr = this.data_attr,
              last_path = el.data(data_attr + '-last-path');
          if (last_path == path) return;

          // FIX: the extension test was a *string* containing regex
          // delimiters ("/^.(\.jpg|...)/") whose backslash escapes were lost,
          // so it matched nearly any path containing '/'. Use a real RegExp
          // literal matching image extensions (optional query/fragment).
          if (/\.(jpg|jpeg|png|gif|tiff|bmp)([?#].*)?$/i.test(path)) {
            $(el).css('background-image', 'url(' + path + ')');
            // FIX: write the same key that is read above; the old hard-coded
            // 'interchange-last-path' never matched the read key, so the
            // last-path cache was ineffective.
            el.data(data_attr + '-last-path', path);
            return trigger(path);
          }

          // Anything else is treated as an HTML partial fetched via AJAX.
          return $.get(path, function (response) {
            el.html(response);
            // FIX: used `this.data_attr` here, but `this` inside a $.get
            // callback is the jqXHR settings object, producing the key
            // "undefined-last-path"; use the captured key base instead.
            el.data(data_attr + '-last-path', path);
            trigger();
          });
        }
      }
    },

    init : function (scope, method, options) {
      Foundation.inherit(this, 'throttle random_str');
      this.data_attr = this.set_data_attr();
      $.extend(true, this.settings, method, options);
      this.bindings(method, options);
      this.load('images');
      this.load('nodes');
    },

    // Concatenated match-state of every named query; changes whenever any
    // breakpoint flips, which is what gates a resize() pass.
    get_media_hash : function () {
      var mediaHash = '';
      for (var queryName in this.settings.named_queries) {
        mediaHash += matchMedia(this.settings.named_queries[queryName]).matches.toString();
      }
      return mediaHash;
    },

    // Re-evaluate scenarios on resize, but only when a media query actually
    // changed state (throttled to one check per 50ms).
    events : function () {
      var self = this, prevMediaHash;

      $(window)
        .off('.interchange')
        .on('resize.fndtn.interchange', self.throttle(function () {
          var currMediaHash = self.get_media_hash();
          if (currMediaHash !== prevMediaHash) {
            self.resize();
          }
          prevMediaHash = currMediaHash;
        }, 50));

      return this;
    },

    // Apply the winning scenario's directive to every cached element.
    resize : function () {
      var cache = this.cache;

      // Defer until the initial scans have finished populating the cache.
      if (!this.images_loaded || !this.nodes_loaded) {
        setTimeout($.proxy(this.resize, this), 50);
        return;
      }

      for (var uuid in cache) {
        if (cache.hasOwnProperty(uuid)) {
          var passed = this.results(uuid, cache[uuid]);
          if (passed) {
            this.settings.directives[passed.scenario[1]]
              .call(this, passed.el, passed.scenario[0], function () {
                // Normalize trigger(...) arguments to an array before
                // re-emitting the directive-named event on the element.
                if (arguments[0] instanceof Array) {
                  var args = arguments[0];
                } else {
                  var args = Array.prototype.slice.call(arguments, 0);
                }
                passed.el.trigger(passed.scenario[1], args);
              });
          }
        }
      }
    },

    // Return {el, scenario} for the last-listed scenario whose media query
    // currently matches, or false when none match.
    results : function (uuid, scenarios) {
      var count = scenarios.length;

      if (count > 0) {
        var el = this.S('[' + this.add_namespace('data-uuid') + '="' + uuid + '"]');

        while (count--) {
          var mq, rule = scenarios[count][2];
          if (this.settings.named_queries.hasOwnProperty(rule)) {
            mq = matchMedia(this.settings.named_queries[rule]);
          } else {
            mq = matchMedia(rule);
          }
          if (mq.matches) {
            return {el: el, scenario: scenarios[count]};
          }
        }
      }

      return false;
    },

    // Lazily (or forcibly) scan the DOM for interchange-annotated elements.
    load : function (type, force_update) {
      if (typeof this['cached_' + type] === 'undefined' || force_update) {
        this['update_' + type]();
      }
      return this['cached_' + type];
    },

    // Collect annotated <img> elements. NOTE(review): this also resets the
    // whole scenario cache, while update_nodes does not — presumably relying
    // on images being scanned first; confirm before reordering.
    update_images : function () {
      var images = this.S('img[' + this.data_attr + ']'),
          count = images.length,
          i = count,
          loaded_count = 0,
          data_attr = this.data_attr;

      this.cache = {};
      this.cached_images = [];
      this.images_loaded = (count === 0);

      while (i--) {
        loaded_count++;
        if (images[i]) {
          var str = images[i].getAttribute(data_attr) || '';
          if (str.length > 0) {
            this.cached_images.push(images[i]);
          }
        }
        if (loaded_count === count) {
          this.images_loaded = true;
          this.enhance('images');
        }
      }

      return this;
    },

    // Collect annotated non-image elements.
    update_nodes : function () {
      var nodes = this.S('[' + this.data_attr + ']').not('img'),
          count = nodes.length,
          i = count,
          loaded_count = 0,
          data_attr = this.data_attr;

      this.cached_nodes = [];
      this.nodes_loaded = (count === 0);

      while (i--) {
        loaded_count++;
        var str = nodes[i].getAttribute(data_attr) || '';
        if (str.length > 0) {
          this.cached_nodes.push(nodes[i]);
        }
        if (loaded_count === count) {
          this.nodes_loaded = true;
          this.enhance('nodes');
        }
      }

      return this;
    },

    // Parse and cache scenarios for every collected element, then trigger a
    // resize so the current breakpoint is applied immediately.
    enhance : function (type) {
      var i = this['cached_' + type].length;
      while (i--) {
        this.object($(this['cached_' + type][i]));
      }
      return $(window).trigger('resize');
    },

    // Normalize one raw [path, directive, media-query] triple.
    parse_params : function (path, directive, mq) {
      return [this.trim(path), this.convert_directive(directive), this.trim(mq)];
    },

    // Empty directive names fall back to the default "replace".
    convert_directive : function (directive) {
      var trimmed = this.trim(directive);
      if (trimmed.length > 0) {
        return trimmed;
      }
      return 'replace';
    },

    // Parse an element's raw "[path, (query)]" entries into scenario triples
    // and store them under a fresh uuid.
    object : function (el) {
      var raw_arr = this.parse_data_attr(el),
          scenarios = [],
          i = raw_arr.length;

      if (i > 0) {
        while (i--) {
          var split = raw_arr[i].split(/\((.*?)(\))$/);
          if (split.length > 1) {
            var cached_split = split[0].split(','),
                params = this.parse_params(cached_split[0], cached_split[1], split[1]);
            scenarios.push(params);
          }
        }
      }

      return this.store(el, scenarios);
    },

    // Register scenarios under a random uuid, reusing any existing entry.
    store : function (el, scenarios) {
      var uuid = this.random_str(),
          current_uuid = el.data(this.add_namespace('uuid', true));

      if (this.cache[current_uuid]) return this.cache[current_uuid];

      el.attr(this.add_namespace('data-uuid'), uuid);

      return this.cache[uuid] = scenarios;
    },

    trim : function (str) {
      if (typeof str === 'string') {
        return $.trim(str);
      }
      return str;
    },

    // Attribute name used for scanning; `init` truthy returns the bare
    // (non "data-"-prefixed) form.
    set_data_attr: function (init) {
      if (init) {
        if (this.namespace.length > 0) {
          return this.namespace + '-' + this.settings.load_attr;
        }
        return this.settings.load_attr;
      }
      if (this.namespace.length > 0) {
        return 'data-' + this.namespace + '-' + this.settings.load_attr;
      }
      return 'data-' + this.settings.load_attr;
    },

    // Split the raw attribute into bracketed entries, dropping fragments too
    // short to be a real scenario.
    parse_data_attr : function (el) {
      var raw = el.attr(this.attr_name()).split(/\[(.*?)\]/),
          i = raw.length,
          output = [];

      while (i--) {
        if (raw[i].replace(/[\W\d]+/, '').length > 4) {
          output.push(raw[i]);
        }
      }

      return output;
    },

    reflow : function () {
      this.load('images', true);
      this.load('nodes', true);
    }
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.interchange.js
foundation.interchange.js
;(function ($, window, document, undefined) { 'use strict'; Foundation.libs.offcanvas = { name : 'offcanvas', version : '5.2.1', settings : {}, init : function (scope, method, options) { this.events(); }, events : function () { var S = this.S; S(this.scope).off('.offcanvas') .on('click.fndtn.offcanvas', '.left-off-canvas-toggle', function (e) { e.preventDefault(); S(this).closest('.off-canvas-wrap').toggleClass('move-right'); }) .on('click.fndtn.offcanvas', '.exit-off-canvas', function (e) { e.preventDefault(); S(".off-canvas-wrap").removeClass("move-right"); }) .on('click.fndtn.offcanvas', '.left-off-canvas-menu a', function (e) { e.preventDefault(); var href = $(this).attr('href'); S('.off-canvas-wrap').on('transitionend webkitTransitionEnd oTransitionEnd', function(e) { window.location = href S('.off-canvas-wrap').off('transitionend webkitTransitionEnd oTransitionEnd'); }); S(".off-canvas-wrap").removeClass("move-right"); }) .on('click.fndtn.offcanvas', '.right-off-canvas-toggle', function (e) { e.preventDefault(); S(this).closest(".off-canvas-wrap").toggleClass("move-left"); }) .on('click.fndtn.offcanvas', '.exit-off-canvas', function (e) { e.preventDefault(); S(".off-canvas-wrap").removeClass("move-left"); }) .on('click.fndtn.offcanvas', '.right-off-canvas-menu a', function (e) { e.preventDefault(); var href = $(this).attr('href'); S('.off-canvas-wrap').on('transitionend webkitTransitionEnd oTransitionEnd', function(e) { window.location = href S('.off-canvas-wrap').off('transitionend webkitTransitionEnd oTransitionEnd'); }); S(".off-canvas-wrap").removeClass("move-left"); }); }, reflow : function () {} }; }(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.offcanvas.js
foundation.offcanvas.js
;(function ($, window, document, undefined) {
  'use strict';

  // Dropdown plugin: toggles absolutely-positioned dropdown panes anchored to
  // a trigger element, with optional hover behavior and RTL-aware placement.
  Foundation.libs.dropdown = {
    name : 'dropdown',
    version : '5.2.1',

    settings : {
      active_class: 'open',      // class marking a visible pane
      align: 'bottom',           // default placement relative to the trigger
      is_hover: false,           // open on hover instead of click
      opened: function(){},      // callback fired after a pane opens
      closed: function(){}       // callback fired after a pane closes
    },

    init : function (scope, method, options) {
      Foundation.inherit(this, 'throttle');
      this.bindings(method, options);
    },

    // Bind delegated click/hover handlers plus a throttled window resize
    // handler that keeps the open pane aligned with its trigger.
    events : function (scope) {
      var self = this, S = self.S;

      S(this.scope)
        .off('.dropdown')
        // Click on a trigger toggles its pane (unless hover mode is on and
        // the device is not touch-capable).
        .on('click.fndtn.dropdown', '[' + this.attr_name() + ']', function (e) {
          var settings = S(this).data(self.attr_name(true) + '-init') || self.settings;
          e.preventDefault();
          if (!settings.is_hover || Modernizr.touch) self.toggle(S(this));
        })
        // Hover-open: entering either the trigger or the pane cancels any
        // pending close; opens the pane when hover mode is enabled.
        .on('mouseenter.fndtn.dropdown', '[' + this.attr_name() + '], [' + this.attr_name() + '-content]', function (e) {
          var $this = S(this);
          clearTimeout(self.timeout);

          // Entered the trigger: its data attribute holds the pane's id.
          // Otherwise we are on the pane and must look up its trigger.
          if ($this.data(self.data_attr())) {
            var dropdown = S('#' + $this.data(self.data_attr())),
                target = $this;
          } else {
            var dropdown = $this;
            target = S("[" + self.attr_name() + "='" + dropdown.attr('id') + "']");
          }

          var settings = target.data(self.attr_name(true) + '-init') || self.settings;

          if(S(e.target).data(self.data_attr()) && settings.is_hover) {
            self.closeall.call(self);
          }

          if (settings.is_hover) self.open.apply(self, [dropdown, target]);
        })
        // Hover-close: leaving trigger or pane schedules a close after a
        // 150ms grace period so the pointer can travel between the two.
        .on('mouseleave.fndtn.dropdown', '[' + this.attr_name() + '], [' + this.attr_name() + '-content]', function (e) {
          var $this = S(this);
          self.timeout = setTimeout(function () {
            if ($this.data(self.data_attr())) {
              // Left the trigger: close its pane.
              var settings = $this.data(self.data_attr(true) + '-init') || self.settings;
              if (settings.is_hover) self.close.call(self, S('#' + $this.data(self.data_attr())));
            } else {
              // Left the pane: close it, reading settings off its trigger.
              var target = S('[' + self.attr_name() + '="' + S(this).attr('id') + '"]'),
                  settings = target.data(self.attr_name(true) + '-init') || self.settings;
              if (settings.is_hover) self.close.call(self, $this);
            }
          }.bind(this), 150);
        })
        // Any other click closes open panes — except clicks on a trigger (or
        // its child), or clicks inside a pane (unless they target a reveal
        // modal), which are allowed through / swallowed respectively.
        .on('click.fndtn.dropdown', function (e) {
          var parent = S(e.target).closest('[' + self.attr_name() + '-content]');

          if (S(e.target).data(self.data_attr()) || S(e.target).parent().data(self.data_attr())) {
            return;
          }
          if (!(S(e.target).data('revealId')) && (parent.length > 0 && (S(e.target).is('[' + self.attr_name() + '-content]') || $.contains(parent.first()[0], e.target)))) {
            e.stopPropagation();
            return;
          }

          self.close.call(self, S('[' + self.attr_name() + '-content]'));
        })
        // Relay the custom opened/closed DOM events to the user callbacks.
        .on('opened.fndtn.dropdown', '[' + self.attr_name() + '-content]', function () {
          self.settings.opened.call(this);
        })
        .on('closed.fndtn.dropdown', '[' + self.attr_name() + '-content]', function () {
          self.settings.closed.call(this);
        });

      S(window)
        .off('.dropdown')
        .on('resize.fndtn.dropdown', self.throttle(function () {
          self.resize.call(self);
        }, 50));

      // Align any pane that is already open at bind time.
      this.resize();
    },

    // Hide every currently-open pane in `dropdown` by parking it off-screen
    // (RTL-aware side), then fire the 'closed' event.
    close: function (dropdown) {
      var self = this;
      dropdown.each(function () {
        if (self.S(this).hasClass(self.settings.active_class)) {
          self.S(this)
            .css(Foundation.rtl ? 'right':'left', '-99999px')
            .removeClass(self.settings.active_class);
          self.S(this).trigger('closed', [dropdown]);
        }
      });
    },

    // Close every dropdown pane in scope.
    closeall: function() {
      var self = this;
      $.each(self.S('[' + this.attr_name() + '-content]'), function() {
        self.close.call(self, self.S(this))
      });
    },

    // Mark the pane active, position it against its trigger, fire 'opened'.
    open: function (dropdown, target) {
      this
        .css(dropdown
        .addClass(this.settings.active_class), target);
      dropdown.trigger('opened', [dropdown, target]);
    },

    // Data-attribute key for this plugin, namespaced when configured.
    data_attr: function () {
      if (this.namespace.length > 0) {
        return this.namespace + '-' + this.name;
      }
      return this.name;
    },

    // Toggle the pane belonging to `target`; closes sibling panes first so
    // at most one pane is open at a time.
    toggle : function (target) {
      var dropdown = this.S('#' + target.data(this.data_attr()));
      if (dropdown.length === 0) {
        // No dropdown found, not continuing
        return;
      }

      this.close.call(this, this.S('[' + this.attr_name() + '-content]').not(dropdown));

      if (dropdown.hasClass(this.settings.active_class)) {
        this.close.call(this, dropdown);
      } else {
        this.close.call(this, this.S('[' + this.attr_name() + '-content]'))
        this.open.call(this, dropdown, target);
      }
    },

    // Re-align the currently open pane with its trigger (window resize).
    resize : function () {
      var dropdown = this.S('[' + this.attr_name() + '-content].open'),
          target = this.S("[" + this.attr_name() + "='" + dropdown.attr('id') + "']");

      if (dropdown.length && target.length) {
        this.css(dropdown, target);
      }
    },

    // Position the pane: near-full-width under the trigger on small screens,
    // otherwise per the configured alignment.
    css : function (dropdown, target) {
      this.clear_idx();

      if (this.small()) {
        var p = this.dirs.bottom.call(dropdown, target);

        dropdown.attr('style', '').removeClass('drop-left drop-right drop-top').css({
          position : 'absolute',
          width: '95%',
          'max-width': 'none',
          top: p.top
        });

        dropdown.css(Foundation.rtl ? 'right':'left', '2.5%');
      } else {
        var settings = target.data(this.attr_name(true) + '-init') || this.settings;

        this.style(dropdown, target, settings);
      }

      return dropdown;
    },

    // Apply the alignment calculator's CSS to the pane.
    style : function (dropdown, target, settings) {
      var css = $.extend({position: 'absolute'}, this.dirs[settings.align].call(dropdown, target, settings));

      dropdown.attr('style', '').css(css);
    },

    // Placement calculators; each returns a CSS property object.
    // NOTE: `this` inside each calculator is the dropdown pane (they are
    // invoked via .call(dropdown, ...)), and `t` is the trigger.
    dirs : {
      // Offset of the trigger `t` relative to the pane's offset parent.
      _base : function (t) {
        var o_p = this.offsetParent(),
            o = o_p.offset(),
            p = t.offset();

        p.top -= o.top;
        p.left -= o.left;
        return p;
      },
      // Place the pane above the trigger.
      top: function (t, s) {
        var self = Foundation.libs.dropdown,
            p = self.dirs._base.call(this, t),
            pip_offset_base = (t.outerWidth() / 2) - 8;

        this.addClass('drop-top');

        if (t.outerWidth() < this.outerWidth() || self.small()) {
          self.adjust_pip(pip_offset_base, p);
        }

        if (Foundation.rtl) {
          return {left: p.left - this.outerWidth() + t.outerWidth(), top: p.top - this.outerHeight()};
        }

        return {left: p.left, top: p.top - this.outerHeight()};
      },
      // Place the pane below the trigger (default).
      bottom: function (t, s) {
        var self = Foundation.libs.dropdown,
            p = self.dirs._base.call(this, t),
            pip_offset_base = (t.outerWidth() / 2) - 8;

        if (t.outerWidth() < this.outerWidth() || self.small()) {
          self.adjust_pip(pip_offset_base, p);
        }

        // NOTE(review): checks self.rtl here, unlike top() which checks
        // Foundation.rtl — presumably meant to be the same flag; confirm.
        if (self.rtl) {
          return {left: p.left - this.outerWidth() + t.outerWidth(), top: p.top + t.outerHeight()};
        }

        return {left: p.left, top: p.top + t.outerHeight()};
      },
      // Place the pane to the left of the trigger.
      left: function (t, s) {
        var p = Foundation.libs.dropdown.dirs._base.call(this, t);

        this.addClass('drop-left');

        return {left: p.left - this.outerWidth(), top: p.top};
      },
      // Place the pane to the right of the trigger.
      right: function (t, s) {
        var p = Foundation.libs.dropdown.dirs._base.call(this, t);

        this.addClass('drop-right');

        return {left: p.left + t.outerWidth(), top: p.top};
      }
    },

    // Insert :before/:after stylesheet rules that horizontally shift the
    // pane's pointer ("pip") so it stays aligned with the trigger.
    adjust_pip : function (pip_offset_base, p) {
      var sheet = Foundation.stylesheet;

      if (this.small()) {
        pip_offset_base += p.left - 8;
      }

      // Remember where the two rules were appended so clear_idx can remove
      // them before the next positioning pass.
      this.rule_idx = sheet.cssRules.length;

      var sel_before = '.f-dropdown.open:before',
          sel_after = '.f-dropdown.open:after',
          css_before = 'left: ' + pip_offset_base + 'px;',
          css_after = 'left: ' + (pip_offset_base - 1) + 'px;';

      if (sheet.insertRule) {
        sheet.insertRule([sel_before, '{', css_before, '}'].join(' '), this.rule_idx);
        sheet.insertRule([sel_after, '{', css_after, '}'].join(' '), this.rule_idx + 1);
      } else {
        // Legacy IE fallback.
        sheet.addRule(sel_before, css_before, this.rule_idx);
        sheet.addRule(sel_after, css_after, this.rule_idx + 1);
      }
    },

    // Remove the pip rules added by adjust_pip: deleting at the same index
    // twice removes both consecutive rules.
    clear_idx : function () {
      var sheet = Foundation.stylesheet;

      if (this.rule_idx) {
        sheet.deleteRule(this.rule_idx);
        sheet.deleteRule(this.rule_idx);
        delete this.rule_idx;
      }
    },

    // True only on "small" screens (small query matches, medium does not).
    small : function () {
      return matchMedia(Foundation.media_queries.small).matches &&
        !matchMedia(Foundation.media_queries.medium).matches;
    },

    // Unbind all dropdown handlers.
    off: function () {
      this.S(this.scope).off('.fndtn.dropdown');
      this.S('html, body').off('.fndtn.dropdown');
      this.S(window).off('.fndtn.dropdown');
      this.S('[data-dropdown-content]').off('.fndtn.dropdown');
    },

    reflow : function () {}
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.dropdown.js
foundation.dropdown.js
;(function ($, window, document, undefined) {
  'use strict';

  // Foundation 5 range-slider plugin.
  // Drag handling: pointer-down marks a handle active, pointer-move converts
  // the cursor position to a normalized value, pointer-up releases the handle.
  Foundation.libs.slider = {
    name : 'slider',

    version : '5.2.1',

    settings: {
      start: 0,                  // minimum value of the range
      end: 100,                  // maximum value of the range
      step: 1,                   // snap increment
      initial: null,             // starting value; null -> midpoint (see set_initial_position)
      display_selector: '',      // optional selector of elements mirroring the value
      on_change: function(){}    // user callback fired on every value change
    },

    cache : {},

    init : function (scope, method, options) {
      Foundation.inherit(this,'throttle');
      this.bindings(method, options);
      this.reflow();
    },

    // Bind drag events on the document scope (delegated) plus a throttled
    // window resize that re-measures every slider.
    events : function() {
      var self = this;

      $(this.scope)
        .off('.slider')
        .on('mousedown.fndtn.slider touchstart.fndtn.slider pointerdown.fndtn.slider',
          '[' + self.attr_name() + '] .range-slider-handle', function(e) {
          if (!self.cache.active) {
            self.set_active_slider($(e.target));
          }
        })
        .on('mousemove.fndtn.slider touchmove.fndtn.slider pointermove.fndtn.slider', function(e) {
          if (!!self.cache.active) {
            e.preventDefault();
            self.calculate_position(self.cache.active, e.pageX || e.originalEvent.touches[0].clientX || e.currentPoint.x);
          }
        })
        .on('mouseup.fndtn.slider touchend.fndtn.slider pointerup.fndtn.slider', function(e) {
          self.remove_active_slider();
        })
        .on('change.fndtn.slider', function(e) {
          // BUG FIX: the original statement was `self.settings.on_change;`,
          // which only *references* the callback and never invokes it, so
          // user-supplied on_change handlers never ran. Invoke it.
          self.settings.on_change();
        });

      self.S(window)
        .on('resize.fndtn.slider', self.throttle(function(e) {
          self.reflow();
        }, 300));
    },

    set_active_slider : function($handle) {
      this.cache.active = $handle;
    },

    remove_active_slider : function() {
      this.cache.active = null;
    },

    // Convert an absolute cursor x-coordinate into a normalized slider value
    // and push it to the UI on the next animation frame.
    calculate_position : function($handle, cursor_x) {
      var self = this,
          settings = $.extend({}, self.settings, self.data_options($handle.parent())),
          // cleanup: the original also read 'handle_w'/'handle_o' here but
          // never used them; only the bar geometry is needed.
          bar_w = $.data($handle[0], 'bar_w'),
          bar_o = $.data($handle[0], 'bar_o');

      requestAnimationFrame(function(){
        var pct = self.limit_to((((cursor_x)-bar_o)/bar_w),0,1),
            norm = self.normalized_value(pct, settings.start, settings.end, settings.step);
        self.set_ui($handle, norm);
      });
    },

    // Reflect `value` in the DOM: translate the handle, grow the active
    // segment, store the value on the bar, sync the hidden input and any
    // display elements, and fire the 'change' event.
    set_ui : function($handle, value) {
      var settings = $.extend({}, this.settings, this.data_options($handle.parent())),
          handle_w = $.data($handle[0], 'handle_w'),
          bar_w = $.data($handle[0], 'bar_w'),
          norm_pct = this.normalized_percentage(value, settings.start, settings.end),
          handle_offset = norm_pct*(bar_w-handle_w)-1,
          progress_bar_width = norm_pct*100;

      this.set_translate($handle, handle_offset);
      $handle.siblings('.range-slider-active-segment').css('width', progress_bar_width+'%');
      $handle.parent().attr(this.attr_name(), value);
      $handle.parent().trigger('change');
      $handle.parent().children('input[type=hidden]').val(value);

      // BUG FIX: the original tested `settings.input_id != ''`, but `input_id`
      // is not a setting of this plugin (always undefined, so the guard was
      // always true). The intended setting is `display_selector`.
      if (settings.display_selector != '') {
        $(settings.display_selector).each(function(){
          if (this.hasOwnProperty('value')) {
            $(this).val(value);
          } else {
            $(this).text(value);
          }
        });
      }
    },

    normalized_percentage : function(val, start, end) {
      return val/(end - start);
    },

    // Snap a 0..1 fraction of the range onto the nearest step multiple.
    normalized_value : function(val, start, end, step) {
      var range = end - start,
          step = step,
          point = val*range,
          mod = (point-(point%step)) / step,
          rem = point % step,
          round = ( rem >= step*0.5 ? step : 0);
      return (mod*step + round);
    },

    // Apply a CSS translate (with vendor prefixes) along X, or Y if vertical.
    set_translate : function(ele, offset, vertical) {
      if (vertical) {
        $(ele)
          .css('-webkit-transform', 'translateY('+offset+'px)')
          .css('-moz-transform', 'translateY('+offset+'px)')
          .css('-ms-transform', 'translateY('+offset+'px)')
          .css('-o-transform', 'translateY('+offset+'px)')
          .css('transform', 'translateY('+offset+'px)');
      } else {
        $(ele)
          .css('-webkit-transform', 'translateX('+offset+'px)')
          .css('-moz-transform', 'translateX('+offset+'px)')
          .css('-ms-transform', 'translateX('+offset+'px)')
          .css('-o-transform', 'translateX('+offset+'px)')
          .css('transform', 'translateX('+offset+'px)');
      }
    },

    limit_to : function(val, min, max) {
      return Math.min(Math.max(val, min), max);
    },

    // Cache bar/handle geometry and merged settings on the handle element so
    // drag math does not have to re-measure on every move.
    initialize_settings : function(handle) {
      $.data(handle, 'bar', $(handle).parent());
      $.data(handle, 'bar_o', $(handle).parent().offset().left);
      $.data(handle, 'bar_w', $(handle).parent().outerWidth());
      $.data(handle, 'handle_o', $(handle).offset().left);
      $.data(handle, 'handle_w', $(handle).outerWidth());
      $.data(handle, 'settings', $.extend({}, this.settings, this.data_options($(handle).parent())));
    },

    // Position a slider at its configured initial value, defaulting to the
    // step-aligned midpoint of the range.
    set_initial_position : function($ele) {
      var settings = $.data($ele.children('.range-slider-handle')[0], 'settings'),
          initial = (!!settings.initial ? settings.initial : Math.floor((settings.end-settings.start)*0.5/settings.step)*settings.step),
          $handle = $ele.children('.range-slider-handle');
      this.set_ui($handle, initial);
    },

    // Programmatically set every slider in scope to `value`, then reflow.
    set_value : function(value) {
      var self = this;
      $('[' + self.attr_name() + ']', this.scope).each(function(){
        $(this).attr(self.attr_name(), value);
      });
      if (!!$(this.scope).attr(self.attr_name())) {
        $(this.scope).attr(self.attr_name(), value);
      }
      self.reflow();
    },

    // Re-measure every slider and restore its current (or initial) position.
    reflow : function() {
      var self = this;
      self.S('[' + this.attr_name() + ']').each(function() {
        var handle = $(this).children('.range-slider-handle')[0],
            val = $(this).attr(self.attr_name());
        self.initialize_settings(handle);

        if (val) {
          self.set_ui($(handle), parseInt(val));
        } else {
          self.set_initial_position($(this));
        }
      });
    }
  };

}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.slider.js
foundation.slider.js
;(function ($, window, document, undefined) {
  'use strict';

  var noop = function() {};

  // Orbit carousel instance. Builds the surrounding markup (arrows, timer,
  // bullets, slide counter), drives slide transitions via CSSAnimation, and
  // handles touch-swipe, timer auto-advance, and deep links.
  var Orbit = function(el, settings) {
    // Don't reinitialize plugin
    if (el.hasClass(settings.slides_container_class)) {
      return this;
    }

    var self = this,
        container,
        slides_container = el,
        number_container,
        bullets_container,
        timer_container,
        idx = 0,                        // index of the currently active slide
        animate,
        adjust_height_after = false;

    self.cache = {};

    self.slides = function() {
      return slides_container.children(settings.slide_selector);
    };

    self.slides().first().addClass(settings.active_slide_class);

    // Sync the "N of M" counter and bullet highlight with `index`.
    self.update_slide_number = function(index) {
      if (settings.slide_number) {
        number_container.find('span:first').text(parseInt(index)+1);
        number_container.find('span:last').text(self.slides().length);
      }
      if (settings.bullets) {
        bullets_container.children().removeClass(settings.bullets_active_class);
        $(bullets_container.children().get(index)).addClass(settings.bullets_active_class);
      }
    };

    // Highlight any external [data-orbit-link] element tied to slide `index`.
    self.update_active_link = function(index) {
      var link = $('[data-orbit-link="'+self.slides().eq(index).attr('data-orbit-slide')+'"]');
      link.siblings().removeClass(settings.bullets_active_class);
      link.addClass(settings.bullets_active_class);
    };

    // Wrap the slide list and append navigation arrows, timer, slide counter
    // and bullets, per settings.
    self.build_markup = function() {
      slides_container.wrap('<div class="'+settings.container_class+'"></div>');
      container = slides_container.parent();
      slides_container.addClass(settings.slides_container_class);

      if (settings.navigation_arrows) {
        container.append($('<a href="#"><span></span></a>').addClass(settings.prev_class));
        container.append($('<a href="#"><span></span></a>').addClass(settings.next_class));
      }

      if (settings.timer) {
        timer_container = $('<div>').addClass(settings.timer_container_class);
        timer_container.append('<span>');
        timer_container.append($('<div>').addClass(settings.timer_progress_class));
        timer_container.addClass(settings.timer_paused_class);
        container.append(timer_container);
      }

      if (settings.slide_number) {
        number_container = $('<div>').addClass(settings.slide_number_class);
        number_container.append('<span></span> ' + settings.slide_number_text + ' <span></span>');
        container.append(number_container);
      }

      if (settings.bullets) {
        bullets_container = $('<ol>').addClass(settings.bullets_container_class);
        container.append(bullets_container);
        bullets_container.wrap('<div class="orbit-bullets-container"></div>');
        self.slides().each(function(idx, el) {
          var bullet = $('<li>').attr('data-orbit-slide', idx);
          bullets_container.append(bullet);
        });
      }

      if (settings.stack_on_small) {
        container.addClass(settings.stack_on_small_class);
      }
    };

    // Resolve the direction ('next'/'prev'), wrap-around target index and the
    // current/next slide elements for a requested index. Returns false when
    // the target is out of range and circular wrap is disabled.
    self._prepare_direction = function(next_idx, current_direction) {
      var dir = 'next';
      if (next_idx <= idx) { dir = 'prev'; }

      if (settings.animation === 'slide') {
        setTimeout(function(){
          slides_container.removeClass("swipe-prev swipe-next");
          if (dir === 'next') {slides_container.addClass("swipe-next");}
          else if (dir === 'prev') {slides_container.addClass("swipe-prev");}
        },0);
      }

      var slides = self.slides();
      if (next_idx >= slides.length) {
        if (!settings.circular) return false;
        next_idx = 0;
      } else if (next_idx < 0) {
        if (!settings.circular) return false;
        next_idx = slides.length - 1;
      }
      var current = $(slides.get(idx)),
          next = $(slides.get(next_idx));
      return [dir, current, next, next_idx];
    };

    // Transition to slide `next_idx`. Guards against re-entrancy via
    // cache.animating, fires before/after hooks, and animates container
    // height when variable_height is on.
    self._goto = function(next_idx, start_timer) {
      if (next_idx === null) {return false;}
      if (self.cache.animating) {return false;}
      if (next_idx === idx) {return false;}
      if (typeof self.cache.timer === 'object') {self.cache.timer.restart();}
      var slides = self.slides();
      self.cache.animating = true;
      var res = self._prepare_direction(next_idx),
          dir = res[0],
          current = res[1],
          next = res[2],
          next_idx = res[3];
      slides_container.trigger('before-slide-change.fndtn.orbit');
      settings.before_slide_change();
      idx = next_idx;
      current.css("transitionDuration", settings.animation_speed+"ms");
      next.css("transitionDuration", settings.animation_speed+"ms");
      var callback = function() {
        var unlock = function() {
          if (start_timer === true) {self.cache.timer.restart();}
          self.update_slide_number(idx);
          next.addClass(settings.active_slide_class);
          self.update_active_link(next_idx);
          slides_container.trigger('after-slide-change.fndtn.orbit',[{slide_number: idx, total_slides: slides.length}]);
          settings.after_slide_change(idx, slides.length);
          setTimeout(function(){ self.cache.animating = false; }, 100);
        };
        if (slides_container.height() != next.height() && settings.variable_height) {
          slides_container.animate({'height': next.height()}, 250, 'linear', unlock);
        } else {
          unlock();
        }
      };

      if (slides.length === 1) {callback(); return false;}

      var start_animation = function() {
        if (dir === 'next') {animate.next(current, next, callback);}
        if (dir === 'prev') {animate.prev(current, next, callback);}
      };

      if (next.height() > slides_container.height() && settings.variable_height) {
        slides_container.animate({'height': next.height()}, 250, 'linear', start_animation);
      } else {
        start_animation();
      }
    };

    self.next = function(e) {
      e.stopImmediatePropagation();
      e.preventDefault();
      self._prepare_direction(idx + 1);
      setTimeout(function(){ self._goto(idx + 1); }, 100);
    };

    self.prev = function(e) {
      e.stopImmediatePropagation();
      e.preventDefault();
      self._prepare_direction(idx - 1);
      setTimeout(function(){ self._goto(idx - 1) }, 100);
    };

    // Click handler for external [data-orbit-link] elements.
    self.link_custom = function(e) {
      e.preventDefault();
      var link = $(this).attr('data-orbit-link');
      if ((typeof link === 'string') && (link = $.trim(link)) != "") {
        var slide = container.find('[data-orbit-slide='+link+']');
        if (slide.index() != -1) {
          setTimeout(function(){ self._goto(slide.index()); },100);
        }
      }
    };

    // Click handler for bullets / in-carousel [data-orbit-slide] elements;
    // accepts either a numeric index or a named slide.
    self.link_bullet = function(e) {
      var index = $(this).attr('data-orbit-slide');
      if ((typeof index === 'string') && (index = $.trim(index)) != "") {
        if(isNaN(parseInt(index))) {
          var slide = container.find('[data-orbit-slide='+index+']');
          if (slide.index() != -1) {
            setTimeout(function(){ self._goto(slide.index() + 1); },100);
          }
        } else {
          setTimeout(function(){ self._goto(parseInt(index)); },100);
        }
      }
    }

    self.timer_callback = function() {
      self._goto(idx + 1, true);
    }

    // Fix the container height: current slide's height, or the tallest slide
    // when variable_height is off.
    self.compute_dimensions = function() {
      var current = $(self.slides().get(idx));
      var h = current.height();
      if (!settings.variable_height) {
        self.slides().each(function(){
          if ($(this).height() > h) { h = $(this).height(); }
        });
      }
      slides_container.height(h);
    };

    self.create_timer = function() {
      var t = new Timer(
        container.find('.'+settings.timer_container_class),
        settings,
        self.timer_callback
      );
      return t;
    };

    self.stop_timer = function() {
      if (typeof self.cache.timer === 'object') self.cache.timer.stop();
    };

    self.toggle_timer = function() {
      var t = container.find('.'+settings.timer_container_class);
      if (t.hasClass(settings.timer_paused_class)) {
        if (typeof self.cache.timer === 'undefined') {self.cache.timer = self.create_timer();}
        self.cache.timer.start();
      } else {
        if (typeof self.cache.timer === 'object') {self.cache.timer.stop();}
      }
    };

    // Build markup, create the timer, pick the animation strategy and wire up
    // all click / touch / hover / resize handlers.
    self.init = function() {
      self.build_markup();
      if (settings.timer) {
        self.cache.timer = self.create_timer();
        Foundation.utils.image_loaded(this.slides().children('img'), self.cache.timer.start);
      }
      // animate = new FadeAnimation(settings, slides_container);
      // if (settings.animation === 'slide')
      //   animate = new SlideAnimation(settings, slides_container);
      if(settings.animation === 'fade') {slides_container.addClass('fade');}
      animate = new CSSAnimation(settings, slides_container);
      container.on('click', '.'+settings.next_class, self.next);
      container.on('click', '.'+settings.prev_class, self.prev);
      container.on('click', '[data-orbit-slide]', self.link_bullet);
      container.on('click', self.toggle_timer);
      if (settings.swipe) {
        slides_container.on('touchstart.fndtn.orbit', function(e) {
          if (self.cache.animating) {return;}
          if (!e.touches) {e = e.originalEvent;}
          self.cache.start_page_x = e.touches[0].pageX;
          self.cache.start_page_y = e.touches[0].pageY;
          self.cache.start_time = (new Date()).getTime();
          self.cache.delta_x = 0;
          self.cache.is_scrolling = null;
          self.cache.direction = null;
          self.stop_timer(); // does not appear to prevent callback from occurring
        })
        .on('touchmove.fndtn.orbit', function(e) {
          if (Math.abs(self.cache.delta_x) > 5) {
            e.preventDefault();
            e.stopPropagation();
          }
          if (self.cache.animating) {return;}
          requestAnimationFrame(function(){
            if (!e.touches) { e = e.originalEvent; }
            // Ignore pinch/zoom events
            if(e.touches.length > 1 || e.scale && e.scale !== 1) return;
            self.cache.delta_x = e.touches[0].pageX - self.cache.start_page_x;
            if (self.cache.is_scrolling === null) {
              // First move decides: vertical movement dominating => page scroll,
              // not a swipe.
              self.cache.is_scrolling = !!( self.cache.is_scrolling || Math.abs(self.cache.delta_x) < Math.abs(e.touches[0].pageY - self.cache.start_page_y) );
            }
            if (self.cache.is_scrolling) { return; }
            var direction = (self.cache.delta_x < 0) ? (idx+1) : (idx-1);
            if (self.cache.direction !== direction) {
              var res = self._prepare_direction(direction);
              self.cache.direction = direction;
              self.cache.dir = res[0];
              self.cache.current = res[1];
              self.cache.next = res[2];
            }
            if (settings.animation === 'slide') {
              // Drag the current and incoming slides with the finger.
              var offset, next_offset;
              offset = (self.cache.delta_x / container.width()) * 100;
              if (offset >= 0) {next_offset = -(100 - offset);}
              else {next_offset = 100 + offset;}
              self.cache.current.css("transform","translate3d("+offset+"%,0,0)");
              self.cache.next.css("transform","translate3d("+next_offset+"%,0,0)");
            }
          });
        })
        .on('touchend.fndtn.orbit', function(e) {
          if (self.cache.animating) {return;}
          e.preventDefault();
          e.stopPropagation();
          setTimeout(function(){
            self._goto(self.cache.direction);
          }, 50);
        });
      }
      container.on('mouseenter.fndtn.orbit', function(e) {
        if (settings.timer && settings.pause_on_hover) {
          self.stop_timer();
        }
      })
      .on('mouseleave.fndtn.orbit', function(e) {
        if (settings.timer && settings.resume_on_mouseout) {
          self.cache.timer.start();
        }
      });
      $(document).on('click', '[data-orbit-link]', self.link_custom);
      $(window).on('resize', self.compute_dimensions);
      Foundation.utils.image_loaded(this.slides().children('img'), self.compute_dimensions);
      Foundation.utils.image_loaded(this.slides().children('img'), function() {
        container.prev('.preloader').css('display', 'none');
        self.update_slide_number(0);
        self.update_active_link(0);
        slides_container.trigger('ready.fndtn.orbit');
      });
    };

    self.init();
  };

  // Countdown timer that drives auto-advance and animates the progress bar.
  // `left` is the remaining time in ms; -1 means "not started".
  var Timer = function(el, settings, callback) {
    var self = this,
        duration = settings.timer_speed,
        progress = el.find('.'+settings.timer_progress_class),
        start,
        timeout,
        left = -1;

    // Replace the progress element to reset its CSS animation state.
    this.update_progress = function(w) {
      var new_progress = progress.clone();
      new_progress.attr('style', '');
      new_progress.css('width', w+'%');
      progress.replaceWith(new_progress);
      progress = new_progress;
    };

    this.restart = function() {
      clearTimeout(timeout);
      el.addClass(settings.timer_paused_class);
      left = -1;
      self.update_progress(0);
      self.start();
    };

    this.start = function() {
      if (!el.hasClass(settings.timer_paused_class)) {return true;}
      left = (left === -1) ? duration : left;
      el.removeClass(settings.timer_paused_class);
      start = new Date().getTime();
      progress.animate({'width': '100%'}, left, 'linear');
      timeout = setTimeout(function() {
        self.restart();
        callback();
      }, left);
      el.trigger('timer-started.fndtn.orbit')
    };

    this.stop = function() {
      if (el.hasClass(settings.timer_paused_class)) {return true;}
      clearTimeout(timeout);
      el.addClass(settings.timer_paused_class);
      var end = new Date().getTime();
      left = left - (end - start);
      var w = 100 - ((left / duration) * 100);
      self.update_progress(w);
      el.trigger('timer-stopped.fndtn.orbit');
    };
  };

  // CSS-transition based slide animation; relies on the animate-in /
  // animate-out classes defined in Foundation's orbit stylesheet.
  var CSSAnimation = function(settings, container) {
    var animation_end = "webkitTransitionEnd otransitionend oTransitionEnd msTransitionEnd transitionend";

    this.next = function(current, next, callback) {
      next.on(animation_end, function(e){
        next.unbind(animation_end);
        current.removeClass("active animate-out");
        next.removeClass("animate-in");
        callback();
      });
      container.children().css({
        "transform":"",
        "transitionDuration":""
      });
      current.addClass("animate-out");
      next.addClass("animate-in");
    };

    this.prev = function(current, prev, callback) {
      prev.on(animation_end, function(e){
        prev.unbind(animation_end);
        current.removeClass("active animate-out");
        prev.removeClass("animate-in");
        callback();
      });
      current.css({"transform":"", "transitionDuration":""}).addClass("animate-out");
      prev.css({"transform":"", "transitionDuration":""}).addClass("animate-in");
    };
  };

  Foundation.libs = Foundation.libs || {};

  Foundation.libs.orbit = {
    name: 'orbit',

    version: '5.2.1',

    settings: {
      animation: 'slide',
      timer_speed: 10000,
      pause_on_hover: true,
      resume_on_mouseout: false,
      animation_speed: 500,
      stack_on_small: false,
      navigation_arrows: true,
      slide_number: true,
      slide_number_text: 'of',
      container_class: 'orbit-container',
      stack_on_small_class: 'orbit-stack-on-small',
      next_class: 'orbit-next',
      prev_class: 'orbit-prev',
      timer_container_class: 'orbit-timer',
      timer_paused_class: 'paused',
      timer_progress_class: 'orbit-progress',
      slides_container_class: 'orbit-slides-container',
      slide_selector: '*',
      bullets_container_class: 'orbit-bullets',
      bullets_active_class: 'active',
      slide_number_class: 'orbit-slide-number',
      caption_class: 'orbit-caption',
      active_slide_class: 'active',
      orbit_transition_class: 'orbit-transitioning',
      bullets: true,
      circular: true,
      timer: true,
      variable_height: false,
      swipe: true,
      before_slide_change: noop,
      after_slide_change: noop
    },

    init : function (scope, method, options) {
      var self = this;
      this.bindings(method, options);
    },

    events : function (instance) {
      var orbit_instance = new Orbit(this.S(instance), this.S(instance).data('orbit-init'));
      // BUG FIX: the original used `self.name` here, but no `self` variable is
      // declared in this function, so it resolved to the global `window.self`
      // and the instance was stored under `window.name + '-instance'` instead
      // of 'orbit-instance'. `reflow` below looks the instance up under
      // `self.name + '-instance'` with a properly declared `self = this`, so
      // the lookup always missed and reflow crashed. Use `this.name`.
      this.S(instance).data(this.name + '-instance', orbit_instance);
    },

    reflow : function () {
      var self = this;

      if (self.S(self.scope).is('[data-orbit]')) {
        var $el = self.S(self.scope);
        var instance = $el.data(self.name + '-instance');
        instance.compute_dimensions();
      } else {
        self.S('[data-orbit]', self.scope).each(function(idx, el) {
          var $el = self.S(el);
          var opts = self.data_options($el);
          var instance = $el.data(self.name + '-instance');
          instance.compute_dimensions();
        });
      }
    }
  };

}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.orbit.js
foundation.orbit.js
;(function ($, window, document, undefined) {
  'use strict';

  // Foundation 5 tabs plugin. Supports optional deep linking: the location
  // hash can select a tab (or an element inside a tab's content pane), and
  // hash changes (e.g. browser Back) re-sync the active tab.
  Foundation.libs.tab = {
    name : 'tab',

    version : '5.2.1',

    settings : {
      active_class: 'active',          // class applied to the active dd/pane
      callback : function () {},       // invoked with the tab after toggling
      deep_linking: false,             // sync active tab with location.hash
      scroll_to_content: true          // scroll to pane/anchor on deep link
    },

    // Hashes of the tabs marked active in the markup at init time; used as
    // the fallback selection when the location hash is empty.
    default_tab_hashes: [],

    init : function (scope, method, options) {
      var self = this,
          S = this.S;

      this.bindings(method, options);
      this.handle_location_hash_change();

      // Store the default active tabs which will be referenced when the
      // location hash is absent, as in the case of navigating the tabs and
      // returning to the first viewing via the browser Back button.
      S('[' + this.attr_name() + '] > dd.active > a', this.scope).each(function () {
        self.default_tab_hashes.push(this.hash);
      });
    },

    events : function () {
      var self = this,
          S = this.S;

      // Click event: tab title
      S(this.scope).off('.tab').on('click.fndtn.tab', '[' + this.attr_name() + '] > dd > a', function (e) {
        e.preventDefault();
        e.stopPropagation();
        self.toggle_active_tab(S(this).parent());
      });

      // Location hash change event
      S(window).on('hashchange.fndtn.tab', function (e) {
        e.preventDefault();
        self.handle_location_hash_change();
      });
    },

    // Re-select the active tab from location.hash (when deep linking is on).
    // The hash may name a tab content pane directly, or any element inside a
    // pane; an empty hash falls back to the defaults captured in init.
    handle_location_hash_change : function () {
      var self = this,
          S = this.S;

      S('[' + this.attr_name() + ']', this.scope).each(function () {
        var settings = S(this).data(self.attr_name(true) + '-init');
        if (settings.deep_linking) {
          // Match the location hash to a label
          var hash = self.scope.location.hash;
          if (hash != '') {
            // Check whether the location hash references a tab content div or
            // another element on the page (inside or outside the tab content div)
            var hash_element = S(hash);
            if (hash_element.hasClass('content') && hash_element.parent().hasClass('tab-content')) {
              // Tab content div
              self.toggle_active_tab($('[' + self.attr_name() + '] > dd > a[href=' + hash + ']').parent());
            } else {
              // Not the tab content div. If inside the tab content, find the
              // containing tab and toggle it as active.
              var hash_tab_container_id = hash_element.closest('.content').attr('id');
              if (hash_tab_container_id != undefined) {
                self.toggle_active_tab($('[' + self.attr_name() + '] > dd > a[href=#' + hash_tab_container_id + ']').parent(), hash);
              }
            }
          } else {
            // Reference the default tab hashes which were initialized in the init function
            for (var ind in self.default_tab_hashes) {
              self.toggle_active_tab($('[' + self.attr_name() + '] > dd > a[href=' + self.default_tab_hashes[ind] + ']').parent());
            }
          }
        }
      });
    },

    // Activate `tab` (a dd element) and its content pane; when deep linking
    // is on, also update location.hash and manage scroll position.
    // `location_hash`, if given, is the hash to write instead of the pane id.
    toggle_active_tab: function (tab, location_hash) {
      var S = this.S,
          tabs = tab.closest('[' + this.attr_name() + ']'),
          anchor = tab.children('a').first(),
          target_hash = '#' + anchor.attr('href').split('#')[1],
          target = S(target_hash),
          siblings = tab.siblings(),
          settings = tabs.data(this.attr_name(true) + '-init');

      // allow usage of data-tab-content attribute instead of href
      // NOTE(review): `this` here is the plugin object, not the tab element —
      // this branch looks like it never matches; confirm against upstream.
      if (S(this).data(this.data_attr('tab-content'))) {
        target_hash = '#' + S(this).data(this.data_attr('tab-content')).split('#')[1];
        target = S(target_hash);
      }

      if (settings.deep_linking) {
        // Get the scroll Y position prior to moving to the hash ID
        var cur_ypos = $('body,html').scrollTop();

        // Update the location hash to preserve browser history
        // Note that the hash does not need to correspond to the
        // tab content ID anchor; it can be an ID inside or outside of the tab
        // content div.
        if (location_hash != undefined) {
          window.location.hash = location_hash;
        } else {
          window.location.hash = target_hash;
        }

        if (settings.scroll_to_content) {
          // If the user is requesting the content of a tab, then scroll to the
          // top of the title area; otherwise, scroll to the element within
          // the content area as defined by the hash value.
          if (location_hash == undefined || location_hash == target_hash) {
            tab.parent()[0].scrollIntoView();
          } else {
            S(target_hash)[0].scrollIntoView();
          }
        } else {
          // Adjust the scrollbar to the Y position prior to setting the hash
          // Only do this for the tab content anchor, otherwise there will be
          // conflicts with in-tab anchor links nested in the tab-content div
          if (location_hash == undefined || location_hash == target_hash) {
            $('body,html').scrollTop(cur_ypos);
          }
        }
      }

      // WARNING: The activation and deactivation of the tab content must
      // occur after the deep linking in order to properly refresh the browser
      // window (notably in Chrome).
      tab.addClass(settings.active_class).triggerHandler('opened');
      siblings.removeClass(settings.active_class);
      target.siblings().removeClass(settings.active_class).end().addClass(settings.active_class);
      settings.callback(tab);
      target.triggerHandler('toggled', [tab]);
      tabs.triggerHandler('toggled', [target]);
    },

    // Prefix `str` with the configured namespace, if any.
    data_attr: function (str) {
      if (this.namespace.length > 0) {
        return this.namespace + '-' + str;
      }

      return str;
    },

    off : function () {},

    reflow : function () {}
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.tab.js
foundation.tab.js
;(function ($, window, document, undefined) {
  'use strict';

  // Foundation 5 reveal (modal) plugin. Opens/closes modals via
  // [data-reveal-id] triggers, optionally loading content over AJAX, with
  // pop/fade CSS animations and a shared background overlay.
  Foundation.libs.reveal = {
    name : 'reveal',

    version : '5.2.1',

    // Re-entrancy guard: true while an open/close animation is in flight so a
    // second trigger cannot start a conflicting transition.
    locked : false,

    settings : {
      animation: 'fadeAndPop',                    // see getAnimationData below
      animation_speed: 250,                       // ms
      close_on_background_click: true,
      close_on_esc: true,
      dismiss_modal_class: 'close-reveal-modal',  // elements that close the modal
      bg_class: 'reveal-modal-bg',
      open: function(){},                         // lifecycle callbacks
      opened: function(){},
      close: function(){},
      closed: function(){},
      bg : $('.reveal-modal-bg'),                 // cached background element
      css : {
        open : {
          'opacity': 0,
          'visibility': 'visible',
          'display' : 'block'
        },
        close : {
          'opacity': 1,
          'visibility': 'hidden',
          'display': 'none'
        }
      }
    },

    init : function (scope, method, options) {
      $.extend(true, this.settings, method, options);
      this.bindings(method, options);
    },

    events : function (scope) {
      var self = this,
          S = self.S;

      // Open trigger: any element with data-reveal-id. A data-reveal-ajax
      // attribute (or `true` to use the trigger's href) loads remote content.
      S(this.scope)
        .off('.reveal')
        .on('click.fndtn.reveal', '[' + this.add_namespace('data-reveal-id') + ']', function (e) {
          e.preventDefault();

          if (!self.locked) {
            var element = S(this),
                ajax = element.data(self.data_attr('reveal-ajax'));

            self.locked = true;

            if (typeof ajax === 'undefined') {
              self.open.call(self, element);
            } else {
              var url = ajax === true ? element.attr('href') : ajax;

              self.open.call(self, element, {url: url});
            }
          }
        });

      // Close triggers: dismiss elements and (optionally) the background.
      S(document)
        .on('touchend.fndtn.reveal click.fndtn.reveal', this.close_targets(), function (e) {
          e.preventDefault();
          if (!self.locked) {
            var settings = S('[' + self.attr_name() + '].open').data(self.attr_name(true) + '-init'),
                bg_clicked = S(e.target)[0] === S('.' + settings.bg_class)[0];

            if (bg_clicked) {
              if (settings.close_on_background_click) {
                e.stopPropagation();
              } else {
                return;
              }
            }

            self.locked = true;
            self.close.call(self, bg_clicked ? S('[' + self.attr_name() + '].open') : S(this).closest('[' + self.attr_name() + ']'));
          }
        });

      // Bind lifecycle callbacks; delegated form is used when no reveal
      // elements exist in scope yet at bind time.
      if(S('[' + self.attr_name() + ']', this.scope).length > 0) {
        S(this.scope)
          // .off('.reveal')
          .on('open.fndtn.reveal', this.settings.open)
          .on('opened.fndtn.reveal', this.settings.opened)
          .on('opened.fndtn.reveal', this.open_video)
          .on('close.fndtn.reveal', this.settings.close)
          .on('closed.fndtn.reveal', this.settings.closed)
          .on('closed.fndtn.reveal', this.close_video);
      } else {
        S(this.scope)
          // .off('.reveal')
          .on('open.fndtn.reveal', '[' + self.attr_name() + ']', this.settings.open)
          .on('opened.fndtn.reveal', '[' + self.attr_name() + ']', this.settings.opened)
          .on('opened.fndtn.reveal', '[' + self.attr_name() + ']', this.open_video)
          .on('close.fndtn.reveal', '[' + self.attr_name() + ']', this.settings.close)
          .on('closed.fndtn.reveal', '[' + self.attr_name() + ']', this.settings.closed)
          .on('closed.fndtn.reveal', '[' + self.attr_name() + ']', this.close_video);
      }

      return true;
    },

    // PATCH #3: turning on key up capture only when a reveal window is open
    key_up_on : function (scope) {
      var self = this;

      // PATCH #1: fixing multiple keyup event trigger from single key press
      self.S('body').off('keyup.fndtn.reveal').on('keyup.fndtn.reveal', function ( event ) {
        var open_modal = self.S('[' + self.attr_name() + '].open'),
            settings = open_modal.data(self.attr_name(true) + '-init');
        // PATCH #2: making sure that the close event can be called only while unlocked,
        //           so that multiple keyup.fndtn.reveal events don't prevent clean closing of the reveal window.
        if ( settings && event.which === 27  && settings.close_on_esc && !self.locked) { // 27 is the keycode for the Escape key
          self.close.call(self, open_modal);
        }
      });

      return true;
    },

    // PATCH #3: turning on key up capture only when a reveal window is open
    key_up_off : function (scope) {
      this.S('body').off('keyup.fndtn.reveal');
      return true;
    },

    // Open a modal. `target` is either the trigger element (modal resolved
    // from its data-reveal-id) or the modal itself; `ajax_settings` is a URL
    // string or a $.ajax settings object for remote content.
    open : function (target, ajax_settings) {
      var self = this;
      if (target) {
        if (typeof target.selector !== 'undefined') {
          var modal = self.S('#' + target.data(self.data_attr('reveal-id')));
        } else {
          var modal = self.S(this.scope);

          ajax_settings = target;
        }
      } else {
        var modal = self.S(this.scope);
      }

      var settings = modal.data(self.attr_name(true) + '-init');

      if (!modal.hasClass('open')) {
        var open_modal = self.S('[' + self.attr_name() + '].open');

        // Cache the modal's CSS top and measured offset once, for the
        // pop animation math in show()/hide().
        if (typeof modal.data('css-top') === 'undefined') {
          modal.data('css-top', parseInt(modal.css('top'), 10))
            .data('offset', this.cache_offset(modal));
        }

        this.key_up_on(modal);    // PATCH #3: turning on key up capture only when a reveal window is open

        modal.trigger('open');

        if (open_modal.length < 1) {
          this.toggle_bg(modal);
        }

        if (typeof ajax_settings === 'string') {
          ajax_settings = {
            url: ajax_settings
          };
        }

        if (typeof ajax_settings === 'undefined' || !ajax_settings.url) {
          // Static content: hide any currently open modal, then show this one.
          if (open_modal.length > 0) {
            this.hide(open_modal, settings.css.close);
          }

          this.show(modal, settings.css.open);
        } else {
          // AJAX content: inject the response, then swap modals in the
          // success handler (preserving any user-supplied success callback).
          var old_success = typeof ajax_settings.success !== 'undefined' ? ajax_settings.success : null;

          $.extend(ajax_settings, {
            success: function (data, textStatus, jqXHR) {
              if ( $.isFunction(old_success) ) {
                old_success(data, textStatus, jqXHR);
              }

              modal.html(data);
              self.S(modal).foundation('section', 'reflow');

              if (open_modal.length > 0) {
                self.hide(open_modal, settings.css.close);
              }
              self.show(modal, settings.css.open);
            }
          });

          $.ajax(ajax_settings);
        }
      }
    },

    close : function (modal) {
      var modal = modal && modal.length ? modal : this.S(this.scope),
          open_modals = this.S('[' + this.attr_name() + '].open'),
          settings = modal.data(this.attr_name(true) + '-init');

      if (open_modals.length > 0) {
        this.locked = true;
        this.key_up_off(modal);   // PATCH #3: turning on key up capture only when a reveal window is open
        modal.trigger('close');
        this.toggle_bg(modal);
        this.hide(open_modals, settings.css.close, settings);
      }
    },

    // Selector for elements whose click closes the modal.
    close_targets : function () {
      var base = '.' + this.settings.dismiss_modal_class;

      if (this.settings.close_on_background_click) {
        return base + ', .' + this.settings.bg_class;
      }

      return base;
    },

    // Lazily create the shared background overlay and toggle its visibility.
    toggle_bg : function (modal) {
      var settings = modal.data(this.attr_name(true));
      if (this.S('.' + this.settings.bg_class).length === 0) {
        this.settings.bg = $('<div />', {'class': this.settings.bg_class})
          .appendTo('body').hide();
      }

      if (this.settings.bg.filter(':visible').length > 0) {
        this.hide(this.settings.bg);
      } else {
        this.show(this.settings.bg);
      }
    },

    // Show a modal (when `css` is given) or the background (when it is not),
    // using the pop and/or fade animation selected by settings.animation.
    show : function (el, css) {
      // is modal
      if (css) {
        var settings = el.data(this.attr_name(true) + '-init');

        // Re-parent the modal to <body> (or settings.rootElement) so
        // positioning is not affected by an ancestor's CSS; restore it to a
        // placeholder on close.
        if (el.parent('body').length === 0) {
          var placeholder = el.wrap('<div style="display: none;" />').parent(),
              rootElement = this.settings.rootElement || 'body';

          el.on('closed.fndtn.reveal.wrapped', function() {
            el.detach().appendTo(placeholder);
            el.unwrap().unbind('closed.fndtn.reveal.wrapped');
          });

          el.detach().appendTo(rootElement);
        }

        var animData = getAnimationData(settings.animation);
        if (!animData.animate) {
          this.locked = false;
        }
        if (animData.pop) {
          // Slide down from above the viewport to the cached css-top.
          css.top = $(window).scrollTop() - el.data('offset') + 'px';
          var end_css = {
            top: $(window).scrollTop() + el.data('css-top') + 'px',
            opacity: 1
          };

          return setTimeout(function () {
            return el
              .css(css)
              .animate(end_css, settings.animation_speed, 'linear', function () {
                this.locked = false;
                el.trigger('opened');
              }.bind(this))
              .addClass('open');
          }.bind(this), settings.animation_speed / 2);
        }

        if (animData.fade) {
          css.top = $(window).scrollTop() + el.data('css-top') + 'px';
          var end_css = {opacity: 1};

          return setTimeout(function () {
            return el
              .css(css)
              .animate(end_css, settings.animation_speed, 'linear', function () {
                this.locked = false;
                el.trigger('opened');
              }.bind(this))
              .addClass('open');
          }.bind(this), settings.animation_speed / 2);
        }

        // No animation: show immediately.
        return el.css(css).show().css({opacity: 1}).addClass('open').trigger('opened');
      }

      var settings = this.settings;

      // should we animate the background?
      if (getAnimationData(settings.animation).fade) {
        return el.fadeIn(settings.animation_speed / 2);
      }

      this.locked = false;

      return el.show();
    },

    // Mirror of show(): hide a modal (when `css` is given) or the background.
    hide : function (el, css) {
      // is modal
      if (css) {
        var settings = el.data(this.attr_name(true) + '-init');
        var animData = getAnimationData(settings.animation);
        if (!animData.animate) {
          this.locked = false;
        }
        if (animData.pop) {
          var end_css = {
            top: - $(window).scrollTop() - el.data('offset') + 'px',
            opacity: 0
          };

          return setTimeout(function () {
            return el
              .animate(end_css, settings.animation_speed, 'linear', function () {
                this.locked = false;
                el.css(css).trigger('closed');
              }.bind(this))
              .removeClass('open');
          }.bind(this), settings.animation_speed / 2);
        }

        if (animData.fade) {
          var end_css = {opacity: 0};

          return setTimeout(function () {
            return el
              .animate(end_css, settings.animation_speed, 'linear', function () {
                this.locked = false;
                el.css(css).trigger('closed');
              }.bind(this))
              .removeClass('open');
          }.bind(this), settings.animation_speed / 2);
        }

        return el.hide().css(css).removeClass('open').trigger('closed');
      }

      var settings = this.settings;

      // should we animate the background?
      if (getAnimationData(settings.animation).fade) {
        return el.fadeOut(settings.animation_speed / 2);
      }

      return el.hide();
    },

    // Park any embedded video iframe on about:blank while the modal is
    // closed (stops playback); the original src is stashed in data-src.
    close_video : function (e) {
      var video = $('.flex-video', e.target),
          iframe = $('iframe', video);

      if (iframe.length > 0) {
        iframe.attr('data-src', iframe[0].src);
        iframe.attr('src', 'about:blank');
        video.hide();
      }
    },

    // Restore (or reload) the video iframe src when the modal opens.
    open_video : function (e) {
      var video = $('.flex-video', e.target),
          iframe = video.find('iframe');

      if (iframe.length > 0) {
        var data_src = iframe.attr('data-src');
        if (typeof data_src === 'string') {
          iframe[0].src = iframe.attr('data-src');
        } else {
          var src = iframe[0].src;
          iframe[0].src = undefined;
          iframe[0].src = src;
        }
        video.show();
      }
    },

    // Prefix `str` with the configured namespace, if any.
    data_attr: function (str) {
      if (this.namespace.length > 0) {
        return this.namespace + '-' + str;
      }

      return str;
    },

    // Measure the modal's full offset (height + css top) for the pop
    // animation; briefly shows the element to measure it.
    cache_offset : function (modal) {
      var offset = modal.show().height() + parseInt(modal.css('top'), 10);

      modal.hide();

      return offset;
    },

    off : function () {
      $(this.scope).off('.fndtn.reveal');
    },

    reflow : function () {}
  };

  /*
   * getAnimationData('popAndFade') // {animate: true,  pop: true,  fade: true}
   * getAnimationData('fade')       // {animate: true,  pop: false, fade: true}
   * getAnimationData('pop')        // {animate: true,  pop: true,  fade: false}
   * getAnimationData('foo')        // {animate: false, pop: false, fade: false}
   * getAnimationData(null)         // {animate: false, pop: false, fade: false}
   */
  function getAnimationData(str) {
    var fade = /fade/i.test(str);
    var pop = /pop/i.test(str);
    return {
      animate: fade || pop,
      pop: pop,
      fade: fade
    };
  }
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.reveal.js
foundation.reveal.js
/*
 * Foundation 5.2.1 tooltip plugin.
 *
 * Attaches hover (mouse) / tap (touch) tooltips to elements carrying the
 * plugin's data attribute. Requires the globals `jQuery`, `Foundation`
 * (core: inherit, rtl, media_queries, and the mixed-in bindings/attr_name/
 * data_options/random_str helpers) and `Modernizr` (touch detection).
 *
 * Review fixes vs. the original 5.2.1 source:
 *  - off(): `.get(i)` returned a raw DOM node (no `.attr` method → TypeError
 *    when restoring titles); use `.eq(i)` to keep a jQuery wrapper.
 *  - reposition(): removed the dead locals `nubWidth` (a copy-paste of
 *    `outerHeight()`) and `column`; removed the ES3-illegal trailing comma
 *    in the objPos css object.
 *  - hide(): unbind with the same `.fndtn.tooltip.tapclose` namespace that
 *    convert_to_touch()/create() bind with (jQuery namespace matching made
 *    the old spelling work, but it was inconsistent).
 *  - Dropped unused locals (`is_touch`, `$tip` in showTip, `tip` in the
 *    hover timer). No behavioral change from any of these.
 */
;(function ($, window, document, undefined) {
  'use strict';

  Foundation.libs.tooltip = {
    name : 'tooltip',

    version : '5.2.1',

    settings : {
      additional_inheritable_classes : [],
      tooltip_class : '.tooltip',
      append_to: 'body',
      touch_close_text: 'Tap To Close',
      disable_for_touch: false,
      hover_delay: 200,
      // Builds the tooltip markup; `content` is the (HTML-escaped) title text.
      tip_template : function (selector, content) {
        return '<span data-selector="' + selector + '" class="' + Foundation.libs.tooltip.settings.tooltip_class.substring(1) + '">' + content + '<span class="nub"></span></span>';
      }
    },

    cache : {},

    init : function (scope, method, options) {
      Foundation.inherit(this, 'random_str');
      this.bindings(method, options);
    },

    // Creates the tip element and wires the delegated open/close handlers.
    events : function (instance) {
      var self = this,
          S = self.S;

      self.create(this.S(instance));

      $(this.scope)
        .off('.tooltip')
        .on('mouseenter.fndtn.tooltip mouseleave.fndtn.tooltip touchstart.fndtn.tooltip MSPointerDown.fndtn.tooltip',
          '[' + this.attr_name() + ']:not(a)', function (e) {
          var $this = S(this),
              settings = $.extend({}, self.settings, self.data_options($this));

          if (/mouse/i.test(e.type) && self.ie_touch(e)) return false;

          if ($this.hasClass('open')) {
            // Second tap on an open tooltip closes it.
            if (Modernizr.touch && /touchstart|MSPointerDown/i.test(e.type)) e.preventDefault();
            self.hide($this);
          } else {
            if (settings.disable_for_touch && Modernizr.touch && /touchstart|MSPointerDown/i.test(e.type)) {
              return;
            } else if (!settings.disable_for_touch && Modernizr.touch && /touchstart|MSPointerDown/i.test(e.type)) {
              e.preventDefault();
              // Only one touch tooltip open at a time.
              S(settings.tooltip_class + '.open').hide();
            }

            if (/enter|over/i.test(e.type)) {
              // Delay showing on hover so brief pass-overs don't flash the tip.
              this.timer = setTimeout(function () {
                self.showTip($this);
              }.bind(this), self.settings.hover_delay);
            } else if (e.type === 'mouseout' || e.type === 'mouseleave') {
              clearTimeout(this.timer);
              self.hide($this);
            } else {
              self.showTip($this);
            }
          }
        })
        .on('mouseleave.fndtn.tooltip touchstart.fndtn.tooltip MSPointerDown.fndtn.tooltip',
          '[' + this.attr_name() + '].open', function (e) {
          if (/mouse/i.test(e.type) && self.ie_touch(e)) return false;

          if ($(this).data('tooltip-open-event-type') == 'touch' && e.type == 'mouseleave') {
            // Touch-opened tips ignore mouseleave; they close on tap.
            return;
          } else if ($(this).data('tooltip-open-event-type') == 'mouse' && /MSPointerDown|touchstart/i.test(e.type)) {
            self.convert_to_touch($(this));
          } else {
            self.hide($(this));
          }
        })
        .on('DOMNodeRemoved DOMAttrModified', '[' + this.attr_name() + ']:not(a)', function (e) {
          self.hide(S(this));
        });
    },

    ie_touch : function (e) {
      // How do I distinguish between IE11 and Windows Phone 8?????
      return false;
    },

    showTip : function ($target) {
      return this.show($target);
    },

    // Returns the tip element for $target, or false if none has been created.
    getTip : function ($target) {
      var selector = this.selector($target),
          settings = $.extend({}, this.settings, this.data_options($target)),
          tip = null;

      if (selector) {
        tip = this.S('span[data-selector="' + selector + '"]' + settings.tooltip_class);
      }

      return (typeof tip === 'object') ? tip : false;
    },

    // Returns a stable selector key for $target, generating and stamping a
    // random data-selector when the element has neither an id nor one already.
    selector : function ($target) {
      var id = $target.attr('id'),
          dataSelector = $target.attr(this.attr_name()) || $target.attr('data-selector');

      if ((id && id.length < 1 || !id) && typeof dataSelector != 'string') {
        dataSelector = this.random_str(6);
        $target.attr('data-selector', dataSelector);
      }

      return (id && id.length > 0) ? id : dataSelector;
    },

    // Builds the tip element from the template and appends it to the DOM.
    create : function ($target) {
      var self = this,
          settings = $.extend({}, this.settings, this.data_options($target)),
          tip_template = this.settings.tip_template;

      // A string setting names a global template function.
      if (typeof settings.tip_template === 'string' && window.hasOwnProperty(settings.tip_template)) {
        tip_template = window[settings.tip_template];
      }

      // Round-trip through a detached div to HTML-escape the title text.
      var $tip = $(tip_template(this.selector($target), $('<div></div>').html($target.attr('title')).html())),
          classes = this.inheritable_classes($target);

      $tip.addClass(classes).appendTo(settings.append_to);

      if (Modernizr.touch) {
        $tip.append('<span class="tap-to-close">' + settings.touch_close_text + '</span>');
        $tip.on('touchstart.fndtn.tooltip MSPointerDown.fndtn.tooltip', function (e) {
          self.hide($target);
        });
      }

      // Blank the title so the browser's native tooltip doesn't also appear.
      $target.removeAttr('title').attr('title', '');
    },

    // Positions the tip (and its nub) relative to the target, honoring the
    // tip-top/tip-left/tip-right classes, RTL layouts, and small screens.
    reposition : function (target, tip, classes) {
      var width, nub, nubHeight, objPos;

      // Show invisibly so outerWidth/outerHeight measure real dimensions.
      tip.css('visibility', 'hidden').show();

      width = target.data('width');
      nub = tip.children('.nub');
      nubHeight = nub.outerHeight();

      if (this.small()) {
        tip.css({'width' : '100%'});
      } else {
        tip.css({'width' : (width) ? width : 'auto'});
      }

      objPos = function (obj, top, right, bottom, left, width) {
        return obj.css({
          'top' : (top) ? top : 'auto',
          'bottom' : (bottom) ? bottom : 'auto',
          'left' : (left) ? left : 'auto',
          'right' : (right) ? right : 'auto'
        }).end();
      };

      // Default placement: below the target.
      objPos(tip, (target.offset().top + target.outerHeight() + 10), 'auto', 'auto', target.offset().left);

      if (this.small()) {
        objPos(tip, (target.offset().top + target.outerHeight() + 10), 'auto', 'auto', 12.5, $(this.scope).width());
        tip.addClass('tip-override');
        objPos(nub, -nubHeight, 'auto', 'auto', target.offset().left);
      } else {
        var left = target.offset().left;
        if (Foundation.rtl) {
          nub.addClass('rtl');
          left = target.offset().left + target.outerWidth() - tip.outerWidth();
        }
        objPos(tip, (target.offset().top + target.outerHeight() + 10), 'auto', 'auto', left);
        tip.removeClass('tip-override');
        if (classes && classes.indexOf('tip-top') > -1) {
          if (Foundation.rtl) nub.addClass('rtl');
          objPos(tip, (target.offset().top - tip.outerHeight()), 'auto', 'auto', left)
            .removeClass('tip-override');
        } else if (classes && classes.indexOf('tip-left') > -1) {
          objPos(tip, (target.offset().top + (target.outerHeight() / 2) - (tip.outerHeight() / 2)), 'auto', 'auto', (target.offset().left - tip.outerWidth() - nubHeight))
            .removeClass('tip-override');
          nub.removeClass('rtl');
        } else if (classes && classes.indexOf('tip-right') > -1) {
          objPos(tip, (target.offset().top + (target.outerHeight() / 2) - (tip.outerHeight() / 2)), 'auto', 'auto', (target.offset().left + target.outerWidth() + nubHeight))
            .removeClass('tip-override');
          nub.removeClass('rtl');
        }
      }

      tip.css('visibility', 'visible').hide();
    },

    small : function () {
      return matchMedia(Foundation.media_queries.small).matches;
    },

    // Copies whitelisted positioning/styling classes from the target to the tip.
    inheritable_classes : function ($target) {
      var settings = $.extend({}, this.settings, this.data_options($target)),
          inheritables = ['tip-top', 'tip-left', 'tip-bottom', 'tip-right', 'radius', 'round'].concat(settings.additional_inheritable_classes),
          classes = $target.attr('class'),
          filtered = classes ? $.map(classes.split(' '), function (el, i) {
            if ($.inArray(el, inheritables) !== -1) {
              return el;
            }
          }).join(' ') : '';

      return $.trim(filtered);
    },

    // Retrofits a mouse-opened tip with the tap-to-close affordance.
    convert_to_touch : function ($target) {
      var self = this,
          $tip = self.getTip($target),
          settings = $.extend({}, self.settings, self.data_options($target));

      if ($tip.find('.tap-to-close').length === 0) {
        $tip.append('<span class="tap-to-close">' + settings.touch_close_text + '</span>');
        $tip.on('click.fndtn.tooltip.tapclose touchstart.fndtn.tooltip.tapclose MSPointerDown.fndtn.tooltip.tapclose', function (e) {
          self.hide($target);
        });
      }

      $target.data('tooltip-open-event-type', 'touch');
    },

    show : function ($target) {
      var $tip = this.getTip($target);

      if ($target.data('tooltip-open-event-type') == 'touch') {
        this.convert_to_touch($target);
      }

      this.reposition($target, $tip, $target.attr('class'));
      $target.addClass('open');
      $tip.fadeIn(150);
    },

    hide : function ($target) {
      var $tip = this.getTip($target);

      $tip.fadeOut(150, function () {
        $tip.find('.tap-to-close').remove();
        // Same namespace chain that convert_to_touch() binds with.
        $tip.off('click.fndtn.tooltip.tapclose touchstart.fndtn.tooltip.tapclose MSPointerDown.fndtn.tooltip.tapclose');
        $target.removeClass('open');
      });
    },

    // Tears the plugin down, restoring each target's title from its tip text.
    off : function () {
      var self = this;
      this.S(this.scope).off('.fndtn.tooltip');
      this.S(this.settings.tooltip_class).each(function (i) {
        // FIX: .get(i) returns a bare DOM node with no .attr(); .eq(i) keeps
        // the jQuery wrapper so the title is actually restored.
        $('[' + self.attr_name() + ']').eq(i).attr('title', $(this).text());
      }).remove();
    },

    reflow : function () {}
  };
}(jQuery, this, this.document));
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/foundation/foundation.tooltip.js
foundation.tooltip.js
!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){function c(a){var b=a.length,c=ab.type(a);return"function"===c||ab.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}function d(a,b,c){if(ab.isFunction(b))return ab.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return ab.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(hb.test(b))return ab.filter(b,a,c);b=ab.filter(b,a)}return ab.grep(a,function(a){return U.call(b,a)>=0!==c})}function e(a,b){for(;(a=a[b])&&1!==a.nodeType;);return a}function f(a){var b=ob[a]={};return ab.each(a.match(nb)||[],function(a,c){b[c]=!0}),b}function g(){$.removeEventListener("DOMContentLoaded",g,!1),a.removeEventListener("load",g,!1),ab.ready()}function h(){Object.defineProperty(this.cache={},0,{get:function(){return{}}}),this.expando=ab.expando+Math.random()}function i(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(ub,"-$1").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:tb.test(c)?ab.parseJSON(c):c}catch(e){}sb.set(a,b,c)}else c=void 0;return c}function j(){return!0}function k(){return!1}function l(){try{return $.activeElement}catch(a){}}function m(a,b){return ab.nodeName(a,"table")&&ab.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function n(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function o(a){var b=Kb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function p(a,b){for(var c=0,d=a.length;d>c;c++)rb.set(a[c],"globalEval",!b||rb.get(b[c],"globalEval"))}function q(a,b){var 
c,d,e,f,g,h,i,j;if(1===b.nodeType){if(rb.hasData(a)&&(f=rb.access(a),g=rb.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)ab.event.add(b,e,j[e][c])}sb.hasData(a)&&(h=sb.access(a),i=ab.extend({},h),sb.set(b,i))}}function r(a,b){var c=a.getElementsByTagName?a.getElementsByTagName(b||"*"):a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&ab.nodeName(a,b)?ab.merge([a],c):c}function s(a,b){var c=b.nodeName.toLowerCase();"input"===c&&yb.test(a.type)?b.checked=a.checked:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}function t(b,c){var d=ab(c.createElement(b)).appendTo(c.body),e=a.getDefaultComputedStyle?a.getDefaultComputedStyle(d[0]).display:ab.css(d[0],"display");return d.detach(),e}function u(a){var b=$,c=Ob[a];return c||(c=t(a,b),"none"!==c&&c||(Nb=(Nb||ab("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=Nb[0].contentDocument,b.write(),b.close(),c=t(a,b),Nb.detach()),Ob[a]=c),c}function v(a,b,c){var d,e,f,g,h=a.style;return c=c||Rb(a),c&&(g=c.getPropertyValue(b)||c[b]),c&&(""!==g||ab.contains(a.ownerDocument,a)||(g=ab.style(a,b)),Qb.test(g)&&Pb.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f)),void 0!==g?g+"":g}function w(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}function x(a,b){if(b in a)return b;for(var c=b[0].toUpperCase()+b.slice(1),d=b,e=Xb.length;e--;)if(b=Xb[e]+c,b in a)return b;return d}function y(a,b,c){var d=Tb.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function z(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=ab.css(a,c+wb[f],!0,e)),d?("content"===c&&(g-=ab.css(a,"padding"+wb[f],!0,e)),"margin"!==c&&(g-=ab.css(a,"border"+wb[f]+"Width",!0,e))):(g+=ab.css(a,"padding"+wb[f],!0,e),"padding"!==c&&(g+=ab.css(a,"border"+wb[f]+"Width",!0,e)));return g}function 
A(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=Rb(a),g="border-box"===ab.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=v(a,b,f),(0>e||null==e)&&(e=a.style[b]),Qb.test(e))return e;d=g&&(Z.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+z(a,b,c||(g?"border":"content"),d,f)+"px"}function B(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=rb.get(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&xb(d)&&(f[g]=rb.access(d,"olddisplay",u(d.nodeName)))):f[g]||(e=xb(d),(c&&"none"!==c||!e)&&rb.set(d,"olddisplay",e?c:ab.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}function C(a,b,c,d,e){return new C.prototype.init(a,b,c,d,e)}function D(){return setTimeout(function(){Yb=void 0}),Yb=ab.now()}function E(a,b){var c,d=0,e={height:a};for(b=b?1:0;4>d;d+=2-b)c=wb[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function F(a,b,c){for(var d,e=(cc[b]||[]).concat(cc["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function G(a,b,c){var d,e,f,g,h,i,j,k=this,l={},m=a.style,n=a.nodeType&&xb(a),o=rb.get(a,"fxshow");c.queue||(h=ab._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,k.always(function(){k.always(function(){h.unqueued--,ab.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[m.overflow,m.overflowX,m.overflowY],j=ab.css(a,"display"),"none"===j&&(j=u(a.nodeName)),"inline"===j&&"none"===ab.css(a,"float")&&(m.display="inline-block")),c.overflow&&(m.overflow="hidden",k.always(function(){m.overflow=c.overflow[0],m.overflowX=c.overflow[1],m.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],$b.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(n?"hide":"show")){if("show"!==e||!o||void 
0===o[d])continue;n=!0}l[d]=o&&o[d]||ab.style(a,d)}if(!ab.isEmptyObject(l)){o?"hidden"in o&&(n=o.hidden):o=rb.access(a,"fxshow",{}),f&&(o.hidden=!n),n?ab(a).show():k.done(function(){ab(a).hide()}),k.done(function(){var b;rb.remove(a,"fxshow");for(b in l)ab.style(a,b,l[b])});for(d in l)g=F(n?o[d]:0,d,k),d in o||(o[d]=g.start,n&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function H(a,b){var c,d,e,f,g;for(c in a)if(d=ab.camelCase(c),e=b[d],f=a[c],ab.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=ab.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function I(a,b,c){var d,e,f=0,g=bc.length,h=ab.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=Yb||D(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:ab.extend({},b),opts:ab.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:Yb||D(),duration:c.duration,tweens:[],createTween:function(b,c){var d=ab.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;for(H(k,j.opts.specialEasing);g>f;f++)if(d=bc[f].call(j,a,k,j.opts))return d;return ab.map(k,F,j),ab.isFunction(j.opts.start)&&j.opts.start.call(a,j),ab.fx.timer(ab.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}function J(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(nb)||[];if(ab.isFunction(c))for(;d=f[e++];)"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function K(a,b,c,d){function e(h){var i;return 
f[h]=!0,ab.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||g||f[j]?g?!(i=j):void 0:(b.dataTypes.unshift(j),e(j),!1)}),i}var f={},g=a===vc;return e(b.dataTypes[0])||!f["*"]&&e("*")}function L(a,b){var c,d,e=ab.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&ab.extend(!0,a,d),a}function M(a,b,c){for(var d,e,f,g,h=a.contents,i=a.dataTypes;"*"===i[0];)i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function N(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];for(f=k.shift();f;)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}function O(a,b,c,d){var e;if(ab.isArray(b))ab.each(b,function(b,e){c||zc.test(a)?d(a,e):O(a+"["+("object"==typeof e?b:"")+"]",e,c,d)});else if(c||"object"!==ab.type(b))d(a,b);else for(e in b)O(a+"["+e+"]",b[e],c,d)}function P(a){return ab.isWindow(a)?a:9===a.nodeType&&a.defaultView}var Q=[],R=Q.slice,S=Q.concat,T=Q.push,U=Q.indexOf,V={},W=V.toString,X=V.hasOwnProperty,Y="".trim,Z={},$=a.document,_="2.1.0",ab=function(a,b){return new ab.fn.init(a,b)},bb=/^-ms-/,cb=/-([\da-z])/gi,db=function(a,b){return b.toUpperCase()};ab.fn=ab.prototype={jquery:_,constructor:ab,selector:"",length:0,toArray:function(){return R.call(this)},get:function(a){return 
null!=a?0>a?this[a+this.length]:this[a]:R.call(this)},pushStack:function(a){var b=ab.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return ab.each(this,a,b)},map:function(a){return this.pushStack(ab.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(R.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:T,sort:Q.sort,splice:Q.splice},ab.extend=ab.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||ab.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(ab.isPlainObject(d)||(e=ab.isArray(d)))?(e?(e=!1,f=c&&ab.isArray(c)?c:[]):f=c&&ab.isPlainObject(c)?c:{},g[b]=ab.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},ab.extend({expando:"jQuery"+(_+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===ab.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){return a-parseFloat(a)>=0},isPlainObject:function(a){if("object"!==ab.type(a)||a.nodeType||ab.isWindow(a))return!1;try{if(a.constructor&&!X.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(b){return!1}return!0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?V[W.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=ab.trim(a),a&&(1===a.indexOf("use strict")?(b=$.createElement("script"),b.text=a,$.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return 
a.replace(bb,"ms-").replace(cb,db)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,d){var e,f=0,g=a.length,h=c(a);if(d){if(h)for(;g>f&&(e=b.apply(a[f],d),e!==!1);f++);else for(f in a)if(e=b.apply(a[f],d),e===!1)break}else if(h)for(;g>f&&(e=b.call(a[f],f,a[f]),e!==!1);f++);else for(f in a)if(e=b.call(a[f],f,a[f]),e===!1)break;return a},trim:function(a){return null==a?"":Y.call(a)},makeArray:function(a,b){var d=b||[];return null!=a&&(c(Object(a))?ab.merge(d,"string"==typeof a?[a]:a):T.call(d,a)),d},inArray:function(a,b,c){return null==b?-1:U.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,d){var e,f=0,g=a.length,h=c(a),i=[];if(h)for(;g>f;f++)e=b(a[f],f,d),null!=e&&i.push(e);else for(f in a)e=b(a[f],f,d),null!=e&&i.push(e);return S.apply([],i)},guid:1,proxy:function(a,b){var c,d,e;return"string"==typeof b&&(c=a[b],b=a,a=c),ab.isFunction(a)?(d=R.call(arguments,2),e=function(){return a.apply(b||this,d.concat(R.call(arguments)))},e.guid=a.guid=a.guid||ab.guid++,e):void 0},now:Date.now,support:Z}),ab.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){V["[object "+b+"]"]=b.toLowerCase()});var eb=/*! * Sizzle CSS Selector Engine v1.10.16 * http://sizzlejs.com/ * * Copyright 2013 jQuery Foundation, Inc. 
and other contributors * Released under the MIT license * http://jquery.org/license * * Date: 2014-01-13 */ function(a){function b(a,b,c,d){var e,f,g,h,i,j,l,o,p,q;if((b?b.ownerDocument||b:O)!==G&&F(b),b=b||G,c=c||[],!a||"string"!=typeof a)return c;if(1!==(h=b.nodeType)&&9!==h)return[];if(I&&!d){if(e=sb.exec(a))if(g=e[1]){if(9===h){if(f=b.getElementById(g),!f||!f.parentNode)return c;if(f.id===g)return c.push(f),c}else if(b.ownerDocument&&(f=b.ownerDocument.getElementById(g))&&M(b,f)&&f.id===g)return c.push(f),c}else{if(e[2])return _.apply(c,b.getElementsByTagName(a)),c;if((g=e[3])&&x.getElementsByClassName&&b.getElementsByClassName)return _.apply(c,b.getElementsByClassName(g)),c}if(x.qsa&&(!J||!J.test(a))){if(o=l=N,p=b,q=9===h&&a,1===h&&"object"!==b.nodeName.toLowerCase()){for(j=m(a),(l=b.getAttribute("id"))?o=l.replace(ub,"\\$&"):b.setAttribute("id",o),o="[id='"+o+"'] ",i=j.length;i--;)j[i]=o+n(j[i]);p=tb.test(a)&&k(b.parentNode)||b,q=j.join(",")}if(q)try{return _.apply(c,p.querySelectorAll(q)),c}catch(r){}finally{l||b.removeAttribute("id")}}}return v(a.replace(ib,"$1"),b,c,d)}function c(){function a(c,d){return b.push(c+" ")>y.cacheLength&&delete a[b.shift()],a[c+" "]=d}var b=[];return a}function d(a){return a[N]=!0,a}function e(a){var b=G.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function f(a,b){for(var c=a.split("|"),d=a.length;d--;)y.attrHandle[c[d]]=b}function g(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||W)-(~a.sourceIndex||W);if(d)return d;if(c)for(;c=c.nextSibling;)if(c===b)return-1;return a?1:-1}function h(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function i(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function j(a){return d(function(b){return b=+b,d(function(c,d){for(var e,f=a([],c.length,b),g=f.length;g--;)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function k(a){return 
a&&typeof a.getElementsByTagName!==V&&a}function l(){}function m(a,c){var d,e,f,g,h,i,j,k=S[a+" "];if(k)return c?0:k.slice(0);for(h=a,i=[],j=y.preFilter;h;){(!d||(e=jb.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),d=!1,(e=kb.exec(h))&&(d=e.shift(),f.push({value:d,type:e[0].replace(ib," ")}),h=h.slice(d.length));for(g in y.filter)!(e=ob[g].exec(h))||j[g]&&!(e=j[g](e))||(d=e.shift(),f.push({value:d,type:g,matches:e}),h=h.slice(d.length));if(!d)break}return c?h.length:h?b.error(a):S(a,i).slice(0)}function n(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function o(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=Q++;return b.first?function(b,c,f){for(;b=b[d];)if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[P,f];if(g){for(;b=b[d];)if((1===b.nodeType||e)&&a(b,c,g))return!0}else for(;b=b[d];)if(1===b.nodeType||e){if(i=b[N]||(b[N]={}),(h=i[d])&&h[0]===P&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function p(a){return a.length>1?function(b,c,d){for(var e=a.length;e--;)if(!a[e](b,c,d))return!1;return!0}:a[0]}function q(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function r(a,b,c,e,f,g){return e&&!e[N]&&(e=r(e)),f&&!f[N]&&(f=r(f,g)),d(function(d,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=d||u(b||"*",h.nodeType?[h]:h,[]),r=!a||!d&&b?p:q(p,m,a,h,i),s=c?f||(d?a:o||e)?[]:g:r;if(c&&c(r,s,h,i),e)for(j=q(s,n),e(j,[],h,i),k=j.length;k--;)(l=j[k])&&(s[n[k]]=!(r[n[k]]=l));if(d){if(f||a){if(f){for(j=[],k=s.length;k--;)(l=s[k])&&j.push(r[k]=l);f(null,s=[],j,i)}for(k=s.length;k--;)(l=s[k])&&(j=f?bb.call(d,l):m[k])>-1&&(d[j]=!(g[j]=l))}}else s=q(s===g?s.splice(o,s.length):s),f?f(null,g,s,i):_.apply(g,s)})}function s(a){for(var b,c,d,e=a.length,f=y.relative[a[0].type],g=f||y.relative[" "],h=f?1:0,i=o(function(a){return a===b},g,!0),j=o(function(a){return 
bb.call(b,a)>-1},g,!0),k=[function(a,c,d){return!f&&(d||c!==C)||((b=c).nodeType?i(a,c,d):j(a,c,d))}];e>h;h++)if(c=y.relative[a[h].type])k=[o(p(k),c)];else{if(c=y.filter[a[h].type].apply(null,a[h].matches),c[N]){for(d=++h;e>d&&!y.relative[a[d].type];d++);return r(h>1&&p(k),h>1&&n(a.slice(0,h-1).concat({value:" "===a[h-2].type?"*":""})).replace(ib,"$1"),c,d>h&&s(a.slice(h,d)),e>d&&s(a=a.slice(d)),e>d&&n(a))}k.push(c)}return p(k)}function t(a,c){var e=c.length>0,f=a.length>0,g=function(d,g,h,i,j){var k,l,m,n=0,o="0",p=d&&[],r=[],s=C,t=d||f&&y.find.TAG("*",j),u=P+=null==s?1:Math.random()||.1,v=t.length;for(j&&(C=g!==G&&g);o!==v&&null!=(k=t[o]);o++){if(f&&k){for(l=0;m=a[l++];)if(m(k,g,h)){i.push(k);break}j&&(P=u)}e&&((k=!m&&k)&&n--,d&&p.push(k))}if(n+=o,e&&o!==n){for(l=0;m=c[l++];)m(p,r,g,h);if(d){if(n>0)for(;o--;)p[o]||r[o]||(r[o]=Z.call(i));r=q(r)}_.apply(i,r),j&&!d&&r.length>0&&n+c.length>1&&b.uniqueSort(i)}return j&&(P=u,C=s),p};return e?d(g):g}function u(a,c,d){for(var e=0,f=c.length;f>e;e++)b(a,c[e],d);return d}function v(a,b,c,d){var e,f,g,h,i,j=m(a);if(!d&&1===j.length){if(f=j[0]=j[0].slice(0),f.length>2&&"ID"===(g=f[0]).type&&x.getById&&9===b.nodeType&&I&&y.relative[f[1].type]){if(b=(y.find.ID(g.matches[0].replace(vb,wb),b)||[])[0],!b)return c;a=a.slice(f.shift().value.length)}for(e=ob.needsContext.test(a)?0:f.length;e--&&(g=f[e],!y.relative[h=g.type]);)if((i=y.find[h])&&(d=i(g.matches[0].replace(vb,wb),tb.test(f[0].type)&&k(b.parentNode)||b))){if(f.splice(e,1),a=d.length&&n(f),!a)return _.apply(c,d),c;break}}return B(a,j)(d,b,!I,c,tb.test(a)&&k(b.parentNode)||b),c}var w,x,y,z,A,B,C,D,E,F,G,H,I,J,K,L,M,N="sizzle"+-new Date,O=a.document,P=0,Q=0,R=c(),S=c(),T=c(),U=function(a,b){return a===b&&(E=!0),0},V="undefined",W=1<<31,X={}.hasOwnProperty,Y=[],Z=Y.pop,$=Y.push,_=Y.push,ab=Y.slice,bb=Y.indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]===a)return 
b;return-1},cb="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",db="[\\x20\\t\\r\\n\\f]",eb="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",fb=eb.replace("w","w#"),gb="\\["+db+"*("+eb+")"+db+"*(?:([*^$|!~]?=)"+db+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+fb+")|)|)"+db+"*\\]",hb=":("+eb+")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|"+gb.replace(3,8)+")*)|.*)\\)|)",ib=new RegExp("^"+db+"+|((?:^|[^\\\\])(?:\\\\.)*)"+db+"+$","g"),jb=new RegExp("^"+db+"*,"+db+"*"),kb=new RegExp("^"+db+"*([>+~]|"+db+")"+db+"*"),lb=new RegExp("="+db+"*([^\\]'\"]*?)"+db+"*\\]","g"),mb=new RegExp(hb),nb=new RegExp("^"+fb+"$"),ob={ID:new RegExp("^#("+eb+")"),CLASS:new RegExp("^\\.("+eb+")"),TAG:new RegExp("^("+eb.replace("w","w*")+")"),ATTR:new RegExp("^"+gb),PSEUDO:new RegExp("^"+hb),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+db+"*(even|odd|(([+-]|)(\\d*)n|)"+db+"*(?:([+-]|)"+db+"*(\\d+)|))"+db+"*\\)|)","i"),bool:new RegExp("^(?:"+cb+")$","i"),needsContext:new RegExp("^"+db+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+db+"*((?:-\\d)?\\d*)"+db+"*\\)|)(?=[^-]|$)","i")},pb=/^(?:input|select|textarea|button)$/i,qb=/^h\d$/i,rb=/^[^{]+\{\s*\[native \w/,sb=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,tb=/[+~]/,ub=/'|\\/g,vb=new RegExp("\\\\([\\da-f]{1,6}"+db+"?|("+db+")|.)","ig"),wb=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)};try{_.apply(Y=ab.call(O.childNodes),O.childNodes),Y[O.childNodes.length].nodeType}catch(xb){_={apply:Y.length?function(a,b){$.apply(a,ab.call(b))}:function(a,b){for(var c=a.length,d=0;a[c++]=b[d++];);a.length=c-1}}}x=b.support={},A=b.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},F=b.setDocument=function(a){var b,c=a?a.ownerDocument||a:O,d=c.defaultView;return 
c!==G&&9===c.nodeType&&c.documentElement?(G=c,H=c.documentElement,I=!A(c),d&&d!==d.top&&(d.addEventListener?d.addEventListener("unload",function(){F()},!1):d.attachEvent&&d.attachEvent("onunload",function(){F()})),x.attributes=e(function(a){return a.className="i",!a.getAttribute("className")}),x.getElementsByTagName=e(function(a){return a.appendChild(c.createComment("")),!a.getElementsByTagName("*").length}),x.getElementsByClassName=rb.test(c.getElementsByClassName)&&e(function(a){return a.innerHTML="<div class='a'></div><div class='a i'></div>",a.firstChild.className="i",2===a.getElementsByClassName("i").length}),x.getById=e(function(a){return H.appendChild(a).id=N,!c.getElementsByName||!c.getElementsByName(N).length}),x.getById?(y.find.ID=function(a,b){if(typeof b.getElementById!==V&&I){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},y.filter.ID=function(a){var b=a.replace(vb,wb);return function(a){return a.getAttribute("id")===b}}):(delete y.find.ID,y.filter.ID=function(a){var b=a.replace(vb,wb);return function(a){var c=typeof a.getAttributeNode!==V&&a.getAttributeNode("id");return c&&c.value===b}}),y.find.TAG=x.getElementsByTagName?function(a,b){return typeof b.getElementsByTagName!==V?b.getElementsByTagName(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){for(;c=f[e++];)1===c.nodeType&&d.push(c);return d}return f},y.find.CLASS=x.getElementsByClassName&&function(a,b){return typeof b.getElementsByClassName!==V&&I?b.getElementsByClassName(a):void 0},K=[],J=[],(x.qsa=rb.test(c.querySelectorAll))&&(e(function(a){a.innerHTML="<select t=''><option selected=''></option></select>",a.querySelectorAll("[t^='']").length&&J.push("[*^$]="+db+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||J.push("\\["+db+"*(?:value|"+cb+")"),a.querySelectorAll(":checked").length||J.push(":checked")}),e(function(a){var 
b=c.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&J.push("name"+db+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||J.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),J.push(",.*:")})),(x.matchesSelector=rb.test(L=H.webkitMatchesSelector||H.mozMatchesSelector||H.oMatchesSelector||H.msMatchesSelector))&&e(function(a){x.disconnectedMatch=L.call(a,"div"),L.call(a,"[s!='']:x"),K.push("!=",hb)}),J=J.length&&new RegExp(J.join("|")),K=K.length&&new RegExp(K.join("|")),b=rb.test(H.compareDocumentPosition),M=b||rb.test(H.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)for(;b=b.parentNode;)if(b===a)return!0;return!1},U=b?function(a,b){if(a===b)return E=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!x.sortDetached&&b.compareDocumentPosition(a)===d?a===c||a.ownerDocument===O&&M(O,a)?-1:b===c||b.ownerDocument===O&&M(O,b)?1:D?bb.call(D,a)-bb.call(D,b):0:4&d?-1:1)}:function(a,b){if(a===b)return E=!0,0;var d,e=0,f=a.parentNode,h=b.parentNode,i=[a],j=[b];if(!f||!h)return a===c?-1:b===c?1:f?-1:h?1:D?bb.call(D,a)-bb.call(D,b):0;if(f===h)return g(a,b);for(d=a;d=d.parentNode;)i.unshift(d);for(d=b;d=d.parentNode;)j.unshift(d);for(;i[e]===j[e];)e++;return e?g(i[e],j[e]):i[e]===O?-1:j[e]===O?1:0},c):G},b.matches=function(a,c){return b(a,null,null,c)},b.matchesSelector=function(a,c){if((a.ownerDocument||a)!==G&&F(a),c=c.replace(lb,"='$1']"),!(!x.matchesSelector||!I||K&&K.test(c)||J&&J.test(c)))try{var d=L.call(a,c);if(d||x.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return 
b(c,G,null,[a]).length>0},b.contains=function(a,b){return(a.ownerDocument||a)!==G&&F(a),M(a,b)},b.attr=function(a,b){(a.ownerDocument||a)!==G&&F(a);var c=y.attrHandle[b.toLowerCase()],d=c&&X.call(y.attrHandle,b.toLowerCase())?c(a,b,!I):void 0;return void 0!==d?d:x.attributes||!I?a.getAttribute(b):(d=a.getAttributeNode(b))&&d.specified?d.value:null},b.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},b.uniqueSort=function(a){var b,c=[],d=0,e=0;if(E=!x.detectDuplicates,D=!x.sortStable&&a.slice(0),a.sort(U),E){for(;b=a[e++];)b===a[e]&&(d=c.push(e));for(;d--;)a.splice(c[d],1)}return D=null,a},z=b.getText=function(a){var b,c="",d=0,e=a.nodeType;if(e){if(1===e||9===e||11===e){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=z(a)}else if(3===e||4===e)return a.nodeValue}else for(;b=a[d++];)c+=z(b);return c},y=b.selectors={cacheLength:50,createPseudo:d,match:ob,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(vb,wb),a[3]=(a[4]||a[5]||"").replace(vb,wb),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||b.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&b.error(a[0]),a},PSEUDO:function(a){var b,c=!a[5]&&a[2];return ob.CHILD.test(a[0])?null:(a[3]&&void 0!==a[4]?a[2]=a[4]:c&&mb.test(c)&&(b=m(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(vb,wb).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=R[a+" "];return b||(b=new RegExp("(^|"+db+")"+a+"("+db+"|$)"))&&R(a,function(a){return b.test("string"==typeof 
a.className&&a.className||typeof a.getAttribute!==V&&a.getAttribute("class")||"")})},ATTR:function(a,c,d){return function(e){var f=b.attr(e,a);return null==f?"!="===c:c?(f+="","="===c?f===d:"!="===c?f!==d:"^="===c?d&&0===f.indexOf(d):"*="===c?d&&f.indexOf(d)>-1:"$="===c?d&&f.slice(-d.length)===d:"~="===c?(" "+f+" ").indexOf(d)>-1:"|="===c?f===d||f.slice(0,d.length+1)===d+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){for(;p;){for(l=b;l=l[p];)if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){for(k=q[N]||(q[N]={}),j=k[a]||[],n=j[0]===P&&j[1],m=j[0]===P&&j[2],l=n&&q.childNodes[n];l=++n&&l&&l[p]||(m=n=0)||o.pop();)if(1===l.nodeType&&++m&&l===b){k[a]=[P,n,m];break}}else if(s&&(j=(b[N]||(b[N]={}))[a])&&j[0]===P)m=j[1];else for(;(l=++n&&l&&l[p]||(m=n=0)||o.pop())&&((h?l.nodeName.toLowerCase()!==r:1!==l.nodeType)||!++m||(s&&((l[N]||(l[N]={}))[a]=[P,m]),l!==b)););return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,c){var e,f=y.pseudos[a]||y.setFilters[a.toLowerCase()]||b.error("unsupported pseudo: "+a);return f[N]?f(c):f.length>1?(e=[a,a,"",c],y.setFilters.hasOwnProperty(a.toLowerCase())?d(function(a,b){for(var d,e=f(a,c),g=e.length;g--;)d=bb.call(a,e[g]),a[d]=!(b[d]=e[g])}):function(a){return f(a,0,e)}):f}},pseudos:{not:d(function(a){var b=[],c=[],e=B(a.replace(ib,"$1"));return e[N]?d(function(a,b,c,d){for(var f,g=e(a,null,d,[]),h=a.length;h--;)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,d,f){return b[0]=a,e(b,null,f,c),!c.pop()}}),has:d(function(a){return function(c){return b(a,c).length>0}}),contains:d(function(a){return function(b){return(b.textContent||b.innerText||z(b)).indexOf(a)>-1}}),lang:d(function(a){return 
nb.test(a||"")||b.error("unsupported lang: "+a),a=a.replace(vb,wb).toLowerCase(),function(b){var c;do if(c=I?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===H},focus:function(a){return a===G.activeElement&&(!G.hasFocus||G.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!y.pseudos.empty(a)},header:function(a){return qb.test(a.nodeName)},input:function(a){return pb.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:j(function(){return[0]}),last:j(function(a,b){return[b-1]}),eq:j(function(a,b,c){return[0>c?c+b:c]}),even:j(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:j(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:j(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:j(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},y.pseudos.nth=y.pseudos.eq;for(w in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})y.pseudos[w]=h(w);for(w in{submit:!0,reset:!0})y.pseudos[w]=i(w);return l.prototype=y.filters=y.pseudos,y.setFilters=new l,B=b.compile=function(a,b){var c,d=[],e=[],f=T[a+" "];if(!f){for(b||(b=m(a)),c=b.length;c--;)f=s(b[c]),f[N]?d.push(f):e.push(f);f=T(a,t(e,d))}return 
f},x.sortStable=N.split("").sort(U).join("")===N,x.detectDuplicates=!!E,F(),x.sortDetached=e(function(a){return 1&a.compareDocumentPosition(G.createElement("div"))}),e(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||f("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),x.attributes&&e(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||f("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),e(function(a){return null==a.getAttribute("disabled")})||f(cb,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),b}(a);ab.find=eb,ab.expr=eb.selectors,ab.expr[":"]=ab.expr.pseudos,ab.unique=eb.uniqueSort,ab.text=eb.getText,ab.isXMLDoc=eb.isXML,ab.contains=eb.contains;var fb=ab.expr.match.needsContext,gb=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,hb=/^.[^:#\[\.,]*$/;ab.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?ab.find.matchesSelector(d,a)?[d]:[]:ab.find.matches(a,ab.grep(b,function(a){return 1===a.nodeType}))},ab.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(ab(a).filter(function(){for(b=0;c>b;b++)if(ab.contains(e[b],this))return!0}));for(b=0;c>b;b++)ab.find(a,e[b],d);return d=this.pushStack(c>1?ab.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(d(this,a||[],!1))},not:function(a){return this.pushStack(d(this,a||[],!0))},is:function(a){return!!d(this,"string"==typeof a&&fb.test(a)?ab(a):a||[],!1).length}});var ib,jb=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,kb=ab.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof 
a){if(c="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:jb.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||ib).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof ab?b[0]:b,ab.merge(this,ab.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:$,!0)),gb.test(c[1])&&ab.isPlainObject(b))for(c in b)ab.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}return d=$.getElementById(c[2]),d&&d.parentNode&&(this.length=1,this[0]=d),this.context=$,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):ab.isFunction(a)?"undefined"!=typeof ib.ready?ib.ready(a):a(ab):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),ab.makeArray(a,this))};kb.prototype=ab.fn,ib=ab($);var lb=/^(?:parents|prev(?:Until|All))/,mb={children:!0,contents:!0,next:!0,prev:!0};ab.extend({dir:function(a,b,c){for(var d=[],e=void 0!==c;(a=a[b])&&9!==a.nodeType;)if(1===a.nodeType){if(e&&ab(a).is(c))break;d.push(a)}return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),ab.fn.extend({has:function(a){var b=ab(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(ab.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=fb.test(a)||"string"!=typeof a?ab(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&ab.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?ab.unique(f):f)},index:function(a){return a?"string"==typeof a?U.call(ab(a),this[0]):U.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(ab.unique(ab.merge(this.get(),ab(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}}),ab.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return 
ab.dir(a,"parentNode")},parentsUntil:function(a,b,c){return ab.dir(a,"parentNode",c)},next:function(a){return e(a,"nextSibling")},prev:function(a){return e(a,"previousSibling")},nextAll:function(a){return ab.dir(a,"nextSibling")},prevAll:function(a){return ab.dir(a,"previousSibling")},nextUntil:function(a,b,c){return ab.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return ab.dir(a,"previousSibling",c)},siblings:function(a){return ab.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return ab.sibling(a.firstChild)},contents:function(a){return a.contentDocument||ab.merge([],a.childNodes)}},function(a,b){ab.fn[a]=function(c,d){var e=ab.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=ab.filter(d,e)),this.length>1&&(mb[a]||ab.unique(e),lb.test(a)&&e.reverse()),this.pushStack(e)}});var nb=/\S+/g,ob={};ab.Callbacks=function(a){a="string"==typeof a?ob[a]||f(a):ab.extend({},a);var b,c,d,e,g,h,i=[],j=!a.once&&[],k=function(f){for(b=a.memory&&f,c=!0,h=e||0,e=0,g=i.length,d=!0;i&&g>h;h++)if(i[h].apply(f[0],f[1])===!1&&a.stopOnFalse){b=!1;break}d=!1,i&&(j?j.length&&k(j.shift()):b?i=[]:l.disable())},l={add:function(){if(i){var c=i.length;!function f(b){ab.each(b,function(b,c){var d=ab.type(c);"function"===d?a.unique&&l.has(c)||i.push(c):c&&c.length&&"string"!==d&&f(c)})}(arguments),d?g=i.length:b&&(e=c,k(b))}return this},remove:function(){return i&&ab.each(arguments,function(a,b){for(var c;(c=ab.inArray(b,i,c))>-1;)i.splice(c,1),d&&(g>=c&&g--,h>=c&&h--)}),this},has:function(a){return a?ab.inArray(a,i)>-1:!(!i||!i.length)},empty:function(){return i=[],g=0,this},disable:function(){return i=j=b=void 0,this},disabled:function(){return!i},lock:function(){return j=void 0,b||l.disable(),this},locked:function(){return!j},fireWith:function(a,b){return!i||c&&!j||(b=b||[],b=[a,b.slice?b.slice():b],d?j.push(b):k(b)),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!c}};return 
l},ab.extend({Deferred:function(a){var b=[["resolve","done",ab.Callbacks("once memory"),"resolved"],["reject","fail",ab.Callbacks("once memory"),"rejected"],["notify","progress",ab.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return ab.Deferred(function(c){ab.each(b,function(b,f){var g=ab.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&ab.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?ab.extend(a,d):d}},e={};return d.pipe=d.then,ab.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b,c,d,e=0,f=R.call(arguments),g=f.length,h=1!==g||a&&ab.isFunction(a.promise)?g:0,i=1===h?a:ab.Deferred(),j=function(a,c,d){return function(e){c[a]=this,d[a]=arguments.length>1?R.call(arguments):e,d===b?i.notifyWith(c,d):--h||i.resolveWith(c,d)}};if(g>1)for(b=new Array(g),c=new Array(g),d=new Array(g);g>e;e++)f[e]&&ab.isFunction(f[e].promise)?f[e].promise().done(j(e,d,f)).fail(i.reject).progress(j(e,c,b)):--h;return h||i.resolveWith(d,f),i.promise()}});var pb;ab.fn.ready=function(a){return ab.ready.promise().done(a),this},ab.extend({isReady:!1,readyWait:1,holdReady:function(a){a?ab.readyWait++:ab.ready(!0)},ready:function(a){(a===!0?--ab.readyWait:ab.isReady)||(ab.isReady=!0,a!==!0&&--ab.readyWait>0||(pb.resolveWith($,[ab]),ab.fn.trigger&&ab($).trigger("ready").off("ready")))}}),ab.ready.promise=function(b){return 
pb||(pb=ab.Deferred(),"complete"===$.readyState?setTimeout(ab.ready):($.addEventListener("DOMContentLoaded",g,!1),a.addEventListener("load",g,!1))),pb.promise(b)},ab.ready.promise();var qb=ab.access=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===ab.type(c)){e=!0;for(h in c)ab.access(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,ab.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(ab(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f};ab.acceptData=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType},h.uid=1,h.accepts=ab.acceptData,h.prototype={key:function(a){if(!h.accepts(a))return 0;var b={},c=a[this.expando];if(!c){c=h.uid++;try{b[this.expando]={value:c},Object.defineProperties(a,b)}catch(d){b[this.expando]=c,ab.extend(a,b)}}return this.cache[c]||(this.cache[c]={}),c},set:function(a,b,c){var d,e=this.key(a),f=this.cache[e];if("string"==typeof b)f[b]=c;else if(ab.isEmptyObject(f))ab.extend(this.cache[e],b);else for(d in b)f[d]=b[d];return f},get:function(a,b){var c=this.cache[this.key(a)];return void 0===b?c:c[b]},access:function(a,b,c){var d;return void 0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,ab.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=this.key(a),g=this.cache[f];if(void 0===b)this.cache[f]={};else{ab.isArray(b)?d=b.concat(b.map(ab.camelCase)):(e=ab.camelCase(b),b in g?d=[b,e]:(d=e,d=d in g?[d]:d.match(nb)||[])),c=d.length;for(;c--;)delete g[d[c]]}},hasData:function(a){return!ab.isEmptyObject(this.cache[a[this.expando]]||{})},discard:function(a){a[this.expando]&&delete this.cache[a[this.expando]]}};var rb=new h,sb=new h,tb=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,ub=/([A-Z])/g;ab.extend({hasData:function(a){return sb.hasData(a)||rb.hasData(a)},data:function(a,b,c){return sb.access(a,b,c)},removeData:function(a,b){sb.remove(a,b)},_data:function(a,b,c){return 
rb.access(a,b,c)},_removeData:function(a,b){rb.remove(a,b)}}),ab.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=sb.get(f),1===f.nodeType&&!rb.get(f,"hasDataAttrs"))){for(c=g.length;c--;)d=g[c].name,0===d.indexOf("data-")&&(d=ab.camelCase(d.slice(5)),i(f,d,e[d]));rb.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){sb.set(this,a)}):qb(this,function(b){var c,d=ab.camelCase(a);if(f&&void 0===b){if(c=sb.get(f,a),void 0!==c)return c;if(c=sb.get(f,d),void 0!==c)return c;if(c=i(f,d,void 0),void 0!==c)return c}else this.each(function(){var c=sb.get(this,d);sb.set(this,d,b),-1!==a.indexOf("-")&&void 0!==c&&sb.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){sb.remove(this,a)})}}),ab.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=rb.get(a,b),c&&(!d||ab.isArray(c)?d=rb.access(a,b,ab.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=ab.queue(a,b),d=c.length,e=c.shift(),f=ab._queueHooks(a,b),g=function(){ab.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return rb.get(a,c)||rb.access(a,c,{empty:ab.Callbacks("once memory").add(function(){rb.remove(a,[b+"queue",c])})})}}),ab.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?ab.queue(this[0],a):void 0===b?this:this.each(function(){var c=ab.queue(this,a,b);ab._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&ab.dequeue(this,a)})},dequeue:function(a){return this.each(function(){ab.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=ab.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};for("string"!=typeof a&&(b=a,a=void 
0),a=a||"fx";g--;)c=rb.get(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var vb=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,wb=["Top","Right","Bottom","Left"],xb=function(a,b){return a=b||a,"none"===ab.css(a,"display")||!ab.contains(a.ownerDocument,a)},yb=/^(?:checkbox|radio)$/i;!function(){var a=$.createDocumentFragment(),b=a.appendChild($.createElement("div"));b.innerHTML="<input type='radio' checked='checked' name='t'/>",Z.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="<textarea>x</textarea>",Z.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var zb="undefined";Z.focusinBubbles="onfocusin"in a;var Ab=/^key/,Bb=/^(?:mouse|contextmenu)|click/,Cb=/^(?:focusinfocus|focusoutblur)$/,Db=/^([^.]*)(?:\.(.+)|)$/;ab.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,n,o,p,q=rb.get(a);if(q)for(c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=ab.guid++),(i=q.events)||(i=q.events={}),(g=q.handle)||(g=q.handle=function(b){return typeof ab!==zb&&ab.event.triggered!==b.type?ab.event.dispatch.apply(a,arguments):void 0}),b=(b||"").match(nb)||[""],j=b.length;j--;)h=Db.exec(b[j])||[],n=p=h[1],o=(h[2]||"").split(".").sort(),n&&(l=ab.event.special[n]||{},n=(e?l.delegateType:l.bindType)||n,l=ab.event.special[n]||{},k=ab.extend({type:n,origType:p,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&ab.expr.match.needsContext.test(e),namespace:o.join(".")},f),(m=i[n])||(m=i[n]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,o,g)!==!1||a.addEventListener&&a.addEventListener(n,g,!1)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),ab.event.global[n]=!0)},remove:function(a,b,c,d,e){var 
f,g,h,i,j,k,l,m,n,o,p,q=rb.hasData(a)&&rb.get(a);if(q&&(i=q.events)){for(b=(b||"").match(nb)||[""],j=b.length;j--;)if(h=Db.exec(b[j])||[],n=p=h[1],o=(h[2]||"").split(".").sort(),n){for(l=ab.event.special[n]||{},n=(d?l.delegateType:l.bindType)||n,m=i[n]||[],h=h[2]&&new RegExp("(^|\\.)"+o.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;f--;)k=m[f],!e&&p!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,o,q.handle)!==!1||ab.removeEvent(a,n,q.handle),delete i[n])}else for(n in i)ab.event.remove(a,n+b[j],c,d,!0);ab.isEmptyObject(i)&&(delete q.handle,rb.remove(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,j,k,l,m=[d||$],n=X.call(b,"type")?b.type:b,o=X.call(b,"namespace")?b.namespace.split("."):[];if(g=h=d=d||$,3!==d.nodeType&&8!==d.nodeType&&!Cb.test(n+ab.event.triggered)&&(n.indexOf(".")>=0&&(o=n.split("."),n=o.shift(),o.sort()),j=n.indexOf(":")<0&&"on"+n,b=b[ab.expando]?b:new ab.Event(n,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=o.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+o.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=d),c=null==c?[b]:ab.makeArray(c,[b]),l=ab.event.special[n]||{},e||!l.trigger||l.trigger.apply(d,c)!==!1)){if(!e&&!l.noBubble&&!ab.isWindow(d)){for(i=l.delegateType||n,Cb.test(i+n)||(g=g.parentNode);g;g=g.parentNode)m.push(g),h=g; h===(d.ownerDocument||$)&&m.push(h.defaultView||h.parentWindow||a)}for(f=0;(g=m[f++])&&!b.isPropagationStopped();)b.type=f>1?i:l.bindType||n,k=(rb.get(g,"events")||{})[b.type]&&rb.get(g,"handle"),k&&k.apply(g,c),k=j&&g[j],k&&k.apply&&ab.acceptData(g)&&(b.result=k.apply(g,c),b.result===!1&&b.preventDefault());return 
b.type=n,e||b.isDefaultPrevented()||l._default&&l._default.apply(m.pop(),c)!==!1||!ab.acceptData(d)||j&&ab.isFunction(d[n])&&!ab.isWindow(d)&&(h=d[j],h&&(d[j]=null),ab.event.triggered=n,d[n](),ab.event.triggered=void 0,h&&(d[j]=h)),b.result}},dispatch:function(a){a=ab.event.fix(a);var b,c,d,e,f,g=[],h=R.call(arguments),i=(rb.get(this,"events")||{})[a.type]||[],j=ab.event.special[a.type]||{};if(h[0]=a,a.delegateTarget=this,!j.preDispatch||j.preDispatch.call(this,a)!==!1){for(g=ab.event.handlers.call(this,a,i),b=0;(e=g[b++])&&!a.isPropagationStopped();)for(a.currentTarget=e.elem,c=0;(f=e.handlers[c++])&&!a.isImmediatePropagationStopped();)(!a.namespace_re||a.namespace_re.test(f.namespace))&&(a.handleObj=f,a.data=f.data,d=((ab.event.special[f.origType]||{}).handle||f.handler).apply(e.elem,h),void 0!==d&&(a.result=d)===!1&&(a.preventDefault(),a.stopPropagation()));return j.postDispatch&&j.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!==this;i=i.parentNode||this)if(i.disabled!==!0||"click"!==a.type){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?ab(e,this).index(i)>=0:ab.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,d,e,f=b.button;return 
null==a.pageX&&null!=b.clientX&&(c=a.target.ownerDocument||$,d=c.documentElement,e=c.body,a.pageX=b.clientX+(d&&d.scrollLeft||e&&e.scrollLeft||0)-(d&&d.clientLeft||e&&e.clientLeft||0),a.pageY=b.clientY+(d&&d.scrollTop||e&&e.scrollTop||0)-(d&&d.clientTop||e&&e.clientTop||0)),a.which||void 0===f||(a.which=1&f?1:2&f?3:4&f?2:0),a}},fix:function(a){if(a[ab.expando])return a;var b,c,d,e=a.type,f=a,g=this.fixHooks[e];for(g||(this.fixHooks[e]=g=Bb.test(e)?this.mouseHooks:Ab.test(e)?this.keyHooks:{}),d=g.props?this.props.concat(g.props):this.props,a=new ab.Event(f),b=d.length;b--;)c=d[b],a[c]=f[c];return a.target||(a.target=$),3===a.target.nodeType&&(a.target=a.target.parentNode),g.filter?g.filter(a,f):a},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==l()&&this.focus?(this.focus(),!1):void 0},delegateType:"focusin"},blur:{trigger:function(){return this===l()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&ab.nodeName(this,"input")?(this.click(),!1):void 0},_default:function(a){return ab.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c,d){var e=ab.extend(new ab.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?ab.event.trigger(e,null,b):ab.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},ab.removeEvent=function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)},ab.Event=function(a,b){return this instanceof ab.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.getPreventDefault&&a.getPreventDefault()?j:k):this.type=a,b&&ab.extend(this,b),this.timeStamp=a&&a.timeStamp||ab.now(),void(this[ab.expando]=!0)):new ab.Event(a,b)},ab.Event.prototype={isDefaultPrevented:k,isPropagationStopped:k,isImmediatePropagationStopped:k,preventDefault:function(){var 
a=this.originalEvent;this.isDefaultPrevented=j,a&&a.preventDefault&&a.preventDefault()},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=j,a&&a.stopPropagation&&a.stopPropagation()},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=j,this.stopPropagation()}},ab.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){ab.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj;return(!e||e!==d&&!ab.contains(d,e))&&(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),Z.focusinBubbles||ab.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){ab.event.simulate(b,a.target,ab.event.fix(a),!0)};ab.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=rb.access(d,b);e||d.addEventListener(a,c,!0),rb.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=rb.access(d,b)-1;e?rb.access(d,b,e):(d.removeEventListener(a,c,!0),rb.remove(d,b))}}}),ab.fn.extend({on:function(a,b,c,d,e){var f,g;if("object"==typeof a){"string"!=typeof b&&(c=c||b,b=void 0);for(g in a)this.on(g,b,c,a[g],e);return this}if(null==c&&null==d?(d=b,c=b=void 0):null==d&&("string"==typeof b?(d=c,c=void 0):(d=c,c=b,b=void 0)),d===!1)d=k;else if(!d)return this;return 1===e&&(f=d,d=function(a){return ab().off(a),f.apply(this,arguments)},d.guid=f.guid||(f.guid=ab.guid++)),this.each(function(){ab.event.add(this,a,d,c,b)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,ab(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return(b===!1||"function"==typeof b)&&(c=b,b=void 0),c===!1&&(c=k),this.each(function(){ab.event.remove(this,a,c,b)})},trigger:function(a,b){return 
this.each(function(){ab.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?ab.event.trigger(a,b,c,!0):void 0}});var Eb=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,Fb=/<([\w:]+)/,Gb=/<|&#?\w+;/,Hb=/<(?:script|style|link)/i,Ib=/checked\s*(?:[^=]|=\s*.checked.)/i,Jb=/^$|\/(?:java|ecma)script/i,Kb=/^true\/(.*)/,Lb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,Mb={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};Mb.optgroup=Mb.option,Mb.tbody=Mb.tfoot=Mb.colgroup=Mb.caption=Mb.thead,Mb.th=Mb.td,ab.extend({clone:function(a,b,c){var d,e,f,g,h=a.cloneNode(!0),i=ab.contains(a.ownerDocument,a);if(!(Z.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||ab.isXMLDoc(a)))for(g=r(h),f=r(a),d=0,e=f.length;e>d;d++)s(f[d],g[d]);if(b)if(c)for(f=f||r(a),g=g||r(h),d=0,e=f.length;e>d;d++)q(f[d],g[d]);else q(a,h);return g=r(h,"script"),g.length>0&&p(g,!i&&r(a,"script")),h},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,k=b.createDocumentFragment(),l=[],m=0,n=a.length;n>m;m++)if(e=a[m],e||0===e)if("object"===ab.type(e))ab.merge(l,e.nodeType?[e]:e);else if(Gb.test(e)){for(f=f||k.appendChild(b.createElement("div")),g=(Fb.exec(e)||["",""])[1].toLowerCase(),h=Mb[g]||Mb._default,f.innerHTML=h[1]+e.replace(Eb,"<$1></$2>")+h[2],j=h[0];j--;)f=f.lastChild;ab.merge(l,f.childNodes),f=k.firstChild,f.textContent=""}else l.push(b.createTextNode(e));for(k.textContent="",m=0;e=l[m++];)if((!d||-1===ab.inArray(e,d))&&(i=ab.contains(e.ownerDocument,e),f=r(k.appendChild(e),"script"),i&&p(f),c))for(j=0;e=f[j++];)Jb.test(e.type||"")&&c.push(e);return k},cleanData:function(a){for(var b,c,d,e,f,g,h=ab.event.special,i=0;void 0!==(c=a[i]);i++){if(ab.acceptData(c)&&(f=c[rb.expando],f&&(b=rb.cache[f]))){if(d=Object.keys(b.events||{}),d.length)for(g=0;void 
0!==(e=d[g]);g++)h[e]?ab.event.remove(c,e):ab.removeEvent(c,e,b.handle);rb.cache[f]&&delete rb.cache[f]}delete sb.cache[c[sb.expando]]}}}),ab.fn.extend({text:function(a){return qb(this,function(a){return void 0===a?ab.text(this):this.empty().each(function(){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&(this.textContent=a)})},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=m(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=m(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?ab.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||ab.cleanData(r(c)),c.parentNode&&(b&&ab.contains(c.ownerDocument,c)&&p(r(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(ab.cleanData(r(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return ab.clone(this,a,b)})},html:function(a){return qb(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!Hb.test(a)&&!Mb[(Fb.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Eb,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(ab.cleanData(r(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return 
this.domManip(arguments,function(b){a=this.parentNode,ab.cleanData(r(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=S.apply([],a);var c,d,e,f,g,h,i=0,j=this.length,k=this,l=j-1,m=a[0],p=ab.isFunction(m);if(p||j>1&&"string"==typeof m&&!Z.checkClone&&Ib.test(m))return this.each(function(c){var d=k.eq(c);p&&(a[0]=m.call(this,c,d.html())),d.domManip(a,b)});if(j&&(c=ab.buildFragment(a,this[0].ownerDocument,!1,this),d=c.firstChild,1===c.childNodes.length&&(c=d),d)){for(e=ab.map(r(c,"script"),n),f=e.length;j>i;i++)g=c,i!==l&&(g=ab.clone(g,!0,!0),f&&ab.merge(e,r(g,"script"))),b.call(this[i],g,i);if(f)for(h=e[e.length-1].ownerDocument,ab.map(e,o),i=0;f>i;i++)g=e[i],Jb.test(g.type||"")&&!rb.access(g,"globalEval")&&ab.contains(h,g)&&(g.src?ab._evalUrl&&ab._evalUrl(g.src):ab.globalEval(g.textContent.replace(Lb,"")))}return this}}),ab.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){ab.fn[a]=function(a){for(var c,d=[],e=ab(a),f=e.length-1,g=0;f>=g;g++)c=g===f?this:this.clone(!0),ab(e[g])[b](c),T.apply(d,c.get());return this.pushStack(d)}});var Nb,Ob={},Pb=/^margin/,Qb=new RegExp("^("+vb+")(?!px)[a-z%]+$","i"),Rb=function(a){return a.ownerDocument.defaultView.getComputedStyle(a,null)};!function(){function b(){h.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%",f.appendChild(g);var b=a.getComputedStyle(h,null);c="1%"!==b.top,d="4px"===b.width,f.removeChild(g)}var 
c,d,e="padding:0;margin:0;border:0;display:block;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box",f=$.documentElement,g=$.createElement("div"),h=$.createElement("div");h.style.backgroundClip="content-box",h.cloneNode(!0).style.backgroundClip="",Z.clearCloneStyle="content-box"===h.style.backgroundClip,g.style.cssText="border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px",g.appendChild(h),a.getComputedStyle&&ab.extend(Z,{pixelPosition:function(){return b(),c},boxSizingReliable:function(){return null==d&&b(),d},reliableMarginRight:function(){var b,c=h.appendChild($.createElement("div"));return c.style.cssText=h.style.cssText=e,c.style.marginRight=c.style.width="0",h.style.width="1px",f.appendChild(g),b=!parseFloat(a.getComputedStyle(c,null).marginRight),f.removeChild(g),h.innerHTML="",b}})}(),ab.swap=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e};var Sb=/^(none|table(?!-c[ea]).+)/,Tb=new RegExp("^("+vb+")(.*)$","i"),Ub=new RegExp("^([+-])=("+vb+")","i"),Vb={position:"absolute",visibility:"hidden",display:"block"},Wb={letterSpacing:0,fontWeight:400},Xb=["Webkit","O","Moz","ms"];ab.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=v(a,"opacity");return""===c?"1":c}}}},cssNumber:{columnCount:!0,fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=ab.camelCase(b),i=a.style;return b=ab.cssProps[h]||(ab.cssProps[h]=x(i,h)),g=ab.cssHooks[b]||ab.cssHooks[h],void 0===c?g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b]:(f=typeof c,"string"===f&&(e=Ub.exec(c))&&(c=(e[1]+1)*e[2]+parseFloat(ab.css(a,b)),f="number"),null!=c&&c===c&&("number"!==f||ab.cssNumber[h]||(c+="px"),Z.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),g&&"set"in g&&void 
0===(c=g.set(a,c,d))||(i[b]="",i[b]=c)),void 0)}},css:function(a,b,c,d){var e,f,g,h=ab.camelCase(b);return b=ab.cssProps[h]||(ab.cssProps[h]=x(a.style,h)),g=ab.cssHooks[b]||ab.cssHooks[h],g&&"get"in g&&(e=g.get(a,!0,c)),void 0===e&&(e=v(a,b,d)),"normal"===e&&b in Wb&&(e=Wb[b]),""===c||c?(f=parseFloat(e),c===!0||ab.isNumeric(f)?f||0:e):e}}),ab.each(["height","width"],function(a,b){ab.cssHooks[b]={get:function(a,c,d){return c?0===a.offsetWidth&&Sb.test(ab.css(a,"display"))?ab.swap(a,Vb,function(){return A(a,b,d)}):A(a,b,d):void 0},set:function(a,c,d){var e=d&&Rb(a);return y(a,c,d?z(a,b,d,"border-box"===ab.css(a,"boxSizing",!1,e),e):0)}}}),ab.cssHooks.marginRight=w(Z.reliableMarginRight,function(a,b){return b?ab.swap(a,{display:"inline-block"},v,[a,"marginRight"]):void 0}),ab.each({margin:"",padding:"",border:"Width"},function(a,b){ab.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+wb[d]+b]=f[d]||f[d-2]||f[0];return e}},Pb.test(a)||(ab.cssHooks[a+b].set=y)}),ab.fn.extend({css:function(a,b){return qb(this,function(a,b,c){var d,e,f={},g=0;if(ab.isArray(b)){for(d=Rb(a),e=b.length;e>g;g++)f[b[g]]=ab.css(a,b[g],!1,d);return f}return void 0!==c?ab.style(a,b,c):ab.css(a,b)},a,b,arguments.length>1)},show:function(){return B(this,!0)},hide:function(){return B(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){xb(this)?ab(this).show():ab(this).hide()})}}),ab.Tween=C,C.prototype={constructor:C,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(ab.cssNumber[c]?"":"px")},cur:function(){var a=C.propHooks[this.prop];return a&&a.get?a.get(this):C.propHooks._default.get(this)},run:function(a){var b,c=C.propHooks[this.prop];return 
this.pos=b=this.options.duration?ab.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):C.propHooks._default.set(this),this}},C.prototype.init.prototype=C.prototype,C.propHooks={_default:{get:function(a){var b;return null==a.elem[a.prop]||a.elem.style&&null!=a.elem.style[a.prop]?(b=ab.css(a.elem,a.prop,""),b&&"auto"!==b?b:0):a.elem[a.prop]},set:function(a){ab.fx.step[a.prop]?ab.fx.step[a.prop](a):a.elem.style&&(null!=a.elem.style[ab.cssProps[a.prop]]||ab.cssHooks[a.prop])?ab.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},C.propHooks.scrollTop=C.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},ab.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},ab.fx=C.prototype.init,ab.fx.step={};var Yb,Zb,$b=/^(?:toggle|show|hide)$/,_b=new RegExp("^(?:([+-])=|)("+vb+")([a-z%]*)$","i"),ac=/queueHooks$/,bc=[G],cc={"*":[function(a,b){var c=this.createTween(a,b),d=c.cur(),e=_b.exec(b),f=e&&e[3]||(ab.cssNumber[a]?"":"px"),g=(ab.cssNumber[a]||"px"!==f&&+d)&&_b.exec(ab.css(c.elem,a)),h=1,i=20;if(g&&g[3]!==f){f=f||g[3],e=e||[],g=+d||1;do h=h||".5",g/=h,ab.style(c.elem,a,g+f);while(h!==(h=c.cur()/d)&&1!==h&&--i)}return e&&(g=c.start=+g||+d||0,c.unit=f,c.end=e[1]?g+(e[1]+1)*e[2]:+e[2]),c}]};ab.Animation=ab.extend(I,{tweener:function(a,b){ab.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");for(var c,d=0,e=a.length;e>d;d++)c=a[d],cc[c]=cc[c]||[],cc[c].unshift(b)},prefilter:function(a,b){b?bc.unshift(a):bc.push(a)}}),ab.speed=function(a,b,c){var d=a&&"object"==typeof a?ab.extend({},a):{complete:c||!c&&b||ab.isFunction(a)&&a,duration:a,easing:c&&b||b&&!ab.isFunction(b)&&b};return d.duration=ab.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in 
ab.fx.speeds?ab.fx.speeds[d.duration]:ab.fx.speeds._default,(null==d.queue||d.queue===!0)&&(d.queue="fx"),d.old=d.complete,d.complete=function(){ab.isFunction(d.old)&&d.old.call(this),d.queue&&ab.dequeue(this,d.queue)},d},ab.fn.extend({fadeTo:function(a,b,c,d){return this.filter(xb).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=ab.isEmptyObject(a),f=ab.speed(b,c,d),g=function(){var b=I(this,ab.extend({},a),f);(e||rb.get(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=ab.timers,g=rb.get(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&ac.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));(b||!c)&&ab.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=rb.get(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=ab.timers,g=d?d.length:0;for(c.finish=!0,ab.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),ab.each(["toggle","show","hide"],function(a,b){var c=ab.fn[b];ab.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(E(b,!0),a,d,e)}}),ab.each({slideDown:E("show"),slideUp:E("hide"),slideToggle:E("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){ab.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),ab.timers=[],ab.fx.tick=function(){var a,b=0,c=ab.timers;for(Yb=ab.now();b<c.length;b++)a=c[b],a()||c[b]!==a||c.splice(b--,1);c.length||ab.fx.stop(),Yb=void 
0},ab.fx.timer=function(a){ab.timers.push(a),a()?ab.fx.start():ab.timers.pop()},ab.fx.interval=13,ab.fx.start=function(){Zb||(Zb=setInterval(ab.fx.tick,ab.fx.interval))},ab.fx.stop=function(){clearInterval(Zb),Zb=null},ab.fx.speeds={slow:600,fast:200,_default:400},ab.fn.delay=function(a,b){return a=ab.fx?ab.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},function(){var a=$.createElement("input"),b=$.createElement("select"),c=b.appendChild($.createElement("option"));a.type="checkbox",Z.checkOn=""!==a.value,Z.optSelected=c.selected,b.disabled=!0,Z.optDisabled=!c.disabled,a=$.createElement("input"),a.value="t",a.type="radio",Z.radioValue="t"===a.value}();var dc,ec,fc=ab.expr.attrHandle;ab.fn.extend({attr:function(a,b){return qb(this,ab.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){ab.removeAttr(this,a)})}}),ab.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(a&&3!==f&&8!==f&&2!==f)return typeof a.getAttribute===zb?ab.prop(a,b,c):(1===f&&ab.isXMLDoc(a)||(b=b.toLowerCase(),d=ab.attrHooks[b]||(ab.expr.match.bool.test(b)?ec:dc)),void 0===c?d&&"get"in d&&null!==(e=d.get(a,b))?e:(e=ab.find.attr(a,b),null==e?void 0:e):null!==c?d&&"set"in d&&void 0!==(e=d.set(a,c,b))?e:(a.setAttribute(b,c+""),c):void ab.removeAttr(a,b))},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(nb);if(f&&1===a.nodeType)for(;c=f[e++];)d=ab.propFix[c]||c,ab.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)},attrHooks:{type:{set:function(a,b){if(!Z.radioValue&&"radio"===b&&ab.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),ec={set:function(a,b,c){return b===!1?ab.removeAttr(a,c):a.setAttribute(c,c),c}},ab.each(ab.expr.match.bool.source.match(/\w+/g),function(a,b){var c=fc[b]||ab.find.attr;fc[b]=function(a,b,d){var e,f;return d||(f=fc[b],fc[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,fc[b]=f),e}});var 
gc=/^(?:input|select|textarea|button)$/i;ab.fn.extend({prop:function(a,b){return qb(this,ab.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[ab.propFix[a]||a]})}}),ab.extend({propFix:{"for":"htmlFor","class":"className"},prop:function(a,b,c){var d,e,f,g=a.nodeType;if(a&&3!==g&&8!==g&&2!==g)return f=1!==g||!ab.isXMLDoc(a),f&&(b=ab.propFix[b]||b,e=ab.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){return a.hasAttribute("tabindex")||gc.test(a.nodeName)||a.href?a.tabIndex:-1}}}}),Z.optSelected||(ab.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null}}),ab.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){ab.propFix[this.toLowerCase()]=this});var hc=/[\t\r\n\f]/g;ab.fn.extend({addClass:function(a){var b,c,d,e,f,g,h="string"==typeof a&&a,i=0,j=this.length;if(ab.isFunction(a))return this.each(function(b){ab(this).addClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(nb)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(hc," "):" ")){for(f=0;e=b[f++];)d.indexOf(" "+e+" ")<0&&(d+=e+" ");g=ab.trim(d),c.className!==g&&(c.className=g)}return this},removeClass:function(a){var b,c,d,e,f,g,h=0===arguments.length||"string"==typeof a&&a,i=0,j=this.length;if(ab.isFunction(a))return this.each(function(b){ab(this).removeClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(nb)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(hc," "):"")){for(f=0;e=b[f++];)for(;d.indexOf(" "+e+" ")>=0;)d=d.replace(" "+e+" "," ");g=a?ab.trim(d):"",c.className!==g&&(c.className=g)}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof 
b&&"string"===c?b?this.addClass(a):this.removeClass(a):this.each(ab.isFunction(a)?function(c){ab(this).toggleClass(a.call(this,c,this.className,b),b)}:function(){if("string"===c)for(var b,d=0,e=ab(this),f=a.match(nb)||[];b=f[d++];)e.hasClass(b)?e.removeClass(b):e.addClass(b);else(c===zb||"boolean"===c)&&(this.className&&rb.set(this,"__className__",this.className),this.className=this.className||a===!1?"":rb.get(this,"__className__")||"")})},hasClass:function(a){for(var b=" "+a+" ",c=0,d=this.length;d>c;c++)if(1===this[c].nodeType&&(" "+this[c].className+" ").replace(hc," ").indexOf(b)>=0)return!0;return!1}});var ic=/\r/g;ab.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=ab.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,ab(this).val()):a,null==e?e="":"number"==typeof e?e+="":ab.isArray(e)&&(e=ab.map(e,function(a){return null==a?"":a+""})),b=ab.valHooks[this.type]||ab.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=ab.valHooks[e.type]||ab.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(ic,""):null==c?"":c)}}}),ab.extend({valHooks:{select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],!(!c.selected&&i!==e||(Z.optDisabled?c.disabled:null!==c.getAttribute("disabled"))||c.parentNode.disabled&&ab.nodeName(c.parentNode,"optgroup"))){if(b=ab(c).val(),f)return b;g.push(b)}return g},set:function(a,b){for(var c,d,e=a.options,f=ab.makeArray(b),g=e.length;g--;)d=e[g],(d.selected=ab.inArray(ab(d).val(),f)>=0)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),ab.each(["radio","checkbox"],function(){ab.valHooks[this]={set:function(a,b){return ab.isArray(b)?a.checked=ab.inArray(ab(a).val(),b)>=0:void 0}},Z.checkOn||(ab.valHooks[this].get=function(a){return 
null===a.getAttribute("value")?"on":a.value})}),ab.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){ab.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),ab.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}});var jc=ab.now(),kc=/\?/;ab.parseJSON=function(a){return JSON.parse(a+"")},ab.parseXML=function(a){var b,c;if(!a||"string"!=typeof a)return null;try{c=new DOMParser,b=c.parseFromString(a,"text/xml")}catch(d){b=void 0}return(!b||b.getElementsByTagName("parsererror").length)&&ab.error("Invalid XML: "+a),b};var lc,mc,nc=/#.*$/,oc=/([?&])_=[^&]*/,pc=/^(.*?):[ \t]*([^\r\n]*)$/gm,qc=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,rc=/^(?:GET|HEAD)$/,sc=/^\/\//,tc=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,uc={},vc={},wc="*/".concat("*");try{mc=location.href}catch(xc){mc=$.createElement("a"),mc.href="",mc=mc.href}lc=tc.exec(mc.toLowerCase())||[],ab.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:mc,type:"GET",isLocal:qc.test(lc[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":wc,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":ab.parseJSON,"text xml":ab.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return 
b?L(L(a,ab.ajaxSettings),b):L(ab.ajaxSettings,a)},ajaxPrefilter:J(uc),ajaxTransport:J(vc),ajax:function(a,b){function c(a,b,c,g){var i,k,r,s,u,w=b;2!==t&&(t=2,h&&clearTimeout(h),d=void 0,f=g||"",v.readyState=a>0?4:0,i=a>=200&&300>a||304===a,c&&(s=M(l,v,c)),s=N(l,s,v,i),i?(l.ifModified&&(u=v.getResponseHeader("Last-Modified"),u&&(ab.lastModified[e]=u),u=v.getResponseHeader("etag"),u&&(ab.etag[e]=u)),204===a||"HEAD"===l.type?w="nocontent":304===a?w="notmodified":(w=s.state,k=s.data,r=s.error,i=!r)):(r=w,(a||!w)&&(w="error",0>a&&(a=0))),v.status=a,v.statusText=(b||w)+"",i?o.resolveWith(m,[k,w,v]):o.rejectWith(m,[v,w,r]),v.statusCode(q),q=void 0,j&&n.trigger(i?"ajaxSuccess":"ajaxError",[v,l,i?k:r]),p.fireWith(m,[v,w]),j&&(n.trigger("ajaxComplete",[v,l]),--ab.active||ab.event.trigger("ajaxStop")))}"object"==typeof a&&(b=a,a=void 0),b=b||{};var d,e,f,g,h,i,j,k,l=ab.ajaxSetup({},b),m=l.context||l,n=l.context&&(m.nodeType||m.jquery)?ab(m):ab.event,o=ab.Deferred(),p=ab.Callbacks("once memory"),q=l.statusCode||{},r={},s={},t=0,u="canceled",v={readyState:0,getResponseHeader:function(a){var b;if(2===t){if(!g)for(g={};b=pc.exec(f);)g[b[1].toLowerCase()]=b[2];b=g[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===t?f:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return t||(a=s[c]=s[c]||a,r[a]=b),this},overrideMimeType:function(a){return t||(l.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>t)for(b in a)q[b]=[q[b],a[b]];else v.always(a[v.status]);return this},abort:function(a){var b=a||u;return 
d&&d.abort(b),c(0,b),this}};if(o.promise(v).complete=p.add,v.success=v.done,v.error=v.fail,l.url=((a||l.url||mc)+"").replace(nc,"").replace(sc,lc[1]+"//"),l.type=b.method||b.type||l.method||l.type,l.dataTypes=ab.trim(l.dataType||"*").toLowerCase().match(nb)||[""],null==l.crossDomain&&(i=tc.exec(l.url.toLowerCase()),l.crossDomain=!(!i||i[1]===lc[1]&&i[2]===lc[2]&&(i[3]||("http:"===i[1]?"80":"443"))===(lc[3]||("http:"===lc[1]?"80":"443")))),l.data&&l.processData&&"string"!=typeof l.data&&(l.data=ab.param(l.data,l.traditional)),K(uc,l,b,v),2===t)return v;j=l.global,j&&0===ab.active++&&ab.event.trigger("ajaxStart"),l.type=l.type.toUpperCase(),l.hasContent=!rc.test(l.type),e=l.url,l.hasContent||(l.data&&(e=l.url+=(kc.test(e)?"&":"?")+l.data,delete l.data),l.cache===!1&&(l.url=oc.test(e)?e.replace(oc,"$1_="+jc++):e+(kc.test(e)?"&":"?")+"_="+jc++)),l.ifModified&&(ab.lastModified[e]&&v.setRequestHeader("If-Modified-Since",ab.lastModified[e]),ab.etag[e]&&v.setRequestHeader("If-None-Match",ab.etag[e])),(l.data&&l.hasContent&&l.contentType!==!1||b.contentType)&&v.setRequestHeader("Content-Type",l.contentType),v.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+("*"!==l.dataTypes[0]?", "+wc+"; q=0.01":""):l.accepts["*"]);for(k in l.headers)v.setRequestHeader(k,l.headers[k]);if(l.beforeSend&&(l.beforeSend.call(m,v,l)===!1||2===t))return v.abort();u="abort";for(k in{success:1,error:1,complete:1})v[k](l[k]);if(d=K(vc,l,b,v)){v.readyState=1,j&&n.trigger("ajaxSend",[v,l]),l.async&&l.timeout>0&&(h=setTimeout(function(){v.abort("timeout")},l.timeout));try{t=1,d.send(r,c)}catch(w){if(!(2>t))throw w;c(-1,w)}}else c(-1,"No Transport");return v},getJSON:function(a,b,c){return ab.get(a,b,c,"json")},getScript:function(a,b){return ab.get(a,void 0,b,"script")}}),ab.each(["get","post"],function(a,b){ab[b]=function(a,c,d,e){return ab.isFunction(c)&&(e=e||d,d=c,c=void 
0),ab.ajax({url:a,type:b,dataType:e,data:c,success:d})}}),ab.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){ab.fn[b]=function(a){return this.on(b,a)}}),ab._evalUrl=function(a){return ab.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},ab.fn.extend({wrapAll:function(a){var b;return ab.isFunction(a)?this.each(function(b){ab(this).wrapAll(a.call(this,b))}):(this[0]&&(b=ab(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){for(var a=this;a.firstElementChild;)a=a.firstElementChild;return a}).append(this)),this)},wrapInner:function(a){return this.each(ab.isFunction(a)?function(b){ab(this).wrapInner(a.call(this,b))}:function(){var b=ab(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=ab.isFunction(a);return this.each(function(c){ab(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){ab.nodeName(this,"body")||ab(this).replaceWith(this.childNodes)}).end()}}),ab.expr.filters.hidden=function(a){return a.offsetWidth<=0&&a.offsetHeight<=0},ab.expr.filters.visible=function(a){return!ab.expr.filters.hidden(a)};var yc=/%20/g,zc=/\[\]$/,Ac=/\r?\n/g,Bc=/^(?:submit|button|image|reset|file)$/i,Cc=/^(?:input|select|textarea|keygen)/i;ab.param=function(a,b){var c,d=[],e=function(a,b){b=ab.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b) };if(void 0===b&&(b=ab.ajaxSettings&&ab.ajaxSettings.traditional),ab.isArray(a)||a.jquery&&!ab.isPlainObject(a))ab.each(a,function(){e(this.name,this.value)});else for(c in a)O(c,a[c],b,e);return d.join("&").replace(yc,"+")},ab.fn.extend({serialize:function(){return ab.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=ab.prop(this,"elements");return a?ab.makeArray(a):this}).filter(function(){var a=this.type;return 
this.name&&!ab(this).is(":disabled")&&Cc.test(this.nodeName)&&!Bc.test(a)&&(this.checked||!yb.test(a))}).map(function(a,b){var c=ab(this).val();return null==c?null:ab.isArray(c)?ab.map(c,function(a){return{name:b.name,value:a.replace(Ac,"\r\n")}}):{name:b.name,value:c.replace(Ac,"\r\n")}}).get()}}),ab.ajaxSettings.xhr=function(){try{return new XMLHttpRequest}catch(a){}};var Dc=0,Ec={},Fc={0:200,1223:204},Gc=ab.ajaxSettings.xhr();a.ActiveXObject&&ab(a).on("unload",function(){for(var a in Ec)Ec[a]()}),Z.cors=!!Gc&&"withCredentials"in Gc,Z.ajax=Gc=!!Gc,ab.ajaxTransport(function(a){var b;return Z.cors||Gc&&!a.crossDomain?{send:function(c,d){var e,f=a.xhr(),g=++Dc;if(f.open(a.type,a.url,a.async,a.username,a.password),a.xhrFields)for(e in a.xhrFields)f[e]=a.xhrFields[e];a.mimeType&&f.overrideMimeType&&f.overrideMimeType(a.mimeType),a.crossDomain||c["X-Requested-With"]||(c["X-Requested-With"]="XMLHttpRequest");for(e in c)f.setRequestHeader(e,c[e]);b=function(a){return function(){b&&(delete Ec[g],b=f.onload=f.onerror=null,"abort"===a?f.abort():"error"===a?d(f.status,f.statusText):d(Fc[f.status]||f.status,f.statusText,"string"==typeof f.responseText?{text:f.responseText}:void 0,f.getAllResponseHeaders()))}},f.onload=b(),f.onerror=b("error"),b=Ec[g]=b("abort"),f.send(a.hasContent&&a.data||null)},abort:function(){b&&b()}}:void 0}),ab.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(a){return ab.globalEval(a),a}}}),ab.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),ab.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(d,e){b=ab("<script>").prop({async:!0,charset:a.scriptCharset,src:a.url}).on("load error",c=function(a){b.remove(),c=null,a&&e("error"===a.type?404:200,a.type)}),$.head.appendChild(b[0])},abort:function(){c&&c()}}}});var 
Hc=[],Ic=/(=)\?(?=&|$)|\?\?/;ab.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=Hc.pop()||ab.expando+"_"+jc++;return this[a]=!0,a}}),ab.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(Ic.test(b.url)?"url":"string"==typeof b.data&&!(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ic.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=ab.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(Ic,"$1"+e):b.jsonp!==!1&&(b.url+=(kc.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||ab.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,Hc.push(e)),g&&ab.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),ab.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||$;var d=gb.exec(a),e=!c&&[];return d?[b.createElement(d[1])]:(d=ab.buildFragment([a],b,e),e&&e.length&&ab(e).remove(),ab.merge([],d.childNodes))};var Jc=ab.fn.load;ab.fn.load=function(a,b,c){if("string"!=typeof a&&Jc)return Jc.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>=0&&(d=a.slice(h),a=a.slice(0,h)),ab.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&ab.ajax({url:a,type:e,dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?ab("<div>").append(ab.parseHTML(a)).find(d):a)}).complete(c&&function(a,b){g.each(c,f||[a.responseText,b,a])}),this},ab.expr.filters.animated=function(a){return ab.grep(ab.timers,function(b){return a===b.elem}).length};var Kc=a.document.documentElement;ab.offset={setOffset:function(a,b,c){var 
d,e,f,g,h,i,j,k=ab.css(a,"position"),l=ab(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=ab.css(a,"top"),i=ab.css(a,"left"),j=("absolute"===k||"fixed"===k)&&(f+i).indexOf("auto")>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),ab.isFunction(b)&&(b=b.call(a,c,h)),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},ab.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){ab.offset.setOffset(this,a,b)});var b,c,d=this[0],e={top:0,left:0},f=d&&d.ownerDocument;if(f)return b=f.documentElement,ab.contains(b,d)?(typeof d.getBoundingClientRect!==zb&&(e=d.getBoundingClientRect()),c=P(f),{top:e.top+c.pageYOffset-b.clientTop,left:e.left+c.pageXOffset-b.clientLeft}):e},position:function(){if(this[0]){var a,b,c=this[0],d={top:0,left:0};return"fixed"===ab.css(c,"position")?b=c.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),ab.nodeName(a[0],"html")||(d=a.offset()),d.top+=ab.css(a[0],"borderTopWidth",!0),d.left+=ab.css(a[0],"borderLeftWidth",!0)),{top:b.top-d.top-ab.css(c,"marginTop",!0),left:b.left-d.left-ab.css(c,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||Kc;a&&!ab.nodeName(a,"html")&&"static"===ab.css(a,"position");)a=a.offsetParent;return a||Kc})}}),ab.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(b,c){var d="pageYOffset"===c;ab.fn[b]=function(e){return qb(this,function(b,e,f){var g=P(b);return void 0===f?g?g[c]:b[e]:void(g?g.scrollTo(d?a.pageXOffset:f,d?f:a.pageYOffset):b[e]=f)},b,e,arguments.length,null)}}),ab.each(["top","left"],function(a,b){ab.cssHooks[b]=w(Z.pixelPosition,function(a,c){return c?(c=v(a,b),Qb.test(c)?ab(a).position()[b]+"px":c):void 0})}),ab.each({Height:"height",Width:"width"},function(a,b){ab.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){ab.fn[d]=function(d,e){var 
f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return qb(this,function(b,c,d){var e;return ab.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?ab.css(b,c,g):ab.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),ab.fn.size=function(){return this.length},ab.fn.andSelf=ab.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return ab});var Lc=a.jQuery,Mc=a.$;return ab.noConflict=function(b){return a.$===ab&&(a.$=Mc),b&&a.jQuery===ab&&(a.jQuery=Lc),ab},typeof b===zb&&(a.jQuery=a.$=ab),ab});
Annalist
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/data/static/foundation/js/vendor/jquery.js
jquery.js